Compare commits

No commits in common. "master" and "v0.13.35" have entirely different histories.

88 changed files with 2034 additions and 3080 deletions

.gitignore vendored

@@ -38,5 +38,4 @@ release
 .gofuzz
 *-fuzz.zip
 *.test
-.firebase

@@ -12,7 +12,8 @@ FROM golang:1.14-alpine as go-build
 RUN apk update && \
     apk add --no-cache make && \
     apk add --no-cache git && \
-    apk add --no-cache jq
+    apk add --no-cache jq && \
+    apk add --no-cache upx=3.95-r2

 RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc

@@ -28,9 +29,9 @@ COPY --from=react-build /web/build/ ./internal/serv/web/build
 RUN go mod vendor
 RUN make build

-# RUN echo "Compressing binary, will take a bit of time..." && \
-#   upx --ultra-brute -qq super-graph && \
-#   upx -t super-graph
+RUN echo "Compressing binary, will take a bit of time..." && \
+  upx --ultra-brute -qq super-graph && \
+  upx -t super-graph

@@ -52,9 +52,7 @@ func main() {
 		}
 	}`

-	ctx = context.WithValue(ctx, core.UserIDKey, 1)
-	res, err := sg.GraphQL(ctx, query, nil)
+	res, err := sg.GraphQL(context.Background(), query, nil)
 	if err != nil {
 		log.Fatal(err)
 	}

@@ -92,7 +90,7 @@ This compiler is what sits at the heart of Super Graph, with layers of useful fu
 - Fuzz tested for security
 - Database migrations tool
 - Database seeding tool
-- Works with Postgres and Yugabyte DB
+- Works with Postgres and YugabyteDB
 - OpenCensus Support: Zipkin, Prometheus, X-Ray, Stackdriver

 ## Documentation
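The README hunk above is the visible API change: master threads the user id through the request context, while v0.13.35 calls GraphQL with a bare context.Background(). A minimal sketch of the master-side flow, assuming a *sql.DB and query string set up as in the README's main() (the config path is illustrative):

    package main

    import (
        "context"
        "database/sql"
        "fmt"
        "log"

        "github.com/dosco/super-graph/core"
    )

    func run(db *sql.DB, query string) {
        conf, err := core.ReadInConfig("./config/dev.yml") // illustrative path
        if err != nil {
            log.Fatal(err)
        }

        sg, err := core.NewSuperGraph(conf, db)
        if err != nil {
            log.Fatal(err)
        }

        // master attaches the authenticated user id to the context so the
        // compiled SQL can reference it as the $user_id variable.
        ctx := context.WithValue(context.Background(), core.UserIDKey, 1)

        res, err := sg.GraphQL(ctx, query, nil)
        if err != nil {
            log.Fatal(err)
        }
        fmt.Println(string(res.Data))
    }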

@@ -32,9 +32,7 @@
 		}
 	}`

-	ctx = context.WithValue(ctx, core.UserIDKey, 1)
-	res, err := sg.GraphQL(ctx, query, nil)
+	res, err := sg.GraphQL(context.Background(), query, nil)
 	if err != nil {
 		log.Fatal(err)
 	}

@@ -49,7 +47,6 @@ import (
 	"crypto/sha256"
 	"database/sql"
 	"encoding/json"
-	"hash/maphash"
 	_log "log"
 	"os"

@@ -84,12 +81,12 @@ type SuperGraph struct {
 	schema      *psql.DBSchema
 	allowList   *allow.List
 	encKey      [32]byte
-	hashSeed    maphash.Seed
-	queries     map[uint64]*query
+	prepared    map[string]*preparedItem
 	roles       map[string]*Role
 	getRole     *sql.Stmt
-	rmap        map[uint64]resolvFn
+	rmap        map[uint64]*resolvFn
 	abacEnabled bool
+	anonExists  bool
 	qc          *qcode.Compiler
 	pc          *psql.Compiler
 	ge          *graphql.Engine

@@ -108,11 +105,10 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
 	}

 	sg := &SuperGraph{
 		conf:   conf,
 		db:     db,
 		dbinfo: dbinfo,
 		log:    _log.New(os.Stdout, "", 0),
-		hashSeed: maphash.MakeSeed(),
 	}

 	if err := sg.initConfig(); err != nil {

@@ -139,7 +135,7 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
 		return nil, err
 	}

-	if conf.SecretKey != "" {
+	if len(conf.SecretKey) != 0 {
 		sk := sha256.Sum256([]byte(conf.SecretKey))
 		conf.SecretKey = ""
 		sg.encKey = sk

@@ -1,19 +1,72 @@
 package core

 import (
+	"bytes"
 	"encoding/json"
 	"fmt"
+	"io"

-	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/jsn"
 )

-// argList function is used to create a list of arguments to pass
-// to a prepared statement.
-func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
-	params := md.Params()
-	vars := make([]interface{}, len(params))
+// argMap function is used to string replace variables with values by
+// the fasttemplate code
+func (c *scontext) argMap() func(w io.Writer, tag string) (int, error) {
+	return func(w io.Writer, tag string) (int, error) {
+		switch tag {
+		case "user_id_provider":
+			if v := c.Value(UserIDProviderKey); v != nil {
+				return io.WriteString(w, v.(string))
+			}
+			return 0, argErr("user_id_provider")
+
+		case "user_id":
+			if v := c.Value(UserIDKey); v != nil {
+				return io.WriteString(w, v.(string))
+			}
+			return 0, argErr("user_id")
+
+		case "user_role":
+			if v := c.Value(UserRoleKey); v != nil {
+				return io.WriteString(w, v.(string))
+			}
+			return 0, argErr("user_role")
+		}
+
+		fields := jsn.Get(c.vars, [][]byte{[]byte(tag)})
+		if len(fields) == 0 {
+			return 0, argErr(tag)
+		}
+		v := fields[0].Value
+
+		// Open and close quotes
+		if len(v) >= 2 && v[0] == '"' && v[len(v)-1] == '"' {
+			fields[0].Value = v[1 : len(v)-1]
+		}
+
+		if tag == "cursor" {
+			if bytes.EqualFold(v, []byte("null")) {
+				return io.WriteString(w, ``)
+			}
+			v1, err := c.sg.decrypt(string(fields[0].Value))
+			if err != nil {
+				return 0, err
+			}
+			return w.Write(v1)
+		}
+
+		return w.Write(escSQuote(fields[0].Value))
+	}
+}
+
+// argList function is used to create a list of arguments to pass
+// to a prepared statement. FYI no escaping of single quotes is
+// needed here
+func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
+	vars := make([]interface{}, len(args))

 	var fields map[string]json.RawMessage
 	var err error

@@ -26,30 +79,31 @@ func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
 		}
 	}

-	for i, p := range params {
-		switch p.Name {
-		case "user_id":
+	for i := range args {
+		av := args[i]
+
+		switch {
+		case bytes.Equal(av, []byte("user_id")):
 			if v := c.Value(UserIDKey); v != nil {
 				vars[i] = v.(string)
 			} else {
-				return nil, argErr(p)
+				return nil, argErr("user_id")
 			}

-		case "user_id_provider":
+		case bytes.Equal(av, []byte("user_id_provider")):
 			if v := c.Value(UserIDProviderKey); v != nil {
 				vars[i] = v.(string)
 			} else {
-				return nil, argErr(p)
+				return nil, argErr("user_id_provider")
 			}

-		case "user_role":
+		case bytes.Equal(av, []byte("user_role")):
 			if v := c.Value(UserRoleKey); v != nil {
 				vars[i] = v.(string)
 			} else {
-				return nil, argErr(p)
+				return nil, argErr("user_role")
 			}

-		case "cursor":
+		case bytes.Equal(av, []byte("cursor")):
 			if v, ok := fields["cursor"]; ok && v[0] == '"' {
 				v1, err := c.sg.decrypt(string(v[1 : len(v)-1]))
 				if err != nil {

@@ -57,33 +111,25 @@ func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
 				}
 				vars[i] = v1
 			} else {
-				return nil, argErr(p)
+				return nil, argErr("cursor")
 			}

 		default:
-			if v, ok := fields[p.Name]; ok {
-				switch {
-				case p.IsArray && v[0] != '[':
-					return nil, fmt.Errorf("variable '%s' should be an array of type '%s'", p.Name, p.Type)
-
-				case p.Type == "json" && v[0] != '[' && v[0] != '{':
-					return nil, fmt.Errorf("variable '%s' should be an array or object", p.Name)
-				}
-
+			if v, ok := fields[string(av)]; ok {
 				switch v[0] {
 				case '[', '{':
 					vars[i] = v

 				default:
 					var val interface{}
 					if err := json.Unmarshal(v, &val); err != nil {
 						return nil, err
 					}
 					vars[i] = val
 				}

 			} else {
-				return nil, argErr(p)
+				return nil, argErr(string(av))
 			}
 		}
 	}

@@ -91,6 +137,32 @@ func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
 	return vars, nil
 }

-func argErr(p psql.Param) error {
-	return fmt.Errorf("required variable '%s' of type '%s' must be set", p.Name, p.Type)
+//
+func escSQuote(b []byte) []byte {
+	var buf *bytes.Buffer
+	s := 0
+	for i := range b {
+		if b[i] == '\'' {
+			if buf == nil {
+				buf = &bytes.Buffer{}
+			}
+			buf.Write(b[s:i])
+			buf.WriteString(`''`)
+			s = i + 1
+		}
+	}
+	if buf == nil {
+		return b
+	}
+
+	l := len(b)
+	if s < (l - 1) {
+		buf.Write(b[s:l])
+	}
+
+	return buf.Bytes()
+}
+
+func argErr(name string) error {
+	return fmt.Errorf("query requires variable '%s' to be set", name)
 }
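On the v0.13.35 side the compiled SQL carries {{tag}} placeholders (the openVar/closeVar constants) and argMap is handed to fasttemplate as the tag callback. A self-contained sketch of that mechanism, with an invented template string:

    package main

    import (
        "bytes"
        "fmt"
        "io"

        "github.com/valyala/fasttemplate"
    )

    func main() {
        sql := `SELECT * FROM users WHERE id = '{{user_id}}' LIMIT 1`

        t := fasttemplate.New(sql, "{{", "}}")
        buf := &bytes.Buffer{}

        // Plays the role of scontext.argMap: write a value for each tag.
        _, err := t.ExecuteFunc(buf, func(w io.Writer, tag string) (int, error) {
            if tag == "user_id" {
                return io.WriteString(w, "42")
            }
            return 0, fmt.Errorf("query requires variable '%s' to be set", tag)
        })
        if err != nil {
            panic(err)
        }

        fmt.Println(buf.String()) // SELECT * FROM users WHERE id = '42' LIMIT 1
    }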

core/args_test.go Normal file

@@ -0,0 +1,13 @@
+package core
+
+import "testing"
+
+func TestEscQuote(t *testing.T) {
+	val := "That's the worst, don''t be calling me's again"
+	exp := "That''s the worst, don''''t be calling me''s again"
+	ret := escSQuote([]byte(val))
+
+	if exp != string(ret) {
+		t.Errorf("escSQuote failed: %s", string(ret))
+	}
+}

@@ -12,10 +12,10 @@ import (
 )

 type stmt struct {
 	role *Role
 	qc   *qcode.QCode
-	md   psql.Metadata
+	skipped uint32
 	sql  string
 }

 func (sg *SuperGraph) buildStmt(qt qcode.QType, query, vars []byte, role string) ([]stmt, error) {

@@ -62,11 +62,12 @@ func (sg *SuperGraph) buildRoleStmt(query, vars []byte, role string) ([]stmt, er
 	stmts := []stmt{stmt{role: ro, qc: qc}}
 	w := &bytes.Buffer{}

-	stmts[0].md, err = sg.pc.Compile(w, qc, psql.Variables(vm))
+	skipped, err := sg.pc.Compile(qc, w, psql.Variables(vm))
 	if err != nil {
 		return nil, err
 	}

+	stmts[0].skipped = skipped
 	stmts[0].sql = w.String()

 	return stmts, nil

@@ -82,13 +83,12 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 		}
 	}

-	if sg.conf.RolesQuery == "" {
+	if len(sg.conf.RolesQuery) == 0 {
 		return nil, errors.New("roles_query not defined")
 	}

 	stmts := make([]stmt, 0, len(sg.conf.Roles))
 	w := &bytes.Buffer{}
-	md := psql.Metadata{}

 	for i := 0; i < len(sg.conf.Roles); i++ {
 		role := &sg.conf.Roles[i]

@@ -104,20 +104,19 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 		}

 		stmts = append(stmts, stmt{role: role, qc: qc})
-		s := &stmts[len(stmts)-1]

-		md, err = sg.pc.CompileWithMetadata(w, qc, psql.Variables(vm), md)
+		skipped, err := sg.pc.Compile(qc, w, psql.Variables(vm))
 		if err != nil {
 			return nil, err
 		}

+		s := &stmts[len(stmts)-1]
+		s.skipped = skipped
 		s.sql = w.String()
-		s.md = md

 		w.Reset()
 	}

-	sql, err := sg.renderUserQuery(md, stmts)
+	sql, err := sg.renderUserQuery(stmts)
 	if err != nil {
 		return nil, err
 	}

@@ -127,13 +126,13 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 }

 //nolint: errcheck
-func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, error) {
+func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
 	w := &bytes.Buffer{}

 	io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)

 	for _, s := range stmts {
-		if s.role.Match == "" &&
+		if len(s.role.Match) == 0 &&
 			s.role.Name != "user" && s.role.Name != "anon" {
 			continue
 		}

@@ -145,12 +144,12 @@ func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, e
 	}

 	io.WriteString(w, `END) FROM (SELECT (CASE WHEN EXISTS (`)
-	md.RenderVar(w, sg.conf.RolesQuery)
+	io.WriteString(w, sg.conf.RolesQuery)
 	io.WriteString(w, `) THEN `)

 	io.WriteString(w, `(SELECT (CASE`)
 	for _, s := range stmts {
-		if s.role.Match == "" {
+		if len(s.role.Match) == 0 {
 			continue
 		}
 		io.WriteString(w, ` WHEN `)

@@ -161,7 +160,7 @@ func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, e
 	}

 	io.WriteString(w, ` ELSE 'user' END) FROM (`)
-	md.RenderVar(w, sg.conf.RolesQuery)
+	io.WriteString(w, sg.conf.RolesQuery)
 	io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
 	io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler") AS "_sg_auth_info"(role) LIMIT 1; `)

@@ -30,10 +30,12 @@ type Config struct {
 	// or other database functions
 	SetUserID bool `mapstructure:"set_user_id"`

-	// DefaultBlock ensures that in anonymous mode (role 'anon') all tables
-	// are blocked from queries and mutations. To open access to tables in
-	// anonymous mode they have to be added to the 'anon' role config.
-	DefaultBlock bool `mapstructure:"default_block"`
+	// DefaultAllow reverses the blocked-by-default behaviour for queries in
+	// anonymous mode (anon role). For example, if the table `users` is not
+	// listed under the anon role, access to it is blocked by default for
+	// unauthenticated queries; this setting reverses that behavior.
+	// (!!! Use with caution !!!)
+	DefaultAllow bool `mapstructure:"default_allow"`

 	// Vars is a map of hardcoded variables that can be leveraged in your
 	// queries (eg variable admin_id will be $admin_id in the query)

@@ -55,9 +57,6 @@ type Config struct {
 	// Roles contains all the configuration for all the roles you want to support
 	// `user` and `anon` are two default roles. User role is for when a user ID is
 	// available and Anon when it's not.
-	//
-	// If you're using the RolesQuery config to enable atribute based acess control then
-	// you can add more custom roles.
 	Roles []Role

 	// Inflections is to add additionally singular to plural mappings

@@ -72,7 +71,6 @@ type Config struct {
 type Table struct {
 	Name      string
 	Table     string
-	Type      string
 	Blocklist []string
 	Remotes   []Remote
 	Columns   []Column

@@ -110,12 +108,12 @@ type Role struct {
 // RoleTable struct contains role specific access control values for a database table
 type RoleTable struct {
 	Name     string
-	ReadOnly bool `mapstructure:"read_only"`
+	ReadOnly *bool `mapstructure:"read_only"`

-	Query  *Query
-	Insert *Insert
-	Update *Update
-	Delete *Delete
+	Query  Query
+	Insert Insert
+	Update Update
+	Delete Delete
 }

 // Query struct contains access control values for query operations

@@ -124,7 +122,7 @@ type Query struct {
 	Filters          []string
 	Columns          []string
 	DisableFunctions bool `mapstructure:"disable_functions"`
-	Block            bool
+	Block            *bool
 }

 // Insert struct contains access control values for insert operations

@@ -132,7 +130,7 @@ type Insert struct {
 	Filters []string
 	Columns []string
 	Presets map[string]string
-	Block   bool
+	Block   *bool
 }

 // Insert struct contains access control values for update operations

@@ -140,84 +138,43 @@ type Update struct {
 	Filters []string
 	Columns []string
 	Presets map[string]string
-	Block   bool
+	Block   *bool
 }

 // Delete struct contains access control values for delete operations
 type Delete struct {
 	Filters []string
 	Columns []string
-	Block   bool
+	Block   *bool
 }

-// AddRoleTable function is a helper function to make it easy to add per-table
-// row-level config
-func (c *Config) AddRoleTable(role, table string, conf interface{}) error {
-	var r *Role
-
-	for i := range c.Roles {
-		if strings.EqualFold(c.Roles[i].Name, role) {
-			r = &c.Roles[i]
-			break
-		}
-	}
-
-	if r == nil {
-		nr := Role{Name: role}
-		c.Roles = append(c.Roles, nr)
-		r = &nr
-	}
-
-	var t *RoleTable
-
-	for i := range r.Tables {
-		if strings.EqualFold(r.Tables[i].Name, table) {
-			t = &r.Tables[i]
-			break
-		}
-	}
-
-	if t == nil {
-		nt := RoleTable{Name: table}
-		r.Tables = append(r.Tables, nt)
-		t = &nt
-	}
-
-	switch v := conf.(type) {
-	case Query:
-		t.Query = &v
-	case Insert:
-		t.Insert = &v
-	case Update:
-		t.Update = &v
-	case Delete:
-		t.Delete = &v
-	default:
-		return fmt.Errorf("unsupported object type: %t", v)
-	}
-	return nil
-}
-
 // ReadInConfig function reads in the config file for the environment specified in the GO_ENV
 // environment variable. This is the best way to create a new Super Graph config.
 func ReadInConfig(configFile string) (*Config, error) {
-	cp := path.Dir(configFile)
-	vi := newViper(cp, path.Base(configFile))
+	cpath := path.Dir(configFile)
+	cfile := path.Base(configFile)
+	vi := newViper(cpath, cfile)

 	if err := vi.ReadInConfig(); err != nil {
 		return nil, err
 	}

-	if pcf := vi.GetString("inherits"); pcf != "" {
-		cf := vi.ConfigFileUsed()
-		vi = newViper(cp, pcf)
+	inherits := vi.GetString("inherits")
+
+	if inherits != "" {
+		vi = newViper(cpath, inherits)

 		if err := vi.ReadInConfig(); err != nil {
 			return nil, err
 		}

-		if v := vi.GetString("inherits"); v != "" {
-			return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)", pcf, v)
+		if vi.IsSet("inherits") {
+			return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
+				inherits,
+				vi.GetString("inherits"))
 		}

-		vi.SetConfigFile(cf)
+		vi.SetConfigName(cfile)

 		if err := vi.MergeInConfig(); err != nil {
 			return nil, err

@@ -231,7 +188,7 @@ func ReadInConfig(configFile string) (*Config, error) {
 	}

 	if c.AllowListFile == "" {
-		c.AllowListFile = path.Join(cp, "allow.list")
+		c.AllowListFile = path.Join(cpath, "allow.list")
 	}

 	return c, nil

@@ -245,7 +202,7 @@ func newViper(configPath, configFile string) *viper.Viper {
 	vi.AutomaticEnv()

 	if filepath.Ext(configFile) != "" {
-		vi.SetConfigFile(path.Join(configPath, configFile))
+		vi.SetConfigFile(configFile)
 	} else {
 		vi.SetConfigName(configFile)
 		vi.AddConfigPath(configPath)
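Behind the `bool` to `*bool` switch in the structs above: with a plain bool, an omitted `block:` key decodes to false, so "not set" and "explicitly allowed" are indistinguishable; a pointer lets addRole fall back to a role-derived default when the key is absent. A minimal sketch of that lookup:

    // nil means the key was absent from the config; fall back to the default
    // computed from default_allow / read_only (see addRole further down).
    func blockOrDefault(b *bool, def bool) bool {
        if b != nil {
            return *b
        }
        return def
    }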

@@ -5,6 +5,11 @@ import (
 	"errors"
 )

+const (
+	openVar  = "{{"
+	closeVar = "}}"
+)
+
 var (
 	errNotFound = errors.New("not found in prepared statements")
 )

@@ -1,15 +1,17 @@
 package core

 import (
+	"bytes"
 	"context"
 	"database/sql"
 	"encoding/json"
 	"fmt"
-	"hash/maphash"
 	"time"

 	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/core/internal/qcode"
+	"github.com/valyala/fasttemplate"
 )

 type OpType int

@@ -91,8 +93,7 @@ func (sg *SuperGraph) initCompilers() error {
 	}

 	sg.qc, err = qcode.NewCompiler(qcode.Config{
-		DefaultBlock: sg.conf.DefaultBlock,
-		Blocklist:    sg.conf.Blocklist,
+		Blocklist: sg.conf.Blocklist,
 	})
 	if err != nil {
 		return err

@@ -125,7 +126,7 @@ func (c *scontext) execQuery() ([]byte, error) {
 		return nil, err
 	}

-	if len(data) == 0 || st.md.Skipped() == 0 {
+	if len(data) == 0 || st.skipped == 0 {
 		return data, nil
 	}

@@ -166,44 +167,32 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
 	} else {
 		role = c.role
 	}

 	c.res.role = role

-	h := maphash.Hash{}
-	h.SetSeed(c.sg.hashSeed)
-	id := queryID(&h, c.res.name, role)
-
-	q, ok := c.sg.queries[id]
+	ps, ok := c.sg.prepared[stmtHash(c.res.name, role)]
 	if !ok {
 		return nil, nil, errNotFound
 	}
+	c.res.sql = ps.st.sql

-	if q.sd == nil {
-		q.Do(func() { c.sg.prepare(q, role) })
-
-		if q.err != nil {
-			return nil, nil, err
-		}
-	}
-
-	c.res.sql = q.st.sql

 	var root []byte
 	var row *sql.Row

-	varsList, err := c.argList(q.st.md)
+	varsList, err := c.argList(ps.args)
 	if err != nil {
 		return nil, nil, err
 	}

 	if useTx {
-		row = tx.Stmt(q.sd).QueryRow(varsList...)
+		row = tx.Stmt(ps.sd).QueryRow(varsList...)
 	} else {
-		row = q.sd.QueryRow(varsList...)
+		row = ps.sd.QueryRow(varsList...)
 	}

-	if q.roleArg {
+	if ps.roleArg {
 		err = row.Scan(&role, &root)
 	} else {
 		err = row.Scan(&root)

@@ -217,15 +206,15 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
 	if useTx {
 		if err := tx.Commit(); err != nil {
-			return nil, nil, q.err
+			return nil, nil, err
 		}
 	}

-	if root, err = c.sg.encryptCursor(q.st.qc, root); err != nil {
+	if root, err = c.sg.encryptCursor(ps.st.qc, root); err != nil {
 		return nil, nil, err
 	}

-	return root, &q.st, nil
+	return root, &ps.st, nil
 }

 func (c *scontext) resolveSQL() ([]byte, *stmt, error) {

@@ -263,23 +252,15 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
 		return nil, nil, err
 	}
 	st := &stmts[0]
-	c.res.sql = st.sql

-	varList, err := c.argList(st.md)
+	t := fasttemplate.New(st.sql, openVar, closeVar)
+	buf := &bytes.Buffer{}
+
+	_, err = t.ExecuteFunc(buf, c.argMap())
 	if err != nil {
 		return nil, nil, err
 	}
+	finalSQL := buf.String()

-	// finalSQL := buf.String()
-	////
-	// _, err = t.ExecuteFunc(buf, c.argMap(st.md))
-	// if err != nil {
-	// 	return nil, nil, err
-	// }
-	// finalSQL := buf.String()
-	/////

 	// var stime time.Time

@@ -294,9 +275,9 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
 	// defaultRole := c.role

 	if useTx {
-		row = tx.QueryRowContext(c, st.sql, varList...)
+		row = tx.QueryRow(finalSQL)
 	} else {
-		row = c.sg.db.QueryRowContext(c, st.sql, varList...)
+		row = c.sg.db.QueryRow(finalSQL)
 	}

 	if len(stmts) > 1 {

@@ -305,7 +286,9 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
 		err = row.Scan(&root)
 	}

-	if role == "" {
+	c.res.sql = finalSQL
+
+	if len(role) == 0 {
 		c.res.role = c.role
 	} else {
 		c.res.role = role
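The two resolve paths above now diverge in how values reach Postgres. A condensed sketch of the difference (simplified signatures, not the real super-graph types):

    package core

    import "database/sql"

    // master: the SQL text contains $1, $2, ... and the values travel
    // separately, bound to a prepared statement at execution time.
    func execPrepared(sd *sql.Stmt, vars []interface{}) *sql.Row {
        return sd.QueryRow(vars...)
    }

    // v0.13.35: values are written into the SQL string first (escaped via
    // escSQuote by argMap), and the finished string is executed directly.
    func execTemplated(db *sql.DB, finalSQL string) *sql.Row {
        return db.QueryRow(finalSQL)
    }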

@@ -2,7 +2,9 @@ package core

 import (
 	"fmt"
+	"regexp"
 	"strings"
+	"unicode"

 	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/core/internal/qcode"

@@ -16,12 +18,17 @@ func (sg *SuperGraph) initConfig() error {
 		flect.AddPlural(k, v)
 	}

+	// Variables: Validate and sanitize
+	for k, v := range c.Vars {
+		c.Vars[k] = sanitizeVars(v)
+	}
+
 	// Tables: Validate and sanitize
 	tm := make(map[string]struct{})

 	for i := 0; i < len(c.Tables); i++ {
 		t := &c.Tables[i]
-		// t.Name = flect.Pluralize(strings.ToLower(t.Name))
+		t.Name = flect.Pluralize(strings.ToLower(t.Name))

 		if _, ok := tm[t.Name]; ok {
 			sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)

@@ -73,24 +80,18 @@ func (sg *SuperGraph) initConfig() error {
 		sg.roles["anon"] = &ur
 	}

-	if c.RolesQuery == "" {
-		sg.log.Printf("INF roles_query not defined: attribute based access control disabled")
-	} else {
-		n := 0
-		for k, v := range sg.roles {
-			if k == "user" || k == "anon" {
-				n++
-			} else if v.Match != "" {
-				n++
-			}
-		}
-		sg.abacEnabled = (n > 2)
+	// Roles: validate and sanitize
+	c.RolesQuery = sanitizeVars(c.RolesQuery)

-		if !sg.abacEnabled {
-			sg.log.Printf("WRN attribute based access control disabled: no custom roles found (with 'match' defined)")
-		}
+	if c.RolesQuery == "" {
+		sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
 	}

+	_, userExists := sg.roles["user"]
+	_, sg.anonExists = sg.roles["anon"]
+	sg.abacEnabled = userExists && c.RolesQuery != ""
+
 	return nil
 }

@@ -100,26 +101,21 @@ func getDBTableAliases(c *Config) map[string][]string {
 	for i := range c.Tables {
 		t := c.Tables[i]

-		if t.Table != "" && t.Type == "" {
-			m[t.Table] = append(m[t.Table], t.Name)
+		if len(t.Table) == 0 || len(t.Columns) != 0 {
+			continue
 		}
+
+		m[t.Table] = append(m[t.Table], t.Name)
 	}
 	return m
 }

 func addTables(c *Config, di *psql.DBInfo) error {
-	var err error
-
 	for _, t := range c.Tables {
-		switch t.Type {
-		case "json", "jsonb":
-			err = addJsonTable(di, t.Columns, t)
-
-		case "polymorphic":
-			err = addVirtualTable(di, t.Columns, t)
+		if t.Table == "" || len(t.Columns) == 0 {
+			continue
 		}

-		if err != nil {
+		if err := addTable(di, t.Columns, t); err != nil {
 			return err
 		}

@@ -127,18 +123,17 @@ func addTables(c *Config, di *psql.DBInfo) error {
 	return nil
 }

-func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
-	// This is for jsonb columns that want to be tables.
+func addTable(di *psql.DBInfo, cols []Column, t Table) error {
 	bc, ok := di.GetColumn(t.Table, t.Name)
 	if !ok {
 		return fmt.Errorf(
-			"json table: column '%s' not found on table '%s'",
+			"Column '%s' not found on table '%s'",
 			t.Name, t.Table)
 	}

 	if bc.Type != "json" && bc.Type != "jsonb" {
 		return fmt.Errorf(
-			"json table: column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
+			"Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
 			t.Name, t.Table, bc.Type)
 	}

@@ -165,38 +160,8 @@ func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
 	return nil
 }

-func addVirtualTable(di *psql.DBInfo, cols []Column, t Table) error {
-	if len(cols) == 0 {
-		return fmt.Errorf("polymorphic table: no id column specified")
-	}
-
-	c := cols[0]
-
-	if c.ForeignKey == "" {
-		return fmt.Errorf("polymorphic table: no 'related_to' specified on id column")
-	}
-
-	s := strings.SplitN(c.ForeignKey, ".", 2)
-
-	if len(s) != 2 {
-		return fmt.Errorf("polymorphic table: foreign key must be <type column>.<foreign key column>")
-	}
-
-	di.VTables = append(di.VTables, psql.VirtualTable{
-		Name:       t.Name,
-		IDColumn:   c.Name,
-		TypeColumn: s[0],
-		FKeyColumn: s[1],
-	})
-
-	return nil
-}
-
 func addForeignKeys(c *Config, di *psql.DBInfo) error {
 	for _, t := range c.Tables {
-		if t.Type != "" {
-			continue
-		}
-
 		for _, c := range t.Columns {
 			if c.ForeignKey == "" {
 				continue

@@ -210,52 +175,30 @@ func addForeignKeys(c *Config, di *psql.DBInfo) error {
 }

 func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
-	var tn string
-
-	if t.Type == "polymorphic" {
-		tn = t.Table
-	} else {
-		tn = t.Name
-	}
-
-	c1, ok := di.GetColumn(tn, c.Name)
+	c1, ok := di.GetColumn(t.Name, c.Name)
 	if !ok {
 		return fmt.Errorf(
-			"config: invalid table '%s' or column '%s' defined",
-			tn, c.Name)
+			"Invalid table '%s' or column '%s' in Config",
+			t.Name, c.Name)
 	}

 	v := strings.SplitN(c.ForeignKey, ".", 2)
 	if len(v) != 2 {
 		return fmt.Errorf(
-			"config: invalid foreign_key defined for table '%s' and column '%s': %s",
-			tn, c.Name, c.ForeignKey)
-	}
-
-	// check if it's a polymorphic foreign key
-	if _, ok := di.GetColumn(tn, v[0]); ok {
-		c2, ok := di.GetColumn(tn, v[1])
-		if !ok {
-			return fmt.Errorf(
-				"config: invalid column '%s' for polymorphic relationship on table '%s' and column '%s'",
-				v[1], tn, c.Name)
-		}
-
-		c1.FKeyTable = v[0]
-		c1.FKeyColID = []int16{c2.ID}
-		return nil
+			"Invalid foreign_key in Config for table '%s' and column '%s",
+			t.Name, c.Name)
 	}

 	fkt, fkc := v[0], v[1]
-	c3, ok := di.GetColumn(fkt, fkc)
+	c2, ok := di.GetColumn(fkt, fkc)
 	if !ok {
 		return fmt.Errorf(
-			"config: foreign_key for table '%s' and column '%s' points to unknown table '%s' and column '%s'",
-			t.Name, c.Name, v[0], v[1])
+			"Invalid foreign_key in Config for table '%s' and column '%s",
+			t.Name, c.Name)
 	}

 	c1.FKeyTable = fkt
-	c1.FKeyColID = []int16{c3.ID}
+	c1.FKeyColID = []int16{c2.ID}

 	return nil
 }

@@ -263,7 +206,7 @@ func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
 func addRoles(c *Config, qc *qcode.Compiler) error {
 	for _, r := range c.Roles {
 		for _, t := range r.Tables {
-			if err := addRole(qc, r, t, c.DefaultBlock); err != nil {
+			if err := addRole(qc, r, t, c.DefaultAllow); err != nil {
 				return err
 			}
 		}

@@ -272,56 +215,67 @@ func addRoles(c *Config, qc *qcode.Compiler) error {
 	return nil
 }

-func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultBlock bool) error {
-	ro := false // read-only
+func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultAllow bool) error {
+	ro := true // read-only

-	if defaultBlock && r.Name == "anon" {
-		ro = true
+	if defaultAllow {
+		ro = false
 	}

-	if t.ReadOnly {
-		ro = true
+	if r.Name != "anon" {
+		ro = false
 	}

-	query := qcode.QueryConfig{Block: false}
-	insert := qcode.InsertConfig{Block: ro}
-	update := qcode.UpdateConfig{Block: ro}
-	del := qcode.DeleteConfig{Block: ro}
+	if t.ReadOnly != nil {
+		ro = *t.ReadOnly
+	}

-	if t.Query != nil {
-		query = qcode.QueryConfig{
-			Limit:            t.Query.Limit,
-			Filters:          t.Query.Filters,
-			Columns:          t.Query.Columns,
-			DisableFunctions: t.Query.DisableFunctions,
-			Block:            t.Query.Block,
-		}
+	blocked := struct {
+		query  bool
+		insert bool
+		update bool
+		delete bool
+	}{false, ro, ro, ro}
+
+	if t.Query.Block != nil {
+		blocked.query = *t.Query.Block
+	}
+	if t.Insert.Block != nil {
+		blocked.insert = *t.Insert.Block
+	}
+	if t.Update.Block != nil {
+		blocked.update = *t.Update.Block
+	}
+	if t.Delete.Block != nil {
+		blocked.delete = *t.Delete.Block
 	}

-	if t.Insert != nil {
-		insert = qcode.InsertConfig{
-			Filters: t.Insert.Filters,
-			Columns: t.Insert.Columns,
-			Presets: t.Insert.Presets,
-			Block:   t.Insert.Block,
-		}
+	query := qcode.QueryConfig{
+		Limit:            t.Query.Limit,
+		Filters:          t.Query.Filters,
+		Columns:          t.Query.Columns,
+		DisableFunctions: t.Query.DisableFunctions,
+		Block:            blocked.query,
 	}

-	if t.Update != nil {
-		update = qcode.UpdateConfig{
-			Filters: t.Update.Filters,
-			Columns: t.Update.Columns,
-			Presets: t.Update.Presets,
-			Block:   t.Update.Block,
-		}
+	insert := qcode.InsertConfig{
+		Filters: t.Insert.Filters,
+		Columns: t.Insert.Columns,
+		Presets: t.Insert.Presets,
+		Block:   blocked.insert,
 	}

-	if t.Delete != nil {
-		del = qcode.DeleteConfig{
-			Filters: t.Delete.Filters,
-			Columns: t.Delete.Columns,
-			Block:   t.Delete.Block,
-		}
+	update := qcode.UpdateConfig{
+		Filters: t.Update.Filters,
+		Columns: t.Update.Columns,
+		Presets: t.Update.Presets,
+		Block:   blocked.update,
+	}
+
+	del := qcode.DeleteConfig{
+		Filters: t.Delete.Filters,
+		Columns: t.Delete.Columns,
+		Block:   blocked.delete,
 	}

 	return qc.AddRole(r.Name, t.Name, qcode.TRConfig{

@@ -339,3 +293,23 @@ func (r *Role) GetTable(name string) *RoleTable {
 func sanitize(value string) string {
 	return strings.ToLower(strings.TrimSpace(value))
 }
+
+var (
+	varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
+	varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
+)
+
+func sanitizeVars(s string) string {
+	s0 := varRe1.ReplaceAllString(s, `{{$1}}`)
+
+	s1 := strings.Map(func(r rune) rune {
+		if unicode.IsSpace(r) {
+			return ' '
+		}
+		return r
+	}, s0)
+
+	return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
+		return strings.ToLower(m)
+	})
+}
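The added sanitizeVars pass normalizes both variable spellings into the lowercase {{var}} form fasttemplate expects: $Var is rewritten to {{Var}}, each whitespace character becomes a plain space, and the placeholder is lowercased. A small test sketching that behaviour, with an invented input string:

    package core

    import "testing"

    func TestSanitizeVars(t *testing.T) {
        in := "SELECT * FROM users\n\tWHERE id = $User_ID"
        // \n and \t each become one space; $User_ID becomes {{user_id}}
        want := "SELECT * FROM users  WHERE id = {{user_id}}"

        if got := sanitizeVars(in); got != want {
            t.Errorf("sanitizeVars: got %q, want %q", got, want)
        }
    }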

@@ -6,27 +6,24 @@ import (
 	"errors"
 	"fmt"
 	"io/ioutil"
-	"log"
 	"os"
 	"sort"
 	"strings"
-	"text/scanner"

 	"github.com/chirino/graphql/schema"
 	"github.com/dosco/super-graph/jsn"
 )

 const (
-	expComment = iota + 1
-	expVar
-	expQuery
+	AL_QUERY int = iota + 1
+	AL_VARS
 )

 type Item struct {
 	Name    string
 	key     string
 	Query   string
-	Vars    string
+	Vars    json.RawMessage
 	Comment string
 }

@@ -38,7 +35,6 @@ type List struct {
 type Config struct {
 	CreateIfNotExists bool
 	Persist           bool
-	Log               *log.Logger
 }

 func New(filename string, conf Config) (*List, error) {

@@ -84,12 +80,6 @@ func New(filename string, conf Config) (*List, error) {
 	} else {
 		al.filepath = filename
 	}
-
-	if file, err := os.OpenFile(al.filepath, os.O_RDONLY|os.O_CREATE, 0644); err != nil {
-		return nil, err
-	} else {
-		file.Close()
-	}
 }

 var err error

@@ -99,10 +89,8 @@ func New(filename string, conf Config) (*List, error) {
 	go func() {
 		for v := range al.saveChan {
-			err := al.save(v)
-
-			if err != nil && conf.Log != nil {
-				conf.Log.Println("WRN allow list save:", err)
+			if err = al.save(v); err != nil {
+				break
 			}
 		}
 	}()

@@ -128,101 +116,121 @@ func (al *List) Set(vars []byte, query, comment string) error {
 		return errors.New("empty query")
 	}

+	var q string
+
+	for i := 0; i < len(query); i++ {
+		c := query[i]
+		if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' {
+			q = query
+			break
+		} else if c == '{' {
+			q = "query " + query
+			break
+		}
+	}
+
 	al.saveChan <- Item{
 		Comment: comment,
-		Query:   query,
-		Vars:    string(vars),
+		Query:   q,
+		Vars:    vars,
 	}

 	return nil
 }

 func (al *List) Load() ([]Item, error) {
+	var list []Item
+	varString := "variables"
+
 	b, err := ioutil.ReadFile(al.filepath)
 	if err != nil {
-		return nil, err
+		return list, err
 	}

-	return parse(string(b), al.filepath)
-}
-
-func parse(b, filename string) ([]Item, error) {
-	var items []Item
-
-	var s scanner.Scanner
-	s.Init(strings.NewReader(b))
-	s.Filename = filename
-	s.Mode ^= scanner.SkipComments
-
-	var op, sp scanner.Position
-	var item Item
-	newComment := false
-
-	st := expComment
-
-	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
-		txt := s.TokenText()
-
-		switch {
-		case strings.HasPrefix(txt, "/*"):
-			if st == expQuery {
-				v := b[sp.Offset:s.Pos().Offset]
-				item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
-				items = append(items, item)
-			}
-			item = Item{Comment: strings.TrimSpace(txt[2 : len(txt)-2])}
-			sp = s.Pos()
-			st = expComment
-			newComment = true
-
-		case !newComment && strings.HasPrefix(txt, "#"):
-			if st == expQuery {
-				v := b[sp.Offset:s.Pos().Offset]
-				item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
-				items = append(items, item)
-			}
-			item = Item{}
-			sp = s.Pos()
-			st = expComment
-
-		case strings.HasPrefix(txt, "variables"):
-			if st == expComment {
-				v := b[sp.Offset:s.Pos().Offset]
-				item.Comment = strings.TrimSpace(v[:strings.IndexByte(v, '\n')])
-			}
-			sp = s.Pos()
-			st = expVar
-
-		case isGraphQL(txt):
-			if st == expVar {
-				v := b[sp.Offset:s.Pos().Offset]
-				item.Vars = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
-			}
-			sp = op
-			st = expQuery
-		}
-
-		op = s.Pos()
-	}
-
-	if st == expQuery {
-		v := b[sp.Offset:s.Pos().Offset]
-		item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
-		items = append(items, item)
-	}
-
-	for i := range items {
-		items[i].Name = QueryName(items[i].Query)
-		items[i].key = strings.ToLower(items[i].Name)
-	}
-
-	return items, nil
-}
-
-func isGraphQL(s string) bool {
-	return strings.HasPrefix(s, "query") ||
-		strings.HasPrefix(s, "mutation") ||
-		strings.HasPrefix(s, "subscription")
+	if len(b) == 0 {
+		return list, nil
+	}
+
+	var comment bytes.Buffer
+	var varBytes []byte
+
+	itemMap := make(map[string]struct{})
+
+	s, e, c := 0, 0, 0
+	ty := 0
+
+	for {
+		fq := false
+
+		if c == 0 && b[e] == '#' {
+			s = e
+			for e < len(b) && b[e] != '\n' {
+				e++
+			}
+			if (e - s) > 2 {
+				comment.Write(b[(s + 1):(e + 1)])
+			}
+		}
+
+		if e >= len(b) {
+			break
+		}
+
+		if matchPrefix(b, e, "query") || matchPrefix(b, e, "mutation") {
+			if c == 0 {
+				s = e
+			}
+			ty = AL_QUERY
+		} else if matchPrefix(b, e, varString) {
+			if c == 0 {
+				s = e + len(varString) + 1
+			}
+			ty = AL_VARS
+		} else if b[e] == '{' {
+			c++
+
+		} else if b[e] == '}' {
+			c--
+
+			if c == 0 {
+				if ty == AL_QUERY {
+					fq = true
+				} else if ty == AL_VARS {
+					varBytes = b[s:(e + 1)]
+				}
+				ty = 0
+			}
+		}
+
+		if fq {
+			query := string(b[s:(e + 1)])
+			name := QueryName(query)
+			key := strings.ToLower(name)
+
+			if _, ok := itemMap[key]; !ok {
+				v := Item{
+					Name:    name,
+					key:     key,
+					Query:   query,
+					Vars:    varBytes,
+					Comment: comment.String(),
+				}
+				list = append(list, v)
+				comment.Reset()
+			}
+
+			varBytes = nil
+		}
+
+		e++
+		if e >= len(b) {
+			break
+		}
+	}
+
+	return list, nil
 }

 func (al *List) save(item Item) error {

@@ -231,6 +239,8 @@ func (al *List) save(item Item) error {
 	qd := &schema.QueryDocument{}

 	if err := qd.Parse(item.Query); err != nil {
+		fmt.Println("##", item.Query)
 		return err
 	}

@@ -238,6 +248,8 @@ func (al *List) save(item Item) error {
 	query := buf.String()
 	buf.Reset()

+	// fmt.Println(">", query)
+
 	item.Name = QueryName(query)
 	item.key = strings.ToLower(item.Name)

@@ -279,43 +291,57 @@ func (al *List) save(item Item) error {
 		return strings.Compare(list[i].key, list[j].key) == -1
 	})

-	for i, v := range list {
-		var vars string
-
-		if v.Vars != "" {
-			buf.Reset()
-
-			if err := jsn.Clear(&buf, []byte(v.Vars)); err != nil {
-				continue
-			}
-			vj := json.RawMessage(buf.Bytes())
-
-			if vj, err = json.MarshalIndent(vj, "", " "); err != nil {
-				continue
-			}
-			vars = string(vj)
-		}
-		list[i].Vars = vars
-		list[i].Comment = strings.TrimSpace(v.Comment)
-	}
-
 	for _, v := range list {
-		if v.Comment != "" {
-			_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
-		} else {
-			_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
-		}
-		if err != nil {
-			return err
-		}
+		cmtLines := strings.Split(v.Comment, "\n")
+
+		i := 0
+		for _, c := range cmtLines {
+			if c = strings.TrimSpace(c); c == "" {
+				continue
+			}
+
+			_, err := f.WriteString(fmt.Sprintf("# %s\n", c))
+			if err != nil {
+				return err
+			}
+
+			i++
+		}
+
+		if i != 0 {
+			if _, err := f.WriteString("\n"); err != nil {
+				return err
+			}
+		} else {
+			if _, err := f.WriteString(fmt.Sprintf("# Query named %s\n\n", v.Name)); err != nil {
+				return err
+			}
+		}

-		if v.Vars != "" {
-			_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", v.Vars))
+		if len(v.Vars) != 0 && !bytes.Equal(v.Vars, []byte("{}")) {
+			buf.Reset()
+
+			if err := jsn.Clear(&buf, v.Vars); err != nil {
+				return fmt.Errorf("failed to clean vars: %w", err)
+			}
+			vj := json.RawMessage(buf.Bytes())
+
+			vj, err = json.MarshalIndent(vj, "", " ")
+			if err != nil {
+				return fmt.Errorf("failed to marshal vars: %w", err)
+			}
+
+			_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
 			if err != nil {
 				return err
 			}
 		}

-		_, err = f.WriteString(fmt.Sprintf("%s\n\n", v.Query))
+		if v.Query[0] == '{' {
+			_, err = f.WriteString(fmt.Sprintf("query %s\n\n", v.Query))
+		} else {
+			_, err = f.WriteString(fmt.Sprintf("%s\n\n", v.Query))
+		}
+
 		if err != nil {
 			return err
 		}

@@ -324,6 +350,18 @@ func (al *List) save(item Item) error {
 	return nil
 }

+func matchPrefix(b []byte, i int, s string) bool {
+	if (len(b) - i) < len(s) {
+		return false
+	}
+	for n := 0; n < len(s); n++ {
+		if b[(i+n)] != s[n] {
+			return false
+		}
+	}
+	return true
+}
+
 func QueryName(b string) string {
 	state, s := 0, 0
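After this change the allow list is round-tripped in a line-oriented plain-text format: comment lines prefixed with #, an optional variables { ... } JSON block, then the query itself. A hypothetical entry as save() would emit it (names and fields invented):

    # Query named getProducts

    variables {
      "data": {
        "slug": ""
      }
    }

    query getProducts {
      products {
        id
        name
      }
    }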

@@ -14,7 +14,7 @@ func TestGQLName1(t *testing.T) {
 	name := QueryName(q)

-	if name != "" {
+	if len(name) != 0 {
 		t.Fatal("Name should be empty, not ", name)
 	}
 }

@@ -82,160 +82,3 @@ func TestGQLName5(t *testing.T) {
 		t.Fatal("Name should be empty, not ", name)
 	}
 }
-
-func TestParse1(t *testing.T) {
-	var al = `
-	# Hello world
-
-	variables {
-		"data": {
-			"slug": "",
-			"body": "",
-			"post": {
-				"connect": {
-					"slug": ""
-				}
-			}
-		}
-	}
-
-	mutation createComment {
-		comment(insert: $data) {
-			slug
-			body
-			createdAt: created_at
-			totalVotes: cached_votes_total
-			totalReplies: cached_replies_total
-			vote: comment_vote(where: {user_id: {eq: $user_id}}) {
-				created_at
-				__typename
-			}
-			author: user {
-				slug
-				firstName: first_name
-				lastName: last_name
-				pictureURL: picture_url
-				bio
-				__typename
-			}
-			__typename
-		}
-	}
-
-	# Query named createPost
-
-	query createPost {
-		post(insert: $data) {
-			slug
-			body
-			published
-			createdAt: created_at
-			totalVotes: cached_votes_total
-			totalComments: cached_comments_total
-			vote: post_vote(where: {user_id: {eq: $user_id}}) {
-				created_at
-				__typename
-			}
-			author: user {
-				slug
-				firstName: first_name
-				lastName: last_name
-				pictureURL: picture_url
-				bio
-				__typename
-			}
-			__typename
-		}
-	}`
-
-	_, err := parse(al, "allow.list")
-	if err != nil {
-		t.Fatal(err)
-	}
-}
-
-func TestParse2(t *testing.T) {
-	var al = `
-	/* Hello world */
-
-	variables {
-		"data": {
-			"slug": "",
-			"body": "",
-			"post": {
-				"connect": {
-					"slug": ""
-				}
-			}
-		}
-	}
-
-	mutation createComment {
-		comment(insert: $data) {
-			slug
-			body
-			createdAt: created_at
-			totalVotes: cached_votes_total
-			totalReplies: cached_replies_total
-			vote: comment_vote(where: {user_id: {eq: $user_id}}) {
-				created_at
-				__typename
-			}
-			author: user {
-				slug
-				firstName: first_name
-				lastName: last_name
-				pictureURL: picture_url
-				bio
-				__typename
-			}
-			__typename
-		}
-	}
-
-	/*
-	Query named createPost
-	*/
-
-	variables {
-		"data": {
-			"thread": {
-				"connect": {
-					"slug": ""
-				}
-			},
-			"slug": "",
-			"published": false,
-			"body": ""
-		}
-	}
-
-	query createPost {
-		post(insert: $data) {
-			slug
-			body
-			published
-			createdAt: created_at
-			totalVotes: cached_votes_total
-			totalComments: cached_comments_total
-			vote: post_vote(where: {user_id: {eq: $user_id}}) {
-				created_at
-				__typename
-			}
-			author: user {
-				slug
-				firstName: first_name
-				lastName: last_name
-				pictureURL: picture_url
-				bio
-				__typename
-			}
-			__typename
-		}
-	}`
-
-	_, err := parse(al, "allow.list")
-	if err != nil {
-		t.Fatal(err)
-	}
-}

@@ -55,6 +55,19 @@ func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
 	config.AllowListFile = "./allow.list"
 	config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`

+	blockFalse := false
+
+	config.Roles = []core.Role{
+		core.Role{
+			Name: "anon",
+			Tables: []core.RoleTable{
+				core.RoleTable{Name: "users", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
+				core.RoleTable{Name: "product", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
+				core.RoleTable{Name: "line_item", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
+			},
+		},
+	}
+
 	sg, err := core.NewSuperGraph(&config, db)
 	require.NoError(t, err)
 	ctx := context.Background()

@@ -1,3 +1,4 @@
+//nolint:errcheck
 package psql

 import (

@@ -11,7 +12,8 @@ import (
 func (c *compilerContext) renderBaseColumns(
 	sel *qcode.Select,
 	ti *DBTableInfo,
-	childCols []*qcode.Column) ([]int, bool, error) {
+	childCols []*qcode.Column,
+	skipped uint32) ([]int, bool, error) {

 	var realColsRendered []int

@@ -111,15 +113,15 @@ func (c *compilerContext) renderColumnSearchRank(sel *qcode.Select, ti *DBTableI
 	c.renderComma(columnsRendered)
 	//fmt.Fprintf(w, `ts_rank("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
 	//c.sel.Name, cn, arg.Val, col.Name)
-	_, _ = io.WriteString(c.w, `ts_rank(`)
+	io.WriteString(c.w, `ts_rank(`)
 	colWithTable(c.w, ti.Name, cn)
 	if c.schema.ver >= 110000 {
-		_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
+		io.WriteString(c.w, `, websearch_to_tsquery('{{`)
 	} else {
-		_, _ = io.WriteString(c.w, `, to_tsquery(`)
+		io.WriteString(c.w, `, to_tsquery('{{`)
 	}
-	c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
-	_, _ = io.WriteString(c.w, `))`)
+	io.WriteString(c.w, arg.Val)
+	io.WriteString(c.w, `}}'))`)
 	alias(c.w, col.Name)

 	return nil

@@ -136,15 +138,15 @@ func (c *compilerContext) renderColumnSearchHeadline(sel *qcode.Select, ti *DBTa
 	c.renderComma(columnsRendered)
 	//fmt.Fprintf(w, `ts_headline("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
 	//c.sel.Name, cn, arg.Val, col.Name)
-	_, _ = io.WriteString(c.w, `ts_headline(`)
+	io.WriteString(c.w, `ts_headline(`)
 	colWithTable(c.w, ti.Name, cn)
 	if c.schema.ver >= 110000 {
-		_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
+		io.WriteString(c.w, `, websearch_to_tsquery('{{`)
 	} else {
-		_, _ = io.WriteString(c.w, `, to_tsquery(`)
+		io.WriteString(c.w, `, to_tsquery('{{`)
 	}
-	c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
-	_, _ = io.WriteString(c.w, `))`)
+	io.WriteString(c.w, arg.Val)
+	io.WriteString(c.w, `}}'))`)
 	alias(c.w, col.Name)

 	return nil

@@ -156,9 +158,9 @@ func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInf
 	}

 	c.renderComma(columnsRendered)
-	_, _ = io.WriteString(c.w, `(`)
+	io.WriteString(c.w, `(`)
 	squoted(c.w, ti.Name)
-	_, _ = io.WriteString(c.w, ` :: text)`)
+	io.WriteString(c.w, ` :: text)`)
 	alias(c.w, col.Name)

 	return nil

@@ -168,9 +170,9 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
 	pl := funcPrefixLen(c.schema.fm, col.Name)
 	// if pl == 0 {
 	// 	//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
-	// 	_, _ = io.WriteString(c.w, `'`)
-	// 	_, _ = io.WriteString(c.w, col.Name)
-	// 	_, _ = io.WriteString(c.w, ` not defined'`)
+	// 	io.WriteString(c.w, `'`)
+	// 	io.WriteString(c.w, col.Name)
+	// 	io.WriteString(c.w, ` not defined'`)
 	// 	alias(c.w, col.Name)
 	// }

@@ -189,10 +191,10 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
 	c.renderComma(columnsRendered)
 	//fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Name, cn, col.Name)
-	_, _ = io.WriteString(c.w, fn)
-	_, _ = io.WriteString(c.w, `(`)
+	io.WriteString(c.w, fn)
+	io.WriteString(c.w, `(`)
 	colWithTable(c.w, ti.Name, cn)
-	_, _ = io.WriteString(c.w, `)`)
+	io.WriteString(c.w, `)`)
 	alias(c.w, col.Name)

 	return nil

@@ -200,7 +202,7 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
 func (c *compilerContext) renderComma(columnsRendered int) {
 	if columnsRendered != 0 {
-		_, _ = io.WriteString(c.w, `, `)
+		io.WriteString(c.w, `, `)
 	}
 }
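Both search helpers now inline the argument as a quoted {{...}} placeholder instead of routing it through Metadata.renderValueExp. For a hypothetical tsv column and a $search variable, the rendered fragments would differ roughly like this (table, column, and alias names are illustrative):

    // v0.13.35 side: value is substituted into the SQL text later
    //   ts_rank("products"."tsv", websearch_to_tsquery('{{search}}')) AS "search_rank"
    //
    // master side: value becomes a numbered prepared-statement parameter
    //   ts_rank("products"."tsv", websearch_to_tsquery($1)) AS "search_rank"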

@@ -4,18 +4,17 @@ package psql

 import (
 	"encoding/json"

 	"github.com/dosco/super-graph/core/internal/qcode"
 )

 var (
 	qcompileTest, _ = qcode.NewCompiler(qcode.Config{})

-	schema, _ = GetTestSchema()
+	schema = GetTestSchema()

-	vars = map[string]string{
+	vars = NewVariables(map[string]string{
 		"admin_account_id": "5",
-	}
+	})

 	pcompileTest = NewCompiler(Config{
 		Schema: schema,

@@ -25,110 +24,31 @@ var (
 // FuzzerEntrypoint for Fuzzbuzz
 func Fuzz(data []byte) int {
-	err1 := query(data)
-	err2 := insert(data)
-	err3 := update(data)
-	err4 := delete(data)
+	gql := `mutation {
+		product(insert: $data) {
+			id
+			name
+			user {
+				id
+				full_name
+				email
+			}
+		}
+	}`
+
+	qc, err := qcompileTest.Compile([]byte(gql), "user")
+	if err != nil {
+		panic("qcompile can't fail")
+	}
+
+	vars := map[string]json.RawMessage{
+		"data": json.RawMessage(data),
+	}

-	if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
+	_, _, err = pcompileTest.CompileEx(qc, vars)
+	if err != nil {
 		return 0
 	}

 	return 1
 }
-
-func query(data []byte) error {
-	gql := data
-
-	qc, err1 := qcompileTest.Compile(gql, "user")
-
-	vars := map[string]json.RawMessage{
-		"data": json.RawMessage(data),
-	}
-
-	_, _, err2 := pcompileTest.CompileEx(qc, vars)
-
-	if err1 != nil {
-		return err1
-	} else {
-		return err2
-	}
-}
-
-func insert(data []byte) error {
-	gql := `mutation {
-		product(insert: $data) {
-			id
-			name
-			user {
-				id
-				full_name
-				email
-			}
-		}
-	}`
-
-	qc, err := qcompileTest.Compile([]byte(gql), "user")
-	if err != nil {
-		panic("qcompile can't fail")
-	}
-
-	vars := map[string]json.RawMessage{
-		"data": json.RawMessage(data),
-	}
-
-	_, _, err = pcompileTest.CompileEx(qc, vars)
-	return err
-}
-
-func update(data []byte) error {
-	gql := `mutation {
-		product(insert: $data) {
-			id
-			name
-			user {
-				id
-				full_name
-				email
-			}
-		}
-	}`
-
-	qc, err := qcompileTest.Compile([]byte(gql), "user")
-	if err != nil {
-		panic("qcompile can't fail")
-	}
-
-	vars := map[string]json.RawMessage{
-		"data": json.RawMessage(data),
-	}
-
-	_, _, err = pcompileTest.CompileEx(qc, vars)
-	return err
-}
-
-func delete(data []byte) error {
-	gql := `mutation {
-		product(insert: $data) {
-			id
-			name
-			user {
-				id
-				full_name
-				email
-			}
-		}
-	}`
-
-	qc, err := qcompileTest.Compile([]byte(gql), "user")
-	if err != nil {
-		panic("qcompile can't fail")
-	}
-
-	vars := map[string]json.RawMessage{
-		"data": json.RawMessage(data),
-	}
-
-	_, _, err = pcompileTest.CompileEx(qc, vars)
-	return err
-}

@@ -1,20 +0,0 @@
-// +build gofuzz
-
-package psql
-
-import (
-	"testing"
-)
-
-var ret int
-
-func TestFuzzCrashers(t *testing.T) {
-	var crashers = []string{
-		"{\"connect\":{}}",
-		"q(q{q{q{q{q{q{q{q{",
-	}
-
-	for _, f := range crashers {
-		ret = Fuzz([]byte(f))
-	}
-}

@@ -10,8 +10,8 @@ import (
 	"github.com/dosco/super-graph/core/internal/util"
 )

-func (c *compilerContext) renderInsert(
-	w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {
+func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
+	vars Variables, ti *DBTableInfo) (uint32, error) {

 	insert, ok := vars[qc.ActionVar]
 	if !ok {

@@ -25,8 +25,9 @@ func (c *compilerContext) renderInsert(
 	if insert[0] == '[' {
 		io.WriteString(c.w, `json_array_elements(`)
 	}
-	c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
-	io.WriteString(c.w, ` :: json`)
+	io.WriteString(c.w, `'{{`)
+	io.WriteString(c.w, qc.ActionVar)
+	io.WriteString(c.w, `}}' :: json`)
 	if insert[0] == '[' {
 		io.WriteString(c.w, `)`)
 	}

@@ -89,12 +90,12 @@ func (c *compilerContext) renderInsertStmt(qc *qcode.QCode, w io.Writer, item re
 	io.WriteString(w, `INSERT INTO `)
 	quoted(w, ti.Name)
 	io.WriteString(w, ` (`)
-	c.renderInsertUpdateColumns(qc, jt, ti, sk, false)
+	renderInsertUpdateColumns(w, qc, jt, ti, sk, false)
 	renderNestedInsertRelColumns(w, item.kvitem, false)
 	io.WriteString(w, `)`)

 	io.WriteString(w, ` SELECT `)
-	c.renderInsertUpdateColumns(qc, jt, ti, sk, true)
+	renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
 	renderNestedInsertRelColumns(w, item.kvitem, true)
 	io.WriteString(w, ` FROM "_sg_input" i`)
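The renderInsert change is the same substitution pattern applied to the insert payload. Assuming the mutation's action variable is named data, the two sides would render the input expression roughly like this (a sketch of the fragment only, not the full "_sg_input" statement):

    // v0.13.35: the payload is spliced in as a template placeholder
    //   '{{data}}' :: json
    //
    // master: the payload travels as a bound parameter
    //   $1 :: json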

@@ -1,61 +0,0 @@
-package psql
-
-import (
-	"io"
-)
-
-func (md *Metadata) RenderVar(w io.Writer, vv string) {
-	f, s := -1, 0
-
-	for i := range vv {
-		v := vv[i]
-		switch {
-		case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
-			if (i - s) > 0 {
-				_, _ = io.WriteString(w, vv[s:i])
-			}
-			f = i
-
-		case (v < 'a' && v > 'z') &&
-			(v < 'A' && v > 'Z') &&
-			(v < '0' && v > '9') &&
-			v != '_' &&
-			f != -1 &&
-			(i-f) > 1:
-			md.renderValueExp(w, Param{Name: vv[f+1 : i]})
-			s = i
-			f = -1
-		}
-	}
-
-	if f != -1 && (len(vv)-f) > 1 {
-		md.renderValueExp(w, Param{Name: vv[f+1:]})
-	} else {
-		_, _ = io.WriteString(w, vv[s:])
-	}
-}
-
-func (md *Metadata) renderValueExp(w io.Writer, p Param) {
-	_, _ = io.WriteString(w, `$`)
-	if v, ok := md.pindex[p.Name]; ok {
-		int32String(w, int32(v))
-	} else {
-		md.params = append(md.params, p)
-		n := len(md.params)
-
-		if md.pindex == nil {
-			md.pindex = make(map[string]int)
-		}
-		md.pindex[p.Name] = n
-		int32String(w, int32(n))
-	}
-}
-
-func (md Metadata) Skipped() uint32 {
-	return md.skipped
-}
-
-func (md Metadata) Params() []Param {
-	return md.params
-}
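The deleted file is master's replacement for template placeholders: RenderVar scans a SQL string for $var references, emits numbered parameters, and records each Param so argList knows what to bind. A rough usage sketch (this only compiles inside the repo, since the psql package is internal):

    package core

    import (
        "bytes"
        "fmt"

        "github.com/dosco/super-graph/core/internal/psql"
    )

    func renderVarExample() {
        md := psql.Metadata{}
        w := &bytes.Buffer{}

        // "$user_id" becomes "$1" and Param{Name: "user_id"} is recorded,
        // so md.Params() later tells argList what to bind at position 1.
        md.RenderVar(w, `SELECT id FROM users WHERE id = $user_id`)

        fmt.Println(w.String())       // SELECT id FROM users WHERE id = $1
        fmt.Println(len(md.Params())) // 1
    }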

View File

@ -6,7 +6,6 @@ import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"strings"
"github.com/dosco/super-graph/core/internal/qcode" "github.com/dosco/super-graph/core/internal/qcode"
"github.com/dosco/super-graph/core/internal/util" "github.com/dosco/super-graph/core/internal/util"
@ -34,44 +33,42 @@ var updateTypes = map[string]itemType{
var noLimit = qcode.Paging{NoLimit: true} var noLimit = qcode.Paging{NoLimit: true}
func (co *Compiler) compileMutation(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) { func (co *Compiler) compileMutation(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
md := Metadata{}
if len(qc.Selects) == 0 { if len(qc.Selects) == 0 {
return md, errors.New("empty query") return 0, errors.New("empty query")
} }
c := &compilerContext{md, w, qc.Selects, co} c := &compilerContext{w, qc.Selects, co}
root := &qc.Selects[0] root := &qc.Selects[0]
ti, err := c.schema.GetTable(root.Name) ti, err := c.schema.GetTable(root.Name)
if err != nil { if err != nil {
return c.md, err return 0, err
} }
switch qc.Type { switch qc.Type {
case qcode.QTInsert: case qcode.QTInsert:
if _, err := c.renderInsert(w, qc, vars, ti); err != nil { if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
return c.md, err return 0, err
} }
case qcode.QTUpdate: case qcode.QTUpdate:
if _, err := c.renderUpdate(w, qc, vars, ti); err != nil { if _, err := c.renderUpdate(qc, w, vars, ti); err != nil {
return c.md, err return 0, err
} }
case qcode.QTUpsert: case qcode.QTUpsert:
if _, err := c.renderUpsert(w, qc, vars, ti); err != nil { if _, err := c.renderUpsert(qc, w, vars, ti); err != nil {
return c.md, err return 0, err
} }
case qcode.QTDelete: case qcode.QTDelete:
if _, err := c.renderDelete(w, qc, vars, ti); err != nil { if _, err := c.renderDelete(qc, w, vars, ti); err != nil {
return c.md, err return 0, err
} }
default: default:
return c.md, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'") return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
} }
root.Paging = noLimit root.Paging = noLimit
@ -80,7 +77,7 @@ func (co *Compiler) compileMutation(w io.Writer, qc *qcode.QCode, vars Variables
root.Where = nil root.Where = nil
root.Args = nil root.Args = nil
return co.compileQueryWithMetadata(w, qc, vars, c.md) return c.compileQuery(qc, w, vars)
} }
type kvitem struct { type kvitem struct {
@ -368,12 +365,12 @@ func (c *compilerContext) renderUnionStmt(w io.Writer, item renitem) error {
return nil return nil
} }
func (c *compilerContext) renderInsertUpdateColumns( func renderInsertUpdateColumns(w io.Writer,
qc *qcode.QCode, qc *qcode.QCode,
jt map[string]json.RawMessage, jt map[string]json.RawMessage,
ti *DBTableInfo, ti *DBTableInfo,
skipcols map[string]struct{}, skipcols map[string]struct{},
isValues bool) (uint32, error) { values bool) (uint32, error) {
root := &qc.Selects[0] root := &qc.Selects[0]
renderedCol := false renderedCol := false
@ -395,18 +392,18 @@ func (c *compilerContext) renderInsertUpdateColumns(
} }
} }
if n != 0 { if n != 0 {
io.WriteString(c.w, `, `) io.WriteString(w, `, `)
} }
if isValues { if values {
io.WriteString(c.w, `CAST( i.j ->>`) io.WriteString(w, `CAST( i.j ->>`)
io.WriteString(c.w, `'`) io.WriteString(w, `'`)
io.WriteString(c.w, cn.Name) io.WriteString(w, cn.Name)
io.WriteString(c.w, `' AS `) io.WriteString(w, `' AS `)
io.WriteString(c.w, cn.Type) io.WriteString(w, cn.Type)
io.WriteString(c.w, `)`) io.WriteString(w, `)`)
} else { } else {
quoted(c.w, cn.Name) quoted(w, cn.Name)
} }
if !renderedCol { if !renderedCol {
@ -425,28 +422,16 @@ func (c *compilerContext) renderInsertUpdateColumns(
continue continue
} }
if i != 0 || n != 0 { if i != 0 || n != 0 {
io.WriteString(c.w, `, `) io.WriteString(w, `, `)
} }
if isValues { if values {
val := root.PresetMap[cn] io.WriteString(w, `'`)
switch { io.WriteString(w, root.PresetMap[cn])
case ok && len(val) > 1 && val[0] == '$': io.WriteString(w, `' :: `)
c.md.renderValueExp(c.w, Param{Name: val[1:], Type: col.Type}) io.WriteString(w, col.Type)
case ok && strings.HasPrefix(val, "sql:"):
io.WriteString(c.w, `(`)
c.md.RenderVar(c.w, val[4:])
io.WriteString(c.w, `)`)
case ok:
squoted(c.w, val)
}
io.WriteString(c.w, ` :: `)
io.WriteString(c.w, col.Type)
} else { } else {
quoted(c.w, cn) quoted(w, cn)
} }
if !renderedCol { if !renderedCol {
@ -455,15 +440,15 @@ func (c *compilerContext) renderInsertUpdateColumns(
} }
if len(skipcols) != 0 && renderedCol { if len(skipcols) != 0 && renderedCol {
io.WriteString(c.w, `, `) io.WriteString(w, `, `)
} }
return 0, nil return 0, nil
} }
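
The master side of renderInsertUpdateColumns now distinguishes three preset value forms: a leading $ binds a named parameter through renderValueExp, a sql: prefix renders the remainder through RenderVar as a raw SQL expression, and anything else is single-quoted as a literal. An illustrative preset map (column names and values invented):

package main

// Illustrative only: the three preset forms handled above.
func main() {
	presets := map[string]string{
		"user_id":    "$user_id",  // bound parameter, rendered as e.g. $2 :: bigint
		"updated_at": "sql:now()", // raw SQL, rendered as (now()) :: <column type>
		"source":     "web",       // plain literal, rendered as 'web' :: <column type>
	}
	for col, val := range presets {
		println(col, "=>", val)
	}
}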
func (c *compilerContext) renderUpsert( func (c *compilerContext) renderUpsert(qc *qcode.QCode, w io.Writer,
w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) { vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0] root := &qc.Selects[0]
upsert, ok := vars[qc.ActionVar] upsert, ok := vars[qc.ActionVar]
if !ok { if !ok {
return 0, fmt.Errorf("variable '%s' not defined", qc.ActionVar) return 0, fmt.Errorf("variable '%s' not defined", qc.ActionVar)
@ -481,7 +466,7 @@ func (c *compilerContext) renderUpsert(
return 0, err return 0, err
} }
if _, err := c.renderInsert(w, qc, vars, ti); err != nil { if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
return 0, err return 0, err
} }
@ -542,10 +527,6 @@ func (c *compilerContext) renderConnectStmt(qc *qcode.QCode, w io.Writer,
rel := item.relPC rel := item.relPC
if rel == nil {
return errors.New("invalid connect value")
}
// Render only for a parent-to-child one-to-one relationship // Render only for a parent-to-child one-to-one relationship
// For this to work the child needs to be found first so its primary key // For this to work the child needs to be found first so its primary key
// can be set in the related column on the parent object. // can be set in the related column on the parent object.
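
For context, connect is super-graph's nested-mutation syntax for linking an existing row instead of inserting a new one; a sketch of a mutation that takes this path (table, columns, and ids are invented):

package main

import "fmt"

// Hypothetical mutation: the user row already exists, so it is connected
// rather than inserted; per the comment above, the child is resolved first
// so its primary key can be written into the parent's related column.
const connectGQL = `mutation {
  product(update: $data, id: 6) {
    id
    user { id }
  }
}`

func main() {
	// vars (illustrative): {"data": {"name": "Pie", "user": {"connect": {"id": 5}}}}
	fmt.Println(connectGQL)
}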
@ -691,7 +672,7 @@ func renderCteName(w io.Writer, item kvitem) error {
io.WriteString(w, item.ti.Name) io.WriteString(w, item.ti.Name)
if item._type == itemConnect || item._type == itemDisconnect { if item._type == itemConnect || item._type == itemDisconnect {
io.WriteString(w, `_`) io.WriteString(w, `_`)
int32String(w, item.id) int2string(w, item.id)
} }
io.WriteString(w, `"`) io.WriteString(w, `"`)
return nil return nil

View File

@ -72,7 +72,7 @@ func delete(t *testing.T) {
// } // }
// }` // }`
// sql := `WITH "users" AS (WITH "input" AS (SELECT '$1' :: json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"` // sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
// vars := map[string]json.RawMessage{ // vars := map[string]json.RawMessage{
// "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`), // "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
@ -97,7 +97,7 @@ func delete(t *testing.T) {
// } // }
// }` // }`
// sql := `WITH "users" AS (WITH "input" AS (SELECT '$1' :: json AS j) UPDATE "users" SET ("full_name", "email") = (SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t) WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"` // sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) UPDATE "users" SET ("full_name", "email") = (SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t) WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
// vars := map[string]json.RawMessage{ // vars := map[string]json.RawMessage{
// "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`), // "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),

View File

@ -139,9 +139,9 @@ func TestMain(m *testing.M) {
log.Fatal(err) log.Fatal(err)
} }
vars := map[string]string{ vars := psql.NewVariables(map[string]string{
"admin_account_id": "5", "admin_account_id": "5",
} })
pcompile = psql.NewCompiler(psql.Config{ pcompile = psql.NewCompiler(psql.Config{
Schema: schema, Schema: schema,

View File

@ -7,7 +7,6 @@ import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"strconv"
"strings" "strings"
"github.com/dosco/super-graph/core/internal/qcode" "github.com/dosco/super-graph/core/internal/qcode"
@ -18,25 +17,6 @@ const (
closeBlock = 500 closeBlock = 500
) )
type Param struct {
Name string
Type string
IsArray bool
}
type Metadata struct {
skipped uint32
params []Param
pindex map[string]int
}
type compilerContext struct {
md Metadata
w io.Writer
s []qcode.Select
*Compiler
}
type Variables map[string]json.RawMessage type Variables map[string]json.RawMessage
type Config struct { type Config struct {
@ -56,12 +36,12 @@ func NewCompiler(conf Config) *Compiler {
} }
} }
func (co *Compiler) AddRelationship(child, parent string, rel *DBRel) error { func (c *Compiler) AddRelationship(child, parent string, rel *DBRel) error {
return co.schema.SetRel(child, parent, rel) return c.schema.SetRel(child, parent, rel)
} }
func (co *Compiler) IDColumn(table string) (*DBColumn, error) { func (c *Compiler) IDColumn(table string) (*DBColumn, error) {
ti, err := co.schema.GetTable(table) ti, err := c.schema.GetTable(table)
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -73,47 +53,36 @@ func (co *Compiler) IDColumn(table string) (*DBColumn, error) {
return ti.PrimaryCol, nil return ti.PrimaryCol, nil
} }
func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (Metadata, []byte, error) { type compilerContext struct {
w io.Writer
s []qcode.Select
*Compiler
}
func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (uint32, []byte, error) {
w := &bytes.Buffer{} w := &bytes.Buffer{}
metad, err := co.Compile(w, qc, vars) skipped, err := co.Compile(qc, w, vars)
return metad, w.Bytes(), err return skipped, w.Bytes(), err
} }
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) { func (co *Compiler) Compile(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
return co.CompileWithMetadata(w, qc, vars, Metadata{})
}
func (co *Compiler) CompileWithMetadata(w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {
md.skipped = 0
if qc == nil {
return md, fmt.Errorf("qcode is nil")
}
switch qc.Type { switch qc.Type {
case qcode.QTQuery: case qcode.QTQuery:
return co.compileQueryWithMetadata(w, qc, vars, md) return co.compileQuery(qc, w, vars)
case qcode.QTInsert, qcode.QTUpdate, qcode.QTDelete, qcode.QTUpsert:
case qcode.QTInsert, return co.compileMutation(qc, w, vars)
qcode.QTUpdate,
qcode.QTDelete,
qcode.QTUpsert:
return co.compileMutation(w, qc, vars)
default:
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
} }
return 0, fmt.Errorf("Unknown operation type %d", qc.Type)
} }
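
Because Compile and CompileEx on master return a Metadata rather than the old skipped bitmask, a caller can recover the ordered parameter list and bind values positionally at execution time. A rough sketch of such glue code, assuming it lives in the psql package (runQuery and the vals map are invented, not part of this diff):

import (
	"context"
	"database/sql"

	"github.com/dosco/super-graph/core/internal/qcode"
)

// Hypothetical caller: compile once, then bind md.Params() in order.
func runQuery(ctx context.Context, db *sql.DB, co *Compiler, qc *qcode.QCode,
	vals map[string]interface{}) (*sql.Rows, error) {
	md, stmt, err := co.CompileEx(qc, nil)
	if err != nil {
		return nil, err
	}
	args := make([]interface{}, 0, len(md.Params()))
	for _, p := range md.Params() {
		args = append(args, vals[p.Name]) // $name placeholders became $1..$n in this order
	}
	return db.QueryContext(ctx, string(stmt), args...)
}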
func (co *Compiler) compileQueryWithMetadata( func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {
if len(qc.Selects) == 0 { if len(qc.Selects) == 0 {
return md, errors.New("empty query") return 0, errors.New("empty query")
} }
c := &compilerContext{md, w, qc.Selects, co} c := &compilerContext{w, qc.Selects, co}
st := NewIntStack() st := NewIntStack()
i := 0 i := 0
@ -139,11 +108,13 @@ func (co *Compiler) compileQueryWithMetadata(
i++ i++
} }
var ignored uint32
if st.Len() != 0 { if st.Len() != 0 {
io.WriteString(c.w, `) as "__root" FROM `) io.WriteString(c.w, `) as "__root" FROM `)
} else { } else {
io.WriteString(c.w, `) as "__root"`) io.WriteString(c.w, `) as "__root"`)
return c.md, nil return ignored, nil
} }
for { for {
@ -156,40 +127,40 @@ func (co *Compiler) compileQueryWithMetadata(
if id < closeBlock { if id < closeBlock {
sel := &c.s[id] sel := &c.s[id]
if len(sel.Cols) == 0 {
continue
}
ti, err := c.schema.GetTable(sel.Name) ti, err := c.schema.GetTable(sel.Name)
if err != nil { if err != nil {
return c.md, err return 0, err
} }
if sel.Type != qcode.STUnion { if sel.ParentID == -1 {
if len(sel.Cols) == 0 { io.WriteString(c.w, `(`)
continue } else {
} c.renderLateralJoin(sel)
if sel.ParentID == -1 {
io.WriteString(c.w, `(`)
} else {
c.renderLateralJoin(sel)
}
if !ti.IsSingular {
c.renderPluralSelect(sel, ti)
}
if err := c.renderSelect(sel, ti, vars); err != nil {
return c.md, err
}
} }
if !ti.IsSingular {
c.renderPluralSelect(sel, ti)
}
skipped, err := c.renderSelect(sel, ti, vars)
if err != nil {
return 0, err
}
ignored |= skipped
for _, cid := range sel.Children { for _, cid := range sel.Children {
if hasBit(c.md.skipped, uint32(cid)) { if hasBit(skipped, uint32(cid)) {
continue continue
} }
child := &c.s[cid] child := &c.s[cid]
if child.SkipRender { if child.SkipRender {
continue continue
} }
st.Push(child.ID + closeBlock) st.Push(child.ID + closeBlock)
st.Push(child.ID) st.Push(child.ID)
} }
@ -197,48 +168,46 @@ func (co *Compiler) compileQueryWithMetadata(
} else { } else {
sel := &c.s[(id - closeBlock)] sel := &c.s[(id - closeBlock)]
if sel.Type != qcode.STUnion { ti, err := c.schema.GetTable(sel.Name)
ti, err := c.schema.GetTable(sel.Name) if err != nil {
if err != nil { return 0, err
return c.md, err
}
io.WriteString(c.w, `)`)
aliasWithID(c.w, "__sr", sel.ID)
io.WriteString(c.w, `)`)
aliasWithID(c.w, "__sj", sel.ID)
if !ti.IsSingular {
io.WriteString(c.w, `)`)
aliasWithID(c.w, "__sj", sel.ID)
}
if sel.ParentID == -1 {
if st.Len() != 0 {
io.WriteString(c.w, `, `)
}
} else {
c.renderLateralJoinClose(sel)
}
} }
if sel.Type != qcode.STMember { io.WriteString(c.w, `)`)
if len(sel.Args) != 0 { aliasWithID(c.w, "__sr", sel.ID)
for _, v := range sel.Args {
qcode.FreeNode(v) io.WriteString(c.w, `)`)
} aliasWithID(c.w, "__sj", sel.ID)
if !ti.IsSingular {
io.WriteString(c.w, `)`)
aliasWithID(c.w, "__sj", sel.ID)
}
if sel.ParentID == -1 {
if st.Len() != 0 {
io.WriteString(c.w, `, `)
}
} else {
c.renderLateralJoinClose(sel)
}
if len(sel.Args) != 0 {
i := 0
for _, v := range sel.Args {
qcode.FreeNode(v, 500)
i++
} }
} }
} }
} }
return c.md, nil return ignored, nil
} }
func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo) error { func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo) error {
io.WriteString(c.w, `SELECT coalesce(jsonb_agg("__sj_`) io.WriteString(c.w, `SELECT coalesce(jsonb_agg("__sj_`)
int32String(c.w, sel.ID) int2string(c.w, sel.ID)
io.WriteString(c.w, `"."json"), '[]') as "json"`) io.WriteString(c.w, `"."json"), '[]') as "json"`)
if sel.Paging.Type != qcode.PtOffset { if sel.Paging.Type != qcode.PtOffset {
@ -262,7 +231,7 @@ func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo)
io.WriteString(c.w, `, CONCAT_WS(','`) io.WriteString(c.w, `, CONCAT_WS(','`)
for i := 0; i < n; i++ { for i := 0; i < n; i++ {
io.WriteString(c.w, `, max("__cur_`) io.WriteString(c.w, `, max("__cur_`)
int32String(c.w, int32(i)) int2string(c.w, int32(i))
io.WriteString(c.w, `")`) io.WriteString(c.w, `")`)
} }
io.WriteString(c.w, `) as "cursor"`) io.WriteString(c.w, `) as "cursor"`)
@ -278,7 +247,7 @@ func (c *compilerContext) renderRootSelect(sel *qcode.Select) error {
io.WriteString(c.w, `', `) io.WriteString(c.w, `', `)
io.WriteString(c.w, `"__sj_`) io.WriteString(c.w, `"__sj_`)
int32String(c.w, sel.ID) int2string(c.w, sel.ID)
io.WriteString(c.w, `"."json"`) io.WriteString(c.w, `"."json"`)
if sel.Paging.Type != qcode.PtOffset { if sel.Paging.Type != qcode.PtOffset {
@ -287,14 +256,16 @@ func (c *compilerContext) renderRootSelect(sel *qcode.Select) error {
io.WriteString(c.w, `_cursor', `) io.WriteString(c.w, `_cursor', `)
io.WriteString(c.w, `"__sj_`) io.WriteString(c.w, `"__sj_`)
int32String(c.w, sel.ID) int2string(c.w, sel.ID)
io.WriteString(c.w, `"."cursor"`) io.WriteString(c.w, `"."cursor"`)
} }
return nil return nil
} }
func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) ([]*qcode.Column, error) { func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) (uint32, []*qcode.Column, error) {
var skipped uint32
cols := make([]*qcode.Column, 0, len(sel.Cols)) cols := make([]*qcode.Column, 0, len(sel.Cols))
colmap := make(map[string]struct{}, len(sel.Cols)) colmap := make(map[string]struct{}, len(sel.Cols))
@ -336,7 +307,9 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
rel, err := c.schema.GetRel(child.Name, ti.Name) rel, err := c.schema.GetRel(child.Name, ti.Name)
if err != nil { if err != nil {
return nil, err return 0, nil, err
//skipped |= (1 << uint(id))
//continue
} }
switch rel.Type { switch rel.Type {
@ -362,25 +335,16 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
if _, ok := colmap[rel.Left.Col]; !ok { if _, ok := colmap[rel.Left.Col]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col}) cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
colmap[rel.Left.Col] = struct{}{} colmap[rel.Left.Col] = struct{}{}
c.md.skipped |= (1 << uint(id)) skipped |= (1 << uint(id))
}
case RelPolymorphic:
if _, ok := colmap[rel.Left.Col]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Left.Col})
colmap[rel.Left.Col] = struct{}{}
}
if _, ok := colmap[rel.Right.Table]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Right.Table, FieldName: rel.Right.Table})
colmap[rel.Right.Table] = struct{}{}
} }
default: default:
return nil, fmt.Errorf("unknown relationship %s", rel) return 0, nil, fmt.Errorf("unknown relationship %s", rel)
//skipped |= (1 << uint(id))
} }
} }
return cols, nil return skipped, cols, nil
} }
// This // This
@ -449,30 +413,22 @@ func (c *compilerContext) addSeekPredicate(sel *qcode.Select) error {
return nil return nil
} }
func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) error { func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) (uint32, error) {
var rel *DBRel var rel *DBRel
var err error var err error
// Relationships must be between union parents and their parents
if sel.ParentID != -1 { if sel.ParentID != -1 {
if sel.Type == qcode.STMember && sel.UParentID != -1 { parent := c.s[sel.ParentID]
cn := c.s[sel.ParentID].Name
pn := c.s[sel.UParentID].Name
rel, err = c.schema.GetRel(cn, pn)
} else { rel, err = c.schema.GetRel(ti.Name, parent.Name)
pn := c.s[sel.ParentID].Name if err != nil {
rel, err = c.schema.GetRel(ti.Name, pn) return 0, err
} }
} }
skipped, childCols, err := c.initSelect(sel, ti, vars)
if err != nil { if err != nil {
return err return 0, err
}
childCols, err := c.initSelect(sel, ti, vars)
if err != nil {
return err
} }
// SELECT // SELECT
@ -482,13 +438,13 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
// } // }
io.WriteString(c.w, `SELECT to_jsonb("__sr_`) io.WriteString(c.w, `SELECT to_jsonb("__sr_`)
int32String(c.w, sel.ID) int2string(c.w, sel.ID)
io.WriteString(c.w, `".*) `) io.WriteString(c.w, `".*) `)
if sel.Paging.Type != qcode.PtOffset { if sel.Paging.Type != qcode.PtOffset {
for i := range sel.OrderBy { for i := range sel.OrderBy {
io.WriteString(c.w, `- '__cur_`) io.WriteString(c.w, `- '__cur_`)
int32String(c.w, int32(i)) int2string(c.w, int32(i))
io.WriteString(c.w, `' `) io.WriteString(c.w, `' `)
} }
} }
@ -498,15 +454,15 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
if sel.Paging.Type != qcode.PtOffset { if sel.Paging.Type != qcode.PtOffset {
for i := range sel.OrderBy { for i := range sel.OrderBy {
io.WriteString(c.w, `, "__cur_`) io.WriteString(c.w, `, "__cur_`)
int32String(c.w, int32(i)) int2string(c.w, int32(i))
io.WriteString(c.w, `"`) io.WriteString(c.w, `"`)
} }
} }
io.WriteString(c.w, `FROM (SELECT `) io.WriteString(c.w, `FROM (SELECT `)
if err := c.renderColumns(sel, ti); err != nil { if err := c.renderColumns(sel, ti, skipped); err != nil {
return err return 0, err
} }
if sel.Paging.Type != qcode.PtOffset { if sel.Paging.Type != qcode.PtOffset {
@ -514,7 +470,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
io.WriteString(c.w, `, LAST_VALUE(`) io.WriteString(c.w, `, LAST_VALUE(`)
colWithTableID(c.w, ti.Name, sel.ID, ob.Col) colWithTableID(c.w, ti.Name, sel.ID, ob.Col)
io.WriteString(c.w, `) OVER() AS "__cur_`) io.WriteString(c.w, `) OVER() AS "__cur_`)
int32String(c.w, int32(i)) int2string(c.w, int32(i))
io.WriteString(c.w, `"`) io.WriteString(c.w, `"`)
} }
} }
@ -522,8 +478,9 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
io.WriteString(c.w, ` FROM (`) io.WriteString(c.w, ` FROM (`)
// FROM (SELECT .... ) // FROM (SELECT .... )
if err = c.renderBaseSelect(sel, ti, rel, childCols); err != nil { err = c.renderBaseSelect(sel, ti, rel, childCols, skipped)
return err if err != nil {
return skipped, err
} }
//fmt.Fprintf(w, `) AS "%s_%d"`, c.sel.Name, c.sel.ID) //fmt.Fprintf(w, `) AS "%s_%d"`, c.sel.Name, c.sel.ID)
@ -532,7 +489,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
// END-FROM // END-FROM
return nil return skipped, nil
} }
func (c *compilerContext) renderLateralJoin(sel *qcode.Select) error { func (c *compilerContext) renderLateralJoin(sel *qcode.Select) error {
@ -553,33 +510,36 @@ func (c *compilerContext) renderJoin(sel *qcode.Select, ti *DBTableInfo) error {
} }
func (c *compilerContext) renderJoinByName(table, parent string, id int32) error { func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
rel, _ := c.schema.GetRel(table, parent) rel, err := c.schema.GetRel(table, parent)
if err != nil {
return err
}
// This join is only required for one-to-many relations since // This join is only required for one-to-many relations since
// these make use of join tables that need to be pulled in. // these make use of join tables that need to be pulled in.
if rel == nil || rel.Type != RelOneToManyThrough { if rel.Type != RelOneToManyThrough {
return nil return err
} }
// pt, err := c.schema.GetTable(parent) pt, err := c.schema.GetTable(parent)
// if err != nil { if err != nil {
// return err return err
// } }
//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`, //fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
//rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Left.Col) //rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Left.Col)
io.WriteString(c.w, ` LEFT OUTER JOIN "`) io.WriteString(c.w, ` LEFT OUTER JOIN "`)
io.WriteString(c.w, rel.Through.Table) io.WriteString(c.w, rel.Through)
io.WriteString(c.w, `" ON ((`) io.WriteString(c.w, `" ON ((`)
colWithTable(c.w, rel.Through.Table, rel.Through.ColL) colWithTable(c.w, rel.Through, rel.ColT)
io.WriteString(c.w, `) = (`) io.WriteString(c.w, `) = (`)
colWithTable(c.w, rel.Left.Table, rel.Left.Col) colWithTableID(c.w, pt.Name, id, rel.Left.Col)
io.WriteString(c.w, `))`) io.WriteString(c.w, `))`)
return nil return nil
} }
func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo) error { func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32) error {
i := 0 i := 0
var cn string var cn string
@ -615,7 +575,7 @@ func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo) erro
i += c.renderRemoteRelColumns(sel, ti, i) i += c.renderRemoteRelColumns(sel, ti, i)
return c.renderJoinColumns(sel, ti, i) return c.renderJoinColumns(sel, ti, skipped, i)
} }
func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableInfo, colsRendered int) int { func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableInfo, colsRendered int) int {
@ -640,12 +600,12 @@ func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableI
return i return i
} }
func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo, colsRendered int) error { func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32, colsRendered int) error {
// columns previously rendered // columns previously rendered
i := colsRendered i := colsRendered
for _, id := range sel.Children { for _, id := range sel.Children {
if hasBit(c.md.skipped, uint32(id)) { if hasBit(skipped, uint32(id)) {
continue continue
} }
childSel := &c.s[id] childSel := &c.s[id]
@ -660,37 +620,14 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
continue continue
} }
if childSel.Type == qcode.STUnion { io.WriteString(c.w, `"__sj_`)
rel, err := c.schema.GetRel(childSel.Name, ti.Name) int2string(c.w, childSel.ID)
if err != nil { io.WriteString(c.w, `"."json"`)
return err alias(c.w, childSel.FieldName)
}
io.WriteString(c.w, `(CASE `)
for _, uid := range childSel.Children {
unionSel := &c.s[uid]
io.WriteString(c.w, `WHEN `)
colWithTableID(c.w, ti.Name, sel.ID, rel.Right.Table)
io.WriteString(c.w, ` = `)
squoted(c.w, unionSel.Name)
io.WriteString(c.w, ` THEN `)
io.WriteString(c.w, `"__sj_`)
int32String(c.w, unionSel.ID)
io.WriteString(c.w, `"."json"`)
}
io.WriteString(c.w, `END)`)
alias(c.w, childSel.FieldName)
} else {
io.WriteString(c.w, `"__sj_`)
int32String(c.w, childSel.ID)
io.WriteString(c.w, `"."json"`)
alias(c.w, childSel.FieldName)
}
if childSel.Paging.Type != qcode.PtOffset { if childSel.Paging.Type != qcode.PtOffset {
io.WriteString(c.w, `, "__sj_`) io.WriteString(c.w, `, "__sj_`)
int32String(c.w, childSel.ID) int2string(c.w, childSel.ID)
io.WriteString(c.w, `"."cursor" AS "`) io.WriteString(c.w, `"."cursor" AS "`)
io.WriteString(c.w, childSel.FieldName) io.WriteString(c.w, childSel.FieldName)
io.WriteString(c.w, `_cursor"`) io.WriteString(c.w, `_cursor"`)
@ -703,7 +640,7 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
} }
func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, rel *DBRel, func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, rel *DBRel,
childCols []*qcode.Column) error { childCols []*qcode.Column, skipped uint32) error {
isRoot := (rel == nil) isRoot := (rel == nil)
isFil := (sel.Where != nil && sel.Where.Op != qcode.OpNop) isFil := (sel.Where != nil && sel.Where.Op != qcode.OpNop)
hasOrder := len(sel.OrderBy) != 0 hasOrder := len(sel.OrderBy) != 0
@ -718,7 +655,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
c.renderDistinctOn(sel, ti) c.renderDistinctOn(sel, ti)
} }
realColsRendered, isAgg, err := c.renderBaseColumns(sel, ti, childCols) realColsRendered, isAgg, err := c.renderBaseColumns(sel, ti, childCols, skipped)
if err != nil { if err != nil {
return err return err
} }
@ -741,8 +678,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
} }
io.WriteString(c.w, ` WHERE (`) io.WriteString(c.w, ` WHERE (`)
if err := c.renderRelationship(sel, ti); err != nil {
if err := c.renderRelationship(sel, rel); err != nil {
return err return err
} }
if isFil { if isFil {
@ -774,7 +710,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
case ti.IsSingular: case ti.IsSingular:
io.WriteString(c.w, ` LIMIT ('1') :: integer`) io.WriteString(c.w, ` LIMIT ('1') :: integer`)
case sel.Paging.Limit != "": case len(sel.Paging.Limit) != 0:
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit) //fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
io.WriteString(c.w, ` LIMIT ('`) io.WriteString(c.w, ` LIMIT ('`)
io.WriteString(c.w, sel.Paging.Limit) io.WriteString(c.w, sel.Paging.Limit)
@ -787,7 +723,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
io.WriteString(c.w, ` LIMIT ('20') :: integer`) io.WriteString(c.w, ` LIMIT ('20') :: integer`)
} }
if sel.Paging.Offset != "" { if len(sel.Paging.Offset) != 0 {
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset) //fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
io.WriteString(c.w, ` OFFSET ('`) io.WriteString(c.w, ` OFFSET ('`)
io.WriteString(c.w, sel.Paging.Offset) io.WriteString(c.w, sel.Paging.Offset)
@ -846,34 +782,29 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
io.WriteString(c.w, `, `) io.WriteString(c.w, `, `)
} }
io.WriteString(c.w, `a[`) io.WriteString(c.w, `a[`)
int32String(c.w, int32(i+1)) int2string(c.w, int32(i+1))
io.WriteString(c.w, `] as `) io.WriteString(c.w, `] as `)
quoted(c.w, ob.Col) quoted(c.w, ob.Col)
} }
io.WriteString(c.w, ` FROM string_to_array(`) io.WriteString(c.w, ` FROM string_to_array('{{cursor}}', ',') as a) `)
c.md.renderValueExp(c.w, Param{Name: "cursor", Type: "json"})
io.WriteString(c.w, `, ',') as a) `)
return nil return nil
} }
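
The cursor CTE decodes an opaque, comma-separated cursor back into one value per ORDER BY column (a[1], a[2], ...); master binds the cursor as a parameter where v0.13.35 spliced a '{{cursor}}' template into the SQL. A toy sketch of the keyset idea the decoded values feed into (values invented):

package main

import (
	"fmt"
	"strings"
)

// Illustrative only: string_to_array(cursor, ',') in SQL does what
// strings.Split does here; each element pairs with an ORDER BY column.
func main() {
	cursor := "9.99,123" // hypothetical: last seen (price, id)
	a := strings.Split(cursor, ",")
	fmt.Printf("(price, id) > (%s, %s)\n", a[0], a[1]) // row-value seek predicate
}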
func (c *compilerContext) renderRelationshipByName(table, parent string) error { func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
rel, err := c.schema.GetRel(table, parent) parent := c.s[sel.ParentID]
pti, err := c.schema.GetTable(parent.Name)
if err != nil { if err != nil {
return err return err
} }
return c.renderRelationship(nil, rel)
return c.renderRelationshipByName(ti.Name, pti.Name, parent.ID)
} }
func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) error { func (c *compilerContext) renderRelationshipByName(table, parent string, id int32) error {
var pid int32 rel, err := c.schema.GetRel(table, parent)
if err != nil {
switch { return err
case sel == nil:
pid = int32(-1)
case sel.Type == qcode.STMember:
pid = sel.UParentID
default:
pid = sel.ParentID
} }
io.WriteString(c.w, `((`) io.WriteString(c.w, `((`)
@ -886,19 +817,19 @@ func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) erro
switch { switch {
case !rel.Left.Array && rel.Right.Array: case !rel.Left.Array && rel.Right.Array:
colWithTable(c.w, rel.Left.Table, rel.Left.Col) colWithTable(c.w, table, rel.Left.Col)
io.WriteString(c.w, `) = any (`) io.WriteString(c.w, `) = any (`)
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col) colWithTableID(c.w, parent, id, rel.Right.Col)
case rel.Left.Array && !rel.Right.Array: case rel.Left.Array && !rel.Right.Array:
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col) colWithTableID(c.w, parent, id, rel.Right.Col)
io.WriteString(c.w, `) = any (`) io.WriteString(c.w, `) = any (`)
colWithTable(c.w, rel.Left.Table, rel.Left.Col) colWithTable(c.w, table, rel.Left.Col)
default: default:
colWithTable(c.w, rel.Left.Table, rel.Left.Col) colWithTable(c.w, table, rel.Left.Col)
io.WriteString(c.w, `) = (`) io.WriteString(c.w, `) = (`)
colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col) colWithTableID(c.w, parent, id, rel.Right.Col)
} }
case RelOneToManyThrough: case RelOneToManyThrough:
@ -908,34 +839,25 @@ func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) erro
switch { switch {
case !rel.Left.Array && rel.Right.Array: case !rel.Left.Array && rel.Right.Array:
colWithTable(c.w, rel.Left.Table, rel.Left.Col) colWithTable(c.w, table, rel.Left.Col)
io.WriteString(c.w, `) = any (`) io.WriteString(c.w, `) = any (`)
colWithTable(c.w, rel.Through.Table, rel.Through.ColR) colWithTable(c.w, rel.Through, rel.Right.Col)
case rel.Left.Array && !rel.Right.Array: case rel.Left.Array && !rel.Right.Array:
colWithTable(c.w, rel.Through.Table, rel.Through.ColR) colWithTable(c.w, rel.Through, rel.Right.Col)
io.WriteString(c.w, `) = any (`) io.WriteString(c.w, `) = any (`)
colWithTable(c.w, rel.Left.Table, rel.Left.Col) colWithTable(c.w, table, rel.Left.Col)
default: default:
colWithTable(c.w, rel.Through.Table, rel.Through.ColR) colWithTable(c.w, table, rel.Left.Col)
io.WriteString(c.w, `) = (`) io.WriteString(c.w, `) = (`)
colWithTable(c.w, rel.Right.Table, rel.Right.Col) colWithTable(c.w, rel.Through, rel.Right.Col)
} }
case RelEmbedded: case RelEmbedded:
colWithTable(c.w, rel.Left.Table, rel.Left.Col) colWithTable(c.w, rel.Left.Table, rel.Left.Col)
io.WriteString(c.w, `) = (`) io.WriteString(c.w, `) = (`)
colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col) colWithTableID(c.w, parent, id, rel.Left.Col)
case RelPolymorphic:
colWithTable(c.w, sel.Name, rel.Right.Col)
io.WriteString(c.w, `) = (`)
colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)
io.WriteString(c.w, `) AND (`)
colWithTableID(c.w, rel.Left.Table, pid, rel.Right.Table)
io.WriteString(c.w, `) = (`)
squoted(c.w, sel.Name)
} }
io.WriteString(c.w, `))`) io.WriteString(c.w, `))`)
@ -1011,8 +933,11 @@ func (c *compilerContext) renderExp(ex *qcode.Exp, ti *DBTableInfo, skipNested b
return err return err
} }
} else if err := c.renderOp(val, ti); err != nil { } else {
return err //fmt.Fprintf(w, `(("%s"."%s") `, c.sel.Name, val.Col)
if err := c.renderOp(val, ti); err != nil {
return err
}
} }
} }
//qcode.FreeExp(val) //qcode.FreeExp(val)
@ -1045,7 +970,7 @@ func (c *compilerContext) renderNestedWhere(ex *qcode.Exp, ti *DBTableInfo) erro
io.WriteString(c.w, ` WHERE `) io.WriteString(c.w, ` WHERE `)
if err := c.renderRelationshipByName(cti.Name, ti.Name); err != nil { if err := c.renderRelationshipByName(cti.Name, ti.Name, -1); err != nil {
return err return err
} }
@ -1074,7 +999,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
return nil return nil
} }
if ex.Col != "" { if len(ex.Col) != 0 {
if col, ok = ti.ColMap[ex.Col]; !ok { if col, ok = ti.ColMap[ex.Col]; !ok {
return fmt.Errorf("no column '%s' found ", ex.Col) return fmt.Errorf("no column '%s' found ", ex.Col)
} }
@ -1102,9 +1027,9 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
case qcode.OpLesserThan: case qcode.OpLesserThan:
io.WriteString(c.w, `<`) io.WriteString(c.w, `<`)
case qcode.OpIn: case qcode.OpIn:
io.WriteString(c.w, `= ANY`) io.WriteString(c.w, `IN`)
case qcode.OpNotIn: case qcode.OpNotIn:
io.WriteString(c.w, `!= ANY`) io.WriteString(c.w, `NOT IN`)
case qcode.OpLike: case qcode.OpLike:
io.WriteString(c.w, `LIKE`) io.WriteString(c.w, `LIKE`)
case qcode.OpNotLike: case qcode.OpNotLike:
@ -1154,13 +1079,12 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
io.WriteString(c.w, `((`) io.WriteString(c.w, `((`)
colWithTable(c.w, ti.Name, ti.TSVCol.Name) colWithTable(c.w, ti.Name, ti.TSVCol.Name)
if c.schema.ver >= 110000 { if c.schema.ver >= 110000 {
io.WriteString(c.w, `) @@ websearch_to_tsquery(`) io.WriteString(c.w, `) @@ websearch_to_tsquery('{{`)
} else { } else {
io.WriteString(c.w, `) @@ to_tsquery(`) io.WriteString(c.w, `) @@ to_tsquery('{{`)
} }
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: "string"}) io.WriteString(c.w, ex.Val)
io.WriteString(c.w, `))`) io.WriteString(c.w, `}}'))`)
return nil return nil
default: default:
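
Version gate, for context: websearch_to_tsquery shipped in Postgres 11 (hence the 110000 check) and tolerates free-form user input, while to_tsquery expects strict tsquery syntax; master additionally binds the search term as a parameter instead of splicing a '{{...}}' template. A trivial restatement of the gate:

// Sketch of the branch above; 110000 is Postgres's integer form of 11.0.
func tsqueryFn(ver int) string {
	if ver >= 110000 {
		return "websearch_to_tsquery"
	}
	return "to_tsquery"
}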
@ -1246,25 +1170,15 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
val, ok := vars[ex.Val] val, ok := vars[ex.Val]
switch { switch {
case ok && strings.HasPrefix(val, "sql:"): case ok && strings.HasPrefix(val, "sql:"):
io.WriteString(c.w, `(`) io.WriteString(c.w, ` (`)
c.md.RenderVar(c.w, val[4:]) io.WriteString(c.w, val[4:])
io.WriteString(c.w, `)`) io.WriteString(c.w, `)`)
case ok: case ok:
squoted(c.w, val) squoted(c.w, val)
case ex.Op == qcode.OpIn || ex.Op == qcode.OpNotIn:
io.WriteString(c.w, `(ARRAY(SELECT json_array_elements_text(`)
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: true})
io.WriteString(c.w, `))`)
io.WriteString(c.w, ` :: `)
io.WriteString(c.w, col.Type)
io.WriteString(c.w, `[])`)
return
default: default:
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: false}) io.WriteString(c.w, ` '{{`)
io.WriteString(c.w, ex.Val)
io.WriteString(c.w, `}}'`)
} }
case qcode.ValRef: case qcode.ValRef:
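
The OpIn/OpNotIn rewrite from IN to = ANY pairs with the renderVal branch above: the whole list arrives as a single JSON-encoded parameter and is unpacked into a typed Postgres array, which IN (...) cannot do without splicing elements into the SQL text. Illustrative shape of the output (table and types invented):

package main

import "fmt"

// Illustrative only: one placeholder carries the whole list.
func main() {
	stmt := "SELECT id FROM products WHERE id = ANY (" +
		"ARRAY(SELECT json_array_elements_text($1)) :: bigint[])"
	args := []interface{}{`[1,2,3]`} // the JSON list binds as a single, typed $1
	fmt.Println(stmt, args)
}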
@ -1314,7 +1228,7 @@ func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
return 0 return 0
} }
func hasBit(n, pos uint32) bool { func hasBit(n uint32, pos uint32) bool {
val := n & (1 << pos) val := n & (1 << pos)
return (val > 0) return (val > 0)
} }
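
The skipped/ignored value threaded through these functions is a plain uint32 bitmask keyed by child select ID, so hasBit is how later passes test whether a child was folded into its parent CTE and must not be rendered again (which also quietly caps usable IDs at 32). For example:

package main

import "fmt"

func hasBit(n, pos uint32) bool { return n&(1<<pos) > 0 }

func main() {
	var skipped uint32
	skipped |= 1 << 3 // child select with ID 3 handled inside the parent
	skipped |= 1 << 5
	fmt.Println(hasBit(skipped, 3), hasBit(skipped, 4)) // true false
}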
@ -1329,7 +1243,7 @@ func aliasWithID(w io.Writer, alias string, id int32) {
io.WriteString(w, ` AS "`) io.WriteString(w, ` AS "`)
io.WriteString(w, alias) io.WriteString(w, alias)
io.WriteString(w, `_`) io.WriteString(w, `_`)
int32String(w, id) int2string(w, id)
io.WriteString(w, `"`) io.WriteString(w, `"`)
} }
@ -1346,7 +1260,7 @@ func colWithTableID(w io.Writer, table string, id int32, col string) {
io.WriteString(w, table) io.WriteString(w, table)
if id >= 0 { if id >= 0 {
io.WriteString(w, `_`) io.WriteString(w, `_`)
int32String(w, id) int2string(w, id)
} }
io.WriteString(w, `"."`) io.WriteString(w, `"."`)
io.WriteString(w, col) io.WriteString(w, col)
@ -1365,6 +1279,26 @@ func squoted(w io.Writer, identifier string) {
io.WriteString(w, `'`) io.WriteString(w, `'`)
} }
func int32String(w io.Writer, val int32) { const charset = "0123456789"
io.WriteString(w, strconv.FormatInt(int64(val), 10))
func int2string(w io.Writer, val int32) {
if val < 10 {
w.Write([]byte{charset[val]})
return
}
temp := int32(0)
val2 := val
for val2 > 0 {
temp *= 10
temp += val2 % 10
val2 = int32(float64(val2 / 10))
}
val3 := temp
for val3 > 0 {
d := val3 % 10
val3 /= 10
w.Write([]byte{charset[d]})
}
} }
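
Reviewer note on this hunk: the digit-reversal trick in the v0.13.35 int2string drops trailing zeros (reversing 100 gives temp == 1, so it prints "1"), which is presumably why master swaps it for strconv.FormatInt. A quick reproduction:

package main

import (
	"fmt"
	"strconv"
)

// The old reversal approach, condensed: reverse the digits into temp, then
// print temp's digits in reverse. Trailing zeros never make it into temp.
func int2string(val int32) string {
	const charset = "0123456789"
	if val < 10 {
		return string(charset[val])
	}
	temp := int32(0)
	for v := val; v > 0; v /= 10 {
		temp = temp*10 + v%10
	}
	out := []byte{}
	for v := temp; v > 0; v /= 10 {
		out = append(out, charset[v%10])
	}
	return string(out)
}

func main() {
	fmt.Println(int2string(100))                   // prints "1": trailing zeros lost
	fmt.Println(strconv.FormatInt(int64(100), 10)) // prints "100"
}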

View File

@ -32,20 +32,6 @@ func withComplexArgs(t *testing.T) {
compileGQLToPSQL(t, gql, nil, "user") compileGQLToPSQL(t, gql, nil, "user")
} }
func withWhereIn(t *testing.T) {
gql := `query {
products(where: { id: { in: $list } }) {
id
}
}`
vars := map[string]json.RawMessage{
"list": json.RawMessage(`[1,2,3]`),
}
compileGQLToPSQL(t, gql, vars, "user")
}
func withWhereAndList(t *testing.T) { func withWhereAndList(t *testing.T) {
gql := `query { gql := `query {
products( products(
@ -307,100 +293,6 @@ func multiRoot(t *testing.T) {
compileGQLToPSQL(t, gql, nil, "user") compileGQLToPSQL(t, gql, nil, "user")
} }
func withFragment1(t *testing.T) {
gql := `
fragment userFields1 on user {
id
email
}
query {
users {
...userFields2
created_at
...userFields1
}
}
fragment userFields2 on user {
first_name
last_name
}`
compileGQLToPSQL(t, gql, nil, "anon")
}
func withFragment2(t *testing.T) {
gql := `
query {
users {
...userFields2
created_at
...userFields1
}
}
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}`
compileGQLToPSQL(t, gql, nil, "anon")
}
func withFragment3(t *testing.T) {
gql := `
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}
query {
users {
...userFields2
created_at
...userFields1
}
}
`
compileGQLToPSQL(t, gql, nil, "anon")
}
// func withInlineFragment(t *testing.T) {
// gql := `
// query {
// users {
// ... on users {
// id
// email
// }
// created_at
// ... on user {
// first_name
// last_name
// }
// }
// }
// `
// compileGQLToPSQL(t, gql, nil, "anon")
// }
func withCursor(t *testing.T) { func withCursor(t *testing.T) {
gql := `query { gql := `query {
Products( Products(
@ -475,7 +367,6 @@ func blockedFunctions(t *testing.T) {
func TestCompileQuery(t *testing.T) { func TestCompileQuery(t *testing.T) {
t.Run("withComplexArgs", withComplexArgs) t.Run("withComplexArgs", withComplexArgs)
t.Run("withWhereIn", withWhereIn)
t.Run("withWhereAndList", withWhereAndList) t.Run("withWhereAndList", withWhereAndList)
t.Run("withWhereIsNull", withWhereIsNull) t.Run("withWhereIsNull", withWhereIsNull)
t.Run("withWhereMultiOr", withWhereMultiOr) t.Run("withWhereMultiOr", withWhereMultiOr)
@ -494,10 +385,6 @@ func TestCompileQuery(t *testing.T) {
t.Run("queryWithVariables", queryWithVariables) t.Run("queryWithVariables", queryWithVariables)
t.Run("withWhereOnRelations", withWhereOnRelations) t.Run("withWhereOnRelations", withWhereOnRelations)
t.Run("multiRoot", multiRoot) t.Run("multiRoot", multiRoot)
t.Run("withFragment1", withFragment1)
t.Run("withFragment2", withFragment2)
t.Run("withFragment3", withFragment3)
//t.Run("withInlineFragment", withInlineFragment)
t.Run("jsonColumnAsTable", jsonColumnAsTable) t.Run("jsonColumnAsTable", jsonColumnAsTable)
t.Run("withCursor", withCursor) t.Run("withCursor", withCursor)
t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon) t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)
@ -542,7 +429,7 @@ func BenchmarkCompile(b *testing.B) {
b.Fatal(err) b.Fatal(err)
} }
_, err = pcompile.Compile(w, qc, nil) _, err = pcompile.Compile(qc, w, nil)
if err != nil { if err != nil {
b.Fatal(err) b.Fatal(err)
} }
@ -563,7 +450,7 @@ func BenchmarkCompileParallel(b *testing.B) {
b.Fatal(err) b.Fatal(err)
} }
_, err = pcompile.Compile(w, qc, nil) _, err = pcompile.Compile(qc, w, nil)
if err != nil { if err != nil {
b.Fatal(err) b.Fatal(err)
} }

View File

@ -11,7 +11,6 @@ type DBSchema struct {
ver int ver int
t map[string]*DBTableInfo t map[string]*DBTableInfo
rm map[string]map[string]*DBRel rm map[string]map[string]*DBRel
vt map[string]*VirtualTable
fm map[string]*DBFunction fm map[string]*DBFunction
} }
@ -34,19 +33,15 @@ const (
RelOneToOne RelType = iota + 1 RelOneToOne RelType = iota + 1
RelOneToMany RelOneToMany
RelOneToManyThrough RelOneToManyThrough
RelPolymorphic
RelEmbedded RelEmbedded
RelRemote RelRemote
) )
type DBRel struct { type DBRel struct {
Type RelType Type RelType
Through struct { Through string
Table string ColT string
ColL string Left struct {
ColR string
}
Left struct {
col *DBColumn col *DBColumn
Table string Table string
Col string Col string
@ -65,7 +60,6 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
ver: info.Version, ver: info.Version,
t: make(map[string]*DBTableInfo), t: make(map[string]*DBTableInfo),
rm: make(map[string]map[string]*DBRel), rm: make(map[string]map[string]*DBRel),
vt: make(map[string]*VirtualTable),
fm: make(map[string]*DBFunction, len(info.Functions)), fm: make(map[string]*DBFunction, len(info.Functions)),
} }
@ -76,10 +70,6 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
} }
} }
if err := schema.virtualRels(info.VTables); err != nil {
return nil, err
}
for i, t := range info.Tables { for i, t := range info.Tables {
err := schema.firstDegreeRels(t, info.Columns[i]) err := schema.firstDegreeRels(t, info.Columns[i])
if err != nil { if err != nil {
@ -112,7 +102,7 @@ func (s *DBSchema) addTable(
singular := flect.Singularize(t.Key) singular := flect.Singularize(t.Key)
plural := flect.Pluralize(t.Key) plural := flect.Pluralize(t.Key)
ts := &DBTableInfo{ s.t[singular] = &DBTableInfo{
Name: t.Name, Name: t.Name,
Type: t.Type, Type: t.Type,
IsSingular: true, IsSingular: true,
@ -122,9 +112,8 @@ func (s *DBSchema) addTable(
Singular: singular, Singular: singular,
Plural: plural, Plural: plural,
} }
s.t[singular] = ts
tp := &DBTableInfo{ s.t[plural] = &DBTableInfo{
Name: t.Name, Name: t.Name,
Type: t.Type, Type: t.Type,
IsSingular: false, IsSingular: false,
@ -134,15 +123,14 @@ func (s *DBSchema) addTable(
Singular: singular, Singular: singular,
Plural: plural, Plural: plural,
} }
s.t[plural] = tp
if al, ok := aliases[t.Key]; ok { if al, ok := aliases[t.Key]; ok {
for i := range al { for i := range al {
k1 := flect.Singularize(al[i]) k1 := flect.Singularize(al[i])
s.t[k1] = ts s.t[k1] = s.t[singular]
k2 := flect.Pluralize(al[i]) k2 := flect.Pluralize(al[i])
s.t[k2] = tp s.t[k2] = s.t[plural]
} }
} }
@ -166,54 +154,6 @@ func (s *DBSchema) addTable(
return nil return nil
} }
func (s *DBSchema) virtualRels(vts []VirtualTable) error {
for _, vt := range vts {
s.vt[vt.Name] = &vt
for _, t := range s.t {
idCol, ok := t.ColMap[vt.IDColumn]
if !ok {
continue
}
if _, ok = t.ColMap[vt.TypeColumn]; !ok {
continue
}
nt := DBTable{
ID: -1,
Name: vt.Name,
Key: strings.ToLower(vt.Name),
Type: "virtual",
}
if err := s.addTable(nt, nil, nil); err != nil {
return err
}
rel := &DBRel{Type: RelPolymorphic}
rel.Left.col = idCol
rel.Left.Table = t.Name
rel.Left.Col = idCol.Name
rcol := DBColumn{
Name: vt.FKeyColumn,
Key: strings.ToLower(vt.FKeyColumn),
Type: idCol.Type,
}
rel.Right.col = &rcol
rel.Right.Table = vt.TypeColumn
rel.Right.Col = rcol.Name
if err := s.SetRel(vt.Name, t.Name, rel); err != nil {
return err
}
}
}
return nil
}
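
virtualRels (master only) is what wires up polymorphic associations: every table carrying the configured id/type column pair is linked to a synthetic table by a RelPolymorphic relation. A sketch of the configuration it consumes, assuming it sits inside the psql package (all names invented):

// Hypothetical: a Rails-style polymorphic association where rows with
// (subject_id, subject_type) columns point at different concrete tables.
func exampleVirtualRels(s *DBSchema) error {
	return s.virtualRels([]VirtualTable{{
		Name:       "subject",      // virtual table name used in queries
		IDColumn:   "subject_id",   // column holding the target row's id
		TypeColumn: "subject_type", // column naming the concrete target table
		FKeyColumn: "id",           // key resolved on the target table
	}})
}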
func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error { func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
ct := t.Key ct := t.Key
cti, ok := s.t[ct] cti, ok := s.t[ct]
@ -224,7 +164,7 @@ func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
for i := range cols { for i := range cols {
c := cols[i] c := cols[i]
if c.FKeyTable == "" { if len(c.FKeyTable) == 0 {
continue continue
} }
@ -328,7 +268,7 @@ func (s *DBSchema) secondDegreeRels(t DBTable, cols []DBColumn) error {
for i := range cols { for i := range cols {
c := cols[i] c := cols[i]
if c.FKeyTable == "" { if len(c.FKeyTable) == 0 {
continue continue
} }
@ -404,17 +344,16 @@ func (s *DBSchema) updateSchemaOTMT(
// One-to-many-through relation between 1st foreign key table and the // One-to-many-through relation between 1st foreign key table and the
// 2nd foreign key table // 2nd foreign key table
rel1 := &DBRel{Type: RelOneToManyThrough} rel1 := &DBRel{Type: RelOneToManyThrough}
rel1.Through.Table = ti.Name rel1.Through = ti.Name
rel1.Through.ColL = col1.Name rel1.ColT = col2.Name
rel1.Through.ColR = col2.Name
rel1.Left.col = fc1 rel1.Left.col = &col2
rel1.Left.Table = col1.FKeyTable rel1.Left.Table = col2.FKeyTable
rel1.Left.Col = fc1.Name rel1.Left.Col = fc2.Name
rel1.Right.col = fc2 rel1.Right.col = &col1
rel1.Right.Table = t2 rel1.Right.Table = ti.Name
rel1.Right.Col = fc2.Name rel1.Right.Col = col1.Name
if err := s.SetRel(t1, t2, rel1); err != nil { if err := s.SetRel(t1, t2, rel1); err != nil {
return err return err
@ -423,17 +362,16 @@ func (s *DBSchema) updateSchemaOTMT(
// One-to-many-through relation between 2nd foreign key table and the // One-to-many-through relation between 2nd foreign key table and the
// 1st foreign key table // 1st foreign key table
rel2 := &DBRel{Type: RelOneToManyThrough} rel2 := &DBRel{Type: RelOneToManyThrough}
rel2.Through.Table = ti.Name rel2.Through = ti.Name
rel2.Through.ColL = col2.Name rel2.ColT = col1.Name
rel2.Through.ColR = col1.Name
rel2.Left.col = fc2 rel1.Left.col = fc1
rel2.Left.Table = col2.FKeyTable rel2.Left.Table = col1.FKeyTable
rel2.Left.Col = fc2.Name rel2.Left.Col = fc1.Name
rel2.Right.col = fc1 rel1.Right.col = &col2
rel2.Right.Table = t1 rel2.Right.Table = ti.Name
rel2.Right.Col = fc1.Name rel2.Right.Col = col2.Name
if err := s.SetRel(t2, t1, rel2); err != nil { if err := s.SetRel(t2, t1, rel2); err != nil {
return err return err

View File

@ -14,18 +14,14 @@ func (rt RelType) String() string {
return "remote" return "remote"
case RelEmbedded: case RelEmbedded:
return "embedded" return "embedded"
case RelPolymorphic:
return "polymorphic"
} }
return "" return ""
} }
func (re *DBRel) String() string { func (re *DBRel) String() string {
if re.Type == RelOneToManyThrough { if re.Type == RelOneToManyThrough {
return fmt.Sprintf("'%s.%s' --(%s.%s, %s.%s)--> '%s.%s'", return fmt.Sprintf("'%s.%s' --(Through: %s)--> '%s.%s'",
re.Left.Table, re.Left.Col, re.Left.Table, re.Left.Col, re.Through, re.Right.Table, re.Right.Col)
re.Through.Table, re.Through.ColL, re.Through.Table, re.Through.ColR,
re.Right.Table, re.Right.Col)
} }
return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'", return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'",
re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col) re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col)

View File

@ -14,17 +14,9 @@ type DBInfo struct {
Tables []DBTable Tables []DBTable
Columns [][]DBColumn Columns [][]DBColumn
Functions []DBFunction Functions []DBFunction
VTables []VirtualTable
colMap map[string]map[string]*DBColumn colMap map[string]map[string]*DBColumn
} }
type VirtualTable struct {
Name string
IDColumn string
TypeColumn string
FKeyColumn string
}
func GetDBInfo(db *sql.DB, schema string) (*DBInfo, error) { func GetDBInfo(db *sql.DB, schema string) (*DBInfo, error) {
di := &DBInfo{} di := &DBInfo{}
var version string var version string

View File

@ -1,26 +1,26 @@
=== RUN TestCompileInsert === RUN TestCompileInsert
=== RUN TestCompileInsert/simpleInsert === RUN TestCompileInsert/simpleInsert
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/singleInsert === RUN TestCompileInsert/singleInsert
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/bulkInsert === RUN TestCompileInsert/bulkInsert
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/simpleInsertWithPresets === RUN TestCompileInsert/simpleInsertWithPresets
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, $2 :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertManyToMany === RUN TestCompileInsert/nestedInsertManyToMany
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT 
"customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT 
"customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToMany
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOne
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
---- PASS: TestCompileInsert (0.03s)
+--- PASS: TestCompileInsert (0.02s)
--- PASS: TestCompileInsert/simpleInsert (0.00s)
--- PASS: TestCompileInsert/singleInsert (0.00s)
--- PASS: TestCompileInsert/bulkInsert (0.00s)
@@ -33,14 +33,14 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
=== RUN TestCompileMutate
=== RUN TestCompileMutate/singleUpsert
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileMutate/singleUpsertWhere
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileMutate/bulkUpsert
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileMutate/delete
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
---- PASS: TestCompileMutate (0.01s)
+--- PASS: TestCompileMutate (0.00s)
--- PASS: TestCompileMutate/singleUpsert (0.00s)
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
@@ -48,8 +48,6 @@ WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :
=== RUN TestCompileQuery
=== RUN TestCompileQuery/withComplexArgs
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
-=== RUN TestCompileQuery/withWhereIn
-SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = ANY (ARRAY(SELECT json_array_elements_text($1)) :: bigint[])))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
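The withWhereIn test exists only on the master side. A query shape matching the ANY(ARRAY(...)) SQL it checks, with the id list passed as a JSON array variable (illustrative):

    query {
      products(where: { id: { in: $list } }) {
        id
      }
    }
    # { "list": [1, 2, 3] }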
=== RUN TestCompileQuery/withWhereAndList
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withWhereIsNull
@@ -57,9 +55,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
=== RUN TestCompileQuery/withWhereMultiOr
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/fetchByID
-SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = $1 :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
+SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/searchQuery
-SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery($1)) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery($1)) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery($1))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
+SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
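A plausible search fixture; the search argument feeds websearch_to_tsquery() and exposes the synthetic search_rank / search_headline_description columns (illustrative):

    query {
      products(search: $query) {
        id
        name
        search_rank
        search_headline_description
      }
    }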
=== RUN TestCompileQuery/oneToMany
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/oneToManyReverse
@ -67,9 +65,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
=== RUN TestCompileQuery/oneToManyArray
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/manyToMany
-SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
+SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/manyToManyReverse
-SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products"."id")) WHERE ((("purchases"."customer_id") = ("customers"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
+SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
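Both manyToMany tests likely share a fixture of this shape, walking from one side of the purchases join table to the other; the two sides of this diff differ only in how the compiler anchors that join (aliased derived table vs. base table), not in the query itself (illustrative):

    query {
      products {
        name
        customers {
          email
          full_name
        }
      }
    }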
=== RUN TestCompileQuery/aggFunction
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunctionBlockedByCol
@@ -79,32 +77,25 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
=== RUN TestCompileQuery/aggFunctionWithFilter
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/syntheticTables
-SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = $1 :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
+SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/queryWithVariables
-SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = $1 :: numeric(7,2)) AND (("products"."id") = $2 :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
+SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
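A plausible fixture for queryWithVariables; user-supplied variables are exactly what become $1/$2 parameters on the master side and '{{...}}' template slots on the v0.13.35 side (illustrative):

    query {
      product(id: $product_id, where: { price: { eq: $product_price } }) {
        id
        name
      }
    }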
=== RUN TestCompileQuery/withWhereOnRelations
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/multiRoot
-SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
+SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
-=== RUN TestCompileQuery/withFragment1
-SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
-=== RUN TestCompileQuery/withFragment2
-SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
-=== RUN TestCompileQuery/withFragment3
-SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
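The withFragment tests exist only on the master side (v0.13.35 appears to predate fragment support). A query of roughly this shape would produce the merged column list above, with the fragment fields folded into the select; the fragment names and field split are guesses (illustrative):

    fragment userFields1 on user {
      id
      email
    }

    fragment userFields2 on user {
      first_name
      last_name
    }

    query {
      users {
        ...userFields1
        created_at
        ...userFields2
      }
    }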
=== RUN TestCompileQuery/jsonColumnAsTable
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withCursor
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array($1, ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0" SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/blockedQuery
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/blockedFunctions
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
--- PASS: TestCompileQuery (0.03s) --- PASS: TestCompileQuery (0.02s)
--- PASS: TestCompileQuery/withComplexArgs (0.00s) --- PASS: TestCompileQuery/withComplexArgs (0.00s)
--- PASS: TestCompileQuery/withWhereIn (0.00s)
--- PASS: TestCompileQuery/withWhereAndList (0.00s) --- PASS: TestCompileQuery/withWhereAndList (0.00s)
--- PASS: TestCompileQuery/withWhereIsNull (0.00s) --- PASS: TestCompileQuery/withWhereIsNull (0.00s)
--- PASS: TestCompileQuery/withWhereMultiOr (0.00s) --- PASS: TestCompileQuery/withWhereMultiOr (0.00s)
@ -123,9 +114,6 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
--- PASS: TestCompileQuery/queryWithVariables (0.00s) --- PASS: TestCompileQuery/queryWithVariables (0.00s)
--- PASS: TestCompileQuery/withWhereOnRelations (0.00s) --- PASS: TestCompileQuery/withWhereOnRelations (0.00s)
--- PASS: TestCompileQuery/multiRoot (0.00s) --- PASS: TestCompileQuery/multiRoot (0.00s)
--- PASS: TestCompileQuery/withFragment1 (0.00s)
--- PASS: TestCompileQuery/withFragment2 (0.00s)
--- PASS: TestCompileQuery/withFragment3 (0.00s)
--- PASS: TestCompileQuery/jsonColumnAsTable (0.00s) --- PASS: TestCompileQuery/jsonColumnAsTable (0.00s)
--- PASS: TestCompileQuery/withCursor (0.00s) --- PASS: TestCompileQuery/withCursor (0.00s)
--- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s) --- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s)
@ -133,23 +121,23 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
--- PASS: TestCompileQuery/blockedFunctions (0.00s) --- PASS: TestCompileQuery/blockedFunctions (0.00s)
=== RUN TestCompileUpdate === RUN TestCompileUpdate
=== RUN TestCompileUpdate/singleUpdate === RUN TestCompileUpdate/singleUpdate
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = $2 :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/simpleUpdateWithPresets === RUN TestCompileUpdate/simpleUpdateWithPresets
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateManyToMany === RUN TestCompileUpdate/nestedUpdateManyToMany
WITH "_sg_input" AS (SELECT $1 :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = $2 :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", 
"purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = $2 :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", 
"purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToMany === RUN TestCompileUpdate/nestedUpdateOneToMany
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOne === RUN TestCompileUpdate/nestedUpdateOneToOne
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect === RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = $2 :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect === RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect === RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileUpdate (0.02s) --- PASS: TestCompileUpdate (0.02s)
--- PASS: TestCompileUpdate/singleUpdate (0.00s) --- PASS: TestCompileUpdate/singleUpdate (0.00s)
--- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s) --- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s)
@ -160,4 +148,4 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALU
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s) --- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s) --- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS PASS
ok github.com/dosco/super-graph/core/internal/psql 0.323s ok github.com/dosco/super-graph/core/internal/psql 0.306s

View File

@ -10,8 +10,8 @@ import (
"github.com/dosco/super-graph/core/internal/util" "github.com/dosco/super-graph/core/internal/util"
) )
func (c *compilerContext) renderUpdate( func (c *compilerContext) renderUpdate(qc *qcode.QCode, w io.Writer,
w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) { vars Variables, ti *DBTableInfo) (uint32, error) {
update, ok := vars[qc.ActionVar] update, ok := vars[qc.ActionVar]
if !ok { if !ok {
@ -21,10 +21,9 @@ func (c *compilerContext) renderUpdate(
return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar) return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar)
} }
io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `) io.WriteString(c.w, `WITH "_sg_input" AS (SELECT '{{`)
c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"}) io.WriteString(c.w, qc.ActionVar)
// io.WriteString(c.w, qc.ActionVar) io.WriteString(c.w, `}}' :: json AS j)`)
io.WriteString(c.w, ` :: json AS j)`)
st := util.NewStack() st := util.NewStack()
st.Push(kvitem{_type: itemUpdate, key: ti.Name, val: update, ti: ti}) st.Push(kvitem{_type: itemUpdate, key: ti.Name, val: update, ti: ti})
@ -85,11 +84,11 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
io.WriteString(w, `UPDATE `) io.WriteString(w, `UPDATE `)
quoted(w, ti.Name) quoted(w, ti.Name)
io.WriteString(w, ` SET (`) io.WriteString(w, ` SET (`)
c.renderInsertUpdateColumns(qc, jt, ti, sk, false) renderInsertUpdateColumns(w, qc, jt, ti, sk, false)
renderNestedUpdateRelColumns(w, item.kvitem, false) renderNestedUpdateRelColumns(w, item.kvitem, false)
io.WriteString(w, `) = (SELECT `) io.WriteString(w, `) = (SELECT `)
c.renderInsertUpdateColumns(qc, jt, ti, sk, true) renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
renderNestedUpdateRelColumns(w, item.kvitem, true) renderNestedUpdateRelColumns(w, item.kvitem, true)
io.WriteString(w, ` FROM "_sg_input" i`) io.WriteString(w, ` FROM "_sg_input" i`)
@ -121,10 +120,12 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
} }
io.WriteString(w, `)`) io.WriteString(w, `)`)
} else if qc.Selects[0].Where != nil { } else {
io.WriteString(w, `WHERE `) if qc.Selects[0].Where != nil {
if err := c.renderWhere(&qc.Selects[0], ti); err != nil { io.WriteString(w, ` WHERE `)
return err if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
return err
}
} }
} }
@ -196,9 +197,8 @@ func renderNestedUpdateRelTables(w io.Writer, item kvitem) error {
return nil return nil
} }
func (c *compilerContext) renderDelete( func (c *compilerContext) renderDelete(qc *qcode.QCode, w io.Writer,
w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) { vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0] root := &qc.Selects[0]
io.WriteString(c.w, `WITH `) io.WriteString(c.w, `WITH `)

View File

@ -223,7 +223,7 @@ func nestedUpdateOneToOneWithDisconnect(t *testing.T) {
// } // }
// }` // }`
// sql := `WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = 2) RETURNING "products".*) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"` // sql := `WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = 2) RETURNING "products".*) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
// vars := map[string]json.RawMessage{ // vars := map[string]json.RawMessage{
// "data": json.RawMessage(`{ // "data": json.RawMessage(`{

View File

@ -0,0 +1,13 @@
package psql
import "regexp"
func NewVariables(varlist map[string]string) map[string]string {
re := regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
vars := make(map[string]string, len(varlist))
for k, v := range varlist {
vars[k] = re.ReplaceAllString(v, `{{$1}}`)
}
return vars
}
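A minimal usage sketch (the variable name and SQL are made up): NewVariables rewrites $var references in configured variable definitions into the {{var}} template form that the v0.13.35 SQL above embeds directly.

vars := NewVariables(map[string]string{
	"admin_account_id": "SELECT id FROM accounts WHERE admin = $user_id",
})
// vars["admin_account_id"] == "SELECT id FROM accounts WHERE admin = {{user_id}}"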

View File

@ -1,11 +0,0 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16 120888 9236 ns/op 3755 B/op 28 allocs/op
BenchmarkQCompileP-16 502248 2620 ns/op 3795 B/op 28 allocs/op
BenchmarkParse-16 128370 9294 ns/op 3902 B/op 18 allocs/op
BenchmarkParseP-16 575752 2340 ns/op 3903 B/op 18 allocs/op
BenchmarkSchemaParse-16 212048 5779 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP-16 630918 1686 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 7.710s

View File

@ -1,13 +0,0 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16 118282 9686 ns/op 4031 B/op 30 allocs/op
BenchmarkQCompileP-16 427531 2710 ns/op 4077 B/op 30 allocs/op
BenchmarkQCompileFragment-16 140588 8328 ns/op 8903 B/op 13 allocs/op
BenchmarkParse-16 131396 9212 ns/op 4175 B/op 18 allocs/op
BenchmarkParseP-16 503778 2310 ns/op 4176 B/op 18 allocs/op
BenchmarkParseFragment-16 143725 8158 ns/op 10193 B/op 9 allocs/op
BenchmarkSchemaParse-16 240609 5060 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP-16 785116 1534 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 11.092s

View File

@ -1,13 +1,13 @@
package qcode package qcode
import ( import (
"regexp"
"sort" "sort"
"strings" "strings"
) )
type Config struct { type Config struct {
DefaultBlock bool Blocklist []string
Blocklist []string
} }
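For context, constructing a compiler with the master-side config; per the Compiler field comment further down, DefaultBlock blocks tables that are not explicitly configured for the anon role. The Blocklist entries here are illustrative:

qcompile, err := NewCompiler(Config{
	DefaultBlock: true,
	Blocklist:    []string{"ar_internal_metadata", "schema_migrations"},
})
if err != nil {
	log.Fatal(err)
}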
type QueryConfig struct { type QueryConfig struct {
@ -46,7 +46,8 @@ type TRConfig struct {
} }
type trval struct { type trval struct {
query struct { readOnly bool
query struct {
limit string limit string
fil *Exp fil *Exp
filNU bool filNU bool
@ -131,3 +132,12 @@ func mapToList(m map[string]string) []string {
sort.Strings(list) sort.Strings(list)
return list return list
} }
var varRe = regexp.MustCompile(`\$([a-zA-Z0-9_]+)`)
func parsePresets(m map[string]string) map[string]string {
for k, v := range m {
m[k] = varRe.ReplaceAllString(v, `{{$1}}`)
}
return m
}
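Same idea as psql.NewVariables above, applied in place to a role's preset map; a quick sketch of the effect (keys and values illustrative):

m := parsePresets(map[string]string{
	"updated_at": "now",
	"user_id":    "$user_id",
})
// m["user_id"] == "{{user_id}}"; plain literals like "now" are left untouched.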

View File

@ -11,18 +11,15 @@ import (
var ( var (
queryToken = []byte("query") queryToken = []byte("query")
mutationToken = []byte("mutation") mutationToken = []byte("mutation")
fragmentToken = []byte("fragment")
subscriptionToken = []byte("subscription") subscriptionToken = []byte("subscription")
onToken = []byte("on")
trueToken = []byte("true") trueToken = []byte("true")
falseToken = []byte("false") falseToken = []byte("false")
quotesToken = []byte(`'"`) quotesToken = []byte(`'"`)
signsToken = []byte(`+-`) signsToken = []byte(`+-`)
punctuatorToken = []byte(`!():=[]{|}`)
spreadToken = []byte(`...`) spreadToken = []byte(`...`)
digitToken = []byte(`0123456789`) digitToken = []byte(`0123456789`)
dotToken = []byte(`.`) dotToken = []byte(`.`)
punctuatorToken = `!():=[]{|}`
) )
// Pos represents a byte position in the original input text from which // Pos represents a byte position in the original input text from which
@ -46,8 +43,6 @@ const (
itemName itemName
itemQuery itemQuery
itemMutation itemMutation
itemFragment
itemOn
itemSub itemSub
itemPunctuator itemPunctuator
itemArgsOpen itemArgsOpen
@ -141,7 +136,8 @@ func (l *lexer) current() (Pos, Pos) {
func (l *lexer) emit(t itemType) { func (l *lexer) emit(t itemType) {
l.items = append(l.items, item{t, l.start, l.pos, l.line}) l.items = append(l.items, item{t, l.start, l.pos, l.line})
// Some items contain text internally. If so, count their newlines. // Some items contain text internally. If so, count their newlines.
if t == itemStringVal { switch t {
case itemStringVal:
for i := l.start; i < l.pos; i++ { for i := l.start; i < l.pos; i++ {
if l.input[i] == '\n' { if l.input[i] == '\n' {
l.line++ l.line++
@ -267,11 +263,11 @@ func lexRoot(l *lexer) stateFn {
l.backup() l.backup()
return lexString return lexString
case r == '.': case r == '.':
l.acceptRun(dotToken) if len(l.input) >= 3 {
s, e := l.current() if equals(l.input, 0, 3, spreadToken) {
if equals(l.input, s, e, spreadToken) { l.emit(itemSpread)
l.emit(itemSpread) return lexRoot
return lexRoot }
} }
fallthrough // '.' can start a number. fallthrough // '.' can start a number.
case r == '+' || r == '-' || ('0' <= r && r <= '9'): case r == '+' || r == '-' || ('0' <= r && r <= '9'):
@ -303,14 +299,10 @@ func lexName(l *lexer) stateFn {
switch { switch {
case equals(l.input, s, e, queryToken): case equals(l.input, s, e, queryToken):
l.emitL(itemQuery) l.emitL(itemQuery)
case equals(l.input, s, e, fragmentToken):
l.emitL(itemFragment)
case equals(l.input, s, e, mutationToken): case equals(l.input, s, e, mutationToken):
l.emitL(itemMutation) l.emitL(itemMutation)
case equals(l.input, s, e, subscriptionToken): case equals(l.input, s, e, subscriptionToken):
l.emitL(itemSub) l.emitL(itemSub)
case equals(l.input, s, e, onToken):
l.emitL(itemOn)
case equals(l.input, s, e, trueToken): case equals(l.input, s, e, trueToken):
l.emitL(itemBoolVal) l.emitL(itemBoolVal)
case equals(l.input, s, e, falseToken): case equals(l.input, s, e, falseToken):
@ -403,15 +395,35 @@ func isAlphaNumeric(r rune) bool {
return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r) return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
} }
func equals(b []byte, s, e Pos, val []byte) bool { func equals(b []byte, s Pos, e Pos, val []byte) bool {
return bytes.EqualFold(b[s:e], val) n := 0
for i := s; i < e; i++ {
if n >= len(val) {
return true
}
switch {
case b[i] >= 'A' && b[i] <= 'Z' && ('a'+(b[i]-'A')) != val[n]:
return false
case b[i] != val[n]:
return false
}
n++
}
return true
} }
func contains(b []byte, s, e Pos, chars string) bool { func contains(b []byte, s Pos, e Pos, val []byte) bool {
return bytes.ContainsAny(b[s:e], chars) for i := s; i < e; i++ {
for n := 0; n < len(val); n++ {
if b[i] == val[n] {
return true
}
}
}
return false
} }
func lowercase(b []byte, s, e Pos) { func lowercase(b []byte, s Pos, e Pos) {
for i := s; i < e; i++ { for i := s; i < e; i++ {
if b[i] >= 'A' && b[i] <= 'Z' { if b[i] >= 'A' && b[i] <= 'Z' {
b[i] = ('a' + (b[i] - 'A')) b[i] = ('a' + (b[i] - 'A'))
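On master these helpers collapse to the standard library: bytes.EqualFold for case-insensitive keyword matching and bytes.ContainsAny for punctuator checks. A quick illustration of the stdlib calls (the input string is made up):

package main

import (
	"bytes"
	"fmt"
)

func main() {
	input := []byte("QUERY { users { id } }")
	fmt.Println(bytes.EqualFold(input[0:5], []byte("query"))) // true: case-insensitive match
	fmt.Println(bytes.ContainsAny(input[6:7], "!():=[]{|}"))  // true: '{' is a punctuator
}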

View File

@ -3,9 +3,10 @@ package qcode
import ( import (
"errors" "errors"
"fmt" "fmt"
"hash/maphash"
"sync" "sync"
"unsafe" "unsafe"
"github.com/dosco/super-graph/core/internal/util"
) )
var ( var (
@ -49,19 +50,6 @@ func (o *Operation) Reset() {
*o = zeroOperation *o = zeroOperation
} }
type Fragment struct {
Name string
On string
Fields []Field
fieldsA [10]Field
}
var zeroFragment = Fragment{}
func (f *Fragment) Reset() {
*f = zeroFragment
}
type Field struct { type Field struct {
ID int32 ID int32
ParentID int32 ParentID int32
@ -71,13 +59,11 @@ type Field struct {
argsA [5]Arg argsA [5]Arg
Children []int32 Children []int32
childrenA [5]int32 childrenA [5]int32
Union bool
} }
type Arg struct { type Arg struct {
Name string Name string
Val *Node Val *Node
df bool
} }
type Node struct { type Node struct {
@ -96,8 +82,6 @@ func (n *Node) Reset() {
} }
type Parser struct { type Parser struct {
frags map[uint64]*Fragment
h maphash.Hash
input []byte // the string being scanned input []byte // the string being scanned
pos int pos int
items []item items []item
@ -112,192 +96,12 @@ var opPool = sync.Pool{
New: func() interface{} { return new(Operation) }, New: func() interface{} { return new(Operation) },
} }
var fragPool = sync.Pool{
New: func() interface{} { return new(Fragment) },
}
var lexPool = sync.Pool{ var lexPool = sync.Pool{
New: func() interface{} { return new(lexer) }, New: func() interface{} { return new(lexer) },
} }
func Parse(gql []byte) (*Operation, error) { func Parse(gql []byte) (*Operation, error) {
var err error return parseSelectionSet(gql)
if len(gql) == 0 {
return nil, errors.New("blank query")
}
l := lexPool.Get().(*lexer)
l.Reset()
defer lexPool.Put(l)
if err = lex(l, gql); err != nil {
return nil, err
}
p := &Parser{
input: l.input,
pos: -1,
items: l.items,
}
op := opPool.Get().(*Operation)
op.Reset()
op.Fields = op.fieldsA[:0]
s := -1
qf := false
for {
if p.peek(itemEOF) {
p.ignore()
break
}
if p.peek(itemFragment) {
p.ignore()
if f, err := p.parseFragment(); err != nil {
fragPool.Put(f)
return nil, err
}
} else {
if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
s = p.pos
qf = true
}
p.ignore()
}
}
p.reset(s)
if err := p.parseOp(op); err != nil {
return nil, err
}
for _, v := range p.frags {
fragPool.Put(v)
}
return op, nil
}
func (p *Parser) parseFragment() (*Fragment, error) {
var err error
frag := fragPool.Get().(*Fragment)
frag.Reset()
frag.Fields = frag.fieldsA[:0]
if p.peek(itemName) {
frag.Name = p.val(p.next())
} else {
return frag, errors.New("fragment: missing name")
}
if p.peek(itemOn) {
p.ignore()
} else {
return frag, errors.New("fragment: missing 'on' keyword")
}
if p.peek(itemName) {
frag.On = p.vall(p.next())
} else {
return frag, errors.New("fragment: missing table name after 'on' keyword")
}
if p.peek(itemObjOpen) {
p.ignore()
} else {
return frag, fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
}
frag.Fields, err = p.parseFields(frag.Fields)
if err != nil {
return frag, fmt.Errorf("fragment: %v", err)
}
if p.frags == nil {
p.frags = make(map[uint64]*Fragment)
}
_, _ = p.h.WriteString(frag.Name)
k := p.h.Sum64()
p.h.Reset()
p.frags[k] = frag
return frag, nil
}
func (p *Parser) parseOp(op *Operation) error {
var err error
var typeSet bool
if p.peek(itemQuery, itemMutation, itemSub) {
err = p.parseOpTypeAndArgs(op)
if err != nil {
return fmt.Errorf("%s: %v", op.Type, err)
}
typeSet = true
}
if p.peek(itemObjOpen) {
p.ignore()
if !typeSet {
op.Type = opQuery
}
for {
if p.peek(itemEOF, itemFragment) {
p.ignore()
break
}
op.Fields, err = p.parseFields(op.Fields)
if err != nil {
return fmt.Errorf("%s: %v", op.Type, err)
}
}
} else {
return fmt.Errorf("expecting a query, mutation or subscription, got: %s", p.next())
}
return nil
}
func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
item := p.next()
switch item._type {
case itemQuery:
op.Type = opQuery
case itemMutation:
op.Type = opMutate
case itemSub:
op.Type = opSub
}
op.Args = op.argsA[:0]
var err error
if p.peek(itemName) {
op.Name = p.val(p.next())
}
if p.peek(itemArgsOpen) {
p.ignore()
op.Args, err = p.parseOpParams(op.Args)
if err != nil {
return err
}
}
return nil
} }
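Both entry shapes converge on this machinery: a bare selection set defaults to a query operation, while an explicit keyword may carry a name and arguments. Illustrative calls against the exported Parse, on either side of this refactor (queries made up, error handling elided):

op1, _ := Parse([]byte(`{ products { id } }`))                        // bare selection set, Type = opQuery
op2, _ := Parse([]byte(`query getProducts { products { id name } }`)) // named op, op2.Name = "getProducts"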
func ParseArgValue(argVal string) (*Node, error) { func ParseArgValue(argVal string) (*Node, error) {
@ -319,158 +123,228 @@ func ParseArgValue(argVal string) (*Node, error) {
return op, err return op, err
} }
func (p *Parser) parseFields(fields []Field) ([]Field, error) { func parseSelectionSet(gql []byte) (*Operation, error) {
var err error var err error
st := NewStack()
if !p.peek(itemName, itemSpread) { if len(gql) == 0 {
return nil, fmt.Errorf("unexpected token: %s", p.peekNext()) return nil, errors.New("blank query")
} }
for { l := lexPool.Get().(*lexer)
if p.peek(itemEOF) { l.Reset()
p.ignore()
return nil, errors.New("invalid query") if err = lex(l, gql); err != nil {
return nil, err
}
p := &Parser{
input: l.input,
pos: -1,
items: l.items,
}
var op *Operation
if p.peek(itemObjOpen) {
p.ignore()
op, err = p.parseQueryOp()
} else {
op, err = p.parseOp()
}
if err != nil {
return nil, err
}
if p.peek(itemObjClose) {
p.ignore()
} else {
return nil, fmt.Errorf("operation missing closing '}'")
}
if !p.peek(itemEOF) {
p.ignore()
return nil, fmt.Errorf("invalid '%s' found after closing '}'", p.current())
}
lexPool.Put(l)
return op, err
}
func (p *Parser) next() item {
n := p.pos + 1
if n >= len(p.items) {
p.err = errEOT
return item{_type: itemEOF}
}
p.pos = n
return p.items[p.pos]
}
func (p *Parser) ignore() {
n := p.pos + 1
if n >= len(p.items) {
p.err = errEOT
return
}
p.pos = n
}
func (p *Parser) current() string {
item := p.items[p.pos]
return b2s(p.input[item.pos:item.end])
}
func (p *Parser) peek(types ...itemType) bool {
n := p.pos + 1
// if p.items[n]._type == itemEOF {
// return false
// }
if n >= len(p.items) {
return false
}
for i := 0; i < len(types); i++ {
if p.items[n]._type == types[i] {
return true
} }
}
return false
}
if p.peek(itemObjClose) { func (p *Parser) parseOp() (*Operation, error) {
p.ignore() if !p.peek(itemQuery, itemMutation, itemSub) {
err := errors.New("expecting a query, mutation or subscription")
return nil, err
}
item := p.next()
if st.Len() != 0 { op := opPool.Get().(*Operation)
st.Pop() op.Reset()
continue
} else {
break
}
}
if len(fields) >= maxFields { switch item._type {
return nil, fmt.Errorf("too many fields (max %d)", maxFields) case itemQuery:
} op.Type = opQuery
case itemMutation:
op.Type = opMutate
case itemSub:
op.Type = opSub
}
isFrag := false op.Fields = op.fieldsA[:0]
op.Args = op.argsA[:0]
if p.peek(itemSpread) { var err error
p.ignore()
isFrag = true
}
if isFrag { if p.peek(itemName) {
fields, err = p.parseFragmentFields(st, fields) op.Name = p.val(p.next())
} else { }
fields, err = p.parseNormalFields(st, fields)
}
if p.peek(itemArgsOpen) {
p.ignore()
op.Args, err = p.parseOpParams(op.Args)
if err != nil { if err != nil {
return nil, err return nil, err
} }
} }
return fields, nil
}
func (p *Parser) parseNormalFields(st *Stack, fields []Field) ([]Field, error) {
if !p.peek(itemName) {
return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
}
fields = append(fields, Field{ID: int32(len(fields))})
f := &fields[(len(fields) - 1)]
f.Args = f.argsA[:0]
f.Children = f.childrenA[:0]
// Parse the field
if err := p.parseField(f); err != nil {
return nil, err
}
if st.Len() == 0 {
f.ParentID = -1
} else {
pid := st.Peek()
f.ParentID = pid
fields[pid].Children = append(fields[pid].Children, f.ID)
}
// The first opening curly bracket after this
// introduces the columns or child fields
if p.peek(itemObjOpen) { if p.peek(itemObjOpen) {
p.ignore() p.ignore()
st.Push(f.ID)
for n := 0; n < 10; n++ {
if !p.peek(itemName) {
break
}
op.Fields, err = p.parseFields(op.Fields)
if err != nil {
return nil, err
}
}
} }
return fields, nil return op, nil
} }
func (p *Parser) parseFragmentFields(st *Stack, fields []Field) ([]Field, error) { func (p *Parser) parseQueryOp() (*Operation, error) {
op := opPool.Get().(*Operation)
op.Reset()
op.Type = opQuery
op.Fields = op.fieldsA[:0]
op.Args = op.argsA[:0]
var err error var err error
pid := st.Peek()
if p.peek(itemOn) { for n := 0; n < 10; n++ {
p.ignore() if !p.peek(itemName) {
fields[pid].Union = true break
}
if fields, err = p.parseNormalFields(st, fields); err != nil { op.Fields, err = p.parseFields(op.Fields)
if err != nil {
return nil, err
}
}
return op, nil
}
func (p *Parser) parseFields(fields []Field) ([]Field, error) {
st := util.NewStack()
for {
if len(fields) >= maxFields {
return nil, fmt.Errorf("too many fields (max %d)", maxFields)
}
if p.peek(itemObjClose) {
p.ignore()
st.Pop()
if st.Len() == 0 {
break
} else {
continue
}
}
if !p.peek(itemName) {
return nil, errors.New("expecting an alias or field name")
}
fields = append(fields, Field{ID: int32(len(fields))})
f := &fields[(len(fields) - 1)]
f.Args = f.argsA[:0]
f.Children = f.childrenA[:0]
// Parse the inside of the fields () parentheses
// in short parse the args like id, where, etc
if err := p.parseField(f); err != nil {
return nil, err return nil, err
} }
// If parent is a union selector than copy over args from the parent intf := st.Peek()
// to the first child which is the root selector for each union type. if pid, ok := intf.(int32); ok {
for i := pid + 1; i < int32(len(fields)); i++ { f.ParentID = pid
f := &fields[i] fields[pid].Children = append(fields[pid].Children, f.ID)
if f.ParentID == pid { } else {
f.Args = fields[pid].Args f.ParentID = -1
}
} }
} else { // The first opening curly bracket after this
if !p.peek(itemName) { // introduces the columns or child fields
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next()) if p.peek(itemObjOpen) {
} p.ignore()
st.Push(f.ID)
name := p.val(p.next()) } else if p.peek(itemObjClose) {
_, _ = p.h.WriteString(name) if st.Len() == 0 {
id := p.h.Sum64() break
p.h.Reset()
fr, ok := p.frags[id]
if !ok {
return nil, fmt.Errorf("no fragment named '%s' defined", name)
}
ff := fr.Fields
n := int32(len(fields))
fields = append(fields, ff...)
for i := 0; i < len(ff); i++ {
k := (n + int32(i))
f := &fields[k]
f.ID = int32(k)
// If this is the top-level point the parent to the parent of the
// previous field.
if f.ParentID == -1 {
f.ParentID = pid
if f.ParentID != -1 {
fields[pid].Children = append(fields[pid].Children, f.ID)
}
// Update all the other parents id's by our new place in this new array
} else { } else {
f.ParentID += n continue
}
// Copy over children since fields append is not a deep copy
f.Children = make([]int32, len(f.Children))
copy(f.Children, ff[i].Children)
// Copy over args since args append is not a deep copy
f.Args = make([]Arg, len(f.Args))
copy(f.Args, ff[i].Args)
// Update all the children which is needed.
for j := range f.Children {
f.Children[j] += n
} }
} }
} }
@ -511,7 +385,7 @@ func (p *Parser) parseOpParams(args []Arg) ([]Arg, error) {
return nil, fmt.Errorf("too many args (max %d)", maxArgs) return nil, fmt.Errorf("too many args (max %d)", maxArgs)
} }
if p.peek(itemEOF, itemArgsClose) { if p.peek(itemArgsClose) {
p.ignore() p.ignore()
break break
} }
@ -529,7 +403,7 @@ func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
return nil, fmt.Errorf("too many args (max %d)", maxArgs) return nil, fmt.Errorf("too many args (max %d)", maxArgs)
} }
if p.peek(itemEOF, itemArgsClose) { if p.peek(itemArgsClose) {
p.ignore() p.ignore()
break break
} }
@ -571,8 +445,10 @@ func (p *Parser) parseList() (*Node, error) {
} }
if ty == 0 { if ty == 0 {
ty = node.Type ty = node.Type
} else if ty != node.Type { } else {
return nil, errors.New("All values in a list must be of the same type") if ty != node.Type {
return nil, errors.New("All values in a list must be of the same type")
}
} }
node.Parent = parent node.Parent = parent
nodes = append(nodes, node) nodes = append(nodes, node)
@ -594,7 +470,7 @@ func (p *Parser) parseObj() (*Node, error) {
parent.Reset() parent.Reset()
for { for {
if p.peek(itemEOF, itemObjClose) { if p.peek(itemObjClose) {
p.ignore() p.ignore()
break break
} }
@ -669,57 +545,6 @@ func (p *Parser) vall(v item) string {
return b2s(p.input[v.pos:v.end]) return b2s(p.input[v.pos:v.end])
} }
func (p *Parser) peek(types ...itemType) bool {
n := p.pos + 1
l := len(types)
// if p.items[n]._type == itemEOF {
// return false
// }
if n >= len(p.items) {
return types[0] == itemEOF
}
if l == 1 {
return p.items[n]._type == types[0]
}
for i := 0; i < l; i++ {
if p.items[n]._type == types[i] {
return true
}
}
return false
}
func (p *Parser) next() item {
n := p.pos + 1
if n >= len(p.items) {
p.err = errEOT
return item{_type: itemEOF}
}
p.pos = n
return p.items[p.pos]
}
func (p *Parser) ignore() {
n := p.pos + 1
if n >= len(p.items) {
p.err = errEOT
return
}
p.pos = n
}
func (p *Parser) peekNext() string {
item := p.items[p.pos+1]
return b2s(p.input[item.pos:item.end])
}
func (p *Parser) reset(to int) {
p.pos = to
}
func b2s(b []byte) string { func b2s(b []byte) string {
return *(*string)(unsafe.Pointer(&b)) return *(*string)(unsafe.Pointer(&b))
} }
@ -753,9 +578,34 @@ func (t parserType) String() string {
case NodeList: case NodeList:
v = "node-list" v = "node-list"
} }
return v return fmt.Sprintf("<%s>", v)
} }
func FreeNode(n *Node) { // type Frees struct {
// n *Node
// loc int
// }
// var freeList []Frees
// func FreeNode(n *Node, loc int) {
// j := -1
// for i := range freeList {
// if n == freeList[i].n {
// j = i
// break
// }
// }
// if j == -1 {
// nodePool.Put(n)
// freeList = append(freeList, Frees{n, loc})
// } else {
// fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// }
// }
func FreeNode(n *Node, loc int) {
nodePool.Put(n) nodePool.Put(n)
} }

View File

@ -2,9 +2,8 @@ package qcode
import ( import (
"errors" "errors"
"testing"
"github.com/chirino/graphql/schema" "github.com/chirino/graphql/schema"
"testing"
) )
func TestCompile1(t *testing.T) { func TestCompile1(t *testing.T) {
@ -121,7 +120,7 @@ updateThread {
} }
} }
} }
}}` }`
qcompile, _ := NewCompiler(Config{}) qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(gql), "anon") _, err := qcompile.Compile([]byte(gql), "anon")
@ -131,93 +130,6 @@ updateThread {
} }
func TestFragmentsCompile1(t *testing.T) {
gql := `
fragment userFields1 on user {
id
email
}
query {
users {
...userFields2
created_at
...userFields1
}
}
fragment userFields2 on user {
first_name
last_name
}
`
qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(gql), "user")
if err != nil {
t.Fatal(err)
}
}
func TestFragmentsCompile2(t *testing.T) {
gql := `
query {
users {
...userFields2
created_at
...userFields1
}
}
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}`
qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(gql), "user")
if err != nil {
t.Fatal(err)
}
}
func TestFragmentsCompile3(t *testing.T) {
gql := `
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}
query {
users {
...userFields2
created_at
...userFields1
}
}
`
qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(gql), "user")
if err != nil {
t.Fatal(err)
}
}
var gql = []byte(` var gql = []byte(`
{products( {products(
# returns only 30 items # returns only 30 items
@ -239,29 +151,6 @@ var gql = []byte(`
price price
}}`) }}`)
var gqlWithFragments = []byte(`
fragment userFields1 on user {
id
email
__typename
}
query {
users {
...userFields2
created_at
...userFields1
__typename
}
}
fragment userFields2 on user {
first_name
last_name
__typename
}`)
func BenchmarkQCompile(b *testing.B) { func BenchmarkQCompile(b *testing.B) {
qcompile, _ := NewCompiler(Config{}) qcompile, _ := NewCompiler(Config{})
@ -294,22 +183,8 @@ func BenchmarkQCompileP(b *testing.B) {
}) })
} }
func BenchmarkQCompileFragment(b *testing.B) {
qcompile, _ := NewCompiler(Config{})
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
_, err := qcompile.Compile(gqlWithFragments, "user")
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkParse(b *testing.B) { func BenchmarkParse(b *testing.B) {
b.ResetTimer() b.ResetTimer()
b.ReportAllocs() b.ReportAllocs()
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {
@ -336,18 +211,6 @@ func BenchmarkParseP(b *testing.B) {
}) })
} }
func BenchmarkParseFragment(b *testing.B) {
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
_, err := Parse(gqlWithFragments)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkSchemaParse(b *testing.B) { func BenchmarkSchemaParse(b *testing.B) {
b.ResetTimer() b.ResetTimer()

View File

@ -12,7 +12,6 @@ import (
) )
type QType int type QType int
type SType int
type Action int type Action int
const ( const (
@ -20,8 +19,7 @@ const (
) )
const ( const (
QTUnknown QType = iota QTQuery QType = iota + 1
QTQuery
QTMutation QTMutation
QTInsert QTInsert
QTUpdate QTUpdate
@ -29,12 +27,6 @@ const (
QTUpsert QTUpsert
) )
const (
STNone SType = iota
STUnion
STMember
)
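
The change above gives `QType` an explicit `QTUnknown` zero value instead of starting the enum at `iota + 1`; either way an uninitialized value is distinguishable from `QTQuery`, but naming the zero makes switches and logs self-describing. A small sketch of the difference:

```go
package main

import "fmt"

type QType int

const (
	QTUnknown QType = iota // explicit zero value: "not set yet"
	QTQuery
	QTMutation
)

func (qt QType) String() string {
	switch qt {
	case QTQuery:
		return "query"
	case QTMutation:
		return "mutation"
	default:
		return "unknown"
	}
}

func main() {
	var qt QType    // zero-initialized, e.g. a fresh QCode field
	fmt.Println(qt) // "unknown": not silently treated as a query
}
```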
type QCode struct { type QCode struct {
Type QType Type QType
ActionVar string ActionVar string
@ -46,8 +38,6 @@ type QCode struct {
type Select struct { type Select struct {
ID int32 ID int32
ParentID int32 ParentID int32
UParentID int32
Type SType
Args map[string]*Node Args map[string]*Node
Name string Name string
FieldName string FieldName string
@ -180,10 +170,9 @@ const (
) )
type Compiler struct { type Compiler struct {
db bool // default block tables if not defined in anon role
tr map[string]map[string]*trval tr map[string]map[string]*trval
bl map[string]struct{} bl map[string]struct{}
defBlock bool
} }
var expPool = sync.Pool{ var expPool = sync.Pool{
@ -191,7 +180,7 @@ var expPool = sync.Pool{
} }
func NewCompiler(c Config) (*Compiler, error) { func NewCompiler(c Config) (*Compiler, error) {
co := &Compiler{defBlock: c.DefaultBlock} co := &Compiler{}
co.tr = make(map[string]map[string]*trval) co.tr = make(map[string]map[string]*trval)
co.bl = make(map[string]struct{}, len(c.Blocklist)) co.bl = make(map[string]struct{}, len(c.Blocklist))
@ -238,7 +227,7 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
return err return err
} }
trv.insert.cols = listToMap(trc.Insert.Columns) trv.insert.cols = listToMap(trc.Insert.Columns)
trv.insert.psmap = trc.Insert.Presets trv.insert.psmap = parsePresets(trc.Insert.Presets)
trv.insert.pslist = mapToList(trv.insert.psmap) trv.insert.pslist = mapToList(trv.insert.psmap)
trv.insert.block = trc.Insert.Block trv.insert.block = trc.Insert.Block
@ -248,7 +237,7 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
return err return err
} }
trv.update.cols = listToMap(trc.Update.Columns) trv.update.cols = listToMap(trc.Update.Columns)
trv.update.psmap = trc.Update.Presets trv.update.psmap = parsePresets(trc.Update.Presets)
trv.update.pslist = mapToList(trv.update.psmap) trv.update.pslist = mapToList(trv.update.psmap)
trv.update.block = trc.Update.Block trv.update.block = trc.Update.Block
@ -287,7 +276,6 @@ func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
return nil, err return nil, err
} }
freeNodes(op)
opPool.Put(op) opPool.Put(op)
return &qc, nil return &qc, nil
@ -356,22 +344,22 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
case QTInsert: case QTInsert:
if trv.insert.block { if trv.insert.block {
return fmt.Errorf("%s, insert blocked: %s", role, field.Name) return fmt.Errorf("insert blocked: %s", field.Name)
} }
case QTUpdate: case QTUpdate:
if trv.update.block { if trv.update.block {
return fmt.Errorf("%s, update blocked: %s", role, field.Name) return fmt.Errorf("update blocked: %s", field.Name)
} }
case QTDelete: case QTDelete:
if trv.delete.block { if trv.delete.block {
return fmt.Errorf("%s, delete blocked: %s", role, field.Name) return fmt.Errorf("delete blocked: %s", field.Name)
} }
} }
} else if role == "anon" { } else if role == "anon" {
skipRender = com.defBlock skipRender = true
} }
selects = append(selects, Select{ selects = append(selects, Select{
@ -382,11 +370,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
}) })
s := &selects[(len(selects) - 1)] s := &selects[(len(selects) - 1)]
if field.Union { if len(field.Alias) != 0 {
s.Type = STUnion
}
if field.Alias != "" {
s.FieldName = field.Alias s.FieldName = field.Alias
} else { } else {
s.FieldName = s.Name s.FieldName = s.Name
@ -397,11 +381,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
} else { } else {
p := &selects[s.ParentID] p := &selects[s.ParentID]
p.Children = append(p.Children, s.ID) p.Children = append(p.Children, s.ID)
if p.Type == STUnion {
s.Type = STMember
s.UParentID = p.ParentID
}
} }
if skipRender { if skipRender {
@ -439,7 +418,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
com.AddFilters(qc, s, role) com.AddFilters(qc, s, role)
s.Cols = make([]Column, 0, len(field.Children)) s.Cols = make([]Column, 0, len(field.Children))
cm := make(map[string]struct{})
action = QTQuery action = QTQuery
for _, cid := range field.Children { for _, cid := range field.Children {
@ -449,27 +427,19 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
continue continue
} }
var fname string
if f.Alias != "" {
fname = f.Alias
} else {
fname = f.Name
}
if _, ok := cm[fname]; ok {
continue
} else {
cm[fname] = struct{}{}
}
if len(f.Children) != 0 { if len(f.Children) != 0 {
val := f.ID | (s.ID << 16) val := f.ID | (s.ID << 16)
st.Push(val) st.Push(val)
continue continue
} }
col := Column{Name: f.Name, FieldName: fname} col := Column{Name: f.Name}
if len(f.Alias) != 0 {
col.FieldName = f.Alias
} else {
col.FieldName = f.Name
}
s.Cols = append(s.Cols, col) s.Cols = append(s.Cols, col)
} }
@ -481,7 +451,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
} }
qc.Selects = selects[:id] qc.Selects = selects[:id]
return nil return nil
} }
@ -513,42 +482,50 @@ func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg, role string) error { func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg, role string) error {
var err error var err error
// don't free this arg either previously done or will be free'd
// in the future like in psql
var df bool
for i := range args { for i := range args {
arg := &args[i] arg := &args[i]
switch arg.Name { switch arg.Name {
case "id": case "id":
err = com.compileArgID(sel, arg) err, df = com.compileArgID(sel, arg)
case "search": case "search":
err = com.compileArgSearch(sel, arg) err, df = com.compileArgSearch(sel, arg)
case "where": case "where":
err = com.compileArgWhere(sel, arg, role) err, df = com.compileArgWhere(sel, arg, role)
case "orderby", "order_by", "order": case "orderby", "order_by", "order":
err = com.compileArgOrderBy(sel, arg) err, df = com.compileArgOrderBy(sel, arg)
case "distinct_on", "distinct": case "distinct_on", "distinct":
err = com.compileArgDistinctOn(sel, arg) err, df = com.compileArgDistinctOn(sel, arg)
case "limit": case "limit":
err = com.compileArgLimit(sel, arg) err, df = com.compileArgLimit(sel, arg)
case "offset": case "offset":
err = com.compileArgOffset(sel, arg) err, df = com.compileArgOffset(sel, arg)
case "first": case "first":
err = com.compileArgFirstLast(sel, arg, PtForward) err, df = com.compileArgFirstLast(sel, arg, PtForward)
case "last": case "last":
err = com.compileArgFirstLast(sel, arg, PtBackward) err, df = com.compileArgFirstLast(sel, arg, PtBackward)
case "after": case "after":
err = com.compileArgAfterBefore(sel, arg, PtForward) err, df = com.compileArgAfterBefore(sel, arg, PtForward)
case "before": case "before":
err = com.compileArgAfterBefore(sel, arg, PtBackward) err, df = com.compileArgAfterBefore(sel, arg, PtBackward)
}
if !df {
FreeNode(arg.Val, 5)
} }
if err != nil { if err != nil {
@ -629,12 +606,14 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
} }
// Objects inside a list // Objects inside a list
if node.Name == "" { if len(node.Name) == 0 {
pushChildren(st, node.exp, node) pushChildren(st, node.exp, node)
continue continue
} else if _, ok := com.bl[node.Name]; ok { } else {
continue if _, ok := com.bl[node.Name]; ok {
continue
}
} }
ex, err := newExp(st, node, usePool) ex, err := newExp(st, node, usePool)
@ -657,20 +636,39 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
} }
} }
if usePool {
st.Push(node)
for {
if st.Len() == 0 {
break
}
intf := st.Pop()
node, ok := intf.(*Node)
if !ok || node == nil {
continue
}
for i := range node.Children {
st.Push(node.Children[i])
}
FreeNode(node, 1)
}
}
return root, needsUser, nil return root, needsUser, nil
} }
func (com *Compiler) compileArgID(sel *Select, arg *Arg) error { func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
if sel.ID != 0 { if sel.ID != 0 {
return nil return nil, false
} }
if sel.Where != nil && sel.Where.Op == OpEqID { if sel.Where != nil && sel.Where.Op == OpEqID {
return nil return nil, false
} }
if arg.Val.Type != NodeVar { if arg.Val.Type != NodeVar {
return argErr("id", "variable") return argErr("id", "variable"), false
} }
ex := expPool.Get().(*Exp) ex := expPool.Get().(*Exp)
@ -681,12 +679,12 @@ func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
ex.Val = arg.Val.Val ex.Val = arg.Val.Val
sel.Where = ex sel.Where = ex
return nil return nil, false
} }
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error { func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
if arg.Val.Type != NodeVar { if arg.Val.Type != NodeVar {
return argErr("search", "variable") return argErr("search", "variable"), false
} }
ex := expPool.Get().(*Exp) ex := expPool.Get().(*Exp)
@ -701,19 +699,18 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
} }
sel.Args[arg.Name] = arg.Val sel.Args[arg.Name] = arg.Val
arg.df = true
AddFilter(sel, ex) AddFilter(sel, ex)
return nil return nil, true
} }
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error { func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error, bool) {
st := util.NewStack() st := util.NewStack()
var err error var err error
ex, nu, err := com.compileArgObj(st, arg) ex, nu, err := com.compileArgObj(st, arg)
if err != nil { if err != nil {
return err return err, false
} }
if nu && role == "anon" { if nu && role == "anon" {
@ -721,12 +718,12 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
} }
AddFilter(sel, ex) AddFilter(sel, ex)
return nil return nil, true
} }
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error { func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
if arg.Val.Type != NodeObj { if arg.Val.Type != NodeObj {
return fmt.Errorf("expecting an object") return fmt.Errorf("expecting an object"), false
} }
st := util.NewStack() st := util.NewStack()
@ -744,15 +741,16 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
node, ok := intf.(*Node) node, ok := intf.(*Node)
if !ok || node == nil { if !ok || node == nil {
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf) return fmt.Errorf("17: unexpected value %v (%t)", intf, intf), false
} }
if _, ok := com.bl[node.Name]; ok { if _, ok := com.bl[node.Name]; ok {
FreeNode(node, 2)
continue continue
} }
if node.Type != NodeStr && node.Type != NodeVar { if node.Type != NodeStr && node.Type != NodeVar {
return fmt.Errorf("expecting a string or variable") return fmt.Errorf("expecting a string or variable"), false
} }
ob := &OrderBy{} ob := &OrderBy{}
@ -771,24 +769,25 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
case "desc_nulls_last": case "desc_nulls_last":
ob.Order = OrderDescNullsLast ob.Order = OrderDescNullsLast
default: default:
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first") return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first"), false
} }
setOrderByColName(ob, node) setOrderByColName(ob, node)
sel.OrderBy = append(sel.OrderBy, ob) sel.OrderBy = append(sel.OrderBy, ob)
FreeNode(node, 3)
} }
return nil return nil, false
} }
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error { func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
node := arg.Val node := arg.Val
if _, ok := com.bl[node.Name]; ok { if _, ok := com.bl[node.Name]; ok {
return nil return nil, false
} }
if node.Type != NodeList && node.Type != NodeStr { if node.Type != NodeList && node.Type != NodeStr {
return fmt.Errorf("expecting a list of strings or just a string") return fmt.Errorf("expecting a list of strings or just a string"), false
} }
if node.Type == NodeStr { if node.Type == NodeStr {
@ -797,57 +796,58 @@ func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {
for i := range node.Children { for i := range node.Children {
sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val) sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val)
FreeNode(node.Children[i], 5)
} }
return nil return nil, false
} }
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) error { func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) (error, bool) {
node := arg.Val node := arg.Val
if node.Type != NodeInt { if node.Type != NodeInt {
return argErr("limit", "number") return argErr("limit", "number"), false
} }
sel.Paging.Limit = node.Val sel.Paging.Limit = node.Val
return nil return nil, false
} }
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error { func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) (error, bool) {
node := arg.Val node := arg.Val
if node.Type != NodeVar { if node.Type != NodeVar {
return argErr("offset", "variable") return argErr("offset", "variable"), false
} }
sel.Paging.Offset = node.Val sel.Paging.Offset = node.Val
return nil return nil, false
} }
func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) error { func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) (error, bool) {
node := arg.Val node := arg.Val
if node.Type != NodeInt { if node.Type != NodeInt {
return argErr(arg.Name, "number") return argErr(arg.Name, "number"), false
} }
sel.Paging.Type = pt sel.Paging.Type = pt
sel.Paging.Limit = node.Val sel.Paging.Limit = node.Val
return nil return nil, false
} }
func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) error { func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) (error, bool) {
node := arg.Val node := arg.Val
if node.Type != NodeVar || node.Val != "cursor" { if node.Type != NodeVar || node.Val != "cursor" {
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name) return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name), false
} }
sel.Paging.Type = pt sel.Paging.Type = pt
sel.Paging.Cursor = true sel.Paging.Cursor = true
return nil return nil, false
} }
// var zeroTrv = &trval{} // var zeroTrv = &trval{}
@ -1030,15 +1030,10 @@ func setListVal(ex *Exp, node *Node) {
case NodeFloat: case NodeFloat:
ex.ListType = ValFloat ex.ListType = ValFloat
} }
} else {
ex.Val = node.Val
return
} }
for i := range node.Children { for i := range node.Children {
ex.ListVal = append(ex.ListVal, node.Children[i].Val) ex.ListVal = append(ex.ListVal, node.Children[i].Val)
} }
} }
func setWhereColName(ex *Exp, node *Node) { func setWhereColName(ex *Exp, node *Node) {
@ -1048,7 +1043,7 @@ func setWhereColName(ex *Exp, node *Node) {
if n.Type != NodeObj { if n.Type != NodeObj {
continue continue
} }
if n.Name != "" { if len(n.Name) != 0 {
k := n.Name k := n.Name
if k == "and" || k == "or" || k == "not" || if k == "and" || k == "or" || k == "not" ||
k == "_and" || k == "_or" || k == "_not" { k == "_and" || k == "_or" || k == "_not" {
@ -1227,81 +1222,3 @@ func FreeExp(ex *Exp) {
func argErr(name, ty string) error { func argErr(name, ty string) error {
return fmt.Errorf("value for argument '%s' must be a %s", name, ty) return fmt.Errorf("value for argument '%s' must be a %s", name, ty)
} }
func freeNodes(op *Operation) {
var st *util.Stack
fm := make(map[*Node]struct{})
for i := range op.Args {
arg := op.Args[i]
if arg.df {
continue
}
for i := range arg.Val.Children {
if st == nil {
st = util.NewStack()
}
c := arg.Val.Children[i]
if _, ok := fm[c]; !ok {
st.Push(c)
}
}
if _, ok := fm[arg.Val]; !ok {
nodePool.Put(arg.Val)
fm[arg.Val] = struct{}{}
}
}
for i := range op.Fields {
f := op.Fields[i]
for j := range f.Args {
arg := f.Args[j]
if arg.df {
continue
}
for k := range arg.Val.Children {
if st == nil {
st = util.NewStack()
}
c := arg.Val.Children[k]
if _, ok := fm[c]; !ok {
st.Push(c)
}
}
if _, ok := fm[arg.Val]; !ok {
nodePool.Put(arg.Val)
fm[arg.Val] = struct{}{}
}
}
}
if st == nil {
return
}
for {
if st.Len() == 0 {
break
}
intf := st.Pop()
node, ok := intf.(*Node)
if !ok || node == nil {
continue
}
for i := range node.Children {
st.Push(node.Children[i])
}
if _, ok := fm[node]; !ok {
nodePool.Put(node)
fm[node] = struct{}{}
}
}
}
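
`freeNodes` walks every argument tree in the operation iteratively and returns each reachable `*Node` to the pool, using the `fm` map as a seen-set so a node shared between branches is only `Put` once (the `df` flag excludes args whose nodes are freed later, in psql). A minimal sketch of the same dedupe-before-release idea:

```go
package main

import "fmt"

type Node struct {
	Children []*Node
}

// releaseOnce walks a node graph iteratively and hands each node to
// release exactly once, even when nodes are shared between parents.
func releaseOnce(roots []*Node, release func(*Node)) {
	seen := make(map[*Node]struct{})
	stack := append([]*Node{}, roots...)
	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if n == nil {
			continue
		}
		if _, ok := seen[n]; ok {
			continue // already released; skip to avoid a double free
		}
		seen[n] = struct{}{}
		stack = append(stack, n.Children...)
		release(n)
	}
}

func main() {
	shared := &Node{}
	a := &Node{Children: []*Node{shared}}
	b := &Node{Children: []*Node{shared}}
	count := 0
	releaseOnce([]*Node{a, b}, func(*Node) { count++ })
	fmt.Println(count) // 3: a, b, and the shared child, each released once
}
```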

View File

@ -29,8 +29,6 @@ func al(b byte) bool {
func (qt QType) String() string { func (qt QType) String() string {
switch qt { switch qt {
case QTUnknown:
return "unknown"
case QTQuery: case QTQuery:
return "query" return "query"
case QTMutation: case QTMutation:

View File

@ -2,93 +2,128 @@ package core
import ( import (
"bytes" "bytes"
"context"
"crypto/sha256"
"database/sql" "database/sql"
"encoding/hex"
"fmt" "fmt"
"hash/maphash"
"io" "io"
"strings" "strings"
"sync"
"github.com/dosco/super-graph/core/internal/allow" "github.com/dosco/super-graph/core/internal/allow"
"github.com/dosco/super-graph/core/internal/qcode" "github.com/dosco/super-graph/core/internal/qcode"
"github.com/valyala/fasttemplate"
) )
type query struct { type preparedItem struct {
sync.Once
sd *sql.Stmt sd *sql.Stmt
ai allow.Item args [][]byte
qt qcode.QType
err error
st stmt st stmt
roleArg bool roleArg bool
} }
func (sg *SuperGraph) prepare(q *query, role string) {
var stmts []stmt
var err error
qb := []byte(q.ai.Query)
vars := []byte(q.ai.Vars)
switch q.qt {
case qcode.QTQuery:
if sg.abacEnabled {
stmts, err = sg.buildMultiStmt(qb, vars)
} else {
stmts, err = sg.buildRoleStmt(qb, vars, role)
}
case qcode.QTMutation:
stmts, err = sg.buildRoleStmt(qb, vars, role)
}
if err != nil {
sg.log.Printf("WRN %s %s: %v", q.qt, q.ai.Name, err)
}
q.st = stmts[0]
q.roleArg = len(stmts) > 1
q.sd, err = sg.db.Prepare(q.st.sql)
if err != nil {
q.err = fmt.Errorf("prepare failed: %v: %s", err, q.st.sql)
}
}
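
On the master side, `query` embeds `sync.Once` and `prepare` runs lazily, presumably via `q.Do(...)` on first use of each (query, role) pair, instead of preparing every allow-list entry eagerly at startup. A minimal sketch of that once-guarded lazy initialization (hypothetical names, not the package's real API):

```go
package main

import (
	"fmt"
	"sync"
)

type query struct {
	sync.Once
	sql string
	err error
}

// get builds the statement exactly once, even under concurrent
// first use; every later caller just reads the cached result.
func (q *query) get(build func() (string, error)) (string, error) {
	q.Do(func() { q.sql, q.err = build() })
	return q.sql, q.err
}

func main() {
	q := &query{}
	builds := 0
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			q.get(func() (string, error) { builds++; return "SELECT 1", nil })
		}()
	}
	wg.Wait()
	fmt.Println(builds) // 1: prepared once, shared by all callers
}
```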
func (sg *SuperGraph) initPrepared() error { func (sg *SuperGraph) initPrepared() error {
ct := context.Background()
if sg.allowList.IsPersist() { if sg.allowList.IsPersist() {
return nil return nil
} }
sg.prepared = make(map[string]*preparedItem)
if err := sg.prepareRoleStmt(); err != nil { tx, err := sg.db.BeginTx(ct, nil)
return fmt.Errorf("role query: %w", err) if err != nil {
return err
}
defer tx.Rollback() //nolint: errcheck
if err = sg.prepareRoleStmt(tx); err != nil {
return fmt.Errorf("prepareRoleStmt: %w", err)
} }
sg.queries = make(map[uint64]*query) if err := tx.Commit(); err != nil {
return err
}
success := 0
list, err := sg.allowList.Load() list, err := sg.allowList.Load()
if err != nil { if err != nil {
return err return err
} }
h := maphash.Hash{}
h.SetSeed(sg.hashSeed)
for _, v := range list { for _, v := range list {
if v.Query == "" { if len(v.Query) == 0 {
continue continue
} }
qt := qcode.GetQType(v.Query) err := sg.prepareStmt(v)
if err != nil {
sg.log.Printf("WRN %s: %v", v.Name, err)
} else {
success++
}
}
switch qt { sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))
case qcode.QTQuery:
sg.queries[queryID(&h, v.Name, "user")] = &query{ai: v, qt: qt}
sg.queries[queryID(&h, v.Name, "anon")] = &query{ai: v, qt: qt}
case qcode.QTMutation: return nil
for _, role := range sg.conf.Roles { }
sg.queries[queryID(&h, v.Name, role.Name)] = &query{ai: v, qt: qt}
func (sg *SuperGraph) prepareStmt(item allow.Item) error {
query := item.Query
qb := []byte(query)
vars := item.Vars
qt := qcode.GetQType(query)
ct := context.Background()
switch qt {
case qcode.QTQuery:
var stmts1 []stmt
var err error
if sg.abacEnabled {
stmts1, err = sg.buildMultiStmt(qb, vars)
} else {
stmts1, err = sg.buildRoleStmt(qb, vars, "user")
}
if err != nil {
return err
}
//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
if err != nil {
return err
}
if sg.anonExists {
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
if err != nil {
return err
}
err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
if err != nil {
return err
}
}
case qcode.QTMutation:
for _, role := range sg.conf.Roles {
// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
if err != nil {
return err
}
err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
if err != nil {
return err
} }
} }
} }
@ -96,24 +131,40 @@ func (sg *SuperGraph) initPrepared() error {
return nil return nil
} }
func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
finalSQL, am := processTemplate(st[0].sql)
sd, err := sg.db.Prepare(finalSQL)
if err != nil {
return fmt.Errorf("prepare failed: %v: %s", err, finalSQL)
}
sg.prepared[key] = &preparedItem{
sd: sd,
args: am,
st: st[0],
roleArg: len(st) > 1,
}
return nil
}
// nolint: errcheck // nolint: errcheck
func (sg *SuperGraph) prepareRoleStmt() error { func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
var err error var err error
if !sg.abacEnabled { if !sg.abacEnabled {
return nil return nil
} }
rq := strings.ReplaceAll(sg.conf.RolesQuery, "$user_id", "$1")
w := &bytes.Buffer{} w := &bytes.Buffer{}
io.WriteString(w, `SELECT (CASE WHEN EXISTS (`) io.WriteString(w, `SELECT (CASE WHEN EXISTS (`)
io.WriteString(w, rq) io.WriteString(w, sg.conf.RolesQuery)
io.WriteString(w, `) THEN `) io.WriteString(w, `) THEN `)
io.WriteString(w, `(SELECT (CASE`) io.WriteString(w, `(SELECT (CASE`)
for _, role := range sg.conf.Roles { for _, role := range sg.conf.Roles {
if role.Match == "" { if len(role.Match) == 0 {
continue continue
} }
io.WriteString(w, ` WHEN `) io.WriteString(w, ` WHEN `)
@ -123,12 +174,14 @@ func (sg *SuperGraph) prepareRoleStmt() error {
io.WriteString(w, `'`) io.WriteString(w, `'`)
} }
io.WriteString(w, ` ELSE $2 END) FROM (`) io.WriteString(w, ` ELSE {{role}} END) FROM (`)
io.WriteString(w, rq) io.WriteString(w, sg.conf.RolesQuery)
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `) io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `) io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `)
sg.getRole, err = sg.db.Prepare(w.String()) roleSQL, _ := processTemplate(w.String())
sg.getRole, err = tx.Prepare(roleSQL)
if err != nil { if err != nil {
return err return err
} }
@ -136,18 +189,49 @@ func (sg *SuperGraph) prepareRoleStmt() error {
return nil return nil
} }
func processTemplate(tmpl string) (string, [][]byte) {
st := struct {
vmap map[string]int
am [][]byte
i int
}{
vmap: make(map[string]int),
am: make([][]byte, 0, 5),
i: 0,
}
execFunc := func(w io.Writer, tag string) (int, error) {
if n, ok := st.vmap[tag]; ok {
return w.Write([]byte(fmt.Sprintf("$%d", n)))
}
st.am = append(st.am, []byte(tag))
st.i++
st.vmap[tag] = st.i
return w.Write([]byte(fmt.Sprintf("$%d", st.i)))
}
t1 := fasttemplate.New(tmpl, `'{{`, `}}'`)
ts1 := t1.ExecuteFuncString(execFunc)
t2 := fasttemplate.New(ts1, `{{`, `}}`)
ts2 := t2.ExecuteFuncString(execFunc)
return ts2, st.am
}
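
`processTemplate` rewrites `{{tag}}` placeholders (and the quoted `'{{tag}}'` form, handled by the first pass) into positional `$N` parameters, reusing the same number when a tag repeats, and returns the tags in order so values can be bound at execution time. A minimal sketch of the same idea, single-pass and unquoted tags only, using the real `fasttemplate` package:

```go
package main

import (
	"fmt"
	"io"

	"github.com/valyala/fasttemplate"
)

// numberTags mirrors processTemplate above: each distinct {{tag}}
// becomes a positional $N placeholder, and repeats reuse the number.
func numberTags(tmpl string) (string, []string) {
	seen := map[string]int{}
	var order []string
	t := fasttemplate.New(tmpl, "{{", "}}")
	out := t.ExecuteFuncString(func(w io.Writer, tag string) (int, error) {
		n, ok := seen[tag]
		if !ok {
			order = append(order, tag)
			n = len(order)
			seen[tag] = n
		}
		return fmt.Fprintf(w, "$%d", n)
	})
	return out, order
}

func main() {
	sql, args := numberTags("SELECT * FROM users WHERE id = {{id}} OR owner = {{id}} AND org = {{org}}")
	fmt.Println(sql)  // SELECT * FROM users WHERE id = $1 OR owner = $1 AND org = $2
	fmt.Println(args) // [id org]
}
```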
func (sg *SuperGraph) initAllowList() error { func (sg *SuperGraph) initAllowList() error {
var ac allow.Config var ac allow.Config
var err error var err error
if sg.conf.AllowListFile == "" { if len(sg.conf.AllowListFile) == 0 {
sg.conf.AllowListFile = "allow.list" sg.conf.UseAllowList = false
sg.log.Printf("WRN allow list disabled no file specified")
} }
// When the list is not enabled it is still created and // When the list is not enabled it is still created and
// new queries are saved to it. // new queries are saved to it.
if !sg.conf.UseAllowList { if !sg.conf.UseAllowList {
ac = allow.Config{CreateIfNotExists: true, Persist: true, Log: sg.log} ac = allow.Config{CreateIfNotExists: true, Persist: true}
} }
sg.allowList, err = allow.New(sg.conf.AllowListFile, ac) sg.allowList, err = allow.New(sg.conf.AllowListFile, ac)
@ -159,11 +243,9 @@ func (sg *SuperGraph) initAllowList() error {
} }
// nolint: errcheck // nolint: errcheck
func queryID(h *maphash.Hash, name, role string) uint64 { func stmtHash(name string, role string) string {
h.WriteString(name) h := sha256.New()
h.WriteString(role) io.WriteString(h, strings.ToLower(name))
v := h.Sum64() io.WriteString(h, role)
h.Reset() return hex.EncodeToString(h.Sum(nil))
return v
} }
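
The switch above from a sha256 hex string to a seeded `maphash` key trades a cryptographic, stable-across-runs digest for a fast process-local `uint64`, which is all an in-memory statement cache needs. A minimal sketch of keying a map with one reused `maphash.Hash`:

```go
package main

import (
	"fmt"
	"hash/maphash"
)

func main() {
	seed := maphash.MakeSeed() // one seed per process; keys only compare within it
	var h maphash.Hash
	h.SetSeed(seed)

	key := func(name, role string) uint64 {
		h.WriteString(name)
		h.WriteString(role)
		v := h.Sum64()
		h.Reset() // reuse the same Hash for the next key
		return v
	}

	queries := map[uint64]string{}
	queries[key("getUser", "user")] = "prepared stmt for role user"
	queries[key("getUser", "anon")] = "prepared stmt for role anon"
	fmt.Println(len(queries)) // 2: same query name, different role, different key
}
```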

View File

@ -4,10 +4,10 @@ import (
"bytes" "bytes"
"errors" "errors"
"fmt" "fmt"
"hash/maphash"
"net/http" "net/http"
"sync" "sync"
"github.com/cespare/xxhash/v2"
"github.com/dosco/super-graph/core/internal/qcode" "github.com/dosco/super-graph/core/internal/qcode"
"github.com/dosco/super-graph/jsn" "github.com/dosco/super-graph/jsn"
) )
@ -16,13 +16,12 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
var err error var err error
sel := st.qc.Selects sel := st.qc.Selects
h := maphash.Hash{} h := xxhash.New()
h.SetSeed(sg.hashSeed)
// fetch the field name used within the db response json // fetch the field name used within the db response json
// that are used to mark insertion points and the mapping between // that are used to mark insertion points and the mapping between
// those field names and their select objects // those field names and their select objects
fids, sfmap := sg.parentFieldIds(&h, sel, st.md.Skipped()) fids, sfmap := sg.parentFieldIds(h, sel, st.skipped)
// fetch the field values of the marked insertion points // fetch the field values of the marked insertion points
// these values contain the id to be used with fetching remote data // these values contain the id to be used with fetching remote data
@ -31,10 +30,10 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
switch { switch {
case len(from) == 1: case len(from) == 1:
to, err = sg.resolveRemote(hdr, &h, from[0], sel, sfmap) to, err = sg.resolveRemote(hdr, h, from[0], sel, sfmap)
case len(from) > 1: case len(from) > 1:
to, err = sg.resolveRemotes(hdr, &h, from, sel, sfmap) to, err = sg.resolveRemotes(hdr, h, from, sel, sfmap)
default: default:
return nil, errors.New("something wrong no remote ids found in db response") return nil, errors.New("something wrong no remote ids found in db response")
@ -56,7 +55,7 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
func (sg *SuperGraph) resolveRemote( func (sg *SuperGraph) resolveRemote(
hdr http.Header, hdr http.Header,
h *maphash.Hash, h *xxhash.Digest,
field jsn.Field, field jsn.Field,
sel []qcode.Select, sel []qcode.Select,
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) { sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@ -67,8 +66,7 @@ func (sg *SuperGraph) resolveRemote(
to := toA[:1] to := toA[:1]
// use the json key to find the related Select object // use the json key to find the related Select object
_, _ = h.Write(field.Key) k1 := xxhash.Sum64(field.Key)
k1 := h.Sum64()
s, ok := sfmap[k1] s, ok := sfmap[k1]
if !ok { if !ok {
@ -119,7 +117,7 @@ func (sg *SuperGraph) resolveRemote(
func (sg *SuperGraph) resolveRemotes( func (sg *SuperGraph) resolveRemotes(
hdr http.Header, hdr http.Header,
h *maphash.Hash, h *xxhash.Digest,
from []jsn.Field, from []jsn.Field,
sel []qcode.Select, sel []qcode.Select,
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) { sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@ -136,8 +134,7 @@ func (sg *SuperGraph) resolveRemotes(
for i, id := range from { for i, id := range from {
// use the json key to find the related Select object // use the json key to find the related Select object
_, _ = h.Write(id.Key) k1 := xxhash.Sum64(id.Key)
k1 := h.Sum64()
s, ok := sfmap[k1] s, ok := sfmap[k1]
if !ok { if !ok {
@ -195,7 +192,7 @@ func (sg *SuperGraph) resolveRemotes(
return to, cerr return to, cerr
} }
func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skipped uint32) ( func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
[][]byte, [][]byte,
map[uint64]*qcode.Select) { map[uint64]*qcode.Select) {
@ -230,15 +227,15 @@ func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skippe
fm[n] = r.IDField fm[n] = r.IDField
n++ n++
_, _ = h.Write(r.IDField) k := xxhash.Sum64(r.IDField)
sm[h.Sum64()] = s sm[k] = s
} }
} }
return fm, sm return fm, sm
} }
func isSkipped(n, pos uint32) bool { func isSkipped(n uint32, pos uint32) bool {
return ((n & (1 << pos)) != 0) return ((n & (1 << pos)) != 0)
} }
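
The remote-join path above keys `Select` objects by `xxhash.Sum64` of the JSON field name they produce, so matching an insertion point in the DB response is one integer map lookup, and `isSkipped` is a bitmask test for selects deferred to remote resolvers. A small sketch of both, assuming the real `cespare/xxhash/v2` package:

```go
package main

import (
	"fmt"

	"github.com/cespare/xxhash/v2"
)

func isSkipped(n, pos uint32) bool {
	return n&(1<<pos) != 0
}

func main() {
	// Index selects by the hash of the JSON key they emit.
	sfmap := map[uint64]string{
		xxhash.Sum64([]byte("users")):    "select #0",
		xxhash.Sum64([]byte("products")): "select #1",
	}

	// Looking up a field key from the response is one integer lookup.
	if s, ok := sfmap[xxhash.Sum64([]byte("users"))]; ok {
		fmt.Println(s) // select #0
	}

	// skipped marks which selects are handled by remote resolvers.
	var skipped uint32 = 1 << 1
	fmt.Println(isSkipped(skipped, 0), isSkipped(skipped, 1)) // false true
}
```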

View File

@ -2,11 +2,11 @@ package core
import ( import (
"fmt" "fmt"
"hash/maphash"
"io/ioutil" "io/ioutil"
"net/http" "net/http"
"strings" "strings"
"github.com/cespare/xxhash/v2"
"github.com/dosco/super-graph/core/internal/psql" "github.com/dosco/super-graph/core/internal/psql"
"github.com/dosco/super-graph/jsn" "github.com/dosco/super-graph/jsn"
) )
@ -19,7 +19,7 @@ type resolvFn struct {
func (sg *SuperGraph) initResolvers() error { func (sg *SuperGraph) initResolvers() error {
var err error var err error
sg.rmap = make(map[uint64]resolvFn) sg.rmap = make(map[uint64]*resolvFn)
for _, t := range sg.conf.Tables { for _, t := range sg.conf.Tables {
err = sg.initRemotes(t) err = sg.initRemotes(t)
@ -36,8 +36,7 @@ func (sg *SuperGraph) initResolvers() error {
} }
func (sg *SuperGraph) initRemotes(t Table) error { func (sg *SuperGraph) initRemotes(t Table) error {
h := maphash.Hash{} h := xxhash.New()
h.SetSeed(sg.hashSeed)
for _, r := range t.Remotes { for _, r := range t.Remotes {
// defines the table column to be used as an id in the // defines the table column to be used as an id in the
@ -46,7 +45,7 @@ func (sg *SuperGraph) initRemotes(t Table) error {
// if no table column specified in the config then // if no table column specified in the config then
// use the primary key of the table as the id // use the primary key of the table as the id
if idcol == "" { if len(idcol) == 0 {
pcol, err := sg.pc.IDColumn(t.Name) pcol, err := sg.pc.IDColumn(t.Name)
if err != nil { if err != nil {
return err return err
@ -76,18 +75,17 @@ func (sg *SuperGraph) initRemotes(t Table) error {
path = append(path, []byte(p)) path = append(path, []byte(p))
} }
rf := resolvFn{ rf := &resolvFn{
IDField: []byte(idk), IDField: []byte(idk),
Path: path, Path: path,
Fn: fn, Fn: fn,
} }
// index resolver obj by parent and child names // index resolver obj by parent and child names
sg.rmap[mkkey(&h, r.Name, t.Name)] = rf sg.rmap[mkkey(h, r.Name, t.Name)] = rf
// index resolver obj by IDField // index resolver obj by IDField
_, _ = h.Write(rf.IDField) sg.rmap[xxhash.Sum64(rf.IDField)] = rf
sg.rmap[h.Sum64()] = rf
} }
return nil return nil

View File

@ -1,9 +1,11 @@
package core package core
import "hash/maphash" import (
"github.com/cespare/xxhash/v2"
)
// nolint: errcheck // nolint: errcheck
func mkkey(h *maphash.Hash, k1, k2 string) uint64 { func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
h.WriteString(k1) h.WriteString(k1)
h.WriteString(k2) h.WriteString(k2)
v := h.Sum64() v := h.Sum64()

View File

@ -0,0 +1,40 @@
01a106d5.06939d67.js,1589776216137,2e1ce67f6cf79a8a8e2070fc4ea4a6104ac73a5b26a1ab10b62f6cd8e45a8074
1.1c32171f.js.LICENSE.txt,1589776216144,31b4d50dbbd144da150dcdcf0ccef8f6cf8b6b5204d5c9adde3b24466777fad5
0e384e19.7f29b403.js,1589776216137,e2c3882226f2a601b65e4bb1fdb771296c1946f9f125c90af4a8f451dfd2c867
19.fdfbe826.js.LICENSE.txt,1589776216145,6ad95a8099127a8d42b5ace6d148064b1d3e922174f08d75d0ee2220ebeacd0b
17896441.183211f5.js,1589776216137,7736db62d7498a8d3a10a617b1bdfac08c8f29dc03329f4ad3320f2571c223c0
20ac7829.c04b4a1e.js,1589776216137,5b95f479848ccd6959630d4a24bd551d0dbc74457911e9b6f3498655bfaf8ea7
1.1c32171f.js,1589776216137,5441b74bfad9f5a37ba0e6123621c73c3e3b9064bda6b9dcf62fdb7381bf8e41
2.8f12478f.js,1589776216137,3ac7ca0df8fca86145f5decbd86c8adfbc6b5b11a5be96fc96cc9bc33d6306e6
395f47e2.28d67f37.js,1589776216137,8a9b6bc2afdd99ca2b1827c8289352fab6163e30151b9701c29a7863b6cd00b6
404.html,1589776218438,0a748eaa7614b1982623360ba8554c0f498b0796ead3cc429a2c84d287084b50
3d9c95a4.c89589de.js,1589776216137,d5c45e5a3671f303683451d448e2e5d5b464f041cde683af6e824b9e7f951412
9225b3a9.a5e6036b.js,1589776216137,ec9a0d4b34d8751f74348d0da369625a18f320c9ed5ab3c5ccf047ead2551bd8
741df2ae.e13b96b2.js,1589776216137,12028f0cbdf783ac91ea42db64d91190ebd4df24cc74162f953aacc75d16d078
969d212d.9fc45877.js,1589776216138,8323c9f2db042bfaa2ebba43d9500bed881a694d0bfc27fd796cec95bb032dc5
c4f5d8e4.47e70b85.js,1589776216145,6f986b48720724e7c8a715812b5f6625c71c8eca258bb4b410a447eb5da52734
index.html,1589776218438,89f81ec3d3be439a827bd61448dcaddb71c33422df7baa88a7bbcdf784dbc0b2
98ce8162.b5ace15d.js,1589776216137,935e1c6dd08f7e9d0d00221559b95f0f649e28ddf64be6bbb7b3e65bae1aba72
main.e30d99cd.js.LICENSE.txt,1589776216144,1d906c3b83eacffe298d21eeb73e6e73e96310983224783d236195098e6765a7
runtime~main.366c29ad.js,1589776216145,0e550cc9522cd99c5fa4097c7db629eef56127a7f8ade0b7c9954cc8f6a01239
5043549d.62508ecf.js,1589776216137,383959b80d2b0c6416e83c9640ea03c666fe92c407e13a6f022b58072feeafd2
99e04881.197dcef6.js,1589776216144,af99883cbd4d58fbac7cbf814be33032b77bc8daf856aed54bdf0bf27ed5708d
sitemap.xml,1589776218455,660ed269bf0306ba47ecdfb638e487147784d614c43c6c4a8e84194973baf183
styles.9155f1d2.js,1589776216137,f1e0863928710e67338dc88c37f47ef3ff164d36c4bba40d005561094c9c3284
db32d859.a032827a.js,1589776216145,36d575ffad747898726a97cb7a3551e636f744218595bea5c060536eb8d8390f
docs/advanced/index.html,1589776218439,31171870786a597597de9417978a27253581c013962e39959ae4c0777bf86c28
docs/deploy/index.html,1589776218440,7a4735edb93006311b704e62b843bf89bc4354fdf0fdc22a0c5802e39878c193
docs/home/index.html,1589776218440,c7fbb0c1084c6ef8858775c5083b6b416b8188942d4402a5a625eadb3bc00942
docs/intro/index.html,1589776218440,c7a50ae98c0b279f422e55c2eeb9f7ba1c7c1a8bcac07be11fd6e05ced224094
img/super-graph-logo.svg,1589776218438,66a865c4936f44ea811464b967f221b615b7553e85dca0d6f1ef620da3911857
docs/react/index.html,1589776218440,f76fc976f3491d9aacf19ce3b34bee1339f87c673a9da95f192683615618f210
docs/why/index.html,1589776218440,4aa380fe4e5d8476645e368d1f708d5d1344331c572383db823c3499fa0c99cc
docs/security/index.html,1589776218440,0c7d466dc143935db8c02a448952cae2465635e4b6782b1682449bbd56807917
styles.8ee0cad4.css,1589776216137,34b2e79c5c5b1f7afda4376e422e8ccb2c3c04213ca09d788f0c68ecf153d6e6
docs/config/index.html,1589776218440,25b6e87a42c163ac966e80acebca8708f56ae95ba8f3ed8b98ff7fd70ca5a222
docs/internals/index.html,1589776218440,b6f2136a1c832f421a46329fb1f39269d820c55a0dfc9351848271a5501d8e6e
docs/start/index.html,1589776218440,485ec2c61117d8940d8028f34d51d421995a814d5b9d4d5a1870adaed48aec2c
docs/graphql/index.html,1589776218440,3bd79f703fe67656884f3121bfddc3a4fc4d9e5bb2bf9271c94014058fbbd806
main.e30d99cd.js,1589776216144,98a4087d6f537aaddbc1225aaabfb4d12d1394772deb618d4d457685cee59311
19.fdfbe826.js,1589776216144,b8abb73aea5fc0aa50d7e8b8bd38984e3b3aec62de2faf66fb3e55fd1428f8a7
server.bundle.js,1589776218438,826db37f1de931e8b088c1ff20b4a3c2fe0c3d54d9ff4020e500f0df1b83a616

View File

@ -55,30 +55,6 @@ query {
} }
``` ```
### Fragments
Fragments make it easy to build large complex queries with small composable and re-usable fragment blocks.
```graphql
query {
users {
...userFields2
...userFields1
picture_url
}
}
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}
```
### Sorting ### Sorting
To sort or order results just use the `order_by` argument. This can be combined with `where`, `search`, etc. to build complex queries that fit your needs. To sort or order results just use the `order_by` argument. This can be combined with `where`, `search`, etc. to build complex queries that fit your needs.

View File

@ -157,9 +157,7 @@ func main() {
} }
}` }`
ctx = context.WithValue(ctx, core.UserIDKey, 1) res, err := sg.GraphQL(context.Background(), query, nil)
res, err := sg.GraphQL(ctx, query, nil)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }

View File

@ -4,8 +4,6 @@ title: Introduction
sidebar_label: Introduction sidebar_label: Introduction
--- ---
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
Super Graph is a service that instantly and without code gives you a high performance and secure GraphQL API. Your GraphQL queries are auto translated into a single fast SQL query. No more spending weeks or months writing backend API code. Just make the query you need and Super Graph will do the rest. Super Graph is a service that instantly and without code gives you a high performance and secure GraphQL API. Your GraphQL queries are auto translated into a single fast SQL query. No more spending weeks or months writing backend API code. Just make the query you need and Super Graph will do the rest.
Super Graph has a rich feature set like integrating with your existing Ruby on Rails apps, joining your DB with data from remote APIs, Role and Attribute based access control, Support for JWT tokens, DB migrations, seeding and a lot more. Super Graph has a rich feature set like integrating with your existing Ruby on Rails apps, joining your DB with data from remote APIs, Role and Attribute based access control, Support for JWT tokens, DB migrations, seeding and a lot more.
@ -27,8 +25,6 @@ Super Graph has a rich feature set like integrating with your existing Ruby on R
- Fuzz tested for security - Fuzz tested for security
- Database migrations tool - Database migrations tool
- Database seeding tool - Database seeding tool
- Works with Postgres and Yugabyte DB
- OpenCensus Support: Zipkin, Prometheus, X-Ray, Stackdriver
## Try the demo app ## Try the demo app
@ -136,9 +132,3 @@ mutation {
} }
} }
``` ```
### Built-in GraphQL Editor
Quickly craft and test your queries with a full-featured GraphQL editor. Auto-complete and schema documentation are automatically available.
<img alt="Zipkin Traces" src={useBaseUrl("img/webui.jpg")} />

View File

@ -95,7 +95,7 @@ auth:
type: jwt type: jwt
jwt: jwt:
# valid providers are auth0, firebase and none # the two providers are 'auth0' and 'none'
provider: auth0 provider: auth0
secret: abc335bfcfdb04e50db5bb0a4d67ab9 secret: abc335bfcfdb04e50db5bb0a4d67ab9
public_key_file: /secrets/public_key.pem public_key_file: /secrets/public_key.pem
@ -108,19 +108,6 @@ We can get the JWT token either from the `authorization` header where we expect
For validation a `secret` or a public key (ecdsa or rsa) is required. When using public keys they have to be in a PEM format file. For validation a `secret` or a public key (ecdsa or rsa) is required. When using public keys they have to be in a PEM format file.
### Firebase Auth
```yaml
auth:
type: jwt
jwt:
provider: firebase
audience: <firebase-project-id>
```
Firebase auth also uses JWT; the keys are auto-fetched from Google and used according to their documented mechanism. The `audience` config value needs to be set to your project id and everything else is taken care of for you.
### HTTP Headers ### HTTP Headers
```yaml ```yaml

View File

@ -1,249 +0,0 @@
---
id: seed
title: Database Seeding
sidebar_label: Seed Scripts
---
While developing it's often useful to have fake data available in the database. Fake data can help with building the UI and save you time when trying to get the GraphQL query correct. Super Graph has the ability to do this for you. All you have to do is write a seed script `config/seed.js` (in Javascript) and use the `db:seed` command line option. Below is an example of the kinds of things you can do in a seed script.
## Creating fake users
Since all mutations and queries are in standard GraphQL you can use all the features available in Super Graph GraphQL.
```javascript
var users = [];
for (i = 0; i < 20; i++) {
var data = {
slug: util.make_slug(fake.first_name() + "-" + fake.last_name()),
first_name: fake.first_name(),
last_name: fake.last_name(),
picture_url: fake.avatar_url(),
email: fake.email(),
bio: fake.sentence(10),
};
var res = graphql(" \
mutation { \
user(insert: $data) { \
id \
} \
}", { data: data });
users.push(res.user);
}
```
## Inserting the users fake blog posts
Another example highlighting how the `connect` syntax of Super Graph GraphQL can be used to connect inserted posts
to random users that were previously created. For further details check out the [seed script](/seed) documentation.
```javascript
var posts = [];
for (i = 0; i < 1500; i++) {
var user = users[Math.floor(Math.random() * 10)];
var data = {
slug: util.make_slug(fake.sentence(3) + i),
body: fake.sentence(100),
published: true,
thread: {
connect: { user: user.id }
}
}
var res = graphql(" \
mutation { \
post(insert: $data) { \
id \
} \
}",
{ data: data },
{ user_id: user.id })
posts.push(res.post.slug)
}
```
## Insert a large number of rows efficiently
This feature uses the `COPY` functionality available in Postgres; it is the best way to
insert a large number of rows into a table. The `import_csv` function reads in a CSV file,
using the first line of the file as column names.
```javascript
import_csv("post_tags", "./tags.csv");
```
## A list of fake data functions available to you.
```
person
name
name_prefix
name_suffix
first_name
last_name
gender
ssn
contact
email
phone
phone_formatted
username
password
// Address
address
city
country
country_abr
state
state_abr
street
street_name
street_number
street_prefix
street_suffix
zip
latitude
latitude_in_range
longitude
longitude_in_range
// Beer
beer_alcohol
beer_hop
beer_ibu
beer_blg
beer_malt
beer_name
beer_style
beer_yeast
// Cars
car
car_type
car_maker
car_model
// Text
word
sentence
paragraph
question
quote
// Misc
generate
boolean
uuid
// Colors
color
hex_color
rgb_color
safe_color
// Internet
url
image_url
avatar_url
domain_name
domain_suffix
ipv4_address
ipv6_address
http_method
user_agent
user_agent_firefox
user_agent_chrome
user_agent_opera
user_agent_safari
// Date / Time
date
date_range
nano_second
second
minute
hour
month
day
weekday
year
timezone
timezone_abv
timezone_full
timezone_offset
// Payment
price
credit_card
credit_card_cvv
credit_card_number
credit_card_type
currency
currency_long
currency_short
// Company
bs
buzzword
company
company_suffix
job
job_description
job_level
job_title
// Hacker
hacker_abbreviation
hacker_adjective
hacker_noun
hacker_phrase
hacker_verb
//Hipster
hipster_word
hipster_paragraph
hipster_sentence
// File
file_extension
file_mine_type
// Numbers
number
numerify
int8
int16
int32
int64
uint8
uint16
uint32
uint64
float32
float32_range
float64
float64_range
shuffle_ints
mac_address
// String
digit
letter
lexify
rand_string
numerify
```
## Some more utility functions
```
shuffle_strings(string_array)
make_slug(text)
make_slug_lang(text, lang)
```

View File

@ -10,7 +10,7 @@ You can then add your database schema to the migrations, maybe create some seed
```bash ```bash
# Download and install Super Graph. You will need Go 1.14 or above # Download and install Super Graph. You will need Go 1.14 or above
go get github.com/dosco/super-graph go get https://github.com/dosco/super-graph
``` ```
And then create and launch your new app And then create and launch your new app
@ -96,6 +96,179 @@ var post_count = import_csv("posts", "posts.csv");
You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example `fake.image_url()` will generate a fake image url or `fake.shuffle_strings(['hello', 'world', 'cool'])` will generate a randomly shuffled version of that array of strings or `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided. You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example `fake.image_url()` will generate a fake image url or `fake.shuffle_strings(['hello', 'world', 'cool'])` will generate a randomly shuffled version of that array of strings or `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided.
```
// Person
person
name
name_prefix
name_suffix
first_name
last_name
gender
ssn
contact
email
phone
phone_formatted
username
password
// Address
address
city
country
country_abr
state
state_abr
status_code
street
street_name
street_number
street_prefix
street_suffix
zip
latitude
latitude_in_range
longitude
longitude_in_range
// Beer
beer_alcohol
beer_hop
beer_ibu
beer_blg
beer_malt
beer_name
beer_style
beer_yeast
// Cars
car
car_type
car_maker
car_model
// Text
word
sentence
paragraph
question
quote
// Misc
generate
boolean
uuid
// Colors
color
hex_color
rgb_color
safe_color
// Internet
url
image_url
domain_name
domain_suffix
ipv4_address
ipv6_address
simple_status_code
http_method
user_agent
user_agent_firefox
user_agent_chrome
user_agent_opera
user_agent_safari
// Date / Time
date
date_range
nano_second
second
minute
hour
month
day
weekday
year
timezone
timezone_abv
timezone_full
timezone_offset
// Payment
price
credit_card
credit_card_cvv
credit_card_number
credit_card_number_luhn
credit_card_type
currency
currency_long
currency_short
// Company
bs
buzzword
company
company_suffix
job
job_description
job_level
job_title
// Hacker
hacker_abbreviation
hacker_adjective
hacker_ingverb
hacker_noun
hacker_phrase
hacker_verb
//Hipster
hipster_word
hipster_paragraph
hipster_sentence
// File
file_extension
file_mine_type
// Numbers
number
numerify
int8
int16
int32
int64
uint8
uint16
uint32
uint64
float32
float32_range
float64
float64_range
shuffle_ints
mac_address
//String
digit
letter
lexify
shuffle_strings
numerify
```
Other utility functions
```
shuffle_strings(string_array)
make_slug(text)
make_slug_lang(text, lang)
```
### Migrations ### Migrations
Easy database migrations are the most important thing when building products backed by a relational database. We make it super easy to manage and migrate your database. Easy database migrations are the most important thing when building products backed by a relational database. We make it super easy to manage and migrate your database.

View File

@ -1,82 +0,0 @@
---
id: telemetry
title: Tracing and Metrics
sidebar_label: Telemetry
---
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
Having observability and telemetry is at the core of any production-ready service. Super Graph has built-in support for OpenCensus for tracing requests all the way from HTTP to the database and providing all kinds of metrics.
OpenCensus has a concept called exporters; these are external services that consume this data and give you graphs, charts, alerting, etc. Super Graph again has built-in support for the Zipkin, Prometheus, Google Stackdriver and AWS X-Ray exporters.
## Telemetry config
The `telemetry` section of the standard config files is where you set values to configure this feature to your needs.
```yaml
telemetry:
debug: true
interval: 5s
metrics:
exporter: "prometheus"
endpoint: ""
namespace: "web api"
key: "1234xyz"
tracing:
exporter: "zipkin"
endpoint: "http://zipkin:9411/api/v2/spans"
sample: 0.2
include_query: false
include_params: false
```
**debug**: Enabling debug enables an embedded web UI to test and debug tracing and metrics. This UI, called `zPages`, is provided by OpenCensus and will be made available on the `/telemetry` path. For more information on using `zPages` see https://opencensus.io/zpages/. Remember to disable this in production.
**interval**: This controls the interval setting for OpenCensus metrics collection. This defaults to `5 seconds` if not set.
**metrics.exporter** Setting this enables metrics collection. The supported values for this field are `prometheus` and `stackdriver`. The Prometheus exporter requires `metrics.namespace` to be set. The Stackdriver exporter requires `metrics.key` to be set to the Google Cloud Project ID.
**metrics.endpoint** Is not currently used by any of the exporters.
**tracing.exporter** Setting this enables request tracing. The supported values for this field are `zipkin`, `aws` and `xray`. Zipkin requires `tracing.endpoint` to be set. AWS and X-Ray are the same and do not require any additional settings.
**tracing.sample** This controls what percentage of the requests should be traced. By default `0.5` or 50% of the requests are traced; `always` is also a valid value for this field and means all requests will be traced.
**include_query** Include the Super Graph SQL query in the trace. Be careful with this setting in production: it will add the entire SQL query to the trace. This can be very useful to debug slow requests.
**include_params** Include the Super Graph SQL query parameters in the trace. Be careful with this setting in production: it can potentially leak sensitive user information into tracing logs.
## Using Zipkin
Zipkin is a really great open source request tracing project. It's easy to add to your current Super Graph app as a way to test tracing in development. Add the following to the Super Graph generated `docker-compose.yml` file. Also add `zipkin` in your current apps `depends_on` list. Once setup the Zipkin UI is available at http://localhost:9411
```yaml
your_api:
...
depends_on:
- db
- zipkin
zipkin:
image: openzipkin/zipkin-slim
container_name: zipkin
# Environment settings are defined here https://github.com/openzipkin/zipkin/blob/master/zipkin-server/README.md#environment-variables
environment:
- STORAGE_TYPE=mem
# Uncomment to enable self-tracing
# - SELF_TRACING_ENABLED=true
# Uncomment to enable debug logging
# - JAVA_OPTS=-Dorg.slf4j.simpleLogger.log.zipkin2=debug
ports:
# Port used for the Zipkin UI and HTTP Api
- 9411:9411
```
### Zipkin HTTP to DB traces
<img alt="Zipkin Traces" src={useBaseUrl("img/zipkin1.png")} />
### Zipkin trace details
<img alt="Zipkin Traces" src={useBaseUrl('img/zipkin2.png')} />

View File

@ -1,13 +0,0 @@
---
id: webui
title: Web UI / GraphQL Editor
sidebar_label: Web UI
---
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
<img alt="Zipkin Traces" src={useBaseUrl("img/webui.jpg")} />
Super Graph comes with a built-in GraphQL editor that only runs in development. Use it to craft your queries and copy-paste them into your app once you're ready. The editor supports auto-completion and schema documentation. This makes it super easy to craft and test your query all in one go without knowing anything about the underlying database structure.
You can even set query variables or http headers as required. To simulate an authenticated user set the http header `"X-USER-ID": 5` to the user id of the user you want to test with.
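
The same request the editor makes can be scripted from any client. A hedged Go sketch (the endpoint path and the header handling in development are assumptions based on the paragraph above, not a documented API):

```go
package main

import (
	"bytes"
	"fmt"
	"io"
	"net/http"
)

func main() {
	body := bytes.NewBufferString(`{"query": "query { users { id } }"}`)
	// Endpoint path is an assumption; use the path your dev server exposes.
	req, err := http.NewRequest("POST", "http://localhost:8080/api/v1/graphql", body)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-USER-ID", "5") // simulate the authenticated user id 5

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer res.Body.Close()
	out, _ := io.ReadAll(res.Body)
	fmt.Println(res.Status, string(out))
}
```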

View File

@ -36,8 +36,8 @@ module.exports = {
position: "left", position: "left",
}, },
{ {
label: "AbtCode", label: "Art Compute",
href: "https://abtcode.com/s/super-graph", href: "https://artcompute.com/s/super-graph",
position: "left", position: "left",
}, },
], ],

View File

@ -3,16 +3,13 @@ module.exports = {
Docusaurus: [ Docusaurus: [
"home", "home",
"intro", "intro",
"webui",
"start", "start",
"why", "why",
"graphql", "graphql",
"react", "react",
"advanced", "advanced",
"security", "security",
"telemetry",
"config", "config",
"seed",
"deploy", "deploy",
"internals", "internals",
], ],

Binary file not shown.


Binary file not shown.


Binary file not shown.


View File

@ -1805,6 +1805,11 @@ asynckit@^0.4.0:
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79" resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
integrity sha1-x57Zf380y48robyXkLzDZkdLS3k= integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
+at-least-node@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
+  integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
+
 atob@^2.1.2:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
@@ -2318,7 +2323,7 @@ ccount@^1.0.0, ccount@^1.0.3:
   resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.0.5.tgz#ac82a944905a65ce204eb03023157edf29425c17"
   integrity sha512-MOli1W+nfbPLlKEhInaxhRdp7KVLFxLN5ykwzHgLsLI3H3gs5jjFAK4Eoj3OzzcxCtumDaI8onoVDeQyWaNTkw==

-chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2:
+chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2:
   version "2.4.2"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
   integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
@@ -2517,6 +2522,15 @@ cliui@^5.0.0:
     strip-ansi "^5.2.0"
     wrap-ansi "^5.1.0"

+cliui@^6.0.0:
+  version "6.0.0"
+  resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
+  integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
+  dependencies:
+    string-width "^4.2.0"
+    strip-ansi "^6.0.0"
+    wrap-ansi "^6.2.0"
+
 coa@^2.0.2:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3"
@@ -3202,6 +3216,11 @@ depd@~1.1.2:
   resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
   integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=

+dependency-graph@^0.9.0:
+  version "0.9.0"
+  resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318"
+  integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w==
+
 des.js@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843"
@@ -3811,7 +3830,7 @@ fast-glob@^2.0.2:
     merge2 "^1.2.3"
     micromatch "^3.1.10"

-fast-glob@^3.0.3:
+fast-glob@^3.0.3, fast-glob@^3.1.1:
   version "3.2.2"
   resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.2.tgz#ade1a9d91148965d4bf7c51f72e1ca662d32e63d"
   integrity sha512-UDV82o4uQyljznxwMxyVRJgZZt3O5wENYojjzbaGEGZgeOxkLFf+V4cnUD+krzb2F72E18RhamkMZ7AdeggF7A==
@@ -3951,7 +3970,7 @@ find-cache-dir@^3.0.0, find-cache-dir@^3.3.1:
     make-dir "^3.0.2"
     pkg-dir "^4.1.0"

-find-up@4.1.0, find-up@^4.0.0:
+find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
   integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
@@ -4065,6 +4084,16 @@ fs-extra@^8.0.0, fs-extra@^8.1.0:
     jsonfile "^4.0.0"
     universalify "^0.1.0"

+fs-extra@^9.0.0:
+  version "9.0.0"
+  resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.0.0.tgz#b6afc31036e247b2466dc99c29ae797d5d4580a3"
+  integrity sha512-pmEYSk3vYsG/bF651KPUXZ+hvjpgWYw/Gc7W9NFUe3ZVLczKKWIij3IKpOrQcdw4TILtibFslZ0UmR8Vvzig4g==
+  dependencies:
+    at-least-node "^1.0.0"
+    graceful-fs "^4.2.0"
+    jsonfile "^6.0.1"
+    universalify "^1.0.0"
+
 fs-minipass@^2.0.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
@@ -4120,6 +4149,11 @@ get-own-enumerable-property-symbols@^3.0.0:
   resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
   integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==

+get-stdin@^7.0.0:
+  version "7.0.0"
+  resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-7.0.0.tgz#8d5de98f15171a125c5e516643c7a6d0ea8a96f6"
+  integrity sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==
+
 get-stream@^4.0.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
@@ -4241,6 +4275,18 @@ globby@^10.0.1:
     merge2 "^1.2.3"
     slash "^3.0.0"

+globby@^11.0.0:
+  version "11.0.0"
+  resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.0.tgz#56fd0e9f0d4f8fb0c456f1ab0dee96e1380bc154"
+  integrity sha512-iuehFnR3xu5wBBtm4xi0dMe92Ob87ufyu/dHwpDYfbcpYpIbrO5OnS8M1vWvrBhSGEJ3/Ecj7gnX76P8YxpPEg==
+  dependencies:
+    array-union "^2.1.0"
+    dir-glob "^3.0.1"
+    fast-glob "^3.1.1"
+    ignore "^5.1.4"
+    merge2 "^1.3.0"
+    slash "^3.0.0"
+
 globby@^6.1.0:
   version "6.1.0"
   resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"
@@ -4697,7 +4743,7 @@ ignore@^3.3.5:
   resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
   integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==

-ignore@^5.1.1:
+ignore@^5.1.1, ignore@^5.1.4:
   version "5.1.4"
   resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.4.tgz#84b7b3dbe64552b6ef0eca99f6743dbec6d97adf"
   integrity sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==
@@ -5336,6 +5382,15 @@ jsonfile@^4.0.0:
   optionalDependencies:
     graceful-fs "^4.1.6"

+jsonfile@^6.0.1:
+  version "6.0.1"
+  resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.0.1.tgz#98966cba214378c8c84b82e085907b40bf614179"
+  integrity sha512-jR2b5v7d2vIOust+w3wtFKZIfpC2pnRmFAhAC/BuweZFQR8qZzxH1OyrQ10HmdVYiXWkYUqPVsz91cG7EL2FBg==
+  dependencies:
+    universalify "^1.0.0"
+  optionalDependencies:
+    graceful-fs "^4.1.6"
+
 jsprim@^1.2.2:
   version "1.4.1"
   resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"
@@ -5601,6 +5656,13 @@ lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
   integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==

+log-symbols@^2.2.0:
+  version "2.2.0"
+  resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a"
+  integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==
+  dependencies:
+    chalk "^2.0.1"
+
 loglevel@^1.6.8:
   version "1.6.8"
   resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.8.tgz#8a25fb75d092230ecd4457270d80b54e28011171"
@@ -6583,7 +6645,7 @@ picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1:
   resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
   integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==

-pify@^2.0.0:
+pify@^2.0.0, pify@^2.3.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
   integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=
@@ -6669,6 +6731,24 @@ postcss-calc@^7.0.1:
     postcss-selector-parser "^6.0.2"
     postcss-value-parser "^4.0.2"

+postcss-cli@^7.1.1:
+  version "7.1.1"
+  resolved "https://registry.yarnpkg.com/postcss-cli/-/postcss-cli-7.1.1.tgz#260f9546be260b2149bf32e28d785a0d79c9aab8"
+  integrity sha512-bYQy5ydAQJKCMSpvaMg0ThPBeGYqhQXumjbFOmWnL4u65CYXQ16RfS6afGQpit0dGv/fNzxbdDtx8dkqOhhIbg==
+  dependencies:
+    chalk "^4.0.0"
+    chokidar "^3.3.0"
+    dependency-graph "^0.9.0"
+    fs-extra "^9.0.0"
+    get-stdin "^7.0.0"
+    globby "^11.0.0"
+    postcss "^7.0.0"
+    postcss-load-config "^2.0.0"
+    postcss-reporter "^6.0.0"
+    pretty-hrtime "^1.0.3"
+    read-cache "^1.0.0"
+    yargs "^15.0.2"
+
 postcss-color-functional-notation@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz#5efd37a88fbabeb00a2966d1e53d98ced93f74e0"
@@ -7208,6 +7288,16 @@ postcss-replace-overflow-wrap@^3.0.0:
   dependencies:
     postcss "^7.0.2"

+postcss-reporter@^6.0.0:
+  version "6.0.1"
+  resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-6.0.1.tgz#7c055120060a97c8837b4e48215661aafb74245f"
+  integrity sha512-LpmQjfRWyabc+fRygxZjpRxfhRf9u/fdlKf4VHG4TSPbV2XNsuISzYW1KL+1aQzx53CAppa1bKG4APIB/DOXXw==
+  dependencies:
+    chalk "^2.4.1"
+    lodash "^4.17.11"
+    log-symbols "^2.2.0"
+    postcss "^7.0.7"
+
 postcss-selector-matches@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz#71c8248f917ba2cc93037c9637ee09c64436fcff"
@@ -7307,7 +7397,7 @@ postcss@^6.0.9:
     source-map "^0.6.1"
     supports-color "^5.4.0"

-postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6:
+postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6, postcss@^7.0.7:
   version "7.0.30"
   resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.30.tgz#cc9378beffe46a02cbc4506a0477d05fcea9a8e2"
   integrity sha512-nu/0m+NtIzoubO+xdAlwZl/u5S5vi/y6BCsoL8D+8IxsD3XvBS8X4YEADNIVXKVuQvduiucnRv+vPIqj56EGMQ==
@@ -7602,11 +7692,6 @@ react-helmet@^6.0.0-beta:
     react-fast-compare "^2.0.4"
     react-side-effect "^2.1.0"

-react-hook-sticky@^0.2.0:
-  version "0.2.0"
-  resolved "https://registry.yarnpkg.com/react-hook-sticky/-/react-hook-sticky-0.2.0.tgz#0dcc40a2afb1856e53764af9b231f1146e3de576"
-  integrity sha512-J92F5H6PJQlMBgZ2tv58GeVlTZtEhpZ9bYLdoV2+5fVSJScszuY+TDZY3enQEAPIgJsLteFglGGuf8/TB9L72Q==
-
 react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1:
   version "16.13.1"
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
@@ -7700,6 +7785,13 @@ react@^16.8.4:
     object-assign "^4.1.1"
     prop-types "^15.6.2"

+read-cache@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774"
+  integrity sha1-5mTvMRYRZsl1HNvo28+GtftY93Q=
+  dependencies:
+    pify "^2.3.0"
+
 "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
   version "2.3.7"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
@@ -8617,7 +8709,7 @@ string-width@^3.0.0, string-width@^3.1.0:
     is-fullwidth-code-point "^2.0.0"
     strip-ansi "^5.1.0"

-string-width@^4.1.0:
+string-width@^4.1.0, string-width@^4.2.0:
   version "4.2.0"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
   integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
@@ -9213,6 +9305,11 @@ universalify@^0.1.0:
   resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
   integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==

+universalify@^1.0.0:
+  version "1.0.0"
+  resolved "https://registry.yarnpkg.com/universalify/-/universalify-1.0.0.tgz#b61a1da173e8435b2fe3c67d29b9adf8594bd16d"
+  integrity sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==
+
 unpipe@1.0.0, unpipe@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
@@ -9626,7 +9723,7 @@ wrap-ansi@^5.1.0:
     string-width "^3.0.0"
     strip-ansi "^5.0.0"

-wrap-ansi@^6.0.0:
+wrap-ansi@^6.0.0, wrap-ansi@^6.2.0:
   version "6.2.0"
   resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
   integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
@@ -9687,6 +9784,14 @@ yargs-parser@^13.1.2:
     camelcase "^5.0.0"
     decamelize "^1.2.0"

+yargs-parser@^18.1.1:
+  version "18.1.3"
+  resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
+  integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
+  dependencies:
+    camelcase "^5.0.0"
+    decamelize "^1.2.0"
+
 yargs@^13.3.2:
   version "13.3.2"
   resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
@@ -9703,6 +9808,23 @@ yargs@^13.3.2:
     y18n "^4.0.0"
     yargs-parser "^13.1.2"

+yargs@^15.0.2:
+  version "15.3.1"
+  resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b"
+  integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==
+  dependencies:
+    cliui "^6.0.0"
+    decamelize "^1.2.0"
+    find-up "^4.1.0"
+    get-caller-file "^2.0.1"
+    require-directory "^2.1.1"
+    require-main-filename "^2.0.0"
+    set-blocking "^2.0.0"
+    string-width "^4.2.0"
+    which-module "^2.0.0"
+    y18n "^4.0.0"
+    yargs-parser "^18.1.1"
+
 zepto@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/zepto/-/zepto-1.2.0.tgz#e127bd9e66fd846be5eab48c1394882f7c0e4f98"

2
go.mod
View File

@@ -12,6 +12,7 @@ require (
 	github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
 	github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
 	github.com/brianvoe/gofakeit/v5 v5.2.0
+	github.com/cespare/xxhash/v2 v2.1.1
 	github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a
 	github.com/daaku/go.zipexe v1.0.1 // indirect
 	github.com/dgrijalva/jwt-go v3.2.0+incompatible
@@ -36,6 +37,7 @@ require (
 	github.com/spf13/pflag v1.0.5 // indirect
 	github.com/spf13/viper v1.6.3
 	github.com/stretchr/testify v1.5.1
+	github.com/valyala/fasttemplate v1.1.0
 	go.opencensus.io v0.22.3
 	go.uber.org/zap v1.14.1
 	golang.org/x/crypto v0.0.0-20200414173820-0848c9571904

13
go.sum
View File

@@ -35,9 +35,7 @@ github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3 h1:+qz9Ga6l6lKw6fgv
 github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3/go.mod h1:FlkD11RtgMTYjVuBnb7cxoHmQGqvPpCsr2atC88nl/M=
 github.com/akavel/rsrc v0.8.0 h1:zjWn7ukO9Kc5Q62DOJCcxGpXC18RawVtYAGdz2aLlfw=
 github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
-github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
-github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
 github.com/aws/aws-sdk-go v1.15.27 h1:i75BxN4Es/8rTVQbEKAP1WCiIhhz635xTNeDdZJRAXQ=
@@ -55,6 +53,8 @@ github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
+github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
+github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a h1:WVu7r2vwlrBVmunbSSU+9/3M3AgsQyhE49CKDjHiFq4=
 github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a/go.mod h1:wQjjxFMFyMlsWh4Z3nMuHQtevD4Ul9UVQSnz1JOLuP8=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
@@ -113,7 +113,6 @@ github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFG
 github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
 github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
 github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
-github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
 github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
 github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
 github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6 h1:ZgQEtGgCBiWRM39fZuwSd1LwSqqSW0hOdXCYYDX0R3I=
@@ -132,7 +131,6 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
 github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
 github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
 github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
-github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
 github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
 github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
 github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
@@ -144,9 +142,7 @@ github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+
 github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
 github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
-github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=
 github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
-github.com/gorilla/mux v1.6.2 h1:Pgr17XVTNXAk3q/r4CpKzC5xBM/qW1uVLV+IhRZpIIk=
 github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
 github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
 github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
@@ -220,7 +216,6 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V
 github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
-github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
 github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
 github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
@@ -320,7 +315,6 @@ github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeV
 github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
 github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
 github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
-github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=
 github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
@@ -375,6 +369,8 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw
 github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
 github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8=
 github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
+github.com/valyala/fasttemplate v1.1.0 h1:RZqt0yGBsps8NGvLSGW804QQqCUYYLsaOjTVHy1Ocw4=
+github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
 github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
 github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
 github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
@@ -545,7 +541,6 @@ google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ij
 google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
 google.golang.org/grpc v1.23.1 h1:q4XQuHFC6I28BKZpo6IYyb3mNO+l7lSOxRuYTCiDfXk=
 google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
-gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
 gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=

View File

@@ -155,8 +155,8 @@ func cmdVersion(cmd *cobra.Command, args []string) {
 }

 func BuildDetails() string {
-	if version == "" {
-		return `
+	if len(version) == 0 {
+		return fmt.Sprintf(`
 Super Graph (unknown version)
 For documentation, visit https://supergraph.dev
@@ -166,7 +166,7 @@ To build with version information please use the Makefile
 Licensed under the Apache Public License 2.0
 Copyright 2020, Vikram Rangnekar
-`
+`)
 	}

 	return fmt.Sprintf(`

View File

@@ -298,9 +298,9 @@ func ExtractErrorLine(source string, position int) (ErrorLineExtract, error) {

 func getMigrationVars() map[string]interface{} {
 	return map[string]interface{}{
-		"AppName":     strings.Title(conf.AppName),
-		"AppNameSlug": strings.ToLower(strings.Replace(conf.AppName, " ", "_", -1)),
-		"Env":         strings.ToLower(os.Getenv("GO_ENV")),
+		"app_name":      strings.Title(conf.AppName),
+		"app_name_slug": strings.ToLower(strings.Replace(conf.AppName, " ", "_", -1)),
+		"env":           strings.ToLower(os.Getenv("GO_ENV")),
 	}
 }

View File

@@ -2,7 +2,8 @@ package serv

 import (
 	"bytes"
-	"html/template"
+	"fmt"
+	"io"
 	"io/ioutil"
 	"os"
 	"path"
@@ -10,6 +11,7 @@ import (
 	rice "github.com/GeertJohan/go.rice"
 	"github.com/spf13/cobra"
+	"github.com/valyala/fasttemplate"
 )

 func cmdNew(cmd *cobra.Command, args []string) {
@@ -19,8 +21,8 @@ func cmdNew(cmd *cobra.Command, args []string) {
 	}

 	tmpl := newTempl(map[string]string{
-		"AppName":     strings.Title(strings.Join(args, " ")),
-		"AppNameSlug": strings.ToLower(strings.Join(args, "_")),
+		"app_name":      strings.Title(strings.Join(args, " ")),
+		"app_name_slug": strings.ToLower(strings.Join(args, "_")),
 	})

 	// Create app folder and add relevant files
@@ -88,10 +90,6 @@ func cmdNew(cmd *cobra.Command, args []string) {
 		}
 	})

-	ifNotExists(path.Join(appConfigPath, "allow.list"), func(p string) error {
-		return ioutil.WriteFile(p, []byte{}, 0644)
-	})
-
 	// Create app migrations folder and add relevant files
 	appMigrationsPath := path.Join(appConfigPath, "migrations")
@@ -123,16 +121,19 @@ func newTempl(data map[string]string) *Templ {
 func (t *Templ) get(name string) ([]byte, error) {
 	v := t.MustString(name)
 	b := bytes.Buffer{}

-	tmpl, err := template.New(name).Parse(v)
+	tmpl := fasttemplate.New(v, "{%", "%}")
+
+	_, err := tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
+		if val, ok := t.data[strings.TrimSpace(tag)]; ok {
+			return w.Write([]byte(val))
+		}
+		return 0, fmt.Errorf("unknown template variable '%s'", tag)
+	})

 	if err != nil {
 		return nil, err
 	}
-	if err := tmpl.Execute(&b, t.data); err != nil {
-		return nil, err
-	}

 	return b.Bytes(), nil
 }

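The hunk above is the heart of this file's change: scaffolding templates move from Go's html/template ({{ .AppName }}) to valyala/fasttemplate tags written as {% app_name %}. A minimal, runnable sketch of the same rendering pattern follows; the template string and variable map here are illustrative, not taken from the repo.

package main

import (
	"fmt"
	"io"
	"os"
	"strings"

	"github.com/valyala/fasttemplate"
)

func main() {
	data := map[string]string{"app_name_slug": "blog"}

	// Tags are delimited by {% and %}, the same delimiters used in the diff.
	t := fasttemplate.New("dbname: {% app_name_slug %}_development\n", "{%", "%}")

	// ExecuteFunc lets the caller reject unknown tags instead of silently
	// dropping them; fasttemplate passes the raw tag (including spaces),
	// hence the TrimSpace.
	if _, err := t.ExecuteFunc(os.Stdout, func(w io.Writer, tag string) (int, error) {
		if v, ok := data[strings.TrimSpace(tag)]; ok {
			return w.Write([]byte(v))
		}
		return 0, fmt.Errorf("unknown template variable '%s'", tag)
	}); err != nil {
		panic(err)
	}
}

Failing loudly on an unknown tag mirrors the error return in the diff, which is stricter than html/template's default of rendering a zero value.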
View File

@@ -17,8 +17,6 @@ import (
 	"github.com/dop251/goja"
 	"github.com/dosco/super-graph/core"
 	"github.com/gosimple/slug"
-	"github.com/jackc/pgx/v4"
-	"github.com/jackc/pgx/v4/stdlib"
 	"github.com/spf13/cobra"
 )
@@ -29,7 +27,6 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
 		log.Fatalf("ERR failed to read config: %s", err)
 	}
 	conf.Production = false
-	conf.DefaultBlock = false

 	db, err = initDB(conf, true, false)
 	if err != nil {
@@ -54,7 +51,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
 	vm := goja.New()
 	vm.Set("graphql", graphQLFn)
-	vm.Set("import_csv", importCSV)
+	//vm.Set("import_csv", importCSV)

 	console := vm.NewObject()
 	console.Set("log", logFunc) //nolint: errcheck
@@ -80,8 +77,10 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
 func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
 	ct := context.Background()

-	if v, ok := opt["user_id"]; ok && v != "" {
+	if v, ok := opt["user_id"]; ok && len(v) != 0 {
 		ct = context.WithValue(ct, core.UserIDKey, v)
+	} else {
+		ct = context.WithValue(ct, core.UserIDKey, "-1")
 	}

 	// var role string
@@ -144,7 +143,7 @@ func (c *csvSource) Values() ([]interface{}, error) {
 	for _, v := range c.rows[c.i] {
 		switch {
-		case v == "":
+		case len(v) == 0:
 			vals = append(vals, "")
 		case isDigit(v):
 			var n int
@@ -182,42 +181,34 @@ func (c *csvSource) Err() error {
 	return nil
 }

-func importCSV(table, filename string) int64 {
-	if filename[0] != '/' {
-		filename = path.Join(confPath, filename)
-	}
-
-	s, err := NewCSVSource(filename)
-	if err != nil {
-		log.Fatalf("ERR %v", err)
-	}
-
-	var cols []string
-	colval, _ := s.Values()
-
-	for _, c := range colval {
-		cols = append(cols, c.(string))
-	}
-
-	conn, err := stdlib.AcquireConn(db)
-	if err != nil {
-		log.Fatalf("ERR %v", err)
-	}
-	//nolint: errcheck
-	defer stdlib.ReleaseConn(db, conn)
-
-	n, err := conn.CopyFrom(
-		context.Background(),
-		pgx.Identifier{table},
-		cols,
-		s)
-
-	if err != nil {
-		log.Fatalf("ERR %v", fmt.Errorf("%w (line no %d)", err, s.i))
-	}
-
-	return n
-}
+// func importCSV(table, filename string) int64 {
+// 	if filename[0] != '/' {
+// 		filename = path.Join(conf.ConfigPathUsed(), filename)
+// 	}
+
+// 	s, err := NewCSVSource(filename)
+// 	if err != nil {
+// 		log.Fatalf("ERR %s", err)
+// 	}
+
+// 	var cols []string
+// 	colval, _ := s.Values()
+
+// 	for _, c := range colval {
+// 		cols = append(cols, c.(string))
+// 	}
+
+// 	n, err := db.Exec(fmt.Sprintf("COPY %s FROM STDIN WITH "),
+// 		cols,
+// 		s)
+
+// 	if err != nil {
+// 		err = fmt.Errorf("%w (line no %d)", err, s.i)
+// 		log.Fatalf("ERR %s", err)
+// 	}
+
+// 	return n
+// }

 //nolint: errcheck
 func logFunc(args ...interface{}) {
@@ -243,7 +234,7 @@ func avatarURL(size int) string {
 	return fmt.Sprintf("https://i.pravatar.cc/%d?%d", size, rand.Intn(5000))
 }

-func imageURL(width, height int) string {
+func imageURL(width int, height int) string {
 	return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
 }
@@ -386,6 +377,11 @@ func setFakeFuncs(f *goja.Object) {
 	f.Set("hipster_paragraph", gofakeit.HipsterParagraph)
 	f.Set("hipster_sentence", gofakeit.HipsterSentence)

+	//Languages
+	//f.Set("language", gofakeit.Language)
+	//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
+	//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
+
 	// File
 	f.Set("file_extension", gofakeit.FileExtension)
 	f.Set("file_mine_type", gofakeit.FileMimeType)
@@ -414,6 +410,8 @@ func setFakeFuncs(f *goja.Object) {
 	f.Set("lexify", gofakeit.Lexify)
 	f.Set("rand_string", getRandValue)
 	f.Set("numerify", gofakeit.Numerify)
+
+	//f.Set("programming_language", gofakeit.ProgrammingLanguage)
 }

 //nolint: errcheck

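The graphQLFunc hunk above defaults the seed script to an anonymous user ("-1") when no user_id option is passed. A hedged sketch of how a seed helper would call the embedded engine with that context; the helper name and signature are assumptions, while core.UserIDKey and sg.GraphQL are the real Super Graph APIs used elsewhere in this codebase.

// seedQuery runs a GraphQL query or mutation from a seed script with an
// explicit user in context, falling back to "-1" as in the diff above.
func seedQuery(sg *core.SuperGraph, query string, userID string) ([]byte, error) {
	ct := context.Background()
	if userID != "" {
		ct = context.WithValue(ct, core.UserIDKey, userID)
	} else {
		ct = context.WithValue(ct, core.UserIDKey, "-1")
	}

	res, err := sg.GraphQL(ct, query, nil)
	if err != nil {
		return nil, err
	}
	return res.Data, nil
}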
View File

@@ -66,11 +66,9 @@ func newViper(configPath, configFile string) *viper.Viper {
 	vi.SetDefault("host_port", "0.0.0.0:8080")
 	vi.SetDefault("web_ui", false)
 	vi.SetDefault("enable_tracing", false)
-	vi.SetDefault("auth_fail_block", false)
+	vi.SetDefault("auth_fail_block", "always")
 	vi.SetDefault("seed_file", "seed.js")

-	vi.SetDefault("default_block", true)
-
 	vi.SetDefault("database.type", "postgres")
 	vi.SetDefault("database.host", "localhost")
 	vi.SetDefault("database.port", 5432)
@@ -90,7 +88,7 @@ func newViper(configPath, configFile string) *viper.Viper {
 }

 func GetConfigName() string {
-	if os.Getenv("GO_ENV") == "" {
+	if len(os.Getenv("GO_ENV")) == 0 {
 		return "dev"
 	}

View File

@@ -7,8 +7,8 @@ import (

 var healthyResponse = []byte("All's Well")

-func health(w http.ResponseWriter, r *http.Request) {
-	ct, cancel := context.WithTimeout(r.Context(), conf.DB.PingTimeout)
+func health(w http.ResponseWriter, _ *http.Request) {
+	ct, cancel := context.WithTimeout(context.Background(), conf.DB.PingTimeout)
 	defer cancel()

 	if err := db.PingContext(ct); err != nil {

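For context, the pattern both sides of this hunk share is a bounded database ping; only the parent context differs. A minimal sketch under stated assumptions (db is a package-level *sql.DB and pingTimeout a configured time.Duration; neither name is from the diff):

func health(w http.ResponseWriter, r *http.Request) {
	// Bound the liveness check so a hung database cannot hang the probe.
	ct, cancel := context.WithTimeout(r.Context(), pingTimeout)
	defer cancel()

	if err := db.PingContext(ct); err != nil {
		http.Error(w, http.StatusText(http.StatusInternalServerError),
			http.StatusInternalServerError)
		return
	}
	w.Write([]byte("All's Well")) //nolint: errcheck
}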
View File

@@ -105,7 +105,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
 	}

 	if err == nil {
-		if conf.CacheControl != "" && res.Operation() == core.OpQuery {
+		if len(conf.CacheControl) != 0 && res.Operation() == core.OpQuery {
 			w.Header().Set("Cache-Control", conf.CacheControl)
 		}
 		//nolint: errcheck

View File

@@ -15,6 +15,7 @@ import (
 	"contrib.go.opencensus.io/integrations/ocsql"
 	"github.com/jackc/pgx/v4"
 	"github.com/jackc/pgx/v4/stdlib"
+	//_ "github.com/jackc/pgx/v4/stdlib"
 )

 const (
@@ -215,7 +216,7 @@ func initDB(c *Config, useDB, useTelemetry bool) (*sql.DB, error) {
 	if useTelemetry && conf.telemetryEnabled() {
 		opts := ocsql.TraceOptions{
-			AllowRoot: true,
+			AllowRoot: false,
 			Ping:      true,
 			RowsNext:  true,
 			RowsClose: true,

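The telemetry hunk only flips AllowRoot, but the surrounding wiring is worth spelling out. A hedged sketch of registering an ocsql-wrapped driver with these options; the function name, driver name, and connection string are placeholders, and ocsql.Register/WithOptions are used here to the best of my knowledge of that package:

// openTracedDB registers a driver wrapped with OpenCensus tracing and opens
// a pool through it. AllowRoot: false means calls without a parent span do
// not start new root spans.
func openTracedDB(connString string) (*sql.DB, error) {
	driverName, err := ocsql.Register("pgx", ocsql.WithOptions(ocsql.TraceOptions{
		AllowRoot: false,
		Ping:      true,
		RowsNext:  true,
		RowsClose: true,
	}))
	if err != nil {
		return nil, err
	}
	return sql.Open(driverName, connString)
}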
View File

@@ -32,7 +32,6 @@ type Auth struct {
 		Secret     string
 		PubKeyFile string `mapstructure:"public_key_file"`
 		PubKeyType string `mapstructure:"public_key_type"`
-		Audience   string `mapstructure:"audience"`
 	}

 	Header struct {
@@ -47,17 +46,17 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		ctx := r.Context()

 		userIDProvider := r.Header.Get("X-User-ID-Provider")
-		if userIDProvider != "" {
+		if len(userIDProvider) != 0 {
 			ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
 		}

 		userID := r.Header.Get("X-User-ID")
-		if userID != "" {
+		if len(userID) != 0 {
 			ctx = context.WithValue(ctx, core.UserIDKey, userID)
 		}

 		userRole := r.Header.Get("X-User-Role")
-		if userRole != "" {
+		if len(userRole) != 0 {
 			ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
 		}
@@ -68,11 +67,11 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	hdr := ac.Header

-	if hdr.Name == "" {
+	if len(hdr.Name) == 0 {
 		return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
 	}

-	if !hdr.Exists && hdr.Value == "" {
+	if !hdr.Exists && len(hdr.Value) == 0 {
 		return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
 	}
@@ -82,7 +81,7 @@ func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	switch {
 	case hdr.Exists:
-		fo1 = (value == "")
+		fo1 = (len(value) == 0)
 	default:
 		fo1 = (value != hdr.Value)

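Both sides of the SimpleHandler hunks implement the same flow: copy identity headers into the request context, then call the next handler. A compact sketch of that pattern; the middleware name is illustrative, while the header names and context keys are the ones used in the diff:

// simpleAuth propagates X-User-* headers into the context so downstream
// resolvers can read the caller's identity and role.
func simpleAuth(next http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()
		if v := r.Header.Get("X-User-ID-Provider"); v != "" {
			ctx = context.WithValue(ctx, core.UserIDProviderKey, v)
		}
		if v := r.Header.Get("X-User-ID"); v != "" {
			ctx = context.WithValue(ctx, core.UserIDKey, v)
		}
		if v := r.Header.Get("X-User-Role"); v != "" {
			ctx = context.WithValue(ctx, core.UserRoleKey, v)
		}
		next.ServeHTTP(w, r.WithContext(ctx))
	}
}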
View File

@@ -2,32 +2,19 @@ package auth

 import (
 	"context"
-	"encoding/json"
 	"io/ioutil"
 	"net/http"
-	"strconv"
 	"strings"
-	"time"

 	jwt "github.com/dgrijalva/jwt-go"
 	"github.com/dosco/super-graph/core"
 )

 const (
 	authHeader = "Authorization"
 	jwtAuth0 int = iota + 1
-	jwtFirebase int = iota + 2
-	firebasePKEndpoint = "https://www.googleapis.com/robot/v1/metadata/x509/securetoken@system.gserviceaccount.com"
-	firebaseIssuerPrefix = "https://securetoken.google.com/"
 )

-type firebasePKCache struct {
-	PublicKeys map[string]string
-	Expiration time.Time
-}
-
-var firebasePublicKeys firebasePKCache
-
 func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	var key interface{}
 	var jwtProvider int
@@ -36,18 +23,16 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	if ac.JWT.Provider == "auth0" {
 		jwtProvider = jwtAuth0
-	} else if ac.JWT.Provider == "firebase" {
-		jwtProvider = jwtFirebase
 	}

 	secret := ac.JWT.Secret
 	publicKeyFile := ac.JWT.PubKeyFile

 	switch {
-	case secret != "":
+	case len(secret) != 0:
 		key = []byte(secret)

-	case publicKeyFile != "":
+	case len(publicKeyFile) != 0:
 		kd, err := ioutil.ReadFile(publicKeyFile)
 		if err != nil {
 			return nil, err
@@ -71,10 +56,9 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	}

 	return func(w http.ResponseWriter, r *http.Request) {
 		var tok string

-		if cookie != "" {
+		if len(cookie) != 0 {
 			ck, err := r.Cookie(cookie)
 			if err != nil {
 				next.ServeHTTP(w, r)
@@ -90,16 +74,9 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 			tok = ah[7:]
 		}

-		var keyFunc jwt.Keyfunc
-		if jwtProvider == jwtFirebase {
-			keyFunc = firebaseKeyFunction
-		} else {
-			keyFunc = func(token *jwt.Token) (interface{}, error) {
-				return key, nil
-			}
-		}
-
-		token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, keyFunc)
+		token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, func(token *jwt.Token) (interface{}, error) {
+			return key, nil
+		})

 		if err != nil {
 			next.ServeHTTP(w, r)
@@ -109,20 +86,12 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		if claims, ok := token.Claims.(*jwt.StandardClaims); ok {
 			ctx := r.Context()

-			if ac.JWT.Audience != "" && claims.Audience != ac.JWT.Audience {
-				next.ServeHTTP(w, r)
-				return
-			}
-
 			if jwtProvider == jwtAuth0 {
 				sub := strings.Split(claims.Subject, "|")
 				if len(sub) != 2 {
 					ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
 					ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
 				}
-			} else if jwtProvider == jwtFirebase &&
-				claims.Issuer == firebaseIssuerPrefix+ac.JWT.Audience {
-				ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
 			} else {
 				ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
 			}
@@ -134,92 +103,3 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		next.ServeHTTP(w, r)
 	}, nil
 }
-
-type firebaseKeyError struct {
-	Err     error
-	Message string
-}
-
-func (e *firebaseKeyError) Error() string {
-	return e.Message + " " + e.Err.Error()
-}
-
-func firebaseKeyFunction(token *jwt.Token) (interface{}, error) {
-	kid, ok := token.Header["kid"]
-
-	if !ok {
-		return nil, &firebaseKeyError{
-			Message: "Error 'kid' header not found in token",
-		}
-	}
-
-	if firebasePublicKeys.Expiration.Before(time.Now()) {
-		resp, err := http.Get(firebasePKEndpoint)
-
-		if err != nil {
-			return nil, &firebaseKeyError{
-				Message: "Error connecting to firebase certificate server",
-				Err:     err,
-			}
-		}
-
-		defer resp.Body.Close()
-
-		data, err := ioutil.ReadAll(resp.Body)
-		if err != nil {
-			return nil, &firebaseKeyError{
-				Message: "Error reading firebase certificate server response",
-				Err:     err,
-			}
-		}
-
-		cachePolicy := resp.Header.Get("cache-control")
-		ageIndex := strings.Index(cachePolicy, "max-age=")
-
-		if ageIndex < 0 {
-			return nil, &firebaseKeyError{
-				Message: "Error parsing cache-control header: 'max-age=' not found",
-			}
-		}
-
-		ageToEnd := cachePolicy[ageIndex+8:]
-		endIndex := strings.Index(ageToEnd, ",")
-		if endIndex < 0 {
-			endIndex = len(ageToEnd) - 1
-		}
-		ageString := ageToEnd[:endIndex]
-
-		age, err := strconv.ParseInt(ageString, 10, 64)
-		if err != nil {
-			return nil, &firebaseKeyError{
-				Message: "Error parsing max-age cache policy",
-				Err:     err,
-			}
-		}
-
-		expiration := time.Now().Add(time.Duration(time.Duration(age) * time.Second))
-
-		err = json.Unmarshal(data, &firebasePublicKeys.PublicKeys)
-
-		if err != nil {
-			firebasePublicKeys = firebasePKCache{}
-			return nil, &firebaseKeyError{
-				Message: "Error unmarshalling firebase public key json",
-				Err:     err,
-			}
-		}
-
-		firebasePublicKeys.Expiration = expiration
-	}
-
-	if key, found := firebasePublicKeys.PublicKeys[kid.(string)]; found {
-		k, err := jwt.ParseRSAPublicKeyFromPEM([]byte(key))
-		return k, err
-	}
-
-	return nil, &firebaseKeyError{
-		Message: "Error no matching public key for kid supplied in jwt",
-	}
-}

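The right-hand side of this file collapses key selection back into an inline closure; the firebase provider, its audience check, and the cached public-key lookup exist only on the master side. A sketch of the minimal static-key path with dgrijalva/jwt-go, as used on the right; the middleware wrapper and the 7-byte "Bearer " offset are assumptions consistent with the file:

// jwtAuthSketch validates a bearer token against a single shared secret
// and, on success, stores the subject claim in the request context.
func jwtAuthSketch(secret string, next http.Handler) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		ah := r.Header.Get("Authorization")
		if len(ah) < 8 {
			next.ServeHTTP(w, r) // fall through unauthenticated
			return
		}
		tok := ah[7:] // strip the "Bearer " prefix

		token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{},
			func(t *jwt.Token) (interface{}, error) {
				return []byte(secret), nil // static HMAC key
			})
		if err != nil || !token.Valid {
			next.ServeHTTP(w, r)
			return
		}
		if claims, ok := token.Claims.(*jwt.StandardClaims); ok {
			ctx := context.WithValue(r.Context(), core.UserIDKey, claims.Subject)
			r = r.WithContext(ctx)
		}
		next.ServeHTTP(w, r)
	}
}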
View File

@@ -165,7 +165,7 @@ func railsAuth(ac *Auth) (*rails.Auth, error) {
 	}

 	version := ac.Rails.Version
-	if version == "" {
+	if len(version) == 0 {
 		return nil, errors.New("no auth.rails.version defined")
 	}

View File

@@ -6,11 +6,9 @@ import (
 	"database/sql"
 	"fmt"
 	"io/ioutil"
-	"log"
 	"os"
 	"path/filepath"
 	"regexp"
-	"sort"
 	"strconv"
 	"strings"
 	"text/template"
@@ -107,40 +105,39 @@ func (defaultMigratorFS) Glob(pattern string) ([]string, error) {
 func FindMigrationsEx(path string, fs MigratorFS) ([]string, error) {
 	path = strings.TrimRight(path, string(filepath.Separator))

-	files, err := ioutil.ReadDir(path)
+	fileInfos, err := fs.ReadDir(path)
 	if err != nil {
-		log.Fatal(err)
+		return nil, err
 	}

-	fm := make(map[int]string, len(files))
-	keys := make([]int, 0, len(files))
-
-	for _, fi := range files {
+	paths := make([]string, 0, len(fileInfos))
+	for _, fi := range fileInfos {
 		if fi.IsDir() {
 			continue
 		}

 		matches := migrationPattern.FindStringSubmatch(fi.Name())
 		if len(matches) != 2 {
 			continue
 		}

-		n, err := strconv.Atoi(matches[1])
+		n, err := strconv.ParseInt(matches[1], 10, 32)
 		if err != nil {
 			// The regexp already validated that the prefix is all digits so this *should* never fail
 			return nil, err
 		}

-		fm[n] = filepath.Join(path, fi.Name())
-		keys = append(keys, n)
-	}
-
-	sort.Ints(keys)
-
-	paths := make([]string, 0, len(keys))
-	for _, k := range keys {
-		paths = append(paths, fm[k])
+		mcount := len(paths)
+
+		if n < int64(mcount) {
+			return nil, fmt.Errorf("Duplicate migration %d", n)
+		}
+
+		if int64(mcount) < n {
+			return nil, fmt.Errorf("Missing migration %d", mcount)
+		}
+
+		paths = append(paths, filepath.Join(path, fi.Name()))
 	}

 	return paths, nil
@@ -199,7 +196,7 @@ func (m *Migrator) LoadMigrations(path string) error {
 	for _, v := range strings.Split(upSQL, "\n") {
 		// Only account for regular single line comment, empty line and space/comment combination
 		cleanString := strings.TrimSpace(v)
-		if cleanString != "" &&
+		if len(cleanString) != 0 &&
 			!strings.HasPrefix(cleanString, "--") {
 			containsSQL = true
 			break

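The rewritten loop replaces the sort-based ordering with a strict invariant: the numeric prefix of the i-th accepted migration file must be exactly i, so both duplicates and gaps are rejected in a single pass. A sketch of that check in isolation; the function name and the prefixes/names inputs are hypothetical, standing in for the migrationPattern matches over a directory listing:

// validateMigrationOrder rejects out-of-sequence migration files. prefixes[i]
// is the numeric prefix extracted from names[i].
func validateMigrationOrder(prefixes, names []string) ([]string, error) {
	paths := make([]string, 0, len(names))
	for i, name := range names {
		n, err := strconv.ParseInt(prefixes[i], 10, 32)
		if err != nil {
			return nil, err
		}
		mcount := len(paths)
		if n < int64(mcount) {
			return nil, fmt.Errorf("Duplicate migration %d", n)
		}
		if int64(mcount) < n {
			return nil, fmt.Errorf("Missing migration %d", mcount)
		}
		paths = append(paths, name)
	}
	return paths, nil
}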
File diff suppressed because one or more lines are too long

View File

@@ -27,7 +27,7 @@ func initWatcher() {
 	}

 	var d dir
-	if cpath == "" || cpath == "./" {
+	if len(cpath) == 0 || cpath == "./" {
 		d = Dir("./config", ReExec)
 	} else {
 		d = Dir(cpath, ReExec)
@@ -52,11 +52,11 @@ func startHTTP() {
 	hp := strings.SplitN(conf.HostPort, ":", 2)

 	if len(hp) == 2 {
-		if conf.Host != "" {
+		if len(conf.Host) != 0 {
 			hp[0] = conf.Host
 		}

-		if conf.Port != "" {
+		if len(conf.Port) != 0 {
 			hp[1] = conf.Port
 		}
@@ -64,7 +64,7 @@ func startHTTP() {
 		}
 	}

-	if conf.hostPort == "" {
+	if len(conf.hostPort) == 0 {
 		conf.hostPort = defaultHP
 	}
@@ -123,7 +123,7 @@ func routeHandler() (http.Handler, error) {
 		return mux, nil
 	}

-	if conf.APIPath != "" {
+	if len(conf.APIPath) != 0 {
 		apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
 	}

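Both sides of the startHTTP hunks implement the same precedence: explicit host and port settings override the combined host_port string, and a default applies last. A sketch of that resolution extracted into a standalone function; the function name and defaultHP value are placeholders:

const defaultHP = "0.0.0.0:8080" // placeholder default

// resolveHostPort merges host_port with optional host/port overrides.
func resolveHostPort(hostPort, host, port string) string {
	if hp := strings.SplitN(hostPort, ":", 2); len(hp) == 2 {
		if host != "" {
			hp[0] = host
		}
		if port != "" {
			hp[1] = port
		}
		hostPort = hp[0] + ":" + hp[1]
	}
	if hostPort == "" {
		hostPort = defaultHP
	}
	return hostPort
}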
View File

@@ -5,7 +5,7 @@ steps:
     [
       "build",
       "--tag",
-      "gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest",
+      "gcr.io/$PROJECT_ID/{% app_name_slug %}:latest",
       "--build-arg",
      "GO_ENV=production",
      ".",
@@ -13,7 +13,7 @@ steps:
 # Push new image to Google Container Registry
 - name: "gcr.io/cloud-builders/docker"
-  args: ["push", "gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest"]
+  args: ["push", "gcr.io/$PROJECT_ID/{% app_name_slug %}:latest"]

 # Deploy image to Cloud Run
 - name: "gcr.io/cloud-builders/gcloud"
@@ -23,15 +23,15 @@ steps:
       "deploy",
       "data",
       "--image",
-      "gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest",
+      "gcr.io/$PROJECT_ID/{% app_name_slug %}:latest",
       "--add-cloudsql-instances",
-      "$PROJECT_ID:$REGION:{{- .AppNameSlug -}}_production",
+      "$PROJECT_ID:$REGION:{% app_name_slug %}_production",
       "--region",
       "$REGION",
       "--platform",
       "managed",
       "--update-env-vars",
-      "GO_ENV=production,SG_DATABASE_HOST=/cloudsql/$PROJECT_ID:$REGION:{{- .AppNameSlug -}}_production,SECRETS_FILE=prod.secrets.yml",
+      "GO_ENV=production,SG_DATABASE_HOST=/cloudsql/$PROJECT_ID:$REGION:{% app_name_slug %}_production,SECRETS_FILE=prod.secrets.yml",
       "--port",
       "8080",
       "--service-account",

View File

@@ -1,4 +1,4 @@
-app_name: "{{- .AppName }} Development"
+app_name: "{% app_name %} Development"
 host_port: 0.0.0.0:8080
 web_ui: true
@@ -82,7 +82,7 @@ cors_debug: false
 auth:
   # Can be 'rails', 'jwt' or 'header'
   type: rails
-  cookie: _{{- .AppNameSlug -}}_session
+  cookie: _{% app_name_slug %}_session

 # Comment this out if you want to disable setting
 # the user_id via a header for testing.
@@ -134,7 +134,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {{ .AppNameSlug -}}_development
+  dbname: {% app_name_slug %}_development
   user: postgres
   password: postgres

View File

@@ -9,10 +9,48 @@ services:
     ports:
       - "5432:5432"

-  {{ .AppNameSlug -}}_api:
+  # Yugabyte DB
+  # yb-master:
+  #   image: yugabytedb/yugabyte:latest
+  #   container_name: yb-master-n1
+  #   command: [ "/home/yugabyte/bin/yb-master",
+  #             "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
+  #             "--master_addresses=yb-master-n1:7100",
+  #             "--replication_factor=1",
+  #             "--enable_ysql=true"]
+  #   ports:
+  #   - "7000:7000"
+  #   environment:
+  #     SERVICE_7000_NAME: yb-master
+
+  # db:
+  #   image: yugabytedb/yugabyte:latest
+  #   container_name: yb-tserver-n1
+  #   command: [ "/home/yugabyte/bin/yb-tserver",
+  #             "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
+  #             "--start_pgsql_proxy",
+  #             "--tserver_master_addrs=yb-master-n1:7100"]
+  #   ports:
+  #   - "9042:9042"
+  #   - "6379:6379"
+  #   - "5433:5433"
+  #   - "9000:9000"
+  #   environment:
+  #     SERVICE_5433_NAME: ysql
+  #     SERVICE_9042_NAME: ycql
+  #     SERVICE_6379_NAME: yedis
+  #     SERVICE_9000_NAME: yb-tserver
+  #   depends_on:
+  #   - yb-master
+
+  {% app_name_slug %}_api:
     image: dosco/super-graph:latest
     environment:
       GO_ENV: "development"
+      # Uncomment below for Yugabyte DB
+      # SG_DATABASE_PORT: 5433
+      # SG_DATABASE_USER: yugabyte
+      # SG_DATABASE_PASSWORD: yugabyte
     volumes:
       - ./config:/config
     ports:

View File

@@ -2,7 +2,7 @@
 # so I only need to overwrite some values
 inherits: dev

-app_name: "{{- .AppName }} Production"
+app_name: "{% app_name %} Production"
 host_port: 0.0.0.0:8080
 web_ui: false
@@ -82,7 +82,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {{ .AppNameSlug -}}_production
+  dbname: {% app_name_slug %}_production
   user: postgres
   password: postgres
   #pool_size: 10

View File

@ -11,9 +11,9 @@
// opt-in, read http://bit.ly/CRA-PWA // opt-in, read http://bit.ly/CRA-PWA
const isLocalhost = Boolean( const isLocalhost = Boolean(
window.location.hostname === "localhost" || window.location.hostname === 'localhost' ||
// [::1] is the IPv6 localhost address. // [::1] is the IPv6 localhost address.
window.location.hostname === "[::1]" || window.location.hostname === '[::1]' ||
// 127.0.0.1/8 is considered localhost for IPv4. // 127.0.0.1/8 is considered localhost for IPv4.
window.location.hostname.match( window.location.hostname.match(
/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/ /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
@ -21,7 +21,7 @@ const isLocalhost = Boolean(
); );
export function register(config) { export function register(config) {
if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) { if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
// The URL constructor is available in all browsers that support SW. // The URL constructor is available in all browsers that support SW.
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href); const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
if (publicUrl.origin !== window.location.origin) { if (publicUrl.origin !== window.location.origin) {
@ -31,7 +31,7 @@ export function register(config) {
return; return;
} }
window.addEventListener("load", () => { window.addEventListener('load', () => {
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`; const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
if (isLocalhost) { if (isLocalhost) {
@ -42,8 +42,8 @@ export function register(config) {
// service worker/PWA documentation. // service worker/PWA documentation.
navigator.serviceWorker.ready.then(() => { navigator.serviceWorker.ready.then(() => {
console.log( console.log(
"This web app is being served cache-first by a service " + 'This web app is being served cache-first by a service ' +
"worker. To learn more, visit http://bit.ly/CRA-PWA" 'worker. To learn more, visit http://bit.ly/CRA-PWA'
); );
}); });
} else { } else {
@ -57,21 +57,21 @@ export function register(config) {
function registerValidSW(swUrl, config) { function registerValidSW(swUrl, config) {
navigator.serviceWorker navigator.serviceWorker
.register(swUrl) .register(swUrl)
.then((registration) => { .then(registration => {
registration.onupdatefound = () => { registration.onupdatefound = () => {
const installingWorker = registration.installing; const installingWorker = registration.installing;
if (installingWorker == null) { if (installingWorker == null) {
return; return;
} }
installingWorker.onstatechange = () => { installingWorker.onstatechange = () => {
if (installingWorker.state === "installed") { if (installingWorker.state === 'installed') {
if (navigator.serviceWorker.controller) { if (navigator.serviceWorker.controller) {
// At this point, the updated precached content has been fetched, // At this point, the updated precached content has been fetched,
// but the previous service worker will still serve the older // but the previous service worker will still serve the older
// content until all client tabs are closed. // content until all client tabs are closed.
console.log( console.log(
"New content is available and will be used when all " + 'New content is available and will be used when all ' +
"tabs for this page are closed. See http://bit.ly/CRA-PWA." 'tabs for this page are closed. See http://bit.ly/CRA-PWA.'
); );
// Execute callback // Execute callback
@ -82,7 +82,7 @@ function registerValidSW(swUrl, config) {
// At this point, everything has been precached. // At this point, everything has been precached.
// It's the perfect time to display a // It's the perfect time to display a
// "Content is cached for offline use." message. // "Content is cached for offline use." message.
console.log("Content is cached for offline use."); console.log('Content is cached for offline use.');
// Execute callback // Execute callback
if (config && config.onSuccess) { if (config && config.onSuccess) {
@ -93,23 +93,23 @@ function registerValidSW(swUrl, config) {
}; };
}; };
}) })
.catch((error) => { .catch(error => {
console.error("Error during service worker registration:", error); console.error('Error during service worker registration:', error);
}); });
} }
function checkValidServiceWorker(swUrl, config) { function checkValidServiceWorker(swUrl, config) {
// Check if the service worker can be found. If it can't, reload the page. // Check if the service worker can be found. If it can't, reload the page.
fetch(swUrl) fetch(swUrl)
.then((response) => { .then(response => {
// Ensure service worker exists, and that we really are getting a JS file. // Ensure service worker exists, and that we really are getting a JS file.
const contentType = response.headers.get("content-type"); const contentType = response.headers.get('content-type');
if ( if (
response.status === 404 || response.status === 404 ||
(contentType != null && contentType.indexOf("javascript") === -1) (contentType != null && contentType.indexOf('javascript') === -1)
) { ) {
// No service worker found. Probably a different app. Reload the page. // No service worker found. Probably a different app. Reload the page.
navigator.serviceWorker.ready.then((registration) => { navigator.serviceWorker.ready.then(registration => {
registration.unregister().then(() => { registration.unregister().then(() => {
window.location.reload(); window.location.reload();
}); });
@ -121,14 +121,14 @@ function checkValidServiceWorker(swUrl, config) {
}) })
.catch(() => { .catch(() => {
console.log( console.log(
"No internet connection found. App is running in offline mode." 'No internet connection found. App is running in offline mode.'
); );
}); });
} }
export function unregister() { export function unregister() {
if ("serviceWorker" in navigator) { if ('serviceWorker' in navigator) {
navigator.serviceWorker.ready.then((registration) => { navigator.serviceWorker.ready.then(registration => {
registration.unregister(); registration.unregister();
}); });
} }
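
Note: the serviceWorker.js hunks above are purely stylistic, double quotes versus single quotes and parenthesized versus bare arrow-function parameters (most likely a formatter settings change); the registration logic is identical on both sides.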

View File

@ -1,13 +0,0 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/jsn
BenchmarkGet
BenchmarkGet-16 13898 85293 ns/op 3328 B/op 2 allocs/op
BenchmarkFilter
BenchmarkFilter-16 189328 6341 ns/op 448 B/op 1 allocs/op
BenchmarkStrip
BenchmarkStrip-16 219765 5543 ns/op 224 B/op 1 allocs/op
BenchmarkReplace
BenchmarkReplace-16 100899 12022 ns/op 416 B/op 1 allocs/op
PASS
ok github.com/dosco/super-graph/jsn 6.029s
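
Note: these figures are a checked-in run of go test -bench . -benchmem for the jsn package; the -16 suffix is the GOMAXPROCS value on the darwin/amd64 machine that produced them.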

View File

@ -3,7 +3,6 @@ package jsn
import ( import (
"bytes" "bytes"
"encoding/json" "encoding/json"
"errors"
"io" "io"
) )
@ -69,12 +68,7 @@ func Clear(w *bytes.Buffer, v []byte) error {
} }
io := int(dec.InputOffset()) io := int(dec.InputOffset())
s := io - len(v1) - 2 w.Write(v[io-len(v1)-2 : io])
if io <= s || s <= 0 {
return errors.New("invalid json")
}
w.Write(v[s:io])
w.WriteString(`:`) w.WriteString(`:`)
isValue = true isValue = true
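
Note: the master side of the Clear hunk adds a bounds check so malformed input returns an "invalid json" error instead of panicking on a bad slice. For context, Clear blanks every value while keeping the key structure; a small usage sketch, with the output shape following the TestClear case later in this diff:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/dosco/super-graph/jsn"
)

func main() {
	var buf bytes.Buffer
	in := []byte(`{"name":"hello","count":42,"tags":["a","b"]}`)
	if err := jsn.Clear(&buf, in); err != nil {
		log.Fatal(err)
	}
	// Strings are cleared to "", numbers to 0.0, scalar lists to [].
	fmt.Println(buf.String()) // expected: {"name":"","count":0.0,"tags":[]}
}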

View File

@ -2,19 +2,17 @@ package jsn
import ( import (
"bytes" "bytes"
"hash/maphash"
"github.com/cespare/xxhash/v2"
) )
// Filter function filters the JSON, keeping only the provided keys and removing all others // Filter function filters the JSON, keeping only the provided keys and removing all others
func Filter(w *bytes.Buffer, b []byte, keys []string) error { func Filter(w *bytes.Buffer, b []byte, keys []string) error {
var err error var err error
kmap := make(map[uint64]struct{}, len(keys)) kmap := make(map[uint64]struct{}, len(keys))
h := maphash.Hash{}
for i := range keys { for i := range keys {
_, _ = h.WriteString(keys[i]) kmap[xxhash.Sum64String(keys[i])] = struct{}{}
kmap[h.Sum64()] = struct{}{}
h.Reset()
} }
// is a list // is a list
@ -134,11 +132,7 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
cb := b[s:(e + 1)] cb := b[s:(e + 1)]
e = 0 e = 0
_, _ = h.Write(k) if _, ok := kmap[xxhash.Sum64(k)]; !ok {
_, ok := kmap[h.Sum64()]
h.Reset()
if !ok {
continue continue
} }
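
Note: this file's change is the hashing strategy for the key set: master hashes keys with the standard library's hash/maphash (randomly seeded per process), while v0.13.35 used the third-party xxhash package. The public behavior is the same either way; a short usage sketch modeled on TestFilter2 below:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/dosco/super-graph/jsn"
)

func main() {
	var b bytes.Buffer
	in := []byte(`[{"id":1,"amount":100},{"id":2,"amount":150}]`)
	// Keep only the "id" key in each object; every other key is dropped.
	if err := jsn.Filter(&b, in, []string{"id"}); err != nil {
		log.Fatal(err)
	}
	fmt.Println(b.String()) // expected: [{"id":1},{"id":2}]
}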

View File

@ -2,55 +2,10 @@
package jsn package jsn
import (
"bytes"
"errors"
)
func Fuzz(data []byte) int { func Fuzz(data []byte) int {
c := 0 if err := unifiedTest(data); err != nil {
return 0
if err := Validate(string(data)); err == nil {
c = 1
} }
if err := fuzzTest(data); err == nil { return 1
c = 1
}
return c
}
func fuzzTest(data []byte) error {
err1 := Validate(string(data))
var b1 bytes.Buffer
err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})
path1 := [][]byte{[]byte("data"), []byte("users")}
Strip(data, path1)
from := []Field{
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
{[]byte("__twitter_id"), []byte(`"ABC123"`)},
}
to := []Field{
{[]byte("__twitter_id"), []byte(`"1234567890"`)},
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
}
var b2 bytes.Buffer
err3 := Replace(&b2, data, from, to)
Keys(data)
var b3 bytes.Buffer
err4 := Clear(&b3, data)
if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
return errors.New("there was an error")
}
return nil
} }
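
Note: both versions keep the go-fuzz contract for the return value, where 1 marks the input as interesting to the fuzzer and 0 as ordinary. Master defines the fuzzTest helper here in fuzz.go (including a Clear pass), while v0.13.35 delegates to the unifiedTest helper added at the bottom of this diff.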

View File

@ -1,15 +1,9 @@
// +build gofuzz package jsn
package jsn_test
import ( import (
"testing" "testing"
"github.com/dosco/super-graph/jsn"
) )
var ret int
func TestFuzzCrashers(t *testing.T) { func TestFuzzCrashers(t *testing.T) {
var crashers = []string{ var crashers = []string{
"00\"0000\"0{", "00\"0000\"0{",
@ -58,16 +52,9 @@ func TestFuzzCrashers(t *testing.T) {
"0000\"0\"{", "0000\"0\"{",
"000\"000\"{", "000\"000\"{",
"\"00000000\"{", "\"00000000\"{",
`0000"00"00000000"000000000"00"000000000000000"00000"00000": "00"0"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
`0000"000000000000000000000000000000000000"00000000"000000000"00"000000000000000"00000"00000": "00000000000000"00000"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
`00"__twitter_id":[{ "name": "hello" }, { "name": "world"}]`,
"\"\xb0\xef\xbd\xe3\xbd\xef\x99\xe3\xbd\xef\xbd\xef\xbd\xef\xbd\xe5\x99\xe3\xbd" +
"\xef\x99\xe3\"",
"\"\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xe3\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xef\xe3" +
"\xe3\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\"",
} }
for _, f := range crashers { for _, f := range crashers {
ret = jsn.Fuzz([]byte(f)) _ = unifiedTest([]byte(f))
} }
} }

View File

@ -1,7 +1,7 @@
package jsn package jsn
import ( import (
"hash/maphash" "github.com/cespare/xxhash/v2"
) )
const ( const (
@ -41,12 +41,9 @@ func Value(b []byte) []byte {
// Get function fetches values for the provided keys // Get function fetches values for the provided keys
func Get(b []byte, keys [][]byte) []Field { func Get(b []byte, keys [][]byte) []Field {
kmap := make(map[uint64]struct{}, len(keys)) kmap := make(map[uint64]struct{}, len(keys))
h := maphash.Hash{}
for i := range keys { for i := range keys {
_, _ = h.Write(keys[i]) kmap[xxhash.Sum64(keys[i])] = struct{}{}
kmap[h.Sum64()] = struct{}{}
h.Reset()
} }
res := make([]Field, 0, 20) res := make([]Field, 0, 20)
@ -144,9 +141,7 @@ func Get(b []byte, keys [][]byte) []Field {
} }
if e != 0 { if e != 0 {
_, _ = h.Write(k) _, ok := kmap[xxhash.Sum64(k)]
_, ok := kmap[h.Sum64()]
h.Reset()
if ok { if ok {
res = append(res, Field{k, b[s:(e + 1)]}) res = append(res, Field{k, b[s:(e + 1)]})
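
Note: Get walks the whole tree and returns every match for the requested keys, as the TestGet cases below demonstrate. A short usage sketch:

package main

import (
	"fmt"

	"github.com/dosco/super-graph/jsn"
)

func main() {
	in := []byte(`{"id":1,"profile":{"__twitter_id":"ABCD"},"__twitter_id":"2048"}`)
	// Both the nested and the top-level occurrence are returned.
	for _, f := range jsn.Get(in, [][]byte{[]byte("__twitter_id")}) {
		fmt.Printf("%s => %s\n", f.Key, f.Value)
	}
}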

View File

@ -1,4 +1,4 @@
package jsn_test package jsn
import ( import (
"bytes" "bytes"
@ -6,8 +6,6 @@ import (
"io/ioutil" "io/ioutil"
"strings" "strings"
"testing" "testing"
"github.com/dosco/super-graph/jsn"
) )
var ( var (
@ -173,13 +171,13 @@ var (
) )
func TestGet(t *testing.T) { func TestGet(t *testing.T) {
values := jsn.Get([]byte(input1), [][]byte{ values := Get([]byte(input1), [][]byte{
[]byte("test_1a"), []byte("test_1a"),
[]byte("__twitter_id"), []byte("__twitter_id"),
[]byte("work_email"), []byte("work_email"),
}) })
expected := []jsn.Field{ expected := []Field{
{[]byte("test_1a"), []byte(`{ "__twitter_id": "ABCD" }`)}, {[]byte("test_1a"), []byte(`{ "__twitter_id": "ABCD" }`)},
{[]byte("__twitter_id"), []byte(`"ABCD"`)}, {[]byte("__twitter_id"), []byte(`"ABCD"`)},
{[]byte("__twitter_id"), []byte(`"2048666903444506956"`)}, {[]byte("__twitter_id"), []byte(`"2048666903444506956"`)},
@ -216,11 +214,11 @@ func TestGet(t *testing.T) {
} }
func TestGet1(t *testing.T) { func TestGet1(t *testing.T) {
values := jsn.Get([]byte(input5), [][]byte{ values := Get([]byte(input5), [][]byte{
[]byte("thread_slug"), []byte("thread_slug"),
}) })
expected := []jsn.Field{ expected := []Field{
{[]byte("thread_slug"), []byte(`"in-september-2018-slovak-police-stated-that-kuciak-7929"`)}, {[]byte("thread_slug"), []byte(`"in-september-2018-slovak-police-stated-that-kuciak-7929"`)},
} }
@ -240,11 +238,11 @@ func TestGet1(t *testing.T) {
} }
func TestGet2(t *testing.T) { func TestGet2(t *testing.T) {
values := jsn.Get([]byte(input6), [][]byte{ values := Get([]byte(input6), [][]byte{
[]byte("users_cursor"), []byte("threads_cursor"), []byte("users_cursor"), []byte("threads_cursor"),
}) })
expected := []jsn.Field{ expected := []Field{
{[]byte("threads_cursor"), []byte(`null`)}, {[]byte("threads_cursor"), []byte(`null`)},
{[]byte("threads_cursor"), []byte(`25`)}, {[]byte("threads_cursor"), []byte(`25`)},
{[]byte("users_cursor"), []byte(`3`)}, {[]byte("users_cursor"), []byte(`3`)},
@ -266,7 +264,7 @@ func TestGet2(t *testing.T) {
} }
func TestGet3(t *testing.T) { func TestGet3(t *testing.T) {
values := jsn.Get(input7, [][]byte{[]byte("data")}) values := Get(input7, [][]byte{[]byte("data")})
v := values[0].Value v := values[0].Value
if !bytes.Equal(v[len(v)-11:], []byte(`Rangnekar"}`)) { if !bytes.Equal(v[len(v)-11:], []byte(`Rangnekar"}`)) {
@ -279,7 +277,7 @@ func TestGet4(t *testing.T) {
exp = strings.ReplaceAll(exp, "@", "`") exp = strings.ReplaceAll(exp, "@", "`")
values := jsn.Get(input8, [][]byte{[]byte("body")}) values := Get(input8, [][]byte{[]byte("body")})
if string(values[0].Key) != "body" { if string(values[0].Key) != "body" {
t.Fatal("unexpected key") t.Fatal("unexpected key")
@ -293,29 +291,29 @@ func TestGet4(t *testing.T) {
func TestValue(t *testing.T) { func TestValue(t *testing.T) {
v1 := []byte("12345") v1 := []byte("12345")
if !bytes.Equal(jsn.Value(v1), v1) { if !bytes.Equal(Value(v1), v1) {
t.Fatal("Number value invalid") t.Fatal("Number value invalid")
} }
v2 := []byte(`"12345"`) v2 := []byte(`"12345"`)
if !bytes.Equal(jsn.Value(v2), []byte(`12345`)) { if !bytes.Equal(Value(v2), []byte(`12345`)) {
t.Fatal("String value invalid") t.Fatal("String value invalid")
} }
v3 := []byte(`{ "hello": "world" }`) v3 := []byte(`{ "hello": "world" }`)
if jsn.Value(v3) != nil { if Value(v3) != nil {
t.Fatal("Object value is not nil", jsn.Value(v3)) t.Fatal("Object value is not nil", Value(v3))
} }
v4 := []byte(`[ "hello", "world" ]`) v4 := []byte(`[ "hello", "world" ]`)
if jsn.Value(v4) != nil { if Value(v4) != nil {
t.Fatal("List value is not nil") t.Fatal("List value is not nil")
} }
} }
func TestFilter1(t *testing.T) { func TestFilter1(t *testing.T) {
var b bytes.Buffer var b bytes.Buffer
err := jsn.Filter(&b, []byte(input2), []string{"id", "full_name", "embed"}) err := Filter(&b, []byte(input2), []string{"id", "full_name", "embed"})
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -331,7 +329,7 @@ func TestFilter2(t *testing.T) {
value := `[{"id":1,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":100,"amount_refunded":0,"date":"01/01/2019","application":null,"billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}, {"id":2,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":0,"date":"02/18/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}},{"id":3,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":50,"date":"03/21/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}]` value := `[{"id":1,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":100,"amount_refunded":0,"date":"01/01/2019","application":null,"billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}, {"id":2,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":0,"date":"02/18/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}},{"id":3,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":50,"date":"03/21/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}]`
var b bytes.Buffer var b bytes.Buffer
err := jsn.Filter(&b, []byte(value), []string{"id"}) err := Filter(&b, []byte(value), []string{"id"})
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -345,7 +343,7 @@ func TestFilter2(t *testing.T) {
func TestStrip(t *testing.T) { func TestStrip(t *testing.T) {
path1 := [][]byte{[]byte("data"), []byte("users")} path1 := [][]byte{[]byte("data"), []byte("users")}
value1 := jsn.Strip([]byte(input3), path1) value1 := Strip([]byte(input3), path1)
expected := []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`) expected := []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)
@ -355,7 +353,7 @@ func TestStrip(t *testing.T) {
} }
path2 := [][]byte{[]byte("boo"), []byte("hoo")} path2 := [][]byte{[]byte("boo"), []byte("hoo")}
value2 := jsn.Strip([]byte(input3), path2) value2 := Strip([]byte(input3), path2)
if !bytes.Equal(value2, []byte(input3)) { if !bytes.Equal(value2, []byte(input3)) {
t.Log(value2) t.Log(value2)
@ -366,7 +364,7 @@ func TestStrip(t *testing.T) {
func TestValidateTrue(t *testing.T) { func TestValidateTrue(t *testing.T) {
json := []byte(` [{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`) json := []byte(` [{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)
err := jsn.Validate(string(json)) err := Validate(string(json))
if err != nil { if err != nil {
t.Error(err) t.Error(err)
} }
@ -375,7 +373,7 @@ func TestValidateTrue(t *testing.T) {
func TestValidateFalse(t *testing.T) { func TestValidateFalse(t *testing.T) {
json := []byte(` [{ "hello": 123"<html>}]`) json := []byte(` [{ "hello": 123"<html>}]`)
err := jsn.Validate(string(json)) err := Validate(string(json))
if err == nil { if err == nil {
t.Error("JSON validation failed to detect invalid json") t.Error("JSON validation failed to detect invalid json")
} }
@ -384,12 +382,12 @@ func TestValidateFalse(t *testing.T) {
func TestReplace(t *testing.T) { func TestReplace(t *testing.T) {
var buf bytes.Buffer var buf bytes.Buffer
from := []jsn.Field{ from := []Field{
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)}, {[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
{[]byte("__twitter_id"), []byte(`"ABC123"`)}, {[]byte("__twitter_id"), []byte(`"ABC123"`)},
} }
to := []jsn.Field{ to := []Field{
{[]byte("__twitter_id"), []byte(`"1234567890"`)}, {[]byte("__twitter_id"), []byte(`"1234567890"`)},
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)}, {[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
} }
@ -414,7 +412,7 @@ func TestReplace(t *testing.T) {
"__twitter_id":"1234567890" "__twitter_id":"1234567890"
}] }` }] }`
err := jsn.Replace(&buf, []byte(input4), from, to) err := Replace(&buf, []byte(input4), from, to)
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -430,7 +428,7 @@ func TestReplaceEmpty(t *testing.T) {
json := `{ "users" : [{"id":1,"full_name":"Sidney St[1]roman","email":"user0@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":2,"full_name":"Jerry Dickinson","email":"user1@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":3,"full_name":"Kenna Cassin","email":"user2@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":4,"full_name":"Mr. Pat Parisian","email":"rodney@kautzer.biz","__users_twitter_id":"2048666903444506956"}, {"id":5,"full_name":"Bette Ebert","email":"janeenrath@goyette.com","__users_twitter_id":"2048666903444506956"}, {"id":6,"full_name":"Everett Kiehn","email":"michael@bartoletti.com","__users_twitter_id":"2048666903444506956"}, {"id":7,"full_name":"Katrina Cronin","email":"loretaklocko@framivolkman.org","__users_twitter_id":"2048666903444506956"}, {"id":8,"full_name":"Caroll Orn Sr.","email":"joannarau@hegmann.io","__users_twitter_id":"2048666903444506956"}, {"id":9,"full_name":"Gwendolyn Ziemann","email":"renaytoy@rutherford.co","__users_twitter_id":"2048666903444506956"}, {"id":10,"full_name":"Mrs. Rosann Fritsch","email":"holliemosciski@thiel.org","__users_twitter_id":"2048666903444506956"}, {"id":11,"full_name":"Arden Koss","email":"cristobalankunding@howewelch.org","__users_twitter_id":"2048666903444506956"}, {"id":12,"full_name":"Brenton Bauch PhD","email":"renee@miller.co","__users_twitter_id":"2048666903444506956"}, {"id":13,"full_name":"Daine Gleichner","email":"andrea@nienow.co","__users_twitter_id":"2048666903444506956"}] }` json := `{ "users" : [{"id":1,"full_name":"Sidney St[1]roman","email":"user0@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":2,"full_name":"Jerry Dickinson","email":"user1@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":3,"full_name":"Kenna Cassin","email":"user2@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":4,"full_name":"Mr. Pat Parisian","email":"rodney@kautzer.biz","__users_twitter_id":"2048666903444506956"}, {"id":5,"full_name":"Bette Ebert","email":"janeenrath@goyette.com","__users_twitter_id":"2048666903444506956"}, {"id":6,"full_name":"Everett Kiehn","email":"michael@bartoletti.com","__users_twitter_id":"2048666903444506956"}, {"id":7,"full_name":"Katrina Cronin","email":"loretaklocko@framivolkman.org","__users_twitter_id":"2048666903444506956"}, {"id":8,"full_name":"Caroll Orn Sr.","email":"joannarau@hegmann.io","__users_twitter_id":"2048666903444506956"}, {"id":9,"full_name":"Gwendolyn Ziemann","email":"renaytoy@rutherford.co","__users_twitter_id":"2048666903444506956"}, {"id":10,"full_name":"Mrs. Rosann Fritsch","email":"holliemosciski@thiel.org","__users_twitter_id":"2048666903444506956"}, {"id":11,"full_name":"Arden Koss","email":"cristobalankunding@howewelch.org","__users_twitter_id":"2048666903444506956"}, {"id":12,"full_name":"Brenton Bauch PhD","email":"renee@miller.co","__users_twitter_id":"2048666903444506956"}, {"id":13,"full_name":"Daine Gleichner","email":"andrea@nienow.co","__users_twitter_id":"2048666903444506956"}] }`
err := jsn.Replace(&buf, []byte(json), []jsn.Field{}, []jsn.Field{}) err := Replace(&buf, []byte(json), []Field{}, []Field{})
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -444,7 +442,7 @@ func TestReplaceEmpty(t *testing.T) {
func TestKeys1(t *testing.T) { func TestKeys1(t *testing.T) {
json := `[{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]},{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]},{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]}]` json := `[{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]},{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]},{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]}]`
fields := jsn.Keys([]byte(json)) fields := Keys([]byte(json))
exp := []string{ exp := []string{
"id", "posts", "title", "description", "full_name", "email", "books", "name", "description", "id", "posts", "title", "description", "full_name", "email", "books", "name", "description",
@ -464,7 +462,7 @@ func TestKeys1(t *testing.T) {
func TestKeys2(t *testing.T) { func TestKeys2(t *testing.T) {
json := `{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]}` json := `{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]}`
fields := jsn.Keys([]byte(json)) fields := Keys([]byte(json))
exp := []string{ exp := []string{
"id", "posts", "title", "description", "full_name", "email", "books", "name", "description", "id", "posts", "title", "description", "full_name", "email", "books", "name", "description",
@ -493,7 +491,7 @@ func TestKeys3(t *testing.T) {
"user": 123 "user": 123
}` }`
fields := jsn.Keys([]byte(json)) fields := Keys([]byte(json))
exp := []string{ exp := []string{
"insert", "created_at", "test_1a", "type1", "type2", "name", "updated_at", "description", "insert", "created_at", "test_1a", "type1", "type2", "name", "updated_at", "description",
@ -528,7 +526,7 @@ func TestClear(t *testing.T) {
expected := `{"insert":{"created_at":"","test_1a":{"type1":"","type2":[{"a":0.0}]},"name":"","updated_at":"","description":""},"user":0.0,"tags":[]}` expected := `{"insert":{"created_at":"","test_1a":{"type1":"","type2":[{"a":0.0}]},"name":"","updated_at":"","description":""},"user":0.0,"tags":[]}`
err := jsn.Clear(&buf, []byte(json)) err := Clear(&buf, []byte(json))
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
} }
@ -543,7 +541,7 @@ func BenchmarkGet(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {
jsn.Get([]byte(input1), [][]byte{[]byte("__twitter_id")}) Get([]byte(input1), [][]byte{[]byte("__twitter_id")})
} }
} }
@ -555,7 +553,7 @@ func BenchmarkFilter(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {
err := jsn.Filter(&buf, []byte(input2), keys) err := Filter(&buf, []byte(input2), keys)
if err != nil { if err != nil {
b.Fatal(err) b.Fatal(err)
} }
@ -568,19 +566,19 @@ func BenchmarkStrip(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {
jsn.Strip([]byte(input3), path) Strip([]byte(input3), path)
} }
} }
func BenchmarkReplace(b *testing.B) { func BenchmarkReplace(b *testing.B) {
var buf bytes.Buffer var buf bytes.Buffer
from := []jsn.Field{ from := []Field{
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)}, {[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
{[]byte("__twitter_id"), []byte(`"ABC123"`)}, {[]byte("__twitter_id"), []byte(`"ABC123"`)},
} }
to := []jsn.Field{ to := []Field{
{[]byte("__twitter_id"), []byte(`"1234567890"`)}, {[]byte("__twitter_id"), []byte(`"1234567890"`)},
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)}, {[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
} }
@ -589,7 +587,7 @@ func BenchmarkReplace(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {
err := jsn.Replace(&buf, []byte(input4), from, to) err := Replace(&buf, []byte(input4), from, to)
if err != nil { if err != nil {
b.Fatal(err) b.Fatal(err)
} }

View File

@ -3,7 +3,8 @@ package jsn
import ( import (
"bytes" "bytes"
"errors" "errors"
"hash/maphash"
"github.com/cespare/xxhash/v2"
) )
// Replace function replaces key-value pairs provided in the `from` argument with those in the `to` argument // Replace function replaces key-value pairs provided in the `from` argument with those in the `to` argument
@ -17,7 +18,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
return err return err
} }
h := maphash.Hash{} h := xxhash.New()
tmap := make(map[uint64]int, len(from)) tmap := make(map[uint64]int, len(from))
for i, f := range from { for i, f := range from {
@ -132,18 +133,9 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
if e != 0 { if e != 0 {
e++ e++
if e <= s {
return errors.New("invalid json")
}
if _, err := h.Write(b[s:e]); err != nil { if _, err := h.Write(b[s:e]); err != nil {
return err return err
} }
if (we + 1) <= ws {
return errors.New("invalid json")
}
n, ok := tmap[h.Sum64()] n, ok := tmap[h.Sum64()]
h.Reset() h.Reset()
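
Note: the master side of Replace adds the same kind of bounds checks as in Clear, returning an "invalid json" error where a malformed slice could previously panic. Replace swaps the key-value pairs listed in from for the corresponding entries in to, and the replacement may rename the key, as TestReplace above shows; a small usage sketch:

package main

import (
	"bytes"
	"fmt"
	"log"

	"github.com/dosco/super-graph/jsn"
)

func main() {
	var buf bytes.Buffer
	in := []byte(`{"posts":{"__twitter_id":"ABC123"}}`)
	from := []jsn.Field{{[]byte("__twitter_id"), []byte(`"ABC123"`)}}
	to := []jsn.Field{{[]byte("twitter_id"), []byte(`"1234567890"`)}}
	if err := jsn.Replace(&buf, in, from, to); err != nil {
		log.Fatal(err)
	}
	fmt.Println(buf.String()) // expected: {"posts":{"twitter_id":"1234567890"}}
}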

37
jsn/test.go Normal file
View File

@ -0,0 +1,37 @@
package jsn
import (
"bytes"
"errors"
)
func unifiedTest(data []byte) error {
err1 := Validate(string(data))
var b1 bytes.Buffer
err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})
path1 := [][]byte{[]byte("data"), []byte("users")}
Strip(data, path1)
from := []Field{
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
{[]byte("__twitter_id"), []byte(`"ABC123"`)},
}
to := []Field{
{[]byte("__twitter_id"), []byte(`"1234567890"`)},
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
}
var b2 bytes.Buffer
err3 := Replace(&b2, data, from, to)
Keys(data)
if err1 != nil || err2 != nil || err3 != nil {
return errors.New("there was an error")
}
return nil
}
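
Note: this new helper is shared on the v0.13.35 side by both the Fuzz entry point and TestFuzzCrashers. It exercises Validate, Filter, Strip, Replace, and Keys, but unlike master's fuzzTest it never calls Clear.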