Compare commits
28 Commits
7557a4c29c
dd4accfdd2
06214a3850
7b5548a2c6
00cfa251a2
9f35f85857
f4f6420a30
6716b97a39
7169dd65f5
b26cdbf960
33f3fefbf3
a775f9475b
bd157290f6
82cc712a93
0ce129de14
1a15e433ba
816121fbcf
e82e97a9d7
6102f1d66e
701b2f3bfd
bac89d8301
b3dfb2bc7b
1fb7f0e6c8
2241364d00
f63e270c73
ccab367351
67ddc148a9
31afdac3af
.gitignore (vendored): 1 change

@@ -38,4 +38,5 @@ release
 .gofuzz
 *-fuzz.zip
 *.test
+.firebase
@@ -12,8 +12,7 @@ FROM golang:1.14-alpine as go-build
 RUN apk update && \
     apk add --no-cache make && \
     apk add --no-cache git && \
-    apk add --no-cache jq && \
-    apk add --no-cache upx=3.95-r2
+    apk add --no-cache jq

 RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc

@@ -29,9 +28,9 @@ COPY --from=react-build /web/build/ ./internal/serv/web/build

 RUN go mod vendor
 RUN make build
-RUN echo "Compressing binary, will take a bit of time..." && \
-  upx --ultra-brute -qq super-graph && \
-  upx -t super-graph
+# RUN echo "Compressing binary, will take a bit of time..." && \
+#   upx --ultra-brute -qq super-graph && \
+#   upx -t super-graph
@@ -52,7 +52,9 @@ func main() {
 	}
 }`

-	res, err := sg.GraphQL(context.Background(), query, nil)
+	ctx = context.WithValue(ctx, core.UserIDKey, 1)
+
+	res, err := sg.GraphQL(ctx, query, nil)
 	if err != nil {
 		log.Fatal(err)
 	}

@@ -90,7 +92,7 @@ This compiler is what sits at the heart of Super Graph, with layers of useful fu
 - Fuzz tested for security
 - Database migrations tool
 - Database seeding tool
-- Works with Postgres and YugabyteDB
+- Works with Postgres and Yugabyte DB
 - OpenCensus Support: Zipkin, Prometheus, X-Ray, Stackdriver

 ## Documentation
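The README change above moves the example from a bare `context.Background()` to a context carrying the user ID. A minimal sketch of the new calling pattern — the config path, connection string, and driver import are illustrative placeholders; `core.UserIDKey`, `sg.GraphQL`, and `ReadInConfig` are taken from this diff:

```go
package main

import (
	"context"
	"database/sql"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib" // any database/sql Postgres driver works
)

func main() {
	db, err := sql.Open("pgx", "postgres://postgres@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	conf, err := core.ReadInConfig("./config/dev.yml") // path is illustrative
	if err != nil {
		log.Fatal(err)
	}

	sg, err := core.NewSuperGraph(conf, db)
	if err != nil {
		log.Fatal(err)
	}

	// Attach the authenticated user's ID; Super Graph uses it for the
	// $user_id variable and for role resolution.
	ctx := context.WithValue(context.Background(), core.UserIDKey, 1)

	res, err := sg.GraphQL(ctx, `query { posts { id title } }`, nil)
	if err != nil {
		log.Fatal(err)
	}
	log.Println(string(res.Data))
}
```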
core/api.go: 14 changes

@@ -32,7 +32,9 @@
 	}
 }`

-	res, err := sg.GraphQL(context.Background(), query, nil)
+	ctx = context.WithValue(ctx, core.UserIDKey, 1)
+
+	res, err := sg.GraphQL(ctx, query, nil)
 	if err != nil {
 		log.Fatal(err)
 	}

@@ -47,6 +49,7 @@ import (
 	"crypto/sha256"
 	"database/sql"
 	"encoding/json"
+	"hash/maphash"
 	_log "log"
 	"os"

@@ -81,12 +84,12 @@ type SuperGraph struct {
 	schema      *psql.DBSchema
 	allowList   *allow.List
 	encKey      [32]byte
-	prepared    map[string]*preparedItem
+	hashSeed    maphash.Seed
+	queries     map[uint64]*query
 	roles       map[string]*Role
 	getRole     *sql.Stmt
-	rmap        map[uint64]*resolvFn
+	rmap        map[uint64]resolvFn
 	abacEnabled bool
 	anonExists  bool
 	qc          *qcode.Compiler
 	pc          *psql.Compiler
 	ge          *graphql.Engine

@@ -109,6 +112,7 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
 		db:       db,
 		dbinfo:   dbinfo,
 		log:      _log.New(os.Stdout, "", 0),
+		hashSeed: maphash.MakeSeed(),
 	}

 	if err := sg.initConfig(); err != nil {

@@ -135,7 +139,7 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
 		return nil, err
 	}

-	if len(conf.SecretKey) != 0 {
+	if conf.SecretKey != "" {
 		sk := sha256.Sum256([]byte(conf.SecretKey))
 		conf.SecretKey = ""
 		sg.encKey = sk
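core/api.go replaces the string-keyed `prepared` map with a uint64-keyed `queries` map built on the standard library's `hash/maphash`, seeded once per process via the new `hashSeed` field. The stdlib half of that pattern looks like this; `queryID` here is a stand-in for the helper the diff calls in `resolvePreparedSQL`, whose exact body is not shown:

```go
package main

import (
	"fmt"
	"hash/maphash"
)

// queryID reduces a (name, role) pair to a uint64 map key. The seed
// must be created once (maphash.MakeSeed() at startup) and reused for
// every lookup, or identical inputs would hash to different keys.
func queryID(h *maphash.Hash, name, role string) uint64 {
	h.WriteString(name)
	h.WriteString(role)
	v := h.Sum64()
	h.Reset()
	return v
}

func main() {
	seed := maphash.MakeSeed()

	h := maphash.Hash{}
	h.SetSeed(seed)

	id := queryID(&h, "getPosts", "user")
	fmt.Printf("query id: %d\n", id)
}
```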
core/args.go: 130 changes

@@ -1,72 +1,19 @@
 package core

 import (
-	"bytes"
 	"encoding/json"
 	"fmt"
-	"io"

 	"github.com/dosco/super-graph/core/internal/psql"
-	"github.com/dosco/super-graph/jsn"
 )

-// argMap function is used to string replace variables with values by
-// the fasttemplate code
-func (c *scontext) argMap() func(w io.Writer, tag string) (int, error) {
-	return func(w io.Writer, tag string) (int, error) {
-		switch tag {
-		case "user_id_provider":
-			if v := c.Value(UserIDProviderKey); v != nil {
-				return io.WriteString(w, v.(string))
-			}
-			return 0, argErr("user_id_provider")
-
-		case "user_id":
-			if v := c.Value(UserIDKey); v != nil {
-				return io.WriteString(w, v.(string))
-			}
-			return 0, argErr("user_id")
-
-		case "user_role":
-			if v := c.Value(UserRoleKey); v != nil {
-				return io.WriteString(w, v.(string))
-			}
-			return 0, argErr("user_role")
-		}
-
-		fields := jsn.Get(c.vars, [][]byte{[]byte(tag)})
-
-		if len(fields) == 0 {
-			return 0, argErr(tag)
-		}
-		v := fields[0].Value
-
-		// Open and close quotes
-		if len(v) >= 2 && v[0] == '"' && v[len(v)-1] == '"' {
-			fields[0].Value = v[1 : len(v)-1]
-		}
-
-		if tag == "cursor" {
-			if bytes.EqualFold(v, []byte("null")) {
-				return io.WriteString(w, ``)
-			}
-			v1, err := c.sg.decrypt(string(fields[0].Value))
-			if err != nil {
-				return 0, err
-			}
-
-			return w.Write(v1)
-		}
-
-		return w.Write(escSQuote(fields[0].Value))
-	}
-}
-
 // argList function is used to create a list of arguments to pass
-// to a prepared statement. FYI no escaping of single quotes is
-// needed here
-func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
-	vars := make([]interface{}, len(args))
+// to a prepared statement.
+func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
+	params := md.Params()
+	vars := make([]interface{}, len(params))

 	var fields map[string]json.RawMessage
 	var err error

@@ -79,31 +26,30 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
 		}
 	}

-	for i := range args {
-		av := args[i]
-		switch {
-		case bytes.Equal(av, []byte("user_id")):
+	for i, p := range params {
+		switch p.Name {
+		case "user_id":
 			if v := c.Value(UserIDKey); v != nil {
 				vars[i] = v.(string)
 			} else {
-				return nil, argErr("user_id")
+				return nil, argErr(p)
 			}

-		case bytes.Equal(av, []byte("user_id_provider")):
+		case "user_id_provider":
 			if v := c.Value(UserIDProviderKey); v != nil {
 				vars[i] = v.(string)
 			} else {
-				return nil, argErr("user_id_provider")
+				return nil, argErr(p)
 			}

-		case bytes.Equal(av, []byte("user_role")):
+		case "user_role":
 			if v := c.Value(UserRoleKey); v != nil {
 				vars[i] = v.(string)
 			} else {
-				return nil, argErr("user_role")
+				return nil, argErr(p)
 			}

-		case bytes.Equal(av, []byte("cursor")):
+		case "cursor":
 			if v, ok := fields["cursor"]; ok && v[0] == '"' {
 				v1, err := c.sg.decrypt(string(v[1 : len(v)-1]))
 				if err != nil {

@@ -111,25 +57,33 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
 				}
 				vars[i] = v1
 			} else {
-				return nil, argErr("cursor")
+				return nil, argErr(p)
 			}

 		default:
-			if v, ok := fields[string(av)]; ok {
+			if v, ok := fields[p.Name]; ok {
+				switch {
+				case p.IsArray && v[0] != '[':
+					return nil, fmt.Errorf("variable '%s' should be an array of type '%s'", p.Name, p.Type)
+
+				case p.Type == "json" && v[0] != '[' && v[0] != '{':
+					return nil, fmt.Errorf("variable '%s' should be an array or object", p.Name)
+				}
+
+				switch v[0] {
+				case '[', '{':
+					vars[i] = v
+
+				default:
+					var val interface{}
+					if err := json.Unmarshal(v, &val); err != nil {
+						return nil, err
+					}
+
+					vars[i] = val
+				}
+
 			} else {
-				return nil, argErr(string(av))
+				return nil, argErr(p)
 			}
 		}
 	}

@@ -137,32 +91,6 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
 	return vars, nil
 }

-//
-func escSQuote(b []byte) []byte {
-	var buf *bytes.Buffer
-	s := 0
-	for i := range b {
-		if b[i] == '\'' {
-			if buf == nil {
-				buf = &bytes.Buffer{}
-			}
-			buf.Write(b[s:i])
-			buf.WriteString(`''`)
-			s = i + 1
-		}
-	}
-
-	if buf == nil {
-		return b
-	}
-
-	l := len(b)
-	if s < (l - 1) {
-		buf.Write(b[s:l])
-	}
-	return buf.Bytes()
-}
-
-func argErr(name string) error {
-	return fmt.Errorf("query requires variable '%s' to be set", name)
+func argErr(p psql.Param) error {
+	return fmt.Errorf("required variable '%s' of type '%s' must be set", p.Name, p.Type)
 }
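The rewritten `argList` resolves each `psql.Param` by name and hands plain values to the driver, which is why `escSQuote` and its test are deleted in this diff: with parameterized statements, values travel out of band and quote-doubling is unnecessary. A small `database/sql` sketch of the principle (driver and connection string are illustrative):

```go
package main

import (
	"database/sql"
	"log"

	_ "github.com/jackc/pgx/v4/stdlib" // driver choice is illustrative
)

func main() {
	db, err := sql.Open("pgx", "postgres://localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	// Before: values were spliced into the SQL text, so single quotes
	// had to be doubled by escSQuote. With a $1 placeholder the driver
	// binds the value separately and no escaping is needed.
	name := "O'Reilly" // would have broken naive string splicing

	var id int
	row := db.QueryRow(`SELECT id FROM users WHERE full_name = $1`, name)
	if err := row.Scan(&id); err != nil {
		log.Fatal(err)
	}
	log.Println("user id:", id)
}
```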
|
@ -1,13 +0,0 @@
|
||||
package core
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestEscQuote(t *testing.T) {
|
||||
val := "That's the worst, don''t be calling me's again"
|
||||
exp := "That''s the worst, don''''t be calling me''s again"
|
||||
ret := escSQuote([]byte(val))
|
||||
|
||||
if exp != string(ret) {
|
||||
t.Errorf("escSQuote failed: %s", string(ret))
|
||||
}
|
||||
}
|
@@ -14,7 +14,7 @@ import (
 type stmt struct {
 	role    *Role
 	qc      *qcode.QCode
-	skipped uint32
+	md      psql.Metadata
 	sql     string
 }

@@ -62,12 +62,11 @@ func (sg *SuperGraph) buildRoleStmt(query, vars []byte, role string) ([]stmt, er
 	stmts := []stmt{stmt{role: ro, qc: qc}}
 	w := &bytes.Buffer{}

-	skipped, err := sg.pc.Compile(qc, w, psql.Variables(vm))
+	stmts[0].md, err = sg.pc.Compile(w, qc, psql.Variables(vm))
 	if err != nil {
 		return nil, err
 	}

-	stmts[0].skipped = skipped
 	stmts[0].sql = w.String()

 	return stmts, nil

@@ -83,12 +82,13 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 		}
 	}

-	if len(sg.conf.RolesQuery) == 0 {
+	if sg.conf.RolesQuery == "" {
 		return nil, errors.New("roles_query not defined")
 	}

 	stmts := make([]stmt, 0, len(sg.conf.Roles))
 	w := &bytes.Buffer{}
+	md := psql.Metadata{}

 	for i := 0; i < len(sg.conf.Roles); i++ {
 		role := &sg.conf.Roles[i]

@@ -104,19 +104,20 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 		}

 		stmts = append(stmts, stmt{role: role, qc: qc})
+		s := &stmts[len(stmts)-1]

-		skipped, err := sg.pc.Compile(qc, w, psql.Variables(vm))
+		md, err = sg.pc.CompileWithMetadata(w, qc, psql.Variables(vm), md)
 		if err != nil {
 			return nil, err
 		}

-		s := &stmts[len(stmts)-1]
-		s.skipped = skipped
 		s.sql = w.String()
+		s.md = md

 		w.Reset()
 	}

-	sql, err := sg.renderUserQuery(stmts)
+	sql, err := sg.renderUserQuery(md, stmts)
 	if err != nil {
 		return nil, err
 	}

@@ -126,13 +127,13 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 	}

 //nolint: errcheck
-func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
+func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, error) {
 	w := &bytes.Buffer{}

 	io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)

 	for _, s := range stmts {
-		if len(s.role.Match) == 0 &&
+		if s.role.Match == "" &&
 			s.role.Name != "user" && s.role.Name != "anon" {
 			continue
 		}

@@ -144,12 +145,12 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
 	}

 	io.WriteString(w, `END) FROM (SELECT (CASE WHEN EXISTS (`)
-	io.WriteString(w, sg.conf.RolesQuery)
+	md.RenderVar(w, sg.conf.RolesQuery)
 	io.WriteString(w, `) THEN `)

 	io.WriteString(w, `(SELECT (CASE`)
 	for _, s := range stmts {
-		if len(s.role.Match) == 0 {
+		if s.role.Match == "" {
 			continue
 		}
 		io.WriteString(w, ` WHEN `)

@@ -160,7 +161,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
 	}

 	io.WriteString(w, ` ELSE 'user' END) FROM (`)
-	io.WriteString(w, sg.conf.RolesQuery)
+	md.RenderVar(w, sg.conf.RolesQuery)
 	io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
 	io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler") AS "_sg_auth_info"(role) LIMIT 1; `)
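For context, `renderUserQuery` stitches the per-role statements into one SQL statement that picks the right branch at execution time. A sketch of the overall shape it emits, reconstructed from the `io.WriteString` calls above — role names, match conditions, and the inner statements are placeholders:

```go
package main

import "fmt"

func main() {
	// Shape of the SQL assembled by renderUserQuery; <roles_query>,
	// <match>, and the (...) statements stand in for compiled output.
	sql := `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role"
  WHEN 'admin' THEN (...)  -- statement compiled for role admin
  WHEN 'user'  THEN (...)  -- statement compiled for role user
  WHEN 'anon'  THEN (...)  -- statement compiled for role anon
END) FROM (SELECT (CASE WHEN EXISTS (<roles_query>) THEN
  (SELECT (CASE WHEN <match> THEN 'admin' ELSE 'user' END)
   FROM (<roles_query>) AS "_sg_auth_roles_query" LIMIT 1)
ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler")
AS "_sg_auth_info"(role) LIMIT 1;`

	fmt.Println(sql)
}
```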
core/config.go: 101 changes

@@ -30,12 +30,10 @@ type Config struct {
 	// or other database functions
 	SetUserID bool `mapstructure:"set_user_id"`

-	// DefaultAllow reverses the blocked by default behaviour for queries in
-	// anonymous mode. (anon role)
-	// For example if the table `users` is not listed under the anon role then
-	// access to it would by default for unauthenticated queries this reverses
-	// this behavior (!!! Use with caution !!!!)
-	DefaultAllow bool `mapstructure:"default_allow"`
+	// DefaultBlock ensures that in anonymous mode (role 'anon') all tables
+	// are blocked from queries and mutations. To open access to tables in
+	// anonymous mode they have to be added to the 'anon' role config.
+	DefaultBlock bool `mapstructure:"default_block"`

 	// Vars is a map of hardcoded variables that can be leveraged in your
 	// queries (eg variable admin_id will be $admin_id in the query)

@@ -57,6 +55,9 @@ type Config struct {
 	// Roles contains all the configuration for all the roles you want to support
 	// `user` and `anon` are two default roles. User role is for when a user ID is
 	// available and Anon when it's not.
+	//
+	// If you're using the RolesQuery config to enable attribute based access control
+	// then you can add more custom roles.
 	Roles []Role

 	// Inflections is to add additional singular to plural mappings

@@ -71,6 +72,7 @@ type Config struct {
 type Table struct {
 	Name      string
 	Table     string
+	Type      string
 	Blocklist []string
 	Remotes   []Remote
 	Columns   []Column

@@ -108,12 +110,12 @@ type Role struct {
 // RoleTable struct contains role specific access control values for a database table
 type RoleTable struct {
 	Name     string
-	ReadOnly *bool `mapstructure:"read_only"`
+	ReadOnly bool  `mapstructure:"read_only"`

-	Query  Query
-	Insert Insert
-	Update Update
-	Delete Delete
+	Query  *Query
+	Insert *Insert
+	Update *Update
+	Delete *Delete
 }

 // Query struct contains access control values for query operations
@@ -122,7 +124,7 @@ type Query struct {
 	Filters          []string
 	Columns          []string
 	DisableFunctions bool `mapstructure:"disable_functions"`
-	Block            *bool
+	Block            bool
 }

 // Insert struct contains access control values for insert operations
@@ -130,7 +132,7 @@ type Insert struct {
 	Filters []string
 	Columns []string
 	Presets map[string]string
-	Block   *bool
+	Block   bool
 }

 // Update struct contains access control values for update operations
@@ -138,43 +140,84 @@ type Update struct {
 	Filters []string
 	Columns []string
 	Presets map[string]string
-	Block   *bool
+	Block   bool
 }

 // Delete struct contains access control values for delete operations
 type Delete struct {
 	Filters []string
 	Columns []string
-	Block   *bool
+	Block   bool
 }

+// AddRoleTable is a helper function to make it easy to add per-table
+// row-level config
+func (c *Config) AddRoleTable(role, table string, conf interface{}) error {
+	var r *Role
+
+	for i := range c.Roles {
+		if strings.EqualFold(c.Roles[i].Name, role) {
+			r = &c.Roles[i]
+			break
+		}
+	}
+	if r == nil {
+		nr := Role{Name: role}
+		c.Roles = append(c.Roles, nr)
+		r = &nr
+	}
+
+	var t *RoleTable
+	for i := range r.Tables {
+		if strings.EqualFold(r.Tables[i].Name, table) {
+			t = &r.Tables[i]
+			break
+		}
+	}
+	if t == nil {
+		nt := RoleTable{Name: table}
+		r.Tables = append(r.Tables, nt)
+		t = &nt
+	}
+
+	switch v := conf.(type) {
+	case Query:
+		t.Query = &v
+	case Insert:
+		t.Insert = &v
+	case Update:
+		t.Update = &v
+	case Delete:
+		t.Delete = &v
+	default:
+		return fmt.Errorf("unsupported object type: %t", v)
+	}
+	return nil
+}
+
 // ReadInConfig function reads in the config file for the environment specified in the GO_ENV
 // environment variable. This is the best way to create a new Super Graph config.
 func ReadInConfig(configFile string) (*Config, error) {
-	cpath := path.Dir(configFile)
-	cfile := path.Base(configFile)
-	vi := newViper(cpath, cfile)
+	cp := path.Dir(configFile)
+	vi := newViper(cp, path.Base(configFile))

 	if err := vi.ReadInConfig(); err != nil {
 		return nil, err
 	}

-	inherits := vi.GetString("inherits")
-
-	if inherits != "" {
-		vi = newViper(cpath, inherits)
+	if pcf := vi.GetString("inherits"); pcf != "" {
+		cf := vi.ConfigFileUsed()
+		vi = newViper(cp, pcf)

 		if err := vi.ReadInConfig(); err != nil {
 			return nil, err
 		}

-		if vi.IsSet("inherits") {
-			return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
-				inherits,
-				vi.GetString("inherits"))
+		if v := vi.GetString("inherits"); v != "" {
+			return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)", pcf, v)
 		}

-		vi.SetConfigName(cfile)
+		vi.SetConfigFile(cf)

 		if err := vi.MergeInConfig(); err != nil {
 			return nil, err

@@ -188,7 +231,7 @@ func ReadInConfig(configFile string) (*Config, error) {
 	}

 	if c.AllowListFile == "" {
-		c.AllowListFile = path.Join(cpath, "allow.list")
+		c.AllowListFile = path.Join(cp, "allow.list")
 	}

 	return c, nil

@@ -202,7 +245,7 @@ func newViper(configPath, configFile string) *viper.Viper {
 	vi.AutomaticEnv()

 	if filepath.Ext(configFile) != "" {
-		vi.SetConfigFile(configFile)
+		vi.SetConfigFile(path.Join(configPath, configFile))
 	} else {
 		vi.SetConfigName(configFile)
 		vi.AddConfigPath(configPath)
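The new `AddRoleTable` helper replaces hand-built `Role`/`RoleTable` literals (compare the deletion in the test file further down, where pointer-valued fields forced a `blockFalse := false` variable). A minimal sketch of its use; the config path and table name are illustrative:

```go
package main

import (
	"log"

	"github.com/dosco/super-graph/core"
)

func main() {
	c, err := core.ReadInConfig("./config/dev.yml") // path is illustrative
	if err != nil {
		log.Fatal(err)
	}

	// Grant the anonymous role read access to a table. The concrete
	// type of the third argument (Query, Insert, Update or Delete)
	// selects which operation the config applies to.
	if err := c.AddRoleTable("anon", "users", core.Query{Limit: 100}); err != nil {
		log.Fatal(err)
	}
}
```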
@@ -5,11 +5,6 @@ import (
 	"errors"
 )

-const (
-	openVar  = "{{"
-	closeVar = "}}"
-)
-
 var (
 	errNotFound = errors.New("not found in prepared statements")
 )
core/core.go: 65 changes

@@ -1,17 +1,15 @@
 package core

 import (
-	"bytes"
 	"context"
 	"database/sql"
 	"encoding/json"
 	"fmt"
+	"hash/maphash"
 	"time"

 	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/core/internal/qcode"
-
-	"github.com/valyala/fasttemplate"
 )

 type OpType int

@@ -93,6 +91,7 @@ func (sg *SuperGraph) initCompilers() error {
 	}

 	sg.qc, err = qcode.NewCompiler(qcode.Config{
+		DefaultBlock: sg.conf.DefaultBlock,
 		Blocklist:    sg.conf.Blocklist,
 	})
 	if err != nil {

@@ -126,7 +125,7 @@ func (c *scontext) execQuery() ([]byte, error) {
 		return nil, err
 	}

-	if len(data) == 0 || st.skipped == 0 {
+	if len(data) == 0 || st.md.Skipped() == 0 {
 		return data, nil
 	}

@@ -167,32 +166,44 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
 	} else {
 		role = c.role
 	}

 	c.res.role = role

-	ps, ok := c.sg.prepared[stmtHash(c.res.name, role)]
+	h := maphash.Hash{}
+	h.SetSeed(c.sg.hashSeed)
+	id := queryID(&h, c.res.name, role)
+
+	q, ok := c.sg.queries[id]
 	if !ok {
 		return nil, nil, errNotFound
 	}
-	c.res.sql = ps.st.sql
+
+	if q.sd == nil {
+		q.Do(func() { c.sg.prepare(q, role) })
+
+		if q.err != nil {
+			return nil, nil, err
+		}
+	}
+
+	c.res.sql = q.st.sql

 	var root []byte
 	var row *sql.Row

-	varsList, err := c.argList(ps.args)
+	varsList, err := c.argList(q.st.md)
 	if err != nil {
 		return nil, nil, err
 	}

 	if useTx {
-		row = tx.Stmt(ps.sd).QueryRow(varsList...)
+		row = tx.Stmt(q.sd).QueryRow(varsList...)
 	} else {
-		row = ps.sd.QueryRow(varsList...)
+		row = q.sd.QueryRow(varsList...)
 	}

-	if ps.roleArg {
+	if q.roleArg {
 		err = row.Scan(&role, &root)
 	} else {
 		err = row.Scan(&root)

@@ -206,15 +217,15 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
 	if useTx {
 		if err := tx.Commit(); err != nil {
-			return nil, nil, err
+			return nil, nil, q.err
 		}
 	}

-	if root, err = c.sg.encryptCursor(ps.st.qc, root); err != nil {
+	if root, err = c.sg.encryptCursor(q.st.qc, root); err != nil {
 		return nil, nil, err
 	}

-	return root, &ps.st, nil
+	return root, &q.st, nil
 }

 func (c *scontext) resolveSQL() ([]byte, *stmt, error) {

@@ -252,15 +263,23 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
 		return nil, nil, err
 	}
 	st := &stmts[0]
+	c.res.sql = st.sql

-	t := fasttemplate.New(st.sql, openVar, closeVar)
-	buf := &bytes.Buffer{}
-
-	_, err = t.ExecuteFunc(buf, c.argMap())
+	varList, err := c.argList(st.md)
 	if err != nil {
 		return nil, nil, err
 	}
-	finalSQL := buf.String()
+	// finalSQL := buf.String()
+
+	////
+
+	// _, err = t.ExecuteFunc(buf, c.argMap(st.md))
+	// if err != nil {
+	// 	return nil, nil, err
+	// }
+	// finalSQL := buf.String()
+
+	/////

 	// var stime time.Time

@@ -275,9 +294,9 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
 	// defaultRole := c.role

 	if useTx {
-		row = tx.QueryRow(finalSQL)
+		row = tx.QueryRowContext(c, st.sql, varList...)
 	} else {
-		row = c.sg.db.QueryRow(finalSQL)
+		row = c.sg.db.QueryRowContext(c, st.sql, varList...)
 	}

 	if len(stmts) > 1 {

@@ -286,9 +305,7 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
 		err = row.Scan(&root)
 	}

-	c.res.sql = finalSQL
-
-	if len(role) == 0 {
+	if role == "" {
 		c.res.role = c.role
 	} else {
 		c.res.role = role
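`resolvePreparedSQL` now prepares statements lazily: the `q.Do` call implies each `query` value embeds a `sync.Once`, so concurrent requests trigger at most one `db.Prepare`. A sketch of that pattern with stand-in types (the real `query` struct is not fully shown in this diff):

```go
package main

import (
	"database/sql"
	"fmt"
	"sync"
)

// query mirrors the shape implied by the diff: an embedded sync.Once
// guards one-time preparation; sd and err hold the outcome for all
// later callers.
type query struct {
	sync.Once
	sd  *sql.Stmt
	err error
}

// prepare runs db.Prepare at most once per query value.
func (q *query) prepare(db *sql.DB, sqlText string) {
	q.Do(func() {
		q.sd, q.err = db.Prepare(sqlText)
	})
}

func main() {
	q := &query{}
	prepared := 0

	// Many goroutines can race on Do; the body still runs exactly once.
	var wg sync.WaitGroup
	for i := 0; i < 4; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			q.Do(func() { prepared++ }) // stand-in for the real prepare step
		}()
	}
	wg.Wait()

	fmt.Println("prepare calls:", prepared) // always 1
}
```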
core/init.go: 212 changes

@@ -2,9 +2,7 @@ package core

 import (
 	"fmt"
-	"regexp"
 	"strings"
-	"unicode"

 	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/core/internal/qcode"

@@ -18,17 +16,12 @@ func (sg *SuperGraph) initConfig() error {
 		flect.AddPlural(k, v)
 	}

-	// Variables: Validate and sanitize
-	for k, v := range c.Vars {
-		c.Vars[k] = sanitizeVars(v)
-	}
-
-	// Tables: Validate and sanitize
 	tm := make(map[string]struct{})

 	for i := 0; i < len(c.Tables); i++ {
 		t := &c.Tables[i]
-		t.Name = flect.Pluralize(strings.ToLower(t.Name))
+		// t.Name = flect.Pluralize(strings.ToLower(t.Name))

 		if _, ok := tm[t.Name]; ok {
 			sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)

@@ -80,17 +73,23 @@ func (sg *SuperGraph) initConfig() error {
 		sg.roles["anon"] = &ur
 	}

-	// Roles: validate and sanitize
-	c.RolesQuery = sanitizeVars(c.RolesQuery)
-
 	if c.RolesQuery == "" {
-		sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
-	} else {
-		n := 0
-		for k, v := range sg.roles {
-			if k == "user" || k == "anon" {
-				n++
-			} else if v.Match != "" {
-				n++
-			}
-		}
-		sg.abacEnabled = (n > 2)
+		sg.log.Printf("INF roles_query not defined: attribute based access control disabled")
+	} else {
+		_, userExists := sg.roles["user"]
+		_, sg.anonExists = sg.roles["anon"]
+
+		sg.abacEnabled = userExists && c.RolesQuery != ""
+		if !sg.abacEnabled {
+			sg.log.Printf("WRN attribute based access control disabled: no custom roles found (with 'match' defined)")
+		}
 	}

 	return nil
 }

@@ -101,21 +100,26 @@ func getDBTableAliases(c *Config) map[string][]string {
 	for i := range c.Tables {
 		t := c.Tables[i]

-		if len(t.Table) == 0 || len(t.Columns) != 0 {
-			continue
-		}
-
+		if t.Table != "" && t.Type == "" {
 			m[t.Table] = append(m[t.Table], t.Name)
+		}
 	}
 	return m
 }

 func addTables(c *Config, di *psql.DBInfo) error {
+	var err error
+
 	for _, t := range c.Tables {
-		if t.Table == "" || len(t.Columns) == 0 {
-			continue
-		}
+		switch t.Type {
+		case "json", "jsonb":
+			err = addJsonTable(di, t.Columns, t)
+
+		case "polymorphic":
+			err = addVirtualTable(di, t.Columns, t)
+		}

-		if err := addTable(di, t.Columns, t); err != nil {
+		if err != nil {
 			return err
 		}

@@ -123,17 +127,18 @@ func addTables(c *Config, di *psql.DBInfo) error {
 	return nil
 }

-func addTable(di *psql.DBInfo, cols []Column, t Table) error {
+func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
+	// This is for jsonb columns that want to be tables.
 	bc, ok := di.GetColumn(t.Table, t.Name)
 	if !ok {
 		return fmt.Errorf(
-			"Column '%s' not found on table '%s'",
+			"json table: column '%s' not found on table '%s'",
 			t.Name, t.Table)
 	}

 	if bc.Type != "json" && bc.Type != "jsonb" {
 		return fmt.Errorf(
-			"Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
+			"json table: column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
 			t.Name, t.Table, bc.Type)
 	}

@@ -160,8 +165,38 @@ func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
 	return nil
 }

+func addVirtualTable(di *psql.DBInfo, cols []Column, t Table) error {
+	if len(cols) == 0 {
+		return fmt.Errorf("polymorphic table: no id column specified")
+	}
+
+	c := cols[0]
+
+	if c.ForeignKey == "" {
+		return fmt.Errorf("polymorphic table: no 'related_to' specified on id column")
+	}
+
+	s := strings.SplitN(c.ForeignKey, ".", 2)
+
+	if len(s) != 2 {
+		return fmt.Errorf("polymorphic table: foreign key must be <type column>.<foreign key column>")
+	}
+
+	di.VTables = append(di.VTables, psql.VirtualTable{
+		Name:       t.Name,
+		IDColumn:   c.Name,
+		TypeColumn: s[0],
+		FKeyColumn: s[1],
+	})
+
+	return nil
+}
+
 func addForeignKeys(c *Config, di *psql.DBInfo) error {
 	for _, t := range c.Tables {
+		if t.Type != "" {
+			continue
+		}
 		for _, c := range t.Columns {
 			if c.ForeignKey == "" {
 				continue

@@ -175,30 +210,52 @@ func addForeignKeys(c *Config, di *psql.DBInfo) error {
 }

 func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
-	c1, ok := di.GetColumn(t.Name, c.Name)
+	var tn string
+
+	if t.Type == "polymorphic" {
+		tn = t.Table
+	} else {
+		tn = t.Name
+	}
+
+	c1, ok := di.GetColumn(tn, c.Name)
 	if !ok {
 		return fmt.Errorf(
-			"Invalid table '%s' or column '%s' in Config",
-			t.Name, c.Name)
+			"config: invalid table '%s' or column '%s' defined",
+			tn, c.Name)
 	}

 	v := strings.SplitN(c.ForeignKey, ".", 2)
 	if len(v) != 2 {
 		return fmt.Errorf(
-			"Invalid foreign_key in Config for table '%s' and column '%s'",
-			t.Name, c.Name)
+			"config: invalid foreign_key defined for table '%s' and column '%s': %s",
+			tn, c.Name, c.ForeignKey)
 	}

+	// check if it's a polymorphic foreign key
+	if _, ok := di.GetColumn(tn, v[0]); ok {
+		c2, ok := di.GetColumn(tn, v[1])
+		if !ok {
+			return fmt.Errorf(
+				"config: invalid column '%s' for polymorphic relationship on table '%s' and column '%s'",
+				v[1], tn, c.Name)
+		}
+
+		c1.FKeyTable = v[0]
+		c1.FKeyColID = []int16{c2.ID}
+		return nil
+	}
+
 	fkt, fkc := v[0], v[1]
-	c2, ok := di.GetColumn(fkt, fkc)
+	c3, ok := di.GetColumn(fkt, fkc)
 	if !ok {
 		return fmt.Errorf(
-			"Invalid foreign_key in Config for table '%s' and column '%s'",
-			t.Name, c.Name)
+			"config: foreign_key for table '%s' and column '%s' points to unknown table '%s' and column '%s'",
+			t.Name, c.Name, v[0], v[1])
 	}

 	c1.FKeyTable = fkt
-	c1.FKeyColID = []int16{c2.ID}
+	c1.FKeyColID = []int16{c3.ID}

 	return nil
 }

@@ -206,7 +263,7 @@ func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
 func addRoles(c *Config, qc *qcode.Compiler) error {
 	for _, r := range c.Roles {
 		for _, t := range r.Tables {
-			if err := addRole(qc, r, t, c.DefaultAllow); err != nil {
+			if err := addRole(qc, r, t, c.DefaultBlock); err != nil {
 				return err
 			}
 		}

@@ -215,67 +272,56 @@ func addRoles(c *Config, qc *qcode.Compiler) error {
 	return nil
 }

-func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultAllow bool) error {
-	ro := true // read-only
+func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultBlock bool) error {
+	ro := false // read-only

-	if defaultAllow {
-		ro = false
-	}
-
-	if r.Name != "anon" {
-		ro = false
+	if defaultBlock && r.Name == "anon" {
+		ro = true
 	}

-	if t.ReadOnly != nil {
-		ro = *t.ReadOnly
+	if t.ReadOnly {
+		ro = true
 	}

-	blocked := struct {
-		query  bool
-		insert bool
-		update bool
-		delete bool
-	}{false, ro, ro, ro}
-
-	if t.Query.Block != nil {
-		blocked.query = *t.Query.Block
-	}
-	if t.Insert.Block != nil {
-		blocked.insert = *t.Insert.Block
-	}
-	if t.Update.Block != nil {
-		blocked.update = *t.Update.Block
-	}
-	if t.Delete.Block != nil {
-		blocked.delete = *t.Delete.Block
-	}
-
-	query := qcode.QueryConfig{
+	query := qcode.QueryConfig{Block: false}
+	insert := qcode.InsertConfig{Block: ro}
+	update := qcode.UpdateConfig{Block: ro}
+	del := qcode.DeleteConfig{Block: ro}
+
+	if t.Query != nil {
+		query = qcode.QueryConfig{
 			Limit:            t.Query.Limit,
 			Filters:          t.Query.Filters,
 			Columns:          t.Query.Columns,
 			DisableFunctions: t.Query.DisableFunctions,
-			Block:            blocked.query,
+			Block:            t.Query.Block,
 		}
+	}

-	insert := qcode.InsertConfig{
+	if t.Insert != nil {
+		insert = qcode.InsertConfig{
 			Filters: t.Insert.Filters,
 			Columns: t.Insert.Columns,
 			Presets: t.Insert.Presets,
-			Block:   blocked.insert,
+			Block:   t.Insert.Block,
 		}
+	}

-	update := qcode.UpdateConfig{
+	if t.Update != nil {
+		update = qcode.UpdateConfig{
 			Filters: t.Update.Filters,
 			Columns: t.Update.Columns,
 			Presets: t.Update.Presets,
-			Block:   blocked.update,
+			Block:   t.Update.Block,
 		}
+	}

-	del := qcode.DeleteConfig{
+	if t.Delete != nil {
+		del = qcode.DeleteConfig{
 			Filters: t.Delete.Filters,
 			Columns: t.Delete.Columns,
-			Block:   blocked.delete,
+			Block:   t.Delete.Block,
 		}
+	}

 	return qc.AddRole(r.Name, t.Name, qcode.TRConfig{

@@ -293,23 +339,3 @@ func (r *Role) GetTable(name string) *RoleTable {
 func sanitize(value string) string {
 	return strings.ToLower(strings.TrimSpace(value))
 }
-
-var (
-	varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
-	varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
-)
-
-func sanitizeVars(s string) string {
-	s0 := varRe1.ReplaceAllString(s, `{{$1}}`)
-
-	s1 := strings.Map(func(r rune) rune {
-		if unicode.IsSpace(r) {
-			return ' '
-		}
-		return r
-	}, s0)
-
-	return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
-		return strings.ToLower(m)
-	})
-}
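The new `addVirtualTable` expects the polymorphic id column's foreign key in `<type column>.<foreign key column>` form. A sketch of a matching `Config` literal — the table and column names are invented for illustration, and the field names follow the `Table` and `Column` structs used in this diff:

```go
package main

import (
	"fmt"

	"github.com/dosco/super-graph/core"
)

func main() {
	// The id column's ForeignKey ("subject_type.id" here) is split by
	// addVirtualTable into the TypeColumn and FKeyColumn of the
	// resulting psql.VirtualTable.
	c := core.Config{
		Tables: []core.Table{
			{
				Name:  "subject",       // virtual table name (illustrative)
				Table: "notifications", // backing table (illustrative)
				Type:  "polymorphic",
				Columns: []core.Column{
					{Name: "subject_id", ForeignKey: "subject_type.id"},
				},
			},
		},
	}
	fmt.Printf("%+v\n", c.Tables[0])
}
```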
@@ -6,24 +6,27 @@ import (
 	"errors"
 	"fmt"
 	"io/ioutil"
+	"log"
 	"os"
 	"sort"
 	"strings"
+	"text/scanner"

 	"github.com/chirino/graphql/schema"
 	"github.com/dosco/super-graph/jsn"
 )

 const (
-	AL_QUERY int = iota + 1
-	AL_VARS
+	expComment = iota + 1
+	expVar
+	expQuery
 )

 type Item struct {
 	Name    string
 	key     string
 	Query   string
-	Vars    json.RawMessage
+	Vars    string
 	Comment string
 }

@@ -35,6 +38,7 @@ type List struct {
 type Config struct {
 	CreateIfNotExists bool
 	Persist           bool
+	Log               *log.Logger
 }

 func New(filename string, conf Config) (*List, error) {

@@ -80,6 +84,12 @@ func New(filename string, conf Config) (*List, error) {
 	} else {
 		al.filepath = filename
 	}
+
+	if file, err := os.OpenFile(al.filepath, os.O_RDONLY|os.O_CREATE, 0644); err != nil {
+		return nil, err
+	} else {
+		file.Close()
+	}
 	}

 	var err error

@@ -89,8 +99,10 @@ func New(filename string, conf Config) (*List, error) {

 	go func() {
 		for v := range al.saveChan {
-			if err = al.save(v); err != nil {
-				break
+			err := al.save(v)
+
+			if err != nil && conf.Log != nil {
+				conf.Log.Println("WRN allow list save:", err)
 			}
 		}
 	}()

@@ -116,121 +128,101 @@ func (al *List) Set(vars []byte, query, comment string) error {
 		return errors.New("empty query")
 	}

-	var q string
-
-	for i := 0; i < len(query); i++ {
-		c := query[i]
-		if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' {
-			q = query
-			break
-
-		} else if c == '{' {
-			q = "query " + query
-			break
-		}
-	}
-
 	al.saveChan <- Item{
 		Comment: comment,
-		Query:   q,
-		Vars:    vars,
+		Query:   query,
+		Vars:    string(vars),
 	}

 	return nil
 }

 func (al *List) Load() ([]Item, error) {
-	var list []Item
-	varString := "variables"
-
 	b, err := ioutil.ReadFile(al.filepath)
 	if err != nil {
-		return list, err
+		return nil, err
 	}

-	if len(b) == 0 {
-		return list, nil
-	}
+	return parse(string(b), al.filepath)
+}

-	var comment bytes.Buffer
-	var varBytes []byte
+func parse(b, filename string) ([]Item, error) {
+	var items []Item

-	itemMap := make(map[string]struct{})
+	var s scanner.Scanner
+	s.Init(strings.NewReader(b))
+	s.Filename = filename
+	s.Mode ^= scanner.SkipComments

-	s, e, c := 0, 0, 0
-	ty := 0
+	var op, sp scanner.Position
+	var item Item

-	for {
-		fq := false
+	st := expComment
+	newComment := false

-		if c == 0 && b[e] == '#' {
-			s = e
-			for e < len(b) && b[e] != '\n' {
-				e++
-			}
-			if (e - s) > 2 {
-				comment.Write(b[(s + 1):(e + 1)])
-			}
-		}
+	for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
+		txt := s.TokenText()

-		if e >= len(b) {
-			break
-		}
+		switch {
+		case strings.HasPrefix(txt, "/*"):
+			if st == expQuery {
+				v := b[sp.Offset:s.Pos().Offset]
+				item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
+				items = append(items, item)
+			}
+			item = Item{Comment: strings.TrimSpace(txt[2 : len(txt)-2])}
+			sp = s.Pos()
+			st = expComment
+			newComment = true

-		if matchPrefix(b, e, "query") || matchPrefix(b, e, "mutation") {
-			if c == 0 {
-				s = e
-			}
-			ty = AL_QUERY
-		} else if matchPrefix(b, e, varString) {
-			if c == 0 {
-				s = e + len(varString) + 1
-			}
-			ty = AL_VARS
-		} else if b[e] == '{' {
-			c++
+		case !newComment && strings.HasPrefix(txt, "#"):
+			if st == expQuery {
+				v := b[sp.Offset:s.Pos().Offset]
+				item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
+				items = append(items, item)
+			}
+			item = Item{}
+			sp = s.Pos()
+			st = expComment

-		} else if b[e] == '}' {
-			c--
+		case strings.HasPrefix(txt, "variables"):
+			if st == expComment {
+				v := b[sp.Offset:s.Pos().Offset]
+				item.Comment = strings.TrimSpace(v[:strings.IndexByte(v, '\n')])
+			}
+			sp = s.Pos()
+			st = expVar

-			if c == 0 {
-				if ty == AL_QUERY {
-					fq = true
-				} else if ty == AL_VARS {
-					varBytes = b[s:(e + 1)]
-				}
-				ty = 0
-			}
-		}
+		case isGraphQL(txt):
+			if st == expVar {
+				v := b[sp.Offset:s.Pos().Offset]
+				item.Vars = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
+			}
+			sp = op
+			st = expQuery
+		}

-		if fq {
-			query := string(b[s:(e + 1)])
-			name := QueryName(query)
-			key := strings.ToLower(name)
-
-			if _, ok := itemMap[key]; !ok {
-				v := Item{
-					Name:    name,
-					key:     key,
-					Query:   query,
-					Vars:    varBytes,
-					Comment: comment.String(),
-				}
-				list = append(list, v)
-				comment.Reset()
-			}
-
-			varBytes = nil
-		}
-
-		e++
-		if e >= len(b) {
-			break
-		}
+		op = s.Pos()
 	}

-	return list, nil
+	if st == expQuery {
+		v := b[sp.Offset:s.Pos().Offset]
+		item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
+		items = append(items, item)
+	}
+
+	for i := range items {
+		items[i].Name = QueryName(items[i].Query)
+		items[i].key = strings.ToLower(items[i].Name)
+	}
+
+	return items, nil
+}
+
+func isGraphQL(s string) bool {
+	return strings.HasPrefix(s, "query") ||
+		strings.HasPrefix(s, "mutation") ||
+		strings.HasPrefix(s, "subscription")
+}

 func (al *List) save(item Item) error {

@@ -239,8 +231,6 @@ func (al *List) save(item Item) error {
 	qd := &schema.QueryDocument{}

 	if err := qd.Parse(item.Query); err != nil {
-		fmt.Println("##", item.Query)
-
 		return err
 	}

@@ -248,8 +238,6 @@ func (al *List) save(item Item) error {
 	query := buf.String()
 	buf.Reset()

-	// fmt.Println(">", query)
-
 	item.Name = QueryName(query)
 	item.key = strings.ToLower(item.Name)

@@ -291,57 +279,43 @@ func (al *List) save(item Item) error {
 		return strings.Compare(list[i].key, list[j].key) == -1
 	})

-	for _, v := range list {
-		cmtLines := strings.Split(v.Comment, "\n")
-
-		i := 0
-		for _, c := range cmtLines {
-			if c = strings.TrimSpace(c); c == "" {
-				continue
-			}
-
-			_, err := f.WriteString(fmt.Sprintf("# %s\n", c))
-			if err != nil {
-				return err
-			}
-			i++
-		}
-
-		if i != 0 {
-			if _, err := f.WriteString("\n"); err != nil {
-				return err
-			}
-		} else {
-			if _, err := f.WriteString(fmt.Sprintf("# Query named %s\n\n", v.Name)); err != nil {
-				return err
-			}
-		}
-
-		if len(v.Vars) != 0 && !bytes.Equal(v.Vars, []byte("{}")) {
+	for i, v := range list {
+		var vars string
+		if v.Vars != "" {
 			buf.Reset()

-			if err := jsn.Clear(&buf, v.Vars); err != nil {
-				return fmt.Errorf("failed to clean vars: %w", err)
+			if err := jsn.Clear(&buf, []byte(v.Vars)); err != nil {
+				continue
 			}
 			vj := json.RawMessage(buf.Bytes())

-			vj, err = json.MarshalIndent(vj, "", " ")
-			if err != nil {
-				return fmt.Errorf("failed to marshal vars: %w", err)
+			if vj, err = json.MarshalIndent(vj, "", " "); err != nil {
+				continue
 			}
+			vars = string(vj)
+		}
+		list[i].Vars = vars
+		list[i].Comment = strings.TrimSpace(v.Comment)
+	}

-		_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
+	for _, v := range list {
+		if v.Comment != "" {
+			_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
+		} else {
+			_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
+		}

 		if err != nil {
 			return err
 		}

+		if v.Vars != "" {
+			_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", v.Vars))
+			if err != nil {
+				return err
+			}
+		}

 		if v.Query[0] == '{' {
 			_, err = f.WriteString(fmt.Sprintf("query %s\n\n", v.Query))
 		} else {
 			_, err = f.WriteString(fmt.Sprintf("%s\n\n", v.Query))
 		}

 		if err != nil {
 			return err
 		}

@@ -350,18 +324,6 @@ func (al *List) save(item Item) error {
 	return nil
 }

-func matchPrefix(b []byte, i int, s string) bool {
-	if (len(b) - i) < len(s) {
-		return false
-	}
-	for n := 0; n < len(s); n++ {
-		if b[(i+n)] != s[n] {
-			return false
-		}
-	}
-	return true
-}
-
 func QueryName(b string) string {
 	state, s := 0, 0
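The rewritten parser accepts `/* ... */` comment blocks as item delimiters alongside legacy `#` comments, `variables {...}` blocks, and the query/mutation/subscription keywords (exercised by TestParse1 and TestParse2 below). A sketch of loading such a file through the exported API; note that `allow` is an internal package, so this only compiles inside the repository:

```go
package main

import (
	"fmt"
	"log"

	"github.com/dosco/super-graph/core/internal/allow"
)

func main() {
	al, err := allow.New("./allow.list", allow.Config{})
	if err != nil {
		log.Fatal(err)
	}

	// Load parses the file into Items; Name and key are derived from
	// each query after parsing.
	items, err := al.Load()
	if err != nil {
		log.Fatal(err)
	}

	for _, it := range items {
		fmt.Println(it.Name, "-", it.Comment)
	}
}
```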
@@ -14,7 +14,7 @@ func TestGQLName1(t *testing.T) {

 	name := QueryName(q)

-	if len(name) != 0 {
+	if name != "" {
 		t.Fatal("Name should be empty, not ", name)
 	}
 }

@@ -82,3 +82,160 @@ func TestGQLName5(t *testing.T) {
 		t.Fatal("Name should be empty, not ", name)
 	}
 }
+
+func TestParse1(t *testing.T) {
+	var al = `
+	# Hello world
+
+	variables {
+		"data": {
+			"slug": "",
+			"body": "",
+			"post": {
+				"connect": {
+					"slug": ""
+				}
+			}
+		}
+	}
+
+	mutation createComment {
+		comment(insert: $data) {
+			slug
+			body
+			createdAt: created_at
+			totalVotes: cached_votes_total
+			totalReplies: cached_replies_total
+			vote: comment_vote(where: {user_id: {eq: $user_id}}) {
+				created_at
+				__typename
+			}
+			author: user {
+				slug
+				firstName: first_name
+				lastName: last_name
+				pictureURL: picture_url
+				bio
+				__typename
+			}
+			__typename
+		}
+	}
+
+	# Query named createPost
+
+	query createPost {
+		post(insert: $data) {
+			slug
+			body
+			published
+			createdAt: created_at
+			totalVotes: cached_votes_total
+			totalComments: cached_comments_total
+			vote: post_vote(where: {user_id: {eq: $user_id}}) {
+				created_at
+				__typename
+			}
+			author: user {
+				slug
+				firstName: first_name
+				lastName: last_name
+				pictureURL: picture_url
+				bio
+				__typename
+			}
+			__typename
+		}
+	}`
+
+	_, err := parse(al, "allow.list")
+	if err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestParse2(t *testing.T) {
+	var al = `
+	/* Hello world */
+
+	variables {
+		"data": {
+			"slug": "",
+			"body": "",
+			"post": {
+				"connect": {
+					"slug": ""
+				}
+			}
+		}
+	}
+
+	mutation createComment {
+		comment(insert: $data) {
+			slug
+			body
+			createdAt: created_at
+			totalVotes: cached_votes_total
+			totalReplies: cached_replies_total
+			vote: comment_vote(where: {user_id: {eq: $user_id}}) {
+				created_at
+				__typename
+			}
+			author: user {
+				slug
+				firstName: first_name
+				lastName: last_name
+				pictureURL: picture_url
+				bio
+				__typename
+			}
+			__typename
+		}
+	}
+
+	/*
+	Query named createPost
+	*/
+
+	variables {
+		"data": {
+			"thread": {
+				"connect": {
+					"slug": ""
+				}
+			},
+			"slug": "",
+			"published": false,
+			"body": ""
+		}
+	}
+
+	query createPost {
+		post(insert: $data) {
+			slug
+			body
+			published
+			createdAt: created_at
+			totalVotes: cached_votes_total
+			totalComments: cached_comments_total
+			vote: post_vote(where: {user_id: {eq: $user_id}}) {
+				created_at
+				__typename
+			}
+			author: user {
+				slug
+				firstName: first_name
+				lastName: last_name
+				pictureURL: picture_url
+				bio
+				__typename
+			}
+			__typename
+		}
+	}`
+
+	_, err := parse(al, "allow.list")
+	if err != nil {
+		t.Fatal(err)
+	}
+}
@@ -55,19 +55,6 @@ func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
 	config.AllowListFile = "./allow.list"
 	config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`

-	blockFalse := false
-
-	config.Roles = []core.Role{
-		core.Role{
-			Name: "anon",
-			Tables: []core.RoleTable{
-				core.RoleTable{Name: "users", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
-				core.RoleTable{Name: "product", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
-				core.RoleTable{Name: "line_item", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
-			},
-		},
-	}
-
 	sg, err := core.NewSuperGraph(&config, db)
 	require.NoError(t, err)
 	ctx := context.Background()
@@ -1,4 +1,3 @@
-//nolint:errcheck
 package psql

 import (

@@ -12,8 +11,7 @@ import (
 func (c *compilerContext) renderBaseColumns(
 	sel *qcode.Select,
 	ti *DBTableInfo,
-	childCols []*qcode.Column,
-	skipped uint32) ([]int, bool, error) {
+	childCols []*qcode.Column) ([]int, bool, error) {

 	var realColsRendered []int

@@ -113,15 +111,15 @@ func (c *compilerContext) renderColumnSearchRank(sel *qcode.Select, ti *DBTableI
 	c.renderComma(columnsRendered)
 	//fmt.Fprintf(w, `ts_rank("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
 	//c.sel.Name, cn, arg.Val, col.Name)
-	io.WriteString(c.w, `ts_rank(`)
+	_, _ = io.WriteString(c.w, `ts_rank(`)
 	colWithTable(c.w, ti.Name, cn)
 	if c.schema.ver >= 110000 {
-		io.WriteString(c.w, `, websearch_to_tsquery('{{`)
+		_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
 	} else {
-		io.WriteString(c.w, `, to_tsquery('{{`)
+		_, _ = io.WriteString(c.w, `, to_tsquery(`)
 	}
-	io.WriteString(c.w, arg.Val)
-	io.WriteString(c.w, `}}'))`)
+	c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
+	_, _ = io.WriteString(c.w, `))`)
 	alias(c.w, col.Name)

 	return nil

@@ -138,15 +136,15 @@ func (c *compilerContext) renderColumnSearchHeadline(sel *qcode.Select, ti *DBTa
 	c.renderComma(columnsRendered)
 	//fmt.Fprintf(w, `ts_headline("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
 	//c.sel.Name, cn, arg.Val, col.Name)
-	io.WriteString(c.w, `ts_headline(`)
+	_, _ = io.WriteString(c.w, `ts_headline(`)
 	colWithTable(c.w, ti.Name, cn)
 	if c.schema.ver >= 110000 {
-		io.WriteString(c.w, `, websearch_to_tsquery('{{`)
+		_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
 	} else {
-		io.WriteString(c.w, `, to_tsquery('{{`)
+		_, _ = io.WriteString(c.w, `, to_tsquery(`)
 	}
-	io.WriteString(c.w, arg.Val)
-	io.WriteString(c.w, `}}'))`)
+	c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
+	_, _ = io.WriteString(c.w, `))`)
 	alias(c.w, col.Name)

 	return nil

@@ -158,9 +156,9 @@ func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInf
 	}

 	c.renderComma(columnsRendered)
-	io.WriteString(c.w, `(`)
+	_, _ = io.WriteString(c.w, `(`)
 	squoted(c.w, ti.Name)
-	io.WriteString(c.w, ` :: text)`)
+	_, _ = io.WriteString(c.w, ` :: text)`)
 	alias(c.w, col.Name)

 	return nil

@@ -170,9 +168,9 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
 	pl := funcPrefixLen(c.schema.fm, col.Name)
 	// if pl == 0 {
 	// 	//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
-	// 	io.WriteString(c.w, `'`)
-	// 	io.WriteString(c.w, col.Name)
-	// 	io.WriteString(c.w, ` not defined'`)
+	// 	_, _ = io.WriteString(c.w, `'`)
+	// 	_, _ = io.WriteString(c.w, col.Name)
+	// 	_, _ = io.WriteString(c.w, ` not defined'`)
 	// 	alias(c.w, col.Name)
 	// }

@@ -191,10 +189,10 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
 	c.renderComma(columnsRendered)

 	//fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Name, cn, col.Name)
-	io.WriteString(c.w, fn)
-	io.WriteString(c.w, `(`)
+	_, _ = io.WriteString(c.w, fn)
+	_, _ = io.WriteString(c.w, `(`)
 	colWithTable(c.w, ti.Name, cn)
-	io.WriteString(c.w, `)`)
+	_, _ = io.WriteString(c.w, `)`)
 	alias(c.w, col.Name)

 	return nil

@@ -202,7 +200,7 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
 func (c *compilerContext) renderComma(columnsRendered int) {
 	if columnsRendered != 0 {
-		io.WriteString(c.w, `, `)
+		_, _ = io.WriteString(c.w, `, `)
 	}
 }
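The net effect of these renderer changes: the search functions no longer embed a quoted fasttemplate tag in the SQL text but a numbered placeholder registered in the compiler's `Metadata`. A before/after illustration of the generated SQL (table and column names invented):

```go
package main

import "fmt"

func main() {
	// Before: a fasttemplate tag spliced into a quoted literal, later
	// string-replaced (and requiring quote escaping):
	before := `ts_rank("products"."tsv", websearch_to_tsquery('{{query}}'))`

	// After: renderValueExp assigns the variable a positional
	// parameter, so the driver binds the value safely:
	after := `ts_rank("products"."tsv", websearch_to_tsquery($1))`

	fmt.Println(before)
	fmt.Println(after)
}
```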
@@ -4,17 +4,18 @@ package psql

 import (
 	"encoding/json"

 	"github.com/dosco/super-graph/core/internal/qcode"
 )

 var (
 	qcompileTest, _ = qcode.NewCompiler(qcode.Config{})

-	schema = GetTestSchema()
+	schema, _ = GetTestSchema()

-	vars = NewVariables(map[string]string{
+	vars = map[string]string{
 		"admin_account_id": "5",
-	})
+	}

 	pcompileTest = NewCompiler(Config{
 		Schema: schema,

@@ -24,6 +25,37 @@ var (

 // FuzzerEntrypoint for Fuzzbuzz
 func Fuzz(data []byte) int {
+	err1 := query(data)
+	err2 := insert(data)
+	err3 := update(data)
+	err4 := delete(data)
+
+	if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
+		return 0
+	}
+
+	return 1
+}
+
+func query(data []byte) error {
+	gql := data
+
+	qc, err1 := qcompileTest.Compile(gql, "user")
+
+	vars := map[string]json.RawMessage{
+		"data": json.RawMessage(data),
+	}
+
+	_, _, err2 := pcompileTest.CompileEx(qc, vars)
+
+	if err1 != nil {
+		return err1
+	} else {
+		return err2
+	}
+}
+
+func insert(data []byte) error {
 	gql := `mutation {
 		product(insert: $data) {
 			id

@@ -46,9 +78,57 @@ func insert(data []byte) error {
 	}

 	_, _, err = pcompileTest.CompileEx(qc, vars)
+	return err
+}
+
+func update(data []byte) error {
+	gql := `mutation {
+		product(insert: $data) {
+			id
+			name
+			user {
+				id
+				full_name
+				email
+			}
+		}
+	}`
+
+	qc, err := qcompileTest.Compile([]byte(gql), "user")
 	if err != nil {
-		return 0
+		panic("qcompile can't fail")
 	}

-	return 1
+	vars := map[string]json.RawMessage{
+		"data": json.RawMessage(data),
+	}
+
+	_, _, err = pcompileTest.CompileEx(qc, vars)
+	return err
+}
+
+func delete(data []byte) error {
+	gql := `mutation {
+		product(insert: $data) {
+			id
+			name
+			user {
+				id
+				full_name
+				email
+			}
+		}
+	}`
+
+	qc, err := qcompileTest.Compile([]byte(gql), "user")
+	if err != nil {
+		panic("qcompile can't fail")
+	}
+
+	vars := map[string]json.RawMessage{
+		"data": json.RawMessage(data),
+	}
+
+	_, _, err = pcompileTest.CompileEx(qc, vars)
+	return err
+}
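`Fuzz` follows the dvyukov/go-fuzz convention: return 1 when the input was accepted and is worth mutating further, 0 when it was rejected. A tiny sketch of that contract using the two crasher inputs from fuzz_test.go below; `classify` is a simplified stand-in for the real `Fuzz`:

```go
package main

import "fmt"

// classify stands in for psql.Fuzz, which pushes the input through
// the query/insert/update/delete compile paths and reports whether
// it was accepted.
func classify(data []byte) int {
	if len(data) == 0 {
		return 0 // rejected: not useful to mutate further
	}
	return 1 // accepted: the fuzzer should keep mutating this input
}

func main() {
	// The two known crashers exercised by TestFuzzCrashers below.
	for _, f := range []string{`{"connect":{}}`, "q(q{q{q{q{q{q{q{q{"} {
		fmt.Println(f, "=>", classify([]byte(f)))
	}
}
```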
core/internal/psql/fuzz_test.go: 20 changes (new file)

@@ -0,0 +1,20 @@
+// +build gofuzz
+
+package psql
+
+import (
+	"testing"
+)
+
+var ret int
+
+func TestFuzzCrashers(t *testing.T) {
+	var crashers = []string{
+		"{\"connect\":{}}",
+		"q(q{q{q{q{q{q{q{q{",
+	}
+
+	for _, f := range crashers {
+		ret = Fuzz([]byte(f))
+	}
+}
@@ -10,8 +10,8 @@ import (
 	"github.com/dosco/super-graph/core/internal/util"
 )

-func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
-	vars Variables, ti *DBTableInfo) (uint32, error) {
+func (c *compilerContext) renderInsert(
+	w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {

 	insert, ok := vars[qc.ActionVar]
 	if !ok {

@@ -25,9 +25,8 @@ func (c *compilerContext) renderInsert(
 	if insert[0] == '[' {
 		io.WriteString(c.w, `json_array_elements(`)
 	}
-	io.WriteString(c.w, `'{{`)
-	io.WriteString(c.w, qc.ActionVar)
-	io.WriteString(c.w, `}}' :: json`)
+	c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
+	io.WriteString(c.w, ` :: json`)
 	if insert[0] == '[' {
 		io.WriteString(c.w, `)`)
 	}

@@ -90,12 +89,12 @@ func (c *compilerContext) renderInsertStmt(qc *qcode.QCode, w io.Writer, item re
 	io.WriteString(w, `INSERT INTO `)
 	quoted(w, ti.Name)
 	io.WriteString(w, ` (`)
-	renderInsertUpdateColumns(w, qc, jt, ti, sk, false)
+	c.renderInsertUpdateColumns(qc, jt, ti, sk, false)
 	renderNestedInsertRelColumns(w, item.kvitem, false)
 	io.WriteString(w, `)`)

 	io.WriteString(w, ` SELECT `)
-	renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
+	c.renderInsertUpdateColumns(qc, jt, ti, sk, true)
 	renderNestedInsertRelColumns(w, item.kvitem, true)

 	io.WriteString(w, ` FROM "_sg_input" i`)
61
core/internal/psql/metadata.go
Normal file
61
core/internal/psql/metadata.go
Normal file
@ -0,0 +1,61 @@
|
package psql

import (
	"io"
)

// RenderVar writes vv to w, replacing each unescaped $name variable
// reference with a numbered parameter placeholder.
func (md *Metadata) RenderVar(w io.Writer, vv string) {
	f, s := -1, 0

	for i := range vv {
		v := vv[i]
		switch {
		case v == '$' && (i == 0 || vv[i-1] != '\\'):
			if (i - s) > 0 {
				_, _ = io.WriteString(w, vv[s:i])
			}
			f = i

		case (v < 'a' || v > 'z') &&
			(v < 'A' || v > 'Z') &&
			(v < '0' || v > '9') &&
			v != '_' &&
			f != -1 &&
			(i-f) > 1:
			md.renderValueExp(w, Param{Name: vv[f+1 : i]})
			s = i
			f = -1
		}
	}

	if f != -1 && (len(vv)-f) > 1 {
		md.renderValueExp(w, Param{Name: vv[f+1:]})
	} else {
		_, _ = io.WriteString(w, vv[s:])
	}
}

func (md *Metadata) renderValueExp(w io.Writer, p Param) {
	_, _ = io.WriteString(w, `$`)
	if v, ok := md.pindex[p.Name]; ok {
		int32String(w, int32(v))

	} else {
		md.params = append(md.params, p)
		n := len(md.params)

		if md.pindex == nil {
			md.pindex = make(map[string]int)
		}
		md.pindex[p.Name] = n
		int32String(w, int32(n))
	}
}

func (md Metadata) Skipped() uint32 {
	return md.skipped
}

func (md Metadata) Params() []Param {
	return md.params
}
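A minimal, self-contained sketch of the parameter-indexing behaviour above, with names lower-cased so it compiles outside the psql package (the real renderValueExp also carries a Param type and IsArray flag):

	package main

	import (
		"bytes"
		"fmt"
	)

	type param struct{ Name, Type string }

	type metadata struct {
		params []param
		pindex map[string]int
	}

	// renderValueExp writes $N for a named parameter, assigning the next
	// free index on first use and reusing that index on every later use.
	func (md *metadata) renderValueExp(w *bytes.Buffer, p param) {
		if md.pindex == nil {
			md.pindex = make(map[string]int)
		}
		n, ok := md.pindex[p.Name]
		if !ok {
			md.params = append(md.params, p)
			n = len(md.params)
			md.pindex[p.Name] = n
		}
		fmt.Fprintf(w, "$%d", n)
	}

	func main() {
		var w bytes.Buffer
		md := &metadata{}
		md.renderValueExp(&w, param{"data", "json"})
		w.WriteString(" ")
		md.renderValueExp(&w, param{"user_id", "bigint"})
		w.WriteString(" ")
		md.renderValueExp(&w, param{"data", "json"}) // repeated name, same index
		fmt.Println(w.String())                      // prints: $1 $2 $1
	}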
@ -6,6 +6,7 @@ import (
	"errors"
	"fmt"
	"io"
	"strings"

	"github.com/dosco/super-graph/core/internal/qcode"
	"github.com/dosco/super-graph/core/internal/util"
@ -33,42 +34,44 @@ var updateTypes = map[string]itemType{

var noLimit = qcode.Paging{NoLimit: true}

func (co *Compiler) compileMutation(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
func (co *Compiler) compileMutation(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
	md := Metadata{}

	if len(qc.Selects) == 0 {
		return 0, errors.New("empty query")
		return md, errors.New("empty query")
	}

	c := &compilerContext{w, qc.Selects, co}
	c := &compilerContext{md, w, qc.Selects, co}
	root := &qc.Selects[0]

	ti, err := c.schema.GetTable(root.Name)
	if err != nil {
		return 0, err
		return c.md, err
	}

	switch qc.Type {
	case qcode.QTInsert:
		if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
			return 0, err
		if _, err := c.renderInsert(w, qc, vars, ti); err != nil {
			return c.md, err
		}

	case qcode.QTUpdate:
		if _, err := c.renderUpdate(qc, w, vars, ti); err != nil {
			return 0, err
		if _, err := c.renderUpdate(w, qc, vars, ti); err != nil {
			return c.md, err
		}

	case qcode.QTUpsert:
		if _, err := c.renderUpsert(qc, w, vars, ti); err != nil {
			return 0, err
		if _, err := c.renderUpsert(w, qc, vars, ti); err != nil {
			return c.md, err
		}

	case qcode.QTDelete:
		if _, err := c.renderDelete(qc, w, vars, ti); err != nil {
			return 0, err
		if _, err := c.renderDelete(w, qc, vars, ti); err != nil {
			return c.md, err
		}

	default:
		return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
		return c.md, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
	}

	root.Paging = noLimit
@ -77,7 +80,7 @@ func (co *Compiler) compileMutation(qc *qcode.QCode, w io.Writer, vars Variables
	root.Where = nil
	root.Args = nil

	return c.compileQuery(qc, w, vars)
	return co.compileQueryWithMetadata(w, qc, vars, c.md)
}

type kvitem struct {
@ -365,12 +368,12 @@ func (c *compilerContext) renderUnionStmt(w io.Writer, item renitem) error {
	return nil
}

func renderInsertUpdateColumns(w io.Writer,
func (c *compilerContext) renderInsertUpdateColumns(
	qc *qcode.QCode,
	jt map[string]json.RawMessage,
	ti *DBTableInfo,
	skipcols map[string]struct{},
	values bool) (uint32, error) {
	isValues bool) (uint32, error) {

	root := &qc.Selects[0]
	renderedCol := false
@ -392,18 +395,18 @@ func renderInsertUpdateColumns(w io.Writer,
		}
	}
	if n != 0 {
		io.WriteString(w, `, `)
		io.WriteString(c.w, `, `)
	}

	if values {
		io.WriteString(w, `CAST( i.j ->>`)
		io.WriteString(w, `'`)
		io.WriteString(w, cn.Name)
		io.WriteString(w, `' AS `)
		io.WriteString(w, cn.Type)
		io.WriteString(w, `)`)
	if isValues {
		io.WriteString(c.w, `CAST( i.j ->>`)
		io.WriteString(c.w, `'`)
		io.WriteString(c.w, cn.Name)
		io.WriteString(c.w, `' AS `)
		io.WriteString(c.w, cn.Type)
		io.WriteString(c.w, `)`)
	} else {
		quoted(w, cn.Name)
		quoted(c.w, cn.Name)
	}

	if !renderedCol {
@ -422,16 +425,28 @@ func renderInsertUpdateColumns(w io.Writer,
		continue
	}
	if i != 0 || n != 0 {
		io.WriteString(w, `, `)
		io.WriteString(c.w, `, `)
	}

	if values {
		io.WriteString(w, `'`)
		io.WriteString(w, root.PresetMap[cn])
		io.WriteString(w, `' :: `)
		io.WriteString(w, col.Type)
	if isValues {
		val := root.PresetMap[cn]
		switch {
		case ok && len(val) > 1 && val[0] == '$':
			c.md.renderValueExp(c.w, Param{Name: val[1:], Type: col.Type})

		case ok && strings.HasPrefix(val, "sql:"):
			io.WriteString(c.w, `(`)
			c.md.RenderVar(c.w, val[4:])
			io.WriteString(c.w, `)`)

		case ok:
			squoted(c.w, val)
		}

		io.WriteString(c.w, ` :: `)
		io.WriteString(c.w, col.Type)
	} else {
		quoted(w, cn)
		quoted(c.w, cn)
	}

	if !renderedCol {
@ -440,15 +455,15 @@ func renderInsertUpdateColumns(w io.Writer,
	}

	if len(skipcols) != 0 && renderedCol {
		io.WriteString(w, `, `)
		io.WriteString(c.w, `, `)
	}
	return 0, nil
}

func (c *compilerContext) renderUpsert(qc *qcode.QCode, w io.Writer,
	vars Variables, ti *DBTableInfo) (uint32, error) {
	root := &qc.Selects[0]
func (c *compilerContext) renderUpsert(
	w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {

	root := &qc.Selects[0]
	upsert, ok := vars[qc.ActionVar]
	if !ok {
		return 0, fmt.Errorf("variable '%s' not defined", qc.ActionVar)
@ -466,7 +481,7 @@ func (c *compilerContext) renderUpsert(qc *qcode.QCode, w io.Writer,
		return 0, err
	}

	if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
	if _, err := c.renderInsert(w, qc, vars, ti); err != nil {
		return 0, err
	}

@ -527,6 +542,10 @@ func (c *compilerContext) renderConnectStmt(qc *qcode.QCode, w io.Writer,

	rel := item.relPC

	if rel == nil {
		return errors.New("invalid connect value")
	}

	// Render only for a parent-to-child one-to-one relationship.
	// For this to work the child needs to be found first so its primary key
	// can be set in the related column on the parent object.
@ -672,7 +691,7 @@ func renderCteName(w io.Writer, item kvitem) error {
	io.WriteString(w, item.ti.Name)
	if item._type == itemConnect || item._type == itemDisconnect {
		io.WriteString(w, `_`)
		int2string(w, item.id)
		int32String(w, item.id)
	}
	io.WriteString(w, `"`)
	return nil
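The preset switch above accepts three value forms. A rough sketch of the dispatch, with hypothetical preset values (the real code writes into an io.Writer and records params in Metadata):

	package main

	import (
		"fmt"
		"strings"
	)

	// renderPreset mirrors the three cases in renderInsertUpdateColumns:
	// "$name" becomes a bind parameter, "sql:..." is inlined (with any $vars
	// inside it parameterized by RenderVar), anything else is a quoted literal.
	func renderPreset(val, colType string, nextParam int) string {
		switch {
		case len(val) > 1 && val[0] == '$':
			return fmt.Sprintf("$%d :: %s", nextParam, colType)
		case strings.HasPrefix(val, "sql:"):
			return fmt.Sprintf("(%s) :: %s", val[4:], colType)
		default:
			return fmt.Sprintf("'%s' :: %s", val, colType)
		}
	}

	func main() {
		fmt.Println(renderPreset("$user_id", "bigint", 2))  // $2 :: bigint
		fmt.Println(renderPreset("sql:now()", "timestamp without time zone", 0))
		fmt.Println(renderPreset("now", "timestamp without time zone", 0))
	}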
@ -72,7 +72,7 @@ func delete(t *testing.T) {
// }
// }`

// sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
// sql := `WITH "users" AS (WITH "input" AS (SELECT '$1' :: json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`

// vars := map[string]json.RawMessage{
// 	"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
@ -97,7 +97,7 @@ func delete(t *testing.T) {
// }
// }`

// sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) UPDATE "users" SET ("full_name", "email") = (SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t) WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
// sql := `WITH "users" AS (WITH "input" AS (SELECT '$1' :: json AS j) UPDATE "users" SET ("full_name", "email") = (SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t) WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`

// vars := map[string]json.RawMessage{
// 	"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),

@ -139,9 +139,9 @@ func TestMain(m *testing.M) {
		log.Fatal(err)
	}

	vars := psql.NewVariables(map[string]string{
	vars := map[string]string{
		"admin_account_id": "5",
	})
	}

	pcompile = psql.NewCompiler(psql.Config{
		Schema: schema,

@ -7,6 +7,7 @@ import (
	"errors"
	"fmt"
	"io"
	"strconv"
	"strings"

	"github.com/dosco/super-graph/core/internal/qcode"
@ -17,6 +18,25 @@ const (
	closeBlock = 500
)

type Param struct {
	Name    string
	Type    string
	IsArray bool
}

type Metadata struct {
	skipped uint32
	params  []Param
	pindex  map[string]int
}

type compilerContext struct {
	md Metadata
	w  io.Writer
	s  []qcode.Select
	*Compiler
}

type Variables map[string]json.RawMessage

type Config struct {
@ -36,12 +56,12 @@ func NewCompiler(conf Config) *Compiler {
	}
}

func (c *Compiler) AddRelationship(child, parent string, rel *DBRel) error {
	return c.schema.SetRel(child, parent, rel)
func (co *Compiler) AddRelationship(child, parent string, rel *DBRel) error {
	return co.schema.SetRel(child, parent, rel)
}

func (c *Compiler) IDColumn(table string) (*DBColumn, error) {
	ti, err := c.schema.GetTable(table)
func (co *Compiler) IDColumn(table string) (*DBColumn, error) {
	ti, err := co.schema.GetTable(table)
	if err != nil {
		return nil, err
	}
@ -53,36 +73,47 @@ func (c *Compiler) IDColumn(table string) (*DBColumn, error) {
	return ti.PrimaryCol, nil
}

type compilerContext struct {
	w io.Writer
	s []qcode.Select
	*Compiler
}

func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (uint32, []byte, error) {
func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (Metadata, []byte, error) {
	w := &bytes.Buffer{}
	skipped, err := co.Compile(qc, w, vars)
	return skipped, w.Bytes(), err
	metad, err := co.Compile(w, qc, vars)
	return metad, w.Bytes(), err
}

func (co *Compiler) Compile(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
	return co.CompileWithMetadata(w, qc, vars, Metadata{})
}

func (co *Compiler) CompileWithMetadata(w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {
	md.skipped = 0

	if qc == nil {
		return md, fmt.Errorf("qcode is nil")
	}

	switch qc.Type {
	case qcode.QTQuery:
		return co.compileQuery(qc, w, vars)
	case qcode.QTInsert, qcode.QTUpdate, qcode.QTDelete, qcode.QTUpsert:
		return co.compileMutation(qc, w, vars)
		return co.compileQueryWithMetadata(w, qc, vars, md)

	case qcode.QTInsert,
		qcode.QTUpdate,
		qcode.QTDelete,
		qcode.QTUpsert:
		return co.compileMutation(w, qc, vars)

	default:
		return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
	}

	return 0, fmt.Errorf("Unknown operation type %d", qc.Type)
}

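With the new signatures the compiled SQL and its parameter list travel together. A hedged usage sketch, not code from this repo (pcompile, qc and vars are assumed to be set up as in the tests above; the internal import paths only resolve inside the module):

	package sketch

	import (
		"context"
		"database/sql"

		"github.com/dosco/super-graph/core/internal/psql"
		"github.com/dosco/super-graph/core/internal/qcode"
	)

	// runQuery compiles once, then executes with values bound in
	// Params() order: the i-th Param corresponds to placeholder $(i+1).
	func runQuery(ctx context.Context, db *sql.DB, pcompile *psql.Compiler,
		qc *qcode.QCode, vars psql.Variables) (*sql.Rows, error) {

		md, stmt, err := pcompile.CompileEx(qc, vars)
		if err != nil {
			return nil, err
		}

		args := make([]interface{}, 0, len(md.Params()))
		for _, p := range md.Params() {
			args = append(args, []byte(vars[p.Name]))
		}

		return db.QueryContext(ctx, string(stmt), args...)
	}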
func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
func (co *Compiler) compileQueryWithMetadata(
	w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {

	if len(qc.Selects) == 0 {
		return 0, errors.New("empty query")
		return md, errors.New("empty query")
	}

	c := &compilerContext{w, qc.Selects, co}

	c := &compilerContext{md, w, qc.Selects, co}
	st := NewIntStack()
	i := 0

@ -108,13 +139,11 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
		i++
	}

	var ignored uint32

	if st.Len() != 0 {
		io.WriteString(c.w, `) as "__root" FROM `)
	} else {
		io.WriteString(c.w, `) as "__root"`)
		return ignored, nil
		return c.md, nil
	}

	for {
@ -127,13 +156,14 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
		if id < closeBlock {
			sel := &c.s[id]

			if len(sel.Cols) == 0 {
				continue
			}

			ti, err := c.schema.GetTable(sel.Name)
			if err != nil {
				return 0, err
				return c.md, err
			}

			if sel.Type != qcode.STUnion {
				if len(sel.Cols) == 0 {
					continue
				}

				if sel.ParentID == -1 {
@ -146,21 +176,20 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
					c.renderPluralSelect(sel, ti)
				}

				skipped, err := c.renderSelect(sel, ti, vars)
				if err != nil {
					return 0, err
				if err := c.renderSelect(sel, ti, vars); err != nil {
					return c.md, err
				}
			}
			ignored |= skipped

			for _, cid := range sel.Children {
				if hasBit(skipped, uint32(cid)) {
				if hasBit(c.md.skipped, uint32(cid)) {
					continue
				}
				child := &c.s[cid]

				if child.SkipRender {
					continue
				}

				st.Push(child.ID + closeBlock)
				st.Push(child.ID)
			}
@ -168,9 +197,10 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
		} else {
			sel := &c.s[(id - closeBlock)]

			if sel.Type != qcode.STUnion {
				ti, err := c.schema.GetTable(sel.Name)
				if err != nil {
					return 0, err
					return c.md, err
				}

				io.WriteString(c.w, `)`)
@ -191,23 +221,24 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
			} else {
				c.renderLateralJoinClose(sel)
			}
		}

		if sel.Type != qcode.STMember {
			if len(sel.Args) != 0 {
				i := 0
				for _, v := range sel.Args {
					qcode.FreeNode(v, 500)
					i++
					qcode.FreeNode(v)
				}
			}
		}
	}
}

	return ignored, nil
	return c.md, nil
}

func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo) error {
	io.WriteString(c.w, `SELECT coalesce(jsonb_agg("__sj_`)
	int2string(c.w, sel.ID)
	int32String(c.w, sel.ID)
	io.WriteString(c.w, `"."json"), '[]') as "json"`)

	if sel.Paging.Type != qcode.PtOffset {
@ -231,7 +262,7 @@ func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo)
		io.WriteString(c.w, `, CONCAT_WS(','`)
		for i := 0; i < n; i++ {
			io.WriteString(c.w, `, max("__cur_`)
			int2string(c.w, int32(i))
			int32String(c.w, int32(i))
			io.WriteString(c.w, `")`)
		}
		io.WriteString(c.w, `) as "cursor"`)
@ -247,7 +278,7 @@ func (c *compilerContext) renderRootSelect(sel *qcode.Select) error {
	io.WriteString(c.w, `', `)

	io.WriteString(c.w, `"__sj_`)
	int2string(c.w, sel.ID)
	int32String(c.w, sel.ID)
	io.WriteString(c.w, `"."json"`)

	if sel.Paging.Type != qcode.PtOffset {
@ -256,16 +287,14 @@ func (c *compilerContext) renderRootSelect(sel *qcode.Select) error {
		io.WriteString(c.w, `_cursor', `)

		io.WriteString(c.w, `"__sj_`)
		int2string(c.w, sel.ID)
		int32String(c.w, sel.ID)
		io.WriteString(c.w, `"."cursor"`)
	}

	return nil
}

func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) (uint32, []*qcode.Column, error) {
	var skipped uint32

func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) ([]*qcode.Column, error) {
	cols := make([]*qcode.Column, 0, len(sel.Cols))
	colmap := make(map[string]struct{}, len(sel.Cols))

@ -307,9 +336,7 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va

	rel, err := c.schema.GetRel(child.Name, ti.Name)
	if err != nil {
		return 0, nil, err
		//skipped |= (1 << uint(id))
		//continue
		return nil, err
	}

	switch rel.Type {
@ -335,16 +362,25 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
		if _, ok := colmap[rel.Left.Col]; !ok {
			cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
			colmap[rel.Left.Col] = struct{}{}
			skipped |= (1 << uint(id))
			c.md.skipped |= (1 << uint(id))
		}

	case RelPolymorphic:
		if _, ok := colmap[rel.Left.Col]; !ok {
			cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Left.Col})
			colmap[rel.Left.Col] = struct{}{}
		}
		if _, ok := colmap[rel.Right.Table]; !ok {
			cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Right.Table, FieldName: rel.Right.Table})
			colmap[rel.Right.Table] = struct{}{}
		}

	default:
		return 0, nil, fmt.Errorf("unknown relationship %s", rel)
		//skipped |= (1 << uint(id))
		return nil, fmt.Errorf("unknown relationship %s", rel)
	}
	}

	return skipped, cols, nil
	return cols, nil
}

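The skipped field is a plain bitmask over child select IDs: initSelect sets bit id when a child is resolved through a remote join, and renderJoinColumns and compileQueryWithMetadata test that bit to leave the subtree out of the generated SQL. In miniature:

	package main

	import "fmt"

	// hasBit matches the helper at the bottom of this file.
	func hasBit(n, pos uint32) bool {
		return n&(1<<pos) > 0
	}

	func main() {
		var skipped uint32
		skipped |= 1 << uint(3) // child select ID 3 is handled remotely

		for _, cid := range []uint32{1, 2, 3} {
			fmt.Println(cid, "skipped:", hasBit(skipped, cid)) // only 3 prints true
		}
	}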
// This
@ -413,22 +449,30 @@ func (c *compilerContext) addSeekPredicate(sel *qcode.Select) error {
	return nil
}

func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) (uint32, error) {
func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) error {
	var rel *DBRel
	var err error

	// Relationships must be between union parents and their parents
	if sel.ParentID != -1 {
		parent := c.s[sel.ParentID]
		if sel.Type == qcode.STMember && sel.UParentID != -1 {
			cn := c.s[sel.ParentID].Name
			pn := c.s[sel.UParentID].Name
			rel, err = c.schema.GetRel(cn, pn)

		rel, err = c.schema.GetRel(ti.Name, parent.Name)
		if err != nil {
			return 0, err
		} else {
			pn := c.s[sel.ParentID].Name
			rel, err = c.schema.GetRel(ti.Name, pn)
		}
	}

	skipped, childCols, err := c.initSelect(sel, ti, vars)
	if err != nil {
		return 0, err
		return err
	}

	childCols, err := c.initSelect(sel, ti, vars)
	if err != nil {
		return err
	}

	// SELECT
@ -438,13 +482,13 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
	// }

	io.WriteString(c.w, `SELECT to_jsonb("__sr_`)
	int2string(c.w, sel.ID)
	int32String(c.w, sel.ID)
	io.WriteString(c.w, `".*) `)

	if sel.Paging.Type != qcode.PtOffset {
		for i := range sel.OrderBy {
			io.WriteString(c.w, `- '__cur_`)
			int2string(c.w, int32(i))
			int32String(c.w, int32(i))
			io.WriteString(c.w, `' `)
		}
	}
@ -454,15 +498,15 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
	if sel.Paging.Type != qcode.PtOffset {
		for i := range sel.OrderBy {
			io.WriteString(c.w, `, "__cur_`)
			int2string(c.w, int32(i))
			int32String(c.w, int32(i))
			io.WriteString(c.w, `"`)
		}
	}

	io.WriteString(c.w, `FROM (SELECT `)

	if err := c.renderColumns(sel, ti, skipped); err != nil {
		return 0, err
	if err := c.renderColumns(sel, ti); err != nil {
		return err
	}

	if sel.Paging.Type != qcode.PtOffset {
@ -470,7 +514,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
		io.WriteString(c.w, `, LAST_VALUE(`)
		colWithTableID(c.w, ti.Name, sel.ID, ob.Col)
		io.WriteString(c.w, `) OVER() AS "__cur_`)
		int2string(c.w, int32(i))
		int32String(c.w, int32(i))
		io.WriteString(c.w, `"`)
	}
}
@ -478,9 +522,8 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
	io.WriteString(c.w, ` FROM (`)

	// FROM (SELECT .... )
	err = c.renderBaseSelect(sel, ti, rel, childCols, skipped)
	if err != nil {
		return skipped, err
	if err = c.renderBaseSelect(sel, ti, rel, childCols); err != nil {
		return err
	}

	//fmt.Fprintf(w, `) AS "%s_%d"`, c.sel.Name, c.sel.ID)
@ -489,7 +532,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars

	// END-FROM

	return skipped, nil
	return nil
}

func (c *compilerContext) renderLateralJoin(sel *qcode.Select) error {
@ -510,36 +553,33 @@ func (c *compilerContext) renderJoin(sel *qcode.Select, ti *DBTableInfo) error {
}

func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
	rel, err := c.schema.GetRel(table, parent)
	if err != nil {
		return err
	}
	rel, _ := c.schema.GetRel(table, parent)

	// This join is only required for one-to-many relations since
	// these make use of join tables that need to be pulled in.
	if rel.Type != RelOneToManyThrough {
		return err
	if rel == nil || rel.Type != RelOneToManyThrough {
		return nil
	}

	pt, err := c.schema.GetTable(parent)
	if err != nil {
		return err
	}
	// pt, err := c.schema.GetTable(parent)
	// if err != nil {
	// 	return err
	// }

	//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
	//rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Left.Col)
	io.WriteString(c.w, ` LEFT OUTER JOIN "`)
	io.WriteString(c.w, rel.Through)
	io.WriteString(c.w, rel.Through.Table)
	io.WriteString(c.w, `" ON ((`)
	colWithTable(c.w, rel.Through, rel.ColT)
	colWithTable(c.w, rel.Through.Table, rel.Through.ColL)
	io.WriteString(c.w, `) = (`)
	colWithTableID(c.w, pt.Name, id, rel.Left.Col)
	colWithTable(c.w, rel.Left.Table, rel.Left.Col)
	io.WriteString(c.w, `))`)

	return nil
}

func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32) error {
func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo) error {
	i := 0
	var cn string

@ -575,7 +615,7 @@ func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skip

	i += c.renderRemoteRelColumns(sel, ti, i)

	return c.renderJoinColumns(sel, ti, skipped, i)
	return c.renderJoinColumns(sel, ti, i)
}

func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableInfo, colsRendered int) int {
@ -600,12 +640,12 @@ func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableI
	return i
}

func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32, colsRendered int) error {
func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo, colsRendered int) error {
	// columns previously rendered
	i := colsRendered

	for _, id := range sel.Children {
		if hasBit(skipped, uint32(id)) {
		if hasBit(c.md.skipped, uint32(id)) {
			continue
		}
		childSel := &c.s[id]
@ -620,14 +660,37 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
			continue
		}

		if childSel.Type == qcode.STUnion {
			rel, err := c.schema.GetRel(childSel.Name, ti.Name)
			if err != nil {
				return err
			}
			io.WriteString(c.w, `(CASE `)
			for _, uid := range childSel.Children {
				unionSel := &c.s[uid]

				io.WriteString(c.w, `WHEN `)
				colWithTableID(c.w, ti.Name, sel.ID, rel.Right.Table)
				io.WriteString(c.w, ` = `)
				squoted(c.w, unionSel.Name)
				io.WriteString(c.w, ` THEN `)
				io.WriteString(c.w, `"__sj_`)
				int2string(c.w, childSel.ID)
				int32String(c.w, unionSel.ID)
				io.WriteString(c.w, `"."json"`)
			}
			io.WriteString(c.w, `END)`)
			alias(c.w, childSel.FieldName)

		} else {
			io.WriteString(c.w, `"__sj_`)
			int32String(c.w, childSel.ID)
			io.WriteString(c.w, `"."json"`)
			alias(c.w, childSel.FieldName)
		}

		if childSel.Paging.Type != qcode.PtOffset {
			io.WriteString(c.w, `, "__sj_`)
			int2string(c.w, childSel.ID)
			int32String(c.w, childSel.ID)
			io.WriteString(c.w, `"."cursor" AS "`)
			io.WriteString(c.w, childSel.FieldName)
			io.WriteString(c.w, `_cursor"`)
@ -640,7 +703,7 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
}

func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, rel *DBRel,
	childCols []*qcode.Column, skipped uint32) error {
	childCols []*qcode.Column) error {
	isRoot := (rel == nil)
	isFil := (sel.Where != nil && sel.Where.Op != qcode.OpNop)
	hasOrder := len(sel.OrderBy) != 0
@ -655,7 +718,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
		c.renderDistinctOn(sel, ti)
	}

	realColsRendered, isAgg, err := c.renderBaseColumns(sel, ti, childCols, skipped)
	realColsRendered, isAgg, err := c.renderBaseColumns(sel, ti, childCols)
	if err != nil {
		return err
	}
@ -678,7 +741,8 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
	}

	io.WriteString(c.w, ` WHERE (`)
	if err := c.renderRelationship(sel, ti); err != nil {

	if err := c.renderRelationship(sel, rel); err != nil {
		return err
	}
	if isFil {
@ -710,7 +774,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
	case ti.IsSingular:
		io.WriteString(c.w, ` LIMIT ('1') :: integer`)

	case len(sel.Paging.Limit) != 0:
	case sel.Paging.Limit != "":
		//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
		io.WriteString(c.w, ` LIMIT ('`)
		io.WriteString(c.w, sel.Paging.Limit)
@ -723,7 +787,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
		io.WriteString(c.w, ` LIMIT ('20') :: integer`)
	}

	if len(sel.Paging.Offset) != 0 {
	if sel.Paging.Offset != "" {
		//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
		io.WriteString(c.w, ` OFFSET ('`)
		io.WriteString(c.w, sel.Paging.Offset)
@ -782,30 +846,35 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
			io.WriteString(c.w, `, `)
		}
		io.WriteString(c.w, `a[`)
		int2string(c.w, int32(i+1))
		int32String(c.w, int32(i+1))
		io.WriteString(c.w, `] as `)
		quoted(c.w, ob.Col)
	}
	io.WriteString(c.w, ` FROM string_to_array('{{cursor}}', ',') as a) `)
	io.WriteString(c.w, ` FROM string_to_array(`)
	c.md.renderValueExp(c.w, Param{Name: "cursor", Type: "json"})
	io.WriteString(c.w, `, ',') as a) `)
	return nil
}

func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
	parent := c.s[sel.ParentID]

	pti, err := c.schema.GetTable(parent.Name)
	if err != nil {
		return err
	}

	return c.renderRelationshipByName(ti.Name, pti.Name, parent.ID)
}

func (c *compilerContext) renderRelationshipByName(table, parent string, id int32) error {
func (c *compilerContext) renderRelationshipByName(table, parent string) error {
	rel, err := c.schema.GetRel(table, parent)
	if err != nil {
		return err
	}
	return c.renderRelationship(nil, rel)
}

func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) error {
	var pid int32

	switch {
	case sel == nil:
		pid = int32(-1)
	case sel.Type == qcode.STMember:
		pid = sel.UParentID
	default:
		pid = sel.ParentID
	}

	io.WriteString(c.w, `((`)

@ -817,19 +886,19 @@ func (c *compilerContext) renderRelationshipByName(table, parent string, id int3

	switch {
	case !rel.Left.Array && rel.Right.Array:
		colWithTable(c.w, table, rel.Left.Col)
		colWithTable(c.w, rel.Left.Table, rel.Left.Col)
		io.WriteString(c.w, `) = any (`)
		colWithTableID(c.w, parent, id, rel.Right.Col)
		colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)

	case rel.Left.Array && !rel.Right.Array:
		colWithTableID(c.w, parent, id, rel.Right.Col)
		colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
		io.WriteString(c.w, `) = any (`)
		colWithTable(c.w, table, rel.Left.Col)
		colWithTable(c.w, rel.Left.Table, rel.Left.Col)

	default:
		colWithTable(c.w, table, rel.Left.Col)
		colWithTable(c.w, rel.Left.Table, rel.Left.Col)
		io.WriteString(c.w, `) = (`)
		colWithTableID(c.w, parent, id, rel.Right.Col)
		colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
	}

case RelOneToManyThrough:
@ -839,25 +908,34 @@ func (c *compilerContext) renderRelationshipByName(table, parent string, id int3

	switch {
	case !rel.Left.Array && rel.Right.Array:
		colWithTable(c.w, table, rel.Left.Col)
		colWithTable(c.w, rel.Left.Table, rel.Left.Col)
		io.WriteString(c.w, `) = any (`)
		colWithTable(c.w, rel.Through, rel.Right.Col)
		colWithTable(c.w, rel.Through.Table, rel.Through.ColR)

	case rel.Left.Array && !rel.Right.Array:
		colWithTable(c.w, rel.Through, rel.Right.Col)
		colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
		io.WriteString(c.w, `) = any (`)
		colWithTable(c.w, table, rel.Left.Col)
		colWithTable(c.w, rel.Left.Table, rel.Left.Col)

	default:
		colWithTable(c.w, table, rel.Left.Col)
		colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
		io.WriteString(c.w, `) = (`)
		colWithTable(c.w, rel.Through, rel.Right.Col)
		colWithTable(c.w, rel.Right.Table, rel.Right.Col)
	}

case RelEmbedded:
	colWithTable(c.w, rel.Left.Table, rel.Left.Col)
	io.WriteString(c.w, `) = (`)
	colWithTableID(c.w, parent, id, rel.Left.Col)
	colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)

case RelPolymorphic:
	colWithTable(c.w, sel.Name, rel.Right.Col)
	io.WriteString(c.w, `) = (`)
	colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)
	io.WriteString(c.w, `) AND (`)
	colWithTableID(c.w, rel.Left.Table, pid, rel.Right.Table)
	io.WriteString(c.w, `) = (`)
	squoted(c.w, sel.Name)
}

io.WriteString(c.w, `))`)
@ -933,13 +1011,10 @@ func (c *compilerContext) renderExp(ex *qcode.Exp, ti *DBTableInfo, skipNested b
			return err
		}

	} else {
		//fmt.Fprintf(w, `(("%s"."%s") `, c.sel.Name, val.Col)
		if err := c.renderOp(val, ti); err != nil {
	} else if err := c.renderOp(val, ti); err != nil {
		return err
	}
	}
}
//qcode.FreeExp(val)

default:
@ -970,7 +1045,7 @@ func (c *compilerContext) renderNestedWhere(ex *qcode.Exp, ti *DBTableInfo) erro

	io.WriteString(c.w, ` WHERE `)

	if err := c.renderRelationshipByName(cti.Name, ti.Name, -1); err != nil {
	if err := c.renderRelationshipByName(cti.Name, ti.Name); err != nil {
		return err
	}

@ -999,7 +1074,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
		return nil
	}

	if len(ex.Col) != 0 {
	if ex.Col != "" {
		if col, ok = ti.ColMap[ex.Col]; !ok {
			return fmt.Errorf("no column '%s' found ", ex.Col)
		}
@ -1027,9 +1102,9 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
	case qcode.OpLesserThan:
		io.WriteString(c.w, `<`)
	case qcode.OpIn:
		io.WriteString(c.w, `IN`)
		io.WriteString(c.w, `= ANY`)
	case qcode.OpNotIn:
		io.WriteString(c.w, `NOT IN`)
		io.WriteString(c.w, `!= ANY`)
	case qcode.OpLike:
		io.WriteString(c.w, `LIKE`)
	case qcode.OpNotLike:
@ -1079,12 +1154,13 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
		io.WriteString(c.w, `((`)
		colWithTable(c.w, ti.Name, ti.TSVCol.Name)
		if c.schema.ver >= 110000 {
			io.WriteString(c.w, `) @@ websearch_to_tsquery('{{`)
			io.WriteString(c.w, `) @@ websearch_to_tsquery(`)
		} else {
			io.WriteString(c.w, `) @@ to_tsquery('{{`)
			io.WriteString(c.w, `) @@ to_tsquery(`)
		}
		io.WriteString(c.w, ex.Val)
		io.WriteString(c.w, `}}'))`)
		c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: "string"})
		io.WriteString(c.w, `))`)

		return nil

	default:
@ -1170,15 +1246,25 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
	val, ok := vars[ex.Val]
	switch {
	case ok && strings.HasPrefix(val, "sql:"):
		io.WriteString(c.w, ` (`)
		io.WriteString(c.w, val[4:])
		io.WriteString(c.w, `(`)
		c.md.RenderVar(c.w, val[4:])
		io.WriteString(c.w, `)`)

	case ok:
		squoted(c.w, val)

	case ex.Op == qcode.OpIn || ex.Op == qcode.OpNotIn:
		io.WriteString(c.w, `(ARRAY(SELECT json_array_elements_text(`)
		c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: true})
		io.WriteString(c.w, `))`)

		io.WriteString(c.w, ` :: `)
		io.WriteString(c.w, col.Type)
		io.WriteString(c.w, `[])`)
		return

	default:
		io.WriteString(c.w, ` '{{`)
		io.WriteString(c.w, ex.Val)
		io.WriteString(c.w, `}}'`)
		c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: false})
	}

case qcode.ValRef:
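For in and not-in filters the whole list now arrives as one JSON-array parameter, so the operator becomes = ANY / != ANY over an unpacked array. Assuming a bigint id column and a $list variable, the rendered predicate looks roughly like (("products"."id") = ANY (ARRAY(SELECT json_array_elements_text($1)) :: bigint[])), which lets the same prepared statement serve lists of any length.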
@ -1228,7 +1314,7 @@ func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
	return 0
}

func hasBit(n uint32, pos uint32) bool {
func hasBit(n, pos uint32) bool {
	val := n & (1 << pos)
	return (val > 0)
}
@ -1243,7 +1329,7 @@ func aliasWithID(w io.Writer, alias string, id int32) {
	io.WriteString(w, ` AS "`)
	io.WriteString(w, alias)
	io.WriteString(w, `_`)
	int2string(w, id)
	int32String(w, id)
	io.WriteString(w, `"`)
}

@ -1260,7 +1346,7 @@ func colWithTableID(w io.Writer, table string, id int32, col string) {
	io.WriteString(w, table)
	if id >= 0 {
		io.WriteString(w, `_`)
		int2string(w, id)
		int32String(w, id)
	}
	io.WriteString(w, `"."`)
	io.WriteString(w, col)
@ -1279,26 +1365,6 @@ func squoted(w io.Writer, identifier string) {
	io.WriteString(w, `'`)
}

const charset = "0123456789"

func int2string(w io.Writer, val int32) {
	if val < 10 {
		w.Write([]byte{charset[val]})
		return
	}

	temp := int32(0)
	val2 := val
	for val2 > 0 {
		temp *= 10
		temp += val2 % 10
		val2 = int32(float64(val2 / 10))
	}

	val3 := temp
	for val3 > 0 {
		d := val3 % 10
		val3 /= 10
		w.Write([]byte{charset[d]})
	}
func int32String(w io.Writer, val int32) {
	io.WriteString(w, strconv.FormatInt(int64(val), 10))
}
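A note on this swap: the hand-rolled digit reverser silently dropped trailing zeros (reversing 10 yields 1, so it printed 10 as "1"); strconv.FormatInt is correct for every value and removes the charset table along with the double loop.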
@ -32,6 +32,20 @@ func withComplexArgs(t *testing.T) {
	compileGQLToPSQL(t, gql, nil, "user")
}

func withWhereIn(t *testing.T) {
	gql := `query {
		products(where: { id: { in: $list } }) {
			id
		}
	}`

	vars := map[string]json.RawMessage{
		"list": json.RawMessage(`[1,2,3]`),
	}

	compileGQLToPSQL(t, gql, vars, "user")
}

func withWhereAndList(t *testing.T) {
	gql := `query {
		products(
@ -293,6 +307,100 @@ func multiRoot(t *testing.T) {
	compileGQLToPSQL(t, gql, nil, "user")
}

func withFragment1(t *testing.T) {
	gql := `
	fragment userFields1 on user {
		id
		email
	}

	query {
		users {
			...userFields2

			created_at
			...userFields1
		}
	}

	fragment userFields2 on user {
		first_name
		last_name
	}`

	compileGQLToPSQL(t, gql, nil, "anon")
}

func withFragment2(t *testing.T) {
	gql := `
	query {
		users {
			...userFields2

			created_at
			...userFields1
		}
	}

	fragment userFields1 on user {
		id
		email
	}

	fragment userFields2 on user {
		first_name
		last_name
	}`

	compileGQLToPSQL(t, gql, nil, "anon")
}

func withFragment3(t *testing.T) {
	gql := `

	fragment userFields1 on user {
		id
		email
	}

	fragment userFields2 on user {
		first_name
		last_name
	}

	query {
		users {
			...userFields2

			created_at
			...userFields1
		}
	}
	`

	compileGQLToPSQL(t, gql, nil, "anon")
}

// func withInlineFragment(t *testing.T) {
// 	gql := `
// 	query {
// 		users {
// 			... on users {
// 				id
// 				email
// 			}
// 			created_at
// 			... on user {
// 				first_name
// 				last_name
// 			}
// 		}
// 	}
// 	`

// 	compileGQLToPSQL(t, gql, nil, "anon")
// }

func withCursor(t *testing.T) {
	gql := `query {
		Products(
@ -367,6 +475,7 @@ func blockedFunctions(t *testing.T) {

func TestCompileQuery(t *testing.T) {
	t.Run("withComplexArgs", withComplexArgs)
	t.Run("withWhereIn", withWhereIn)
	t.Run("withWhereAndList", withWhereAndList)
	t.Run("withWhereIsNull", withWhereIsNull)
	t.Run("withWhereMultiOr", withWhereMultiOr)
@ -385,6 +494,10 @@ func TestCompileQuery(t *testing.T) {
	t.Run("queryWithVariables", queryWithVariables)
	t.Run("withWhereOnRelations", withWhereOnRelations)
	t.Run("multiRoot", multiRoot)
	t.Run("withFragment1", withFragment1)
	t.Run("withFragment2", withFragment2)
	t.Run("withFragment3", withFragment3)
	//t.Run("withInlineFragment", withInlineFragment)
	t.Run("jsonColumnAsTable", jsonColumnAsTable)
	t.Run("withCursor", withCursor)
	t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)
@ -429,7 +542,7 @@ func BenchmarkCompile(b *testing.B) {
		b.Fatal(err)
	}

	_, err = pcompile.Compile(qc, w, nil)
	_, err = pcompile.Compile(w, qc, nil)
	if err != nil {
		b.Fatal(err)
	}
@ -450,7 +563,7 @@ func BenchmarkCompileParallel(b *testing.B) {
		b.Fatal(err)
	}

	_, err = pcompile.Compile(qc, w, nil)
	_, err = pcompile.Compile(w, qc, nil)
	if err != nil {
		b.Fatal(err)
	}

@ -11,6 +11,7 @@ type DBSchema struct {
	ver int
	t   map[string]*DBTableInfo
	rm  map[string]map[string]*DBRel
	vt  map[string]*VirtualTable
	fm  map[string]*DBFunction
}

@ -33,14 +34,18 @@ const (
	RelOneToOne RelType = iota + 1
	RelOneToMany
	RelOneToManyThrough
	RelPolymorphic
	RelEmbedded
	RelRemote
)

type DBRel struct {
	Type    RelType
	Through string
	ColT    string
	Through struct {
		Table string
		ColL  string
		ColR  string
	}
	Left struct {
		col   *DBColumn
		Table string
@ -60,6 +65,7 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
		ver: info.Version,
		t:   make(map[string]*DBTableInfo),
		rm:  make(map[string]map[string]*DBRel),
		vt:  make(map[string]*VirtualTable),
		fm:  make(map[string]*DBFunction, len(info.Functions)),
	}

@ -70,6 +76,10 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
		}
	}

	if err := schema.virtualRels(info.VTables); err != nil {
		return nil, err
	}

	for i, t := range info.Tables {
		err := schema.firstDegreeRels(t, info.Columns[i])
		if err != nil {
@ -102,7 +112,7 @@ func (s *DBSchema) addTable(
	singular := flect.Singularize(t.Key)
	plural := flect.Pluralize(t.Key)

	s.t[singular] = &DBTableInfo{
	ts := &DBTableInfo{
		Name:       t.Name,
		Type:       t.Type,
		IsSingular: true,
@ -112,8 +122,9 @@ func (s *DBSchema) addTable(
		Singular:   singular,
		Plural:     plural,
	}
	s.t[singular] = ts

	s.t[plural] = &DBTableInfo{
	tp := &DBTableInfo{
		Name:       t.Name,
		Type:       t.Type,
		IsSingular: false,
@ -123,14 +134,15 @@ func (s *DBSchema) addTable(
		Singular:   singular,
		Plural:     plural,
	}
	s.t[plural] = tp

	if al, ok := aliases[t.Key]; ok {
		for i := range al {
			k1 := flect.Singularize(al[i])
			s.t[k1] = s.t[singular]
			s.t[k1] = ts

			k2 := flect.Pluralize(al[i])
			s.t[k2] = s.t[plural]
			s.t[k2] = tp
		}
	}

@ -154,6 +166,54 @@ func (s *DBSchema) addTable(
	return nil
}

func (s *DBSchema) virtualRels(vts []VirtualTable) error {
	for _, vt := range vts {
		vt := vt // copy: the map must not point at the shared loop variable
		s.vt[vt.Name] = &vt

		for _, t := range s.t {
			idCol, ok := t.ColMap[vt.IDColumn]
			if !ok {
				continue
			}
			if _, ok = t.ColMap[vt.TypeColumn]; !ok {
				continue
			}

			nt := DBTable{
				ID:   -1,
				Name: vt.Name,
				Key:  strings.ToLower(vt.Name),
				Type: "virtual",
			}

			if err := s.addTable(nt, nil, nil); err != nil {
				return err
			}

			rel := &DBRel{Type: RelPolymorphic}
			rel.Left.col = idCol
			rel.Left.Table = t.Name
			rel.Left.Col = idCol.Name

			rcol := DBColumn{
				Name: vt.FKeyColumn,
				Key:  strings.ToLower(vt.FKeyColumn),
				Type: idCol.Type,
			}

			rel.Right.col = &rcol
			rel.Right.Table = vt.TypeColumn
			rel.Right.Col = rcol.Name

			if err := s.SetRel(vt.Name, t.Name, rel); err != nil {
				return err
			}
		}
	}

	return nil
}
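To make the polymorphic case concrete, a hypothetical setup (not part of this repo's test schema): a comments table carrying subject_id and subject_type columns that can point at either users or products would be described as

	vt := VirtualTable{
		Name:       "subject",
		IDColumn:   "subject_id",
		TypeColumn: "subject_type",
		FKeyColumn: "id",
	}

and the RelPolymorphic case in renderRelationship then joins comments.subject_id to users.id while guarding it with comments.subject_type = 'users'.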

func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
	ct := t.Key
	cti, ok := s.t[ct]
@ -164,7 +224,7 @@ func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
	for i := range cols {
		c := cols[i]

		if len(c.FKeyTable) == 0 {
		if c.FKeyTable == "" {
			continue
		}

@ -268,7 +328,7 @@ func (s *DBSchema) secondDegreeRels(t DBTable, cols []DBColumn) error {
	for i := range cols {
		c := cols[i]

		if len(c.FKeyTable) == 0 {
		if c.FKeyTable == "" {
			continue
		}

@ -344,16 +404,17 @@ func (s *DBSchema) updateSchemaOTMT(
	// One-to-many-through relation between the 1st foreign key table and the
	// 2nd foreign key table
	rel1 := &DBRel{Type: RelOneToManyThrough}
	rel1.Through = ti.Name
	rel1.ColT = col2.Name
	rel1.Through.Table = ti.Name
	rel1.Through.ColL = col1.Name
	rel1.Through.ColR = col2.Name

	rel1.Left.col = &col2
	rel1.Left.Table = col2.FKeyTable
	rel1.Left.Col = fc2.Name
	rel1.Left.col = fc1
	rel1.Left.Table = col1.FKeyTable
	rel1.Left.Col = fc1.Name

	rel1.Right.col = &col1
	rel1.Right.Table = ti.Name
	rel1.Right.Col = col1.Name
	rel1.Right.col = fc2
	rel1.Right.Table = t2
	rel1.Right.Col = fc2.Name

	if err := s.SetRel(t1, t2, rel1); err != nil {
		return err
@ -362,16 +423,17 @@ func (s *DBSchema) updateSchemaOTMT(
	// One-to-many-through relation between the 2nd foreign key table and the
	// 1st foreign key table
	rel2 := &DBRel{Type: RelOneToManyThrough}
	rel2.Through = ti.Name
	rel2.ColT = col1.Name
	rel2.Through.Table = ti.Name
	rel2.Through.ColL = col2.Name
	rel2.Through.ColR = col1.Name

	rel1.Left.col = fc1
	rel2.Left.Table = col1.FKeyTable
	rel2.Left.Col = fc1.Name
	rel2.Left.col = fc2
	rel2.Left.Table = col2.FKeyTable
	rel2.Left.Col = fc2.Name

	rel1.Right.col = &col2
	rel2.Right.Table = ti.Name
	rel2.Right.Col = col2.Name
	rel2.Right.col = fc1
	rel2.Right.Table = t1
	rel2.Right.Col = fc1.Name

	if err := s.SetRel(t2, t1, rel2); err != nil {
		return err
@ -14,14 +14,18 @@ func (rt RelType) String() string {
		return "remote"
	case RelEmbedded:
		return "embedded"
	case RelPolymorphic:
		return "polymorphic"
	}
	return ""
}

func (re *DBRel) String() string {
	if re.Type == RelOneToManyThrough {
		return fmt.Sprintf("'%s.%s' --(Through: %s)--> '%s.%s'",
			re.Left.Table, re.Left.Col, re.Through, re.Right.Table, re.Right.Col)
		return fmt.Sprintf("'%s.%s' --(%s.%s, %s.%s)--> '%s.%s'",
			re.Left.Table, re.Left.Col,
			re.Through.Table, re.Through.ColL, re.Through.Table, re.Through.ColR,
			re.Right.Table, re.Right.Col)
	}
	return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'",
		re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col)
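With the new format a many-to-many link names both join-table columns; for instance a customers-to-products relation through purchases would print as 'customers.id' --(purchases.customer_id, purchases.product_id)--> 'products.id' (tables borrowed from the test schema; the exact columns depend on the declared foreign keys).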
@ -14,9 +14,17 @@ type DBInfo struct {
	Tables    []DBTable
	Columns   [][]DBColumn
	Functions []DBFunction
	VTables   []VirtualTable
	colMap    map[string]map[string]*DBColumn
}

type VirtualTable struct {
	Name       string
	IDColumn   string
	TypeColumn string
	FKeyColumn string
}

func GetDBInfo(db *sql.DB, schema string) (*DBInfo, error) {
	di := &DBInfo{}
	var version string

@ -1,26 +1,26 @@
=== RUN TestCompileInsert
=== RUN TestCompileInsert/simpleInsert
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/singleInsert
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/bulkInsert
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/simpleInsertWithPresets
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, $2 :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertManyToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOne
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileInsert (0.02s)
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileInsert (0.03s)
--- PASS: TestCompileInsert/simpleInsert (0.00s)
--- PASS: TestCompileInsert/singleInsert (0.00s)
--- PASS: TestCompileInsert/bulkInsert (0.00s)
@@ -33,14 +33,14 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id"
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
=== RUN TestCompileMutate
=== RUN TestCompileMutate/singleUpsert
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileMutate/singleUpsertWhere
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileMutate/bulkUpsert
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileMutate/delete
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileMutate (0.00s)
--- PASS: TestCompileMutate (0.01s)
--- PASS: TestCompileMutate/singleUpsert (0.00s)
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
@@ -48,6 +48,8 @@ WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :
=== RUN TestCompileQuery
=== RUN TestCompileQuery/withComplexArgs
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withWhereIn
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = ANY (ARRAY(SELECT json_array_elements_text($1)) :: bigint[])))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withWhereAndList
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withWhereIsNull
@@ -55,9 +57,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
=== RUN TestCompileQuery/withWhereMultiOr
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/fetchByID
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = $1 :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/searchQuery
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery($1)) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery($1)) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery($1))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/oneToMany
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/oneToManyReverse
@@ -65,9 +67,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
=== RUN TestCompileQuery/oneToManyArray
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/manyToMany
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/manyToManyReverse
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products"."id")) WHERE ((("purchases"."customer_id") = ("customers"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunction
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunctionBlockedByCol
@@ -77,25 +79,32 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
=== RUN TestCompileQuery/aggFunctionWithFilter
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/syntheticTables
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = $1 :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/queryWithVariables
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = $1 :: numeric(7,2)) AND (("products"."id") = $2 :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/withWhereOnRelations
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/multiRoot
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment1
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment2
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment3
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/jsonColumnAsTable
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withCursor
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array($1, ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/blockedQuery
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/blockedFunctions
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
--- PASS: TestCompileQuery (0.02s)
--- PASS: TestCompileQuery (0.03s)
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
--- PASS: TestCompileQuery/withWhereIn (0.00s)
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
--- PASS: TestCompileQuery/withWhereIsNull (0.00s)
--- PASS: TestCompileQuery/withWhereMultiOr (0.00s)
@@ -114,6 +123,9 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
--- PASS: TestCompileQuery/queryWithVariables (0.00s)
--- PASS: TestCompileQuery/withWhereOnRelations (0.00s)
--- PASS: TestCompileQuery/multiRoot (0.00s)
--- PASS: TestCompileQuery/withFragment1 (0.00s)
--- PASS: TestCompileQuery/withFragment2 (0.00s)
--- PASS: TestCompileQuery/withFragment3 (0.00s)
--- PASS: TestCompileQuery/jsonColumnAsTable (0.00s)
--- PASS: TestCompileQuery/withCursor (0.00s)
--- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s)
@@ -121,23 +133,23 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
=== RUN TestCompileUpdate
=== RUN TestCompileUpdate/singleUpdate
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = $2 :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/simpleUpdateWithPresets
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateManyToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = $2 :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = $2 :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileUpdate/nestedUpdateOneToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileUpdate/nestedUpdateOneToOne
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileUpdate/nestedUpdateOneToManyWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = $2 :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileUpdate/nestedUpdateOneToOneWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN   TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileUpdate (0.02s)
    --- PASS: TestCompileUpdate/singleUpdate (0.00s)
    --- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s)
@ -148,4 +160,4 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FR
    --- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
    --- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
ok  	github.com/dosco/super-graph/core/internal/psql	0.306s
ok  	github.com/dosco/super-graph/core/internal/psql	0.323s
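The expected outputs above capture the core of this change: the compiled mutations no longer embed `'{{data}}'`-style template placeholders in the SQL text but emit standard Postgres numbered parameters (`$1`, `$2`), so statements can be prepared once and executed with bound values. A minimal sketch of executing one of these statements with database/sql; the statement text, id value, and JSON payload here are illustrative, not taken from the repo:

// Sketch only: binds values to the numbered parameters the compiler
// now emits ($1 = the JSON input, $2 = the row id). db and stmt are
// assumed to come from the surrounding application.
package example

import "database/sql"

func execUpdate(db *sql.DB, stmt string) ([]byte, error) {
	data := `{"name": "Apple", "price": 1.25}` // hypothetical payload

	var root []byte
	if err := db.QueryRow(stmt, data, 1).Scan(&root); err != nil {
		return nil, err
	}
	return root, nil
}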
@ -10,8 +10,8 @@ import (
	"github.com/dosco/super-graph/core/internal/util"
)

func (c *compilerContext) renderUpdate(qc *qcode.QCode, w io.Writer,
	vars Variables, ti *DBTableInfo) (uint32, error) {
func (c *compilerContext) renderUpdate(
	w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {

	update, ok := vars[qc.ActionVar]
	if !ok {
@ -21,9 +21,10 @@ func (c *compilerContext) renderUpdate(qc *qcode.QCode, w io.Writer,
		return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar)
	}

	io.WriteString(c.w, `WITH "_sg_input" AS (SELECT '{{`)
	io.WriteString(c.w, qc.ActionVar)
	io.WriteString(c.w, `}}' :: json AS j)`)
	io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
	c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
	// io.WriteString(c.w, qc.ActionVar)
	io.WriteString(c.w, ` :: json AS j)`)

	st := util.NewStack()
	st.Push(kvitem{_type: itemUpdate, key: ti.Name, val: update, ti: ti})
@ -84,11 +85,11 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
	io.WriteString(w, `UPDATE `)
	quoted(w, ti.Name)
	io.WriteString(w, ` SET (`)
	renderInsertUpdateColumns(w, qc, jt, ti, sk, false)
	c.renderInsertUpdateColumns(qc, jt, ti, sk, false)
	renderNestedUpdateRelColumns(w, item.kvitem, false)

	io.WriteString(w, `) = (SELECT `)
	renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
	c.renderInsertUpdateColumns(qc, jt, ti, sk, true)
	renderNestedUpdateRelColumns(w, item.kvitem, true)

	io.WriteString(w, ` FROM "_sg_input" i`)
@ -120,14 +121,12 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
		}
		io.WriteString(w, `)`)

	} else {
		if qc.Selects[0].Where != nil {
			io.WriteString(w, ` WHERE `)
	} else if qc.Selects[0].Where != nil {
		io.WriteString(w, `WHERE `)
		if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
			return err
		}
	}
	}

	io.WriteString(w, ` RETURNING `)
	quoted(w, ti.Name)
@ -197,8 +196,9 @@ func renderNestedUpdateRelTables(w io.Writer, item kvitem) error {
	return nil
}

func (c *compilerContext) renderDelete(qc *qcode.QCode, w io.Writer,
	vars Variables, ti *DBTableInfo) (uint32, error) {
func (c *compilerContext) renderDelete(
	w io.Writer, qc *qcode.QCode, vars Variables, ti *DBTableInfo) (uint32, error) {

	root := &qc.Selects[0]

	io.WriteString(c.w, `WITH `)
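The interesting line above is the switch from writing `'{{` + ActionVar + `}}'` into the SQL to calling `c.md.renderValueExp(c.w, Param{...})`. Presumably the metadata collector assigns each named parameter a position and writes `$N`; a self-contained sketch of that idea, with simplified stand-in types and names rather than the real implementation:

package main

import (
	"bytes"
	"fmt"
	"io"
)

// Param is a simplified stand-in for the compiler's parameter descriptor.
type Param struct {
	Name string
	Type string
}

type metadata struct {
	params []Param
}

// renderValueExp writes the next numbered placeholder ($1, $2, ...)
// and records the parameter so values can be bound in order later.
func (md *metadata) renderValueExp(w io.Writer, p Param) {
	md.params = append(md.params, p)
	fmt.Fprintf(w, "$%d", len(md.params))
}

func main() {
	var md metadata
	var buf bytes.Buffer

	buf.WriteString(`WITH "_sg_input" AS (SELECT `)
	md.renderValueExp(&buf, Param{Name: "data", Type: "json"})
	buf.WriteString(` :: json AS j)`)

	fmt.Println(buf.String())
	// Output: WITH "_sg_input" AS (SELECT $1 :: json AS j)
}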
@ -223,7 +223,7 @@ func nestedUpdateOneToOneWithDisconnect(t *testing.T) {
// }
// }`
// sql := `WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = 2) RETURNING "products".*) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
// sql := `WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = 2) RETURNING "products".*) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
// vars := map[string]json.RawMessage{
// "data": json.RawMessage(`{
@ -1,13 +0,0 @@
package psql

import "regexp"

func NewVariables(varlist map[string]string) map[string]string {
	re := regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
	vars := make(map[string]string, len(varlist))

	for k, v := range varlist {
		vars[k] = re.ReplaceAllString(v, `{{$1}}`)
	}
	return vars
}
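For reference, the deleted helper's only job was to rewrite `$var` references in configured queries into `{{var}}` template placeholders; with parameters now rendered as numbered placeholders at compile time it has no remaining callers. Its behavior in isolation, as a standalone sketch:

package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern the deleted NewVariables used.
	re := regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)

	v := re.ReplaceAllString("id = $user_id", `{{$1}}`)
	fmt.Println(v) // id = {{user_id}}
}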
11	core/internal/qcode/bench.10	Normal file
@ -0,0 +1,11 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16        	  120888	      9236 ns/op	    3755 B/op	      28 allocs/op
BenchmarkQCompileP-16       	  502248	      2620 ns/op	    3795 B/op	      28 allocs/op
BenchmarkParse-16           	  128370	      9294 ns/op	    3902 B/op	      18 allocs/op
BenchmarkParseP-16          	  575752	      2340 ns/op	    3903 B/op	      18 allocs/op
BenchmarkSchemaParse-16     	  212048	      5779 ns/op	    3968 B/op	      57 allocs/op
BenchmarkSchemaParseP-16    	  630918	      1686 ns/op	    3968 B/op	      57 allocs/op
PASS
ok  	github.com/dosco/super-graph/core/internal/qcode	7.710s
13	core/internal/qcode/bench.11	Normal file
@ -0,0 +1,13 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16            	  118282	      9686 ns/op	    4031 B/op	      30 allocs/op
BenchmarkQCompileP-16           	  427531	      2710 ns/op	    4077 B/op	      30 allocs/op
BenchmarkQCompileFragment-16    	  140588	      8328 ns/op	    8903 B/op	      13 allocs/op
BenchmarkParse-16               	  131396	      9212 ns/op	    4175 B/op	      18 allocs/op
BenchmarkParseP-16              	  503778	      2310 ns/op	    4176 B/op	      18 allocs/op
BenchmarkParseFragment-16       	  143725	      8158 ns/op	   10193 B/op	       9 allocs/op
BenchmarkSchemaParse-16         	  240609	      5060 ns/op	    3968 B/op	      57 allocs/op
BenchmarkSchemaParseP-16        	  785116	      1534 ns/op	    3968 B/op	      57 allocs/op
PASS
ok  	github.com/dosco/super-graph/core/internal/qcode	11.092s
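Comparing the two runs: fragment support costs roughly 5% per compile on this machine (BenchmarkQCompile goes from 9236 to 9686 ns/op and from 28 to 30 allocs/op) in exchange for the new BenchmarkQCompileFragment and BenchmarkParseFragment paths, while schema parsing actually improved (5779 to 5060 ns/op). The output format suggests these were captured with something like `go test -bench=. -benchmem` in `core/internal/qcode`, though the exact invocation is not recorded in the diff.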
@ -1,12 +1,12 @@
package qcode

import (
	"regexp"
	"sort"
	"strings"
)

type Config struct {
	DefaultBlock bool
	Blocklist    []string
}

@ -46,7 +46,6 @@ type TRConfig struct {
}

type trval struct {
	readOnly bool
	query    struct {
		limit string
		fil   *Exp
@ -132,12 +131,3 @@ func mapToList(m map[string]string) []string {
	sort.Strings(list)
	return list
}

var varRe = regexp.MustCompile(`\$([a-zA-Z0-9_]+)`)

func parsePresets(m map[string]string) map[string]string {
	for k, v := range m {
		m[k] = varRe.ReplaceAllString(v, `{{$1}}`)
	}
	return m
}
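Two things happen in this file: the `$var`-to-`{{var}}` preset rewriting (and its `regexp` import) moves out of the config layer, and `Config` gains `DefaultBlock`, which makes the old always-on behavior of blocking unconfigured tables for the anon role opt-in. A sketch of the new knob from a caller inside the module; the wiring is illustrative, and `qcode` is an internal package:

// Sketch only; core/internal/qcode is internal, so this compiles
// only from within the super-graph module itself.
package example

import "github.com/dosco/super-graph/core/internal/qcode"

func newAnonBlockingCompiler() (*qcode.Compiler, error) {
	return qcode.NewCompiler(qcode.Config{
		// Restores the old always-block behavior for unconfigured tables
		// when compiling as the anon role; leave false to let them render.
		DefaultBlock: true,
		Blocklist:    []string{"schema_migrations"},
	})
}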
@ -11,15 +11,18 @@ import (
var (
	queryToken        = []byte("query")
	mutationToken     = []byte("mutation")
	fragmentToken     = []byte("fragment")
	subscriptionToken = []byte("subscription")
	onToken           = []byte("on")
	trueToken         = []byte("true")
	falseToken        = []byte("false")
	quotesToken       = []byte(`'"`)
	signsToken        = []byte(`+-`)
	punctuatorToken   = []byte(`!():=[]{|}`)
	spreadToken       = []byte(`...`)
	digitToken        = []byte(`0123456789`)
	dotToken          = []byte(`.`)

	punctuatorToken = `!():=[]{|}`
)

// Pos represents a byte position in the original input text from which
@ -43,6 +46,8 @@ const (
	itemName
	itemQuery
	itemMutation
	itemFragment
	itemOn
	itemSub
	itemPunctuator
	itemArgsOpen
@ -136,8 +141,7 @@ func (l *lexer) current() (Pos, Pos) {
func (l *lexer) emit(t itemType) {
	l.items = append(l.items, item{t, l.start, l.pos, l.line})
	// Some items contain text internally. If so, count their newlines.
	switch t {
	case itemStringVal:
	if t == itemStringVal {
		for i := l.start; i < l.pos; i++ {
			if l.input[i] == '\n' {
				l.line++
@ -263,12 +267,12 @@ func lexRoot(l *lexer) stateFn {
		l.backup()
		return lexString
	case r == '.':
		if len(l.input) >= 3 {
			if equals(l.input, 0, 3, spreadToken) {
		l.acceptRun(dotToken)
		s, e := l.current()
		if equals(l.input, s, e, spreadToken) {
			l.emit(itemSpread)
			return lexRoot
		}
		}
		fallthrough // '.' can start a number.
	case r == '+' || r == '-' || ('0' <= r && r <= '9'):
		l.backup()
@ -299,10 +303,14 @@ func lexName(l *lexer) stateFn {
	switch {
	case equals(l.input, s, e, queryToken):
		l.emitL(itemQuery)
	case equals(l.input, s, e, fragmentToken):
		l.emitL(itemFragment)
	case equals(l.input, s, e, mutationToken):
		l.emitL(itemMutation)
	case equals(l.input, s, e, subscriptionToken):
		l.emitL(itemSub)
	case equals(l.input, s, e, onToken):
		l.emitL(itemOn)
	case equals(l.input, s, e, trueToken):
		l.emitL(itemBoolVal)
	case equals(l.input, s, e, falseToken):
@ -395,35 +403,15 @@ func isAlphaNumeric(r rune) bool {
	return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
}

func equals(b []byte, s Pos, e Pos, val []byte) bool {
	n := 0
	for i := s; i < e; i++ {
		if n >= len(val) {
			return true
		}
		switch {
		case b[i] >= 'A' && b[i] <= 'Z' && ('a'+(b[i]-'A')) != val[n]:
			return false
		case b[i] != val[n]:
			return false
		}
		n++
	}
	return true
func equals(b []byte, s, e Pos, val []byte) bool {
	return bytes.EqualFold(b[s:e], val)
}

func contains(b []byte, s Pos, e Pos, val []byte) bool {
	for i := s; i < e; i++ {
		for n := 0; n < len(val); n++ {
			if b[i] == val[n] {
				return true
			}
		}
	}
	return false
func contains(b []byte, s, e Pos, chars string) bool {
	return bytes.ContainsAny(b[s:e], chars)
}

func lowercase(b []byte, s Pos, e Pos) {
func lowercase(b []byte, s, e Pos) {
	for i := s; i < e; i++ {
		if b[i] >= 'A' && b[i] <= 'Z' {
			b[i] = ('a' + (b[i] - 'A'))
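The hand-rolled comparison loops are replaced by their standard-library equivalents. One behavioral nuance worth noting: the old `equals` returned true whenever one side was a case-insensitive prefix of the other, while `bytes.EqualFold` requires the full spans to match, which is stricter and almost certainly the intended semantics for keyword matching. A quick standalone check of the new helpers:

package main

import (
	"bytes"
	"fmt"
)

func main() {
	// bytes.EqualFold: case-insensitive comparison of whole tokens.
	fmt.Println(bytes.EqualFold([]byte("MuTaTiOn"), []byte("mutation"))) // true
	fmt.Println(bytes.EqualFold([]byte("que"), []byte("query")))        // false: no prefix matching

	// bytes.ContainsAny: does the span contain any punctuator character?
	fmt.Println(bytes.ContainsAny([]byte("a:b"), `!():=[]{|}`)) // true
}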
@ -3,10 +3,9 @@ package qcode
import (
	"errors"
	"fmt"
	"hash/maphash"
	"sync"
	"unsafe"

	"github.com/dosco/super-graph/core/internal/util"
)

var (
@ -50,6 +49,19 @@ func (o *Operation) Reset() {
	*o = zeroOperation
}

type Fragment struct {
	Name    string
	On      string
	Fields  []Field
	fieldsA [10]Field
}

var zeroFragment = Fragment{}

func (f *Fragment) Reset() {
	*f = zeroFragment
}

type Field struct {
	ID       int32
	ParentID int32
@ -59,11 +71,13 @@ type Field struct {
	argsA     [5]Arg
	Children  []int32
	childrenA [5]int32
	Union     bool
}

type Arg struct {
	Name string
	Val  *Node
	df   bool
}

type Node struct {
@ -82,6 +96,8 @@ func (n *Node) Reset() {
}

type Parser struct {
	frags map[uint64]*Fragment
	h     maphash.Hash
	input []byte // the string being scanned
	pos   int
	items []item
@ -96,12 +112,192 @@ var opPool = sync.Pool{
	New: func() interface{} { return new(Operation) },
}

var fragPool = sync.Pool{
	New: func() interface{} { return new(Fragment) },
}

var lexPool = sync.Pool{
	New: func() interface{} { return new(lexer) },
}

func Parse(gql []byte) (*Operation, error) {
	return parseSelectionSet(gql)
	var err error

	if len(gql) == 0 {
		return nil, errors.New("blank query")
	}

	l := lexPool.Get().(*lexer)
	l.Reset()
	defer lexPool.Put(l)

	if err = lex(l, gql); err != nil {
		return nil, err
	}

	p := &Parser{
		input: l.input,
		pos:   -1,
		items: l.items,
	}

	op := opPool.Get().(*Operation)
	op.Reset()
	op.Fields = op.fieldsA[:0]

	s := -1
	qf := false

	for {
		if p.peek(itemEOF) {
			p.ignore()
			break
		}

		if p.peek(itemFragment) {
			p.ignore()
			if f, err := p.parseFragment(); err != nil {
				fragPool.Put(f)
				return nil, err
			}

		} else {
			if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
				s = p.pos
				qf = true
			}
			p.ignore()
		}
	}

	p.reset(s)
	if err := p.parseOp(op); err != nil {
		return nil, err
	}

	for _, v := range p.frags {
		fragPool.Put(v)
	}

	return op, nil
}

func (p *Parser) parseFragment() (*Fragment, error) {
	var err error

	frag := fragPool.Get().(*Fragment)
	frag.Reset()
	frag.Fields = frag.fieldsA[:0]

	if p.peek(itemName) {
		frag.Name = p.val(p.next())
	} else {
		return frag, errors.New("fragment: missing name")
	}

	if p.peek(itemOn) {
		p.ignore()
	} else {
		return frag, errors.New("fragment: missing 'on' keyword")
	}

	if p.peek(itemName) {
		frag.On = p.vall(p.next())
	} else {
		return frag, errors.New("fragment: missing table name after 'on' keyword")
	}

	if p.peek(itemObjOpen) {
		p.ignore()
	} else {
		return frag, fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
	}

	frag.Fields, err = p.parseFields(frag.Fields)
	if err != nil {
		return frag, fmt.Errorf("fragment: %v", err)
	}

	if p.frags == nil {
		p.frags = make(map[uint64]*Fragment)
	}

	_, _ = p.h.WriteString(frag.Name)
	k := p.h.Sum64()
	p.h.Reset()

	p.frags[k] = frag

	return frag, nil
}

func (p *Parser) parseOp(op *Operation) error {
	var err error
	var typeSet bool

	if p.peek(itemQuery, itemMutation, itemSub) {
		err = p.parseOpTypeAndArgs(op)

		if err != nil {
			return fmt.Errorf("%s: %v", op.Type, err)
		}
		typeSet = true
	}

	if p.peek(itemObjOpen) {
		p.ignore()
		if !typeSet {
			op.Type = opQuery
		}

		for {
			if p.peek(itemEOF, itemFragment) {
				p.ignore()
				break
			}

			op.Fields, err = p.parseFields(op.Fields)
			if err != nil {
				return fmt.Errorf("%s: %v", op.Type, err)
			}
		}
	} else {
		return fmt.Errorf("expecting a query, mutation or subscription, got: %s", p.next())
	}

	return nil
}

func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
	item := p.next()

	switch item._type {
	case itemQuery:
		op.Type = opQuery
	case itemMutation:
		op.Type = opMutate
	case itemSub:
		op.Type = opSub
	}

	op.Args = op.argsA[:0]

	var err error

	if p.peek(itemName) {
		op.Name = p.val(p.next())
	}

	if p.peek(itemArgsOpen) {
		p.ignore()

		op.Args, err = p.parseOpParams(op.Args)
		if err != nil {
			return err
		}
	}

	return nil
}

func ParseArgValue(argVal string) (*Node, error) {
@ -123,195 +319,59 @@ func ParseArgValue(argVal string) (*Node, error) {
	return op, err
}

func parseSelectionSet(gql []byte) (*Operation, error) {
func (p *Parser) parseFields(fields []Field) ([]Field, error) {
	var err error
	st := NewStack()

	if len(gql) == 0 {
		return nil, errors.New("blank query")
	if !p.peek(itemName, itemSpread) {
		return nil, fmt.Errorf("unexpected token: %s", p.peekNext())
	}

	l := lexPool.Get().(*lexer)
	l.Reset()

	if err = lex(l, gql); err != nil {
		return nil, err
	}

	p := &Parser{
		input: l.input,
		pos:   -1,
		items: l.items,
	}

	var op *Operation

	if p.peek(itemObjOpen) {
		for {
			if p.peek(itemEOF) {
				p.ignore()
		op, err = p.parseQueryOp()
	} else {
		op, err = p.parseOp()
	}

	if err != nil {
		return nil, err
		return nil, errors.New("invalid query")
	}

	if p.peek(itemObjClose) {
		p.ignore()

		if st.Len() != 0 {
			st.Pop()
			continue
		} else {
			return nil, fmt.Errorf("operation missing closing '}'")
		}

	if !p.peek(itemEOF) {
		p.ignore()
		return nil, fmt.Errorf("invalid '%s' found after closing '}'", p.current())
	}

	lexPool.Put(l)

	return op, err
}

func (p *Parser) next() item {
	n := p.pos + 1
	if n >= len(p.items) {
		p.err = errEOT
		return item{_type: itemEOF}
	}
	p.pos = n
	return p.items[p.pos]
}

func (p *Parser) ignore() {
	n := p.pos + 1
	if n >= len(p.items) {
		p.err = errEOT
		return
	}
	p.pos = n
}

func (p *Parser) current() string {
	item := p.items[p.pos]
	return b2s(p.input[item.pos:item.end])
}

func (p *Parser) peek(types ...itemType) bool {
	n := p.pos + 1
	// if p.items[n]._type == itemEOF {
	// 	return false
	// }
	if n >= len(p.items) {
		return false
	}
	for i := 0; i < len(types); i++ {
		if p.items[n]._type == types[i] {
			return true
		}
	}
	return false
}

func (p *Parser) parseOp() (*Operation, error) {
	if !p.peek(itemQuery, itemMutation, itemSub) {
		err := errors.New("expecting a query, mutation or subscription")
		return nil, err
	}
	item := p.next()

	op := opPool.Get().(*Operation)
	op.Reset()

	switch item._type {
	case itemQuery:
		op.Type = opQuery
	case itemMutation:
		op.Type = opMutate
	case itemSub:
		op.Type = opSub
	}

	op.Fields = op.fieldsA[:0]
	op.Args = op.argsA[:0]

	var err error

	if p.peek(itemName) {
		op.Name = p.val(p.next())
	}

	if p.peek(itemArgsOpen) {
		p.ignore()

		op.Args, err = p.parseOpParams(op.Args)
		if err != nil {
			return nil, err
		}
	}

	if p.peek(itemObjOpen) {
		p.ignore()

		for n := 0; n < 10; n++ {
			if !p.peek(itemName) {
				break
			}

			op.Fields, err = p.parseFields(op.Fields)
			if err != nil {
				return nil, err
			}
		}
	}

	return op, nil
}

func (p *Parser) parseQueryOp() (*Operation, error) {
	op := opPool.Get().(*Operation)
	op.Reset()

	op.Type = opQuery
	op.Fields = op.fieldsA[:0]
	op.Args = op.argsA[:0]

	var err error

	for n := 0; n < 10; n++ {
		if !p.peek(itemName) {
			break
		}

		op.Fields, err = p.parseFields(op.Fields)
		if err != nil {
			return nil, err
		}
	}

	return op, nil
}

func (p *Parser) parseFields(fields []Field) ([]Field, error) {
	st := util.NewStack()

	for {
		if len(fields) >= maxFields {
			return nil, fmt.Errorf("too many fields (max %d)", maxFields)
		}

		if p.peek(itemObjClose) {
		isFrag := false

		if p.peek(itemSpread) {
			p.ignore()
			st.Pop()
			isFrag = true
		}

			if st.Len() == 0 {
				break
		if isFrag {
			fields, err = p.parseFragmentFields(st, fields)
		} else {
				continue
			fields, err = p.parseNormalFields(st, fields)
		}

		if err != nil {
			return nil, err
		}
	}

	return fields, nil
}

func (p *Parser) parseNormalFields(st *Stack, fields []Field) ([]Field, error) {
	if !p.peek(itemName) {
		return nil, errors.New("expecting an alias or field name")
		return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
	}

	fields = append(fields, Field{ID: int32(len(fields))})
@ -320,18 +380,17 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
	f.Args = f.argsA[:0]
	f.Children = f.childrenA[:0]

	// Parse the inside of the fields () parentheses,
	// in short parse the args like id, where, etc.
	// Parse the field.
	if err := p.parseField(f); err != nil {
		return nil, err
	}

	intf := st.Peek()
	if pid, ok := intf.(int32); ok {
	if st.Len() == 0 {
		f.ParentID = -1
	} else {
		pid := st.Peek()
		f.ParentID = pid
		fields[pid].Children = append(fields[pid].Children, f.ID)
	} else {
		f.ParentID = -1
	}

	// The first opening curly bracket after this
@ -339,12 +398,79 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
	if p.peek(itemObjOpen) {
		p.ignore()
		st.Push(f.ID)
	}

	return fields, nil
}

func (p *Parser) parseFragmentFields(st *Stack, fields []Field) ([]Field, error) {
	var err error
	pid := st.Peek()

	if p.peek(itemOn) {
		p.ignore()
		fields[pid].Union = true

		if fields, err = p.parseNormalFields(st, fields); err != nil {
			return nil, err
		}

		// If the parent is a union selector then copy over args from the parent
		// to the first child, which is the root selector for each union type.
		for i := pid + 1; i < int32(len(fields)); i++ {
			f := &fields[i]
			if f.ParentID == pid {
				f.Args = fields[pid].Args
			}
		}

	} else if p.peek(itemObjClose) {
		if st.Len() == 0 {
			break
		} else {
			continue
	} else {
		if !p.peek(itemName) {
			return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
		}

		name := p.val(p.next())
		_, _ = p.h.WriteString(name)
		id := p.h.Sum64()
		p.h.Reset()

		fr, ok := p.frags[id]
		if !ok {
			return nil, fmt.Errorf("no fragment named '%s' defined", name)
		}
		ff := fr.Fields

		n := int32(len(fields))
		fields = append(fields, ff...)

		for i := 0; i < len(ff); i++ {
			k := (n + int32(i))
			f := &fields[k]
			f.ID = int32(k)

			// If this is the top level, point the parent to the parent of the
			// previous field.
			if f.ParentID == -1 {
				f.ParentID = pid
				if f.ParentID != -1 {
					fields[pid].Children = append(fields[pid].Children, f.ID)
				}
				// Otherwise update the parent ids to reflect our new place in this array.
			} else {
				f.ParentID += n
			}

			// Copy over children since the fields append is not a deep copy.
			f.Children = make([]int32, len(f.Children))
			copy(f.Children, ff[i].Children)

			// Copy over args since the args append is not a deep copy.
			f.Args = make([]Arg, len(f.Args))
			copy(f.Args, ff[i].Args)

			// Update all the children as needed.
			for j := range f.Children {
				f.Children[j] += n
			}
		}
	}
@ -385,7 +511,7 @@ func (p *Parser) parseOpParams(args []Arg) ([]Arg, error) {
			return nil, fmt.Errorf("too many args (max %d)", maxArgs)
		}

		if p.peek(itemArgsClose) {
		if p.peek(itemEOF, itemArgsClose) {
			p.ignore()
			break
		}
@ -403,7 +529,7 @@ func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
			return nil, fmt.Errorf("too many args (max %d)", maxArgs)
		}

		if p.peek(itemArgsClose) {
		if p.peek(itemEOF, itemArgsClose) {
			p.ignore()
			break
		}
@ -445,11 +571,9 @@ func (p *Parser) parseList() (*Node, error) {
		}
		if ty == 0 {
			ty = node.Type
		} else {
			if ty != node.Type {
		} else if ty != node.Type {
			return nil, errors.New("All values in a list must be of the same type")
		}
		}
		node.Parent = parent
		nodes = append(nodes, node)
	}
@ -470,7 +594,7 @@ func (p *Parser) parseObj() (*Node, error) {
	parent.Reset()

	for {
		if p.peek(itemObjClose) {
		if p.peek(itemEOF, itemObjClose) {
			p.ignore()
			break
		}
@ -545,6 +669,57 @@ func (p *Parser) vall(v item) string {
	return b2s(p.input[v.pos:v.end])
}

func (p *Parser) peek(types ...itemType) bool {
	n := p.pos + 1
	l := len(types)
	// if p.items[n]._type == itemEOF {
	// 	return false
	// }

	if n >= len(p.items) {
		return types[0] == itemEOF
	}

	if l == 1 {
		return p.items[n]._type == types[0]
	}

	for i := 0; i < l; i++ {
		if p.items[n]._type == types[i] {
			return true
		}
	}
	return false
}

func (p *Parser) next() item {
	n := p.pos + 1
	if n >= len(p.items) {
		p.err = errEOT
		return item{_type: itemEOF}
	}
	p.pos = n
	return p.items[p.pos]
}

func (p *Parser) ignore() {
	n := p.pos + 1
	if n >= len(p.items) {
		p.err = errEOT
		return
	}
	p.pos = n
}

func (p *Parser) peekNext() string {
	item := p.items[p.pos+1]
	return b2s(p.input[item.pos:item.end])
}

func (p *Parser) reset(to int) {
	p.pos = to
}

func b2s(b []byte) string {
	return *(*string)(unsafe.Pointer(&b))
}
@ -578,34 +753,9 @@ func (t parserType) String() string {
	case NodeList:
		v = "node-list"
	}
	return fmt.Sprintf("<%s>", v)
	return v
}

// type Frees struct {
// 	n   *Node
// 	loc int
// }

// var freeList []Frees

// func FreeNode(n *Node, loc int) {
// 	j := -1

// 	for i := range freeList {
// 		if n == freeList[i].n {
// 			j = i
// 			break
// 		}
// 	}

// 	if j == -1 {
// 		nodePool.Put(n)
// 		freeList = append(freeList, Frees{n, loc})
// 	} else {
// 		fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// 	}
// }

func FreeNode(n *Node, loc int) {
func FreeNode(n *Node) {
	nodePool.Put(n)
}
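The net effect of this rewrite: `Parse` now makes two passes over the token stream, registering every fragment first (keyed by a `maphash` of its name) and then parsing the operation so that `...spread` fields can be inlined by copying the fragment's fields and re-parenting them into the surrounding selection. The lookup mechanics in isolation, as a reduced sketch (the real code keeps the `maphash.Hash` on the Parser):

package main

import (
	"fmt"
	"hash/maphash"
)

type fragment struct {
	name   string
	fields []string
}

func main() {
	var h maphash.Hash // the zero Hash picks a random seed on first use

	key := func(name string) uint64 {
		h.Reset()
		_, _ = h.WriteString(name)
		return h.Sum64()
	}

	frags := map[uint64]*fragment{
		key("userFields1"): {name: "userFields1", fields: []string{"id", "email"}},
	}

	// Resolving a spread: hash the spread's name the same way and look it up.
	if f, ok := frags[key("userFields1")]; ok {
		fmt.Println(f.fields) // [id email]
	}
}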
@ -2,8 +2,9 @@ package qcode

import (
	"errors"
	"github.com/chirino/graphql/schema"
	"testing"

	"github.com/chirino/graphql/schema"
)

func TestCompile1(t *testing.T) {
@ -120,7 +121,7 @@ updateThread {
		}
	}
}
}`
}}`
	qcompile, _ := NewCompiler(Config{})
	_, err := qcompile.Compile([]byte(gql), "anon")

@ -130,6 +131,93 @@ updateThread {

}

func TestFragmentsCompile1(t *testing.T) {
	gql := `
	fragment userFields1 on user {
		id
		email
	}

	query {
		users {
			...userFields2

			created_at
			...userFields1
		}
	}

	fragment userFields2 on user {
		first_name
		last_name
	}
	`
	qcompile, _ := NewCompiler(Config{})
	_, err := qcompile.Compile([]byte(gql), "user")

	if err != nil {
		t.Fatal(err)
	}
}

func TestFragmentsCompile2(t *testing.T) {
	gql := `
	query {
		users {
			...userFields2

			created_at
			...userFields1
		}
	}

	fragment userFields1 on user {
		id
		email
	}

	fragment userFields2 on user {
		first_name
		last_name
	}`
	qcompile, _ := NewCompiler(Config{})
	_, err := qcompile.Compile([]byte(gql), "user")

	if err != nil {
		t.Fatal(err)
	}
}

func TestFragmentsCompile3(t *testing.T) {
	gql := `
	fragment userFields1 on user {
		id
		email
	}

	fragment userFields2 on user {
		first_name
		last_name
	}

	query {
		users {
			...userFields2

			created_at
			...userFields1
		}
	}
	`
	qcompile, _ := NewCompiler(Config{})
	_, err := qcompile.Compile([]byte(gql), "user")

	if err != nil {
		t.Fatal(err)
	}
}

var gql = []byte(`
	{products(
		# returns only 30 items
@ -151,6 +239,29 @@ var gql = []byte(`
		price
	}}`)

var gqlWithFragments = []byte(`
fragment userFields1 on user {
	id
	email
	__typename
}

query {
	users {
		...userFields2

		created_at
		...userFields1
		__typename
	}
}

fragment userFields2 on user {
	first_name
	last_name
	__typename
}`)

func BenchmarkQCompile(b *testing.B) {
	qcompile, _ := NewCompiler(Config{})

@ -183,8 +294,22 @@ func BenchmarkQCompileP(b *testing.B) {
	})
}

func BenchmarkParse(b *testing.B) {
func BenchmarkQCompileFragment(b *testing.B) {
	qcompile, _ := NewCompiler(Config{})

	b.ResetTimer()
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		_, err := qcompile.Compile(gqlWithFragments, "user")

		if err != nil {
			b.Fatal(err)
		}
	}

}

func BenchmarkParse(b *testing.B) {
	b.ResetTimer()
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
@ -211,6 +336,18 @@ func BenchmarkParseP(b *testing.B) {
	})
}

func BenchmarkParseFragment(b *testing.B) {
	b.ResetTimer()
	b.ReportAllocs()
	for n := 0; n < b.N; n++ {
		_, err := Parse(gqlWithFragments)

		if err != nil {
			b.Fatal(err)
		}
	}
}

func BenchmarkSchemaParse(b *testing.B) {

	b.ResetTimer()
@ -12,6 +12,7 @@ import (
|
||||
)
|
||||
|
||||
type QType int
|
||||
type SType int
|
||||
type Action int
|
||||
|
||||
const (
|
||||
@ -19,7 +20,8 @@ const (
|
||||
)
|
||||
|
||||
const (
|
||||
QTQuery QType = iota + 1
|
||||
QTUnknown QType = iota
|
||||
QTQuery
|
||||
QTMutation
|
||||
QTInsert
|
||||
QTUpdate
|
||||
@ -27,6 +29,12 @@ const (
|
||||
QTUpsert
|
||||
)
|
||||
|
||||
const (
|
||||
STNone SType = iota
|
||||
STUnion
|
||||
STMember
|
||||
)
|
||||
|
||||
type QCode struct {
|
||||
Type QType
|
||||
ActionVar string
|
||||
@ -38,6 +46,8 @@ type QCode struct {
|
||||
type Select struct {
|
||||
ID int32
|
||||
ParentID int32
|
||||
UParentID int32
|
||||
Type SType
|
||||
Args map[string]*Node
|
||||
Name string
|
||||
FieldName string
|
||||
@ -170,9 +180,10 @@ const (
|
||||
)
|
||||
|
||||
type Compiler struct {
|
||||
db bool // default block tables if not defined in anon role
|
||||
tr map[string]map[string]*trval
|
||||
bl map[string]struct{}
|
||||
|
||||
defBlock bool
|
||||
}
|
||||
|
||||
var expPool = sync.Pool{
|
||||
@ -180,7 +191,7 @@ var expPool = sync.Pool{
|
||||
}
|
||||
|
||||
func NewCompiler(c Config) (*Compiler, error) {
|
||||
co := &Compiler{}
|
||||
co := &Compiler{defBlock: c.DefaultBlock}
|
||||
co.tr = make(map[string]map[string]*trval)
|
||||
co.bl = make(map[string]struct{}, len(c.Blocklist))
|
||||
|
||||
@ -227,7 +238,7 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
|
||||
return err
|
||||
}
|
||||
trv.insert.cols = listToMap(trc.Insert.Columns)
|
||||
trv.insert.psmap = parsePresets(trc.Insert.Presets)
|
||||
trv.insert.psmap = trc.Insert.Presets
|
||||
trv.insert.pslist = mapToList(trv.insert.psmap)
|
||||
trv.insert.block = trc.Insert.Block
|
||||
|
||||
@ -237,7 +248,7 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
|
||||
return err
|
||||
}
|
||||
trv.update.cols = listToMap(trc.Update.Columns)
|
||||
trv.update.psmap = parsePresets(trc.Update.Presets)
|
||||
trv.update.psmap = trc.Update.Presets
|
||||
trv.update.pslist = mapToList(trv.update.psmap)
|
||||
trv.update.block = trc.Update.Block
|
||||
|
||||
@ -276,6 +287,7 @@ func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
freeNodes(op)
|
||||
opPool.Put(op)
|
||||
|
||||
return &qc, nil
|
||||
@ -344,22 +356,22 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
|
||||
case QTInsert:
|
||||
if trv.insert.block {
|
||||
return fmt.Errorf("insert blocked: %s", field.Name)
|
||||
return fmt.Errorf("%s, insert blocked: %s", role, field.Name)
|
||||
}
|
||||
|
||||
case QTUpdate:
|
||||
if trv.update.block {
|
||||
return fmt.Errorf("update blocked: %s", field.Name)
|
||||
return fmt.Errorf("%s, update blocked: %s", role, field.Name)
|
||||
}
|
||||
|
||||
case QTDelete:
|
||||
if trv.delete.block {
|
||||
return fmt.Errorf("delete blocked: %s", field.Name)
|
||||
return fmt.Errorf("%s, delete blocked: %s", role, field.Name)
|
||||
}
|
||||
}
|
||||
|
||||
} else if role == "anon" {
|
||||
skipRender = true
|
||||
skipRender = com.defBlock
|
||||
}
|
||||
|
||||
selects = append(selects, Select{
|
||||
@ -370,7 +382,11 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
})
|
||||
s := &selects[(len(selects) - 1)]
|
||||
|
||||
if len(field.Alias) != 0 {
|
||||
if field.Union {
|
||||
s.Type = STUnion
|
||||
}
|
||||
|
||||
if field.Alias != "" {
|
||||
s.FieldName = field.Alias
|
||||
} else {
|
||||
s.FieldName = s.Name
|
||||
@ -381,6 +397,11 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
} else {
|
||||
p := &selects[s.ParentID]
|
||||
p.Children = append(p.Children, s.ID)
|
||||
|
||||
if p.Type == STUnion {
|
||||
s.Type = STMember
|
||||
s.UParentID = p.ParentID
|
||||
}
|
||||
}
|
||||
|
||||
if skipRender {
|
||||
@ -418,6 +439,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
com.AddFilters(qc, s, role)
|
||||
|
||||
s.Cols = make([]Column, 0, len(field.Children))
|
||||
cm := make(map[string]struct{})
|
||||
action = QTQuery
|
||||
|
||||
for _, cid := range field.Children {
|
||||
@ -427,19 +449,27 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
continue
|
||||
}
|
||||
|
||||
var fname string
|
||||
|
||||
if f.Alias != "" {
|
||||
fname = f.Alias
|
||||
} else {
|
||||
fname = f.Name
|
||||
}
|
||||
|
||||
if _, ok := cm[fname]; ok {
|
||||
continue
|
||||
} else {
|
||||
cm[fname] = struct{}{}
|
||||
}
|
||||
|
||||
if len(f.Children) != 0 {
|
||||
val := f.ID | (s.ID << 16)
|
||||
st.Push(val)
|
||||
continue
|
||||
}
|
||||
|
||||
col := Column{Name: f.Name}
|
||||
|
||||
if len(f.Alias) != 0 {
|
||||
col.FieldName = f.Alias
|
||||
} else {
|
||||
col.FieldName = f.Name
|
||||
}
|
||||
col := Column{Name: f.Name, FieldName: fname}
|
||||
s.Cols = append(s.Cols, col)
|
||||
}
|
||||
|
||||
@ -451,6 +481,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
}
|
||||
|
||||
qc.Selects = selects[:id]
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -482,50 +513,42 @@ func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
|
||||
func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg, role string) error {
|
||||
var err error
|
||||
|
||||
// don't free this arg; it was either already freed or will be
// freed later (e.g. in psql)
|
||||
var df bool
|
||||
|
||||
for i := range args {
|
||||
arg := &args[i]
|
||||
|
||||
switch arg.Name {
|
||||
case "id":
|
||||
err, df = com.compileArgID(sel, arg)
|
||||
err = com.compileArgID(sel, arg)
|
||||
|
||||
case "search":
|
||||
err, df = com.compileArgSearch(sel, arg)
|
||||
err = com.compileArgSearch(sel, arg)
|
||||
|
||||
case "where":
|
||||
err, df = com.compileArgWhere(sel, arg, role)
|
||||
err = com.compileArgWhere(sel, arg, role)
|
||||
|
||||
case "orderby", "order_by", "order":
|
||||
err, df = com.compileArgOrderBy(sel, arg)
|
||||
err = com.compileArgOrderBy(sel, arg)
|
||||
|
||||
case "distinct_on", "distinct":
|
||||
err, df = com.compileArgDistinctOn(sel, arg)
|
||||
err = com.compileArgDistinctOn(sel, arg)
|
||||
|
||||
case "limit":
|
||||
err, df = com.compileArgLimit(sel, arg)
|
||||
err = com.compileArgLimit(sel, arg)
|
||||
|
||||
case "offset":
|
||||
err, df = com.compileArgOffset(sel, arg)
|
||||
err = com.compileArgOffset(sel, arg)
|
||||
|
||||
case "first":
|
||||
err, df = com.compileArgFirstLast(sel, arg, PtForward)
|
||||
err = com.compileArgFirstLast(sel, arg, PtForward)
|
||||
|
||||
case "last":
|
||||
err, df = com.compileArgFirstLast(sel, arg, PtBackward)
|
||||
err = com.compileArgFirstLast(sel, arg, PtBackward)
|
||||
|
||||
case "after":
|
||||
err, df = com.compileArgAfterBefore(sel, arg, PtForward)
|
||||
err = com.compileArgAfterBefore(sel, arg, PtForward)
|
||||
|
||||
case "before":
|
||||
err, df = com.compileArgAfterBefore(sel, arg, PtBackward)
|
||||
}
|
||||
|
||||
if !df {
|
||||
FreeNode(arg.Val, 5)
|
||||
err = com.compileArgAfterBefore(sel, arg, PtBackward)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
@ -606,15 +629,13 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
|
||||
}
|
||||
|
||||
// Objects inside a list
|
||||
if len(node.Name) == 0 {
|
||||
if node.Name == "" {
|
||||
pushChildren(st, node.exp, node)
|
||||
continue
|
||||
|
||||
} else {
|
||||
if _, ok := com.bl[node.Name]; ok {
|
||||
} else if _, ok := com.bl[node.Name]; ok {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
ex, err := newExp(st, node, usePool)
|
||||
if err != nil {
|
||||
@ -636,39 +657,20 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
|
||||
}
|
||||
}
|
||||
|
||||
if usePool {
|
||||
st.Push(node)
|
||||
|
||||
for {
|
||||
if st.Len() == 0 {
|
||||
break
|
||||
}
|
||||
intf := st.Pop()
|
||||
node, ok := intf.(*Node)
|
||||
if !ok || node == nil {
|
||||
continue
|
||||
}
|
||||
for i := range node.Children {
|
||||
st.Push(node.Children[i])
|
||||
}
|
||||
FreeNode(node, 1)
|
||||
}
|
||||
}
|
||||
|
||||
return root, needsUser, nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
|
||||
func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
|
||||
if sel.ID != 0 {
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
if sel.Where != nil && sel.Where.Op == OpEqID {
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
if arg.Val.Type != NodeVar {
|
||||
return argErr("id", "variable"), false
|
||||
return argErr("id", "variable")
|
||||
}
|
||||
|
||||
ex := expPool.Get().(*Exp)
|
||||
@ -679,12 +681,12 @@ func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
|
||||
ex.Val = arg.Val.Val
|
||||
|
||||
sel.Where = ex
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
|
||||
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
|
||||
if arg.Val.Type != NodeVar {
|
||||
return argErr("search", "variable"), false
|
||||
return argErr("search", "variable")
|
||||
}
|
||||
|
||||
ex := expPool.Get().(*Exp)
|
||||
@ -699,18 +701,19 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
|
||||
}
|
||||
|
||||
sel.Args[arg.Name] = arg.Val
|
||||
arg.df = true
|
||||
AddFilter(sel, ex)
|
||||
|
||||
return nil, true
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error, bool) {
|
||||
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
|
||||
st := util.NewStack()
|
||||
var err error
|
||||
|
||||
ex, nu, err := com.compileArgObj(st, arg)
|
||||
if err != nil {
|
||||
return err, false
|
||||
return err
|
||||
}
|
||||
|
||||
if nu && role == "anon" {
|
||||
@ -718,12 +721,12 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error,
|
||||
}
|
||||
AddFilter(sel, ex)
|
||||
|
||||
return nil, true
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
|
||||
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
|
||||
if arg.Val.Type != NodeObj {
|
||||
return fmt.Errorf("expecting an object"), false
|
||||
return fmt.Errorf("expecting an object")
|
||||
}
|
||||
|
||||
st := util.NewStack()
|
||||
@ -741,16 +744,15 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
|
||||
node, ok := intf.(*Node)
|
||||
|
||||
if !ok || node == nil {
|
||||
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf), false
|
||||
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf)
|
||||
}
|
||||
|
||||
if _, ok := com.bl[node.Name]; ok {
|
||||
FreeNode(node, 2)
|
||||
continue
|
||||
}
|
||||
|
||||
if node.Type != NodeStr && node.Type != NodeVar {
|
||||
return fmt.Errorf("expecting a string or variable"), false
|
||||
return fmt.Errorf("expecting a string or variable")
|
||||
}
|
||||
|
||||
ob := &OrderBy{}
|
||||
@ -769,25 +771,24 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
|
||||
case "desc_nulls_last":
|
||||
ob.Order = OrderDescNullsLast
|
||||
default:
|
||||
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first"), false
|
||||
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first")
|
||||
}
|
||||
|
||||
setOrderByColName(ob, node)
|
||||
sel.OrderBy = append(sel.OrderBy, ob)
|
||||
FreeNode(node, 3)
|
||||
}
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
|
||||
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {
|
||||
node := arg.Val
|
||||
|
||||
if _, ok := com.bl[node.Name]; ok {
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
if node.Type != NodeList && node.Type != NodeStr {
|
||||
return fmt.Errorf("expecting a list of strings or just a string"), false
|
||||
return fmt.Errorf("expecting a list of strings or just a string")
|
||||
}
|
||||
|
||||
if node.Type == NodeStr {
|
||||
@ -796,58 +797,57 @@ func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
|
||||
|
||||
for i := range node.Children {
|
||||
sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val)
|
||||
FreeNode(node.Children[i], 5)
|
||||
}
|
||||
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) (error, bool) {
|
||||
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) error {
|
||||
node := arg.Val
|
||||
|
||||
if node.Type != NodeInt {
|
||||
return argErr("limit", "number"), false
|
||||
return argErr("limit", "number")
|
||||
}
|
||||
|
||||
sel.Paging.Limit = node.Val
|
||||
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) (error, bool) {
|
||||
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error {
|
||||
node := arg.Val
|
||||
|
||||
if node.Type != NodeVar {
|
||||
return argErr("offset", "variable"), false
|
||||
return argErr("offset", "variable")
|
||||
}
|
||||
|
||||
sel.Paging.Offset = node.Val
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) (error, bool) {
|
||||
func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) error {
|
||||
node := arg.Val
|
||||
|
||||
if node.Type != NodeInt {
|
||||
return argErr(arg.Name, "number"), false
|
||||
return argErr(arg.Name, "number")
|
||||
}
|
||||
|
||||
sel.Paging.Type = pt
|
||||
sel.Paging.Limit = node.Val
|
||||
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) (error, bool) {
|
||||
func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) error {
|
||||
node := arg.Val
|
||||
|
||||
if node.Type != NodeVar || node.Val != "cursor" {
|
||||
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name), false
|
||||
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name)
|
||||
}
|
||||
sel.Paging.Type = pt
|
||||
sel.Paging.Cursor = true
|
||||
|
||||
return nil, false
|
||||
return nil
|
||||
}
|
||||
|
||||
// var zeroTrv = &trval{}
|
||||
@ -1030,10 +1030,15 @@ func setListVal(ex *Exp, node *Node) {
|
||||
case NodeFloat:
|
||||
ex.ListType = ValFloat
|
||||
}
|
||||
} else {
|
||||
ex.Val = node.Val
|
||||
return
|
||||
}
|
||||
|
||||
for i := range node.Children {
|
||||
ex.ListVal = append(ex.ListVal, node.Children[i].Val)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func setWhereColName(ex *Exp, node *Node) {
|
||||
@ -1043,7 +1048,7 @@ func setWhereColName(ex *Exp, node *Node) {
|
||||
if n.Type != NodeObj {
|
||||
continue
|
||||
}
|
||||
if len(n.Name) != 0 {
|
||||
if n.Name != "" {
|
||||
k := n.Name
|
||||
if k == "and" || k == "or" || k == "not" ||
|
||||
k == "_and" || k == "_or" || k == "_not" {
|
||||
@ -1222,3 +1227,81 @@ func FreeExp(ex *Exp) {
|
||||
func argErr(name, ty string) error {
|
||||
return fmt.Errorf("value for argument '%s' must be a %s", name, ty)
|
||||
}
|
||||
|
||||
func freeNodes(op *Operation) {
|
||||
var st *util.Stack
|
||||
fm := make(map[*Node]struct{})
|
||||
|
||||
for i := range op.Args {
|
||||
arg := op.Args[i]
|
||||
if arg.df {
|
||||
continue
|
||||
}
|
||||
|
||||
for i := range arg.Val.Children {
|
||||
if st == nil {
|
||||
st = util.NewStack()
|
||||
}
|
||||
c := arg.Val.Children[i]
|
||||
if _, ok := fm[c]; !ok {
|
||||
st.Push(c)
|
||||
}
|
||||
}
|
||||
|
||||
if _, ok := fm[arg.Val]; !ok {
|
||||
nodePool.Put(arg.Val)
|
||||
fm[arg.Val] = struct{}{}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
for i := range op.Fields {
|
||||
f := op.Fields[i]
|
||||
|
||||
for j := range f.Args {
|
||||
arg := f.Args[j]
|
||||
if arg.df {
|
||||
continue
|
||||
}
|
||||
|
||||
for k := range arg.Val.Children {
|
||||
if st == nil {
|
||||
st = util.NewStack()
|
||||
}
|
||||
c := arg.Val.Children[k]
|
||||
if _, ok := fm[c]; !ok {
|
||||
st.Push(c)
|
||||
}
|
||||
}
|
||||
|
||||
if _, ok := fm[arg.Val]; !ok {
|
||||
nodePool.Put(arg.Val)
|
||||
fm[arg.Val] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if st == nil {
|
||||
return
|
||||
}
|
||||
|
||||
for {
|
||||
if st.Len() == 0 {
|
||||
break
|
||||
}
|
||||
intf := st.Pop()
|
||||
node, ok := intf.(*Node)
|
||||
if !ok || node == nil {
|
||||
continue
|
||||
}
|
||||
|
||||
for i := range node.Children {
|
||||
st.Push(node.Children[i])
|
||||
}
|
||||
|
||||
if _, ok := fm[node]; !ok {
|
||||
nodePool.Put(node)
|
||||
fm[node] = struct{}{}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
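`freeNodes` above returns every parser node to a `sync.Pool` exactly once, using a seen-map (`fm`) to guard against a double-`Put` when a node is reachable from more than one place. A reduced sketch of that dedup pattern, with an illustrative node type:

```go
package qcode_example

import "sync"

type node struct {
	children []*node
}

var nodePool = sync.Pool{New: func() interface{} { return &node{} }}

// releaseTree returns every node in the tree to the pool exactly once;
// the seen map plays the role of fm in freeNodes above, preventing a
// node reachable from several places from being Put twice.
func releaseTree(root *node) {
	seen := map[*node]struct{}{}
	stack := []*node{root}

	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if n == nil {
			continue
		}

		// visit children before recycling the parent
		stack = append(stack, n.children...)

		if _, ok := seen[n]; !ok {
			seen[n] = struct{}{}
			nodePool.Put(n)
		}
	}
}
```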
@ -29,6 +29,8 @@ func al(b byte) bool {
|
||||
|
||||
func (qt QType) String() string {
|
||||
switch qt {
|
||||
case QTUnknown:
|
||||
return "unknown"
|
||||
case QTQuery:
|
||||
return "query"
|
||||
case QTMutation:
|
||||
|
216
core/prepare.go
@ -2,128 +2,93 @@ package core
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"database/sql"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"io"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/dosco/super-graph/core/internal/allow"
|
||||
"github.com/dosco/super-graph/core/internal/qcode"
|
||||
"github.com/valyala/fasttemplate"
|
||||
)
|
||||
|
||||
type preparedItem struct {
|
||||
type query struct {
|
||||
sync.Once
|
||||
sd *sql.Stmt
|
||||
args [][]byte
|
||||
ai allow.Item
|
||||
qt qcode.QType
|
||||
err error
|
||||
st stmt
|
||||
roleArg bool
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) initPrepared() error {
|
||||
ct := context.Background()
|
||||
func (sg *SuperGraph) prepare(q *query, role string) {
|
||||
var stmts []stmt
|
||||
var err error
|
||||
|
||||
qb := []byte(q.ai.Query)
|
||||
vars := []byte(q.ai.Vars)
|
||||
|
||||
switch q.qt {
|
||||
case qcode.QTQuery:
|
||||
if sg.abacEnabled {
|
||||
stmts, err = sg.buildMultiStmt(qb, vars)
|
||||
} else {
|
||||
stmts, err = sg.buildRoleStmt(qb, vars, role)
|
||||
}
|
||||
|
||||
case qcode.QTMutation:
|
||||
stmts, err = sg.buildRoleStmt(qb, vars, role)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
sg.log.Printf("WRN %s %s: %v", q.qt, q.ai.Name, err)
|
||||
}
|
||||
|
||||
q.st = stmts[0]
|
||||
q.roleArg = len(stmts) > 1
|
||||
|
||||
q.sd, err = sg.db.Prepare(q.st.sql)
|
||||
if err != nil {
|
||||
q.err = fmt.Errorf("prepare failed: %v: %s", err, q.st.sql)
|
||||
}
|
||||
}
|
||||
|
||||
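With this change, statements are no longer all prepared up front inside a transaction; each `query` carries a `sync.Once` and is prepared lazily on first use. A minimal sketch of that pattern over plain `database/sql` (names illustrative):

```go
package prepare_example

import (
	"database/sql"
	"fmt"
	"sync"
)

// lazyQuery prepares its SQL at most once, on first use, mirroring the
// sync.Once embedded in the new query struct above.
type lazyQuery struct {
	once sync.Once
	sql  string
	sd   *sql.Stmt
	err  error
}

func (q *lazyQuery) stmt(db *sql.DB) (*sql.Stmt, error) {
	q.once.Do(func() {
		q.sd, q.err = db.Prepare(q.sql)
		if q.err != nil {
			q.err = fmt.Errorf("prepare failed: %w: %s", q.err, q.sql)
		}
	})
	return q.sd, q.err
}
```

A failed prepare is cached in `err`, so a bad query is reported on every call without being re-prepared each time, matching `q.err` in the diff.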
func (sg *SuperGraph) initPrepared() error {
|
||||
if sg.allowList.IsPersist() {
|
||||
return nil
|
||||
}
|
||||
sg.prepared = make(map[string]*preparedItem)
|
||||
|
||||
tx, err := sg.db.BeginTx(ct, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback() //nolint: errcheck
|
||||
|
||||
if err = sg.prepareRoleStmt(tx); err != nil {
|
||||
return fmt.Errorf("prepareRoleStmt: %w", err)
|
||||
if err := sg.prepareRoleStmt(); err != nil {
|
||||
return fmt.Errorf("role query: %w", err)
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
success := 0
|
||||
sg.queries = make(map[uint64]*query)
|
||||
|
||||
list, err := sg.allowList.Load()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
h := maphash.Hash{}
|
||||
h.SetSeed(sg.hashSeed)
|
||||
|
||||
for _, v := range list {
|
||||
if len(v.Query) == 0 {
|
||||
if v.Query == "" {
|
||||
continue
|
||||
}
|
||||
|
||||
err := sg.prepareStmt(v)
|
||||
if err != nil {
|
||||
sg.log.Printf("WRN %s: %v", v.Name, err)
|
||||
} else {
|
||||
success++
|
||||
}
|
||||
}
|
||||
qt := qcode.GetQType(v.Query)
|
||||
|
||||
sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
||||
query := item.Query
|
||||
qb := []byte(query)
|
||||
vars := item.Vars
|
||||
|
||||
qt := qcode.GetQType(query)
|
||||
ct := context.Background()
|
||||
switch qt {
|
||||
case qcode.QTQuery:
|
||||
var stmts1 []stmt
|
||||
var err error
|
||||
|
||||
if sg.abacEnabled {
|
||||
stmts1, err = sg.buildMultiStmt(qb, vars)
|
||||
} else {
|
||||
stmts1, err = sg.buildRoleStmt(qb, vars, "user")
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
|
||||
|
||||
err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if sg.anonExists {
|
||||
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
|
||||
|
||||
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
sg.queries[queryID(&h, v.Name, "user")] = &query{ai: v, qt: qt}
|
||||
sg.queries[queryID(&h, v.Name, "anon")] = &query{ai: v, qt: qt}
|
||||
|
||||
case qcode.QTMutation:
|
||||
for _, role := range sg.conf.Roles {
|
||||
// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
|
||||
|
||||
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
|
||||
if err != nil {
|
||||
return err
|
||||
sg.queries[queryID(&h, v.Name, role.Name)] = &query{ai: v, qt: qt}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -131,40 +96,24 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
|
||||
finalSQL, am := processTemplate(st[0].sql)
|
||||
|
||||
sd, err := sg.db.Prepare(finalSQL)
|
||||
if err != nil {
|
||||
return fmt.Errorf("prepare failed: %v: %s", err, finalSQL)
|
||||
}
|
||||
|
||||
sg.prepared[key] = &preparedItem{
|
||||
sd: sd,
|
||||
args: am,
|
||||
st: st[0],
|
||||
roleArg: len(st) > 1,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// nolint: errcheck
|
||||
func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
||||
func (sg *SuperGraph) prepareRoleStmt() error {
|
||||
var err error
|
||||
|
||||
if !sg.abacEnabled {
|
||||
return nil
|
||||
}
|
||||
|
||||
rq := strings.ReplaceAll(sg.conf.RolesQuery, "$user_id", "$1")
|
||||
w := &bytes.Buffer{}
|
||||
|
||||
io.WriteString(w, `SELECT (CASE WHEN EXISTS (`)
|
||||
io.WriteString(w, sg.conf.RolesQuery)
|
||||
io.WriteString(w, rq)
|
||||
io.WriteString(w, `) THEN `)
|
||||
|
||||
io.WriteString(w, `(SELECT (CASE`)
|
||||
for _, role := range sg.conf.Roles {
|
||||
if len(role.Match) == 0 {
|
||||
if role.Match == "" {
|
||||
continue
|
||||
}
|
||||
io.WriteString(w, ` WHEN `)
|
||||
@ -174,14 +123,12 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
||||
io.WriteString(w, `'`)
|
||||
}
|
||||
|
||||
io.WriteString(w, ` ELSE {{role}} END) FROM (`)
|
||||
io.WriteString(w, sg.conf.RolesQuery)
|
||||
io.WriteString(w, ` ELSE $2 END) FROM (`)
|
||||
io.WriteString(w, rq)
|
||||
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
|
||||
io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `)
|
||||
|
||||
roleSQL, _ := processTemplate(w.String())
|
||||
|
||||
sg.getRole, err = tx.Prepare(roleSQL)
|
||||
sg.getRole, err = sg.db.Prepare(w.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -189,49 +136,18 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
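For reference, here is roughly what `prepareRoleStmt` now generates, assuming a `roles_query` of `SELECT * FROM users WHERE id = $user_id` and a single role `admin` matched by `users.admin = true` (both values are illustrative). The `$user_id` template is rewritten to the bind parameter `$1`, and the default role arrives as `$2` in place of the old `{{role}}` template:

```go
package prepare_example

// roleSQL approximates the statement built by prepareRoleStmt above;
// $1 is the user id and $2 is the default role to fall back to when
// no role match fires.
const roleSQL = `SELECT (CASE WHEN EXISTS (SELECT * FROM users WHERE id = $1) THEN ` +
	`(SELECT (CASE WHEN users.admin = true THEN 'admin' ELSE $2 END) ` +
	`FROM (SELECT * FROM users WHERE id = $1) AS "_sg_auth_roles_query" LIMIT 1) ` +
	`ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `
```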
func processTemplate(tmpl string) (string, [][]byte) {
|
||||
st := struct {
|
||||
vmap map[string]int
|
||||
am [][]byte
|
||||
i int
|
||||
}{
|
||||
vmap: make(map[string]int),
|
||||
am: make([][]byte, 0, 5),
|
||||
i: 0,
|
||||
}
|
||||
|
||||
execFunc := func(w io.Writer, tag string) (int, error) {
|
||||
if n, ok := st.vmap[tag]; ok {
|
||||
return w.Write([]byte(fmt.Sprintf("$%d", n)))
|
||||
}
|
||||
st.am = append(st.am, []byte(tag))
|
||||
st.i++
|
||||
st.vmap[tag] = st.i
|
||||
return w.Write([]byte(fmt.Sprintf("$%d", st.i)))
|
||||
}
|
||||
|
||||
t1 := fasttemplate.New(tmpl, `'{{`, `}}'`)
|
||||
ts1 := t1.ExecuteFuncString(execFunc)
|
||||
|
||||
t2 := fasttemplate.New(ts1, `{{`, `}}`)
|
||||
ts2 := t2.ExecuteFuncString(execFunc)
|
||||
|
||||
return ts2, st.am
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) initAllowList() error {
|
||||
var ac allow.Config
|
||||
var err error
|
||||
|
||||
if len(sg.conf.AllowListFile) == 0 {
|
||||
sg.conf.UseAllowList = false
|
||||
sg.log.Printf("WRN allow list disabled no file specified")
|
||||
if sg.conf.AllowListFile == "" {
|
||||
sg.conf.AllowListFile = "allow.list"
|
||||
}
|
||||
|
||||
// When the allow list is not enabled it is still created
// and new queries are saved to it.
|
||||
if !sg.conf.UseAllowList {
|
||||
ac = allow.Config{CreateIfNotExists: true, Persist: true}
|
||||
ac = allow.Config{CreateIfNotExists: true, Persist: true, Log: sg.log}
|
||||
}
|
||||
|
||||
sg.allowList, err = allow.New(sg.conf.AllowListFile, ac)
|
||||
@ -243,9 +159,11 @@ func (sg *SuperGraph) initAllowList() error {
|
||||
}
|
||||
|
||||
// nolint: errcheck
|
||||
func stmtHash(name string, role string) string {
|
||||
h := sha256.New()
|
||||
io.WriteString(h, strings.ToLower(name))
|
||||
io.WriteString(h, role)
|
||||
return hex.EncodeToString(h.Sum(nil))
|
||||
func queryID(h *maphash.Hash, name, role string) uint64 {
|
||||
h.WriteString(name)
|
||||
h.WriteString(role)
|
||||
v := h.Sum64()
|
||||
h.Reset()
|
||||
|
||||
return v
|
||||
}
|
||||
|
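The new `queryID` helper above keys the in-memory query map by a seeded `hash/maphash` of the query name plus role, replacing the old sha256 hex strings. A quick sketch of how such IDs behave (the seed and names here are illustrative):

```go
package prepare_example

import "hash/maphash"

// exampleIDs shows how queryID above derives distinct, stable uint64
// keys for the same query name under different roles.
func exampleIDs() (uint64, uint64) {
	seed := maphash.MakeSeed() // stands in for sg.hashSeed

	h := maphash.Hash{}
	h.SetSeed(seed)

	h.WriteString("getProducts") // illustrative query name
	h.WriteString("user")
	userKey := h.Sum64()
	h.Reset()

	h.WriteString("getProducts")
	h.WriteString("anon")
	anonKey := h.Sum64()

	return userKey, anonKey // distinct keys for the same query name
}
```

Because the seed (`sg.hashSeed`) is set once per process, these IDs are stable within one run but intentionally not across restarts, unlike the old sha256 hex keys.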
@ -4,10 +4,10 @@ import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"net/http"
|
||||
"sync"
|
||||
|
||||
"github.com/cespare/xxhash/v2"
|
||||
"github.com/dosco/super-graph/core/internal/qcode"
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
)
|
||||
@ -16,12 +16,13 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
|
||||
var err error
|
||||
|
||||
sel := st.qc.Selects
|
||||
h := xxhash.New()
|
||||
h := maphash.Hash{}
|
||||
h.SetSeed(sg.hashSeed)
|
||||
|
||||
// fetch the field name used within the db response json
|
||||
// that are used to mark insertion points and the mapping between
|
||||
// those field names and their select objects
|
||||
fids, sfmap := sg.parentFieldIds(h, sel, st.skipped)
|
||||
fids, sfmap := sg.parentFieldIds(&h, sel, st.md.Skipped())
|
||||
|
||||
// fetch the field values of the marked insertion points
|
||||
// these values contain the id to be used with fetching remote data
|
||||
@ -30,10 +31,10 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
|
||||
|
||||
switch {
|
||||
case len(from) == 1:
|
||||
to, err = sg.resolveRemote(hdr, h, from[0], sel, sfmap)
|
||||
to, err = sg.resolveRemote(hdr, &h, from[0], sel, sfmap)
|
||||
|
||||
case len(from) > 1:
|
||||
to, err = sg.resolveRemotes(hdr, h, from, sel, sfmap)
|
||||
to, err = sg.resolveRemotes(hdr, &h, from, sel, sfmap)
|
||||
|
||||
default:
|
||||
return nil, errors.New("something wrong no remote ids found in db response")
|
||||
@ -55,7 +56,7 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
|
||||
|
||||
func (sg *SuperGraph) resolveRemote(
|
||||
hdr http.Header,
|
||||
h *xxhash.Digest,
|
||||
h *maphash.Hash,
|
||||
field jsn.Field,
|
||||
sel []qcode.Select,
|
||||
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
|
||||
@ -66,7 +67,8 @@ func (sg *SuperGraph) resolveRemote(
|
||||
to := toA[:1]
|
||||
|
||||
// use the json key to find the related Select object
|
||||
k1 := xxhash.Sum64(field.Key)
|
||||
_, _ = h.Write(field.Key)
|
||||
k1 := h.Sum64()
|
||||
|
||||
s, ok := sfmap[k1]
|
||||
if !ok {
|
||||
@ -117,7 +119,7 @@ func (sg *SuperGraph) resolveRemote(
|
||||
|
||||
func (sg *SuperGraph) resolveRemotes(
|
||||
hdr http.Header,
|
||||
h *xxhash.Digest,
|
||||
h *maphash.Hash,
|
||||
from []jsn.Field,
|
||||
sel []qcode.Select,
|
||||
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
|
||||
@ -134,7 +136,8 @@ func (sg *SuperGraph) resolveRemotes(
|
||||
for i, id := range from {
|
||||
|
||||
// use the json key to find the related Select object
|
||||
k1 := xxhash.Sum64(id.Key)
|
||||
_, _ = h.Write(id.Key)
|
||||
k1 := h.Sum64()
|
||||
|
||||
s, ok := sfmap[k1]
|
||||
if !ok {
|
||||
@ -192,7 +195,7 @@ func (sg *SuperGraph) resolveRemotes(
|
||||
return to, cerr
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
|
||||
func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skipped uint32) (
|
||||
[][]byte,
|
||||
map[uint64]*qcode.Select) {
|
||||
|
||||
@ -227,15 +230,15 @@ func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipp
|
||||
fm[n] = r.IDField
|
||||
n++
|
||||
|
||||
k := xxhash.Sum64(r.IDField)
|
||||
sm[k] = s
|
||||
_, _ = h.Write(r.IDField)
|
||||
sm[h.Sum64()] = s
|
||||
}
|
||||
}
|
||||
|
||||
return fm, sm
|
||||
}
|
||||
|
||||
func isSkipped(n uint32, pos uint32) bool {
|
||||
func isSkipped(n, pos uint32) bool {
|
||||
return ((n & (1 << pos)) != 0)
|
||||
}
|
||||
|
||||
|
@ -2,11 +2,11 @@ package core
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/cespare/xxhash/v2"
|
||||
"github.com/dosco/super-graph/core/internal/psql"
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
)
|
||||
@ -19,7 +19,7 @@ type resolvFn struct {
|
||||
|
||||
func (sg *SuperGraph) initResolvers() error {
|
||||
var err error
|
||||
sg.rmap = make(map[uint64]*resolvFn)
|
||||
sg.rmap = make(map[uint64]resolvFn)
|
||||
|
||||
for _, t := range sg.conf.Tables {
|
||||
err = sg.initRemotes(t)
|
||||
@ -36,7 +36,8 @@ func (sg *SuperGraph) initResolvers() error {
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) initRemotes(t Table) error {
|
||||
h := xxhash.New()
|
||||
h := maphash.Hash{}
|
||||
h.SetSeed(sg.hashSeed)
|
||||
|
||||
for _, r := range t.Remotes {
|
||||
// defines the table column to be used as an id in the
|
||||
@ -45,7 +46,7 @@ func (sg *SuperGraph) initRemotes(t Table) error {
|
||||
|
||||
// if no table column specified in the config then
|
||||
// use the primary key of the table as the id
|
||||
if len(idcol) == 0 {
|
||||
if idcol == "" {
|
||||
pcol, err := sg.pc.IDColumn(t.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
@ -75,17 +76,18 @@ func (sg *SuperGraph) initRemotes(t Table) error {
|
||||
path = append(path, []byte(p))
|
||||
}
|
||||
|
||||
rf := &resolvFn{
|
||||
rf := resolvFn{
|
||||
IDField: []byte(idk),
|
||||
Path: path,
|
||||
Fn: fn,
|
||||
}
|
||||
|
||||
// index resolver obj by parent and child names
|
||||
sg.rmap[mkkey(h, r.Name, t.Name)] = rf
|
||||
sg.rmap[mkkey(&h, r.Name, t.Name)] = rf
|
||||
|
||||
// index resolver obj by IDField
|
||||
sg.rmap[xxhash.Sum64(rf.IDField)] = rf
|
||||
_, _ = h.Write(rf.IDField)
|
||||
sg.rmap[h.Sum64()] = rf
|
||||
}
|
||||
|
||||
return nil
|
||||
|
@ -1,11 +1,9 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"github.com/cespare/xxhash/v2"
|
||||
)
|
||||
import "hash/maphash"
|
||||
|
||||
// nolint: errcheck
|
||||
func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
|
||||
func mkkey(h *maphash.Hash, k1, k2 string) uint64 {
|
||||
h.WriteString(k1)
|
||||
h.WriteString(k2)
|
||||
v := h.Sum64()
|
||||
|
@ -1,40 +0,0 @@
|
||||
01a106d5.06939d67.js,1589776216137,2e1ce67f6cf79a8a8e2070fc4ea4a6104ac73a5b26a1ab10b62f6cd8e45a8074
|
||||
1.1c32171f.js.LICENSE.txt,1589776216144,31b4d50dbbd144da150dcdcf0ccef8f6cf8b6b5204d5c9adde3b24466777fad5
|
||||
0e384e19.7f29b403.js,1589776216137,e2c3882226f2a601b65e4bb1fdb771296c1946f9f125c90af4a8f451dfd2c867
|
||||
19.fdfbe826.js.LICENSE.txt,1589776216145,6ad95a8099127a8d42b5ace6d148064b1d3e922174f08d75d0ee2220ebeacd0b
|
||||
17896441.183211f5.js,1589776216137,7736db62d7498a8d3a10a617b1bdfac08c8f29dc03329f4ad3320f2571c223c0
|
||||
20ac7829.c04b4a1e.js,1589776216137,5b95f479848ccd6959630d4a24bd551d0dbc74457911e9b6f3498655bfaf8ea7
|
||||
1.1c32171f.js,1589776216137,5441b74bfad9f5a37ba0e6123621c73c3e3b9064bda6b9dcf62fdb7381bf8e41
|
||||
2.8f12478f.js,1589776216137,3ac7ca0df8fca86145f5decbd86c8adfbc6b5b11a5be96fc96cc9bc33d6306e6
|
||||
395f47e2.28d67f37.js,1589776216137,8a9b6bc2afdd99ca2b1827c8289352fab6163e30151b9701c29a7863b6cd00b6
|
||||
404.html,1589776218438,0a748eaa7614b1982623360ba8554c0f498b0796ead3cc429a2c84d287084b50
|
||||
3d9c95a4.c89589de.js,1589776216137,d5c45e5a3671f303683451d448e2e5d5b464f041cde683af6e824b9e7f951412
|
||||
9225b3a9.a5e6036b.js,1589776216137,ec9a0d4b34d8751f74348d0da369625a18f320c9ed5ab3c5ccf047ead2551bd8
|
||||
741df2ae.e13b96b2.js,1589776216137,12028f0cbdf783ac91ea42db64d91190ebd4df24cc74162f953aacc75d16d078
|
||||
969d212d.9fc45877.js,1589776216138,8323c9f2db042bfaa2ebba43d9500bed881a694d0bfc27fd796cec95bb032dc5
|
||||
c4f5d8e4.47e70b85.js,1589776216145,6f986b48720724e7c8a715812b5f6625c71c8eca258bb4b410a447eb5da52734
|
||||
index.html,1589776218438,89f81ec3d3be439a827bd61448dcaddb71c33422df7baa88a7bbcdf784dbc0b2
|
||||
98ce8162.b5ace15d.js,1589776216137,935e1c6dd08f7e9d0d00221559b95f0f649e28ddf64be6bbb7b3e65bae1aba72
|
||||
main.e30d99cd.js.LICENSE.txt,1589776216144,1d906c3b83eacffe298d21eeb73e6e73e96310983224783d236195098e6765a7
|
||||
runtime~main.366c29ad.js,1589776216145,0e550cc9522cd99c5fa4097c7db629eef56127a7f8ade0b7c9954cc8f6a01239
|
||||
5043549d.62508ecf.js,1589776216137,383959b80d2b0c6416e83c9640ea03c666fe92c407e13a6f022b58072feeafd2
|
||||
99e04881.197dcef6.js,1589776216144,af99883cbd4d58fbac7cbf814be33032b77bc8daf856aed54bdf0bf27ed5708d
|
||||
sitemap.xml,1589776218455,660ed269bf0306ba47ecdfb638e487147784d614c43c6c4a8e84194973baf183
|
||||
styles.9155f1d2.js,1589776216137,f1e0863928710e67338dc88c37f47ef3ff164d36c4bba40d005561094c9c3284
|
||||
db32d859.a032827a.js,1589776216145,36d575ffad747898726a97cb7a3551e636f744218595bea5c060536eb8d8390f
|
||||
docs/advanced/index.html,1589776218439,31171870786a597597de9417978a27253581c013962e39959ae4c0777bf86c28
|
||||
docs/deploy/index.html,1589776218440,7a4735edb93006311b704e62b843bf89bc4354fdf0fdc22a0c5802e39878c193
|
||||
docs/home/index.html,1589776218440,c7fbb0c1084c6ef8858775c5083b6b416b8188942d4402a5a625eadb3bc00942
|
||||
docs/intro/index.html,1589776218440,c7a50ae98c0b279f422e55c2eeb9f7ba1c7c1a8bcac07be11fd6e05ced224094
|
||||
img/super-graph-logo.svg,1589776218438,66a865c4936f44ea811464b967f221b615b7553e85dca0d6f1ef620da3911857
|
||||
docs/react/index.html,1589776218440,f76fc976f3491d9aacf19ce3b34bee1339f87c673a9da95f192683615618f210
|
||||
docs/why/index.html,1589776218440,4aa380fe4e5d8476645e368d1f708d5d1344331c572383db823c3499fa0c99cc
|
||||
docs/security/index.html,1589776218440,0c7d466dc143935db8c02a448952cae2465635e4b6782b1682449bbd56807917
|
||||
styles.8ee0cad4.css,1589776216137,34b2e79c5c5b1f7afda4376e422e8ccb2c3c04213ca09d788f0c68ecf153d6e6
|
||||
docs/config/index.html,1589776218440,25b6e87a42c163ac966e80acebca8708f56ae95ba8f3ed8b98ff7fd70ca5a222
|
||||
docs/internals/index.html,1589776218440,b6f2136a1c832f421a46329fb1f39269d820c55a0dfc9351848271a5501d8e6e
|
||||
docs/start/index.html,1589776218440,485ec2c61117d8940d8028f34d51d421995a814d5b9d4d5a1870adaed48aec2c
|
||||
docs/graphql/index.html,1589776218440,3bd79f703fe67656884f3121bfddc3a4fc4d9e5bb2bf9271c94014058fbbd806
|
||||
main.e30d99cd.js,1589776216144,98a4087d6f537aaddbc1225aaabfb4d12d1394772deb618d4d457685cee59311
|
||||
19.fdfbe826.js,1589776216144,b8abb73aea5fc0aa50d7e8b8bd38984e3b3aec62de2faf66fb3e55fd1428f8a7
|
||||
server.bundle.js,1589776218438,826db37f1de931e8b088c1ff20b4a3c2fe0c3d54d9ff4020e500f0df1b83a616
|
@ -55,6 +55,30 @@ query {
}
```

### Fragments

Fragments make it easy to build large complex queries from small, composable and reusable fragment blocks.

```graphql
query {
  users {
    ...userFields2
    ...userFields1
    picture_url
  }
}

fragment userFields1 on user {
  id
  email
}

fragment userFields2 on user {
  first_name
  last_name
}
```

### Sorting

To sort or order results, just use the `order_by` argument. This can be combined with `where`, `search`, etc. to build complex queries that fit your needs.
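For example, ordering products by price (a minimal sketch: the `products` table and its columns, the database URL, and the empty config passed to the constructor are all assumptions for illustration):

```go
package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	sg, err := core.NewSuperGraph(&core.Config{}, db)
	if err != nil {
		log.Fatal(err)
	}

	// order_by combined with a limit: the ten most expensive products.
	query := `
	query {
		products(order_by: { price: desc }, limit: 10) {
			id
			name
			price
		}
	}`

	res, err := sg.GraphQL(context.Background(), query, nil)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(string(res.Data))
}
```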
|
||||
|
@ -157,7 +157,9 @@ func main() {
|
||||
}
|
||||
}`
|
||||
|
||||
res, err := sg.GraphQL(context.Background(), query, nil)
|
||||
ctx = context.WithValue(ctx, core.UserIDKey, 1)
|
||||
|
||||
res, err := sg.GraphQL(ctx, query, nil)
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
@ -4,6 +4,8 @@ title: Introduction
|
||||
sidebar_label: Introduction
|
||||
---
|
||||
|
||||
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
|
||||
|
||||
Super Graph is a service that instantly and without code gives you a high-performance and secure GraphQL API. Your GraphQL queries are auto-translated into a single fast SQL query. No more spending weeks or months writing backend API code. Just make the query you need and Super Graph will do the rest.
|
||||
|
||||
Super Graph has a rich feature set like integrating with your existing Ruby on Rails apps, joining your DB with data from remote APIs, role- and attribute-based access control, support for JWT tokens, DB migrations, seeding and a lot more.
|
||||
@ -25,6 +27,8 @@ Super Graph has a rich feature set like integrating with your existing Ruby on R
|
||||
- Fuzz tested for security
|
||||
- Database migrations tool
|
||||
- Database seeding tool
|
||||
- Works with Postgres and Yugabyte DB
|
||||
- OpenCensus Support: Zipkin, Prometheus, X-Ray, Stackdriver
|
||||
|
||||
## Try the demo app
|
||||
|
||||
@ -132,3 +136,9 @@ mutation {
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
### Built-in GraphQL Editor
|
||||
|
||||
Quickly craft and test your queries with a full-featured GraphQL editor. Auto-complete and schema documentation are automatically available.
|
||||
|
||||
<img alt="Zipkin Traces" src={useBaseUrl("img/webui.jpg")} />
|
||||
|
@ -95,7 +95,7 @@ auth:
|
||||
type: jwt
|
||||
|
||||
jwt:
|
||||
# the two providers are 'auth0' and 'none'
|
||||
# valid providers are auth0, firebase and none
|
||||
provider: auth0
|
||||
secret: abc335bfcfdb04e50db5bb0a4d67ab9
|
||||
public_key_file: /secrets/public_key.pem
|
||||
@ -108,6 +108,19 @@ We can get the JWT token either from the `authorization` header where we expect
|
||||
|
||||
For validation, a `secret` or a public key (ECDSA or RSA) is required. When using public keys they have to be in a PEM-format file.
|
||||
|
||||
### Firebase Auth
|
||||
|
||||
```yaml
|
||||
auth:
|
||||
type: jwt
|
||||
|
||||
jwt:
|
||||
provider: firebase
|
||||
audience: <firebase-project-id>
|
||||
```
|
||||
|
||||
Firebase auth also uses JWT; the keys are auto-fetched from Google and used according to their documented mechanism. The `audience` config value needs to be set to your project ID, and everything else is taken care of for you.
|
||||
|
||||
### HTTP Headers
|
||||
|
||||
```yaml
|
||||
|
249
docs/website/docs/seed.md
Normal file
@ -0,0 +1,249 @@
|
||||
---
|
||||
id: seed
|
||||
title: Database Seeding
|
||||
sidebar_label: Seed Scripts
|
||||
---
|
||||
|
||||
While developing, it's often useful to have fake data available in the database. Fake data can help with building the UI and save you time when trying to get the GraphQL query correct. Super Graph can do this for you. All you have to do is write a seed script `config/seed.js` (in JavaScript) and use the `db:seed` command line option. Below is an example of the kinds of things you can do in a seed script.
|
||||
|
||||
## Creating fake users
|
||||
|
||||
Since all mutations and queries are standard GraphQL, you can use all the features available in Super Graph GraphQL.
|
||||
|
||||
```javascript
|
||||
var users = [];
|
||||
|
||||
for (i = 0; i < 20; i++) {
|
||||
var data = {
|
||||
slug: util.make_slug(fake.first_name() + "-" + fake.last_name()),
|
||||
first_name: fake.first_name(),
|
||||
last_name: fake.last_name(),
|
||||
picture_url: fake.avatar_url(),
|
||||
email: fake.email(),
|
||||
bio: fake.sentence(10),
|
||||
};
|
||||
|
||||
var res = graphql(" \
|
||||
mutation { \
|
||||
user(insert: $data) { \
|
||||
id \
|
||||
} \
|
||||
}", { data: data });
|
||||
|
||||
users.push(res.user);
|
||||
}
|
||||
```
|
||||
|
||||
## Inserting the users' fake blog posts
|
||||
|
||||
Another example highlighting how the `connect` syntax of Super Graph GraphQL can be used to connect inserted posts
to random users that were previously created. For further details check out the [seed script](/seed) documentation.
|
||||
|
||||
```javascript
|
||||
var posts = [];
|
||||
|
||||
for (i = 0; i < 1500; i++) {
|
||||
var user = users[Math.floor(Math.random() * 10)];
|
||||
|
||||
var data = {
|
||||
slug: util.make_slug(fake.sentence(3) + i),
|
||||
body: fake.sentence(100),
|
||||
published: true,
|
||||
thread: {
|
||||
connect: { user: user.id }
|
||||
}
|
||||
}
|
||||
|
||||
var res = graphql(" \
|
||||
mutation { \
|
||||
post(insert: $data) { \
|
||||
id \
|
||||
} \
|
||||
}",
|
||||
{ data: data },
|
||||
{ user_id: user.id })
|
||||
|
||||
posts.push(res.post.slug)
|
||||
}
|
||||
```
|
||||
|
||||
## Insert a large number of rows efficiently
|
||||
|
||||
This feature uses the `COPY` functionality available in Postgres, which is the best way to
insert a large number of rows into a table. The `import_csv` function reads in a CSV file, using the first
line of the file as column names.
|
||||
|
||||
```javascript
|
||||
import_csv("post_tags", "./tags.csv");
|
||||
```
|
||||
|
||||
## A list of fake data functions available to you.
|
||||
|
||||
```
|
||||
person
|
||||
name
|
||||
name_prefix
|
||||
name_suffix
|
||||
first_name
|
||||
last_name
|
||||
gender
|
||||
ssn
|
||||
contact
|
||||
email
|
||||
phone
|
||||
phone_formatted
|
||||
username
|
||||
password
|
||||
|
||||
// Address
|
||||
address
|
||||
city
|
||||
country
|
||||
country_abr
|
||||
state
|
||||
state_abr
|
||||
street
|
||||
street_name
|
||||
street_number
|
||||
street_prefix
|
||||
street_suffix
|
||||
zip
|
||||
latitude
|
||||
latitude_in_range
|
||||
longitude
|
||||
longitude_in_range
|
||||
|
||||
// Beer
|
||||
beer_alcohol
|
||||
beer_hop
|
||||
beer_ibu
|
||||
beer_blg
|
||||
beer_malt
|
||||
beer_name
|
||||
beer_style
|
||||
beer_yeast
|
||||
|
||||
// Cars
|
||||
car
|
||||
car_type
|
||||
car_maker
|
||||
car_model
|
||||
|
||||
// Text
|
||||
word
|
||||
sentence
|
||||
paragraph
|
||||
question
|
||||
quote
|
||||
|
||||
// Misc
|
||||
generate
|
||||
boolean
|
||||
uuid
|
||||
|
||||
// Colors
|
||||
color
|
||||
hex_color
|
||||
rgb_color
|
||||
safe_color
|
||||
|
||||
// Internet
|
||||
url
|
||||
image_url
|
||||
avatar_url
|
||||
domain_name
|
||||
domain_suffix
|
||||
ipv4_address
|
||||
ipv6_address
|
||||
http_method
|
||||
user_agent
|
||||
user_agent_firefox
|
||||
user_agent_chrome
|
||||
user_agent_opera
|
||||
user_agent_safari
|
||||
|
||||
// Date / Time
|
||||
date
|
||||
date_range
|
||||
nano_second
|
||||
second
|
||||
minute
|
||||
hour
|
||||
month
|
||||
day
|
||||
weekday
|
||||
year
|
||||
timezone
|
||||
timezone_abv
|
||||
timezone_full
|
||||
timezone_offset
|
||||
|
||||
// Payment
|
||||
price
|
||||
credit_card
|
||||
credit_card_cvv
|
||||
credit_card_number
|
||||
credit_card_type
|
||||
currency
|
||||
currency_long
|
||||
currency_short
|
||||
|
||||
// Company
|
||||
bs
|
||||
buzzword
|
||||
company
|
||||
company_suffix
|
||||
job
|
||||
job_description
|
||||
job_level
|
||||
job_title
|
||||
|
||||
// Hacker
|
||||
hacker_abbreviation
|
||||
hacker_adjective
|
||||
hacker_noun
|
||||
hacker_phrase
|
||||
hacker_verb
|
||||
|
||||
//Hipster
|
||||
hipster_word
|
||||
hipster_paragraph
|
||||
hipster_sentence
|
||||
|
||||
// File
|
||||
file_extension
|
||||
file_mine_type
|
||||
|
||||
// Numbers
|
||||
number
|
||||
numerify
|
||||
int8
|
||||
int16
|
||||
int32
|
||||
int64
|
||||
uint8
|
||||
uint16
|
||||
uint32
|
||||
uint64
|
||||
float32
|
||||
float32_range
|
||||
float64
|
||||
float64_range
|
||||
shuffle_ints
|
||||
mac_address
|
||||
|
||||
// String
|
||||
digit
|
||||
letter
|
||||
lexify
|
||||
rand_string
|
||||
numerify
|
||||
```
|
||||
|
||||
## Some more utility functions
|
||||
|
||||
```
|
||||
shuffle_strings(string_array)
|
||||
make_slug(text)
|
||||
make_slug_lang(text, lang)
|
||||
```
|
@ -10,7 +10,7 @@ You can then add your database schema to the migrations, maybe create some seed
|
||||
|
||||
```bash
|
||||
# Download and install Super Graph. You will need Go 1.14 or above
|
||||
go get https://github.com/dosco/super-graph
|
||||
go get github.com/dosco/super-graph
|
||||
```
|
||||
|
||||
And then create and launch your new app
|
||||
@ -96,179 +96,6 @@ var post_count = import_csv("posts", "posts.csv");
|
||||
|
||||
You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example, `fake.image_url()` will generate a fake image URL, `fake.shuffle_strings(['hello', 'world', 'cool'])` will return a randomly shuffled version of that array of strings, and `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided.
|
||||
|
||||
```
|
||||
// Person
|
||||
person
|
||||
name
|
||||
name_prefix
|
||||
name_suffix
|
||||
first_name
|
||||
last_name
|
||||
gender
|
||||
ssn
|
||||
contact
|
||||
email
|
||||
phone
|
||||
phone_formatted
|
||||
username
|
||||
password
|
||||
|
||||
// Address
|
||||
address
|
||||
city
|
||||
country
|
||||
country_abr
|
||||
state
|
||||
state_abr
|
||||
status_code
|
||||
street
|
||||
street_name
|
||||
street_number
|
||||
street_prefix
|
||||
street_suffix
|
||||
zip
|
||||
latitude
|
||||
latitude_in_range
|
||||
longitude
|
||||
longitude_in_range
|
||||
|
||||
// Beer
|
||||
beer_alcohol
|
||||
beer_hop
|
||||
beer_ibu
|
||||
beer_blg
|
||||
beer_malt
|
||||
beer_name
|
||||
beer_style
|
||||
beer_yeast
|
||||
|
||||
// Cars
|
||||
car
|
||||
car_type
|
||||
car_maker
|
||||
car_model
|
||||
|
||||
// Text
|
||||
word
|
||||
sentence
|
||||
paragraph
|
||||
question
|
||||
quote
|
||||
|
||||
// Misc
|
||||
generate
|
||||
boolean
|
||||
uuid
|
||||
|
||||
// Colors
|
||||
color
|
||||
hex_color
|
||||
rgb_color
|
||||
safe_color
|
||||
|
||||
// Internet
|
||||
url
|
||||
image_url
|
||||
domain_name
|
||||
domain_suffix
|
||||
ipv4_address
|
||||
ipv6_address
|
||||
simple_status_code
|
||||
http_method
|
||||
user_agent
|
||||
user_agent_firefox
|
||||
user_agent_chrome
|
||||
user_agent_opera
|
||||
user_agent_safari
|
||||
|
||||
// Date / Time
|
||||
date
|
||||
date_range
|
||||
nano_second
|
||||
second
|
||||
minute
|
||||
hour
|
||||
month
|
||||
day
|
||||
weekday
|
||||
year
|
||||
timezone
|
||||
timezone_abv
|
||||
timezone_full
|
||||
timezone_offset
|
||||
|
||||
// Payment
|
||||
price
|
||||
credit_card
|
||||
credit_card_cvv
|
||||
credit_card_number
|
||||
credit_card_number_luhn
|
||||
credit_card_type
|
||||
currency
|
||||
currency_long
|
||||
currency_short
|
||||
|
||||
// Company
|
||||
bs
|
||||
buzzword
|
||||
company
|
||||
company_suffix
|
||||
job
|
||||
job_description
|
||||
job_level
|
||||
job_title
|
||||
|
||||
// Hacker
|
||||
hacker_abbreviation
|
||||
hacker_adjective
|
||||
hacker_ingverb
|
||||
hacker_noun
|
||||
hacker_phrase
|
||||
hacker_verb
|
||||
|
||||
//Hipster
|
||||
hipster_word
|
||||
hipster_paragraph
|
||||
hipster_sentence
|
||||
|
||||
// File
|
||||
file_extension
|
||||
file_mine_type
|
||||
|
||||
// Numbers
|
||||
number
|
||||
numerify
|
||||
int8
|
||||
int16
|
||||
int32
|
||||
int64
|
||||
uint8
|
||||
uint16
|
||||
uint32
|
||||
uint64
|
||||
float32
|
||||
float32_range
|
||||
float64
|
||||
float64_range
|
||||
shuffle_ints
|
||||
mac_address
|
||||
|
||||
//String
|
||||
digit
|
||||
letter
|
||||
lexify
|
||||
shuffle_strings
|
||||
numerify
|
||||
```
|
||||
|
||||
Other utility functions
|
||||
|
||||
```
|
||||
shuffle_strings(string_array)
|
||||
make_slug(text)
|
||||
make_slug_lang(text, lang)
|
||||
```
|
||||
|
||||
### Migrations
|
||||
|
||||
Easy database migrations are the most important thing when building products backed by a relational database. We make it super easy to manage and migrate your database.
|
||||
|
82
docs/website/docs/telemetry.md
Normal file
@ -0,0 +1,82 @@
|
||||
---
|
||||
id: telemetry
|
||||
title: Tracing and Metrics
|
||||
sidebar_label: Telemetry
|
||||
---
|
||||
|
||||
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
|
||||
|
||||
Having observability and telemetry is at the core of any production-ready service. Super Graph has built-in support for OpenCensus for tracing requests all the way from HTTP to the database and for providing all kinds of metrics.
|
||||
|
||||
OpenCensus has a concept called exporters: external services that consume this data to give you graphs, charts, alerting, etc. Super Graph has built-in support for the Zipkin, Prometheus, Google Stackdriver and AWS X-Ray exporters.
|
||||
|
||||
## Telemetry config
|
||||
|
||||
The `telemetry` section of the standard config files is where you set values to configure this feature to your needs.
|
||||
|
||||
```yaml
|
||||
telemetry:
|
||||
debug: true
|
||||
interval: 5s
|
||||
metrics:
|
||||
exporter: "prometheus"
|
||||
endpoint: ""
|
||||
namespace: "web api"
|
||||
key: "1234xyz"
|
||||
tracing:
|
||||
exporter: "zipkin"
|
||||
endpoint: "http://zipkin:9411/api/v2/spans"
|
||||
sample: 0.2
|
||||
include_query: false
|
||||
include_params: false
|
||||
```
|
||||
|
||||
**debug**: Enabling debug enables an embedded web UI to test and debug tracing and metrics. This UI, called `zPages`, is provided by OpenCensus and will be made available on the `/telemetry` path. For more information on using `zPages` see https://opencensus.io/zpages/. Remember to disable this in production.
|
||||
|
||||
**interval**: This controls the interval setting for OpenCensus metrics collection. It defaults to `5 seconds` if not set.
|
||||
|
||||
**metric.exporters** Setting this enables metrics collection. The supported values for this field are `prometheus` and `stackdriver`. The Prometheus exporter requires `metric.namespace` to be set. The Stackdriver exporter requires `metric.key` to be set to the Google Cloud Project ID.
|
||||
|
||||
**metric.endpoint** Is not currently used by any of the exporters.
|
||||
|
||||
**tracing.exporter** Setting this enables request tracing. The supported values for this field are `zipkin`, `aws` and `xray`. Zipkin requires `tracing.endpoint` to be set. `aws` and `xray` are the same exporter and do not require any additional settings.
|
||||
|
||||
**tracing.sample** This controls what percentage of the requests should be traced. By default `0.5`, or 50%, of the requests are traced; `always` is also a valid value for this field and means all requests will be traced.
|
||||
|
||||
**include_query** Include the Super Graph SQL query in the trace. Be careful with this setting in production: it adds the entire SQL query to the trace. This can be very useful for debugging slow requests.
|
||||
|
||||
**include_params** Include the Super Graph SQL query parameters in the trace. Be careful with this setting in production; it can potentially leak sensitive user information into tracing logs.
|
||||
|
||||
## Using Zipkin
|
||||
|
||||
Zipkin is a really great open source request tracing project. It's easy to add to your current Super Graph app as a way to test tracing in development. Add the following to the Super Graph generated `docker-compose.yml` file. Also add `zipkin` to your current app's `depends_on` list. Once set up, the Zipkin UI is available at http://localhost:9411.
|
||||
|
||||
```yaml
|
||||
your_api:
|
||||
...
|
||||
depends_on:
|
||||
- db
|
||||
- zipkin
|
||||
|
||||
zipkin:
|
||||
image: openzipkin/zipkin-slim
|
||||
container_name: zipkin
|
||||
# Environment settings are defined here https://github.com/openzipkin/zipkin/blob/master/zipkin-server/README.md#environment-variables
|
||||
environment:
|
||||
- STORAGE_TYPE=mem
|
||||
# Uncomment to enable self-tracing
|
||||
# - SELF_TRACING_ENABLED=true
|
||||
# Uncomment to enable debug logging
|
||||
# - JAVA_OPTS=-Dorg.slf4j.simpleLogger.log.zipkin2=debug
|
||||
ports:
|
||||
# Port used for the Zipkin UI and HTTP Api
|
||||
- 9411:9411
|
||||
```
|
||||
|
||||
### Zipkin HTTP to DB traces
|
||||
|
||||
<img alt="Zipkin Traces" src={useBaseUrl("img/zipkin1.png")} />
|
||||
|
||||
### Zipkin trace details
|
||||
|
||||
<img alt="Zipkin Traces" src={useBaseUrl('img/zipkin2.png')} />
|
13
docs/website/docs/webui.md
Normal file
@ -0,0 +1,13 @@
|
||||
---
|
||||
id: webui
|
||||
title: Web UI / GraphQL Editor
|
||||
sidebar_label: Web UI
|
||||
---
|
||||
|
||||
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
|
||||
|
||||
<img alt="Zipkin Traces" src={useBaseUrl("img/webui.jpg")} />
|
||||
|
||||
Super Graph comes with a built-in GraphQL editor that only runs in development. Use it to craft your queries and copy-paste them into your app once you're ready. The editor supports auto-completion and schema documentation. This makes it super easy to craft and test your query all in one go without knowing anything about the underlying database structure.
|
||||
|
||||
You can even set query variables or HTTP headers as required. To simulate an authenticated user, set the HTTP header `"X-USER-ID": 5` to the ID of the user you want to test with.
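For instance, the same simulated-user request can be made outside the editor. A rough sketch, where the endpoint path and port are assumptions based on a typical dev setup:

```go
package webui_example

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
)

// postAsUser sends a GraphQL query with the X-USER-ID header set,
// simulating an authenticated user the same way the web UI does.
// The URL below is an assumed default dev endpoint.
func postAsUser(query string, userID int) error {
	body := []byte(fmt.Sprintf(`{"query": %q}`, query))

	req, err := http.NewRequest("POST",
		"http://localhost:8080/api/v1/graphql", bytes.NewReader(body))
	if err != nil {
		return err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-USER-ID", fmt.Sprintf("%d", userID))

	res, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer res.Body.Close()

	data, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return err
	}
	fmt.Println(string(data))
	return nil
}
```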
|
@ -36,8 +36,8 @@ module.exports = {
|
||||
position: "left",
|
||||
},
|
||||
{
|
||||
label: "Art Compute",
|
||||
href: "https://artcompute.com/s/super-graph",
|
||||
label: "AbtCode",
|
||||
href: "https://abtcode.com/s/super-graph",
|
||||
position: "left",
|
||||
},
|
||||
],
|
||||
|
@ -3,13 +3,16 @@ module.exports = {
|
||||
Docusaurus: [
|
||||
"home",
|
||||
"intro",
|
||||
"webui",
|
||||
"start",
|
||||
"why",
|
||||
"graphql",
|
||||
"react",
|
||||
"advanced",
|
||||
"security",
|
||||
"telemetry",
|
||||
"config",
|
||||
"seed",
|
||||
"deploy",
|
||||
"internals",
|
||||
],
|
||||
|
BIN
docs/website/static/img/webui.jpg
Normal file (binary, 117 KiB, not shown)
BIN
docs/website/static/img/zipkin1.png
Normal file (binary, 112 KiB, not shown)
BIN
docs/website/static/img/zipkin2.png
Normal file (binary, 141 KiB, not shown)
@ -1805,11 +1805,6 @@ asynckit@^0.4.0:
|
||||
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
|
||||
integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
|
||||
|
||||
at-least-node@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
|
||||
integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
|
||||
|
||||
atob@^2.1.2:
|
||||
version "2.1.2"
|
||||
resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
|
||||
@ -2323,7 +2318,7 @@ ccount@^1.0.0, ccount@^1.0.3:
|
||||
resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.0.5.tgz#ac82a944905a65ce204eb03023157edf29425c17"
|
||||
integrity sha512-MOli1W+nfbPLlKEhInaxhRdp7KVLFxLN5ykwzHgLsLI3H3gs5jjFAK4Eoj3OzzcxCtumDaI8onoVDeQyWaNTkw==
|
||||
|
||||
chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2:
|
||||
chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2:
|
||||
version "2.4.2"
|
||||
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
|
||||
integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
|
||||
@ -2522,15 +2517,6 @@ cliui@^5.0.0:
|
||||
strip-ansi "^5.2.0"
|
||||
wrap-ansi "^5.1.0"
|
||||
|
||||
cliui@^6.0.0:
|
||||
version "6.0.0"
|
||||
resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
|
||||
integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
|
||||
dependencies:
|
||||
string-width "^4.2.0"
|
||||
strip-ansi "^6.0.0"
|
||||
wrap-ansi "^6.2.0"

coa@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3"

@ -3216,11 +3202,6 @@ depd@~1.1.2:
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=

dependency-graph@^0.9.0:
version "0.9.0"
resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318"
integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w==

des.js@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843"

@ -3830,7 +3811,7 @@ fast-glob@^2.0.2:
merge2 "^1.2.3"
micromatch "^3.1.10"

fast-glob@^3.0.3, fast-glob@^3.1.1:
fast-glob@^3.0.3:
version "3.2.2"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.2.tgz#ade1a9d91148965d4bf7c51f72e1ca662d32e63d"
integrity sha512-UDV82o4uQyljznxwMxyVRJgZZt3O5wENYojjzbaGEGZgeOxkLFf+V4cnUD+krzb2F72E18RhamkMZ7AdeggF7A==

@ -3970,7 +3951,7 @@ find-cache-dir@^3.0.0, find-cache-dir@^3.3.1:
make-dir "^3.0.2"
pkg-dir "^4.1.0"

find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0:
find-up@4.1.0, find-up@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==

@ -4084,16 +4065,6 @@ fs-extra@^8.0.0, fs-extra@^8.1.0:
jsonfile "^4.0.0"
universalify "^0.1.0"

fs-extra@^9.0.0:
version "9.0.0"
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.0.0.tgz#b6afc31036e247b2466dc99c29ae797d5d4580a3"
integrity sha512-pmEYSk3vYsG/bF651KPUXZ+hvjpgWYw/Gc7W9NFUe3ZVLczKKWIij3IKpOrQcdw4TILtibFslZ0UmR8Vvzig4g==
dependencies:
at-least-node "^1.0.0"
graceful-fs "^4.2.0"
jsonfile "^6.0.1"
universalify "^1.0.0"

fs-minipass@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"

@ -4149,11 +4120,6 @@ get-own-enumerable-property-symbols@^3.0.0:
resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==

get-stdin@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-7.0.0.tgz#8d5de98f15171a125c5e516643c7a6d0ea8a96f6"
integrity sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==

get-stream@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"

@ -4275,18 +4241,6 @@ globby@^10.0.1:
merge2 "^1.2.3"
slash "^3.0.0"

globby@^11.0.0:
version "11.0.0"
resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.0.tgz#56fd0e9f0d4f8fb0c456f1ab0dee96e1380bc154"
integrity sha512-iuehFnR3xu5wBBtm4xi0dMe92Ob87ufyu/dHwpDYfbcpYpIbrO5OnS8M1vWvrBhSGEJ3/Ecj7gnX76P8YxpPEg==
dependencies:
array-union "^2.1.0"
dir-glob "^3.0.1"
fast-glob "^3.1.1"
ignore "^5.1.4"
merge2 "^1.3.0"
slash "^3.0.0"

globby@^6.1.0:
version "6.1.0"
resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"

@ -4743,7 +4697,7 @@ ignore@^3.3.5:
resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==

ignore@^5.1.1, ignore@^5.1.4:
ignore@^5.1.1:
version "5.1.4"
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.4.tgz#84b7b3dbe64552b6ef0eca99f6743dbec6d97adf"
integrity sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==

@ -5382,15 +5336,6 @@ jsonfile@^4.0.0:
optionalDependencies:
graceful-fs "^4.1.6"

jsonfile@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.0.1.tgz#98966cba214378c8c84b82e085907b40bf614179"
integrity sha512-jR2b5v7d2vIOust+w3wtFKZIfpC2pnRmFAhAC/BuweZFQR8qZzxH1OyrQ10HmdVYiXWkYUqPVsz91cG7EL2FBg==
dependencies:
universalify "^1.0.0"
optionalDependencies:
graceful-fs "^4.1.6"

jsprim@^1.2.2:
version "1.4.1"
resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"

@ -5656,13 +5601,6 @@ lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==

log-symbols@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a"
integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==
dependencies:
chalk "^2.0.1"

loglevel@^1.6.8:
version "1.6.8"
resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.8.tgz#8a25fb75d092230ecd4457270d80b54e28011171"

@ -6645,7 +6583,7 @@ picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1:
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==

pify@^2.0.0, pify@^2.3.0:
pify@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=

@ -6731,24 +6669,6 @@ postcss-calc@^7.0.1:
postcss-selector-parser "^6.0.2"
postcss-value-parser "^4.0.2"

postcss-cli@^7.1.1:
version "7.1.1"
resolved "https://registry.yarnpkg.com/postcss-cli/-/postcss-cli-7.1.1.tgz#260f9546be260b2149bf32e28d785a0d79c9aab8"
integrity sha512-bYQy5ydAQJKCMSpvaMg0ThPBeGYqhQXumjbFOmWnL4u65CYXQ16RfS6afGQpit0dGv/fNzxbdDtx8dkqOhhIbg==
dependencies:
chalk "^4.0.0"
chokidar "^3.3.0"
dependency-graph "^0.9.0"
fs-extra "^9.0.0"
get-stdin "^7.0.0"
globby "^11.0.0"
postcss "^7.0.0"
postcss-load-config "^2.0.0"
postcss-reporter "^6.0.0"
pretty-hrtime "^1.0.3"
read-cache "^1.0.0"
yargs "^15.0.2"

postcss-color-functional-notation@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz#5efd37a88fbabeb00a2966d1e53d98ced93f74e0"

@ -7288,16 +7208,6 @@ postcss-replace-overflow-wrap@^3.0.0:
dependencies:
postcss "^7.0.2"

postcss-reporter@^6.0.0:
version "6.0.1"
resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-6.0.1.tgz#7c055120060a97c8837b4e48215661aafb74245f"
integrity sha512-LpmQjfRWyabc+fRygxZjpRxfhRf9u/fdlKf4VHG4TSPbV2XNsuISzYW1KL+1aQzx53CAppa1bKG4APIB/DOXXw==
dependencies:
chalk "^2.4.1"
lodash "^4.17.11"
log-symbols "^2.2.0"
postcss "^7.0.7"

postcss-selector-matches@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz#71c8248f917ba2cc93037c9637ee09c64436fcff"

@ -7397,7 +7307,7 @@ postcss@^6.0.9:
source-map "^0.6.1"
supports-color "^5.4.0"

postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6, postcss@^7.0.7:
postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6:
version "7.0.30"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.30.tgz#cc9378beffe46a02cbc4506a0477d05fcea9a8e2"
integrity sha512-nu/0m+NtIzoubO+xdAlwZl/u5S5vi/y6BCsoL8D+8IxsD3XvBS8X4YEADNIVXKVuQvduiucnRv+vPIqj56EGMQ==

@ -7692,6 +7602,11 @@ react-helmet@^6.0.0-beta:
react-fast-compare "^2.0.4"
react-side-effect "^2.1.0"

react-hook-sticky@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/react-hook-sticky/-/react-hook-sticky-0.2.0.tgz#0dcc40a2afb1856e53764af9b231f1146e3de576"
integrity sha512-J92F5H6PJQlMBgZ2tv58GeVlTZtEhpZ9bYLdoV2+5fVSJScszuY+TDZY3enQEAPIgJsLteFglGGuf8/TB9L72Q==

react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1:
version "16.13.1"
resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"

@ -7785,13 +7700,6 @@ react@^16.8.4:
object-assign "^4.1.1"
prop-types "^15.6.2"

read-cache@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774"
integrity sha1-5mTvMRYRZsl1HNvo28+GtftY93Q=
dependencies:
pify "^2.3.0"

"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
version "2.3.7"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"

@ -8709,7 +8617,7 @@ string-width@^3.0.0, string-width@^3.1.0:
is-fullwidth-code-point "^2.0.0"
strip-ansi "^5.1.0"

string-width@^4.1.0, string-width@^4.2.0:
string-width@^4.1.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==

@ -9305,11 +9213,6 @@ universalify@^0.1.0:
resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==

universalify@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/universalify/-/universalify-1.0.0.tgz#b61a1da173e8435b2fe3c67d29b9adf8594bd16d"
integrity sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==

unpipe@1.0.0, unpipe@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"

@ -9723,7 +9626,7 @@ wrap-ansi@^5.1.0:
string-width "^3.0.0"
strip-ansi "^5.0.0"

wrap-ansi@^6.0.0, wrap-ansi@^6.2.0:
wrap-ansi@^6.0.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==

@ -9784,14 +9687,6 @@ yargs-parser@^13.1.2:
camelcase "^5.0.0"
decamelize "^1.2.0"

yargs-parser@^18.1.1:
version "18.1.3"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"

yargs@^13.3.2:
version "13.3.2"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"

@ -9808,23 +9703,6 @@ yargs@^13.3.2:
y18n "^4.0.0"
yargs-parser "^13.1.2"

yargs@^15.0.2:
version "15.3.1"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b"
integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==
dependencies:
cliui "^6.0.0"
decamelize "^1.2.0"
find-up "^4.1.0"
get-caller-file "^2.0.1"
require-directory "^2.1.1"
require-main-filename "^2.0.0"
set-blocking "^2.0.0"
string-width "^4.2.0"
which-module "^2.0.0"
y18n "^4.0.0"
yargs-parser "^18.1.1"

zepto@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/zepto/-/zepto-1.2.0.tgz#e127bd9e66fd846be5eab48c1394882f7c0e4f98"
go.mod
@ -12,7 +12,6 @@ require (
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
github.com/brianvoe/gofakeit/v5 v5.2.0
github.com/cespare/xxhash/v2 v2.1.1
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a
github.com/daaku/go.zipexe v1.0.1 // indirect
github.com/dgrijalva/jwt-go v3.2.0+incompatible

@ -37,7 +36,6 @@ require (
github.com/spf13/pflag v1.0.5 // indirect
github.com/spf13/viper v1.6.3
github.com/stretchr/testify v1.5.1
github.com/valyala/fasttemplate v1.1.0
go.opencensus.io v0.22.3
go.uber.org/zap v1.14.1
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904
go.sum
@ -35,7 +35,9 @@ github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3 h1:+qz9Ga6l6lKw6fgv
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3/go.mod h1:FlkD11RtgMTYjVuBnb7cxoHmQGqvPpCsr2atC88nl/M=
github.com/akavel/rsrc v0.8.0 h1:zjWn7ukO9Kc5Q62DOJCcxGpXC18RawVtYAGdz2aLlfw=
github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/aws/aws-sdk-go v1.15.27 h1:i75BxN4Es/8rTVQbEKAP1WCiIhhz635xTNeDdZJRAXQ=

@ -53,8 +55,6 @@ github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a h1:WVu7r2vwlrBVmunbSSU+9/3M3AgsQyhE49CKDjHiFq4=
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a/go.mod h1:wQjjxFMFyMlsWh4Z3nMuHQtevD4Ul9UVQSnz1JOLuP8=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=

@ -113,6 +113,7 @@ github.com/gogo/googleapis v1.1.0/go.mod h1:gf4bu3Q80BeJ6H1S1vYPm8/ELATdvryBaNFG
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.0/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
github.com/gogo/protobuf v1.2.1/go.mod h1:hp+jE20tsWTFYpLwKvXlhS1hjn+gTNwPg2I6zVXpSg4=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b h1:VKtxabqXZkF25pY9ekfRL6a582T4P37/31XEstQ5p58=
github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfUlaOkMMuAQtPIUF8ecWP5IEl/CR7VP2Q=
github.com/golang/groupcache v0.0.0-20190129154638-5b532d6fd5ef/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc=
github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6 h1:ZgQEtGgCBiWRM39fZuwSd1LwSqqSW0hOdXCYYDX0R3I=

@ -131,6 +132,7 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/go-cmp v0.3.0 h1:crn/baboCvb5fXaQ0IJ1SGTsTVrWpDsCWC8EGETZijY=
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/go-cmp v0.3.1 h1:Xye71clBPdm5HgqGwUkwhbynsUJZhDbS20FvLhQ2izg=
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=

@ -142,7 +144,9 @@ github.com/googleapis/gax-go/v2 v2.0.5 h1:sjZBwGj9Jlw33ImPtvFviGYvseOtDM7hkSKB7+
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/context v1.1.1 h1:AWwleXJkX/nhcU9bZSnZoi3h/qGYqQAGhq6zZe/aQW8=
github.com/gorilla/context v1.1.1/go.mod h1:kBGZzfjB9CEq2AlWe17Uuf7NDRt0dE0s8S51q0aT7Yg=
github.com/gorilla/mux v1.6.2 h1:Pgr17XVTNXAk3q/r4CpKzC5xBM/qW1uVLV+IhRZpIIk=
github.com/gorilla/mux v1.6.2/go.mod h1:1lud6UwP+6orDFRuTfBEV8e9/aOM/c4fVVCaMa2zaAs=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=

@ -216,6 +220,7 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=

@ -315,6 +320,7 @@ github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeV
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=

@ -369,8 +375,6 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8=
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/valyala/fasttemplate v1.1.0 h1:RZqt0yGBsps8NGvLSGW804QQqCUYYLsaOjTVHy1Ocw4=
github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=

@ -541,6 +545,7 @@ google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ij
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.1 h1:q4XQuHFC6I28BKZpo6IYyb3mNO+l7lSOxRuYTCiDfXk=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
@ -155,8 +155,8 @@ func cmdVersion(cmd *cobra.Command, args []string) {
}

func BuildDetails() string {
if len(version) == 0 {
return fmt.Sprintf(`
if version == "" {
return `
Super Graph (unknown version)
For documentation, visit https://supergraph.dev

@ -166,7 +166,7 @@ To build with version information please use the Makefile

Licensed under the Apache Public License 2.0
Copyright 2020, Vikram Rangnekar
`)
`
}

return fmt.Sprintf(`
@ -298,9 +298,9 @@ func ExtractErrorLine(source string, position int) (ErrorLineExtract, error) {

func getMigrationVars() map[string]interface{} {
return map[string]interface{}{
"app_name": strings.Title(conf.AppName),
"app_name_slug": strings.ToLower(strings.Replace(conf.AppName, " ", "_", -1)),
"env": strings.ToLower(os.Getenv("GO_ENV")),
"AppName": strings.Title(conf.AppName),
"AppNameSlug": strings.ToLower(strings.Replace(conf.AppName, " ", "_", -1)),
"Env": strings.ToLower(os.Getenv("GO_ENV")),
}
}
@ -2,8 +2,7 @@ package serv

import (
"bytes"
"fmt"
"io"
"html/template"
"io/ioutil"
"os"
"path"

@ -11,7 +10,6 @@ import (

rice "github.com/GeertJohan/go.rice"
"github.com/spf13/cobra"
"github.com/valyala/fasttemplate"
)

func cmdNew(cmd *cobra.Command, args []string) {

@ -21,8 +19,8 @@ func cmdNew(cmd *cobra.Command, args []string) {
}

tmpl := newTempl(map[string]string{
"app_name": strings.Title(strings.Join(args, " ")),
"app_name_slug": strings.ToLower(strings.Join(args, "_")),
"AppName": strings.Title(strings.Join(args, " ")),
"AppNameSlug": strings.ToLower(strings.Join(args, "_")),
})

// Create app folder and add relevant files

@ -90,6 +88,10 @@ func cmdNew(cmd *cobra.Command, args []string) {
}
})

ifNotExists(path.Join(appConfigPath, "allow.list"), func(p string) error {
return ioutil.WriteFile(p, []byte{}, 0644)
})

// Create app migrations folder and add relevant files

appMigrationsPath := path.Join(appConfigPath, "migrations")

@ -121,19 +123,16 @@ func newTempl(data map[string]string) *Templ {
func (t *Templ) get(name string) ([]byte, error) {
v := t.MustString(name)
b := bytes.Buffer{}
tmpl := fasttemplate.New(v, "{%", "%}")

_, err := tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
if val, ok := t.data[strings.TrimSpace(tag)]; ok {
return w.Write([]byte(val))
}
return 0, fmt.Errorf("unknown template variable '%s'", tag)
})

tmpl, err := template.New(name).Parse(v)
if err != nil {
return nil, err
}

if err := tmpl.Execute(&b, t.data); err != nil {
return nil, err
}

return b.Bytes(), nil
}
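The hunk above drops fasttemplate and its custom `{%`/`%}` delimiters in favor of the standard library template engine, which is why the scaffolded files later in this diff change from `{% app_name %}` to `{{- .AppName }}`. A minimal sketch of the same rendering step, using text/template (the commit imports html/template, which shares the syntax) and an illustrative data map:

    package main

    import (
    	"bytes"
    	"fmt"
    	"text/template"
    )

    func main() {
    	// "-" inside the delimiters trims adjacent whitespace, so the
    	// template below renders with no gaps around the slug.
    	src := "cookie: _{{- .AppNameSlug -}}_session"

    	tmpl, err := template.New("conf").Parse(src)
    	if err != nil {
    		panic(err)
    	}

    	var b bytes.Buffer
    	if err := tmpl.Execute(&b, map[string]string{"AppNameSlug": "myapp"}); err != nil {
    		panic(err)
    	}
    	fmt.Println(b.String()) // cookie: _myapp_session
    }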
@ -17,6 +17,8 @@ import (
"github.com/dop251/goja"
"github.com/dosco/super-graph/core"
"github.com/gosimple/slug"
"github.com/jackc/pgx/v4"
"github.com/jackc/pgx/v4/stdlib"
"github.com/spf13/cobra"
)

@ -27,6 +29,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
log.Fatalf("ERR failed to read config: %s", err)
}
conf.Production = false
conf.DefaultBlock = false

db, err = initDB(conf, true, false)
if err != nil {

@ -51,7 +54,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {

vm := goja.New()
vm.Set("graphql", graphQLFn)
//vm.Set("import_csv", importCSV)
vm.Set("import_csv", importCSV)

console := vm.NewObject()
console.Set("log", logFunc) //nolint: errcheck

@ -77,10 +80,8 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
ct := context.Background()

if v, ok := opt["user_id"]; ok && len(v) != 0 {
if v, ok := opt["user_id"]; ok && v != "" {
ct = context.WithValue(ct, core.UserIDKey, v)
} else {
ct = context.WithValue(ct, core.UserIDKey, "-1")
}

// var role string

@ -143,7 +144,7 @@ func (c *csvSource) Values() ([]interface{}, error) {

for _, v := range c.rows[c.i] {
switch {
case len(v) == 0:
case v == "":
vals = append(vals, "")
case isDigit(v):
var n int

@ -181,34 +182,42 @@ func (c *csvSource) Err() error {
return nil
}

// func importCSV(table, filename string) int64 {
// if filename[0] != '/' {
// filename = path.Join(conf.ConfigPathUsed(), filename)
// }
func importCSV(table, filename string) int64 {
if filename[0] != '/' {
filename = path.Join(confPath, filename)
}

// s, err := NewCSVSource(filename)
// if err != nil {
// log.Fatalf("ERR %s", err)
// }
s, err := NewCSVSource(filename)
if err != nil {
log.Fatalf("ERR %v", err)
}

// var cols []string
// colval, _ := s.Values()
var cols []string
colval, _ := s.Values()

// for _, c := range colval {
// cols = append(cols, c.(string))
// }
for _, c := range colval {
cols = append(cols, c.(string))
}

// n, err := db.Exec(fmt.Sprintf("COPY %s FROM STDIN WITH "),
// cols,
// s)
conn, err := stdlib.AcquireConn(db)
if err != nil {
log.Fatalf("ERR %v", err)
}
//nolint: errcheck
defer stdlib.ReleaseConn(db, conn)

// if err != nil {
// err = fmt.Errorf("%w (line no %d)", err, s.i)
// log.Fatalf("ERR %s", err)
// }
n, err := conn.CopyFrom(
context.Background(),
pgx.Identifier{table},
cols,
s)

// return n
// }
if err != nil {
log.Fatalf("ERR %v", fmt.Errorf("%w (line no %d)", err, s.i))
}

return n
}

//nolint: errcheck
func logFunc(args ...interface{}) {

@ -234,7 +243,7 @@ func avatarURL(size int) string {
return fmt.Sprintf("https://i.pravatar.cc/%d?%d", size, rand.Intn(5000))
}

func imageURL(width int, height int) string {
func imageURL(width, height int) string {
return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
}

@ -377,11 +386,6 @@ func setFakeFuncs(f *goja.Object) {
f.Set("hipster_paragraph", gofakeit.HipsterParagraph)
f.Set("hipster_sentence", gofakeit.HipsterSentence)

//Languages
//f.Set("language", gofakeit.Language)
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)

// File
f.Set("file_extension", gofakeit.FileExtension)
f.Set("file_mine_type", gofakeit.FileMimeType)

@ -410,8 +414,6 @@ func setFakeFuncs(f *goja.Object) {
f.Set("lexify", gofakeit.Lexify)
f.Set("rand_string", getRandValue)
f.Set("numerify", gofakeit.Numerify)

//f.Set("programming_language", gofakeit.ProgrammingLanguage)
}

//nolint: errcheck
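With importCSV re-enabled above, seed scripts stream rows into Postgres over pgx's COPY protocol; CopyFrom accepts anything implementing pgx.CopyFromSource (Next/Values/Err), which is the role csvSource plays. A minimal sketch of the same call, assuming a hypothetical users table and connection string:

    package main

    import (
    	"context"
    	"log"

    	"github.com/jackc/pgx/v4"
    )

    func main() {
    	ctx := context.Background()
    	conn, err := pgx.Connect(ctx, "postgres://postgres:postgres@localhost:5432/app_development")
    	if err != nil {
    		log.Fatal(err)
    	}
    	defer conn.Close(ctx)

    	// CopyFromRows is the simplest built-in CopyFromSource; the
    	// csvSource above plays the same role but reads rows from a file.
    	rows := [][]interface{}{
    		{int64(1), "alice@example.com"},
    		{int64(2), "bob@example.com"},
    	}

    	n, err := conn.CopyFrom(ctx, pgx.Identifier{"users"}, []string{"id", "email"}, pgx.CopyFromRows(rows))
    	if err != nil {
    		log.Fatal(err)
    	}
    	log.Printf("copied %d rows", n)
    }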
@ -66,9 +66,11 @@ func newViper(configPath, configFile string) *viper.Viper {
vi.SetDefault("host_port", "0.0.0.0:8080")
vi.SetDefault("web_ui", false)
vi.SetDefault("enable_tracing", false)
vi.SetDefault("auth_fail_block", "always")
vi.SetDefault("auth_fail_block", false)
vi.SetDefault("seed_file", "seed.js")

vi.SetDefault("default_block", true)

vi.SetDefault("database.type", "postgres")
vi.SetDefault("database.host", "localhost")
vi.SetDefault("database.port", 5432)

@ -88,7 +90,7 @@ func newViper(configPath, configFile string) *viper.Viper {
}

func GetConfigName() string {
if len(os.Getenv("GO_ENV")) == 0 {
if os.Getenv("GO_ENV") == "" {
return "dev"
}
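The defaults above are registered before any config file is read, so file values and environment variables still win. A small sketch of that precedence layering, with illustrative paths and the SG_ env prefix:

    package main

    import (
    	"fmt"

    	"github.com/spf13/viper"
    )

    func main() {
    	vi := viper.New()

    	// Lowest precedence: compiled-in defaults.
    	vi.SetDefault("host_port", "0.0.0.0:8080")
    	vi.SetDefault("auth_fail_block", false)

    	// Next: values from a config file such as dev.yml, if present.
    	vi.SetConfigName("dev")
    	vi.AddConfigPath("./config")
    	_ = vi.ReadInConfig() // fall back to defaults when no file exists

    	// Highest: environment variables such as SG_HOST_PORT.
    	vi.SetEnvPrefix("SG")
    	vi.AutomaticEnv()

    	fmt.Println(vi.GetString("host_port"))
    }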
@ -7,8 +7,8 @@ import (

var healthyResponse = []byte("All's Well")

func health(w http.ResponseWriter, _ *http.Request) {
ct, cancel := context.WithTimeout(context.Background(), conf.DB.PingTimeout)
func health(w http.ResponseWriter, r *http.Request) {
ct, cancel := context.WithTimeout(r.Context(), conf.DB.PingTimeout)
defer cancel()

if err := db.PingContext(ct); err != nil {
@ -105,7 +105,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
}

if err == nil {
if len(conf.CacheControl) != 0 && res.Operation() == core.OpQuery {
if conf.CacheControl != "" && res.Operation() == core.OpQuery {
w.Header().Set("Cache-Control", conf.CacheControl)
}
//nolint: errcheck
@ -15,7 +15,6 @@ import (
"contrib.go.opencensus.io/integrations/ocsql"
"github.com/jackc/pgx/v4"
"github.com/jackc/pgx/v4/stdlib"
//_ "github.com/jackc/pgx/v4/stdlib"
)

const (

@ -216,7 +215,7 @@ func initDB(c *Config, useDB, useTelemetry bool) (*sql.DB, error) {

if useTelemetry && conf.telemetryEnabled() {
opts := ocsql.TraceOptions{
AllowRoot: false,
AllowRoot: true,
Ping: true,
RowsNext: true,
RowsClose: true,
@ -32,6 +32,7 @@ type Auth struct {
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
Audience string `mapstructure:"audience"`
}

Header struct {

@ -46,17 +47,17 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
ctx := r.Context()

userIDProvider := r.Header.Get("X-User-ID-Provider")
if len(userIDProvider) != 0 {
if userIDProvider != "" {
ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
}

userID := r.Header.Get("X-User-ID")
if len(userID) != 0 {
if userID != "" {
ctx = context.WithValue(ctx, core.UserIDKey, userID)
}

userRole := r.Header.Get("X-User-Role")
if len(userRole) != 0 {
if userRole != "" {
ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
}

@ -67,11 +68,11 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
hdr := ac.Header

if len(hdr.Name) == 0 {
if hdr.Name == "" {
return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
}

if !hdr.Exists && len(hdr.Value) == 0 {
if !hdr.Exists && hdr.Value == "" {
return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
}

@ -81,7 +82,7 @@ func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {

switch {
case hdr.Exists:
fo1 = (len(value) == 0)
fo1 = (value == "")

default:
fo1 = (value != hdr.Value)
@ -2,9 +2,12 @@ package auth

import (
"context"
"encoding/json"
"io/ioutil"
"net/http"
"strconv"
"strings"
"time"

jwt "github.com/dgrijalva/jwt-go"
"github.com/dosco/super-graph/core"

@ -13,8 +16,18 @@ import (
const (
authHeader = "Authorization"
jwtAuth0 int = iota + 1
jwtFirebase int = iota + 2
firebasePKEndpoint = "https://www.googleapis.com/robot/v1/metadata/x509/securetoken@system.gserviceaccount.com"
firebaseIssuerPrefix = "https://securetoken.google.com/"
)

type firebasePKCache struct {
PublicKeys map[string]string
Expiration time.Time
}

var firebasePublicKeys firebasePKCache

func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
var key interface{}
var jwtProvider int

@ -23,16 +36,18 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {

if ac.JWT.Provider == "auth0" {
jwtProvider = jwtAuth0
} else if ac.JWT.Provider == "firebase" {
jwtProvider = jwtFirebase
}

secret := ac.JWT.Secret
publicKeyFile := ac.JWT.PubKeyFile

switch {
case len(secret) != 0:
case secret != "":
key = []byte(secret)

case len(publicKeyFile) != 0:
case publicKeyFile != "":
kd, err := ioutil.ReadFile(publicKeyFile)
if err != nil {
return nil, err

@ -56,9 +71,10 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
}

return func(w http.ResponseWriter, r *http.Request) {

var tok string

if len(cookie) != 0 {
if cookie != "" {
ck, err := r.Cookie(cookie)
if err != nil {
next.ServeHTTP(w, r)

@ -74,9 +90,16 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
tok = ah[7:]
}

token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, func(token *jwt.Token) (interface{}, error) {
var keyFunc jwt.Keyfunc
if jwtProvider == jwtFirebase {
keyFunc = firebaseKeyFunction
} else {
keyFunc = func(token *jwt.Token) (interface{}, error) {
return key, nil
})
}
}

token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, keyFunc)

if err != nil {
next.ServeHTTP(w, r)

@ -86,12 +109,20 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
if claims, ok := token.Claims.(*jwt.StandardClaims); ok {
ctx := r.Context()

if ac.JWT.Audience != "" && claims.Audience != ac.JWT.Audience {
next.ServeHTTP(w, r)
return
}

if jwtProvider == jwtAuth0 {
sub := strings.Split(claims.Subject, "|")
if len(sub) != 2 {
ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
}
} else if jwtProvider == jwtFirebase &&
claims.Issuer == firebaseIssuerPrefix+ac.JWT.Audience {
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
} else {
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
}

@ -103,3 +134,92 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
next.ServeHTTP(w, r)
}, nil
}

type firebaseKeyError struct {
Err error
Message string
}

func (e *firebaseKeyError) Error() string {
return e.Message + " " + e.Err.Error()
}

func firebaseKeyFunction(token *jwt.Token) (interface{}, error) {
kid, ok := token.Header["kid"]

if !ok {
return nil, &firebaseKeyError{
Message: "Error 'kid' header not found in token",
}
}

if firebasePublicKeys.Expiration.Before(time.Now()) {
resp, err := http.Get(firebasePKEndpoint)

if err != nil {
return nil, &firebaseKeyError{
Message: "Error connecting to firebase certificate server",
Err: err,
}
}

defer resp.Body.Close()

data, err := ioutil.ReadAll(resp.Body)

if err != nil {
return nil, &firebaseKeyError{
Message: "Error reading firebase certificate server response",
Err: err,
}
}

cachePolicy := resp.Header.Get("cache-control")
ageIndex := strings.Index(cachePolicy, "max-age=")

if ageIndex < 0 {
return nil, &firebaseKeyError{
Message: "Error parsing cache-control header: 'max-age=' not found",
}
}

ageToEnd := cachePolicy[ageIndex+8:]
endIndex := strings.Index(ageToEnd, ",")
if endIndex < 0 {
endIndex = len(ageToEnd) - 1
}
ageString := ageToEnd[:endIndex]

age, err := strconv.ParseInt(ageString, 10, 64)

if err != nil {
return nil, &firebaseKeyError{
Message: "Error parsing max-age cache policy",
Err: err,
}
}

expiration := time.Now().Add(time.Duration(time.Duration(age) * time.Second))

err = json.Unmarshal(data, &firebasePublicKeys.PublicKeys)

if err != nil {
firebasePublicKeys = firebasePKCache{}
return nil, &firebaseKeyError{
Message: "Error unmarshalling firebase public key json",
Err: err,
}
}

firebasePublicKeys.Expiration = expiration
}

if key, found := firebasePublicKeys.PublicKeys[kid.(string)]; found {
k, err := jwt.ParseRSAPublicKeyFromPEM([]byte(key))
return k, err
}

return nil, &firebaseKeyError{
Message: "Error no matching public key for kid supplied in jwt",
}
}
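The subtlest part of the new Firebase path is deriving a cache lifetime from the certificate endpoint's Cache-Control header. That parsing can be read as a small standalone helper; a sketch under the same assumptions (a max-age directive is present) follows. Note that when max-age is the last directive, the slice in the hunk above truncates the final digit; the sketch sidesteps that by slicing only when a trailing comma is actually found:

    package main

    import (
    	"fmt"
    	"strconv"
    	"strings"
    	"time"
    )

    // maxAge extracts the max-age directive from a Cache-Control header,
    // mirroring the logic in firebaseKeyFunction.
    func maxAge(cacheControl string) (time.Duration, error) {
    	i := strings.Index(cacheControl, "max-age=")
    	if i < 0 {
    		return 0, fmt.Errorf("no max-age directive in %q", cacheControl)
    	}
    	v := cacheControl[i+len("max-age="):]
    	if j := strings.Index(v, ","); j >= 0 {
    		v = v[:j]
    	}
    	secs, err := strconv.ParseInt(strings.TrimSpace(v), 10, 64)
    	if err != nil {
    		return 0, err
    	}
    	return time.Duration(secs) * time.Second, nil
    }

    func main() {
    	d, err := maxAge("public, max-age=21600, must-revalidate")
    	if err != nil {
    		panic(err)
    	}
    	fmt.Println(d) // 6h0m0s
    }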
@ -165,7 +165,7 @@ func railsAuth(ac *Auth) (*rails.Auth, error) {
}

version := ac.Rails.Version
if len(version) == 0 {
if version == "" {
return nil, errors.New("no auth.rails.version defined")
}
@ -6,9 +6,11 @@ import (
"database/sql"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"text/template"

@ -105,39 +107,40 @@ func (defaultMigratorFS) Glob(pattern string) ([]string, error) {
func FindMigrationsEx(path string, fs MigratorFS) ([]string, error) {
path = strings.TrimRight(path, string(filepath.Separator))

fileInfos, err := fs.ReadDir(path)
files, err := ioutil.ReadDir(path)
if err != nil {
return nil, err
log.Fatal(err)
}

paths := make([]string, 0, len(fileInfos))
for _, fi := range fileInfos {
fm := make(map[int]string, len(files))
keys := make([]int, 0, len(files))

for _, fi := range files {
if fi.IsDir() {
continue
}

matches := migrationPattern.FindStringSubmatch(fi.Name())

if len(matches) != 2 {
continue
}

n, err := strconv.ParseInt(matches[1], 10, 32)
n, err := strconv.Atoi(matches[1])
if err != nil {
// The regexp already validated that the prefix is all digits so this *should* never fail
return nil, err
}

mcount := len(paths)

if n < int64(mcount) {
return nil, fmt.Errorf("Duplicate migration %d", n)
fm[n] = filepath.Join(path, fi.Name())
keys = append(keys, n)
}

if int64(mcount) < n {
return nil, fmt.Errorf("Missing migration %d", mcount)
}
sort.Ints(keys)

paths = append(paths, filepath.Join(path, fi.Name()))
paths := make([]string, 0, len(keys))
for _, k := range keys {
paths = append(paths, fm[k])
}

return paths, nil

@ -196,7 +199,7 @@ func (m *Migrator) LoadMigrations(path string) error {
for _, v := range strings.Split(upSQL, "\n") {
// Only account for regular single line comment, empty line and space/comment combination
cleanString := strings.TrimSpace(v)
if len(cleanString) != 0 &&
if cleanString != "" &&
!strings.HasPrefix(cleanString, "--") {
containsSQL = true
break
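FindMigrationsEx above now keys files by their numeric prefix and sorts the keys, so 2_users.sql orders before 10_posts.sql and gaps in the sequence no longer abort the load. A compact sketch of why lexical order fails and how the map-plus-sorted-keys approach fixes it (filenames are illustrative):

    package main

    import (
    	"fmt"
    	"sort"
    )

    func main() {
    	// Lexical order would yield 1, 10, 2 — wrong for migrations.
    	byNum := map[int]string{1: "1_init.sql", 2: "2_users.sql", 10: "10_posts.sql"}

    	keys := make([]int, 0, len(byNum))
    	for k := range byNum {
    		keys = append(keys, k)
    	}
    	sort.Ints(keys)

    	ordered := make([]string, 0, len(keys))
    	for _, k := range keys {
    		ordered = append(ordered, byNum[k])
    	}
    	fmt.Println(ordered) // [1_init.sql 2_users.sql 10_posts.sql]
    }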
File diff suppressed because one or more lines are too long
@ -27,7 +27,7 @@ func initWatcher() {
}

var d dir
if len(cpath) == 0 || cpath == "./" {
if cpath == "" || cpath == "./" {
d = Dir("./config", ReExec)
} else {
d = Dir(cpath, ReExec)

@ -52,11 +52,11 @@ func startHTTP() {
hp := strings.SplitN(conf.HostPort, ":", 2)

if len(hp) == 2 {
if len(conf.Host) != 0 {
if conf.Host != "" {
hp[0] = conf.Host
}

if len(conf.Port) != 0 {
if conf.Port != "" {
hp[1] = conf.Port
}

@ -64,7 +64,7 @@ func startHTTP() {
}
}

if len(conf.hostPort) == 0 {
if conf.hostPort == "" {
conf.hostPort = defaultHP
}

@ -123,7 +123,7 @@ func routeHandler() (http.Handler, error) {
return mux, nil
}

if len(conf.APIPath) != 0 {
if conf.APIPath != "" {
apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
}
@ -5,7 +5,7 @@ steps:
[
"build",
"--tag",
"gcr.io/$PROJECT_ID/{% app_name_slug %}:latest",
"gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest",
"--build-arg",
"GO_ENV=production",
".",

@ -13,7 +13,7 @@ steps:

# Push new image to Google Container Registry
- name: "gcr.io/cloud-builders/docker"
args: ["push", "gcr.io/$PROJECT_ID/{% app_name_slug %}:latest"]
args: ["push", "gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest"]

# Deploy image to Cloud Run
- name: "gcr.io/cloud-builders/gcloud"

@ -23,15 +23,15 @@ steps:
"deploy",
"data",
"--image",
"gcr.io/$PROJECT_ID/{% app_name_slug %}:latest",
"gcr.io/$PROJECT_ID/{{- .AppNameSlug -}}:latest",
"--add-cloudsql-instances",
"$PROJECT_ID:$REGION:{% app_name_slug %}_production",
"$PROJECT_ID:$REGION:{{- .AppNameSlug -}}_production",
"--region",
"$REGION",
"--platform",
"managed",
"--update-env-vars",
"GO_ENV=production,SG_DATABASE_HOST=/cloudsql/$PROJECT_ID:$REGION:{% app_name_slug %}_production,SECRETS_FILE=prod.secrets.yml",
"GO_ENV=production,SG_DATABASE_HOST=/cloudsql/$PROJECT_ID:$REGION:{{- .AppNameSlug -}}_production,SECRETS_FILE=prod.secrets.yml",
"--port",
"8080",
"--service-account",
@ -1,4 +1,4 @@
app_name: "{% app_name %} Development"
app_name: "{{- .AppName }} Development"
host_port: 0.0.0.0:8080
web_ui: true

@ -82,7 +82,7 @@ cors_debug: false
auth:
# Can be 'rails', 'jwt' or 'header'
type: rails
cookie: _{% app_name_slug %}_session
cookie: _{{- .AppNameSlug -}}_session

# Comment this out if you want to disable setting
# the user_id via a header for testing.

@ -134,7 +134,7 @@ database:
type: postgres
host: db
port: 5432
dbname: {% app_name_slug %}_development
dbname: {{ .AppNameSlug -}}_development
user: postgres
password: postgres
@ -9,48 +9,10 @@ services:
ports:
- "5432:5432"

# Yugabyte DB
# yb-master:
#   image: yugabytedb/yugabyte:latest
#   container_name: yb-master-n1
#   command: [ "/home/yugabyte/bin/yb-master",
#             "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
#             "--master_addresses=yb-master-n1:7100",
#             "--replication_factor=1",
#             "--enable_ysql=true"]
#   ports:
#   - "7000:7000"
#   environment:
#     SERVICE_7000_NAME: yb-master

# db:
#   image: yugabytedb/yugabyte:latest
#   container_name: yb-tserver-n1
#   command: [ "/home/yugabyte/bin/yb-tserver",
#             "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
#             "--start_pgsql_proxy",
#             "--tserver_master_addrs=yb-master-n1:7100"]
#   ports:
#   - "9042:9042"
#   - "6379:6379"
#   - "5433:5433"
#   - "9000:9000"
#   environment:
#     SERVICE_5433_NAME: ysql
#     SERVICE_9042_NAME: ycql
#     SERVICE_6379_NAME: yedis
#     SERVICE_9000_NAME: yb-tserver
#   depends_on:
#   - yb-master

{% app_name_slug %}_api:
{{ .AppNameSlug -}}_api:
image: dosco/super-graph:latest
environment:
GO_ENV: "development"
# Uncomment below for Yugabyte DB
# SG_DATABASE_PORT: 5433
# SG_DATABASE_USER: yugabyte
# SG_DATABASE_PASSWORD: yugabyte
volumes:
- ./config:/config
ports:
@ -2,7 +2,7 @@
# so I only need to overwrite some values
inherits: dev

app_name: "{% app_name %} Production"
app_name: "{{- .AppName }} Production"
host_port: 0.0.0.0:8080
web_ui: false

@ -82,7 +82,7 @@ database:
type: postgres
host: db
port: 5432
dbname: {% app_name_slug %}_production
dbname: {{ .AppNameSlug -}}_production
user: postgres
password: postgres
#pool_size: 10
@ -11,9 +11,9 @@
// opt-in, read http://bit.ly/CRA-PWA

const isLocalhost = Boolean(
window.location.hostname === 'localhost' ||
window.location.hostname === "localhost" ||
// [::1] is the IPv6 localhost address.
window.location.hostname === '[::1]' ||
window.location.hostname === "[::1]" ||
// 127.0.0.1/8 is considered localhost for IPv4.
window.location.hostname.match(
/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/

@ -21,7 +21,7 @@ const isLocalhost = Boolean(
);

export function register(config) {
if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) {
// The URL constructor is available in all browsers that support SW.
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
if (publicUrl.origin !== window.location.origin) {

@ -31,7 +31,7 @@ export function register(config) {
return;
}

window.addEventListener('load', () => {
window.addEventListener("load", () => {
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;

if (isLocalhost) {

@ -42,8 +42,8 @@ export function register(config) {
// service worker/PWA documentation.
navigator.serviceWorker.ready.then(() => {
console.log(
'This web app is being served cache-first by a service ' +
'worker. To learn more, visit http://bit.ly/CRA-PWA'
"This web app is being served cache-first by a service " +
"worker. To learn more, visit http://bit.ly/CRA-PWA"
);
});
} else {

@ -57,21 +57,21 @@ export function register(config) {
function registerValidSW(swUrl, config) {
navigator.serviceWorker
.register(swUrl)
.then(registration => {
.then((registration) => {
registration.onupdatefound = () => {
const installingWorker = registration.installing;
if (installingWorker == null) {
return;
}
installingWorker.onstatechange = () => {
if (installingWorker.state === 'installed') {
if (installingWorker.state === "installed") {
if (navigator.serviceWorker.controller) {
// At this point, the updated precached content has been fetched,
// but the previous service worker will still serve the older
// content until all client tabs are closed.
console.log(
'New content is available and will be used when all ' +
'tabs for this page are closed. See http://bit.ly/CRA-PWA.'
"New content is available and will be used when all " +
"tabs for this page are closed. See http://bit.ly/CRA-PWA."
);

// Execute callback

@ -82,7 +82,7 @@ function registerValidSW(swUrl, config) {
// At this point, everything has been precached.
// It's the perfect time to display a
// "Content is cached for offline use." message.
console.log('Content is cached for offline use.');
console.log("Content is cached for offline use.");

// Execute callback
if (config && config.onSuccess) {

@ -93,23 +93,23 @@ function registerValidSW(swUrl, config) {
};
};
})
.catch(error => {
console.error('Error during service worker registration:', error);
.catch((error) => {
console.error("Error during service worker registration:", error);
});
}

function checkValidServiceWorker(swUrl, config) {
// Check if the service worker can be found. If it can't reload the page.
fetch(swUrl)
.then(response => {
.then((response) => {
// Ensure service worker exists, and that we really are getting a JS file.
const contentType = response.headers.get('content-type');
const contentType = response.headers.get("content-type");
if (
response.status === 404 ||
(contentType != null && contentType.indexOf('javascript') === -1)
(contentType != null && contentType.indexOf("javascript") === -1)
) {
// No service worker found. Probably a different app. Reload the page.
navigator.serviceWorker.ready.then(registration => {
navigator.serviceWorker.ready.then((registration) => {
registration.unregister().then(() => {
window.location.reload();
});

@ -121,14 +121,14 @@ function checkValidServiceWorker(swUrl, config) {
})
.catch(() => {
console.log(
'No internet connection found. App is running in offline mode.'
"No internet connection found. App is running in offline mode."
);
});
}

export function unregister() {
if ('serviceWorker' in navigator) {
navigator.serviceWorker.ready.then(registration => {
if ("serviceWorker" in navigator) {
navigator.serviceWorker.ready.then((registration) => {
registration.unregister();
});
}
jsn/bench.1 (new file)
@ -0,0 +1,13 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/jsn
BenchmarkGet
BenchmarkGet-16        13898    85293 ns/op    3328 B/op    2 allocs/op
BenchmarkFilter
BenchmarkFilter-16    189328     6341 ns/op     448 B/op    1 allocs/op
BenchmarkStrip
BenchmarkStrip-16     219765     5543 ns/op     224 B/op    1 allocs/op
BenchmarkReplace
BenchmarkReplace-16   100899    12022 ns/op     416 B/op    1 allocs/op
PASS
ok   github.com/dosco/super-graph/jsn   6.029s
@ -3,6 +3,7 @@ package jsn

import (
"bytes"
"encoding/json"
"errors"
"io"
)

@ -68,7 +69,12 @@ func Clear(w *bytes.Buffer, v []byte) error {
}

io := int(dec.InputOffset())
w.Write(v[io-len(v1)-2 : io])
s := io - len(v1) - 2
if io <= s || s <= 0 {
return errors.New("invalid json")
}

w.Write(v[s:io])
w.WriteString(`:`)
isValue = true
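The new bounds check above guards slice arithmetic built on json.Decoder's InputOffset (Go 1.14+), which reports how many bytes the decoder has consumed. A minimal sketch of that bookkeeping (input is illustrative):

    package main

    import (
    	"encoding/json"
    	"fmt"
    	"strings"
    )

    func main() {
    	dec := json.NewDecoder(strings.NewReader(`{"name":"hello"}`))

    	for {
    		tok, err := dec.Token()
    		if err != nil {
    			break
    		}
    		// Clear slices the original buffer using offsets like this one;
    		// the added check rejects inputs that would make the slice invalid.
    		fmt.Printf("token=%-8v consumed=%d bytes\n", tok, dec.InputOffset())
    	}
    }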
@ -2,17 +2,19 @@ package jsn

import (
"bytes"

"github.com/cespare/xxhash/v2"
"hash/maphash"
)

// Filter function filters the JSON keeping only the provided keys and removing all others
func Filter(w *bytes.Buffer, b []byte, keys []string) error {
var err error
kmap := make(map[uint64]struct{}, len(keys))
h := maphash.Hash{}

for i := range keys {
kmap[xxhash.Sum64String(keys[i])] = struct{}{}
_, _ = h.WriteString(keys[i])
kmap[h.Sum64()] = struct{}{}
h.Reset()
}

// is an list

@ -132,7 +134,11 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
cb := b[s:(e + 1)]
e = 0

if _, ok := kmap[xxhash.Sum64(k)]; !ok {
_, _ = h.Write(k)
_, ok := kmap[h.Sum64()]
h.Reset()

if !ok {
continue
}
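Replacing xxhash with hash/maphash (new in Go 1.14) removes an external dependency. maphash seeds each Hash randomly, so two hashes agree only when they share a seed; reusing one Hash for both building and probing kmap, as above, keeps the sums comparable within a call. A minimal sketch (keys are illustrative):

    package main

    import (
    	"fmt"
    	"hash/maphash"
    )

    func main() {
    	keys := []string{"id", "full_name", "embed"}

    	// A zero maphash.Hash picks a random seed on first use.
    	var h maphash.Hash
    	kmap := make(map[uint64]struct{}, len(keys))
    	for _, k := range keys {
    		_, _ = h.WriteString(k)
    		kmap[h.Sum64()] = struct{}{}
    		h.Reset() // reset state, keep the seed
    	}

    	_, _ = h.WriteString("full_name")
    	_, ok := kmap[h.Sum64()]
    	h.Reset()
    	fmt.Println(ok) // true
    }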
jsn/fuzz.go
@ -2,10 +2,55 @@

package jsn

import (
"bytes"
"errors"
)

func Fuzz(data []byte) int {
if err := unifiedTest(data); err != nil {
return 0
c := 0

if err := Validate(string(data)); err == nil {
c = 1
}

return 1
if err := fuzzTest(data); err == nil {
c = 1
}

return c
}

func fuzzTest(data []byte) error {
err1 := Validate(string(data))

var b1 bytes.Buffer
err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})

path1 := [][]byte{[]byte("data"), []byte("users")}
Strip(data, path1)

from := []Field{
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
{[]byte("__twitter_id"), []byte(`"ABC123"`)},
}

to := []Field{
{[]byte("__twitter_id"), []byte(`"1234567890"`)},
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
}

var b2 bytes.Buffer
err3 := Replace(&b2, data, from, to)

Keys(data)

var b3 bytes.Buffer
err4 := Clear(&b3, data)

if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
return errors.New("there was an error")
}

return nil
}
@ -1,9 +1,15 @@
package jsn
// +build gofuzz

package jsn_test

import (
"testing"

"github.com/dosco/super-graph/jsn"
)

var ret int

func TestFuzzCrashers(t *testing.T) {
var crashers = []string{
"00\"0000\"0{",

@ -52,9 +58,16 @@ func TestFuzzCrashers(t *testing.T) {
"0000\"0\"{",
"000\"000\"{",
"\"00000000\"{",
`0000"00"00000000"000000000"00"000000000000000"00000"00000": "00"0"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
`0000"000000000000000000000000000000000000"00000000"000000000"00"000000000000000"00000"00000": "00000000000000"00000"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
`00"__twitter_id":[{ "name": "hello" }, { "name": "world"}]`,
"\"\xb0\xef\xbd\xe3\xbd\xef\x99\xe3\xbd\xef\xbd\xef\xbd\xef\xbd\xe5\x99\xe3\xbd" +
"\xef\x99\xe3\"",
"\"\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xe3\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xef\xe3" +
"\xe3\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\"",
}

for _, f := range crashers {
_ = unifiedTest([]byte(f))
ret = jsn.Fuzz([]byte(f))
}
}
jsn/get.go
@ -1,7 +1,7 @@
package jsn

import (
"github.com/cespare/xxhash/v2"
"hash/maphash"
)

const (

@ -41,9 +41,12 @@ func Value(b []byte) []byte {
// Keys function fetches values for the provided keys
func Get(b []byte, keys [][]byte) []Field {
kmap := make(map[uint64]struct{}, len(keys))
h := maphash.Hash{}

for i := range keys {
kmap[xxhash.Sum64(keys[i])] = struct{}{}
_, _ = h.Write(keys[i])
kmap[h.Sum64()] = struct{}{}
h.Reset()
}

res := make([]Field, 0, 20)

@ -141,7 +144,9 @@ func Get(b []byte, keys [][]byte) []Field {
}

if e != 0 {
_, ok := kmap[xxhash.Sum64(k)]
_, _ = h.Write(k)
_, ok := kmap[h.Sum64()]
h.Reset()

if ok {
res = append(res, Field{k, b[s:(e + 1)]})
@@ -1,4 +1,4 @@
-package jsn
+package jsn_test

 import (
 	"bytes"
@@ -6,6 +6,8 @@ import (
 	"io/ioutil"
 	"strings"
 	"testing"
+
+	"github.com/dosco/super-graph/jsn"
 )

 var (
@@ -171,13 +173,13 @@ var (
 )

 func TestGet(t *testing.T) {
-	values := Get([]byte(input1), [][]byte{
+	values := jsn.Get([]byte(input1), [][]byte{
 		[]byte("test_1a"),
 		[]byte("__twitter_id"),
 		[]byte("work_email"),
 	})

-	expected := []Field{
+	expected := []jsn.Field{
 		{[]byte("test_1a"), []byte(`{ "__twitter_id": "ABCD" }`)},
 		{[]byte("__twitter_id"), []byte(`"ABCD"`)},
 		{[]byte("__twitter_id"), []byte(`"2048666903444506956"`)},
@@ -214,11 +216,11 @@ func TestGet(t *testing.T) {
 }

 func TestGet1(t *testing.T) {
-	values := Get([]byte(input5), [][]byte{
+	values := jsn.Get([]byte(input5), [][]byte{
 		[]byte("thread_slug"),
 	})

-	expected := []Field{
+	expected := []jsn.Field{
 		{[]byte("thread_slug"), []byte(`"in-september-2018-slovak-police-stated-that-kuciak-7929"`)},
 	}

@@ -238,11 +240,11 @@ func TestGet1(t *testing.T) {
 }

 func TestGet2(t *testing.T) {
-	values := Get([]byte(input6), [][]byte{
+	values := jsn.Get([]byte(input6), [][]byte{
 		[]byte("users_cursor"), []byte("threads_cursor"),
 	})

-	expected := []Field{
+	expected := []jsn.Field{
 		{[]byte("threads_cursor"), []byte(`null`)},
 		{[]byte("threads_cursor"), []byte(`25`)},
 		{[]byte("users_cursor"), []byte(`3`)},
@@ -264,7 +266,7 @@ func TestGet2(t *testing.T) {
 }

 func TestGet3(t *testing.T) {
-	values := Get(input7, [][]byte{[]byte("data")})
+	values := jsn.Get(input7, [][]byte{[]byte("data")})
 	v := values[0].Value

 	if !bytes.Equal(v[len(v)-11:], []byte(`Rangnekar"}`)) {
@@ -277,7 +279,7 @@ func TestGet4(t *testing.T) {

 	exp = strings.ReplaceAll(exp, "@", "`")

-	values := Get(input8, [][]byte{[]byte("body")})
+	values := jsn.Get(input8, [][]byte{[]byte("body")})

 	if string(values[0].Key) != "body" {
 		t.Fatal("unexpected key")
@@ -291,29 +293,29 @@ func TestGet4(t *testing.T) {

 func TestValue(t *testing.T) {
 	v1 := []byte("12345")
-	if !bytes.Equal(Value(v1), v1) {
+	if !bytes.Equal(jsn.Value(v1), v1) {
 		t.Fatal("Number value invalid")
 	}

 	v2 := []byte(`"12345"`)
-	if !bytes.Equal(Value(v2), []byte(`12345`)) {
+	if !bytes.Equal(jsn.Value(v2), []byte(`12345`)) {
 		t.Fatal("String value invalid")
 	}

 	v3 := []byte(`{ "hello": "world" }`)
-	if Value(v3) != nil {
-		t.Fatal("Object value is not nil", Value(v3))
+	if jsn.Value(v3) != nil {
+		t.Fatal("Object value is not nil", jsn.Value(v3))
 	}

 	v4 := []byte(`[ "hello", "world" ]`)
-	if Value(v4) != nil {
+	if jsn.Value(v4) != nil {
 		t.Fatal("List value is not nil")
 	}
 }

 func TestFilter1(t *testing.T) {
 	var b bytes.Buffer
-	err := Filter(&b, []byte(input2), []string{"id", "full_name", "embed"})
+	err := jsn.Filter(&b, []byte(input2), []string{"id", "full_name", "embed"})
 	if err != nil {
 		t.Error(err)
 	}
@@ -329,7 +331,7 @@ func TestFilter2(t *testing.T) {
 	value := `[{"id":1,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":100,"amount_refunded":0,"date":"01/01/2019","application":null,"billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}, {"id":2,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":0,"date":"02/18/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}},{"id":3,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":50,"date":"03/21/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}]`

 	var b bytes.Buffer
-	err := Filter(&b, []byte(value), []string{"id"})
+	err := jsn.Filter(&b, []byte(value), []string{"id"})
 	if err != nil {
 		t.Error(err)
 	}
@@ -343,7 +345,7 @@ func TestFilter2(t *testing.T) {

 func TestStrip(t *testing.T) {
 	path1 := [][]byte{[]byte("data"), []byte("users")}
-	value1 := Strip([]byte(input3), path1)
+	value1 := jsn.Strip([]byte(input3), path1)

 	expected := []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)

@@ -353,7 +355,7 @@ func TestStrip(t *testing.T) {
 	}

 	path2 := [][]byte{[]byte("boo"), []byte("hoo")}
-	value2 := Strip([]byte(input3), path2)
+	value2 := jsn.Strip([]byte(input3), path2)

 	if !bytes.Equal(value2, []byte(input3)) {
 		t.Log(value2)
@@ -364,7 +366,7 @@ func TestStrip(t *testing.T) {
 func TestValidateTrue(t *testing.T) {
 	json := []byte(` [{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)

-	err := Validate(string(json))
+	err := jsn.Validate(string(json))
 	if err != nil {
 		t.Error(err)
 	}
@@ -373,7 +375,7 @@ func TestValidateTrue(t *testing.T) {
 func TestValidateFalse(t *testing.T) {
 	json := []byte(` [{ "hello": 123"<html>}]`)

-	err := Validate(string(json))
+	err := jsn.Validate(string(json))
 	if err == nil {
 		t.Error("JSON validation failed to detect invalid json")
 	}
@@ -382,12 +384,12 @@ func TestValidateFalse(t *testing.T) {
 func TestReplace(t *testing.T) {
 	var buf bytes.Buffer

-	from := []Field{
+	from := []jsn.Field{
 		{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
 		{[]byte("__twitter_id"), []byte(`"ABC123"`)},
 	}

-	to := []Field{
+	to := []jsn.Field{
 		{[]byte("__twitter_id"), []byte(`"1234567890"`)},
 		{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
 	}
@@ -412,7 +414,7 @@ func TestReplace(t *testing.T) {
 		"__twitter_id":"1234567890"
 	}] }`

-	err := Replace(&buf, []byte(input4), from, to)
+	err := jsn.Replace(&buf, []byte(input4), from, to)
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -428,7 +430,7 @@ func TestReplaceEmpty(t *testing.T) {

 	json := `{ "users" : [{"id":1,"full_name":"Sidney St[1]roman","email":"user0@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":2,"full_name":"Jerry Dickinson","email":"user1@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":3,"full_name":"Kenna Cassin","email":"user2@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":4,"full_name":"Mr. Pat Parisian","email":"rodney@kautzer.biz","__users_twitter_id":"2048666903444506956"}, {"id":5,"full_name":"Bette Ebert","email":"janeenrath@goyette.com","__users_twitter_id":"2048666903444506956"}, {"id":6,"full_name":"Everett Kiehn","email":"michael@bartoletti.com","__users_twitter_id":"2048666903444506956"}, {"id":7,"full_name":"Katrina Cronin","email":"loretaklocko@framivolkman.org","__users_twitter_id":"2048666903444506956"}, {"id":8,"full_name":"Caroll Orn Sr.","email":"joannarau@hegmann.io","__users_twitter_id":"2048666903444506956"}, {"id":9,"full_name":"Gwendolyn Ziemann","email":"renaytoy@rutherford.co","__users_twitter_id":"2048666903444506956"}, {"id":10,"full_name":"Mrs. Rosann Fritsch","email":"holliemosciski@thiel.org","__users_twitter_id":"2048666903444506956"}, {"id":11,"full_name":"Arden Koss","email":"cristobalankunding@howewelch.org","__users_twitter_id":"2048666903444506956"}, {"id":12,"full_name":"Brenton Bauch PhD","email":"renee@miller.co","__users_twitter_id":"2048666903444506956"}, {"id":13,"full_name":"Daine Gleichner","email":"andrea@nienow.co","__users_twitter_id":"2048666903444506956"}] }`

-	err := Replace(&buf, []byte(json), []Field{}, []Field{})
+	err := jsn.Replace(&buf, []byte(json), []jsn.Field{}, []jsn.Field{})
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -442,7 +444,7 @@ func TestReplaceEmpty(t *testing.T) {
 func TestKeys1(t *testing.T) {
 	json := `[{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]},{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]},{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]}]`

-	fields := Keys([]byte(json))
+	fields := jsn.Keys([]byte(json))

 	exp := []string{
 		"id", "posts", "title", "description", "full_name", "email", "books", "name", "description",
@@ -462,7 +464,7 @@ func TestKeys1(t *testing.T) {
 func TestKeys2(t *testing.T) {
 	json := `{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]}`

-	fields := Keys([]byte(json))
+	fields := jsn.Keys([]byte(json))

 	exp := []string{
 		"id", "posts", "title", "description", "full_name", "email", "books", "name", "description",
@@ -491,7 +493,7 @@ func TestKeys3(t *testing.T) {
 		"user": 123
 	}`

-	fields := Keys([]byte(json))
+	fields := jsn.Keys([]byte(json))

 	exp := []string{
 		"insert", "created_at", "test_1a", "type1", "type2", "name", "updated_at", "description",
@@ -526,7 +528,7 @@ func TestClear(t *testing.T) {

 	expected := `{"insert":{"created_at":"","test_1a":{"type1":"","type2":[{"a":0.0}]},"name":"","updated_at":"","description":""},"user":0.0,"tags":[]}`

-	err := Clear(&buf, []byte(json))
+	err := jsn.Clear(&buf, []byte(json))
 	if err != nil {
 		t.Fatal(err)
 	}
@@ -541,7 +543,7 @@ func BenchmarkGet(b *testing.B) {
 	b.ReportAllocs()

 	for n := 0; n < b.N; n++ {
-		Get([]byte(input1), [][]byte{[]byte("__twitter_id")})
+		jsn.Get([]byte(input1), [][]byte{[]byte("__twitter_id")})
 	}
 }

@@ -553,7 +555,7 @@ func BenchmarkFilter(b *testing.B) {
 	b.ReportAllocs()

 	for n := 0; n < b.N; n++ {
-		err := Filter(&buf, []byte(input2), keys)
+		err := jsn.Filter(&buf, []byte(input2), keys)
 		if err != nil {
 			b.Fatal(err)
 		}
@@ -566,19 +568,19 @@ func BenchmarkStrip(b *testing.B) {
 	b.ReportAllocs()

 	for n := 0; n < b.N; n++ {
-		Strip([]byte(input3), path)
+		jsn.Strip([]byte(input3), path)
 	}
 }

 func BenchmarkReplace(b *testing.B) {
 	var buf bytes.Buffer

-	from := []Field{
+	from := []jsn.Field{
 		{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
 		{[]byte("__twitter_id"), []byte(`"ABC123"`)},
 	}

-	to := []Field{
+	to := []jsn.Field{
 		{[]byte("__twitter_id"), []byte(`"1234567890"`)},
 		{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
 	}
@@ -587,7 +589,7 @@ func BenchmarkReplace(b *testing.B) {
 	b.ReportAllocs()

 	for n := 0; n < b.N; n++ {
-		err := Replace(&buf, []byte(input4), from, to)
+		err := jsn.Replace(&buf, []byte(input4), from, to)
 		if err != nil {
 			b.Fatal(err)
 		}
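The benchmarks above keep the stock testing.B shape: b.ReportAllocs() plus a b.N loop around the function under test, now called through the exported jsn API. For reference, a minimal benchmark in the same style, with an illustrative input and benchmark name:

package jsn_test

import (
	"testing"

	"github.com/dosco/super-graph/jsn"
)

var sample = []byte(`{ "data": { "id": 1, "name": "hello" } }`)

func BenchmarkKeys(b *testing.B) {
	b.ReportAllocs() // report allocs/op alongside ns/op
	for n := 0; n < b.N; n++ {
		jsn.Keys(sample)
	}
}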
@@ -3,8 +3,7 @@ package jsn
 import (
 	"bytes"
 	"errors"
-
-	"github.com/cespare/xxhash/v2"
+	"hash/maphash"
 )

 // Replace function replaces key-value pairs provided in the `from` argument with those in the `to` argument
@@ -18,7 +17,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
 		return err
 	}

-	h := xxhash.New()
+	h := maphash.Hash{}
 	tmap := make(map[uint64]int, len(from))

 	for i, f := range from {
@@ -133,9 +132,18 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
 		if e != 0 {
 			e++

+			if e <= s {
+				return errors.New("invalid json")
+			}
+
 			if _, err := h.Write(b[s:e]); err != nil {
 				return err
 			}

+			if (we + 1) <= ws {
+				return errors.New("invalid json")
+			}
+
 			n, ok := tmap[h.Sum64()]
 			h.Reset()
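Besides the hash swap, Replace gains two bounds checks (e <= s and (we + 1) <= ws) so that malformed input surfaces as an "invalid json" error instead of a reversed-slice panic, which is exactly the failure mode fuzzing tends to uncover. A hedged sketch of the guard pattern, with illustrative names and offsets:

package main

import (
	"errors"
	"fmt"
)

// sliceSpan sketches the defensive slicing pattern: s and e are byte
// offsets discovered by a scanner, which a corrupt document can leave
// reversed or out of range.
func sliceSpan(b []byte, s, e int) ([]byte, error) {
	if e <= s || e > len(b) {
		return nil, errors.New("invalid json")
	}
	return b[s:e], nil // safe: the guard rules out an inverted range
}

func main() {
	b := []byte(`{"a":1}`)
	if _, err := sliceSpan(b, 5, 2); err != nil {
		fmt.Println(err) // prints "invalid json" instead of panicking
	}
}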
37
jsn/test.go
@@ -1,37 +0,0 @@
-package jsn
-
-import (
-	"bytes"
-	"errors"
-)
-
-func unifiedTest(data []byte) error {
-	err1 := Validate(string(data))
-
-	var b1 bytes.Buffer
-	err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})
-
-	path1 := [][]byte{[]byte("data"), []byte("users")}
-	Strip(data, path1)
-
-	from := []Field{
-		{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
-		{[]byte("__twitter_id"), []byte(`"ABC123"`)},
-	}
-
-	to := []Field{
-		{[]byte("__twitter_id"), []byte(`"1234567890"`)},
-		{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
-	}
-
-	var b2 bytes.Buffer
-	err3 := Replace(&b2, data, from, to)
-
-	Keys(data)
-
-	if err1 != nil || err2 != nil || err3 != nil {
-		return errors.New("there was an error")
-	}
-
-	return nil
-}
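With test.go deleted, nothing in the test suite lives inside package jsn anymore: the tests above sit in the external package jsn_test and compile only against jsn's exported API, so internal changes like the xxhash-to-maphash swap can't silently couple to them. A minimal illustration of the external-test-package convention (the file name is illustrative):

// jsn/exported_test.go (illustrative file name)
package jsn_test // external test package: sees only exported identifiers

import (
	"testing"

	"github.com/dosco/super-graph/jsn"
)

func TestValidateExported(t *testing.T) {
	if err := jsn.Validate(`{"a":1}`); err != nil {
		t.Fatal(err)
	}
}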