Compare commits

18 Commits

7557a4c29c
dd4accfdd2
06214a3850
7b5548a2c6
00cfa251a2
9f35f85857
f4f6420a30
6716b97a39
7169dd65f5
b26cdbf960
33f3fefbf3
a775f9475b
bd157290f6
82cc712a93
0ce129de14
1a15e433ba
816121fbcf
e82e97a9d7
@@ -12,8 +12,7 @@ FROM golang:1.14-alpine as go-build
 RUN apk update && \
     apk add --no-cache make && \
     apk add --no-cache git && \
-    apk add --no-cache jq && \
-    apk add --no-cache upx=3.95-r2
+    apk add --no-cache jq

 RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc
core/api.go (18 changes)

@@ -49,6 +49,7 @@ import (
     "crypto/sha256"
     "database/sql"
     "encoding/json"
+    "hash/maphash"
     _log "log"
     "os"

@@ -83,12 +84,12 @@ type SuperGraph struct {
     schema      *psql.DBSchema
     allowList   *allow.List
     encKey      [32]byte
-    prepared    map[string]*preparedItem
+    hashSeed    maphash.Seed
+    queries     map[uint64]*query
     roles       map[string]*Role
     getRole     *sql.Stmt
-    rmap        map[uint64]*resolvFn
+    rmap        map[uint64]resolvFn
     abacEnabled bool
-    anonExists  bool
     qc          *qcode.Compiler
     pc          *psql.Compiler
     ge          *graphql.Engine

@@ -107,10 +108,11 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
     }

     sg := &SuperGraph{
         conf:   conf,
         db:     db,
         dbinfo: dbinfo,
         log:    _log.New(os.Stdout, "", 0),
+        hashSeed: maphash.MakeSeed(),
     }

     if err := sg.initConfig(); err != nil {

@@ -137,7 +139,7 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
         return nil, err
     }

-    if len(conf.SecretKey) != 0 {
+    if conf.SecretKey != "" {
         sk := sha256.Sum256([]byte(conf.SecretKey))
         conf.SecretKey = ""
         sg.encKey = sk
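Note: the string-keyed `prepared` map above becomes `queries map[uint64]*query`, keyed by a `hash/maphash` digest of the query name and role, seeded once per process via `hashSeed`. The `queryID` helper used later in `core.go` is not shown in this diff; the sketch below is a guess at its shape, runnable on its own:

    package main

    import (
        "fmt"
        "hash/maphash"
    )

    // queryID is hypothetical: hash the query name and role under a
    // process-local seed to derive a stable uint64 map key.
    func queryID(h *maphash.Hash, name, role string) uint64 {
        h.Reset()
        _, _ = h.WriteString(name)
        _, _ = h.WriteString(role)
        return h.Sum64()
    }

    func main() {
        h := maphash.Hash{}
        h.SetSeed(maphash.MakeSeed()) // mirrors sg.hashSeed

        fmt.Println(queryID(&h, "createPost", "user"))
        fmt.Println(queryID(&h, "createPost", "anon")) // different role, different key
    }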
@@ -12,7 +12,8 @@ import (
 // to a prepared statement.

 func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
-    vars := make([]interface{}, len(md.Params))
+    params := md.Params()
+    vars := make([]interface{}, len(params))

     var fields map[string]json.RawMessage
     var err error

@@ -25,7 +26,7 @@ func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
         }
     }

-    for i, p := range md.Params {
+    for i, p := range params {
         switch p.Name {
         case "user_id":
             if v := c.Value(UserIDKey); v != nil {
@@ -82,12 +82,13 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
         }
     }

-    if len(sg.conf.RolesQuery) == 0 {
+    if sg.conf.RolesQuery == "" {
         return nil, errors.New("roles_query not defined")
     }

     stmts := make([]stmt, 0, len(sg.conf.Roles))
     w := &bytes.Buffer{}
+    md := psql.Metadata{}

     for i := 0; i < len(sg.conf.Roles); i++ {
         role := &sg.conf.Roles[i]

@@ -105,16 +106,18 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
         stmts = append(stmts, stmt{role: role, qc: qc})
         s := &stmts[len(stmts)-1]

-        s.md, err = sg.pc.Compile(w, qc, psql.Variables(vm))
+        md, err = sg.pc.CompileWithMetadata(w, qc, psql.Variables(vm), md)
         if err != nil {
             return nil, err
         }

         s.sql = w.String()
+        s.md = md

         w.Reset()
     }

-    sql, err := sg.renderUserQuery(stmts)
+    sql, err := sg.renderUserQuery(md, stmts)
     if err != nil {
         return nil, err
     }

@@ -124,13 +127,13 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 }

 //nolint: errcheck
-func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
+func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, error) {
     w := &bytes.Buffer{}

     io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)

     for _, s := range stmts {
-        if len(s.role.Match) == 0 &&
+        if s.role.Match == "" &&
             s.role.Name != "user" && s.role.Name != "anon" {
             continue
         }

@@ -142,12 +145,12 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
     }

     io.WriteString(w, `END) FROM (SELECT (CASE WHEN EXISTS (`)
-    io.WriteString(w, sg.conf.RolesQuery)
+    md.RenderVar(w, sg.conf.RolesQuery)
     io.WriteString(w, `) THEN `)

     io.WriteString(w, `(SELECT (CASE`)
     for _, s := range stmts {
-        if len(s.role.Match) == 0 {
+        if s.role.Match == "" {
             continue
         }
         io.WriteString(w, ` WHEN `)

@@ -158,7 +161,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
     }

     io.WriteString(w, ` ELSE 'user' END) FROM (`)
-    io.WriteString(w, sg.conf.RolesQuery)
+    md.RenderVar(w, sg.conf.RolesQuery)
     io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
     io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler") AS "_sg_auth_info"(role) LIMIT 1; `)
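Note: a single `psql.Metadata` value is now threaded through every `CompileWithMetadata` call, so all role statements share one parameter index and a variable such as `user_id` resolves to the same `$n` placeholder in each of them. A standalone sketch of that dedup bookkeeping (simplified from the `renderValueExp` logic in the new `metadata.go` further down):

    package main

    import "fmt"

    // metadata mimics psql.Metadata's params/pindex pair: the first use
    // of a name allocates the next $n, later uses reuse the same index.
    type metadata struct {
        params []string
        pindex map[string]int
    }

    func (md *metadata) placeholder(name string) string {
        if n, ok := md.pindex[name]; ok {
            return fmt.Sprintf("$%d", n)
        }
        if md.pindex == nil {
            md.pindex = make(map[string]int)
        }
        md.params = append(md.params, name)
        md.pindex[name] = len(md.params)
        return fmt.Sprintf("$%d", len(md.params))
    }

    func main() {
        md := &metadata{}
        fmt.Println(md.placeholder("user_id")) // $1
        fmt.Println(md.placeholder("post_id")) // $2
        fmt.Println(md.placeholder("user_id")) // $1 again, even across statements
    }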
@@ -72,6 +72,7 @@ type Config struct {
 type Table struct {
     Name      string
     Table     string
+    Type      string
     Blocklist []string
     Remotes   []Remote
     Columns   []Column

@@ -151,7 +152,7 @@ type Delete struct {

 // AddRoleTable function is a helper function to make it easy to add per-table
 // row-level config
-func (c *Config) AddRoleTable(role string, table string, conf interface{}) error {
+func (c *Config) AddRoleTable(role, table string, conf interface{}) error {
     var r *Role

     for i := range c.Roles {

@@ -197,30 +198,26 @@ func (c *Config) AddRoleTable(role string, table string, conf interface{}) error
 // ReadInConfig function reads in the config file for the environment specified in the GO_ENV
 // environment variable. This is the best way to create a new Super Graph config.
 func ReadInConfig(configFile string) (*Config, error) {
-    cpath := path.Dir(configFile)
-    cfile := path.Base(configFile)
-    vi := newViper(cpath, cfile)
+    cp := path.Dir(configFile)
+    vi := newViper(cp, path.Base(configFile))

     if err := vi.ReadInConfig(); err != nil {
         return nil, err
     }

-    inherits := vi.GetString("inherits")
-
-    if inherits != "" {
-        vi = newViper(cpath, inherits)
+    if pcf := vi.GetString("inherits"); pcf != "" {
+        cf := vi.ConfigFileUsed()
+        vi = newViper(cp, pcf)

         if err := vi.ReadInConfig(); err != nil {
             return nil, err
         }

-        if vi.IsSet("inherits") {
-            return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
-                inherits,
-                vi.GetString("inherits"))
+        if v := vi.GetString("inherits"); v != "" {
+            return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)", pcf, v)
         }

-        vi.SetConfigName(cfile)
+        vi.SetConfigFile(cf)

         if err := vi.MergeInConfig(); err != nil {
             return nil, err

@@ -234,7 +231,7 @@ func ReadInConfig(configFile string) (*Config, error) {
     }

     if c.AllowListFile == "" {
-        c.AllowListFile = path.Join(cpath, "allow.list")
+        c.AllowListFile = path.Join(cp, "allow.list")
     }

     return c, nil

@@ -248,7 +245,7 @@ func newViper(configPath, configFile string) *viper.Viper {
     vi.AutomaticEnv()

     if filepath.Ext(configFile) != "" {
-        vi.SetConfigFile(configFile)
+        vi.SetConfigFile(path.Join(configPath, configFile))
     } else {
         vi.SetConfigName(configFile)
         vi.AddConfigPath(configPath)
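Note: `ReadInConfig` now resolves `inherits` by loading the parent config first and merging the original file back over it with `vi.MergeInConfig()`, and a parent that itself inherits is rejected. A hedged usage sketch (the paths and file names are examples only):

    package main

    import (
        "log"

        "github.com/dosco/super-graph/core"
    )

    func main() {
        // dev.yml may declare `inherits: base`; base.yml is then read from
        // the same directory and dev.yml's values are merged on top of it.
        conf, err := core.ReadInConfig("./config/dev.yml")
        if err != nil {
            log.Fatal(err)
        }

        log.Println("allow list:", conf.AllowListFile)
    }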
core/core.go (37 changes)

@@ -5,6 +5,7 @@ import (
     "database/sql"
     "encoding/json"
     "fmt"
+    "hash/maphash"
     "time"

     "github.com/dosco/super-graph/core/internal/psql"

@@ -124,7 +125,7 @@ func (c *scontext) execQuery() ([]byte, error) {
         return nil, err
     }

-    if len(data) == 0 || st.md.Skipped == 0 {
+    if len(data) == 0 || st.md.Skipped() == 0 {
         return data, nil
     }

@@ -165,32 +166,44 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {

     } else {
         role = c.role

     }

     c.res.role = role

-    ps, ok := c.sg.prepared[stmtHash(c.res.name, role)]
+    h := maphash.Hash{}
+    h.SetSeed(c.sg.hashSeed)
+    id := queryID(&h, c.res.name, role)
+
+    q, ok := c.sg.queries[id]
     if !ok {
         return nil, nil, errNotFound
     }
-    c.res.sql = ps.st.sql
+
+    if q.sd == nil {
+        q.Do(func() { c.sg.prepare(q, role) })
+
+        if q.err != nil {
+            return nil, nil, err
+        }
+    }
+
+    c.res.sql = q.st.sql

     var root []byte
     var row *sql.Row

-    varsList, err := c.argList(ps.st.md)
+    varsList, err := c.argList(q.st.md)
     if err != nil {
         return nil, nil, err
     }

     if useTx {
-        row = tx.Stmt(ps.sd).QueryRow(varsList...)
+        row = tx.Stmt(q.sd).QueryRow(varsList...)
     } else {
-        row = ps.sd.QueryRow(varsList...)
+        row = q.sd.QueryRow(varsList...)
     }

-    if ps.roleArg {
+    if q.roleArg {
         err = row.Scan(&role, &root)
     } else {
         err = row.Scan(&root)

@@ -204,15 +217,15 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {

     if useTx {
         if err := tx.Commit(); err != nil {
-            return nil, nil, err
+            return nil, nil, q.err
         }
     }

-    if root, err = c.sg.encryptCursor(ps.st.qc, root); err != nil {
+    if root, err = c.sg.encryptCursor(q.st.qc, root); err != nil {
         return nil, nil, err
     }

-    return root, &ps.st, nil
+    return root, &q.st, nil
 }

 func (c *scontext) resolveSQL() ([]byte, *stmt, error) {

@@ -292,7 +305,7 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
         err = row.Scan(&root)
     }

-    if len(role) == 0 {
+    if role == "" {
         c.res.role = c.role
     } else {
         c.res.role = role
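Note: `resolvePreparedSQL` now prepares statements lazily; the first request for a given query ID runs `c.sg.prepare(q, role)` under `q.Do(...)`, which suggests the `query` struct embeds a `sync.Once` (its definition is not part of this diff). A minimal sketch of that once-per-query pattern, with assumed field names:

    package main

    import (
        "fmt"
        "sync"
    )

    // query guesses at the shape implied by q.Do/q.sd/q.err in the diff:
    // the prepared statement and error are set exactly once.
    type query struct {
        sync.Once
        sd  string // stands in for a *sql.Stmt
        err error
    }

    func main() {
        q := &query{}

        var wg sync.WaitGroup
        for i := 0; i < 3; i++ {
            wg.Add(1)
            go func() {
                defer wg.Done()
                // only the first caller runs the prepare step
                q.Do(func() { q.sd = "PREPARED sql" })
            }()
        }
        wg.Wait()

        fmt.Println(q.sd, q.err)
    }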
core/init.go (119 changes)

@@ -21,7 +21,7 @@ func (sg *SuperGraph) initConfig() error {

     for i := 0; i < len(c.Tables); i++ {
         t := &c.Tables[i]
-        t.Name = flect.Pluralize(strings.ToLower(t.Name))
+        // t.Name = flect.Pluralize(strings.ToLower(t.Name))

         if _, ok := tm[t.Name]; ok {
             sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)

@@ -74,14 +74,23 @@ func (sg *SuperGraph) initConfig() error {
     }

     if c.RolesQuery == "" {
-        sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
+        sg.log.Printf("INF roles_query not defined: attribute based access control disabled")
+    } else {
+        n := 0
+        for k, v := range sg.roles {
+            if k == "user" || k == "anon" {
+                n++
+            } else if v.Match != "" {
+                n++
+            }
+        }
+        sg.abacEnabled = (n > 2)
+
+        if !sg.abacEnabled {
+            sg.log.Printf("WRN attribute based access control disabled: no custom roles found (with 'match' defined)")
+        }
     }

-    _, userExists := sg.roles["user"]
-    _, sg.anonExists = sg.roles["anon"]
-
-    sg.abacEnabled = userExists && c.RolesQuery != ""
-
     return nil
 }

@@ -91,21 +100,26 @@ func getDBTableAliases(c *Config) map[string][]string {
     for i := range c.Tables {
         t := c.Tables[i]

-        if len(t.Table) == 0 || len(t.Columns) != 0 {
-            continue
+        if t.Table != "" && t.Type == "" {
+            m[t.Table] = append(m[t.Table], t.Name)
         }
-
-        m[t.Table] = append(m[t.Table], t.Name)
     }
     return m
 }

 func addTables(c *Config, di *psql.DBInfo) error {
+    var err error
+
     for _, t := range c.Tables {
-        if t.Table == "" || len(t.Columns) == 0 {
-            continue
+        switch t.Type {
+        case "json", "jsonb":
+            err = addJsonTable(di, t.Columns, t)
+
+        case "polymorphic":
+            err = addVirtualTable(di, t.Columns, t)
         }
-        if err := addTable(di, t.Columns, t); err != nil {
+
+        if err != nil {
             return err
         }

@@ -113,17 +127,18 @@ func addTables(c *Config, di *psql.DBInfo) error {
     return nil
 }

-func addTable(di *psql.DBInfo, cols []Column, t Table) error {
+func addJsonTable(di *psql.DBInfo, cols []Column, t Table) error {
+    // This is for jsonb columns that want to be tables.
     bc, ok := di.GetColumn(t.Table, t.Name)
     if !ok {
         return fmt.Errorf(
-            "Column '%s' not found on table '%s'",
+            "json table: column '%s' not found on table '%s'",
             t.Name, t.Table)
     }

     if bc.Type != "json" && bc.Type != "jsonb" {
         return fmt.Errorf(
-            "Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
+            "json table: column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
             t.Name, t.Table, bc.Type)
     }

@@ -150,8 +165,38 @@ func addTable(di *psql.DBInfo, cols []Column, t Table) error {
     return nil
 }

+func addVirtualTable(di *psql.DBInfo, cols []Column, t Table) error {
+    if len(cols) == 0 {
+        return fmt.Errorf("polymorphic table: no id column specified")
+    }
+
+    c := cols[0]
+
+    if c.ForeignKey == "" {
+        return fmt.Errorf("polymorphic table: no 'related_to' specified on id column")
+    }
+
+    s := strings.SplitN(c.ForeignKey, ".", 2)
+
+    if len(s) != 2 {
+        return fmt.Errorf("polymorphic table: foreign key must be <type column>.<foreign key column>")
+    }
+
+    di.VTables = append(di.VTables, psql.VirtualTable{
+        Name:       t.Name,
+        IDColumn:   c.Name,
+        TypeColumn: s[0],
+        FKeyColumn: s[1],
+    })
+
+    return nil
+}
+
 func addForeignKeys(c *Config, di *psql.DBInfo) error {
     for _, t := range c.Tables {
+        if t.Type != "" {
+            continue
+        }
         for _, c := range t.Columns {
             if c.ForeignKey == "" {
                 continue

@@ -165,30 +210,52 @@ func addForeignKeys(c *Config, di *psql.DBInfo) error {
 }

 func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
-    c1, ok := di.GetColumn(t.Name, c.Name)
+    var tn string
+
+    if t.Type == "polymorphic" {
+        tn = t.Table
+    } else {
+        tn = t.Name
+    }
+
+    c1, ok := di.GetColumn(tn, c.Name)
     if !ok {
         return fmt.Errorf(
-            "Invalid table '%s' or column '%s' in Config",
-            t.Name, c.Name)
+            "config: invalid table '%s' or column '%s' defined",
+            tn, c.Name)
     }

     v := strings.SplitN(c.ForeignKey, ".", 2)
     if len(v) != 2 {
         return fmt.Errorf(
-            "Invalid foreign_key in Config for table '%s' and column '%s",
-            t.Name, c.Name)
+            "config: invalid foreign_key defined for table '%s' and column '%s': %s",
+            tn, c.Name, c.ForeignKey)
+    }
+
+    // check if it's a polymorphic foreign key
+    if _, ok := di.GetColumn(tn, v[0]); ok {
+        c2, ok := di.GetColumn(tn, v[1])
+        if !ok {
+            return fmt.Errorf(
+                "config: invalid column '%s' for polymorphic relationship on table '%s' and column '%s'",
+                v[1], tn, c.Name)
+        }
+
+        c1.FKeyTable = v[0]
+        c1.FKeyColID = []int16{c2.ID}
+        return nil
     }

     fkt, fkc := v[0], v[1]
-    c2, ok := di.GetColumn(fkt, fkc)
+    c3, ok := di.GetColumn(fkt, fkc)
     if !ok {
         return fmt.Errorf(
-            "Invalid foreign_key in Config for table '%s' and column '%s",
-            t.Name, c.Name)
+            "config: foreign_key for table '%s' and column '%s' points to unknown table '%s' and column '%s'",
+            t.Name, c.Name, v[0], v[1])
     }

     c1.FKeyTable = fkt
-    c1.FKeyColID = []int16{c2.ID}
+    c1.FKeyColID = []int16{c3.ID}

     return nil
 }
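Note: `addVirtualTable` turns a `Table` with `Type: "polymorphic"` into a `psql.VirtualTable`, splitting the id column's `ForeignKey` on "." into the type column and the foreign-key column. A hedged sketch of a matching config entry (the table and column names are illustrative, not from this diff):

    package main

    import "github.com/dosco/super-graph/core"

    func main() {
        // A polymorphic "subject" association: subject_type names the target
        // table and subject_id holds the row id within it. addVirtualTable
        // reads ForeignKey as "<type column>.<foreign key column>".
        t := core.Table{
            Name:  "subject",
            Table: "comments", // table carrying the polymorphic columns
            Type:  "polymorphic",
            Columns: []core.Column{
                {Name: "subject_id", ForeignKey: "subject_type.id"},
            },
        }
        _ = t
    }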
@@ -10,21 +10,23 @@ import (
     "os"
     "sort"
     "strings"
+    "text/scanner"

     "github.com/chirino/graphql/schema"
     "github.com/dosco/super-graph/jsn"
 )

 const (
-    AL_QUERY int = iota + 1
-    AL_VARS
+    expComment = iota + 1
+    expVar
+    expQuery
 )

 type Item struct {
     Name    string
     key     string
     Query   string
-    Vars    json.RawMessage
+    Vars    string
     Comment string
 }

@@ -126,121 +128,101 @@ func (al *List) Set(vars []byte, query, comment string) error {
         return errors.New("empty query")
     }

-    var q string
-
-    for i := 0; i < len(query); i++ {
-        c := query[i]
-        if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' {
-            q = query
-            break
-
-        } else if c == '{' {
-            q = "query " + query
-            break
-        }
-    }
-
     al.saveChan <- Item{
         Comment: comment,
-        Query:   q,
-        Vars:    vars,
+        Query:   query,
+        Vars:    string(vars),
     }

     return nil
 }

 func (al *List) Load() ([]Item, error) {
-    var list []Item
-    varString := "variables"
-
     b, err := ioutil.ReadFile(al.filepath)
     if err != nil {
-        return list, err
+        return nil, err
     }

-    if len(b) == 0 {
-        return list, nil
-    }
+    return parse(string(b), al.filepath)
+}

-    var comment bytes.Buffer
-    var varBytes []byte
-
-    itemMap := make(map[string]struct{})
-
-    s, e, c := 0, 0, 0
-    ty := 0
-
-    for {
-        fq := false
-
-        if c == 0 && b[e] == '#' {
-            s = e
-            for e < len(b) && b[e] != '\n' {
-                e++
-            }
-            if (e - s) > 2 {
-                comment.Write(b[(s + 1):(e + 1)])
-            }
-        }
-
-        if e >= len(b) {
-            break
-        }
-
-        if matchPrefix(b, e, "query") || matchPrefix(b, e, "mutation") {
-            if c == 0 {
-                s = e
-            }
-            ty = AL_QUERY
-        } else if matchPrefix(b, e, varString) {
-            if c == 0 {
-                s = e + len(varString) + 1
-            }
-            ty = AL_VARS
-        } else if b[e] == '{' {
-            c++
-
-        } else if b[e] == '}' {
-            c--
-
-            if c == 0 {
-                if ty == AL_QUERY {
-                    fq = true
-                } else if ty == AL_VARS {
-                    varBytes = b[s:(e + 1)]
-                }
-                ty = 0
-            }
-        }
-
-        if fq {
-            query := string(b[s:(e + 1)])
-            name := QueryName(query)
-            key := strings.ToLower(name)
-
-            if _, ok := itemMap[key]; !ok {
-                v := Item{
-                    Name:    name,
-                    key:     key,
-                    Query:   query,
-                    Vars:    varBytes,
-                    Comment: comment.String(),
-                }
-                list = append(list, v)
-                comment.Reset()
-            }
-
-            varBytes = nil
-
-        }
-
-        e++
-        if e >= len(b) {
-            break
-        }
-    }
-
-    return list, nil
+func parse(b, filename string) ([]Item, error) {
+    var items []Item
+
+    var s scanner.Scanner
+    s.Init(strings.NewReader(b))
+    s.Filename = filename
+    s.Mode ^= scanner.SkipComments
+
+    var op, sp scanner.Position
+    var item Item
+
+    newComment := false
+    st := expComment
+
+    for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
+        txt := s.TokenText()
+
+        switch {
+        case strings.HasPrefix(txt, "/*"):
+            if st == expQuery {
+                v := b[sp.Offset:s.Pos().Offset]
+                item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
+                items = append(items, item)
+            }
+            item = Item{Comment: strings.TrimSpace(txt[2 : len(txt)-2])}
+            sp = s.Pos()
+            st = expComment
+            newComment = true
+
+        case !newComment && strings.HasPrefix(txt, "#"):
+            if st == expQuery {
+                v := b[sp.Offset:s.Pos().Offset]
+                item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
+                items = append(items, item)
+            }
+            item = Item{}
+            sp = s.Pos()
+            st = expComment
+
+        case strings.HasPrefix(txt, "variables"):
+            if st == expComment {
+                v := b[sp.Offset:s.Pos().Offset]
+                item.Comment = strings.TrimSpace(v[:strings.IndexByte(v, '\n')])
+            }
+            sp = s.Pos()
+            st = expVar
+
+        case isGraphQL(txt):
+            if st == expVar {
+                v := b[sp.Offset:s.Pos().Offset]
+                item.Vars = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
+            }
+            sp = op
+            st = expQuery
+        }
+        op = s.Pos()
+    }
+
+    if st == expQuery {
+        v := b[sp.Offset:s.Pos().Offset]
+        item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
+        items = append(items, item)
+    }
+
+    for i := range items {
+        items[i].Name = QueryName(items[i].Query)
+        items[i].key = strings.ToLower(items[i].Name)
+    }
+
+    return items, nil
+}
+
+func isGraphQL(s string) bool {
+    return strings.HasPrefix(s, "query") ||
+        strings.HasPrefix(s, "mutation") ||
+        strings.HasPrefix(s, "subscription")
 }

 func (al *List) save(item Item) error {

@@ -297,57 +279,43 @@ func (al *List) save(item Item) error {
         return strings.Compare(list[i].key, list[j].key) == -1
     })

-    for _, v := range list {
-        cmtLines := strings.Split(v.Comment, "\n")
-
-        i := 0
-        for _, c := range cmtLines {
-            if c = strings.TrimSpace(c); c == "" {
-                continue
-            }
-
-            _, err := f.WriteString(fmt.Sprintf("# %s\n", c))
-            if err != nil {
-                return err
-            }
-            i++
-        }
-
-        if i != 0 {
-            if _, err := f.WriteString("\n"); err != nil {
-                return err
-            }
-        } else {
-            if _, err := f.WriteString(fmt.Sprintf("# Query named %s\n\n", v.Name)); err != nil {
-                return err
-            }
-        }
-
-        if len(v.Vars) != 0 && !bytes.Equal(v.Vars, []byte("{}")) {
+    for i, v := range list {
+        var vars string
+        if v.Vars != "" {
             buf.Reset()

-            if err := jsn.Clear(&buf, v.Vars); err != nil {
-                return fmt.Errorf("failed to clean vars: %w", err)
+            if err := jsn.Clear(&buf, []byte(v.Vars)); err != nil {
+                continue
             }
             vj := json.RawMessage(buf.Bytes())

-            vj, err = json.MarshalIndent(vj, "", " ")
-            if err != nil {
-                return fmt.Errorf("failed to marshal vars: %w", err)
+            if vj, err = json.MarshalIndent(vj, "", " "); err != nil {
+                continue
             }
+            vars = string(vj)
+        }
+        list[i].Vars = vars
+        list[i].Comment = strings.TrimSpace(v.Comment)
+    }

-            _, err = f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
+    for _, v := range list {
+        if v.Comment != "" {
+            _, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
+        } else {
+            _, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
+        }
+
+        if err != nil {
+            return err
+        }
+
+        if v.Vars != "" {
+            _, err = f.WriteString(fmt.Sprintf("variables %s\n\n", v.Vars))
             if err != nil {
                 return err
             }
         }

-        if v.Query[0] == '{' {
-            _, err = f.WriteString(fmt.Sprintf("query %s\n\n", v.Query))
-        } else {
-            _, err = f.WriteString(fmt.Sprintf("%s\n\n", v.Query))
-        }
-
+        _, err = f.WriteString(fmt.Sprintf("%s\n\n", v.Query))
         if err != nil {
             return err
         }

@@ -356,18 +324,6 @@ func (al *List) save(item Item) error {
     return nil
 }

-func matchPrefix(b []byte, i int, s string) bool {
-    if (len(b) - i) < len(s) {
-        return false
-    }
-    for n := 0; n < len(s); n++ {
-        if b[(i+n)] != s[n] {
-            return false
-        }
-    }
-    return true
-}
-
 func QueryName(b string) string {
     state, s := 0, 0
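Note: the rewritten parser is built on the standard `text/scanner` package. XOR-ing `scanner.SkipComments` out of the default `GoTokens` mode makes `Scan` return comments as tokens, which is what lets `parse` treat `/* ... */` blocks as allow-list item boundaries. A small self-contained demonstration of that mode flip:

    package main

    import (
        "fmt"
        "strings"
        "text/scanner"
    )

    func main() {
        src := `/* Hello world */ variables { "x": 1 } query getX { x }`

        var s scanner.Scanner
        s.Init(strings.NewReader(src))
        // GoTokens includes SkipComments; clearing it means comments come
        // back from Scan() instead of being silently dropped.
        s.Mode ^= scanner.SkipComments

        for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
            fmt.Printf("%-7s %q\n", scanner.TokenString(tok), s.TokenText())
        }
    }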
@@ -14,7 +14,7 @@ func TestGQLName1(t *testing.T) {

     name := QueryName(q)

-    if len(name) != 0 {
+    if name != "" {
         t.Fatal("Name should be empty, not ", name)
     }
 }
@@ -82,3 +82,160 @@ func TestGQLName5(t *testing.T) {
         t.Fatal("Name should be empty, not ", name)
     }
 }
+
+func TestParse1(t *testing.T) {
+    var al = `
+    # Hello world
+
+    variables {
+        "data": {
+            "slug": "",
+            "body": "",
+            "post": {
+                "connect": {
+                    "slug": ""
+                }
+            }
+        }
+    }
+
+    mutation createComment {
+        comment(insert: $data) {
+            slug
+            body
+            createdAt: created_at
+            totalVotes: cached_votes_total
+            totalReplies: cached_replies_total
+            vote: comment_vote(where: {user_id: {eq: $user_id}}) {
+                created_at
+                __typename
+            }
+            author: user {
+                slug
+                firstName: first_name
+                lastName: last_name
+                pictureURL: picture_url
+                bio
+                __typename
+            }
+            __typename
+        }
+    }
+
+    # Query named createPost
+
+    query createPost {
+        post(insert: $data) {
+            slug
+            body
+            published
+            createdAt: created_at
+            totalVotes: cached_votes_total
+            totalComments: cached_comments_total
+            vote: post_vote(where: {user_id: {eq: $user_id}}) {
+                created_at
+                __typename
+            }
+            author: user {
+                slug
+                firstName: first_name
+                lastName: last_name
+                pictureURL: picture_url
+                bio
+                __typename
+            }
+            __typename
+        }
+    }`
+
+    _, err := parse(al, "allow.list")
+    if err != nil {
+        t.Fatal(err)
+    }
+}
+
+func TestParse2(t *testing.T) {
+    var al = `
+    /* Hello world */
+
+    variables {
+        "data": {
+            "slug": "",
+            "body": "",
+            "post": {
+                "connect": {
+                    "slug": ""
+                }
+            }
+        }
+    }
+
+    mutation createComment {
+        comment(insert: $data) {
+            slug
+            body
+            createdAt: created_at
+            totalVotes: cached_votes_total
+            totalReplies: cached_replies_total
+            vote: comment_vote(where: {user_id: {eq: $user_id}}) {
+                created_at
+                __typename
+            }
+            author: user {
+                slug
+                firstName: first_name
+                lastName: last_name
+                pictureURL: picture_url
+                bio
+                __typename
+            }
+            __typename
+        }
+    }
+
+    /*
+        Query named createPost
+    */
+
+    variables {
+        "data": {
+            "thread": {
+                "connect": {
+                    "slug": ""
+                }
+            },
+            "slug": "",
+            "published": false,
+            "body": ""
+        }
+    }
+
+    query createPost {
+        post(insert: $data) {
+            slug
+            body
+            published
+            createdAt: created_at
+            totalVotes: cached_votes_total
+            totalComments: cached_comments_total
+            vote: post_vote(where: {user_id: {eq: $user_id}}) {
+                created_at
+                __typename
+            }
+            author: user {
+                slug
+                firstName: first_name
+                lastName: last_name
+                pictureURL: picture_url
+                bio
+                __typename
+            }
+            __typename
+        }
+    }`
+
+    _, err := parse(al, "allow.list")
+    if err != nil {
+        t.Fatal(err)
+    }
+}
@@ -1,4 +1,3 @@
-//nolint:errcheck
 package psql

 import (

@@ -112,15 +111,15 @@ func (c *compilerContext) renderColumnSearchRank(sel *qcode.Select, ti *DBTableI
     c.renderComma(columnsRendered)
     //fmt.Fprintf(w, `ts_rank("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
     //c.sel.Name, cn, arg.Val, col.Name)
-    io.WriteString(c.w, `ts_rank(`)
+    _, _ = io.WriteString(c.w, `ts_rank(`)
     colWithTable(c.w, ti.Name, cn)
     if c.schema.ver >= 110000 {
-        io.WriteString(c.w, `, websearch_to_tsquery(`)
+        _, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
     } else {
-        io.WriteString(c.w, `, to_tsquery(`)
+        _, _ = io.WriteString(c.w, `, to_tsquery(`)
     }
-    c.renderValueExp(Param{Name: arg.Val, Type: "string"})
-    io.WriteString(c.w, `))`)
+    c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
+    _, _ = io.WriteString(c.w, `))`)
     alias(c.w, col.Name)

     return nil

@@ -137,15 +136,15 @@ func (c *compilerContext) renderColumnSearchHeadline(sel *qcode.Select, ti *DBTa
     c.renderComma(columnsRendered)
     //fmt.Fprintf(w, `ts_headline("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
     //c.sel.Name, cn, arg.Val, col.Name)
-    io.WriteString(c.w, `ts_headline(`)
+    _, _ = io.WriteString(c.w, `ts_headline(`)
     colWithTable(c.w, ti.Name, cn)
     if c.schema.ver >= 110000 {
-        io.WriteString(c.w, `, websearch_to_tsquery(`)
+        _, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
     } else {
-        io.WriteString(c.w, `, to_tsquery(`)
+        _, _ = io.WriteString(c.w, `, to_tsquery(`)
     }
-    c.renderValueExp(Param{Name: arg.Val, Type: "string"})
-    io.WriteString(c.w, `))`)
+    c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
+    _, _ = io.WriteString(c.w, `))`)
     alias(c.w, col.Name)

     return nil

@@ -157,9 +156,9 @@ func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInf
     }

     c.renderComma(columnsRendered)
-    io.WriteString(c.w, `(`)
+    _, _ = io.WriteString(c.w, `(`)
     squoted(c.w, ti.Name)
-    io.WriteString(c.w, ` :: text)`)
+    _, _ = io.WriteString(c.w, ` :: text)`)
     alias(c.w, col.Name)

     return nil

@@ -169,9 +168,9 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
     pl := funcPrefixLen(c.schema.fm, col.Name)
     // if pl == 0 {
    // 	//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
-    // 	io.WriteString(c.w, `'`)
-    // 	io.WriteString(c.w, col.Name)
-    // 	io.WriteString(c.w, ` not defined'`)
+    // 	_, _ = io.WriteString(c.w, `'`)
+    // 	_, _ = io.WriteString(c.w, col.Name)
+    // 	_, _ = io.WriteString(c.w, ` not defined'`)
     // 	alias(c.w, col.Name)
     // }

@@ -190,10 +189,10 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
     c.renderComma(columnsRendered)

     //fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Name, cn, col.Name)
-    io.WriteString(c.w, fn)
-    io.WriteString(c.w, `(`)
+    _, _ = io.WriteString(c.w, fn)
+    _, _ = io.WriteString(c.w, `(`)
     colWithTable(c.w, ti.Name, cn)
-    io.WriteString(c.w, `)`)
+    _, _ = io.WriteString(c.w, `)`)
     alias(c.w, col.Name)

     return nil

@@ -201,7 +200,7 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf

 func (c *compilerContext) renderComma(columnsRendered int) {
     if columnsRendered != 0 {
-        io.WriteString(c.w, `, `)
+        _, _ = io.WriteString(c.w, `, `)
     }
 }
@@ -11,7 +11,7 @@ import (
 var (
     qcompileTest, _ = qcode.NewCompiler(qcode.Config{})

-    schema = GetTestSchema()
+    schema, _ = GetTestSchema()

     vars = map[string]string{
         "admin_account_id": "5",

@@ -25,6 +25,37 @@ var (

 // FuzzerEntrypoint for Fuzzbuzz
 func Fuzz(data []byte) int {
+    err1 := query(data)
+    err2 := insert(data)
+    err3 := update(data)
+    err4 := delete(data)
+
+    if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
+        return 0
+    }
+
+    return 1
+}
+
+func query(data []byte) error {
+    gql := data
+
+    qc, err1 := qcompileTest.Compile(gql, "user")
+
+    vars := map[string]json.RawMessage{
+        "data": json.RawMessage(data),
+    }
+
+    _, _, err2 := pcompileTest.CompileEx(qc, vars)
+
+    if err1 != nil {
+        return err1
+    } else {
+        return err2
+    }
+}
+
+func insert(data []byte) error {
     gql := `mutation {
         product(insert: $data) {
             id

@@ -47,9 +78,57 @@ func Fuzz(data []byte) int {
         }
     }

     _, _, err = pcompileTest.CompileEx(qc, vars)
+    return err
+}
+
+func update(data []byte) error {
+    gql := `mutation {
+        product(insert: $data) {
+            id
+            name
+            user {
+                id
+                full_name
+                email
+            }
+        }
+    }`
+
+    qc, err := qcompileTest.Compile([]byte(gql), "user")
     if err != nil {
-        return 0
+        panic("qcompile can't fail")
     }

-    return 1
+    vars := map[string]json.RawMessage{
+        "data": json.RawMessage(data),
+    }
+
+    _, _, err = pcompileTest.CompileEx(qc, vars)
+    return err
+}
+
+func delete(data []byte) error {
+    gql := `mutation {
+        product(insert: $data) {
+            id
+            name
+            user {
+                id
+                full_name
+                email
+            }
+        }
+    }`
+
+    qc, err := qcompileTest.Compile([]byte(gql), "user")
+    if err != nil {
+        panic("qcompile can't fail")
+    }
+
+    vars := map[string]json.RawMessage{
+        "data": json.RawMessage(data),
+    }
+
+    _, _, err = pcompileTest.CompileEx(qc, vars)
+    return err
 }
core/internal/psql/fuzz_test.go (new file, 20 lines)

@@ -0,0 +1,20 @@
+// +build gofuzz
+
+package psql
+
+import (
+    "testing"
+)
+
+var ret int
+
+func TestFuzzCrashers(t *testing.T) {
+    var crashers = []string{
+        "{\"connect\":{}}",
+        "q(q{q{q{q{q{q{q{q{",
+    }
+
+    for _, f := range crashers {
+        ret = Fuzz([]byte(f))
+    }
+}
@@ -25,7 +25,7 @@ func (c *compilerContext) renderInsert(
     if insert[0] == '[' {
         io.WriteString(c.w, `json_array_elements(`)
     }
-    c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
+    c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
     io.WriteString(c.w, ` :: json`)
     if insert[0] == '[' {
         io.WriteString(c.w, `)`)
core/internal/psql/metadata.go (new file, 61 lines)

@@ -0,0 +1,61 @@
+package psql
+
+import (
+    "io"
+)
+
+func (md *Metadata) RenderVar(w io.Writer, vv string) {
+    f, s := -1, 0
+
+    for i := range vv {
+        v := vv[i]
+        switch {
+        case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
+            if (i - s) > 0 {
+                _, _ = io.WriteString(w, vv[s:i])
+            }
+            f = i
+
+        case (v < 'a' && v > 'z') &&
+            (v < 'A' && v > 'Z') &&
+            (v < '0' && v > '9') &&
+            v != '_' &&
+            f != -1 &&
+            (i-f) > 1:
+            md.renderValueExp(w, Param{Name: vv[f+1 : i]})
+            s = i
+            f = -1
+        }
+    }
+
+    if f != -1 && (len(vv)-f) > 1 {
+        md.renderValueExp(w, Param{Name: vv[f+1:]})
+    } else {
+        _, _ = io.WriteString(w, vv[s:])
+    }
+}
+
+func (md *Metadata) renderValueExp(w io.Writer, p Param) {
+    _, _ = io.WriteString(w, `$`)
+    if v, ok := md.pindex[p.Name]; ok {
+        int32String(w, int32(v))
+
+    } else {
+        md.params = append(md.params, p)
+        n := len(md.params)
+
+        if md.pindex == nil {
+            md.pindex = make(map[string]int)
+        }
+        md.pindex[p.Name] = n
+        int32String(w, int32(n))
+    }
+}
+
+func (md Metadata) Skipped() uint32 {
+    return md.skipped
+}
+
+func (md Metadata) Params() []Param {
+    return md.params
+}
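Note: `RenderVar` scans a SQL fragment for `$name` variables, emitting everything else verbatim and replacing each variable with a positional `$n` recorded in the metadata. Since `psql` lives under `core/internal`, the fragment below only compiles from inside the super-graph module; it illustrates the API rather than an external usage pattern:

    w := &bytes.Buffer{}
    md := psql.Metadata{}

    // "$user_id" at the end of the fragment becomes "$1" and the
    // parameter is recorded so argList can bind it at execution time.
    md.RenderVar(w, `SELECT * FROM users WHERE id = $user_id`)

    fmt.Println(w.String()) // SELECT * FROM users WHERE id = $1
    for i, p := range md.Params() {
        fmt.Printf("$%d -> %s\n", i+1, p.Name)
    }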
@ -432,11 +432,11 @@ func (c *compilerContext) renderInsertUpdateColumns(
|
|||||||
val := root.PresetMap[cn]
|
val := root.PresetMap[cn]
|
||||||
switch {
|
switch {
|
||||||
case ok && len(val) > 1 && val[0] == '$':
|
case ok && len(val) > 1 && val[0] == '$':
|
||||||
c.renderValueExp(Param{Name: val[1:], Type: col.Type})
|
c.md.renderValueExp(c.w, Param{Name: val[1:], Type: col.Type})
|
||||||
|
|
||||||
case ok && strings.HasPrefix(val, "sql:"):
|
case ok && strings.HasPrefix(val, "sql:"):
|
||||||
io.WriteString(c.w, `(`)
|
io.WriteString(c.w, `(`)
|
||||||
c.renderVar(val[4:], c.renderValueExp)
|
c.md.RenderVar(c.w, val[4:])
|
||||||
io.WriteString(c.w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
|
|
||||||
case ok:
|
case ok:
|
||||||
@ -542,6 +542,10 @@ func (c *compilerContext) renderConnectStmt(qc *qcode.QCode, w io.Writer,
|
|||||||
|
|
||||||
rel := item.relPC
|
rel := item.relPC
|
||||||
|
|
||||||
|
if rel == nil {
|
||||||
|
return errors.New("invalid connect value")
|
||||||
|
}
|
||||||
|
|
||||||
// Render only for parent-to-child relationship of one-to-one
|
// Render only for parent-to-child relationship of one-to-one
|
||||||
// For this to work the child needs to found first so it's primary key
|
// For this to work the child needs to found first so it's primary key
|
||||||
// can be set in the related column on the parent object.
|
// can be set in the related column on the parent object.
|
||||||
|
@ -25,8 +25,8 @@ type Param struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
type Metadata struct {
|
type Metadata struct {
|
||||||
Skipped uint32
|
skipped uint32
|
||||||
Params []Param
|
params []Param
|
||||||
pindex map[string]int
|
pindex map[string]int
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -80,22 +80,30 @@ func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (Metadata, []byte
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
||||||
|
return co.CompileWithMetadata(w, qc, vars, Metadata{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (co *Compiler) CompileWithMetadata(w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {
|
||||||
|
md.skipped = 0
|
||||||
|
|
||||||
|
if qc == nil {
|
||||||
|
return md, fmt.Errorf("qcode is nil")
|
||||||
|
}
|
||||||
|
|
||||||
switch qc.Type {
|
switch qc.Type {
|
||||||
case qcode.QTQuery:
|
case qcode.QTQuery:
|
||||||
return co.compileQuery(w, qc, vars)
|
return co.compileQueryWithMetadata(w, qc, vars, md)
|
||||||
|
|
||||||
case qcode.QTInsert,
|
case qcode.QTInsert,
|
||||||
qcode.QTUpdate,
|
qcode.QTUpdate,
|
||||||
qcode.QTDelete,
|
qcode.QTDelete,
|
||||||
qcode.QTUpsert:
|
qcode.QTUpsert:
|
||||||
return co.compileMutation(w, qc, vars)
|
return co.compileMutation(w, qc, vars)
|
||||||
|
|
||||||
|
default:
|
||||||
|
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
|
||||||
}
|
}
|
||||||
|
|
||||||
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (co *Compiler) compileQuery(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
|
||||||
return co.compileQueryWithMetadata(w, qc, vars, Metadata{})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (co *Compiler) compileQueryWithMetadata(
|
func (co *Compiler) compileQueryWithMetadata(
|
||||||
@ -148,38 +156,40 @@ func (co *Compiler) compileQueryWithMetadata(
|
|||||||
if id < closeBlock {
|
if id < closeBlock {
|
||||||
sel := &c.s[id]
|
sel := &c.s[id]
|
||||||
|
|
||||||
if len(sel.Cols) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
ti, err := c.schema.GetTable(sel.Name)
|
ti, err := c.schema.GetTable(sel.Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return c.md, err
|
return c.md, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if sel.ParentID == -1 {
|
if sel.Type != qcode.STUnion {
|
||||||
io.WriteString(c.w, `(`)
|
if len(sel.Cols) == 0 {
|
||||||
} else {
|
continue
|
||||||
c.renderLateralJoin(sel)
|
}
|
||||||
}
|
|
||||||
|
|
||||||
if !ti.IsSingular {
|
if sel.ParentID == -1 {
|
||||||
c.renderPluralSelect(sel, ti)
|
io.WriteString(c.w, `(`)
|
||||||
}
|
} else {
|
||||||
|
c.renderLateralJoin(sel)
|
||||||
|
}
|
||||||
|
|
||||||
if err := c.renderSelect(sel, ti, vars); err != nil {
|
if !ti.IsSingular {
|
||||||
return c.md, err
|
c.renderPluralSelect(sel, ti)
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := c.renderSelect(sel, ti, vars); err != nil {
|
||||||
|
return c.md, err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
for _, cid := range sel.Children {
|
for _, cid := range sel.Children {
|
||||||
if hasBit(c.md.Skipped, uint32(cid)) {
|
if hasBit(c.md.skipped, uint32(cid)) {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
child := &c.s[cid]
|
child := &c.s[cid]
|
||||||
|
|
||||||
if child.SkipRender {
|
if child.SkipRender {
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
st.Push(child.ID + closeBlock)
|
st.Push(child.ID + closeBlock)
|
||||||
st.Push(child.ID)
|
st.Push(child.ID)
|
||||||
}
|
}
|
||||||
@ -187,35 +197,37 @@ func (co *Compiler) compileQueryWithMetadata(
|
|||||||
} else {
|
} else {
|
||||||
sel := &c.s[(id - closeBlock)]
|
sel := &c.s[(id - closeBlock)]
|
||||||
|
|
||||||
ti, err := c.schema.GetTable(sel.Name)
|
if sel.Type != qcode.STUnion {
|
||||||
if err != nil {
|
ti, err := c.schema.GetTable(sel.Name)
|
||||||
return c.md, err
|
if err != nil {
|
||||||
}
|
return c.md, err
|
||||||
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
aliasWithID(c.w, "__sr", sel.ID)
|
aliasWithID(c.w, "__sr", sel.ID)
|
||||||
|
|
||||||
io.WriteString(c.w, `)`)
|
|
||||||
aliasWithID(c.w, "__sj", sel.ID)
|
|
||||||
|
|
||||||
if !ti.IsSingular {
|
|
||||||
io.WriteString(c.w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
aliasWithID(c.w, "__sj", sel.ID)
|
aliasWithID(c.w, "__sj", sel.ID)
|
||||||
}
|
|
||||||
|
|
||||||
if sel.ParentID == -1 {
|
if !ti.IsSingular {
|
||||||
if st.Len() != 0 {
|
io.WriteString(c.w, `)`)
|
||||||
io.WriteString(c.w, `, `)
|
aliasWithID(c.w, "__sj", sel.ID)
|
||||||
|
}
|
||||||
|
|
||||||
|
if sel.ParentID == -1 {
|
||||||
|
if st.Len() != 0 {
|
||||||
|
io.WriteString(c.w, `, `)
|
||||||
|
}
|
||||||
|
} else {
|
||||||
|
c.renderLateralJoinClose(sel)
|
||||||
}
|
}
|
||||||
} else {
|
|
||||||
c.renderLateralJoinClose(sel)
|
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(sel.Args) != 0 {
|
if sel.Type != qcode.STMember {
|
||||||
i := 0
|
if len(sel.Args) != 0 {
|
||||||
for _, v := range sel.Args {
|
for _, v := range sel.Args {
|
||||||
qcode.FreeNode(v, 500)
|
qcode.FreeNode(v)
|
||||||
i++
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
@ -350,7 +362,17 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
|
|||||||
if _, ok := colmap[rel.Left.Col]; !ok {
|
if _, ok := colmap[rel.Left.Col]; !ok {
|
||||||
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
|
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
|
||||||
colmap[rel.Left.Col] = struct{}{}
|
colmap[rel.Left.Col] = struct{}{}
|
||||||
c.md.Skipped |= (1 << uint(id))
|
c.md.skipped |= (1 << uint(id))
|
||||||
|
}
|
||||||
|
|
||||||
|
case RelPolymorphic:
|
||||||
|
if _, ok := colmap[rel.Left.Col]; !ok {
|
||||||
|
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Left.Col})
|
||||||
|
colmap[rel.Left.Col] = struct{}{}
|
||||||
|
}
|
||||||
|
if _, ok := colmap[rel.Right.Table]; !ok {
|
||||||
|
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Right.Table, FieldName: rel.Right.Table})
|
||||||
|
colmap[rel.Right.Table] = struct{}{}
|
||||||
}
|
}
|
||||||
|
|
||||||
default:
|
default:
|
||||||
@ -431,15 +453,23 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
var rel *DBRel
|
var rel *DBRel
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
|
// Relationships must be between union parents and their parents
|
||||||
if sel.ParentID != -1 {
|
if sel.ParentID != -1 {
|
||||||
parent := c.s[sel.ParentID]
|
if sel.Type == qcode.STMember && sel.UParentID != -1 {
|
||||||
|
cn := c.s[sel.ParentID].Name
|
||||||
|
pn := c.s[sel.UParentID].Name
|
||||||
|
rel, err = c.schema.GetRel(cn, pn)
|
||||||
|
|
||||||
rel, err = c.schema.GetRel(ti.Name, parent.Name)
|
} else {
|
||||||
if err != nil {
|
pn := c.s[sel.ParentID].Name
|
||||||
return err
|
rel, err = c.schema.GetRel(ti.Name, pn)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
childCols, err := c.initSelect(sel, ti, vars)
|
childCols, err := c.initSelect(sel, ti, vars)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
@ -523,30 +553,27 @@ func (c *compilerContext) renderJoin(sel *qcode.Select, ti *DBTableInfo) error {
 }

 func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
-	rel, err := c.schema.GetRel(table, parent)
-	if err != nil {
-		return err
-	}
+	rel, _ := c.schema.GetRel(table, parent)

 	// This join is only required for one-to-many relations since
 	// these make use of join tables that need to be pulled in.
-	if rel.Type != RelOneToManyThrough {
-		return err
+	if rel == nil || rel.Type != RelOneToManyThrough {
+		return nil
 	}

-	pt, err := c.schema.GetTable(parent)
-	if err != nil {
-		return err
-	}
+	// pt, err := c.schema.GetTable(parent)
+	// if err != nil {
+	// 	return err
+	// }

 	//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
 	//rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Left.Col)
 	io.WriteString(c.w, ` LEFT OUTER JOIN "`)
-	io.WriteString(c.w, rel.Through)
+	io.WriteString(c.w, rel.Through.Table)
 	io.WriteString(c.w, `" ON ((`)
-	colWithTable(c.w, rel.Through, rel.ColT)
+	colWithTable(c.w, rel.Through.Table, rel.Through.ColL)
 	io.WriteString(c.w, `) = (`)
-	colWithTableID(c.w, pt.Name, id, rel.Left.Col)
+	colWithTable(c.w, rel.Left.Table, rel.Left.Col)
 	io.WriteString(c.w, `))`)

 	return nil
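renderJoinByName now reads everything it needs off rel.Through and rel.Left instead of looking up the parent table, and it quietly becomes a no-op when no relation exists. The join it emits for a many-to-many parent is visible in the updated manyToMany golden output near the bottom of this diff:

    LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id"))

The switch from colWithTableID to colWithTable is also why the updated goldens join against the base table ("customers"."id") rather than a numbered parent alias.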
@ -618,7 +645,7 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
 	i := colsRendered

 	for _, id := range sel.Children {
-		if hasBit(c.md.Skipped, uint32(id)) {
+		if hasBit(c.md.skipped, uint32(id)) {
 			continue
 		}
 		childSel := &c.s[id]
@ -633,10 +660,33 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
 			continue
 		}

-		io.WriteString(c.w, `"__sj_`)
-		int32String(c.w, childSel.ID)
-		io.WriteString(c.w, `"."json"`)
-		alias(c.w, childSel.FieldName)
+		if childSel.Type == qcode.STUnion {
+			rel, err := c.schema.GetRel(childSel.Name, ti.Name)
+			if err != nil {
+				return err
+			}
+			io.WriteString(c.w, `(CASE `)
+			for _, uid := range childSel.Children {
+				unionSel := &c.s[uid]
+
+				io.WriteString(c.w, `WHEN `)
+				colWithTableID(c.w, ti.Name, sel.ID, rel.Right.Table)
+				io.WriteString(c.w, ` = `)
+				squoted(c.w, unionSel.Name)
+				io.WriteString(c.w, ` THEN `)
+				io.WriteString(c.w, `"__sj_`)
+				int32String(c.w, unionSel.ID)
+				io.WriteString(c.w, `"."json"`)
+			}
+			io.WriteString(c.w, `END)`)
+			alias(c.w, childSel.FieldName)
+
+		} else {
+			io.WriteString(c.w, `"__sj_`)
+			int32String(c.w, childSel.ID)
+			io.WriteString(c.w, `"."json"`)
+			alias(c.w, childSel.FieldName)
+		}

 		if childSel.Paging.Type != qcode.PtOffset {
 			io.WriteString(c.w, `, "__sj_`)
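For a union (polymorphic) child the compiler can no longer project a single "__sj_N"."json" column, since the matching row may come from any member select; instead it switches on the stored type column. A rough sketch of what this loop appears to produce, assuming a hypothetical notifications table with a subject union over customers and products (these names are illustrative, not from this diff):

    (CASE WHEN "notifications_0"."subject_type" = 'customers' THEN "__sj_1"."json" WHEN "notifications_0"."subject_type" = 'products' THEN "__sj_2"."json" END) AS "subject"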
@ -691,7 +741,8 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
 	}

 	io.WriteString(c.w, ` WHERE (`)
-	if err := c.renderRelationship(sel, ti); err != nil {
+
+	if err := c.renderRelationship(sel, rel); err != nil {
 		return err
 	}
 	if isFil {
@ -723,7 +774,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
 	case ti.IsSingular:
 		io.WriteString(c.w, ` LIMIT ('1') :: integer`)

-	case len(sel.Paging.Limit) != 0:
+	case sel.Paging.Limit != "":
 		//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
 		io.WriteString(c.w, ` LIMIT ('`)
 		io.WriteString(c.w, sel.Paging.Limit)
@ -736,7 +787,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
 		io.WriteString(c.w, ` LIMIT ('20') :: integer`)
 	}

-	if len(sel.Paging.Offset) != 0 {
+	if sel.Paging.Offset != "" {
 		//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
 		io.WriteString(c.w, ` OFFSET ('`)
 		io.WriteString(c.w, sel.Paging.Offset)
@ -800,27 +851,30 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
 		quoted(c.w, ob.Col)
 	}
 	io.WriteString(c.w, ` FROM string_to_array(`)
-	c.renderValueExp(Param{Name: "cursor", Type: "json"})
+	c.md.renderValueExp(c.w, Param{Name: "cursor", Type: "json"})
 	io.WriteString(c.w, `, ',') as a) `)
 	return nil
 }

-func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
-	parent := c.s[sel.ParentID]
-
-	pti, err := c.schema.GetTable(parent.Name)
-	if err != nil {
-		return err
-	}
-
-	return c.renderRelationshipByName(ti.Name, pti.Name, parent.ID)
-}
-
-func (c *compilerContext) renderRelationshipByName(table, parent string, id int32) error {
+func (c *compilerContext) renderRelationshipByName(table, parent string) error {
 	rel, err := c.schema.GetRel(table, parent)
 	if err != nil {
 		return err
 	}
+	return c.renderRelationship(nil, rel)
+}
+
+func (c *compilerContext) renderRelationship(sel *qcode.Select, rel *DBRel) error {
+	var pid int32
+
+	switch {
+	case sel == nil:
+		pid = int32(-1)
+	case sel.Type == qcode.STMember:
+		pid = sel.UParentID
+	default:
+		pid = sel.ParentID
+	}

 	io.WriteString(c.w, `((`)
@ -832,19 +886,19 @@ func (c *compilerContext) renderRelationshipByName(table, parent string, id int3

 	switch {
 	case !rel.Left.Array && rel.Right.Array:
-		colWithTable(c.w, table, rel.Left.Col)
+		colWithTable(c.w, rel.Left.Table, rel.Left.Col)
 		io.WriteString(c.w, `) = any (`)
-		colWithTableID(c.w, parent, id, rel.Right.Col)
+		colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)

 	case rel.Left.Array && !rel.Right.Array:
-		colWithTableID(c.w, parent, id, rel.Right.Col)
+		colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
 		io.WriteString(c.w, `) = any (`)
-		colWithTable(c.w, table, rel.Left.Col)
+		colWithTable(c.w, rel.Left.Table, rel.Left.Col)

 	default:
-		colWithTable(c.w, table, rel.Left.Col)
+		colWithTable(c.w, rel.Left.Table, rel.Left.Col)
 		io.WriteString(c.w, `) = (`)
-		colWithTableID(c.w, parent, id, rel.Right.Col)
+		colWithTableID(c.w, rel.Right.Table, pid, rel.Right.Col)
 	}

 	case RelOneToManyThrough:
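These three cases change only how the two sides are addressed (rel.Left.Table, rel.Right.Table, and pid instead of the caller-supplied table, parent, and id); the emitted SQL shapes are unchanged. The array form, for example, still renders as in the oneToManyArray golden output below:

    WHERE ((("tags"."slug") = any ("products_2"."tags")))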
@ -854,25 +908,34 @@ func (c *compilerContext) renderRelationshipByName(table, parent string, id int3

 	switch {
 	case !rel.Left.Array && rel.Right.Array:
-		colWithTable(c.w, table, rel.Left.Col)
+		colWithTable(c.w, rel.Left.Table, rel.Left.Col)
 		io.WriteString(c.w, `) = any (`)
-		colWithTable(c.w, rel.Through, rel.Right.Col)
+		colWithTable(c.w, rel.Through.Table, rel.Through.ColR)

 	case rel.Left.Array && !rel.Right.Array:
-		colWithTable(c.w, rel.Through, rel.Right.Col)
+		colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
 		io.WriteString(c.w, `) = any (`)
-		colWithTable(c.w, table, rel.Left.Col)
+		colWithTable(c.w, rel.Left.Table, rel.Left.Col)

 	default:
-		colWithTable(c.w, table, rel.Left.Col)
+		colWithTable(c.w, rel.Through.Table, rel.Through.ColR)
 		io.WriteString(c.w, `) = (`)
-		colWithTable(c.w, rel.Through, rel.Right.Col)
+		colWithTable(c.w, rel.Right.Table, rel.Right.Col)
 	}

 	case RelEmbedded:
 		colWithTable(c.w, rel.Left.Table, rel.Left.Col)
 		io.WriteString(c.w, `) = (`)
-		colWithTableID(c.w, parent, id, rel.Left.Col)
+		colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)

+	case RelPolymorphic:
+		colWithTable(c.w, sel.Name, rel.Right.Col)
+		io.WriteString(c.w, `) = (`)
+		colWithTableID(c.w, rel.Left.Table, pid, rel.Left.Col)
+		io.WriteString(c.w, `) AND (`)
+		colWithTableID(c.w, rel.Left.Table, pid, rel.Right.Table)
+		io.WriteString(c.w, `) = (`)
+		squoted(c.w, sel.Name)
 	}

 	io.WriteString(c.w, `))`)
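The new RelPolymorphic case renders a two-part predicate: the id match plus a type guard comparing the parent's type column against the member table's name. Sketching it with the same hypothetical notifications/subject naming as above (illustrative only, inferred from the writes in this case):

    (("users"."id") = ("notifications_0"."subject_id") AND ("notifications_0"."subject_type") = ('users'))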
@ -948,11 +1011,8 @@ func (c *compilerContext) renderExp(ex *qcode.Exp, ti *DBTableInfo, skipNested b
 				return err
 			}

-			} else {
-				//fmt.Fprintf(w, `(("%s"."%s") `, c.sel.Name, val.Col)
-				if err := c.renderOp(val, ti); err != nil {
-					return err
-				}
+			} else if err := c.renderOp(val, ti); err != nil {
+				return err
 			}
 		}
 		//qcode.FreeExp(val)
@ -985,7 +1045,7 @@ func (c *compilerContext) renderNestedWhere(ex *qcode.Exp, ti *DBTableInfo) erro

 		io.WriteString(c.w, ` WHERE `)

-		if err := c.renderRelationshipByName(cti.Name, ti.Name, -1); err != nil {
+		if err := c.renderRelationshipByName(cti.Name, ti.Name); err != nil {
 			return err
 		}

@ -1014,7 +1074,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
 		return nil
 	}

-	if len(ex.Col) != 0 {
+	if ex.Col != "" {
 		if col, ok = ti.ColMap[ex.Col]; !ok {
 			return fmt.Errorf("no column '%s' found ", ex.Col)
 		}
@ -1098,7 +1158,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
 		} else {
 			io.WriteString(c.w, `) @@ to_tsquery(`)
 		}
-		c.renderValueExp(Param{Name: ex.Val, Type: "string"})
+		c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: "string"})
 		io.WriteString(c.w, `))`)

 	return nil
@ -1187,7 +1247,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
 	switch {
 	case ok && strings.HasPrefix(val, "sql:"):
 		io.WriteString(c.w, `(`)
-		c.renderVar(val[4:], c.renderValueExp)
+		c.md.RenderVar(c.w, val[4:])
 		io.WriteString(c.w, `)`)

 	case ok:
@ -1195,7 +1255,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *

 	case ex.Op == qcode.OpIn || ex.Op == qcode.OpNotIn:
 		io.WriteString(c.w, `(ARRAY(SELECT json_array_elements_text(`)
-		c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: true})
+		c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: true})
 		io.WriteString(c.w, `))`)

 		io.WriteString(c.w, ` :: `)
@ -1204,7 +1264,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
 		return

 	default:
-		c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: false})
+		c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: false})
 	}

 	case qcode.ValRef:
@ -1218,54 +1278,6 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
 		io.WriteString(c.w, col.Type)
 	}
 }

-func (c *compilerContext) renderValueExp(p Param) {
-	io.WriteString(c.w, `$`)
-	if v, ok := c.md.pindex[p.Name]; ok {
-		int32String(c.w, int32(v))
-
-	} else {
-		c.md.Params = append(c.md.Params, p)
-		n := len(c.md.Params)
-
-		if c.md.pindex == nil {
-			c.md.pindex = make(map[string]int)
-		}
-		c.md.pindex[p.Name] = n
-		int32String(c.w, int32(n))
-	}
-}
-
-func (c *compilerContext) renderVar(vv string, fn func(Param)) {
-	f, s := -1, 0
-
-	for i := range vv {
-		v := vv[i]
-		switch {
-		case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
-			if (i - s) > 0 {
-				io.WriteString(c.w, vv[s:i])
-			}
-			f = i
-
-		case (v < 'a' && v > 'z') &&
-			(v < 'A' && v > 'Z') &&
-			(v < '0' && v > '9') &&
-			v != '_' &&
-			f != -1 &&
-			(i-f) > 1:
-			fn(Param{Name: vv[f+1 : i]})
-			s = i
-			f = -1
-		}
-	}
-
-	if f != -1 && (len(vv)-f) > 1 {
-		fn(Param{Name: vv[f+1:]})
-	} else {
-		io.WriteString(c.w, vv[s:])
-	}
-}
-
 func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
 	switch {
 	case strings.HasPrefix(fn, "avg_"):
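The two parameter helpers above aren't just deleted: every call site in this file now goes through c.md (c.md.renderValueExp(c.w, …) and c.md.RenderVar(c.w, …)), so placeholder state appears to live on the compiled query's metadata rather than on compilerContext. A minimal sketch of the interning idea the removed code implements, with simplified names (paramIndex and placeholder are illustrative, not the real API):

    // paramIndex interns named parameters in first-use order and
    // returns their 1-based Postgres placeholder number ($1, $2, ...),
    // so repeated references to one variable reuse one placeholder.
    type paramIndex struct {
        params []string       // first-use order; len gives the next position
        index  map[string]int // name -> 1-based placeholder number
    }

    func (p *paramIndex) placeholder(name string) int {
        if n, ok := p.index[name]; ok {
            return n // already interned: reuse $n
        }
        if p.index == nil {
            p.index = make(map[string]int)
        }
        p.params = append(p.params, name)
        p.index[name] = len(p.params)
        return len(p.params)
    }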
@ -1302,7 +1314,7 @@ func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
 	return 0
 }

-func hasBit(n uint32, pos uint32) bool {
+func hasBit(n, pos uint32) bool {
 	val := n & (1 << pos)
 	return (val > 0)
 }
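hasBit is the read side of the skipped bitmask written in initSelect above (c.md.skipped |= (1 << uint(id))): each child select ID is a bit position, which lets renderJoinColumns drop skipped children without any extra allocation. A small illustration:

    var skipped uint32
    skipped |= (1 << uint(3)) // initSelect: mark child select 3 as skipped

    hasBit(skipped, 3) // true  -> renderJoinColumns continues past it
    hasBit(skipped, 2) // false -> column is rendered normally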
@ -1353,8 +1365,6 @@ func squoted(w io.Writer, identifier string) {
 	io.WriteString(w, `'`)
 }

-const charset = "0123456789"
-
 func int32String(w io.Writer, val int32) {
 	io.WriteString(w, strconv.FormatInt(int64(val), 10))
 }
@ -307,6 +307,100 @@ func multiRoot(t *testing.T) {
 	compileGQLToPSQL(t, gql, nil, "user")
 }

+func withFragment1(t *testing.T) {
+	gql := `
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}`
+
+	compileGQLToPSQL(t, gql, nil, "anon")
+}
+
+func withFragment2(t *testing.T) {
+	gql := `
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}`
+
+	compileGQLToPSQL(t, gql, nil, "anon")
+}
+
+func withFragment3(t *testing.T) {
+	gql := `
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}
+
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+	`
+
+	compileGQLToPSQL(t, gql, nil, "anon")
+}
+
+// func withInlineFragment(t *testing.T) {
+// 	gql := `
+// 	query {
+// 		users {
+// 			... on users {
+// 				id
+// 				email
+// 			}
+// 			created_at
+// 			... on user {
+// 				first_name
+// 				last_name
+// 			}
+// 		}
+// 	}
+// 	`
+
+// 	compileGQLToPSQL(t, gql, nil, "anon")
+// }

 func withCursor(t *testing.T) {
 	gql := `query {
 		Products(
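The three withFragment tests differ only in where the fragment definitions sit relative to the query that spreads them — after it, before it, and split around it — pinning down that fragment resolution is order-independent. The inline-fragment variant stays commented out, matching the disabled t.Run in the hunk below; individually these should be runnable with something like go test -run 'TestCompileQuery/withFragment1' (the package path isn't shown in this diff).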
@ -400,6 +494,10 @@ func TestCompileQuery(t *testing.T) {
 	t.Run("queryWithVariables", queryWithVariables)
 	t.Run("withWhereOnRelations", withWhereOnRelations)
 	t.Run("multiRoot", multiRoot)
+	t.Run("withFragment1", withFragment1)
+	t.Run("withFragment2", withFragment2)
+	t.Run("withFragment3", withFragment3)
+	//t.Run("withInlineFragment", withInlineFragment)
 	t.Run("jsonColumnAsTable", jsonColumnAsTable)
 	t.Run("withCursor", withCursor)
 	t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)
@ -11,6 +11,7 @@ type DBSchema struct {
 	ver int
 	t   map[string]*DBTableInfo
 	rm  map[string]map[string]*DBRel
+	vt  map[string]*VirtualTable
 	fm  map[string]*DBFunction
 }
@ -33,15 +34,19 @@ const (
 	RelOneToOne RelType = iota + 1
 	RelOneToMany
 	RelOneToManyThrough
+	RelPolymorphic
 	RelEmbedded
 	RelRemote
 )

 type DBRel struct {
 	Type    RelType
-	Through string
-	ColT    string
-	Left    struct {
+	Through struct {
+		Table string
+		ColL  string
+		ColR  string
+	}
+	Left struct {
 		col   *DBColumn
 		Table string
 		Col   string
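Through grows from a bare join-table name (plus the loose ColT field) into a struct naming both join-table columns, which is what lets renderJoinByName and the RelOneToManyThrough rendering above stop re-deriving them. Populated by updateSchemaOTMT further down, the products-under-customers relation from the test fixtures would end up roughly as follows (values inferred from the manyToManyReverse golden output at the bottom of this diff):

    rel := &DBRel{Type: RelOneToManyThrough}
    rel.Through.Table = "purchases"  // the join table itself
    rel.Through.ColL = "product_id"  // matched against rel.Left in the ON clause
    rel.Through.ColR = "customer_id" // matched against rel.Right in the WHERE clause
    rel.Left.Table, rel.Left.Col = "products", "id"
    rel.Right.Table, rel.Right.Col = "customers", "id"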
@ -60,6 +65,7 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
 		ver: info.Version,
 		t:   make(map[string]*DBTableInfo),
 		rm:  make(map[string]map[string]*DBRel),
+		vt:  make(map[string]*VirtualTable),
 		fm:  make(map[string]*DBFunction, len(info.Functions)),
 	}

@ -70,6 +76,10 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
 		}
 	}

+	if err := schema.virtualRels(info.VTables); err != nil {
+		return nil, err
+	}
+
 	for i, t := range info.Tables {
 		err := schema.firstDegreeRels(t, info.Columns[i])
 		if err != nil {
@ -102,7 +112,7 @@ func (s *DBSchema) addTable(
 	singular := flect.Singularize(t.Key)
 	plural := flect.Pluralize(t.Key)

-	s.t[singular] = &DBTableInfo{
+	ts := &DBTableInfo{
 		Name:       t.Name,
 		Type:       t.Type,
 		IsSingular: true,
@ -112,8 +122,9 @@ func (s *DBSchema) addTable(
 		Singular:   singular,
 		Plural:     plural,
 	}
+	s.t[singular] = ts

-	s.t[plural] = &DBTableInfo{
+	tp := &DBTableInfo{
 		Name:       t.Name,
 		Type:       t.Type,
 		IsSingular: false,
@ -123,14 +134,15 @@ func (s *DBSchema) addTable(
 		Singular:   singular,
 		Plural:     plural,
 	}
+	s.t[plural] = tp

 	if al, ok := aliases[t.Key]; ok {
 		for i := range al {
 			k1 := flect.Singularize(al[i])
-			s.t[k1] = s.t[singular]
+			s.t[k1] = ts

 			k2 := flect.Pluralize(al[i])
-			s.t[k2] = s.t[plural]
+			s.t[k2] = tp
 		}
 	}

@ -154,6 +166,54 @@ func (s *DBSchema) addTable(
 	return nil
 }

+func (s *DBSchema) virtualRels(vts []VirtualTable) error {
+	for _, vt := range vts {
+		s.vt[vt.Name] = &vt
+
+		for _, t := range s.t {
+			idCol, ok := t.ColMap[vt.IDColumn]
+			if !ok {
+				continue
+			}
+			if _, ok = t.ColMap[vt.TypeColumn]; !ok {
+				continue
+			}
+
+			nt := DBTable{
+				ID:   -1,
+				Name: vt.Name,
+				Key:  strings.ToLower(vt.Name),
+				Type: "virtual",
+			}
+
+			if err := s.addTable(nt, nil, nil); err != nil {
+				return err
+			}
+
+			rel := &DBRel{Type: RelPolymorphic}
+			rel.Left.col = idCol
+			rel.Left.Table = t.Name
+			rel.Left.Col = idCol.Name
+
+			rcol := DBColumn{
+				Name: vt.FKeyColumn,
+				Key:  strings.ToLower(vt.FKeyColumn),
+				Type: idCol.Type,
+			}
+
+			rel.Right.col = &rcol
+			rel.Right.Table = vt.TypeColumn
+			rel.Right.Col = rcol.Name
+
+			if err := s.SetRel(vt.Name, t.Name, rel); err != nil {
+				return err
+			}
+		}
+	}
+
+	return nil
+}
+
 func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
 	ct := t.Key
 	cti, ok := s.t[ct]
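virtualRels is the schema half of the new RelPolymorphic support: each entry in info.VTables becomes a synthetic table (ID -1, type "virtual") plus a polymorphic relation to every real table that carries both the configured ID and type columns. A Rails-style subject association might be declared roughly like this (illustrative values; the config surface that fills info.VTables isn't shown in this diff):

    vt := VirtualTable{
        Name:       "subject",      // name the virtual table is queried under
        IDColumn:   "subject_id",   // column on the owning table holding the foreign key
        TypeColumn: "subject_type", // column naming which table the row points at
        FKeyColumn: "id",           // matching column on the target tables
    }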
@ -164,7 +224,7 @@ func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
 	for i := range cols {
 		c := cols[i]

-		if len(c.FKeyTable) == 0 {
+		if c.FKeyTable == "" {
 			continue
 		}

@ -268,7 +328,7 @@ func (s *DBSchema) secondDegreeRels(t DBTable, cols []DBColumn) error {
 	for i := range cols {
 		c := cols[i]

-		if len(c.FKeyTable) == 0 {
+		if c.FKeyTable == "" {
 			continue
 		}

@ -344,16 +404,17 @@ func (s *DBSchema) updateSchemaOTMT(
 	// One-to-many-through relation between 1nd foreign key table and the
 	// 2nd foreign key table
 	rel1 := &DBRel{Type: RelOneToManyThrough}
-	rel1.Through = ti.Name
-	rel1.ColT = col2.Name
+	rel1.Through.Table = ti.Name
+	rel1.Through.ColL = col1.Name
+	rel1.Through.ColR = col2.Name

-	rel1.Left.col = &col2
-	rel1.Left.Table = col2.FKeyTable
-	rel1.Left.Col = fc2.Name
+	rel1.Left.col = fc1
+	rel1.Left.Table = col1.FKeyTable
+	rel1.Left.Col = fc1.Name

-	rel1.Right.col = &col1
-	rel1.Right.Table = ti.Name
-	rel1.Right.Col = col1.Name
+	rel1.Right.col = fc2
+	rel1.Right.Table = t2
+	rel1.Right.Col = fc2.Name

 	if err := s.SetRel(t1, t2, rel1); err != nil {
 		return err
@ -362,16 +423,17 @@ func (s *DBSchema) updateSchemaOTMT(
 	// One-to-many-through relation between 2nd foreign key table and the
 	// 1nd foreign key table
 	rel2 := &DBRel{Type: RelOneToManyThrough}
-	rel2.Through = ti.Name
-	rel2.ColT = col1.Name
+	rel2.Through.Table = ti.Name
+	rel2.Through.ColL = col2.Name
+	rel2.Through.ColR = col1.Name

-	rel1.Left.col = fc1
-	rel2.Left.Table = col1.FKeyTable
-	rel2.Left.Col = fc1.Name
+	rel2.Left.col = fc2
+	rel2.Left.Table = col2.FKeyTable
+	rel2.Left.Col = fc2.Name

-	rel1.Right.col = &col2
-	rel2.Right.Table = ti.Name
-	rel2.Right.Col = col2.Name
+	rel2.Right.col = fc1
+	rel2.Right.Table = t1
+	rel2.Right.Col = fc1.Name

 	if err := s.SetRel(t2, t1, rel2); err != nil {
 		return err
@ -14,14 +14,18 @@ func (rt RelType) String() string {
 		return "remote"
 	case RelEmbedded:
 		return "embedded"
+	case RelPolymorphic:
+		return "polymorphic"
 	}
 	return ""
 }

 func (re *DBRel) String() string {
 	if re.Type == RelOneToManyThrough {
-		return fmt.Sprintf("'%s.%s' --(Through: %s)--> '%s.%s'",
-			re.Left.Table, re.Left.Col, re.Through, re.Right.Table, re.Right.Col)
+		return fmt.Sprintf("'%s.%s' --(%s.%s, %s.%s)--> '%s.%s'",
+			re.Left.Table, re.Left.Col,
+			re.Through.Table, re.Through.ColL, re.Through.Table, re.Through.ColR,
+			re.Right.Table, re.Right.Col)
 	}
 	return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'",
 		re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col)
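With Through now a struct, the one-to-many-through debug string can show both join-table columns instead of just the table name. For the purchases relation sketched earlier, old versus new output would read roughly:

    'products.id' --(Through: purchases)--> 'customers.id'
    'products.id' --(purchases.product_id, purchases.customer_id)--> 'customers.id'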
@ -14,9 +14,17 @@ type DBInfo struct {
 	Tables    []DBTable
 	Columns   [][]DBColumn
 	Functions []DBFunction
+	VTables   []VirtualTable
 	colMap    map[string]map[string]*DBColumn
 }

+type VirtualTable struct {
+	Name       string
+	IDColumn   string
+	TypeColumn string
+	FKeyColumn string
+}
+
 func GetDBInfo(db *sql.DB, schema string) (*DBInfo, error) {
 	di := &DBInfo{}
 	var version string
@ -8,8 +8,8 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "produc
 === RUN TestCompileInsert/simpleInsertWithPresets
 WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, $2 :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
 === RUN TestCompileInsert/nestedInsertManyToMany
-WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
 WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
+WITH "_sg_input" AS (SELECT $1 :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
 === RUN TestCompileInsert/nestedInsertOneToMany
 WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
 === RUN TestCompileInsert/nestedInsertOneToOne
@ -20,7 +20,7 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (INSERT INTO "users" ("
 WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
 === RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
 WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
---- PASS: TestCompileInsert (0.02s)
+--- PASS: TestCompileInsert (0.03s)
 --- PASS: TestCompileInsert/simpleInsert (0.00s)
 --- PASS: TestCompileInsert/singleInsert (0.00s)
 --- PASS: TestCompileInsert/bulkInsert (0.00s)
@ -67,9 +67,9 @@ SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT
 === RUN TestCompileQuery/oneToManyArray
 SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
 === RUN TestCompileQuery/manyToMany
-SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
+SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
 === RUN TestCompileQuery/manyToManyReverse
-SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
+SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products"."id")) WHERE ((("purchases"."customer_id") = ("customers"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
 === RUN TestCompileQuery/aggFunction
 SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
 === RUN TestCompileQuery/aggFunctionBlockedByCol
@ -85,7 +85,13 @@ SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT t
=== RUN TestCompileQuery/withWhereOnRelations
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/multiRoot
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers"."id")) WHERE ((("purchases"."product_id") = ("products"."id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment1
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment2
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment3
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/jsonColumnAsTable
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withCursor
@ -117,6 +123,9 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
--- PASS: TestCompileQuery/queryWithVariables (0.00s)
--- PASS: TestCompileQuery/withWhereOnRelations (0.00s)
--- PASS: TestCompileQuery/multiRoot (0.00s)
--- PASS: TestCompileQuery/withFragment1 (0.00s)
--- PASS: TestCompileQuery/withFragment2 (0.00s)
--- PASS: TestCompileQuery/withFragment3 (0.00s)
--- PASS: TestCompileQuery/jsonColumnAsTable (0.00s)
--- PASS: TestCompileQuery/withCursor (0.00s)
--- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s)
@ -137,8 +146,8 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "products" AS (UPDATE "products" S
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = $2 :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = $2 :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileUpdate (0.02s)
@ -151,4 +160,4 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALU
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
-ok github.com/dosco/super-graph/core/internal/psql (cached)
+ok github.com/dosco/super-graph/core/internal/psql 0.323s
@ -22,7 +22,7 @@ func (c *compilerContext) renderUpdate(
 	}

 	io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
-	c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
+	c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
 	// io.WriteString(c.w, qc.ActionVar)
 	io.WriteString(c.w, ` :: json AS j)`)

@ -121,12 +121,10 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
 	}
 	io.WriteString(w, `)`)

-	} else {
-		if qc.Selects[0].Where != nil {
-			io.WriteString(w, `WHERE `)
-			if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
-				return err
-			}
-		}
-	}
+	} else if qc.Selects[0].Where != nil {
+		io.WriteString(w, `WHERE `)
+		if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
+			return err
+		}
+	}

core/internal/qcode/bench.11 (new file, 13 lines)
@ -0,0 +1,13 @@
+goos: darwin
+goarch: amd64
+pkg: github.com/dosco/super-graph/core/internal/qcode
+BenchmarkQCompile-16            118282   9686 ns/op    4031 B/op   30 allocs/op
+BenchmarkQCompileP-16           427531   2710 ns/op    4077 B/op   30 allocs/op
+BenchmarkQCompileFragment-16    140588   8328 ns/op    8903 B/op   13 allocs/op
+BenchmarkParse-16               131396   9212 ns/op    4175 B/op   18 allocs/op
+BenchmarkParseP-16              503778   2310 ns/op    4176 B/op   18 allocs/op
+BenchmarkParseFragment-16       143725   8158 ns/op   10193 B/op    9 allocs/op
+BenchmarkSchemaParse-16         240609   5060 ns/op    3968 B/op   57 allocs/op
+BenchmarkSchemaParseP-16        785116   1534 ns/op    3968 B/op   57 allocs/op
+PASS
+ok github.com/dosco/super-graph/core/internal/qcode 11.092s
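These figures come from the standard Go benchmark harness; assuming the repository layout above, something like the following reproduces them (the numbers are of course machine-dependent):

	go test -bench . -benchmem ./core/internal/qcode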
@ -11,15 +11,18 @@ import (
 var (
 	queryToken        = []byte("query")
 	mutationToken     = []byte("mutation")
+	fragmentToken     = []byte("fragment")
 	subscriptionToken = []byte("subscription")
+	onToken           = []byte("on")
 	trueToken         = []byte("true")
 	falseToken        = []byte("false")
 	quotesToken       = []byte(`'"`)
 	signsToken        = []byte(`+-`)
-	punctuatorToken   = []byte(`!():=[]{|}`)
 	spreadToken       = []byte(`...`)
 	digitToken        = []byte(`0123456789`)
 	dotToken          = []byte(`.`)
+
+	punctuatorToken = `!():=[]{|}`
 )

 // Pos represents a byte position in the original input text from which
@ -43,6 +46,8 @@ const (
 	itemName
 	itemQuery
 	itemMutation
+	itemFragment
+	itemOn
 	itemSub
 	itemPunctuator
 	itemArgsOpen
@ -136,8 +141,7 @@ func (l *lexer) current() (Pos, Pos) {
 func (l *lexer) emit(t itemType) {
 	l.items = append(l.items, item{t, l.start, l.pos, l.line})
 	// Some items contain text internally. If so, count their newlines.
-	switch t {
-	case itemStringVal:
+	if t == itemStringVal {
 		for i := l.start; i < l.pos; i++ {
 			if l.input[i] == '\n' {
 				l.line++
@ -263,11 +267,11 @@ func lexRoot(l *lexer) stateFn {
 		l.backup()
 		return lexString
 	case r == '.':
-		if len(l.input) >= 3 {
-			if equals(l.input, 0, 3, spreadToken) {
-				l.emit(itemSpread)
-				return lexRoot
-			}
+		l.acceptRun(dotToken)
+		s, e := l.current()
+		if equals(l.input, s, e, spreadToken) {
+			l.emit(itemSpread)
+			return lexRoot
 		}
 		fallthrough // '.' can start a number.
 	case r == '+' || r == '-' || ('0' <= r && r <= '9'):
@ -299,10 +303,14 @@ func lexName(l *lexer) stateFn {
 	switch {
 	case equals(l.input, s, e, queryToken):
 		l.emitL(itemQuery)
+	case equals(l.input, s, e, fragmentToken):
+		l.emitL(itemFragment)
 	case equals(l.input, s, e, mutationToken):
 		l.emitL(itemMutation)
 	case equals(l.input, s, e, subscriptionToken):
 		l.emitL(itemSub)
+	case equals(l.input, s, e, onToken):
+		l.emitL(itemOn)
 	case equals(l.input, s, e, trueToken):
 		l.emitL(itemBoolVal)
 	case equals(l.input, s, e, falseToken):
@ -395,35 +403,15 @@ func isAlphaNumeric(r rune) bool {
 	return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
 }

-func equals(b []byte, s Pos, e Pos, val []byte) bool {
-	n := 0
-	for i := s; i < e; i++ {
-		if n >= len(val) {
-			return true
-		}
-		switch {
-		case b[i] >= 'A' && b[i] <= 'Z' && ('a'+(b[i]-'A')) != val[n]:
-			return false
-		case b[i] != val[n]:
-			return false
-		}
-		n++
-	}
-	return true
+func equals(b []byte, s, e Pos, val []byte) bool {
+	return bytes.EqualFold(b[s:e], val)
 }

-func contains(b []byte, s Pos, e Pos, val []byte) bool {
-	for i := s; i < e; i++ {
-		for n := 0; n < len(val); n++ {
-			if b[i] == val[n] {
-				return true
-			}
-		}
-	}
-	return false
+func contains(b []byte, s, e Pos, chars string) bool {
+	return bytes.ContainsAny(b[s:e], chars)
 }

-func lowercase(b []byte, s Pos, e Pos) {
+func lowercase(b []byte, s, e Pos) {
 	for i := s; i < e; i++ {
 		if b[i] >= 'A' && b[i] <= 'Z' {
 			b[i] = ('a' + (b[i] - 'A'))
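For reference, a minimal self-contained sketch of what the bytes-based replacements do (standard library only; the sample inputs below are made up). One behavioral nuance worth noting: the old hand-rolled equals also returned true when one argument was a case-insensitive prefix of the other, while bytes.EqualFold requires both inputs to fold to the same length.

	package main

	import (
		"bytes"
		"fmt"
	)

	func main() {
		// equals: case-insensitive match of a lexed span against a keyword token.
		fmt.Println(bytes.EqualFold([]byte("QUERY"), []byte("query"))) // true
		fmt.Println(bytes.EqualFold([]byte("que"), []byte("query")))  // false (the old loop returned true)

		// contains: does the span include any punctuator character?
		fmt.Println(bytes.ContainsAny([]byte("a:b"), `!():=[]{|}`)) // true
	}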
@ -3,10 +3,9 @@ package qcode
 import (
 	"errors"
 	"fmt"
+	"hash/maphash"
 	"sync"
 	"unsafe"

-	"github.com/dosco/super-graph/core/internal/util"
 )

 var (
@ -50,6 +49,19 @@ func (o *Operation) Reset() {
 	*o = zeroOperation
 }

+type Fragment struct {
+	Name    string
+	On      string
+	Fields  []Field
+	fieldsA [10]Field
+}
+
+var zeroFragment = Fragment{}
+
+func (f *Fragment) Reset() {
+	*f = zeroFragment
+}
+
 type Field struct {
 	ID       int32
 	ParentID int32
@ -59,11 +71,13 @@ type Field struct {
 	argsA     [5]Arg
 	Children  []int32
 	childrenA [5]int32
+	Union     bool
 }

 type Arg struct {
 	Name string
 	Val  *Node
+	df   bool
 }

 type Node struct {
@ -82,6 +96,8 @@ func (n *Node) Reset() {
 }

 type Parser struct {
+	frags map[uint64]*Fragment
+	h     maphash.Hash
 	input []byte // the string being scanned
 	pos   int
 	items []item
@ -96,12 +112,192 @@ var opPool = sync.Pool{
 	New: func() interface{} { return new(Operation) },
 }

+var fragPool = sync.Pool{
+	New: func() interface{} { return new(Fragment) },
+}
+
 var lexPool = sync.Pool{
 	New: func() interface{} { return new(lexer) },
 }

 func Parse(gql []byte) (*Operation, error) {
-	return parseSelectionSet(gql)
+	var err error
+
+	if len(gql) == 0 {
+		return nil, errors.New("blank query")
+	}
+
+	l := lexPool.Get().(*lexer)
+	l.Reset()
+	defer lexPool.Put(l)
+
+	if err = lex(l, gql); err != nil {
+		return nil, err
+	}
+
+	p := &Parser{
+		input: l.input,
+		pos:   -1,
+		items: l.items,
+	}
+
+	op := opPool.Get().(*Operation)
+	op.Reset()
+	op.Fields = op.fieldsA[:0]
+
+	s := -1
+	qf := false
+
+	for {
+		if p.peek(itemEOF) {
+			p.ignore()
+			break
+		}
+
+		if p.peek(itemFragment) {
+			p.ignore()
+			if f, err := p.parseFragment(); err != nil {
+				fragPool.Put(f)
+				return nil, err
+			}
+
+		} else {
+			if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
+				s = p.pos
+				qf = true
+			}
+			p.ignore()
+		}
+	}
+
+	p.reset(s)
+	if err := p.parseOp(op); err != nil {
+		return nil, err
+	}
+
+	for _, v := range p.frags {
+		fragPool.Put(v)
+	}
+
+	return op, nil
+}
+
+func (p *Parser) parseFragment() (*Fragment, error) {
+	var err error
+
+	frag := fragPool.Get().(*Fragment)
+	frag.Reset()
+	frag.Fields = frag.fieldsA[:0]
+
+	if p.peek(itemName) {
+		frag.Name = p.val(p.next())
+	} else {
+		return frag, errors.New("fragment: missing name")
+	}
+
+	if p.peek(itemOn) {
+		p.ignore()
+	} else {
+		return frag, errors.New("fragment: missing 'on' keyword")
+	}
+
+	if p.peek(itemName) {
+		frag.On = p.vall(p.next())
+	} else {
+		return frag, errors.New("fragment: missing table name after 'on' keyword")
+	}
+
+	if p.peek(itemObjOpen) {
+		p.ignore()
+	} else {
+		return frag, fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
+	}
+
+	frag.Fields, err = p.parseFields(frag.Fields)
+	if err != nil {
+		return frag, fmt.Errorf("fragment: %v", err)
+	}
+
+	if p.frags == nil {
+		p.frags = make(map[uint64]*Fragment)
+	}
+
+	_, _ = p.h.WriteString(frag.Name)
+	k := p.h.Sum64()
+	p.h.Reset()
+
+	p.frags[k] = frag
+
+	return frag, nil
+}
+
+func (p *Parser) parseOp(op *Operation) error {
+	var err error
+	var typeSet bool
+
+	if p.peek(itemQuery, itemMutation, itemSub) {
+		err = p.parseOpTypeAndArgs(op)
+
+		if err != nil {
+			return fmt.Errorf("%s: %v", op.Type, err)
+		}
+		typeSet = true
+	}
+
+	if p.peek(itemObjOpen) {
+		p.ignore()
+		if !typeSet {
+			op.Type = opQuery
+		}
+
+		for {
+			if p.peek(itemEOF, itemFragment) {
+				p.ignore()
+				break
+			}
+
+			op.Fields, err = p.parseFields(op.Fields)
+			if err != nil {
+				return fmt.Errorf("%s: %v", op.Type, err)
+			}
+		}
+	} else {
+		return fmt.Errorf("expecting a query, mutation or subscription, got: %s", p.next())
+	}
+
+	return nil
+}
+
+func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
+	item := p.next()
+
+	switch item._type {
+	case itemQuery:
+		op.Type = opQuery
+	case itemMutation:
+		op.Type = opMutate
+	case itemSub:
+		op.Type = opSub
+	}
+
+	op.Args = op.argsA[:0]
+
+	var err error
+
+	if p.peek(itemName) {
+		op.Name = p.val(p.next())
+	}
+
+	if p.peek(itemArgsOpen) {
+		p.ignore()
+
+		op.Args, err = p.parseOpParams(op.Args)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
 }
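A self-contained sketch of the hashing scheme parseFragment uses to key fragments: a single hash/maphash.Hash is written with the fragment name, Sum64 becomes the map key, and the hash is reset between uses. The fragment names here are illustrative only.

	package main

	import (
		"fmt"
		"hash/maphash"
	)

	func main() {
		var h maphash.Hash // the zero Hash picks its own random seed
		frags := map[uint64]string{}

		for _, name := range []string{"userFields1", "userFields2"} {
			h.Reset()
			h.WriteString(name) // maphash writes never fail
			frags[h.Sum64()] = name
		}

		// Resolving a spread ("...userFields1") re-hashes the name the same way.
		h.Reset()
		h.WriteString("userFields1")
		fmt.Println(frags[h.Sum64()]) // userFields1
	}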

 func ParseArgValue(argVal string) (*Node, error) {
@ -123,228 +319,158 @@ func ParseArgValue(argVal string) (*Node, error) {
 	return op, err
 }

-func parseSelectionSet(gql []byte) (*Operation, error) {
-	var err error
-
-	if len(gql) == 0 {
-		return nil, errors.New("blank query")
-	}
-
-	l := lexPool.Get().(*lexer)
-	l.Reset()
-
-	if err = lex(l, gql); err != nil {
-		return nil, err
-	}
-
-	p := &Parser{
-		input: l.input,
-		pos:   -1,
-		items: l.items,
-	}
-
-	var op *Operation
-
-	if p.peek(itemObjOpen) {
-		p.ignore()
-		op, err = p.parseQueryOp()
-	} else {
-		op, err = p.parseOp()
-	}
-
-	if err != nil {
-		return nil, err
-	}
-
-	if p.peek(itemObjClose) {
-		p.ignore()
-	} else {
-		return nil, fmt.Errorf("operation missing closing '}'")
-	}
-
-	if !p.peek(itemEOF) {
-		p.ignore()
-		return nil, fmt.Errorf("invalid '%s' found after closing '}'", p.current())
-	}
-
-	lexPool.Put(l)
-
-	return op, err
-}
-
-func (p *Parser) next() item {
-	n := p.pos + 1
-	if n >= len(p.items) {
-		p.err = errEOT
-		return item{_type: itemEOF}
-	}
-	p.pos = n
-	return p.items[p.pos]
-}
-
-func (p *Parser) ignore() {
-	n := p.pos + 1
-	if n >= len(p.items) {
-		p.err = errEOT
-		return
-	}
-	p.pos = n
-}
-
-func (p *Parser) current() string {
-	item := p.items[p.pos]
-	return b2s(p.input[item.pos:item.end])
-}
-
-func (p *Parser) peek(types ...itemType) bool {
-	n := p.pos + 1
-	// if p.items[n]._type == itemEOF {
-	// 	return false
-	// }
-	if n >= len(p.items) {
-		return false
-	}
-	for i := 0; i < len(types); i++ {
-		if p.items[n]._type == types[i] {
-			return true
-		}
-	}
-	return false
-}
-
-func (p *Parser) parseOp() (*Operation, error) {
-	if !p.peek(itemQuery, itemMutation, itemSub) {
-		err := errors.New("expecting a query, mutation or subscription")
-		return nil, err
-	}
-	item := p.next()
-
-	op := opPool.Get().(*Operation)
-	op.Reset()
-
-	switch item._type {
-	case itemQuery:
-		op.Type = opQuery
-	case itemMutation:
-		op.Type = opMutate
-	case itemSub:
-		op.Type = opSub
-	}
-
-	op.Fields = op.fieldsA[:0]
-	op.Args = op.argsA[:0]
-
-	var err error
-
-	if p.peek(itemName) {
-		op.Name = p.val(p.next())
-	}
-
-	if p.peek(itemArgsOpen) {
-		p.ignore()
-
-		op.Args, err = p.parseOpParams(op.Args)
-		if err != nil {
-			return nil, err
-		}
-	}
-
-	if p.peek(itemObjOpen) {
-		p.ignore()
-
-		for n := 0; n < 10; n++ {
-			if !p.peek(itemName) {
-				break
-			}
-
-			op.Fields, err = p.parseFields(op.Fields)
-			if err != nil {
-				return nil, err
-			}
-		}
-	}
-
-	return op, nil
-}
-
-func (p *Parser) parseQueryOp() (*Operation, error) {
-	op := opPool.Get().(*Operation)
-	op.Reset()
-
-	op.Type = opQuery
-	op.Fields = op.fieldsA[:0]
-	op.Args = op.argsA[:0]
-
-	var err error
-
-	for n := 0; n < 10; n++ {
-		if !p.peek(itemName) {
-			break
-		}
-
-		op.Fields, err = p.parseFields(op.Fields)
-		if err != nil {
-			return nil, err
-		}
-	}
-
-	return op, nil
-}
-
 func (p *Parser) parseFields(fields []Field) ([]Field, error) {
-	st := util.NewStack()
+	var err error
+	st := NewStack()
+
+	if !p.peek(itemName, itemSpread) {
+		return nil, fmt.Errorf("unexpected token: %s", p.peekNext())
+	}

 	for {
-		if len(fields) >= maxFields {
-			return nil, fmt.Errorf("too many fields (max %d)", maxFields)
+		if p.peek(itemEOF) {
+			p.ignore()
+			return nil, errors.New("invalid query")
 		}

 		if p.peek(itemObjClose) {
 			p.ignore()
-			st.Pop()

-			if st.Len() == 0 {
-				break
-			} else {
+			if st.Len() != 0 {
+				st.Pop()
 				continue
+			} else {
+				break
 			}
 		}

-		if !p.peek(itemName) {
-			return nil, errors.New("expecting an alias or field name")
+		if len(fields) >= maxFields {
+			return nil, fmt.Errorf("too many fields (max %d)", maxFields)
 		}

-		fields = append(fields, Field{ID: int32(len(fields))})
+		isFrag := false

-		f := &fields[(len(fields) - 1)]
-		f.Args = f.argsA[:0]
-		f.Children = f.childrenA[:0]
+		if p.peek(itemSpread) {
+			p.ignore()
+			isFrag = true
+		}

-		// Parse the inside of the the fields () parentheses
-		// in short parse the args like id, where, etc
-		if err := p.parseField(f); err != nil {
-			return nil, err
-		}
-
-		intf := st.Peek()
-		if pid, ok := intf.(int32); ok {
-			f.ParentID = pid
-			fields[pid].Children = append(fields[pid].Children, f.ID)
-		} else {
-			f.ParentID = -1
-		}
-
-		// The first opening curley brackets after this
-		// comes the columns or child fields
-		if p.peek(itemObjOpen) {
-			p.ignore()
-			st.Push(f.ID)
-
-		} else if p.peek(itemObjClose) {
-			if st.Len() == 0 {
-				break
-			} else {
-				continue
-			}
-		}
-	}
+		if isFrag {
+			fields, err = p.parseFragmentFields(st, fields)
+		} else {
+			fields, err = p.parseNormalFields(st, fields)
+		}
+
+		if err != nil {
+			return nil, err
+		}
+	}
+
+	return fields, nil
+}
+
+func (p *Parser) parseNormalFields(st *Stack, fields []Field) ([]Field, error) {
+	if !p.peek(itemName) {
+		return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
+	}
+
+	fields = append(fields, Field{ID: int32(len(fields))})
+
+	f := &fields[(len(fields) - 1)]
+	f.Args = f.argsA[:0]
+	f.Children = f.childrenA[:0]
+
+	// Parse the field
+	if err := p.parseField(f); err != nil {
+		return nil, err
+	}
+
+	if st.Len() == 0 {
+		f.ParentID = -1
+	} else {
+		pid := st.Peek()
+		f.ParentID = pid
+		fields[pid].Children = append(fields[pid].Children, f.ID)
+	}
+
+	// The first opening curley brackets after this
+	// comes the columns or child fields
+	if p.peek(itemObjOpen) {
+		p.ignore()
+		st.Push(f.ID)
+	}
+
+	return fields, nil
+}
+
+func (p *Parser) parseFragmentFields(st *Stack, fields []Field) ([]Field, error) {
+	var err error
+	pid := st.Peek()
+
+	if p.peek(itemOn) {
+		p.ignore()
+		fields[pid].Union = true
+
+		if fields, err = p.parseNormalFields(st, fields); err != nil {
 			return nil, err
 		}

+		// If parent is a union selector than copy over args from the parent
+		// to the first child which is the root selector for each union type.
+		for i := pid + 1; i < int32(len(fields)); i++ {
+			f := &fields[i]
+			if f.ParentID == pid {
+				f.Args = fields[pid].Args
+			}
 		}

+	} else {
+		if !p.peek(itemName) {
+			return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
+		}
+
+		name := p.val(p.next())
+		_, _ = p.h.WriteString(name)
+		id := p.h.Sum64()
+		p.h.Reset()
+
+		fr, ok := p.frags[id]
+		if !ok {
+			return nil, fmt.Errorf("no fragment named '%s' defined", name)
+		}
+		ff := fr.Fields
+
+		n := int32(len(fields))
+		fields = append(fields, ff...)
+
+		for i := 0; i < len(ff); i++ {
+			k := (n + int32(i))
+			f := &fields[k]
+			f.ID = int32(k)
+
+			// If this is the top-level point the parent to the parent of the
+			// previous field.
+			if f.ParentID == -1 {
+				f.ParentID = pid
+				if f.ParentID != -1 {
+					fields[pid].Children = append(fields[pid].Children, f.ID)
+				}
+				// Update all the other parents id's by our new place in this new array
 			} else {
-				continue
+				f.ParentID += n
+			}
+
+			// Copy over children since fields append is not a deep copy
+			f.Children = make([]int32, len(f.Children))
+			copy(f.Children, ff[i].Children)
+
+			// Copy over args since args append is not a deep copy
+			f.Args = make([]Arg, len(f.Args))
+			copy(f.Args, ff[i].Args)
+
+			// Update all the children which is needed.
+			for j := range f.Children {
+				f.Children[j] += n
 			}
 		}
 	}
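parseFields now takes a package-local *Stack instead of util.NewStack's interface{}-based stack, which removes the int32 type assertions seen in the deleted code. The Stack type itself is not part of this diff; a minimal int32 version consistent with the calls above (Len, Push, Pop, Peek) might look like this. Hypothetical reconstruction; the real implementation may differ.

	type Stack struct {
		st []int32
	}

	func NewStack() *Stack        { return &Stack{} }
	func (s *Stack) Len() int     { return len(s.st) }
	func (s *Stack) Push(v int32) { s.st = append(s.st, v) }

	// Pop removes and returns the top of the stack.
	func (s *Stack) Pop() int32 {
		v := s.st[len(s.st)-1]
		s.st = s.st[:len(s.st)-1]
		return v
	}

	// Peek returns the top of the stack without removing it.
	func (s *Stack) Peek() int32 {
		return s.st[len(s.st)-1]
	}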
@ -385,7 +511,7 @@ func (p *Parser) parseOpParams(args []Arg) ([]Arg, error) {
 		return nil, fmt.Errorf("too many args (max %d)", maxArgs)
 	}

-	if p.peek(itemArgsClose) {
+	if p.peek(itemEOF, itemArgsClose) {
 		p.ignore()
 		break
 	}
@ -403,7 +529,7 @@ func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
 		return nil, fmt.Errorf("too many args (max %d)", maxArgs)
 	}

-	if p.peek(itemArgsClose) {
+	if p.peek(itemEOF, itemArgsClose) {
 		p.ignore()
 		break
 	}
@ -445,10 +571,8 @@ func (p *Parser) parseList() (*Node, error) {
 		}
 		if ty == 0 {
 			ty = node.Type
-		} else {
-			if ty != node.Type {
-				return nil, errors.New("All values in a list must be of the same type")
-			}
+		} else if ty != node.Type {
+			return nil, errors.New("All values in a list must be of the same type")
 		}
 		node.Parent = parent
 		nodes = append(nodes, node)
@ -470,7 +594,7 @@ func (p *Parser) parseObj() (*Node, error) {
 	parent.Reset()

 	for {
-		if p.peek(itemObjClose) {
+		if p.peek(itemEOF, itemObjClose) {
 			p.ignore()
 			break
 		}
@ -545,6 +669,57 @@ func (p *Parser) vall(v item) string {
 	return b2s(p.input[v.pos:v.end])
 }

+func (p *Parser) peek(types ...itemType) bool {
+	n := p.pos + 1
+	l := len(types)
+	// if p.items[n]._type == itemEOF {
+	// 	return false
+	// }
+
+	if n >= len(p.items) {
+		return types[0] == itemEOF
+	}
+
+	if l == 1 {
+		return p.items[n]._type == types[0]
+	}
+
+	for i := 0; i < l; i++ {
+		if p.items[n]._type == types[i] {
+			return true
+		}
+	}
+	return false
+}
+
+func (p *Parser) next() item {
+	n := p.pos + 1
+	if n >= len(p.items) {
+		p.err = errEOT
+		return item{_type: itemEOF}
+	}
+	p.pos = n
+	return p.items[p.pos]
+}
+
+func (p *Parser) ignore() {
+	n := p.pos + 1
+	if n >= len(p.items) {
+		p.err = errEOT
+		return
+	}
+	p.pos = n
+}
+
+func (p *Parser) peekNext() string {
+	item := p.items[p.pos+1]
+	return b2s(p.input[item.pos:item.end])
+}
+
+func (p *Parser) reset(to int) {
+	p.pos = to
+}
+
 func b2s(b []byte) string {
 	return *(*string)(unsafe.Pointer(&b))
 }
@ -578,34 +753,9 @@ func (t parserType) String() string {
 	case NodeList:
 		v = "node-list"
 	}
-	return fmt.Sprintf("<%s>", v)
+	return v
 }

-// type Frees struct {
-// 	n   *Node
-// 	loc int
-// }
-
-// var freeList []Frees
-
-// func FreeNode(n *Node, loc int) {
-// 	j := -1
-
-// 	for i := range freeList {
-// 		if n == freeList[i].n {
-// 			j = i
-// 			break
-// 		}
-// 	}
-
-// 	if j == -1 {
-// 		nodePool.Put(n)
-// 		freeList = append(freeList, Frees{n, loc})
-// 	} else {
-// 		fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
-// 	}
-// }
-
-func FreeNode(n *Node, loc int) {
+func FreeNode(n *Node) {
 	nodePool.Put(n)
 }

@ -2,8 +2,9 @@ package qcode
 import (
 	"errors"
-	"github.com/chirino/graphql/schema"
 	"testing"
+
+	"github.com/chirino/graphql/schema"
 )

 func TestCompile1(t *testing.T) {
@ -120,7 +121,7 @@ updateThread {
 		}
 	}
 }
-}`
+}}`
 	qcompile, _ := NewCompiler(Config{})
 	_, err := qcompile.Compile([]byte(gql), "anon")

@ -130,6 +131,93 @@ updateThread {
 }

+func TestFragmentsCompile1(t *testing.T) {
+	gql := `
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}
+	`
+	qcompile, _ := NewCompiler(Config{})
+	_, err := qcompile.Compile([]byte(gql), "user")
+
+	if err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestFragmentsCompile2(t *testing.T) {
+	gql := `
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}`
+	qcompile, _ := NewCompiler(Config{})
+	_, err := qcompile.Compile([]byte(gql), "user")
+
+	if err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestFragmentsCompile3(t *testing.T) {
+	gql := `
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}
+
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+	`
+	qcompile, _ := NewCompiler(Config{})
+	_, err := qcompile.Compile([]byte(gql), "user")
+
+	if err != nil {
+		t.Fatal(err)
+	}
+}
+
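The three tests above differ only in where the fragment definitions sit relative to the query, which exercises the parser's pre-scan pass (fragments are collected first, then the operation is parsed from the saved position). Assuming the repo layout, just these tests can be run with:

	go test -run TestFragmentsCompile ./core/internal/qcode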
 var gql = []byte(`
 	{products(
 		# returns only 30 items
@ -151,6 +239,29 @@ var gql = []byte(`
 		price
 	}}`)

+var gqlWithFragments = []byte(`
+fragment userFields1 on user {
+	id
+	email
+	__typename
+}
+
+query {
+	users {
+		...userFields2
+
+		created_at
+		...userFields1
+		__typename
+	}
+}
+
+fragment userFields2 on user {
+	first_name
+	last_name
+	__typename
+}`)
+
 func BenchmarkQCompile(b *testing.B) {
 	qcompile, _ := NewCompiler(Config{})

@ -183,8 +294,22 @@ func BenchmarkQCompileP(b *testing.B) {
 	})
 }

-func BenchmarkParse(b *testing.B) {
+func BenchmarkQCompileFragment(b *testing.B) {
+	qcompile, _ := NewCompiler(Config{})
+
+	b.ResetTimer()
+	b.ReportAllocs()
+	for n := 0; n < b.N; n++ {
+		_, err := qcompile.Compile(gqlWithFragments, "user")
+
+		if err != nil {
+			b.Fatal(err)
+		}
+	}
+
+}
+
+func BenchmarkParse(b *testing.B) {
 	b.ResetTimer()
 	b.ReportAllocs()
 	for n := 0; n < b.N; n++ {
@ -211,6 +336,18 @@ func BenchmarkParseP(b *testing.B) {
 	})
 }

+func BenchmarkParseFragment(b *testing.B) {
+	b.ResetTimer()
+	b.ReportAllocs()
+	for n := 0; n < b.N; n++ {
+		_, err := Parse(gqlWithFragments)
+
+		if err != nil {
+			b.Fatal(err)
+		}
+	}
+}
+
 func BenchmarkSchemaParse(b *testing.B) {

 	b.ResetTimer()
@ -12,6 +12,7 @@ import (
 )

 type QType int
+type SType int
 type Action int

 const (
@ -19,7 +20,8 @@ const (
 )

 const (
-	QTQuery QType = iota + 1
+	QTUnknown QType = iota
+	QTQuery
 	QTMutation
 	QTInsert
 	QTUpdate
@ -27,6 +29,12 @@ const (
 	QTUpsert
 )

+const (
+	STNone SType = iota
+	STUnion
+	STMember
+)
+
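Starting the enum at QTUnknown = iota (instead of QTQuery QType = iota + 1) gives the zero value of QType an explicit name, so an unset query type is distinguishable without relying on an unnamed 0. An illustrative fragment:

	var qt QType                 // zero value
	fmt.Println(qt == QTUnknown) // true; before this change, 0 matched no named constant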
 type QCode struct {
 	Type      QType
 	ActionVar string
@ -38,6 +46,8 @@ type QCode struct {
 type Select struct {
 	ID        int32
 	ParentID  int32
+	UParentID int32
+	Type      SType
 	Args      map[string]*Node
 	Name      string
 	FieldName string
@ -277,6 +287,7 @@ func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
 		return nil, err
 	}

+	freeNodes(op)
 	opPool.Put(op)

 	return &qc, nil
@ -371,7 +382,11 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
 		})
 		s := &selects[(len(selects) - 1)]

-		if len(field.Alias) != 0 {
+		if field.Union {
+			s.Type = STUnion
+		}
+
+		if field.Alias != "" {
 			s.FieldName = field.Alias
 		} else {
 			s.FieldName = s.Name
@ -382,6 +397,11 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
 		} else {
 			p := &selects[s.ParentID]
 			p.Children = append(p.Children, s.ID)
+
+			if p.Type == STUnion {
+				s.Type = STMember
+				s.UParentID = p.ParentID
+			}
 		}

 		if skipRender {
@ -419,6 +439,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
|||||||
com.AddFilters(qc, s, role)
|
com.AddFilters(qc, s, role)
|
||||||
|
|
||||||
s.Cols = make([]Column, 0, len(field.Children))
|
s.Cols = make([]Column, 0, len(field.Children))
|
||||||
|
cm := make(map[string]struct{})
|
||||||
action = QTQuery
|
action = QTQuery
|
||||||
|
|
||||||
for _, cid := range field.Children {
|
for _, cid := range field.Children {
|
||||||
@ -428,19 +449,27 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
|||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
|
var fname string
|
||||||
|
|
||||||
|
if f.Alias != "" {
|
||||||
|
fname = f.Alias
|
||||||
|
} else {
|
||||||
|
fname = f.Name
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := cm[fname]; ok {
|
||||||
|
continue
|
||||||
|
} else {
|
||||||
|
cm[fname] = struct{}{}
|
||||||
|
}
|
||||||
|
|
||||||
if len(f.Children) != 0 {
|
if len(f.Children) != 0 {
|
||||||
val := f.ID | (s.ID << 16)
|
val := f.ID | (s.ID << 16)
|
||||||
st.Push(val)
|
st.Push(val)
|
||||||
continue
|
continue
|
||||||
}
|
}
|
||||||
|
|
||||||
col := Column{Name: f.Name}
|
col := Column{Name: f.Name, FieldName: fname}
|
||||||
|
|
||||||
if len(f.Alias) != 0 {
|
|
||||||
col.FieldName = f.Alias
|
|
||||||
} else {
|
|
||||||
col.FieldName = f.Name
|
|
||||||
}
|
|
||||||
s.Cols = append(s.Cols, col)
|
s.Cols = append(s.Cols, col)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -452,6 +481,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
qc.Selects = selects[:id]
|
qc.Selects = selects[:id]
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
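The new `cm` map deduplicates selected columns by their rendered field name, the alias if present, otherwise the column name, so a query cannot emit the same JSON key twice. The same pattern in isolation:

```go
package main

import "fmt"

type field struct{ Name, Alias string }

// dedupe keeps the first field for each rendered name, mirroring the
// alias-or-name logic the compiler uses to populate its cm map.
func dedupe(fields []field) []string {
	seen := make(map[string]struct{})
	var out []string
	for _, f := range fields {
		fname := f.Name
		if f.Alias != "" {
			fname = f.Alias
		}
		if _, ok := seen[fname]; ok {
			continue // duplicate rendered name; would collide in the JSON output
		}
		seen[fname] = struct{}{}
		out = append(out, fname)
	}
	return out
}

func main() {
	fmt.Println(dedupe([]field{{Name: "id"}, {Name: "id"}, {Name: "email", Alias: "id"}}))
	// Output: [id]
}
```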
@@ -483,50 +513,42 @@ func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
 func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg, role string) error {
 	var err error

-	// don't free this arg either previously done or will be free'd
-	// in the future like in psql
-	var df bool
-
 	for i := range args {
 		arg := &args[i]

 		switch arg.Name {
 		case "id":
-			err, df = com.compileArgID(sel, arg)
+			err = com.compileArgID(sel, arg)
 		case "search":
-			err, df = com.compileArgSearch(sel, arg)
+			err = com.compileArgSearch(sel, arg)
 		case "where":
-			err, df = com.compileArgWhere(sel, arg, role)
+			err = com.compileArgWhere(sel, arg, role)
 		case "orderby", "order_by", "order":
-			err, df = com.compileArgOrderBy(sel, arg)
+			err = com.compileArgOrderBy(sel, arg)
 		case "distinct_on", "distinct":
-			err, df = com.compileArgDistinctOn(sel, arg)
+			err = com.compileArgDistinctOn(sel, arg)
 		case "limit":
-			err, df = com.compileArgLimit(sel, arg)
+			err = com.compileArgLimit(sel, arg)
 		case "offset":
-			err, df = com.compileArgOffset(sel, arg)
+			err = com.compileArgOffset(sel, arg)
 		case "first":
-			err, df = com.compileArgFirstLast(sel, arg, PtForward)
+			err = com.compileArgFirstLast(sel, arg, PtForward)
 		case "last":
-			err, df = com.compileArgFirstLast(sel, arg, PtBackward)
+			err = com.compileArgFirstLast(sel, arg, PtBackward)
 		case "after":
-			err, df = com.compileArgAfterBefore(sel, arg, PtForward)
+			err = com.compileArgAfterBefore(sel, arg, PtForward)
 		case "before":
-			err, df = com.compileArgAfterBefore(sel, arg, PtBackward)
-		}
-
-		if !df {
-			FreeNode(arg.Val, 5)
+			err = com.compileArgAfterBefore(sel, arg, PtBackward)
 		}

 		if err != nil {
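The `compileArg*` helpers used to return an `(error, bool)` pair, the bool telling `compileArgs` whether the argument's node tree could be freed right away. That bookkeeping now lives on the argument itself: a helper that retains the node sets a `df` ("don't free") flag, and `freeNodes` (added at the bottom of this file) releases everything not so marked. A distilled sketch of the flag-based scheme, assuming an `Arg.df` field as the diff shows:

```go
package main

import "fmt"

type node struct{ pooled bool }

type arg struct {
	val *node
	df  bool // "don't free": set when val is retained, e.g. stored in sel.Args
}

// compileSearch keeps a reference to the value node, so it marks df
// instead of returning a second bool for the caller to interpret.
func compileSearch(a *arg) error {
	a.df = true
	return nil
}

// freeArgs returns every unretained value to the pool in one pass.
func freeArgs(args []*arg) {
	for _, a := range args {
		if !a.df {
			a.val.pooled = true // stand-in for nodePool.Put(a.val)
		}
	}
}

func main() {
	a, b := &arg{val: &node{}}, &arg{val: &node{}}
	if err := compileSearch(a); err != nil {
		panic(err)
	}
	freeArgs([]*arg{a, b})
	fmt.Println(a.val.pooled, b.val.pooled) // false true
}
```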
@@ -607,14 +629,12 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
 		}

 		// Objects inside a list
-		if len(node.Name) == 0 {
+		if node.Name == "" {
 			pushChildren(st, node.exp, node)
 			continue

-		} else {
-			if _, ok := com.bl[node.Name]; ok {
-				continue
-			}
+		} else if _, ok := com.bl[node.Name]; ok {
+			continue
 		}

 		ex, err := newExp(st, node, usePool)
@@ -637,39 +657,20 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
 		}
 	}

-	if usePool {
-		st.Push(node)
-
-		for {
-			if st.Len() == 0 {
-				break
-			}
-			intf := st.Pop()
-			node, ok := intf.(*Node)
-			if !ok || node == nil {
-				continue
-			}
-			for i := range node.Children {
-				st.Push(node.Children[i])
-			}
-			FreeNode(node, 1)
-		}
-	}
-
 	return root, needsUser, nil
 }

-func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
+func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
 	if sel.ID != 0 {
-		return nil, false
+		return nil
 	}

 	if sel.Where != nil && sel.Where.Op == OpEqID {
-		return nil, false
+		return nil
 	}

 	if arg.Val.Type != NodeVar {
-		return argErr("id", "variable"), false
+		return argErr("id", "variable")
 	}

 	ex := expPool.Get().(*Exp)
@@ -680,12 +681,12 @@ func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
 	ex.Val = arg.Val.Val

 	sel.Where = ex
-	return nil, false
+	return nil
 }

-func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
+func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
 	if arg.Val.Type != NodeVar {
-		return argErr("search", "variable"), false
+		return argErr("search", "variable")
 	}

 	ex := expPool.Get().(*Exp)
@@ -700,18 +701,19 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
 	}

 	sel.Args[arg.Name] = arg.Val
+	arg.df = true
 	AddFilter(sel, ex)

-	return nil, true
+	return nil
 }

-func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error, bool) {
+func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
 	st := util.NewStack()
 	var err error

 	ex, nu, err := com.compileArgObj(st, arg)
 	if err != nil {
-		return err, false
+		return err
 	}

 	if nu && role == "anon" {
@@ -719,12 +721,12 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
 	}
 	AddFilter(sel, ex)

-	return nil, true
+	return nil
 }

-func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
+func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
 	if arg.Val.Type != NodeObj {
-		return fmt.Errorf("expecting an object"), false
+		return fmt.Errorf("expecting an object")
 	}

 	st := util.NewStack()
@@ -742,16 +744,15 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
 		node, ok := intf.(*Node)

 		if !ok || node == nil {
-			return fmt.Errorf("17: unexpected value %v (%t)", intf, intf), false
+			return fmt.Errorf("17: unexpected value %v (%t)", intf, intf)
 		}

 		if _, ok := com.bl[node.Name]; ok {
-			FreeNode(node, 2)
 			continue
 		}

 		if node.Type != NodeStr && node.Type != NodeVar {
-			return fmt.Errorf("expecting a string or variable"), false
+			return fmt.Errorf("expecting a string or variable")
 		}

 		ob := &OrderBy{}
@@ -770,25 +771,24 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
 		case "desc_nulls_last":
 			ob.Order = OrderDescNullsLast
 		default:
-			return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first"), false
+			return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first")
 		}

 		setOrderByColName(ob, node)
 		sel.OrderBy = append(sel.OrderBy, ob)
-		FreeNode(node, 3)
 	}
-	return nil, false
+	return nil
 }

-func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
+func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {
 	node := arg.Val

 	if _, ok := com.bl[node.Name]; ok {
-		return nil, false
+		return nil
 	}

 	if node.Type != NodeList && node.Type != NodeStr {
-		return fmt.Errorf("expecting a list of strings or just a string"), false
+		return fmt.Errorf("expecting a list of strings or just a string")
 	}

 	if node.Type == NodeStr {
@@ -797,58 +797,57 @@ func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {

 	for i := range node.Children {
 		sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val)
-		FreeNode(node.Children[i], 5)
 	}

-	return nil, false
+	return nil
 }

-func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) (error, bool) {
+func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) error {
 	node := arg.Val

 	if node.Type != NodeInt {
-		return argErr("limit", "number"), false
+		return argErr("limit", "number")
 	}

 	sel.Paging.Limit = node.Val

-	return nil, false
+	return nil
 }

-func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) (error, bool) {
+func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error {
 	node := arg.Val

 	if node.Type != NodeVar {
-		return argErr("offset", "variable"), false
+		return argErr("offset", "variable")
 	}

 	sel.Paging.Offset = node.Val
-	return nil, false
+	return nil
 }

-func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) (error, bool) {
+func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) error {
 	node := arg.Val

 	if node.Type != NodeInt {
-		return argErr(arg.Name, "number"), false
+		return argErr(arg.Name, "number")
 	}

 	sel.Paging.Type = pt
 	sel.Paging.Limit = node.Val

-	return nil, false
+	return nil
 }

-func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) (error, bool) {
+func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) error {
 	node := arg.Val

 	if node.Type != NodeVar || node.Val != "cursor" {
-		return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name), false
+		return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name)
 	}
 	sel.Paging.Type = pt
 	sel.Paging.Cursor = true

-	return nil, false
+	return nil
 }

 // var zeroTrv = &trval{}
@@ -1049,7 +1048,7 @@ func setWhereColName(ex *Exp, node *Node) {
 		if n.Type != NodeObj {
 			continue
 		}
-		if len(n.Name) != 0 {
+		if n.Name != "" {
 			k := n.Name
 			if k == "and" || k == "or" || k == "not" ||
 				k == "_and" || k == "_or" || k == "_not" {
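Several of these helpers draw `Exp` values from `expPool` instead of allocating fresh ones. That is the standard `sync.Pool` pattern; a self-contained sketch (the field names here are illustrative, not the real Exp struct):

```go
package main

import (
	"fmt"
	"sync"
)

type Exp struct {
	Op  int
	Val string
}

var expPool = sync.Pool{
	New: func() interface{} { return new(Exp) },
}

func main() {
	ex := expPool.Get().(*Exp) // reuse a pooled Exp, or allocate via New
	ex.Op, ex.Val = 1, "$id"
	fmt.Println(*ex)

	*ex = Exp{}     // reset before returning so stale fields don't leak
	expPool.Put(ex) // hand it back for the next Get
}
```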
@@ -1228,3 +1227,81 @@ func FreeExp(ex *Exp) {
 func argErr(name, ty string) error {
 	return fmt.Errorf("value for argument '%s' must be a %s", name, ty)
 }
+
+func freeNodes(op *Operation) {
+	var st *util.Stack
+	fm := make(map[*Node]struct{})
+
+	for i := range op.Args {
+		arg := op.Args[i]
+		if arg.df {
+			continue
+		}
+
+		for i := range arg.Val.Children {
+			if st == nil {
+				st = util.NewStack()
+			}
+			c := arg.Val.Children[i]
+			if _, ok := fm[c]; !ok {
+				st.Push(c)
+			}
+		}
+
+		if _, ok := fm[arg.Val]; !ok {
+			nodePool.Put(arg.Val)
+			fm[arg.Val] = struct{}{}
+		}
+	}
+
+	for i := range op.Fields {
+		f := op.Fields[i]
+
+		for j := range f.Args {
+			arg := f.Args[j]
+			if arg.df {
+				continue
+			}
+
+			for k := range arg.Val.Children {
+				if st == nil {
+					st = util.NewStack()
+				}
+				c := arg.Val.Children[k]
+				if _, ok := fm[c]; !ok {
+					st.Push(c)
+				}
+			}
+
+			if _, ok := fm[arg.Val]; !ok {
+				nodePool.Put(arg.Val)
+				fm[arg.Val] = struct{}{}
+			}
+		}
+	}
+
+	if st == nil {
+		return
+	}
+
+	for {
+		if st.Len() == 0 {
+			break
+		}
+
+		intf := st.Pop()
+		node, ok := intf.(*Node)
+		if !ok || node == nil {
+			continue
+		}
+
+		for i := range node.Children {
+			st.Push(node.Children[i])
+		}
+
+		if _, ok := fm[node]; !ok {
+			nodePool.Put(node)
+			fm[node] = struct{}{}
+		}
+	}
+}
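`freeNodes` walks each argument tree iteratively with an explicit stack, and the `fm` set guarantees a node shared between trees is returned to the pool at most once. The traversal skeleton, reduced to its essentials:

```go
package main

import "fmt"

type Node struct {
	Name     string
	Children []*Node
}

// release visits every reachable node exactly once, even when children
// are shared between parents, and reports how many it would pool.
func release(roots []*Node) int {
	seen := make(map[*Node]struct{})
	stack := append([]*Node(nil), roots...)
	freed := 0
	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if n == nil {
			continue
		}
		if _, ok := seen[n]; ok {
			continue // already pooled; a second Put would corrupt the pool
		}
		seen[n] = struct{}{}
		stack = append(stack, n.Children...)
		freed++ // stand-in for nodePool.Put(n)
	}
	return freed
}

func main() {
	shared := &Node{Name: "shared"}
	a := &Node{Name: "a", Children: []*Node{shared}}
	b := &Node{Name: "b", Children: []*Node{shared}}
	fmt.Println(release([]*Node{a, b})) // 3, not 4
}
```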
@@ -29,6 +29,8 @@ func al(b byte) bool {

 func (qt QType) String() string {
 	switch qt {
+	case QTUnknown:
+		return "unknown"
 	case QTQuery:
 		return "query"
 	case QTMutation:
core/prepare.go

@@ -2,126 +2,93 @@ package core

 import (
 	"bytes"
-	"context"
-	"crypto/sha256"
 	"database/sql"
-	"encoding/hex"
 	"fmt"
+	"hash/maphash"
 	"io"
 	"strings"
+	"sync"

 	"github.com/dosco/super-graph/core/internal/allow"
 	"github.com/dosco/super-graph/core/internal/qcode"
 )

-type preparedItem struct {
+type query struct {
+	sync.Once
 	sd      *sql.Stmt
+	ai      allow.Item
+	qt      qcode.QType
+	err     error
 	st      stmt
 	roleArg bool
 }

-func (sg *SuperGraph) initPrepared() error {
-	ct := context.Background()
-
+func (sg *SuperGraph) prepare(q *query, role string) {
+	var stmts []stmt
+	var err error
+
+	qb := []byte(q.ai.Query)
+	vars := []byte(q.ai.Vars)
+
+	switch q.qt {
+	case qcode.QTQuery:
+		if sg.abacEnabled {
+			stmts, err = sg.buildMultiStmt(qb, vars)
+		} else {
+			stmts, err = sg.buildRoleStmt(qb, vars, role)
+		}
+
+	case qcode.QTMutation:
+		stmts, err = sg.buildRoleStmt(qb, vars, role)
+	}
+
+	if err != nil {
+		sg.log.Printf("WRN %s %s: %v", q.qt, q.ai.Name, err)
+	}
+
+	q.st = stmts[0]
+	q.roleArg = len(stmts) > 1
+
+	q.sd, err = sg.db.Prepare(q.st.sql)
+	if err != nil {
+		q.err = fmt.Errorf("prepare failed: %v: %s", err, q.st.sql)
+	}
+}
+
+func (sg *SuperGraph) initPrepared() error {
 	if sg.allowList.IsPersist() {
 		return nil
 	}
-	sg.prepared = make(map[string]*preparedItem)

-	tx, err := sg.db.BeginTx(ct, nil)
-	if err != nil {
-		return err
-	}
-	defer tx.Rollback() //nolint: errcheck
-
-	if err = sg.prepareRoleStmt(tx); err != nil {
-		return fmt.Errorf("prepareRoleStmt: %w", err)
+	if err := sg.prepareRoleStmt(); err != nil {
+		return fmt.Errorf("role query: %w", err)
 	}

-	if err := tx.Commit(); err != nil {
-		return err
-	}
-
-	success := 0
+	sg.queries = make(map[uint64]*query)

 	list, err := sg.allowList.Load()
 	if err != nil {
 		return err
 	}

+	h := maphash.Hash{}
+	h.SetSeed(sg.hashSeed)
+
 	for _, v := range list {
-		if len(v.Query) == 0 {
+		if v.Query == "" {
 			continue
 		}

-		err := sg.prepareStmt(v)
-		if err != nil {
-			sg.log.Printf("WRN %s: %v", v.Name, err)
-		} else {
-			success++
-		}
-	}
-
-	sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))
-
-	return nil
-}
-
-func (sg *SuperGraph) prepareStmt(item allow.Item) error {
-	query := item.Query
-	qb := []byte(query)
-	vars := item.Vars
-
-	qt := qcode.GetQType(query)
-	ct := context.Background()
-
-	switch qt {
-	case qcode.QTQuery:
-		var stmts1 []stmt
-		var err error
-
-		if sg.abacEnabled {
-			stmts1, err = sg.buildMultiStmt(qb, vars)
-		} else {
-			stmts1, err = sg.buildRoleStmt(qb, vars, "user")
-		}
-
-		if err != nil {
-			return err
-		}
-
-		//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
-
-		err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
-		if err != nil {
-			return err
-		}
-
-		if sg.anonExists {
-			// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
-
-			stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
-			if err != nil {
-				return err
-			}
-
-			err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
-			if err != nil {
-				return err
-			}
-		}
-
-	case qcode.QTMutation:
-		for _, role := range sg.conf.Roles {
-			// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
-
-			stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
-			if err != nil {
-				return err
-			}
-
-			err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
-			if err != nil {
-				return err
-			}
+		qt := qcode.GetQType(v.Query)
+
+		switch qt {
+		case qcode.QTQuery:
+			sg.queries[queryID(&h, v.Name, "user")] = &query{ai: v, qt: qt}
+			sg.queries[queryID(&h, v.Name, "anon")] = &query{ai: v, qt: qt}
+
+		case qcode.QTMutation:
+			for _, role := range sg.conf.Roles {
+				sg.queries[queryID(&h, v.Name, role.Name)] = &query{ai: v, qt: qt}
+			}
 		}
 	}
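Embedding `sync.Once` in `query` is what lets preparation move out of startup: `initPrepared` now only registers entries, and the first request for a given query can run `prepare` through `Once.Do`, so the build-and-prepare work happens exactly once per process even under concurrency. A minimal sketch of that lazy pattern (the dispatch through `Once.Do` presumably happens at execution time, which is outside this diff):

```go
package main

import (
	"fmt"
	"sync"
)

type query struct {
	sync.Once
	sql string
	err error
}

// get prepares the query on first use; concurrent callers block until
// the single preparation finishes, then all see the same result.
func (q *query) get() (string, error) {
	q.Do(func() {
		q.sql = "SELECT 1" // stand-in for sg.db.Prepare(q.st.sql)
	})
	return q.sql, q.err
}

func main() {
	q := &query{}
	var wg sync.WaitGroup
	for i := 0; i < 3; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			s, _ := q.get()
			fmt.Println(s) // printed three times, prepared once
		}()
	}
	wg.Wait()
}
```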
@@ -129,22 +96,8 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
 	return nil
 }

-func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
-	sd, err := sg.db.PrepareContext(ct, st[0].sql)
-	if err != nil {
-		return fmt.Errorf("prepare failed: %v: %s", err, st[0].sql)
-	}
-
-	sg.prepared[key] = &preparedItem{
-		sd:      sd,
-		st:      st[0],
-		roleArg: len(st) > 1,
-	}
-	return nil
-}
-
 // nolint: errcheck
-func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
+func (sg *SuperGraph) prepareRoleStmt() error {
 	var err error

 	if !sg.abacEnabled {
@@ -160,7 +113,7 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {

 	io.WriteString(w, `(SELECT (CASE`)
 	for _, role := range sg.conf.Roles {
-		if len(role.Match) == 0 {
+		if role.Match == "" {
 			continue
 		}
 		io.WriteString(w, ` WHEN `)
@@ -171,11 +124,11 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
 	}

 	io.WriteString(w, ` ELSE $2 END) FROM (`)
-	io.WriteString(w, sg.conf.RolesQuery)
+	io.WriteString(w, rq)
 	io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
 	io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `)

-	sg.getRole, err = tx.Prepare(w.String())
+	sg.getRole, err = sg.db.Prepare(w.String())
 	if err != nil {
 		return err
 	}
@@ -206,9 +159,11 @@ func (sg *SuperGraph) initAllowList() error {
 }

 // nolint: errcheck
-func stmtHash(name string, role string) string {
-	h := sha256.New()
-	io.WriteString(h, strings.ToLower(name))
-	io.WriteString(h, role)
-	return hex.EncodeToString(h.Sum(nil))
+func queryID(h *maphash.Hash, name, role string) uint64 {
+	h.WriteString(name)
+	h.WriteString(role)
+	v := h.Sum64()
+	h.Reset()
+
+	return v
 }
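`queryID` swaps the old sha256-hex string key for a seeded `maphash` value: keys are stable for the life of the process (same seed) but deliberately not across processes, which is exactly right for an in-memory lookup map. The same usage in isolation:

```go
package main

import (
	"fmt"
	"hash/maphash"
)

// queryID hashes name+role into a uint64 map key; maphash writes never
// fail, so the return values are safe to ignore.
func queryID(h *maphash.Hash, name, role string) uint64 {
	h.WriteString(name)
	h.WriteString(role)
	v := h.Sum64()
	h.Reset() // reuse the same Hash (and seed) for the next key

	return v
}

func main() {
	h := maphash.Hash{}
	h.SetSeed(maphash.MakeSeed()) // per-process seed, as with sg.hashSeed

	a := queryID(&h, "products", "user")
	b := queryID(&h, "products", "user")
	fmt.Println(a == b) // true: same seed, same inputs, same key
}
```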
@@ -4,10 +4,10 @@ import (
 	"bytes"
 	"errors"
 	"fmt"
+	"hash/maphash"
 	"net/http"
 	"sync"

-	"github.com/cespare/xxhash/v2"
 	"github.com/dosco/super-graph/core/internal/qcode"
 	"github.com/dosco/super-graph/jsn"
 )
@@ -16,12 +16,13 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
 	var err error

 	sel := st.qc.Selects
-	h := xxhash.New()
+	h := maphash.Hash{}
+	h.SetSeed(sg.hashSeed)

 	// fetch the field name used within the db response json
 	// that are used to mark insertion points and the mapping between
 	// those field names and their select objects
-	fids, sfmap := sg.parentFieldIds(h, sel, st.md.Skipped)
+	fids, sfmap := sg.parentFieldIds(&h, sel, st.md.Skipped())

 	// fetch the field values of the marked insertion points
 	// these values contain the id to be used with fetching remote data
@@ -30,10 +31,10 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]

 	switch {
 	case len(from) == 1:
-		to, err = sg.resolveRemote(hdr, h, from[0], sel, sfmap)
+		to, err = sg.resolveRemote(hdr, &h, from[0], sel, sfmap)

 	case len(from) > 1:
-		to, err = sg.resolveRemotes(hdr, h, from, sel, sfmap)
+		to, err = sg.resolveRemotes(hdr, &h, from, sel, sfmap)

 	default:
 		return nil, errors.New("something wrong no remote ids found in db response")
@@ -55,7 +56,7 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]

 func (sg *SuperGraph) resolveRemote(
 	hdr http.Header,
-	h *xxhash.Digest,
+	h *maphash.Hash,
 	field jsn.Field,
 	sel []qcode.Select,
 	sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@@ -66,7 +67,8 @@ func (sg *SuperGraph) resolveRemote(
 	to := toA[:1]

 	// use the json key to find the related Select object
-	k1 := xxhash.Sum64(field.Key)
+	_, _ = h.Write(field.Key)
+	k1 := h.Sum64()

 	s, ok := sfmap[k1]
 	if !ok {
@@ -117,7 +119,7 @@ func (sg *SuperGraph) resolveRemote(

 func (sg *SuperGraph) resolveRemotes(
 	hdr http.Header,
-	h *xxhash.Digest,
+	h *maphash.Hash,
 	from []jsn.Field,
 	sel []qcode.Select,
 	sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@@ -134,7 +136,8 @@ func (sg *SuperGraph) resolveRemotes(
 	for i, id := range from {

 		// use the json key to find the related Select object
-		k1 := xxhash.Sum64(id.Key)
+		_, _ = h.Write(id.Key)
+		k1 := h.Sum64()

 		s, ok := sfmap[k1]
 		if !ok {
@@ -192,7 +195,7 @@ func (sg *SuperGraph) resolveRemotes(
 	return to, cerr
 }

-func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
+func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skipped uint32) (
 	[][]byte,
 	map[uint64]*qcode.Select) {

@@ -227,15 +230,15 @@ func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipp
 			fm[n] = r.IDField
 			n++

-			k := xxhash.Sum64(r.IDField)
-			sm[k] = s
+			_, _ = h.Write(r.IDField)
+			sm[h.Sum64()] = s
 		}
 	}

 	return fm, sm
 }

-func isSkipped(n uint32, pos uint32) bool {
+func isSkipped(n, pos uint32) bool {
 	return ((n & (1 << pos)) != 0)
 }
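`isSkipped` is a plain bitmask test: bit `pos` of `skipped` marks whether the select at that position was skipped in the primary SQL query and must be joined in from a remote resolver. In isolation:

```go
package main

import "fmt"

// isSkipped reports whether bit pos is set in the bitmask n.
func isSkipped(n, pos uint32) bool {
	return (n & (1 << pos)) != 0
}

func main() {
	var skipped uint32 = 0b0101 // selects 0 and 2 were skipped
	for pos := uint32(0); pos < 4; pos++ {
		fmt.Println(pos, isSkipped(skipped, pos))
	}
}
```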
@@ -2,11 +2,11 @@ package core

 import (
 	"fmt"
+	"hash/maphash"
 	"io/ioutil"
 	"net/http"
 	"strings"

-	"github.com/cespare/xxhash/v2"
 	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/jsn"
 )
@@ -19,7 +19,7 @@ type resolvFn struct {

 func (sg *SuperGraph) initResolvers() error {
 	var err error
-	sg.rmap = make(map[uint64]*resolvFn)
+	sg.rmap = make(map[uint64]resolvFn)

 	for _, t := range sg.conf.Tables {
 		err = sg.initRemotes(t)
@@ -36,7 +36,8 @@ func (sg *SuperGraph) initResolvers() error {
 }

 func (sg *SuperGraph) initRemotes(t Table) error {
-	h := xxhash.New()
+	h := maphash.Hash{}
+	h.SetSeed(sg.hashSeed)

 	for _, r := range t.Remotes {
 		// defines the table column to be used as an id in the
@@ -45,7 +46,7 @@ func (sg *SuperGraph) initRemotes(t Table) error {

 		// if no table column specified in the config then
 		// use the primary key of the table as the id
-		if len(idcol) == 0 {
+		if idcol == "" {
 			pcol, err := sg.pc.IDColumn(t.Name)
 			if err != nil {
 				return err
@@ -75,17 +76,18 @@ func (sg *SuperGraph) initRemotes(t Table) error {
 			path = append(path, []byte(p))
 		}

-		rf := &resolvFn{
+		rf := resolvFn{
 			IDField: []byte(idk),
 			Path:    path,
 			Fn:      fn,
 		}

 		// index resolver obj by parent and child names
-		sg.rmap[mkkey(h, r.Name, t.Name)] = rf
+		sg.rmap[mkkey(&h, r.Name, t.Name)] = rf

 		// index resolver obj by IDField
-		sg.rmap[xxhash.Sum64(rf.IDField)] = rf
+		_, _ = h.Write(rf.IDField)
+		sg.rmap[h.Sum64()] = rf
 	}

 	return nil
@@ -1,11 +1,9 @@
 package core

-import (
-	"github.com/cespare/xxhash/v2"
-)
+import "hash/maphash"

 // nolint: errcheck
-func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
+func mkkey(h *maphash.Hash, k1, k2 string) uint64 {
 	h.WriteString(k1)
 	h.WriteString(k2)
 	v := h.Sum64()
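`mkkey` folds the parent and child names into one map key with the same seeded hash. Note the two strings are concatenated with no separator, so pairs like ("ab", "c") and ("a", "bc") hash identically; that is acceptable here because the names come from trusted config, not user input. Usage sketch:

```go
package main

import (
	"fmt"
	"hash/maphash"
)

// nolint: errcheck
func mkkey(h *maphash.Hash, k1, k2 string) uint64 {
	h.WriteString(k1)
	h.WriteString(k2)
	v := h.Sum64()
	h.Reset()

	return v
}

func main() {
	h := maphash.Hash{}
	h.SetSeed(maphash.MakeSeed())

	rmap := map[uint64]string{}
	rmap[mkkey(&h, "payments", "users")] = "resolver for users.payments"
	fmt.Println(rmap[mkkey(&h, "payments", "users")]) // stable within this process
}
```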
@@ -55,6 +55,30 @@
 }
 ```

+### Fragments
+
+Fragments make it easy to build large, complex queries from small, composable, and reusable fragment blocks.
+
+```graphql
+query {
+  users {
+    ...userFields2
+    ...userFields1
+    picture_url
+  }
+}
+
+fragment userFields1 on user {
+  id
+  email
+}
+
+fragment userFields2 on user {
+  first_name
+  last_name
+}
+```
+
 ### Sorting

 To sort or order results, just use the `order_by` argument. This can be combined with `where`, `search`, etc. to build complex queries to fit your needs.
@@ -4,6 +4,8 @@ title: Introduction
 sidebar_label: Introduction
 ---

+import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
+
 Super Graph is a service that instantly, and without code, gives you a high-performance and secure GraphQL API. Your GraphQL queries are auto-translated into a single fast SQL query. No more spending weeks or months writing backend API code. Just make the query you need and Super Graph will do the rest.

 Super Graph has a rich feature set: integration with your existing Ruby on Rails apps, joining your DB with data from remote APIs, role- and attribute-based access control, support for JWT tokens, DB migrations, seeding, and a lot more.
@@ -134,3 +136,9 @@ mutation {
 	}
 }
 ```
+
+### Built-in GraphQL Editor
+
+Quickly craft and test your queries with a full-featured GraphQL editor. Auto-complete and schema documentation are automatically available.
+
+<img alt="Web UI" src={useBaseUrl("img/webui.jpg")} />
@@ -95,7 +95,7 @@ auth:
   type: jwt

   jwt:
-    # the two providers are 'auth0' and 'none'
+    # valid providers are auth0, firebase and none
     provider: auth0
     secret: abc335bfcfdb04e50db5bb0a4d67ab9
     public_key_file: /secrets/public_key.pem
@@ -108,6 +108,19 @@ We can get the JWT token either from the `authorization` header where we expect

 For validation, a `secret` or a public key (ECDSA or RSA) is required. When using public keys, they have to be in a PEM-format file.

+### Firebase Auth
+
+```yaml
+auth:
+  type: jwt
+
+  jwt:
+    provider: firebase
+    audience: <firebase-project-id>
+```
+
+Firebase auth also uses JWT; the keys are auto-fetched from Google and used according to Firebase's documented key-rotation mechanism. The `audience` config value needs to be set to your project id, and everything else is taken care of for you.
+
 ### HTTP Headers

 ```yaml
docs/website/docs/webui.md (new file)

@@ -0,0 +1,13 @@
+---
+id: webui
+title: Web UI / GraphQL Editor
+sidebar_label: Web UI
+---
+
+import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
+
+<img alt="Web UI" src={useBaseUrl("img/webui.jpg")} />
+
+Super Graph comes with a built-in GraphQL editor that only runs in development. Use it to craft your queries and copy-paste them into your app once you're ready. The editor supports auto-completion and schema documentation, which makes it super easy to craft and test your query all in one go, without knowing anything about the underlying database structure.
+
+You can even set query variables or HTTP headers as required. To simulate an authenticated user, set the HTTP header `"X-USER-ID": 5` to the user id of the user you want to test with.
@@ -36,8 +36,8 @@ module.exports = {
         position: "left",
       },
       {
-        label: "Art Compute",
-        href: "https://artcompute.com/s/super-graph",
+        label: "AbtCode",
+        href: "https://abtcode.com/s/super-graph",
         position: "left",
       },
     ],
@@ -3,6 +3,7 @@ module.exports = {
   Docusaurus: [
     "home",
     "intro",
+    "webui",
     "start",
     "why",
     "graphql",

docs/website/static/img/webui.jpg (new binary file, 117 KiB; not shown)
@@ -1805,11 +1805,6 @@ asynckit@^0.4.0:
   resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
   integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=

-at-least-node@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
-  integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
-
 atob@^2.1.2:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
@@ -2323,7 +2318,7 @@ ccount@^1.0.0, ccount@^1.0.3:
   resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.0.5.tgz#ac82a944905a65ce204eb03023157edf29425c17"
   integrity sha512-MOli1W+nfbPLlKEhInaxhRdp7KVLFxLN5ykwzHgLsLI3H3gs5jjFAK4Eoj3OzzcxCtumDaI8onoVDeQyWaNTkw==

-chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2:
+chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2:
   version "2.4.2"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
   integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
@@ -2522,15 +2517,6 @@ cliui@^5.0.0:
     strip-ansi "^5.2.0"
     wrap-ansi "^5.1.0"

-cliui@^6.0.0:
-  version "6.0.0"
-  resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
-  integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
-  dependencies:
-    string-width "^4.2.0"
-    strip-ansi "^6.0.0"
-    wrap-ansi "^6.2.0"
-
 coa@^2.0.2:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3"
@@ -3216,11 +3202,6 @@ depd@~1.1.2:
   resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
   integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=

-dependency-graph@^0.9.0:
-  version "0.9.0"
-  resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318"
-  integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w==
-
 des.js@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843"
@@ -3830,7 +3811,7 @@ fast-glob@^2.0.2:
     merge2 "^1.2.3"
     micromatch "^3.1.10"

-fast-glob@^3.0.3, fast-glob@^3.1.1:
+fast-glob@^3.0.3:
   version "3.2.2"
   resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.2.tgz#ade1a9d91148965d4bf7c51f72e1ca662d32e63d"
   integrity sha512-UDV82o4uQyljznxwMxyVRJgZZt3O5wENYojjzbaGEGZgeOxkLFf+V4cnUD+krzb2F72E18RhamkMZ7AdeggF7A==
@@ -3970,7 +3951,7 @@ find-cache-dir@^3.0.0, find-cache-dir@^3.3.1:
     make-dir "^3.0.2"
     pkg-dir "^4.1.0"

-find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0:
+find-up@4.1.0, find-up@^4.0.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
   integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
@@ -4084,16 +4065,6 @@ fs-extra@^8.0.0, fs-extra@^8.1.0:
     jsonfile "^4.0.0"
     universalify "^0.1.0"

-fs-extra@^9.0.0:
-  version "9.0.0"
-  resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.0.0.tgz#b6afc31036e247b2466dc99c29ae797d5d4580a3"
-  integrity sha512-pmEYSk3vYsG/bF651KPUXZ+hvjpgWYw/Gc7W9NFUe3ZVLczKKWIij3IKpOrQcdw4TILtibFslZ0UmR8Vvzig4g==
-  dependencies:
-    at-least-node "^1.0.0"
-    graceful-fs "^4.2.0"
-    jsonfile "^6.0.1"
-    universalify "^1.0.0"
-
 fs-minipass@^2.0.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
@@ -4149,11 +4120,6 @@ get-own-enumerable-property-symbols@^3.0.0:
   resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
   integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==

-get-stdin@^7.0.0:
-  version "7.0.0"
-  resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-7.0.0.tgz#8d5de98f15171a125c5e516643c7a6d0ea8a96f6"
-  integrity sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==
-
 get-stream@^4.0.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
@@ -4275,18 +4241,6 @@ globby@^10.0.1:
     merge2 "^1.2.3"
     slash "^3.0.0"

-globby@^11.0.0:
-  version "11.0.0"
-  resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.0.tgz#56fd0e9f0d4f8fb0c456f1ab0dee96e1380bc154"
-  integrity sha512-iuehFnR3xu5wBBtm4xi0dMe92Ob87ufyu/dHwpDYfbcpYpIbrO5OnS8M1vWvrBhSGEJ3/Ecj7gnX76P8YxpPEg==
-  dependencies:
-    array-union "^2.1.0"
-    dir-glob "^3.0.1"
-    fast-glob "^3.1.1"
-    ignore "^5.1.4"
-    merge2 "^1.3.0"
-    slash "^3.0.0"
-
 globby@^6.1.0:
   version "6.1.0"
   resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"
@@ -4743,7 +4697,7 @@ ignore@^3.3.5:
   resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
   integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==

-ignore@^5.1.1, ignore@^5.1.4:
+ignore@^5.1.1:
   version "5.1.4"
   resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.4.tgz#84b7b3dbe64552b6ef0eca99f6743dbec6d97adf"
   integrity sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==
@@ -5382,15 +5336,6 @@ jsonfile@^4.0.0:
   optionalDependencies:
     graceful-fs "^4.1.6"

-jsonfile@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.0.1.tgz#98966cba214378c8c84b82e085907b40bf614179"
-  integrity sha512-jR2b5v7d2vIOust+w3wtFKZIfpC2pnRmFAhAC/BuweZFQR8qZzxH1OyrQ10HmdVYiXWkYUqPVsz91cG7EL2FBg==
-  dependencies:
-    universalify "^1.0.0"
-  optionalDependencies:
-    graceful-fs "^4.1.6"
-
 jsprim@^1.2.2:
   version "1.4.1"
   resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"
@@ -5656,13 +5601,6 @@ lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
   integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==

-log-symbols@^2.2.0:
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a"
-  integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==
-  dependencies:
-    chalk "^2.0.1"
-
 loglevel@^1.6.8:
   version "1.6.8"
   resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.8.tgz#8a25fb75d092230ecd4457270d80b54e28011171"
@@ -6645,7 +6583,7 @@ picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1:
   resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
   integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==

-pify@^2.0.0, pify@^2.3.0:
+pify@^2.0.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
   integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=
@@ -6731,24 +6669,6 @@ postcss-calc@^7.0.1:
     postcss-selector-parser "^6.0.2"
     postcss-value-parser "^4.0.2"

-postcss-cli@^7.1.1:
-  version "7.1.1"
-  resolved "https://registry.yarnpkg.com/postcss-cli/-/postcss-cli-7.1.1.tgz#260f9546be260b2149bf32e28d785a0d79c9aab8"
-  integrity sha512-bYQy5ydAQJKCMSpvaMg0ThPBeGYqhQXumjbFOmWnL4u65CYXQ16RfS6afGQpit0dGv/fNzxbdDtx8dkqOhhIbg==
-  dependencies:
-    chalk "^4.0.0"
-    chokidar "^3.3.0"
-    dependency-graph "^0.9.0"
-    fs-extra "^9.0.0"
-    get-stdin "^7.0.0"
-    globby "^11.0.0"
-    postcss "^7.0.0"
-    postcss-load-config "^2.0.0"
-    postcss-reporter "^6.0.0"
-    pretty-hrtime "^1.0.3"
-    read-cache "^1.0.0"
-    yargs "^15.0.2"
-
 postcss-color-functional-notation@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz#5efd37a88fbabeb00a2966d1e53d98ced93f74e0"
@@ -7288,16 +7208,6 @@ postcss-replace-overflow-wrap@^3.0.0:
   dependencies:
     postcss "^7.0.2"

-postcss-reporter@^6.0.0:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-6.0.1.tgz#7c055120060a97c8837b4e48215661aafb74245f"
-  integrity sha512-LpmQjfRWyabc+fRygxZjpRxfhRf9u/fdlKf4VHG4TSPbV2XNsuISzYW1KL+1aQzx53CAppa1bKG4APIB/DOXXw==
-  dependencies:
-    chalk "^2.4.1"
-    lodash "^4.17.11"
-    log-symbols "^2.2.0"
-    postcss "^7.0.7"
-
 postcss-selector-matches@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz#71c8248f917ba2cc93037c9637ee09c64436fcff"
@@ -7397,7 +7307,7 @@ postcss@^6.0.9:
     source-map "^0.6.1"
     supports-color "^5.4.0"

-postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6, postcss@^7.0.7:
+postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6:
   version "7.0.30"
   resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.30.tgz#cc9378beffe46a02cbc4506a0477d05fcea9a8e2"
   integrity sha512-nu/0m+NtIzoubO+xdAlwZl/u5S5vi/y6BCsoL8D+8IxsD3XvBS8X4YEADNIVXKVuQvduiucnRv+vPIqj56EGMQ==
@@ -7692,6 +7602,11 @@ react-helmet@^6.0.0-beta:
     react-fast-compare "^2.0.4"
     react-side-effect "^2.1.0"

+react-hook-sticky@^0.2.0:
+  version "0.2.0"
+  resolved "https://registry.yarnpkg.com/react-hook-sticky/-/react-hook-sticky-0.2.0.tgz#0dcc40a2afb1856e53764af9b231f1146e3de576"
+  integrity sha512-J92F5H6PJQlMBgZ2tv58GeVlTZtEhpZ9bYLdoV2+5fVSJScszuY+TDZY3enQEAPIgJsLteFglGGuf8/TB9L72Q==
+
 react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1:
   version "16.13.1"
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
@@ -7785,13 +7700,6 @@ react@^16.8.4:
     object-assign "^4.1.1"
     prop-types "^15.6.2"

-read-cache@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774"
-  integrity sha1-5mTvMRYRZsl1HNvo28+GtftY93Q=
-  dependencies:
-    pify "^2.3.0"
-
 "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
   version "2.3.7"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
@@ -8709,7 +8617,7 @@ string-width@^3.0.0, string-width@^3.1.0:
     is-fullwidth-code-point "^2.0.0"
     strip-ansi "^5.1.0"

-string-width@^4.1.0, string-width@^4.2.0:
+string-width@^4.1.0:
   version "4.2.0"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
   integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
@@ -9305,11 +9213,6 @@ universalify@^0.1.0:
   resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
   integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==

-universalify@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/universalify/-/universalify-1.0.0.tgz#b61a1da173e8435b2fe3c67d29b9adf8594bd16d"
-  integrity sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==
-
 unpipe@1.0.0, unpipe@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
@@ -9723,7 +9626,7 @@ wrap-ansi@^5.1.0:
     string-width "^3.0.0"
|
string-width "^3.0.0"
|
||||||
strip-ansi "^5.0.0"
|
strip-ansi "^5.0.0"
|
||||||
|
|
||||||
wrap-ansi@^6.0.0, wrap-ansi@^6.2.0:
|
wrap-ansi@^6.0.0:
|
||||||
version "6.2.0"
|
version "6.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
|
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
|
||||||
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
|
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
|
||||||
@ -9784,14 +9687,6 @@ yargs-parser@^13.1.2:
|
|||||||
camelcase "^5.0.0"
|
camelcase "^5.0.0"
|
||||||
decamelize "^1.2.0"
|
decamelize "^1.2.0"
|
||||||
|
|
||||||
yargs-parser@^18.1.1:
|
|
||||||
version "18.1.3"
|
|
||||||
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
|
|
||||||
integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
|
|
||||||
dependencies:
|
|
||||||
camelcase "^5.0.0"
|
|
||||||
decamelize "^1.2.0"
|
|
||||||
|
|
||||||
yargs@^13.3.2:
|
yargs@^13.3.2:
|
||||||
version "13.3.2"
|
version "13.3.2"
|
||||||
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
|
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
|
||||||
@ -9808,23 +9703,6 @@ yargs@^13.3.2:
|
|||||||
y18n "^4.0.0"
|
y18n "^4.0.0"
|
||||||
yargs-parser "^13.1.2"
|
yargs-parser "^13.1.2"
|
||||||
|
|
||||||
yargs@^15.0.2:
|
|
||||||
version "15.3.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b"
|
|
||||||
integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==
|
|
||||||
dependencies:
|
|
||||||
cliui "^6.0.0"
|
|
||||||
decamelize "^1.2.0"
|
|
||||||
find-up "^4.1.0"
|
|
||||||
get-caller-file "^2.0.1"
|
|
||||||
require-directory "^2.1.1"
|
|
||||||
require-main-filename "^2.0.0"
|
|
||||||
set-blocking "^2.0.0"
|
|
||||||
string-width "^4.2.0"
|
|
||||||
which-module "^2.0.0"
|
|
||||||
y18n "^4.0.0"
|
|
||||||
yargs-parser "^18.1.1"
|
|
||||||
|
|
||||||
zepto@^1.2.0:
|
zepto@^1.2.0:
|
||||||
version "1.2.0"
|
version "1.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/zepto/-/zepto-1.2.0.tgz#e127bd9e66fd846be5eab48c1394882f7c0e4f98"
|
resolved "https://registry.yarnpkg.com/zepto/-/zepto-1.2.0.tgz#e127bd9e66fd846be5eab48c1394882f7c0e4f98"
|
||||||
2 go.mod
@@ -12,7 +12,6 @@ require (
 	github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
 	github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
 	github.com/brianvoe/gofakeit/v5 v5.2.0
-	github.com/cespare/xxhash/v2 v2.1.1
 	github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a
 	github.com/daaku/go.zipexe v1.0.1 // indirect
 	github.com/dgrijalva/jwt-go v3.2.0+incompatible
@@ -29,7 +28,6 @@ require (
 	github.com/openzipkin/zipkin-go v0.2.2
 	github.com/pelletier/go-toml v1.7.0 // indirect
 	github.com/pkg/errors v0.9.1
-	github.com/prometheus/common v0.4.0
 	github.com/rs/cors v1.7.0
 	github.com/spf13/afero v1.2.2 // indirect
 	github.com/spf13/cast v1.3.1 // indirect
2 go.sum
@@ -55,8 +55,6 @@ github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
-github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
-github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a h1:WVu7r2vwlrBVmunbSSU+9/3M3AgsQyhE49CKDjHiFq4=
 github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a/go.mod h1:wQjjxFMFyMlsWh4Z3nMuHQtevD4Ul9UVQSnz1JOLuP8=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
@@ -155,7 +155,7 @@ func cmdVersion(cmd *cobra.Command, args []string) {
 }
 
 func BuildDetails() string {
-	if len(version) == 0 {
+	if version == "" {
 		return `
 Super Graph (unknown version)
 For documentation, visit https://supergraph.dev
@@ -88,6 +88,10 @@ func cmdNew(cmd *cobra.Command, args []string) {
 		}
 	})
 
+	ifNotExists(path.Join(appConfigPath, "allow.list"), func(p string) error {
+		return ioutil.WriteFile(p, []byte{}, 0644)
+	})
+
 	// Create app migrations folder and add relevant files
 
 	appMigrationsPath := path.Join(appConfigPath, "migrations")
@@ -80,10 +80,8 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
 func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
 	ct := context.Background()
 
-	if v, ok := opt["user_id"]; ok && len(v) != 0 {
+	if v, ok := opt["user_id"]; ok && v != "" {
 		ct = context.WithValue(ct, core.UserIDKey, v)
-	} else {
-		ct = context.WithValue(ct, core.UserIDKey, "-1")
 	}
 
 	// var role string
@@ -146,7 +144,7 @@ func (c *csvSource) Values() ([]interface{}, error) {
 
 	for _, v := range c.rows[c.i] {
 		switch {
-		case len(v) == 0:
+		case v == "":
 			vals = append(vals, "")
 		case isDigit(v):
 			var n int
@@ -245,7 +243,7 @@ func avatarURL(size int) string {
 	return fmt.Sprintf("https://i.pravatar.cc/%d?%d", size, rand.Intn(5000))
 }
 
-func imageURL(width int, height int) string {
+func imageURL(width, height int) string {
 	return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
 }
 
@@ -66,7 +66,7 @@ func newViper(configPath, configFile string) *viper.Viper {
 	vi.SetDefault("host_port", "0.0.0.0:8080")
 	vi.SetDefault("web_ui", false)
 	vi.SetDefault("enable_tracing", false)
-	vi.SetDefault("auth_fail_block", "always")
+	vi.SetDefault("auth_fail_block", false)
 	vi.SetDefault("seed_file", "seed.js")
 
 	vi.SetDefault("default_block", true)
@@ -90,7 +90,7 @@ func newViper(configPath, configFile string) *viper.Viper {
 }
 
 func GetConfigName() string {
-	if len(os.Getenv("GO_ENV")) == 0 {
+	if os.Getenv("GO_ENV") == "" {
 		return "dev"
 	}
 
@@ -105,7 +105,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
 	}
 
 	if err == nil {
-		if len(conf.CacheControl) != 0 && res.Operation() == core.OpQuery {
+		if conf.CacheControl != "" && res.Operation() == core.OpQuery {
 			w.Header().Set("Cache-Control", conf.CacheControl)
 		}
 		//nolint: errcheck
@@ -32,6 +32,7 @@ type Auth struct {
 		Secret     string
 		PubKeyFile string `mapstructure:"public_key_file"`
 		PubKeyType string `mapstructure:"public_key_type"`
+		Audience   string `mapstructure:"audience"`
 	}
 
 	Header struct {
@@ -46,17 +47,17 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		ctx := r.Context()
 
 		userIDProvider := r.Header.Get("X-User-ID-Provider")
-		if len(userIDProvider) != 0 {
+		if userIDProvider != "" {
 			ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
 		}
 
 		userID := r.Header.Get("X-User-ID")
-		if len(userID) != 0 {
+		if userID != "" {
 			ctx = context.WithValue(ctx, core.UserIDKey, userID)
 		}
 
 		userRole := r.Header.Get("X-User-Role")
-		if len(userRole) != 0 {
+		if userRole != "" {
 			ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
 		}
 
@@ -67,11 +68,11 @@ func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	hdr := ac.Header
 
-	if len(hdr.Name) == 0 {
+	if hdr.Name == "" {
 		return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
 	}
 
-	if !hdr.Exists && len(hdr.Value) == 0 {
+	if !hdr.Exists && hdr.Value == "" {
 		return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
 	}
 
@@ -81,7 +82,7 @@ func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 
 	switch {
 	case hdr.Exists:
-		fo1 = (len(value) == 0)
+		fo1 = (value == "")
 
 	default:
 		fo1 = (value != hdr.Value)
@@ -2,19 +2,32 @@ package auth
 
 import (
 	"context"
+	"encoding/json"
 	"io/ioutil"
 	"net/http"
+	"strconv"
 	"strings"
+	"time"
 
 	jwt "github.com/dgrijalva/jwt-go"
 	"github.com/dosco/super-graph/core"
 )
 
 const (
 	authHeader = "Authorization"
 	jwtAuth0 int = iota + 1
+	jwtFirebase int = iota + 2
+	firebasePKEndpoint = "https://www.googleapis.com/robot/v1/metadata/x509/securetoken@system.gserviceaccount.com"
+	firebaseIssuerPrefix = "https://securetoken.google.com/"
 )
 
+type firebasePKCache struct {
+	PublicKeys map[string]string
+	Expiration time.Time
+}
+
+var firebasePublicKeys firebasePKCache
+
 func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	var key interface{}
 	var jwtProvider int
@@ -23,16 +36,18 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 
 	if ac.JWT.Provider == "auth0" {
 		jwtProvider = jwtAuth0
+	} else if ac.JWT.Provider == "firebase" {
+		jwtProvider = jwtFirebase
 	}
 
 	secret := ac.JWT.Secret
 	publicKeyFile := ac.JWT.PubKeyFile
 
 	switch {
-	case len(secret) != 0:
+	case secret != "":
 		key = []byte(secret)
 
-	case len(publicKeyFile) != 0:
+	case publicKeyFile != "":
 		kd, err := ioutil.ReadFile(publicKeyFile)
 		if err != nil {
 			return nil, err
@@ -56,9 +71,10 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	}
 
 	return func(w http.ResponseWriter, r *http.Request) {
+
 		var tok string
 
-		if len(cookie) != 0 {
+		if cookie != "" {
 			ck, err := r.Cookie(cookie)
 			if err != nil {
 				next.ServeHTTP(w, r)
@@ -74,9 +90,16 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 			tok = ah[7:]
 		}
 
-		token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, func(token *jwt.Token) (interface{}, error) {
-			return key, nil
-		})
+		var keyFunc jwt.Keyfunc
+		if jwtProvider == jwtFirebase {
+			keyFunc = firebaseKeyFunction
+		} else {
+			keyFunc = func(token *jwt.Token) (interface{}, error) {
+				return key, nil
+			}
+		}
+
+		token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, keyFunc)
 
 		if err != nil {
 			next.ServeHTTP(w, r)
@@ -86,12 +109,20 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		if claims, ok := token.Claims.(*jwt.StandardClaims); ok {
 			ctx := r.Context()
 
+			if ac.JWT.Audience != "" && claims.Audience != ac.JWT.Audience {
+				next.ServeHTTP(w, r)
+				return
+			}
+
 			if jwtProvider == jwtAuth0 {
 				sub := strings.Split(claims.Subject, "|")
 				if len(sub) != 2 {
 					ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
 					ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
 				}
+			} else if jwtProvider == jwtFirebase &&
+				claims.Issuer == firebaseIssuerPrefix+ac.JWT.Audience {
+				ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
 			} else {
 				ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
 			}
@@ -103,3 +134,92 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		next.ServeHTTP(w, r)
 	}, nil
 }
+
+type firebaseKeyError struct {
+	Err     error
+	Message string
+}
+
+func (e *firebaseKeyError) Error() string {
+	return e.Message + " " + e.Err.Error()
+}
+
+func firebaseKeyFunction(token *jwt.Token) (interface{}, error) {
+	kid, ok := token.Header["kid"]
+
+	if !ok {
+		return nil, &firebaseKeyError{
+			Message: "Error 'kid' header not found in token",
+		}
+	}
+
+	if firebasePublicKeys.Expiration.Before(time.Now()) {
+		resp, err := http.Get(firebasePKEndpoint)
+
+		if err != nil {
+			return nil, &firebaseKeyError{
+				Message: "Error connecting to firebase certificate server",
+				Err:     err,
+			}
+		}
+
+		defer resp.Body.Close()
+
+		data, err := ioutil.ReadAll(resp.Body)
+
+		if err != nil {
+			return nil, &firebaseKeyError{
+				Message: "Error reading firebase certificate server response",
+				Err:     err,
+			}
+		}
+
+		cachePolicy := resp.Header.Get("cache-control")
+		ageIndex := strings.Index(cachePolicy, "max-age=")
+
+		if ageIndex < 0 {
+			return nil, &firebaseKeyError{
+				Message: "Error parsing cache-control header: 'max-age=' not found",
+			}
+		}
+
+		ageToEnd := cachePolicy[ageIndex+8:]
+		endIndex := strings.Index(ageToEnd, ",")
+		if endIndex < 0 {
+			endIndex = len(ageToEnd) - 1
+		}
+		ageString := ageToEnd[:endIndex]
+
+		age, err := strconv.ParseInt(ageString, 10, 64)
+
+		if err != nil {
+			return nil, &firebaseKeyError{
+				Message: "Error parsing max-age cache policy",
+				Err:     err,
+			}
+		}
+
+		expiration := time.Now().Add(time.Duration(time.Duration(age) * time.Second))
+
+		err = json.Unmarshal(data, &firebasePublicKeys.PublicKeys)
+
+		if err != nil {
+			firebasePublicKeys = firebasePKCache{}
+			return nil, &firebaseKeyError{
+				Message: "Error unmarshalling firebase public key json",
+				Err:     err,
+			}
+		}
+
+		firebasePublicKeys.Expiration = expiration
+	}
+
+	if key, found := firebasePublicKeys.PublicKeys[kid.(string)]; found {
+		k, err := jwt.ParseRSAPublicKeyFromPEM([]byte(key))
+		return k, err
+	}
+
+	return nil, &firebaseKeyError{
+		Message: "Error no matching public key for kid supplied in jwt",
+	}
+}
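A note on the change above: `jwt.ParseWithClaims` takes a `jwt.Keyfunc`, which runs after the token header is decoded but before the signature is verified, so the handler can swap in `firebaseKeyFunction` (which looks up a Google certificate by `kid`) while keeping the static-secret path for every other provider. A minimal sketch of that pattern; the secret here is a placeholder, not anything from this repo:

```go
package main

import (
	"fmt"

	jwt "github.com/dgrijalva/jwt-go"
)

func main() {
	secret := []byte("example-secret") // placeholder key for the sketch

	// Sign a token so there is something to parse.
	tok := jwt.NewWithClaims(jwt.SigningMethodHS256, &jwt.StandardClaims{Subject: "user|42"})
	signed, err := tok.SignedString(secret)
	if err != nil {
		panic(err)
	}

	// The Keyfunc receives the parsed (but not yet verified) token and
	// returns the key to verify it with; a kid-based lookup would go here.
	keyFunc := func(t *jwt.Token) (interface{}, error) {
		return secret, nil
	}

	parsed, err := jwt.ParseWithClaims(signed, &jwt.StandardClaims{}, keyFunc)
	if err != nil {
		panic(err)
	}
	if claims, ok := parsed.Claims.(*jwt.StandardClaims); ok && parsed.Valid {
		fmt.Println(claims.Subject) // user|42
	}
}
```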
@@ -165,7 +165,7 @@ func railsAuth(ac *Auth) (*rails.Auth, error) {
 	}
 
 	version := ac.Rails.Version
-	if len(version) == 0 {
+	if version == "" {
 		return nil, errors.New("no auth.rails.version defined")
 	}
 
@@ -6,9 +6,11 @@ import (
 	"database/sql"
 	"fmt"
 	"io/ioutil"
+	"log"
 	"os"
 	"path/filepath"
 	"regexp"
+	"sort"
 	"strconv"
 	"strings"
 	"text/template"
@@ -105,39 +107,40 @@ func (defaultMigratorFS) Glob(pattern string) ([]string, error) {
 func FindMigrationsEx(path string, fs MigratorFS) ([]string, error) {
 	path = strings.TrimRight(path, string(filepath.Separator))
 
-	fileInfos, err := fs.ReadDir(path)
+	files, err := ioutil.ReadDir(path)
 	if err != nil {
-		return nil, err
+		log.Fatal(err)
 	}
 
-	paths := make([]string, 0, len(fileInfos))
-	for _, fi := range fileInfos {
+	fm := make(map[int]string, len(files))
+	keys := make([]int, 0, len(files))
+
+	for _, fi := range files {
 		if fi.IsDir() {
 			continue
 		}
 
 		matches := migrationPattern.FindStringSubmatch(fi.Name())
 
 		if len(matches) != 2 {
			continue
 		}
 
-		n, err := strconv.ParseInt(matches[1], 10, 32)
+		n, err := strconv.Atoi(matches[1])
 		if err != nil {
 			// The regexp already validated that the prefix is all digits so this *should* never fail
 			return nil, err
 		}
 
-		mcount := len(paths)
-
-		if n < int64(mcount) {
-			return nil, fmt.Errorf("Duplicate migration %d", n)
-		}
-
-		if int64(mcount) < n {
-			return nil, fmt.Errorf("Missing migration %d", mcount)
-		}
-
-		paths = append(paths, filepath.Join(path, fi.Name()))
+		fm[n] = filepath.Join(path, fi.Name())
+		keys = append(keys, n)
 	}
 
+	sort.Ints(keys)
+
+	paths := make([]string, 0, len(keys))
+	for _, k := range keys {
+		paths = append(paths, fm[k])
+	}
+
 	return paths, nil
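The rewritten FindMigrationsEx above orders migrations by their numeric prefix instead of rejecting gaps or duplicates in the sequence. A small standalone sketch of that index-then-sort approach; the file names and the exact pattern here are illustrative, not taken from the repo:

```go
package main

import (
	"fmt"
	"regexp"
	"sort"
	"strconv"
)

// Hypothetical migration-name pattern: a numeric prefix, an underscore, ".sql".
var migrationPattern = regexp.MustCompile(`\A(\d+)_.+\.sql\z`)

func main() {
	// A made-up directory listing; the OS returns these in arbitrary order.
	names := []string{"2_users.sql", "10_comments.sql", "1_init.sql", "notes.txt"}

	fm := make(map[int]string, len(names))
	keys := make([]int, 0, len(names))

	for _, name := range names {
		m := migrationPattern.FindStringSubmatch(name)
		if len(m) != 2 {
			continue // skip files that are not migrations
		}
		n, _ := strconv.Atoi(m[1]) // prefix is all digits per the regexp
		fm[n] = name
		keys = append(keys, n)
	}

	sort.Ints(keys) // numeric order, so 10 sorts after 2, not after 1
	for _, k := range keys {
		fmt.Println(fm[k])
	}
	// Output: 1_init.sql, 2_users.sql, 10_comments.sql
}
```

Sorting the integer keys rather than the raw names is what keeps `10_comments.sql` from landing between `1_init.sql` and `2_users.sql`.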
@@ -196,7 +199,7 @@ func (m *Migrator) LoadMigrations(path string) error {
 		for _, v := range strings.Split(upSQL, "\n") {
 			// Only account for regular single line comment, empty line and space/comment combination
 			cleanString := strings.TrimSpace(v)
-			if len(cleanString) != 0 &&
+			if cleanString != "" &&
 				!strings.HasPrefix(cleanString, "--") {
 				containsSQL = true
 				break

File diff suppressed because one or more lines are too long
@@ -27,7 +27,7 @@ func initWatcher() {
 	}
 
 	var d dir
-	if len(cpath) == 0 || cpath == "./" {
+	if cpath == "" || cpath == "./" {
 		d = Dir("./config", ReExec)
 	} else {
 		d = Dir(cpath, ReExec)
@@ -52,11 +52,11 @@ func startHTTP() {
 	hp := strings.SplitN(conf.HostPort, ":", 2)
 
 	if len(hp) == 2 {
-		if len(conf.Host) != 0 {
+		if conf.Host != "" {
 			hp[0] = conf.Host
 		}
 
-		if len(conf.Port) != 0 {
+		if conf.Port != "" {
 			hp[1] = conf.Port
 		}
 
@@ -64,7 +64,7 @@ func startHTTP() {
 		}
 	}
 
-	if len(conf.hostPort) == 0 {
+	if conf.hostPort == "" {
 		conf.hostPort = defaultHP
 	}
 
@@ -123,7 +123,7 @@ func routeHandler() (http.Handler, error) {
 		return mux, nil
 	}
 
-	if len(conf.APIPath) != 0 {
+	if conf.APIPath != "" {
 		apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
 	}
 
@@ -134,7 +134,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {{- .AppNameSlug -}}_development
+  dbname: {{ .AppNameSlug -}}_development
   user: postgres
   password: postgres
 
@@ -82,7 +82,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {{- .AppNameSlug -}}_production
+  dbname: {{ .AppNameSlug -}}_production
   user: postgres
   password: postgres
   #pool_size: 10
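The template fix above matters because `{{-` trims the whitespace *before* the action: the old form ate the space after `dbname:`, producing `dbname:myapp_development` rather than a valid YAML key-value pair. A quick sketch showing the difference; the `myapp` slug is a stand-in:

```go
package main

import (
	"os"
	"text/template"
)

func main() {
	data := map[string]string{"AppNameSlug": "myapp"}

	// "{{-" trims preceding whitespace; "-}}" trims what follows the action.
	trim := template.Must(template.New("a").Parse("dbname: {{- .AppNameSlug -}}_development\n"))
	keep := template.Must(template.New("b").Parse("dbname: {{ .AppNameSlug -}}_development\n"))

	_ = trim.Execute(os.Stdout, data) // dbname:myapp_development
	_ = keep.Execute(os.Stdout, data) // dbname: myapp_development
}
```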
@@ -11,9 +11,9 @@
 // opt-in, read http://bit.ly/CRA-PWA
 
 const isLocalhost = Boolean(
-  window.location.hostname === 'localhost' ||
+  window.location.hostname === "localhost" ||
     // [::1] is the IPv6 localhost address.
-    window.location.hostname === '[::1]' ||
+    window.location.hostname === "[::1]" ||
     // 127.0.0.1/8 is considered localhost for IPv4.
     window.location.hostname.match(
       /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
@@ -21,7 +21,7 @@ const isLocalhost = Boolean(
 );
 
 export function register(config) {
-  if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
+  if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) {
     // The URL constructor is available in all browsers that support SW.
     const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
     if (publicUrl.origin !== window.location.origin) {
@@ -31,7 +31,7 @@ export function register(config) {
       return;
     }
 
-    window.addEventListener('load', () => {
+    window.addEventListener("load", () => {
       const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
 
       if (isLocalhost) {
@@ -42,8 +42,8 @@ export function register(config) {
         // service worker/PWA documentation.
         navigator.serviceWorker.ready.then(() => {
           console.log(
-            'This web app is being served cache-first by a service ' +
-              'worker. To learn more, visit http://bit.ly/CRA-PWA'
+            "This web app is being served cache-first by a service " +
+              "worker. To learn more, visit http://bit.ly/CRA-PWA"
           );
         });
       } else {
@@ -57,21 +57,21 @@ export function register(config) {
 function registerValidSW(swUrl, config) {
   navigator.serviceWorker
     .register(swUrl)
-    .then(registration => {
+    .then((registration) => {
       registration.onupdatefound = () => {
         const installingWorker = registration.installing;
         if (installingWorker == null) {
           return;
         }
         installingWorker.onstatechange = () => {
-          if (installingWorker.state === 'installed') {
+          if (installingWorker.state === "installed") {
            if (navigator.serviceWorker.controller) {
              // At this point, the updated precached content has been fetched,
              // but the previous service worker will still serve the older
              // content until all client tabs are closed.
              console.log(
-                'New content is available and will be used when all ' +
-                  'tabs for this page are closed. See http://bit.ly/CRA-PWA.'
+                "New content is available and will be used when all " +
+                  "tabs for this page are closed. See http://bit.ly/CRA-PWA."
              );
 
              // Execute callback
@@ -82,7 +82,7 @@ function registerValidSW(swUrl, config) {
              // At this point, everything has been precached.
              // It's the perfect time to display a
              // "Content is cached for offline use." message.
-              console.log('Content is cached for offline use.');
+              console.log("Content is cached for offline use.");
 
              // Execute callback
              if (config && config.onSuccess) {
@@ -93,23 +93,23 @@ function registerValidSW(swUrl, config) {
          };
        };
      })
-    .catch(error => {
-      console.error('Error during service worker registration:', error);
+    .catch((error) => {
+      console.error("Error during service worker registration:", error);
     });
 }
 
 function checkValidServiceWorker(swUrl, config) {
   // Check if the service worker can be found. If it can't reload the page.
   fetch(swUrl)
-    .then(response => {
+    .then((response) => {
       // Ensure service worker exists, and that we really are getting a JS file.
-      const contentType = response.headers.get('content-type');
+      const contentType = response.headers.get("content-type");
       if (
         response.status === 404 ||
-        (contentType != null && contentType.indexOf('javascript') === -1)
+        (contentType != null && contentType.indexOf("javascript") === -1)
       ) {
         // No service worker found. Probably a different app. Reload the page.
-        navigator.serviceWorker.ready.then(registration => {
+        navigator.serviceWorker.ready.then((registration) => {
           registration.unregister().then(() => {
             window.location.reload();
           });
@@ -121,14 +121,14 @@ function checkValidServiceWorker(swUrl, config) {
     })
     .catch(() => {
       console.log(
-        'No internet connection found. App is running in offline mode.'
+        "No internet connection found. App is running in offline mode."
       );
     });
 }
 
 export function unregister() {
-  if ('serviceWorker' in navigator) {
-    navigator.serviceWorker.ready.then(registration => {
+  if ("serviceWorker" in navigator) {
+    navigator.serviceWorker.ready.then((registration) => {
       registration.unregister();
     });
   }
13 jsn/bench.1 Normal file
@@ -0,0 +1,13 @@
+goos: darwin
+goarch: amd64
+pkg: github.com/dosco/super-graph/jsn
+BenchmarkGet
+BenchmarkGet-16        13898    85293 ns/op   3328 B/op   2 allocs/op
+BenchmarkFilter
+BenchmarkFilter-16    189328     6341 ns/op    448 B/op   1 allocs/op
+BenchmarkStrip
+BenchmarkStrip-16     219765     5543 ns/op    224 B/op   1 allocs/op
+BenchmarkReplace
+BenchmarkReplace-16   100899    12022 ns/op    416 B/op   1 allocs/op
+PASS
+ok   github.com/dosco/super-graph/jsn   6.029s
@@ -2,17 +2,19 @@ package jsn
 
 import (
 	"bytes"
-
-	"github.com/cespare/xxhash/v2"
+	"hash/maphash"
 )
 
 // Filter function filters the JSON keeping only the provided keys and removing all others
 func Filter(w *bytes.Buffer, b []byte, keys []string) error {
 	var err error
 	kmap := make(map[uint64]struct{}, len(keys))
+	h := maphash.Hash{}
 
 	for i := range keys {
-		kmap[xxhash.Sum64String(keys[i])] = struct{}{}
+		_, _ = h.WriteString(keys[i])
+		kmap[h.Sum64()] = struct{}{}
+		h.Reset()
 	}
 
 	// is an list
@@ -132,7 +134,11 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
 			cb := b[s:(e + 1)]
 			e = 0
 
-			if _, ok := kmap[xxhash.Sum64(k)]; !ok {
+			_, _ = h.Write(k)
+			_, ok := kmap[h.Sum64()]
+			h.Reset()
+
+			if !ok {
 				continue
 			}
 
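On the maphash swap above: a zero `maphash.Hash` draws a random seed the first time it is used, so its sums are only comparable with other sums produced from the same Hash (or the same `maphash.Seed`) within one process. That is exactly how `Filter` uses it, building and probing `kmap` with a single `h`. A small sketch of the idiom:

```go
package main

import (
	"fmt"
	"hash/maphash"
)

func main() {
	// Zero value picks a random seed on first use; all writes below share it.
	var h maphash.Hash

	keys := []string{"id", "email"}
	kmap := make(map[uint64]struct{}, len(keys))

	for _, k := range keys {
		_, _ = h.WriteString(k)
		kmap[h.Sum64()] = struct{}{}
		h.Reset() // Reset clears the buffered data but keeps the seed
	}

	_, _ = h.WriteString("email")
	_, ok := kmap[h.Sum64()]
	h.Reset()
	fmt.Println(ok) // true: same seed + same input => same sum
}
```

The per-process seed means these hashes cannot be persisted or compared across runs, which is fine here since the key set is rebuilt on every call.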
@@ -12,56 +12,53 @@ var ret int
 
 func TestFuzzCrashers(t *testing.T) {
 	var crashers = []string{
-		/*
-			"00\"0000\"0{",
-			"6\",\n\t\t\t\"something\": " +
-				"null\n\t\t},\n\t\t{\n\t\t\t\"id" +
-				"\": 12,\n\t\t\t\"full_name" +
-				"\": \"Brenton Bauch Ph" +
-				"D\",\n\t\t\t\"email\": \"ren" +
-				"ee@miller.co\",\n\t\t\t\"_" +
-				"_twitter_id\": 1\n\t\t}," +
-				"\n\t\t{\n\t\t\t\"id\": 13,\n\t\t" +
-				"\t\"full_name\": \"Daine" +
-				" Gleichner\",\n\t\t\t\"ema" +
-				"il\": \"andrea@gmail.c" +
-				"om\",\n\t\t\t\"__twitter_i" +
-				"d\": \"\",\n\t\t\t\"id__twit" +
-				"ter_id\": \"NOOO\",\n\t\t\t" +
-				"\"work_email\": \"andre" +
-				"a@nienow.co\"\n\t\t}\n\t]}" +
-				"\n\t}",
-			"0000\"0000\"0{",
-			"0000\"\"{",
-			"0000\"000\"{",
-			"0\"\"{",
-			"\"0\"{",
-			"000\"0\"{",
-			"0\"0000\"0{",
-			"000\"\"{",
-			"0\"00\"{",
-			"000\"0000\"0{",
-			"000\"00\"{",
-			"\"\"{",
-			"0\"0000\"{",
-			"\"000\"00{",
-			"0000\"00\"{",
-			"00\"0\"{",
-			"0\"0\"{",
-			"000\"0000\"{",
-			"00\"0000\"{",
-			"0000\"0000\"{",
-			"\"000\"{",
-			"00\"00\"{",
-			"00\"0000\"00{",
-			"0\"0000\"00{",
-			"00\"\"{",
-			"0000\"0\"{",
-			"000\"000\"{",
-			"\"00000000\"{",
-			`0000"00"00000000"000000000"00"000000000000000"00000"00000": "00"0"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
-		*/
-
+		"00\"0000\"0{",
+		"6\",\n\t\t\t\"something\": " +
+			"null\n\t\t},\n\t\t{\n\t\t\t\"id" +
+			"\": 12,\n\t\t\t\"full_name" +
+			"\": \"Brenton Bauch Ph" +
+			"D\",\n\t\t\t\"email\": \"ren" +
+			"ee@miller.co\",\n\t\t\t\"_" +
+			"_twitter_id\": 1\n\t\t}," +
+			"\n\t\t{\n\t\t\t\"id\": 13,\n\t\t" +
+			"\t\"full_name\": \"Daine" +
+			" Gleichner\",\n\t\t\t\"ema" +
+			"il\": \"andrea@gmail.c" +
+			"om\",\n\t\t\t\"__twitter_i" +
+			"d\": \"\",\n\t\t\t\"id__twit" +
+			"ter_id\": \"NOOO\",\n\t\t\t" +
+			"\"work_email\": \"andre" +
+			"a@nienow.co\"\n\t\t}\n\t]}" +
+			"\n\t}",
+		"0000\"0000\"0{",
+		"0000\"\"{",
+		"0000\"000\"{",
+		"0\"\"{",
+		"\"0\"{",
+		"000\"0\"{",
+		"0\"0000\"0{",
+		"000\"\"{",
+		"0\"00\"{",
+		"000\"0000\"0{",
+		"000\"00\"{",
+		"\"\"{",
+		"0\"0000\"{",
+		"\"000\"00{",
+		"0000\"00\"{",
+		"00\"0\"{",
+		"0\"0\"{",
+		"000\"0000\"{",
+		"00\"0000\"{",
+		"0000\"0000\"{",
+		"\"000\"{",
+		"00\"00\"{",
+		"00\"0000\"00{",
+		"0\"0000\"00{",
+		"00\"\"{",
+		"0000\"0\"{",
+		"000\"000\"{",
+		"\"00000000\"{",
+		`0000"00"00000000"000000000"00"000000000000000"00000"00000": "00"0"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
 		`0000"000000000000000000000000000000000000"00000000"000000000"00"000000000000000"00000"00000": "00000000000000"00000"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
 		`00"__twitter_id":[{ "name": "hello" }, { "name": "world"}]`,
 		"\"\xb0\xef\xbd\xe3\xbd\xef\x99\xe3\xbd\xef\xbd\xef\xbd\xef\xbd\xe5\x99\xe3\xbd" +
11 jsn/get.go
@@ -1,7 +1,7 @@
 package jsn
 
 import (
-	"github.com/cespare/xxhash/v2"
+	"hash/maphash"
 )
 
 const (
@@ -41,9 +41,12 @@ func Value(b []byte) []byte {
 // Keys function fetches values for the provided keys
 func Get(b []byte, keys [][]byte) []Field {
 	kmap := make(map[uint64]struct{}, len(keys))
+	h := maphash.Hash{}
 
 	for i := range keys {
-		kmap[xxhash.Sum64(keys[i])] = struct{}{}
+		_, _ = h.Write(keys[i])
+		kmap[h.Sum64()] = struct{}{}
+		h.Reset()
 	}
 
 	res := make([]Field, 0, 20)
@@ -141,7 +144,9 @@ func Get(b []byte, keys [][]byte) []Field {
 		}
 
 		if e != 0 {
-			_, ok := kmap[xxhash.Sum64(k)]
+			_, _ = h.Write(k)
+			_, ok := kmap[h.Sum64()]
+			h.Reset()
 
 			if ok {
 				res = append(res, Field{k, b[s:(e + 1)]})
@@ -3,8 +3,7 @@ package jsn
 import (
 	"bytes"
 	"errors"
-
-	"github.com/cespare/xxhash/v2"
+	"hash/maphash"
 )
 
 // Replace function replaces key-value pairs provided in the `from` argument with those in the `to` argument
@@ -18,7 +17,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
 		return err
 	}
 
-	h := xxhash.New()
+	h := maphash.Hash{}
 	tmap := make(map[uint64]int, len(from))
 
 	for i, f := range from {