Compare commits


15 Commits

53 changed files with 1752 additions and 1024 deletions

View File

@ -12,8 +12,7 @@ FROM golang:1.14-alpine as go-build
RUN apk update && \
apk add --no-cache make && \
apk add --no-cache git && \
apk add --no-cache jq && \
apk add --no-cache upx=3.95-r2
apk add --no-cache jq
RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc

View File

@ -49,6 +49,7 @@ import (
"crypto/sha256"
"database/sql"
"encoding/json"
"hash/maphash"
_log "log"
"os"
@ -83,10 +84,11 @@ type SuperGraph struct {
schema *psql.DBSchema
allowList *allow.List
encKey [32]byte
prepared map[string]*preparedItem
hashSeed maphash.Seed
queries map[uint64]query
roles map[string]*Role
getRole *sql.Stmt
rmap map[uint64]*resolvFn
rmap map[uint64]resolvFn
abacEnabled bool
anonExists bool
qc *qcode.Compiler
@ -107,10 +109,11 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
}
sg := &SuperGraph{
conf: conf,
db: db,
dbinfo: dbinfo,
log: _log.New(os.Stdout, "", 0),
conf: conf,
db: db,
dbinfo: dbinfo,
log: _log.New(os.Stdout, "", 0),
hashSeed: maphash.MakeSeed(),
}
if err := sg.initConfig(); err != nil {
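The `prepared map[string]*preparedItem` field gives way to `queries map[uint64]query`, keyed by a `maphash` sum with a per-process random seed kept on `SuperGraph`. A minimal sketch of how such a key can be derived, assuming the caller has already done `h.SetSeed(sg.hashSeed)` (the real `queryID` helper used later in this diff may differ in detail):

import "hash/maphash"

// queryID folds a query name and role into a uint64 map key.
func queryID(h *maphash.Hash, name, role string) uint64 {
	_, _ = h.WriteString(name)
	_, _ = h.WriteString(role)
	v := h.Sum64()
	h.Reset() // Reset discards the written bytes but keeps the seed
	return v
}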

View File

@ -12,7 +12,8 @@ import (
// to a prepared statement.
func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
vars := make([]interface{}, len(md.Params))
params := md.Params()
vars := make([]interface{}, len(params))
var fields map[string]json.RawMessage
var err error
@ -25,7 +26,7 @@ func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
}
}
for i, p := range md.Params {
for i, p := range params {
switch p.Name {
case "user_id":
if v := c.Value(UserIDKey); v != nil {

View File

@ -88,6 +88,7 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
stmts := make([]stmt, 0, len(sg.conf.Roles))
w := &bytes.Buffer{}
md := psql.Metadata{}
for i := 0; i < len(sg.conf.Roles); i++ {
role := &sg.conf.Roles[i]
@ -105,16 +106,18 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
stmts = append(stmts, stmt{role: role, qc: qc})
s := &stmts[len(stmts)-1]
s.md, err = sg.pc.Compile(w, qc, psql.Variables(vm))
md, err = sg.pc.CompileWithMetadata(w, qc, psql.Variables(vm), md)
if err != nil {
return nil, err
}
s.sql = w.String()
s.md = md
w.Reset()
}
sql, err := sg.renderUserQuery(stmts)
sql, err := sg.renderUserQuery(md, stmts)
if err != nil {
return nil, err
}
@ -124,7 +127,7 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
}
//nolint: errcheck
func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, error) {
w := &bytes.Buffer{}
io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)
@ -142,7 +145,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
}
io.WriteString(w, `END) FROM (SELECT (CASE WHEN EXISTS (`)
io.WriteString(w, sg.conf.RolesQuery)
md.RenderVar(w, sg.conf.RolesQuery)
io.WriteString(w, `) THEN `)
io.WriteString(w, `(SELECT (CASE`)
@ -158,7 +161,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
}
io.WriteString(w, ` ELSE 'user' END) FROM (`)
io.WriteString(w, sg.conf.RolesQuery)
md.RenderVar(w, sg.conf.RolesQuery)
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler") AS "_sg_auth_info"(role) LIMIT 1; `)
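The switch from `io.WriteString` to `md.RenderVar` matters because the configured `roles_query` may reference request variables such as `$user_id`. A hedged sketch of what the call does (psql is an internal package, so this illustrates the behavior rather than a public API):

import (
	"bytes"

	"github.com/dosco/super-graph/core/internal/psql"
)

func renderVarExample() string {
	var md psql.Metadata
	w := &bytes.Buffer{}
	// Writes: SELECT id FROM users WHERE id = $1
	// and records Param{Name: "user_id"} in md, so argList can bind the
	// value at execution time; repeated names reuse the same $N index.
	md.RenderVar(w, `SELECT id FROM users WHERE id = $user_id`)
	return w.String()
}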

View File

@ -197,30 +197,26 @@ func (c *Config) AddRoleTable(role string, table string, conf interface{}) error
// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
// environment variable. This is the best way to create a new Super Graph config.
func ReadInConfig(configFile string) (*Config, error) {
cpath := path.Dir(configFile)
cfile := path.Base(configFile)
vi := newViper(cpath, cfile)
cp := path.Dir(configFile)
vi := newViper(cp, path.Base(configFile))
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
inherits := vi.GetString("inherits")
if inherits != "" {
vi = newViper(cpath, inherits)
if pcf := vi.GetString("inherits"); pcf != "" {
cf := vi.ConfigFileUsed()
vi = newViper(cp, pcf)
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
if vi.IsSet("inherits") {
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
inherits,
vi.GetString("inherits"))
if v := vi.GetString("inherits"); v != "" {
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)", pcf, v)
}
vi.SetConfigName(cfile)
vi.SetConfigFile(cf)
if err := vi.MergeInConfig(); err != nil {
return nil, err
@ -234,7 +230,7 @@ func ReadInConfig(configFile string) (*Config, error) {
}
if c.AllowListFile == "" {
c.AllowListFile = path.Join(cpath, "allow.list")
c.AllowListFile = path.Join(cp, "allow.list")
}
return c, nil
@ -248,7 +244,7 @@ func newViper(configPath, configFile string) *viper.Viper {
vi.AutomaticEnv()
if filepath.Ext(configFile) != "" {
vi.SetConfigFile(configFile)
vi.SetConfigFile(path.Join(configPath, configFile))
} else {
vi.SetConfigName(configFile)
vi.AddConfigPath(configPath)
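Caller-side usage is unchanged; only the inheritance handling is tightened and paths now resolve consistently against the directory of the file passed in. A sketch with hypothetical file names:

import (
	"log"

	"github.com/dosco/super-graph/core"
)

func loadConfig() *core.Config {
	// config/base.yml holds shared settings; config/dev.yml begins with
	// "inherits: base" and overrides a few keys. The child is merged over
	// the parent; a parent that itself sets "inherits" now fails fast.
	conf, err := core.ReadInConfig("./config/dev.yml")
	if err != nil {
		log.Fatal(err)
	}
	return conf
}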

View File

@ -5,6 +5,7 @@ import (
"database/sql"
"encoding/json"
"fmt"
"hash/maphash"
"time"
"github.com/dosco/super-graph/core/internal/psql"
@ -124,7 +125,7 @@ func (c *scontext) execQuery() ([]byte, error) {
return nil, err
}
if len(data) == 0 || st.md.Skipped == 0 {
if len(data) == 0 || st.md.Skipped() == 0 {
return data, nil
}
@ -165,32 +166,43 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
} else {
role = c.role
}
c.res.role = role
ps, ok := c.sg.prepared[stmtHash(c.res.name, role)]
h := maphash.Hash{}
h.SetSeed(c.sg.hashSeed)
q, ok := c.sg.queries[queryID(&h, c.res.name, role)]
if !ok {
return nil, nil, errNotFound
}
c.res.sql = ps.st.sql
if q.sd == nil {
q.Do(func() { c.sg.prepare(&q, role) })
if q.err != nil {
return nil, nil, q.err
}
}
c.res.sql = q.st.sql
var root []byte
var row *sql.Row
varsList, err := c.argList(ps.st.md)
varsList, err := c.argList(q.st.md)
if err != nil {
return nil, nil, err
}
if useTx {
row = tx.Stmt(ps.sd).QueryRow(varsList...)
row = tx.Stmt(q.sd).QueryRow(varsList...)
} else {
row = ps.sd.QueryRow(varsList...)
row = q.sd.QueryRow(varsList...)
}
if ps.roleArg {
if q.roleArg {
err = row.Scan(&role, &root)
} else {
err = row.Scan(&root)
@ -204,15 +216,15 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
if useTx {
if err := tx.Commit(); err != nil {
return nil, nil, err
return nil, nil, q.err
}
}
if root, err = c.sg.encryptCursor(ps.st.qc, root); err != nil {
if root, err = c.sg.encryptCursor(q.st.qc, root); err != nil {
return nil, nil, err
}
return root, &ps.st, nil
return root, &q.st, nil
}
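Statements are no longer prepared eagerly into a `prepared` map; each `query` entry is prepared on first use behind a `sync.Once`. The struct itself is not shown in this diff, but a sketch consistent with the fields used above (`q.Do`, `q.sd`, `q.st`, `q.roleArg`, `q.err`) would be:

// Assumed shape, inferred from usage in resolvePreparedSQL; the real
// definition may differ. Uses sync.Once and database/sql.
type query struct {
	sync.Once           // q.Do runs sg.prepare at most once
	sd      *sql.Stmt   // prepared statement, nil until first use
	st      stmt        // compiled SQL + qcode for this role
	roleArg bool        // statement also returns the resolved role
	err     error       // error captured from the one-time prepare
}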
func (c *scontext) resolveSQL() ([]byte, *stmt, error) {

View File

@ -74,14 +74,23 @@ func (sg *SuperGraph) initConfig() error {
}
if c.RolesQuery == "" {
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
sg.log.Printf("INF roles_query not defined: attribute based access control disabled")
} else {
n := 0
for k, v := range sg.roles {
if k == "user" || k == "anon" {
n++
} else if v.Match != "" {
n++
}
}
sg.abacEnabled = (n > 2)
if !sg.abacEnabled {
sg.log.Printf("WRN attribute based access control disabled: no custom roles found (with 'match' defined)")
}
}
_, userExists := sg.roles["user"]
_, sg.anonExists = sg.roles["anon"]
sg.abacEnabled = userExists && c.RolesQuery != ""
return nil
}

View File

@ -6,24 +6,27 @@ import (
"errors"
"fmt"
"io/ioutil"
"log"
"os"
"sort"
"strings"
"text/scanner"
"github.com/chirino/graphql/schema"
"github.com/dosco/super-graph/jsn"
)
const (
AL_QUERY int = iota + 1
AL_VARS
expComment = iota + 1
expVar
expQuery
)
type Item struct {
Name string
key string
Query string
Vars json.RawMessage
Vars string
Comment string
}
@ -35,6 +38,7 @@ type List struct {
type Config struct {
CreateIfNotExists bool
Persist bool
Log *log.Logger
}
func New(filename string, conf Config) (*List, error) {
@ -80,6 +84,12 @@ func New(filename string, conf Config) (*List, error) {
} else {
al.filepath = filename
}
if file, err := os.OpenFile(al.filepath, os.O_RDONLY|os.O_CREATE, 0644); err != nil {
return nil, err
} else {
file.Close()
}
}
var err error
@ -89,8 +99,10 @@ func New(filename string, conf Config) (*List, error) {
go func() {
for v := range al.saveChan {
if err = al.save(v); err != nil {
break
err := al.save(v)
if err != nil && conf.Log != nil {
conf.Log.Println("WRN allow list save:", err)
}
}
}()
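The background writer no longer gives up after the first failed save; it logs through the new optional `Config.Log` and keeps draining the channel. A hedged usage sketch (allow is an internal package, shown for illustration):

import (
	"log"
	"os"

	"github.com/dosco/super-graph/core/internal/allow"
)

func openAllowList() (*allow.List, error) {
	return allow.New("./config/allow.list", allow.Config{
		CreateIfNotExists: true, // the file is now also created up front
		Persist:           true,
		Log:               log.New(os.Stderr, "", 0), // receives "WRN allow list save: ..." lines
	})
}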
@ -116,121 +128,101 @@ func (al *List) Set(vars []byte, query, comment string) error {
return errors.New("empty query")
}
var q string
for i := 0; i < len(query); i++ {
c := query[i]
if c >= 'a' && c <= 'z' || c >= 'A' && c <= 'Z' {
q = query
break
} else if c == '{' {
q = "query " + query
break
}
}
al.saveChan <- Item{
Comment: comment,
Query: q,
Vars: vars,
Query: query,
Vars: string(vars),
}
return nil
}
func (al *List) Load() ([]Item, error) {
var list []Item
varString := "variables"
b, err := ioutil.ReadFile(al.filepath)
if err != nil {
return list, err
return nil, err
}
if len(b) == 0 {
return list, nil
return parse(string(b), al.filepath)
}
func parse(b string, filename string) ([]Item, error) {
var items []Item
var s scanner.Scanner
s.Init(strings.NewReader(b))
s.Filename = filename
s.Mode ^= scanner.SkipComments
var op, sp scanner.Position
var item Item
newComment := false
st := expComment
for tok := s.Scan(); tok != scanner.EOF; tok = s.Scan() {
txt := s.TokenText()
switch {
case strings.HasPrefix(txt, "/*"):
if st == expQuery {
v := b[sp.Offset:s.Pos().Offset]
item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
items = append(items, item)
}
item = Item{Comment: strings.TrimSpace(txt[2 : len(txt)-2])}
sp = s.Pos()
st = expComment
newComment = true
case !newComment && strings.HasPrefix(txt, "#"):
if st == expQuery {
v := b[sp.Offset:s.Pos().Offset]
item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
items = append(items, item)
}
item = Item{}
sp = s.Pos()
st = expComment
case strings.HasPrefix(txt, "variables"):
if st == expComment {
v := b[sp.Offset:s.Pos().Offset]
item.Comment = strings.TrimSpace(v[:strings.IndexByte(v, '\n')])
}
sp = s.Pos()
st = expVar
case isGraphQL(txt):
if st == expVar {
v := b[sp.Offset:s.Pos().Offset]
item.Vars = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
}
sp = op
st = expQuery
}
op = s.Pos()
}
var comment bytes.Buffer
var varBytes []byte
itemMap := make(map[string]struct{})
s, e, c := 0, 0, 0
ty := 0
for {
fq := false
if c == 0 && b[e] == '#' {
s = e
for e < len(b) && b[e] != '\n' {
e++
}
if (e - s) > 2 {
comment.Write(b[(s + 1):(e + 1)])
}
}
if e >= len(b) {
break
}
if matchPrefix(b, e, "query") || matchPrefix(b, e, "mutation") {
if c == 0 {
s = e
}
ty = AL_QUERY
} else if matchPrefix(b, e, varString) {
if c == 0 {
s = e + len(varString) + 1
}
ty = AL_VARS
} else if b[e] == '{' {
c++
} else if b[e] == '}' {
c--
if c == 0 {
if ty == AL_QUERY {
fq = true
} else if ty == AL_VARS {
varBytes = b[s:(e + 1)]
}
ty = 0
}
}
if fq {
query := string(b[s:(e + 1)])
name := QueryName(query)
key := strings.ToLower(name)
if _, ok := itemMap[key]; !ok {
v := Item{
Name: name,
key: key,
Query: query,
Vars: varBytes,
Comment: comment.String(),
}
list = append(list, v)
comment.Reset()
}
varBytes = nil
}
e++
if e >= len(b) {
break
}
if st == expQuery {
v := b[sp.Offset:s.Pos().Offset]
item.Query = strings.TrimSpace(v[:strings.LastIndexByte(v, '}')+1])
items = append(items, item)
}
return list, nil
for i := range items {
items[i].Name = QueryName(items[i].Query)
items[i].key = strings.ToLower(items[i].Name)
}
return items, nil
}
func isGraphQL(s string) bool {
return strings.HasPrefix(s, "query") ||
strings.HasPrefix(s, "mutation") ||
strings.HasPrefix(s, "subscription")
}
func (al *List) save(item Item) error {
@ -287,57 +279,43 @@ func (al *List) save(item Item) error {
return strings.Compare(list[i].key, list[j].key) == -1
})
for _, v := range list {
cmtLines := strings.Split(v.Comment, "\n")
i := 0
for _, c := range cmtLines {
if c = strings.TrimSpace(c); c == "" {
continue
}
_, err := f.WriteString(fmt.Sprintf("# %s\n", c))
if err != nil {
return err
}
i++
}
if i != 0 {
if _, err := f.WriteString("\n"); err != nil {
return err
}
} else {
if _, err := f.WriteString(fmt.Sprintf("# Query named %s\n\n", v.Name)); err != nil {
return err
}
}
if len(v.Vars) != 0 && !bytes.Equal(v.Vars, []byte("{}")) {
for i, v := range list {
var vars string
if v.Vars != "" {
buf.Reset()
if err := jsn.Clear(&buf, v.Vars); err != nil {
return fmt.Errorf("failed to clean vars: %w", err)
if err := jsn.Clear(&buf, []byte(v.Vars)); err != nil {
continue
}
vj := json.RawMessage(buf.Bytes())
vj, err = json.MarshalIndent(vj, "", " ")
if err != nil {
return fmt.Errorf("failed to marshal vars: %w", err)
if vj, err = json.MarshalIndent(vj, "", " "); err != nil {
continue
}
vars = string(vj)
}
list[i].Vars = vars
list[i].Comment = strings.TrimSpace(v.Comment)
}
_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
for _, v := range list {
if v.Comment != "" {
_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Comment))
} else {
_, err = f.WriteString(fmt.Sprintf("/* %s */\n\n", v.Name))
}
if err != nil {
return err
}
if v.Vars != "" {
_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", v.Vars))
if err != nil {
return err
}
}
if v.Query[0] == '{' {
_, err = f.WriteString(fmt.Sprintf("query %s\n\n", v.Query))
} else {
_, err = f.WriteString(fmt.Sprintf("%s\n\n", v.Query))
}
_, err = f.WriteString(fmt.Sprintf("%s\n\n", v.Query))
if err != nil {
return err
}
@ -346,18 +324,6 @@ func (al *List) save(item Item) error {
return nil
}
func matchPrefix(b []byte, i int, s string) bool {
if (len(b) - i) < len(s) {
return false
}
for n := 0; n < len(s); n++ {
if b[(i+n)] != s[n] {
return false
}
}
return true
}
func QueryName(b string) string {
state, s := 0, 0

View File

@ -82,3 +82,160 @@ func TestGQLName5(t *testing.T) {
t.Fatal("Name should be empty, not ", name)
}
}
func TestParse1(t *testing.T) {
var al = `
# Hello world
variables {
"data": {
"slug": "",
"body": "",
"post": {
"connect": {
"slug": ""
}
}
}
}
mutation createComment {
comment(insert: $data) {
slug
body
createdAt: created_at
totalVotes: cached_votes_total
totalReplies: cached_replies_total
vote: comment_vote(where: {user_id: {eq: $user_id}}) {
created_at
__typename
}
author: user {
slug
firstName: first_name
lastName: last_name
pictureURL: picture_url
bio
__typename
}
__typename
}
}
# Query named createPost
query createPost {
post(insert: $data) {
slug
body
published
createdAt: created_at
totalVotes: cached_votes_total
totalComments: cached_comments_total
vote: post_vote(where: {user_id: {eq: $user_id}}) {
created_at
__typename
}
author: user {
slug
firstName: first_name
lastName: last_name
pictureURL: picture_url
bio
__typename
}
__typename
}
}`
_, err := parse(al, "allow.list")
if err != nil {
t.Fatal(err)
}
}
func TestParse2(t *testing.T) {
var al = `
/* Hello world */
variables {
"data": {
"slug": "",
"body": "",
"post": {
"connect": {
"slug": ""
}
}
}
}
mutation createComment {
comment(insert: $data) {
slug
body
createdAt: created_at
totalVotes: cached_votes_total
totalReplies: cached_replies_total
vote: comment_vote(where: {user_id: {eq: $user_id}}) {
created_at
__typename
}
author: user {
slug
firstName: first_name
lastName: last_name
pictureURL: picture_url
bio
__typename
}
__typename
}
}
/*
Query named createPost
*/
variables {
"data": {
"thread": {
"connect": {
"slug": ""
}
},
"slug": "",
"published": false,
"body": ""
}
}
query createPost {
post(insert: $data) {
slug
body
published
createdAt: created_at
totalVotes: cached_votes_total
totalComments: cached_comments_total
vote: post_vote(where: {user_id: {eq: $user_id}}) {
created_at
__typename
}
author: user {
slug
firstName: first_name
lastName: last_name
pictureURL: picture_url
bio
__typename
}
__typename
}
}`
_, err := parse(al, "allow.list")
if err != nil {
t.Fatal(err)
}
}

View File

@ -1,4 +1,3 @@
//nolint:errcheck
package psql
import (
@ -112,15 +111,15 @@ func (c *compilerContext) renderColumnSearchRank(sel *qcode.Select, ti *DBTableI
c.renderComma(columnsRendered)
//fmt.Fprintf(w, `ts_rank("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
//c.sel.Name, cn, arg.Val, col.Name)
io.WriteString(c.w, `ts_rank(`)
_, _ = io.WriteString(c.w, `ts_rank(`)
colWithTable(c.w, ti.Name, cn)
if c.schema.ver >= 110000 {
io.WriteString(c.w, `, websearch_to_tsquery(`)
_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
} else {
io.WriteString(c.w, `, to_tsquery(`)
_, _ = io.WriteString(c.w, `, to_tsquery(`)
}
c.renderValueExp(Param{Name: arg.Val, Type: "string"})
io.WriteString(c.w, `))`)
c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
_, _ = io.WriteString(c.w, `))`)
alias(c.w, col.Name)
return nil
@ -137,15 +136,15 @@ func (c *compilerContext) renderColumnSearchHeadline(sel *qcode.Select, ti *DBTa
c.renderComma(columnsRendered)
//fmt.Fprintf(w, `ts_headline("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
//c.sel.Name, cn, arg.Val, col.Name)
io.WriteString(c.w, `ts_headline(`)
_, _ = io.WriteString(c.w, `ts_headline(`)
colWithTable(c.w, ti.Name, cn)
if c.schema.ver >= 110000 {
io.WriteString(c.w, `, websearch_to_tsquery(`)
_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
} else {
io.WriteString(c.w, `, to_tsquery(`)
_, _ = io.WriteString(c.w, `, to_tsquery(`)
}
c.renderValueExp(Param{Name: arg.Val, Type: "string"})
io.WriteString(c.w, `))`)
c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
_, _ = io.WriteString(c.w, `))`)
alias(c.w, col.Name)
return nil
@ -157,9 +156,9 @@ func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInf
}
c.renderComma(columnsRendered)
io.WriteString(c.w, `(`)
_, _ = io.WriteString(c.w, `(`)
squoted(c.w, ti.Name)
io.WriteString(c.w, ` :: text)`)
_, _ = io.WriteString(c.w, ` :: text)`)
alias(c.w, col.Name)
return nil
@ -169,9 +168,9 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
pl := funcPrefixLen(c.schema.fm, col.Name)
// if pl == 0 {
// //fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
// io.WriteString(c.w, `'`)
// io.WriteString(c.w, col.Name)
// io.WriteString(c.w, ` not defined'`)
// _, _ = io.WriteString(c.w, `'`)
// _, _ = io.WriteString(c.w, col.Name)
// _, _ = io.WriteString(c.w, ` not defined'`)
// alias(c.w, col.Name)
// }
@ -190,10 +189,10 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
c.renderComma(columnsRendered)
//fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Name, cn, col.Name)
io.WriteString(c.w, fn)
io.WriteString(c.w, `(`)
_, _ = io.WriteString(c.w, fn)
_, _ = io.WriteString(c.w, `(`)
colWithTable(c.w, ti.Name, cn)
io.WriteString(c.w, `)`)
_, _ = io.WriteString(c.w, `)`)
alias(c.w, col.Name)
return nil
@ -201,7 +200,7 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
func (c *compilerContext) renderComma(columnsRendered int) {
if columnsRendered != 0 {
io.WriteString(c.w, `, `)
_, _ = io.WriteString(c.w, `, `)
}
}

View File

@ -11,7 +11,7 @@ import (
var (
qcompileTest, _ = qcode.NewCompiler(qcode.Config{})
schema = GetTestSchema()
schema, _ = GetTestSchema()
vars = map[string]string{
"admin_account_id": "5",
@ -25,6 +25,37 @@ var (
// FuzzerEntrypoint for Fuzzbuzz
func Fuzz(data []byte) int {
err1 := query(data)
err2 := insert(data)
err3 := update(data)
err4 := delete(data)
if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
return 0
}
return 1
}
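For context on the return values: under dvyukov/go-fuzz the harness result steers the corpus, so splitting the single mutation test into `query`, `insert`, `update` and `delete` lets one input exercise all four compile paths before being scored.

// go-fuzz's documented contract for Fuzz return values:
//    1 -> input was interesting; prioritize it for further mutation
//    0 -> input was acceptable but not interesting
//   -1 -> input must not be added to the corpus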
func query(data []byte) error {
gql := data
qc, err1 := qcompileTest.Compile(gql, "user")
vars := map[string]json.RawMessage{
"data": json.RawMessage(data),
}
_, _, err2 := pcompileTest.CompileEx(qc, vars)
if err1 != nil {
return err1
} else {
return err2
}
}
func insert(data []byte) error {
gql := `mutation {
product(insert: $data) {
id
@ -47,9 +78,57 @@ func Fuzz(data []byte) int {
}
_, _, err = pcompileTest.CompileEx(qc, vars)
return err
}
func update(data []byte) error {
gql := `mutation {
product(insert: $data) {
id
name
user {
id
full_name
email
}
}
}`
qc, err := qcompileTest.Compile([]byte(gql), "user")
if err != nil {
return 0
panic("qcompile can't fail")
}
return 1
vars := map[string]json.RawMessage{
"data": json.RawMessage(data),
}
_, _, err = pcompileTest.CompileEx(qc, vars)
return err
}
func delete(data []byte) error {
gql := `mutation {
product(insert: $data) {
id
name
user {
id
full_name
email
}
}
}`
qc, err := qcompileTest.Compile([]byte(gql), "user")
if err != nil {
panic("qcompile can't fail")
}
vars := map[string]json.RawMessage{
"data": json.RawMessage(data),
}
_, _, err = pcompileTest.CompileEx(qc, vars)
return err
}

View File

@ -0,0 +1,20 @@
// +build gofuzz
package psql
import (
"testing"
)
var ret int
func TestFuzzCrashers(t *testing.T) {
var crashers = []string{
"{\"connect\":{}}",
"q(q{q{q{q{q{q{q{q{",
}
for _, f := range crashers {
ret = Fuzz([]byte(f))
}
}

View File

@ -25,7 +25,7 @@ func (c *compilerContext) renderInsert(
if insert[0] == '[' {
io.WriteString(c.w, `json_array_elements(`)
}
c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
io.WriteString(c.w, ` :: json`)
if insert[0] == '[' {
io.WriteString(c.w, `)`)

View File

@ -0,0 +1,61 @@
package psql
import (
"io"
)
func (md *Metadata) RenderVar(w io.Writer, vv string) {
f, s := -1, 0
for i := range vv {
v := vv[i]
switch {
case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
if (i - s) > 0 {
_, _ = io.WriteString(w, vv[s:i])
}
f = i
case (v < 'a' || v > 'z') &&
(v < 'A' || v > 'Z') &&
(v < '0' || v > '9') &&
v != '_' &&
f != -1 &&
(i-f) > 1:
md.renderValueExp(w, Param{Name: vv[f+1 : i]})
s = i
f = -1
}
}
if f != -1 && (len(vv)-f) > 1 {
md.renderValueExp(w, Param{Name: vv[f+1:]})
} else {
_, _ = io.WriteString(w, vv[s:])
}
}
func (md *Metadata) renderValueExp(w io.Writer, p Param) {
_, _ = io.WriteString(w, `$`)
if v, ok := md.pindex[p.Name]; ok {
int32String(w, int32(v))
} else {
md.params = append(md.params, p)
n := len(md.params)
if md.pindex == nil {
md.pindex = make(map[string]int)
}
md.pindex[p.Name] = n
int32String(w, int32(n))
}
}
func (md Metadata) Skipped() uint32 {
return md.skipped
}
func (md Metadata) Params() []Param {
return md.params
}
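`Skipped()` exposes the bitmask that query compilation fills in (see `c.md.skipped |= (1 << uint(id))` later in this diff), one bit per child selector deferred for separate resolution. A sketch of the consuming side, matching the `hasBit` calls in the query compiler (implementation assumed):

// hasBit reports whether bit pos of n is set; a set bit i means child
// selector i was skipped during SQL generation and is resolved later.
func hasBit(n uint32, pos uint32) bool {
	return (n & (1 << pos)) > 0
}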

View File

@ -432,11 +432,11 @@ func (c *compilerContext) renderInsertUpdateColumns(
val := root.PresetMap[cn]
switch {
case ok && len(val) > 1 && val[0] == '$':
c.renderValueExp(Param{Name: val[1:], Type: col.Type})
c.md.renderValueExp(c.w, Param{Name: val[1:], Type: col.Type})
case ok && strings.HasPrefix(val, "sql:"):
io.WriteString(c.w, `(`)
c.renderVar(val[4:], c.renderValueExp)
c.md.RenderVar(c.w, val[4:])
io.WriteString(c.w, `)`)
case ok:
@ -542,6 +542,10 @@ func (c *compilerContext) renderConnectStmt(qc *qcode.QCode, w io.Writer,
rel := item.relPC
if rel == nil {
return errors.New("invalid connect value")
}
// Render this only for a one-to-one parent-to-child relationship.
// For this to work the child needs to be found first so its primary key
// can be set in the related column on the parent object.
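For reference, the preset shapes that `renderInsertUpdateColumns` above now routes through `Metadata` (column names and values here are hypothetical):

// Illustrative role preset values and how each is rendered:
//   "created_at": "now"          // plain literal, emitted as-is
//   "user_id":    "$user_id"     // leading '$': bound via renderValueExp
//   "updated_at": "sql: now()"   // "sql:" prefix: raw SQL via md.RenderVar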

View File

@ -7,6 +7,7 @@ import (
"errors"
"fmt"
"io"
"strconv"
"strings"
"github.com/dosco/super-graph/core/internal/qcode"
@ -24,8 +25,8 @@ type Param struct {
}
type Metadata struct {
Skipped uint32
Params []Param
skipped uint32
params []Param
pindex map[string]int
}
@ -79,22 +80,30 @@ func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (Metadata, []byte
}
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
return co.CompileWithMetadata(w, qc, vars, Metadata{})
}
func (co *Compiler) CompileWithMetadata(w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {
md.skipped = 0
if qc == nil {
return md, fmt.Errorf("qcode is nil")
}
switch qc.Type {
case qcode.QTQuery:
return co.compileQuery(w, qc, vars)
return co.compileQueryWithMetadata(w, qc, vars, md)
case qcode.QTInsert,
qcode.QTUpdate,
qcode.QTDelete,
qcode.QTUpsert:
return co.compileMutation(w, qc, vars)
default:
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
}
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
}
func (co *Compiler) compileQuery(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
return co.compileQueryWithMetadata(w, qc, vars, Metadata{})
}
func (co *Compiler) compileQueryWithMetadata(
@ -171,7 +180,7 @@ func (co *Compiler) compileQueryWithMetadata(
}
for _, cid := range sel.Children {
if hasBit(c.md.Skipped, uint32(cid)) {
if hasBit(c.md.skipped, uint32(cid)) {
continue
}
child := &c.s[cid]
@ -211,10 +220,8 @@ func (co *Compiler) compileQueryWithMetadata(
}
if len(sel.Args) != 0 {
i := 0
for _, v := range sel.Args {
qcode.FreeNode(v, 500)
i++
qcode.FreeNode(v)
}
}
}
@ -349,7 +356,7 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
if _, ok := colmap[rel.Left.Col]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
colmap[rel.Left.Col] = struct{}{}
c.md.Skipped |= (1 << uint(id))
c.md.skipped |= (1 << uint(id))
}
default:
@ -617,7 +624,7 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
i := colsRendered
for _, id := range sel.Children {
if hasBit(c.md.Skipped, uint32(id)) {
if hasBit(c.md.skipped, uint32(id)) {
continue
}
childSel := &c.s[id]
@ -799,7 +806,7 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
quoted(c.w, ob.Col)
}
io.WriteString(c.w, ` FROM string_to_array(`)
c.renderValueExp(Param{Name: "cursor", Type: "json"})
c.md.renderValueExp(c.w, Param{Name: "cursor", Type: "json"})
io.WriteString(c.w, `, ',') as a) `)
return nil
}
@ -1097,7 +1104,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
} else {
io.WriteString(c.w, `) @@ to_tsquery(`)
}
c.renderValueExp(Param{Name: ex.Val, Type: "string"})
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: "string"})
io.WriteString(c.w, `))`)
return nil
@ -1186,7 +1193,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
switch {
case ok && strings.HasPrefix(val, "sql:"):
io.WriteString(c.w, `(`)
c.renderVar(val[4:], c.renderValueExp)
c.md.RenderVar(c.w, val[4:])
io.WriteString(c.w, `)`)
case ok:
@ -1194,7 +1201,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
case ex.Op == qcode.OpIn || ex.Op == qcode.OpNotIn:
io.WriteString(c.w, `(ARRAY(SELECT json_array_elements_text(`)
c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: true})
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: true})
io.WriteString(c.w, `))`)
io.WriteString(c.w, ` :: `)
@ -1203,7 +1210,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
return
default:
c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: false})
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: false})
}
case qcode.ValRef:
@ -1217,54 +1224,6 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
io.WriteString(c.w, col.Type)
}
func (c *compilerContext) renderValueExp(p Param) {
io.WriteString(c.w, `$`)
if v, ok := c.md.pindex[p.Name]; ok {
int32String(c.w, int32(v))
} else {
c.md.Params = append(c.md.Params, p)
n := len(c.md.Params)
if c.md.pindex == nil {
c.md.pindex = make(map[string]int)
}
c.md.pindex[p.Name] = n
int32String(c.w, int32(n))
}
}
func (c *compilerContext) renderVar(vv string, fn func(Param)) {
f, s := -1, 0
for i := range vv {
v := vv[i]
switch {
case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
if (i - s) > 0 {
io.WriteString(c.w, vv[s:i])
}
f = i
case (v < 'a' && v > 'z') &&
(v < 'A' && v > 'Z') &&
(v < '0' && v > '9') &&
v != '_' &&
f != -1 &&
(i-f) > 1:
fn(Param{Name: vv[f+1 : i]})
s = i
f = -1
}
}
if f != -1 && (len(vv)-f) > 1 {
fn(Param{Name: vv[f+1:]})
} else {
io.WriteString(c.w, vv[s:])
}
}
func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
switch {
case strings.HasPrefix(fn, "avg_"):
@ -1352,26 +1311,6 @@ func squoted(w io.Writer, identifier string) {
io.WriteString(w, `'`)
}
const charset = "0123456789"
func int32String(w io.Writer, val int32) {
if val < 10 {
w.Write([]byte{charset[val]})
return
}
temp := int32(0)
val2 := val
for val2 > 0 {
temp *= 10
temp += val2 % 10
val2 = int32(float64(val2 / 10))
}
val3 := temp
for val3 > 0 {
d := val3 % 10
val3 /= 10
w.Write([]byte{charset[d]})
}
io.WriteString(w, strconv.FormatInt(int64(val), 10))
}

View File

@ -307,6 +307,80 @@ func multiRoot(t *testing.T) {
compileGQLToPSQL(t, gql, nil, "user")
}
func withFragment1(t *testing.T) {
gql := `
fragment userFields1 on user {
id
email
}
query {
users {
...userFields2
created_at
...userFields1
}
}
fragment userFields2 on user {
first_name
last_name
}`
compileGQLToPSQL(t, gql, nil, "anon")
}
func withFragment2(t *testing.T) {
gql := `
query {
users {
...userFields2
created_at
...userFields1
}
}
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}`
compileGQLToPSQL(t, gql, nil, "anon")
}
func withFragment3(t *testing.T) {
gql := `
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}
query {
users {
...userFields2
created_at
...userFields1
}
}
`
compileGQLToPSQL(t, gql, nil, "anon")
}
func withCursor(t *testing.T) {
gql := `query {
Products(
@ -400,6 +474,9 @@ func TestCompileQuery(t *testing.T) {
t.Run("queryWithVariables", queryWithVariables)
t.Run("withWhereOnRelations", withWhereOnRelations)
t.Run("multiRoot", multiRoot)
t.Run("withFragment1", withFragment1)
t.Run("withFragment2", withFragment2)
t.Run("withFragment3", withFragment3)
t.Run("jsonColumnAsTable", jsonColumnAsTable)
t.Run("withCursor", withCursor)
t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)

View File

@ -86,6 +86,12 @@ SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT t
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/multiRoot
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment1
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment2
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment3
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/jsonColumnAsTable
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withCursor
@ -117,6 +123,9 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
--- PASS: TestCompileQuery/queryWithVariables (0.00s)
--- PASS: TestCompileQuery/withWhereOnRelations (0.00s)
--- PASS: TestCompileQuery/multiRoot (0.00s)
--- PASS: TestCompileQuery/withFragment1 (0.00s)
--- PASS: TestCompileQuery/withFragment2 (0.00s)
--- PASS: TestCompileQuery/withFragment3 (0.00s)
--- PASS: TestCompileQuery/jsonColumnAsTable (0.00s)
--- PASS: TestCompileQuery/withCursor (0.00s)
--- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s)
@ -151,4 +160,4 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALU
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
ok github.com/dosco/super-graph/core/internal/psql (cached)
ok github.com/dosco/super-graph/core/internal/psql 0.374s

View File

@ -22,7 +22,7 @@ func (c *compilerContext) renderUpdate(
}
io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
// io.WriteString(c.w, qc.ActionVar)
io.WriteString(c.w, ` :: json AS j)`)

View File

@ -0,0 +1,11 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16 120888 9236 ns/op 3755 B/op 28 allocs/op
BenchmarkQCompileP-16 502248 2620 ns/op 3795 B/op 28 allocs/op
BenchmarkParse-16 128370 9294 ns/op 3902 B/op 18 allocs/op
BenchmarkParseP-16 575752 2340 ns/op 3903 B/op 18 allocs/op
BenchmarkSchemaParse-16 212048 5779 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP-16 630918 1686 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 7.710s

View File

@ -0,0 +1,13 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16 118282 9686 ns/op 4031 B/op 30 allocs/op
BenchmarkQCompileP-16 427531 2710 ns/op 4077 B/op 30 allocs/op
BenchmarkQCompileFragment-16 140588 8328 ns/op 8903 B/op 13 allocs/op
BenchmarkParse-16 131396 9212 ns/op 4175 B/op 18 allocs/op
BenchmarkParseP-16 503778 2310 ns/op 4176 B/op 18 allocs/op
BenchmarkParseFragment-16 143725 8158 ns/op 10193 B/op 9 allocs/op
BenchmarkSchemaParse-16 240609 5060 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP-16 785116 1534 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 11.092s

View File

@ -11,15 +11,18 @@ import (
var (
queryToken = []byte("query")
mutationToken = []byte("mutation")
fragmentToken = []byte("fragment")
subscriptionToken = []byte("subscription")
onToken = []byte("on")
trueToken = []byte("true")
falseToken = []byte("false")
quotesToken = []byte(`'"`)
signsToken = []byte(`+-`)
punctuatorToken = []byte(`!():=[]{|}`)
spreadToken = []byte(`...`)
digitToken = []byte(`0123456789`)
dotToken = []byte(`.`)
punctuatorToken = `!():=[]{|}`
)
// Pos represents a byte position in the original input text from which
@ -43,6 +46,8 @@ const (
itemName
itemQuery
itemMutation
itemFragment
itemOn
itemSub
itemPunctuator
itemArgsOpen
@ -263,11 +268,11 @@ func lexRoot(l *lexer) stateFn {
l.backup()
return lexString
case r == '.':
if len(l.input) >= 3 {
if equals(l.input, 0, 3, spreadToken) {
l.emit(itemSpread)
return lexRoot
}
l.acceptRun(dotToken)
s, e := l.current()
if equals(l.input, s, e, spreadToken) {
l.emit(itemSpread)
return lexRoot
}
fallthrough // '.' can start a number.
case r == '+' || r == '-' || ('0' <= r && r <= '9'):
@ -299,10 +304,14 @@ func lexName(l *lexer) stateFn {
switch {
case equals(l.input, s, e, queryToken):
l.emitL(itemQuery)
case equals(l.input, s, e, fragmentToken):
l.emitL(itemFragment)
case equals(l.input, s, e, mutationToken):
l.emitL(itemMutation)
case equals(l.input, s, e, subscriptionToken):
l.emitL(itemSub)
case equals(l.input, s, e, onToken):
l.emitL(itemOn)
case equals(l.input, s, e, trueToken):
l.emitL(itemBoolVal)
case equals(l.input, s, e, falseToken):
@ -396,31 +405,11 @@ func isAlphaNumeric(r rune) bool {
}
func equals(b []byte, s Pos, e Pos, val []byte) bool {
n := 0
for i := s; i < e; i++ {
if n >= len(val) {
return true
}
switch {
case b[i] >= 'A' && b[i] <= 'Z' && ('a'+(b[i]-'A')) != val[n]:
return false
case b[i] != val[n]:
return false
}
n++
}
return true
return bytes.EqualFold(b[s:e], val)
}
func contains(b []byte, s Pos, e Pos, val []byte) bool {
for i := s; i < e; i++ {
for n := 0; n < len(val); n++ {
if b[i] == val[n] {
return true
}
}
}
return false
func contains(b []byte, s Pos, e Pos, chars string) bool {
return bytes.ContainsAny(b[s:e], chars)
}
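Both helpers now delegate to the standard library, which makes the keyword match explicitly case-insensitive and the punctuator scan a single call. A small in-package sketch:

func lexHelpersExample() {
	// equals matches a token span against a keyword, ignoring case.
	_ = equals([]byte("QUERY getUser {"), 0, 5, queryToken) // true
	// contains reports whether the span holds any punctuator rune.
	_ = contains([]byte("users(id: 1)"), 0, 12, punctuatorToken) // true: '(', ':', ')'
}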
func lowercase(b []byte, s Pos, e Pos) {

View File

@ -3,10 +3,9 @@ package qcode
import (
"errors"
"fmt"
"hash/maphash"
"sync"
"unsafe"
"github.com/dosco/super-graph/core/internal/util"
)
var (
@ -35,8 +34,7 @@ const (
NodeVar
)
type Operation struct {
Type parserType
type SelectionSet struct {
Name string
Args []Arg
argsA [10]Arg
@ -44,12 +42,29 @@ type Operation struct {
fieldsA [10]Field
}
type Operation struct {
Type parserType
SelectionSet
}
var zeroOperation = Operation{}
func (o *Operation) Reset() {
*o = zeroOperation
}
type Fragment struct {
Name string
On string
SelectionSet
}
var zeroFragment = Fragment{}
func (f *Fragment) Reset() {
*f = zeroFragment
}
type Field struct {
ID int32
ParentID int32
@ -64,6 +79,7 @@ type Field struct {
type Arg struct {
Name string
Val *Node
df bool
}
type Node struct {
@ -82,6 +98,8 @@ func (n *Node) Reset() {
}
type Parser struct {
frags map[uint64]*Fragment
h maphash.Hash
input []byte // the string being scanned
pos int
items []item
@ -96,12 +114,194 @@ var opPool = sync.Pool{
New: func() interface{} { return new(Operation) },
}
var fragPool = sync.Pool{
New: func() interface{} { return new(Fragment) },
}
var lexPool = sync.Pool{
New: func() interface{} { return new(lexer) },
}
func Parse(gql []byte) (*Operation, error) {
return parseSelectionSet(gql)
var err error
if len(gql) == 0 {
return nil, errors.New("blank query")
}
l := lexPool.Get().(*lexer)
l.Reset()
defer lexPool.Put(l)
if err = lex(l, gql); err != nil {
return nil, err
}
p := &Parser{
input: l.input,
pos: -1,
items: l.items,
}
op := opPool.Get().(*Operation)
op.Reset()
op.Fields = op.fieldsA[:0]
s := -1
qf := false
for {
if p.peek(itemEOF) {
p.ignore()
break
}
if p.peek(itemFragment) {
p.ignore()
if err = p.parseFragment(op); err != nil {
return nil, err
}
} else {
if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
s = p.pos
qf = true
}
p.ignore()
}
}
p.reset(s)
if err := p.parseOp(op); err != nil {
return nil, err
}
return op, nil
}
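Parse is now two-pass: the first loop collects every fragment definition wherever it appears and remembers where the operation starts; the parser then rewinds (`p.reset(s)`) and parses the operation itself, so fragments may be declared before or after their use. A usage sketch (qcode is an internal package, shown for illustration):

import (
	"log"

	"github.com/dosco/super-graph/core/internal/qcode"
)

func parseWithFragments() {
	op, err := qcode.Parse([]byte(`
		query { users { ...userFields created_at } }
		fragment userFields on user { id email }`))
	if err != nil {
		log.Fatal(err)
	}
	// op.Fields holds created_at plus the spliced fragment fields.
	_ = op
}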
func (p *Parser) parseFragment(op *Operation) error {
frag := fragPool.Get().(*Fragment)
frag.Reset()
frag.Fields = frag.SelectionSet.fieldsA[:0]
frag.Args = frag.SelectionSet.argsA[:0]
if p.peek(itemName) {
frag.Name = p.val(p.next())
}
if p.peek(itemOn) {
p.ignore()
} else {
return errors.New("fragment: missing 'on' keyword")
}
if p.peek(itemName) {
frag.On = p.vall(p.next())
} else {
return errors.New("fragment: missing table name after 'on' keyword")
}
if p.peek(itemObjOpen) {
p.ignore()
} else {
return fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
}
if err := p.parseSelectionSet(&frag.SelectionSet); err != nil {
return fmt.Errorf("fragment: %v", err)
}
if p.frags == nil {
p.frags = make(map[uint64]*Fragment)
}
_, _ = p.h.WriteString(frag.Name)
k := p.h.Sum64()
p.h.Reset()
p.frags[k] = frag
return nil
}
func (p *Parser) parseOp(op *Operation) error {
var err error
var typeSet bool
if p.peek(itemQuery, itemMutation, itemSub) {
err = p.parseOpTypeAndArgs(op)
if err != nil {
return fmt.Errorf("%s: %v", op.Type, err)
}
typeSet = true
}
if p.peek(itemObjOpen) {
p.ignore()
if !typeSet {
op.Type = opQuery
}
for {
if p.peek(itemEOF, itemFragment) {
p.ignore()
break
}
err = p.parseSelectionSet(&op.SelectionSet)
if err != nil {
return fmt.Errorf("%s: %v", op.Type, err)
}
}
} else {
return fmt.Errorf("expecting a query, mutation or subscription, got: %s", p.next())
}
return nil
}
func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
item := p.next()
switch item._type {
case itemQuery:
op.Type = opQuery
case itemMutation:
op.Type = opMutate
case itemSub:
op.Type = opSub
}
op.Args = op.SelectionSet.argsA[:0]
var err error
if p.peek(itemName) {
op.Name = p.val(p.next())
}
if p.peek(itemArgsOpen) {
p.ignore()
op.Args, err = p.parseOpParams(op.Args)
if err != nil {
return err
}
}
return nil
}
func (p *Parser) parseSelectionSet(selset *SelectionSet) error {
var err error
selset.Fields, err = p.parseFields(selset.Fields)
if err != nil {
return err
}
return nil
}
func ParseArgValue(argVal string) (*Node, error) {
@ -123,215 +323,114 @@ func ParseArgValue(argVal string) (*Node, error) {
return op, err
}
func parseSelectionSet(gql []byte) (*Operation, error) {
var err error
if len(gql) == 0 {
return nil, errors.New("blank query")
}
l := lexPool.Get().(*lexer)
l.Reset()
if err = lex(l, gql); err != nil {
return nil, err
}
p := &Parser{
input: l.input,
pos: -1,
items: l.items,
}
var op *Operation
if p.peek(itemObjOpen) {
p.ignore()
op, err = p.parseQueryOp()
} else {
op, err = p.parseOp()
}
if err != nil {
return nil, err
}
if p.peek(itemObjClose) {
p.ignore()
} else {
return nil, fmt.Errorf("operation missing closing '}'")
}
if !p.peek(itemEOF) {
p.ignore()
return nil, fmt.Errorf("invalid '%s' found after closing '}'", p.current())
}
lexPool.Put(l)
return op, err
}
func (p *Parser) next() item {
n := p.pos + 1
if n >= len(p.items) {
p.err = errEOT
return item{_type: itemEOF}
}
p.pos = n
return p.items[p.pos]
}
func (p *Parser) ignore() {
n := p.pos + 1
if n >= len(p.items) {
p.err = errEOT
return
}
p.pos = n
}
func (p *Parser) current() string {
item := p.items[p.pos]
return b2s(p.input[item.pos:item.end])
}
func (p *Parser) peek(types ...itemType) bool {
n := p.pos + 1
// if p.items[n]._type == itemEOF {
// return false
// }
if n >= len(p.items) {
return false
}
for i := 0; i < len(types); i++ {
if p.items[n]._type == types[i] {
return true
}
}
return false
}
func (p *Parser) parseOp() (*Operation, error) {
if !p.peek(itemQuery, itemMutation, itemSub) {
err := errors.New("expecting a query, mutation or subscription")
return nil, err
}
item := p.next()
op := opPool.Get().(*Operation)
op.Reset()
switch item._type {
case itemQuery:
op.Type = opQuery
case itemMutation:
op.Type = opMutate
case itemSub:
op.Type = opSub
}
op.Fields = op.fieldsA[:0]
op.Args = op.argsA[:0]
var err error
if p.peek(itemName) {
op.Name = p.val(p.next())
}
if p.peek(itemArgsOpen) {
p.ignore()
op.Args, err = p.parseOpParams(op.Args)
if err != nil {
return nil, err
}
}
if p.peek(itemObjOpen) {
p.ignore()
for n := 0; n < 10; n++ {
if !p.peek(itemName) {
break
}
op.Fields, err = p.parseFields(op.Fields)
if err != nil {
return nil, err
}
}
}
return op, nil
}
func (p *Parser) parseQueryOp() (*Operation, error) {
op := opPool.Get().(*Operation)
op.Reset()
op.Type = opQuery
op.Fields = op.fieldsA[:0]
op.Args = op.argsA[:0]
var err error
for n := 0; n < 10; n++ {
if !p.peek(itemName) {
break
}
op.Fields, err = p.parseFields(op.Fields)
if err != nil {
return nil, err
}
}
return op, nil
}
func (p *Parser) parseFields(fields []Field) ([]Field, error) {
st := util.NewStack()
st := NewStack()
if !p.peek(itemName, itemSpread) {
return nil, fmt.Errorf("unexpected token: %s", p.peekNext())
}
for {
if len(fields) >= maxFields {
return nil, fmt.Errorf("too many fields (max %d)", maxFields)
if p.peek(itemEOF) {
p.ignore()
return nil, errors.New("invalid query")
}
if p.peek(itemObjClose) {
p.ignore()
st.Pop()
if st.Len() == 0 {
break
} else {
if st.Len() != 0 {
st.Pop()
continue
} else {
break
}
}
if len(fields) >= maxFields {
return nil, fmt.Errorf("too many fields (max %d)", maxFields)
}
isFrag := false
if p.peek(itemSpread) {
p.ignore()
isFrag = true
}
if !p.peek(itemName) {
return nil, errors.New("expecting an alias or field name")
if isFrag {
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
} else {
return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
}
}
fields = append(fields, Field{ID: int32(len(fields))})
var f *Field
f := &fields[(len(fields) - 1)]
f.Args = f.argsA[:0]
f.Children = f.childrenA[:0]
if isFrag {
name := p.val(p.next())
_, _ = p.h.WriteString(name)
id := p.h.Sum64()
p.h.Reset()
// Parse the inside of the field's () parentheses,
// in short, parse the args like id, where, etc.
if err := p.parseField(f); err != nil {
return nil, err
}
fr, ok := p.frags[id]
if !ok {
return nil, fmt.Errorf("no fragment named '%s' defined", name)
}
n := int32(len(fields))
fields = append(fields, fr.Fields...)
for i := 0; i < len(fr.Fields); i++ {
k := (n + int32(i))
f := &fields[k]
f.ID = int32(k)
// If this is the top level, point the parent to the parent of the
// previous field.
if f.ParentID == -1 {
pid := st.Peek()
f.ParentID = pid
if f.ParentID != -1 {
fields[pid].Children = append(fields[f.ParentID].Children, f.ID)
}
// Update all the other parent IDs by our new offset in this array
} else {
f.ParentID += n
}
f.Children = make([]int32, len(f.Children))
copy(f.Children, fr.Fields[i].Children)
f.Args = make([]Arg, len(f.Args))
copy(f.Args, fr.Fields[i].Args)
// Update all the children as well, since they shifted by the same offset.
for j := range f.Children {
f.Children[j] += n
}
}
intf := st.Peek()
if pid, ok := intf.(int32); ok {
f.ParentID = pid
fields[pid].Children = append(fields[pid].Children, f.ID)
} else {
f.ParentID = -1
fields = append(fields, Field{ID: int32(len(fields))})
f = &fields[(len(fields) - 1)]
f.Args = f.argsA[:0]
f.Children = f.childrenA[:0]
// Parse the field
if err := p.parseField(f); err != nil {
return nil, err
}
if st.Len() == 0 {
f.ParentID = -1
} else {
pid := st.Peek()
f.ParentID = pid
fields[pid].Children = append(fields[pid].Children, f.ID)
}
}
// The first opening curly brackets after this
@ -339,13 +438,6 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
if p.peek(itemObjOpen) {
p.ignore()
st.Push(f.ID)
} else if p.peek(itemObjClose) {
if st.Len() == 0 {
break
} else {
continue
}
}
}
@ -385,7 +477,7 @@ func (p *Parser) parseOpParams(args []Arg) ([]Arg, error) {
return nil, fmt.Errorf("too many args (max %d)", maxArgs)
}
if p.peek(itemArgsClose) {
if p.peek(itemEOF, itemArgsClose) {
p.ignore()
break
}
@ -403,7 +495,7 @@ func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
return nil, fmt.Errorf("too many args (max %d)", maxArgs)
}
if p.peek(itemArgsClose) {
if p.peek(itemEOF, itemArgsClose) {
p.ignore()
break
}
@ -470,7 +562,7 @@ func (p *Parser) parseObj() (*Node, error) {
parent.Reset()
for {
if p.peek(itemObjClose) {
if p.peek(itemEOF, itemObjClose) {
p.ignore()
break
}
@ -545,6 +637,57 @@ func (p *Parser) vall(v item) string {
return b2s(p.input[v.pos:v.end])
}
func (p *Parser) peek(types ...itemType) bool {
n := p.pos + 1
l := len(types)
// if p.items[n]._type == itemEOF {
// return false
// }
if n >= len(p.items) {
return types[0] == itemEOF
}
if l == 1 {
return p.items[n]._type == types[0]
}
for i := 0; i < l; i++ {
if p.items[n]._type == types[i] {
return true
}
}
return false
}
func (p *Parser) next() item {
n := p.pos + 1
if n >= len(p.items) {
p.err = errEOT
return item{_type: itemEOF}
}
p.pos = n
return p.items[p.pos]
}
func (p *Parser) ignore() {
n := p.pos + 1
if n >= len(p.items) {
p.err = errEOT
return
}
p.pos = n
}
func (p *Parser) peekNext() string {
item := p.items[p.pos+1]
return b2s(p.input[item.pos:item.end])
}
func (p *Parser) reset(to int) {
p.pos = to
}
func b2s(b []byte) string {
return *(*string)(unsafe.Pointer(&b))
}
@ -578,34 +721,9 @@ func (t parserType) String() string {
case NodeList:
v = "node-list"
}
return fmt.Sprintf("<%s>", v)
return v
}
// type Frees struct {
// n *Node
// loc int
// }
// var freeList []Frees
// func FreeNode(n *Node, loc int) {
// j := -1
// for i := range freeList {
// if n == freeList[i].n {
// j = i
// break
// }
// }
// if j == -1 {
// nodePool.Put(n)
// freeList = append(freeList, Frees{n, loc})
// } else {
// fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// }
// }
func FreeNode(n *Node, loc int) {
func FreeNode(n *Node) {
nodePool.Put(n)
}

View File

@ -2,8 +2,9 @@ package qcode
import (
"errors"
"github.com/chirino/graphql/schema"
"testing"
"github.com/chirino/graphql/schema"
)
func TestCompile1(t *testing.T) {
@ -120,7 +121,7 @@ updateThread {
}
}
}
}`
}}`
qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(gql), "anon")
@ -130,6 +131,93 @@ updateThread {
}
func TestFragmentsCompile1(t *testing.T) {
gql := `
fragment userFields1 on user {
id
email
}
query {
users {
...userFields2
created_at
...userFields1
}
}
fragment userFields2 on user {
first_name
last_name
}
`
qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(gql), "user")
if err != nil {
t.Fatal(err)
}
}
func TestFragmentsCompile2(t *testing.T) {
gql := `
query {
users {
...userFields2
created_at
...userFields1
}
}
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}`
qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(gql), "user")
if err != nil {
t.Fatal(err)
}
}
func TestFragmentsCompile3(t *testing.T) {
gql := `
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}
query {
users {
...userFields2
created_at
...userFields1
}
}
`
qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(gql), "user")
if err != nil {
t.Fatal(err)
}
}
var gql = []byte(`
{products(
# returns only 30 items
@ -151,6 +239,29 @@ var gql = []byte(`
price
}}`)
var gqlWithFragments = []byte(`
fragment userFields1 on user {
id
email
__typename
}
query {
users {
...userFields2
created_at
...userFields1
__typename
}
}
fragment userFields2 on user {
first_name
last_name
__typename
}`)
func BenchmarkQCompile(b *testing.B) {
qcompile, _ := NewCompiler(Config{})
@ -183,8 +294,22 @@ func BenchmarkQCompileP(b *testing.B) {
})
}
func BenchmarkParse(b *testing.B) {
func BenchmarkQCompileFragment(b *testing.B) {
qcompile, _ := NewCompiler(Config{})
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
_, err := qcompile.Compile(gqlWithFragments, "user")
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkParse(b *testing.B) {
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
@ -211,6 +336,18 @@ func BenchmarkParseP(b *testing.B) {
})
}
func BenchmarkParseFragment(b *testing.B) {
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
_, err := Parse(gqlWithFragments)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkSchemaParse(b *testing.B) {
b.ResetTimer()

View File

@ -277,6 +277,7 @@ func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
return nil, err
}
freeNodes(op)
opPool.Put(op)
return &qc, nil
@ -419,6 +420,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
com.AddFilters(qc, s, role)
s.Cols = make([]Column, 0, len(field.Children))
cm := make(map[string]struct{})
action = QTQuery
for _, cid := range field.Children {
@ -428,19 +430,27 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
continue
}
var fname string
if f.Alias != "" {
fname = f.Alias
} else {
fname = f.Name
}
if _, ok := cm[fname]; ok {
continue
} else {
cm[fname] = struct{}{}
}
if len(f.Children) != 0 {
val := f.ID | (s.ID << 16)
st.Push(val)
continue
}
col := Column{Name: f.Name}
if len(f.Alias) != 0 {
col.FieldName = f.Alias
} else {
col.FieldName = f.Name
}
col := Column{Name: f.Name, FieldName: fname}
s.Cols = append(s.Cols, col)
}
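The new `cm` set dedupes selected columns by output name (alias if present, field name otherwise), which matters now that fragments can splice the same field into a selection twice. A sketch of the effect with a hypothetical query:

// query { users { id ...withID } }   fragment withID on user { id }
// The second "id" is dropped: cm["id"] is already set, so no duplicate
// column reaches s.Cols or the generated SELECT list.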
@ -483,50 +493,42 @@ func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg, role string) error {
var err error
// don't free this arg: either it was freed previously or it will be
// freed later, as in psql
var df bool
for i := range args {
arg := &args[i]
switch arg.Name {
case "id":
err, df = com.compileArgID(sel, arg)
err = com.compileArgID(sel, arg)
case "search":
err, df = com.compileArgSearch(sel, arg)
err = com.compileArgSearch(sel, arg)
case "where":
err, df = com.compileArgWhere(sel, arg, role)
err = com.compileArgWhere(sel, arg, role)
case "orderby", "order_by", "order":
err, df = com.compileArgOrderBy(sel, arg)
err = com.compileArgOrderBy(sel, arg)
case "distinct_on", "distinct":
err, df = com.compileArgDistinctOn(sel, arg)
err = com.compileArgDistinctOn(sel, arg)
case "limit":
err, df = com.compileArgLimit(sel, arg)
err = com.compileArgLimit(sel, arg)
case "offset":
err, df = com.compileArgOffset(sel, arg)
err = com.compileArgOffset(sel, arg)
case "first":
err, df = com.compileArgFirstLast(sel, arg, PtForward)
err = com.compileArgFirstLast(sel, arg, PtForward)
case "last":
err, df = com.compileArgFirstLast(sel, arg, PtBackward)
err = com.compileArgFirstLast(sel, arg, PtBackward)
case "after":
err, df = com.compileArgAfterBefore(sel, arg, PtForward)
err = com.compileArgAfterBefore(sel, arg, PtForward)
case "before":
err, df = com.compileArgAfterBefore(sel, arg, PtBackward)
}
if !df {
FreeNode(arg.Val, 5)
err = com.compileArgAfterBefore(sel, arg, PtBackward)
}
if err != nil {
@ -637,39 +639,20 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
}
}
if usePool {
st.Push(node)
for {
if st.Len() == 0 {
break
}
intf := st.Pop()
node, ok := intf.(*Node)
if !ok || node == nil {
continue
}
for i := range node.Children {
st.Push(node.Children[i])
}
FreeNode(node, 1)
}
}
return root, needsUser, nil
}
func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
if sel.ID != 0 {
return nil, false
return nil
}
if sel.Where != nil && sel.Where.Op == OpEqID {
return nil, false
return nil
}
if arg.Val.Type != NodeVar {
return argErr("id", "variable"), false
return argErr("id", "variable")
}
ex := expPool.Get().(*Exp)
@ -680,12 +663,12 @@ func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
ex.Val = arg.Val.Val
sel.Where = ex
return nil, false
return nil
}
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
if arg.Val.Type != NodeVar {
return argErr("search", "variable"), false
return argErr("search", "variable")
}
ex := expPool.Get().(*Exp)
@ -700,18 +683,19 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
}
sel.Args[arg.Name] = arg.Val
arg.df = true
AddFilter(sel, ex)
return nil, true
return nil
}
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error, bool) {
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) error {
st := util.NewStack()
var err error
ex, nu, err := com.compileArgObj(st, arg)
if err != nil {
return err, false
return err
}
if nu && role == "anon" {
@ -719,12 +703,12 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error,
}
AddFilter(sel, ex)
return nil, true
return nil
}
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
if arg.Val.Type != NodeObj {
return fmt.Errorf("expecting an object"), false
return fmt.Errorf("expecting an object")
}
st := util.NewStack()
@ -742,16 +726,15 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
node, ok := intf.(*Node)
if !ok || node == nil {
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf), false
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf)
}
if _, ok := com.bl[node.Name]; ok {
FreeNode(node, 2)
continue
}
if node.Type != NodeStr && node.Type != NodeVar {
return fmt.Errorf("expecting a string or variable"), false
return fmt.Errorf("expecting a string or variable")
}
ob := &OrderBy{}
@ -770,25 +753,24 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
case "desc_nulls_last":
ob.Order = OrderDescNullsLast
default:
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first"), false
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first")
}
setOrderByColName(ob, node)
sel.OrderBy = append(sel.OrderBy, ob)
FreeNode(node, 3)
}
return nil, false
return nil
}
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error {
node := arg.Val
if _, ok := com.bl[node.Name]; ok {
return nil, false
return nil
}
if node.Type != NodeList && node.Type != NodeStr {
return fmt.Errorf("expecting a list of strings or just a string"), false
return fmt.Errorf("expecting a list of strings or just a string")
}
if node.Type == NodeStr {
@ -797,58 +779,57 @@ func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
for i := range node.Children {
sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val)
FreeNode(node.Children[i], 5)
}
return nil, false
return nil
}
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) error {
node := arg.Val
if node.Type != NodeInt {
return argErr("limit", "number"), false
return argErr("limit", "number")
}
sel.Paging.Limit = node.Val
return nil, false
return nil
}
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) (error, bool) {
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error {
node := arg.Val
if node.Type != NodeVar {
return argErr("offset", "variable"), false
return argErr("offset", "variable")
}
sel.Paging.Offset = node.Val
return nil, false
return nil
}
func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) (error, bool) {
func (com *Compiler) compileArgFirstLast(sel *Select, arg *Arg, pt PagingType) error {
node := arg.Val
if node.Type != NodeInt {
return argErr(arg.Name, "number"), false
return argErr(arg.Name, "number")
}
sel.Paging.Type = pt
sel.Paging.Limit = node.Val
return nil, false
return nil
}
func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) (error, bool) {
func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType) error {
node := arg.Val
if node.Type != NodeVar || node.Val != "cursor" {
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name), false
return fmt.Errorf("value for argument '%s' must be a variable named $cursor", arg.Name)
}
sel.Paging.Type = pt
sel.Paging.Cursor = true
return nil, false
return nil
}
// var zeroTrv = &trval{}
@ -1228,3 +1209,81 @@ func FreeExp(ex *Exp) {
func argErr(name, ty string) error {
return fmt.Errorf("value for argument '%s' must be a %s", name, ty)
}
func freeNodes(op *Operation) {
var st *util.Stack
fm := make(map[*Node]struct{})
for i := range op.Args {
arg := op.Args[i]
if arg.df {
continue
}
for i := range arg.Val.Children {
if st == nil {
st = util.NewStack()
}
c := arg.Val.Children[i]
if _, ok := fm[c]; !ok {
st.Push(c)
}
}
if _, ok := fm[arg.Val]; !ok {
nodePool.Put(arg.Val)
fm[arg.Val] = struct{}{}
}
}
for i := range op.Fields {
f := op.Fields[i]
for j := range f.Args {
arg := f.Args[j]
if arg.df {
continue
}
for k := range arg.Val.Children {
if st == nil {
st = util.NewStack()
}
c := arg.Val.Children[k]
if _, ok := fm[c]; !ok {
st.Push(c)
}
}
if _, ok := fm[arg.Val]; !ok {
nodePool.Put(arg.Val)
fm[arg.Val] = struct{}{}
}
}
}
if st == nil {
return
}
for {
if st.Len() == 0 {
break
}
intf := st.Pop()
node, ok := intf.(*Node)
if !ok || node == nil {
continue
}
for i := range node.Children {
st.Push(node.Children[i])
}
if _, ok := fm[node]; !ok {
nodePool.Put(node)
fm[node] = struct{}{}
}
}
}
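The `freeNodes` walk above recycles an entire AST back into a `sync.Pool` while guarding against returning the same node twice. As a reference, here is a minimal, self-contained sketch of that pattern; the `Node` type and names are illustrative stand-ins, not the package's actual types:

```go
package sketch

import "sync"

// Node is an illustrative stand-in for the compiler's AST node type.
type Node struct {
	Children []*Node
}

var nodePool = sync.Pool{New: func() interface{} { return new(Node) }}

// release walks a tree iteratively and returns every node to the pool,
// using a visited set so a node reachable twice is never Put twice.
func release(root *Node) {
	seen := map[*Node]struct{}{}
	stack := []*Node{root}
	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		if n == nil {
			continue
		}
		if _, ok := seen[n]; ok {
			continue
		}
		seen[n] = struct{}{}
		stack = append(stack, n.Children...)
		n.Children = n.Children[:0] // reset before pooling
		nodePool.Put(n)
	}
}
```

The visited set is what lets the compiler drop the per-callsite `df` bookkeeping that the earlier hunks remove.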

View File

@ -2,126 +2,95 @@ package core
import (
"bytes"
"context"
"crypto/sha256"
"database/sql"
"encoding/hex"
"fmt"
"hash/maphash"
"io"
"strings"
"sync"
"github.com/dosco/super-graph/core/internal/allow"
"github.com/dosco/super-graph/core/internal/qcode"
)
type preparedItem struct {
type query struct {
sync.Once
sd *sql.Stmt
ai allow.Item
qt qcode.QType
err error
st stmt
roleArg bool
}
func (sg *SuperGraph) initPrepared() error {
ct := context.Background()
func (sg *SuperGraph) prepare(q *query, role string) {
var stmts []stmt
var err error
qb := []byte(q.ai.Query)
vars := []byte(q.ai.Vars)
switch q.qt {
case qcode.QTQuery:
if sg.abacEnabled {
stmts, err = sg.buildMultiStmt(qb, vars)
} else {
stmts, err = sg.buildRoleStmt(qb, vars, role)
}
case qcode.QTMutation:
stmts, err = sg.buildRoleStmt(qb, vars, role)
}
if err != nil {
sg.log.Printf("WRN %s %s: %v", q.qt, q.ai.Name, err)
q.err = err
return
}
q.st = stmts[0]
q.roleArg = len(stmts) > 1
q.sd, err = sg.db.Prepare(q.st.sql)
if err != nil {
q.err = fmt.Errorf("prepare failed: %v: %s", err, q.st.sql)
}
}
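The new `query` struct embeds `sync.Once`, so each allow-list entry is compiled and prepared on first execution rather than all up front at startup. A minimal sketch of that lazy-init pattern, with illustrative names:

```go
package sketch

import (
	"database/sql"
	"sync"
)

// lazyQuery mirrors the idea behind the new query struct: the SQL is
// compiled and prepared on first use, exactly once, instead of for
// every allow-list entry at startup.
type lazyQuery struct {
	once sync.Once
	sd   *sql.Stmt
	err  error
}

func (q *lazyQuery) stmt(db *sql.DB, sqlText string) (*sql.Stmt, error) {
	q.once.Do(func() {
		// Concurrent first callers block here; all of them then share
		// the same prepared statement (or the same error).
		q.sd, q.err = db.Prepare(sqlText)
	})
	return q.sd, q.err
}
```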
func (sg *SuperGraph) initPrepared() error {
if sg.allowList.IsPersist() {
return nil
}
sg.prepared = make(map[string]*preparedItem)
tx, err := sg.db.BeginTx(ct, nil)
if err != nil {
return err
}
defer tx.Rollback() //nolint: errcheck
if err = sg.prepareRoleStmt(tx); err != nil {
return fmt.Errorf("prepareRoleStmt: %w", err)
if err := sg.prepareRoleStmt(); err != nil {
return fmt.Errorf("role query: %w", err)
}
if err := tx.Commit(); err != nil {
return err
}
success := 0
sg.queries = make(map[uint64]query)
list, err := sg.allowList.Load()
if err != nil {
return err
}
h := maphash.Hash{}
h.SetSeed(sg.hashSeed)
for _, v := range list {
if len(v.Query) == 0 {
continue
}
qt := qcode.GetQType(v.Query)
err := sg.prepareStmt(v)
if err != nil {
sg.log.Printf("WRN %s: %v", v.Name, err)
} else {
success++
}
}
switch qt {
case qcode.QTQuery:
sg.queries[queryID(&h, v.Name, "user")] = query{ai: v, qt: qt}
sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))
return nil
}
func (sg *SuperGraph) prepareStmt(item allow.Item) error {
query := item.Query
qb := []byte(query)
vars := item.Vars
qt := qcode.GetQType(query)
ct := context.Background()
switch qt {
case qcode.QTQuery:
var stmts1 []stmt
var err error
if sg.abacEnabled {
stmts1, err = sg.buildMultiStmt(qb, vars)
} else {
stmts1, err = sg.buildRoleStmt(qb, vars, "user")
}
if err != nil {
return err
}
//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
if err != nil {
return err
}
if sg.anonExists {
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
if err != nil {
return err
if sg.anonExists {
sg.queries[queryID(&h, v.Name, "anon")] = query{ai: v, qt: qt}
}
err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
if err != nil {
return err
}
}
case qcode.QTMutation:
for _, role := range sg.conf.Roles {
// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
if err != nil {
return err
}
err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
if err != nil {
return err
case qcode.QTMutation:
for _, role := range sg.conf.Roles {
sg.queries[queryID(&h, v.Name, role.Name)] = query{ai: v, qt: qt}
}
}
}
@ -129,22 +98,8 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
return nil
}
func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
sd, err := sg.db.PrepareContext(ct, st[0].sql)
if err != nil {
return fmt.Errorf("prepare failed: %v: %s", err, st[0].sql)
}
sg.prepared[key] = &preparedItem{
sd: sd,
st: st[0],
roleArg: len(st) > 1,
}
return nil
}
// nolint: errcheck
func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
func (sg *SuperGraph) prepareRoleStmt() error {
var err error
if !sg.abacEnabled {
@ -171,11 +126,11 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
}
io.WriteString(w, ` ELSE $2 END) FROM (`)
io.WriteString(w, sg.conf.RolesQuery)
io.WriteString(w, rq)
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `)
sg.getRole, err = tx.Prepare(w.String())
sg.getRole, err = sg.db.Prepare(w.String())
if err != nil {
return err
}
@ -187,15 +142,14 @@ func (sg *SuperGraph) initAllowList() error {
var ac allow.Config
var err error
if len(sg.conf.AllowListFile) == 0 {
sg.conf.UseAllowList = false
sg.log.Printf("WRN allow list disabled no file specified")
if sg.conf.AllowListFile == "" {
sg.conf.AllowListFile = "allow.list"
}
// When the list is not enabled it is still created
// and new queries are saved to it.
if !sg.conf.UseAllowList {
ac = allow.Config{CreateIfNotExists: true, Persist: true}
ac = allow.Config{CreateIfNotExists: true, Persist: true, Log: sg.log}
}
sg.allowList, err = allow.New(sg.conf.AllowListFile, ac)
@ -207,9 +161,11 @@ func (sg *SuperGraph) initAllowList() error {
}
// nolint: errcheck
func stmtHash(name string, role string) string {
h := sha256.New()
io.WriteString(h, strings.ToLower(name))
io.WriteString(h, role)
return hex.EncodeToString(h.Sum(nil))
func queryID(h *maphash.Hash, name string, role string) uint64 {
h.WriteString(name)
h.WriteString(role)
v := h.Sum64()
h.Reset()
return v
}
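The move from `sha256` hex strings to `hash/maphash` (new in Go 1.14) works because a purely in-memory map key only needs to be stable within one process for a given seed; it avoids the hashing and hex-encoding cost of the old `stmtHash`. A small standalone sketch of the seeded-hash key, as an illustrative variant of `queryID`:

```go
package sketch

import "hash/maphash"

// One seed per process; maphash values are deliberately not stable
// across runs, which is fine for purely in-memory map keys.
var seed = maphash.MakeSeed()

// queryKey hashes a (name, role) pair into a uint64 map key.
func queryKey(name, role string) uint64 {
	var h maphash.Hash
	h.SetSeed(seed)
	h.WriteString(name)
	h.WriteString(role)
	return h.Sum64()
}
```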

View File

@ -4,10 +4,10 @@ import (
"bytes"
"errors"
"fmt"
"hash/maphash"
"net/http"
"sync"
"github.com/cespare/xxhash/v2"
"github.com/dosco/super-graph/core/internal/qcode"
"github.com/dosco/super-graph/jsn"
)
@ -16,12 +16,13 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
var err error
sel := st.qc.Selects
h := xxhash.New()
h := maphash.Hash{}
h.SetSeed(sg.hashSeed)
// fetch the field names used within the db response json
// that mark insertion points, and the mapping between
// those field names and their select objects
fids, sfmap := sg.parentFieldIds(h, sel, st.md.Skipped)
fids, sfmap := sg.parentFieldIds(&h, sel, st.md.Skipped())
// fetch the field values of the marked insertion points
// these values contain the id to be used with fetching remote data
@ -30,10 +31,10 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
switch {
case len(from) == 1:
to, err = sg.resolveRemote(hdr, h, from[0], sel, sfmap)
to, err = sg.resolveRemote(hdr, &h, from[0], sel, sfmap)
case len(from) > 1:
to, err = sg.resolveRemotes(hdr, h, from, sel, sfmap)
to, err = sg.resolveRemotes(hdr, &h, from, sel, sfmap)
default:
return nil, errors.New("something wrong no remote ids found in db response")
@ -55,7 +56,7 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
func (sg *SuperGraph) resolveRemote(
hdr http.Header,
h *xxhash.Digest,
h *maphash.Hash,
field jsn.Field,
sel []qcode.Select,
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@ -66,7 +67,8 @@ func (sg *SuperGraph) resolveRemote(
to := toA[:1]
// use the json key to find the related Select object
k1 := xxhash.Sum64(field.Key)
_, _ = h.Write(field.Key)
k1 := h.Sum64()
s, ok := sfmap[k1]
if !ok {
@ -117,7 +119,7 @@ func (sg *SuperGraph) resolveRemote(
func (sg *SuperGraph) resolveRemotes(
hdr http.Header,
h *xxhash.Digest,
h *maphash.Hash,
from []jsn.Field,
sel []qcode.Select,
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@ -134,7 +136,8 @@ func (sg *SuperGraph) resolveRemotes(
for i, id := range from {
// use the json key to find the related Select object
k1 := xxhash.Sum64(id.Key)
_, _ = h.Write(id.Key)
k1 := h.Sum64()
s, ok := sfmap[k1]
if !ok {
@ -192,7 +195,7 @@ func (sg *SuperGraph) resolveRemotes(
return to, cerr
}
func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skipped uint32) (
[][]byte,
map[uint64]*qcode.Select) {
@ -227,8 +230,8 @@ func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipp
fm[n] = r.IDField
n++
k := xxhash.Sum64(r.IDField)
sm[k] = s
_, _ = h.Write(r.IDField)
sm[h.Sum64()] = s
}
}

View File

@ -2,11 +2,11 @@ package core
import (
"fmt"
"hash/maphash"
"io/ioutil"
"net/http"
"strings"
"github.com/cespare/xxhash/v2"
"github.com/dosco/super-graph/core/internal/psql"
"github.com/dosco/super-graph/jsn"
)
@ -19,7 +19,7 @@ type resolvFn struct {
func (sg *SuperGraph) initResolvers() error {
var err error
sg.rmap = make(map[uint64]*resolvFn)
sg.rmap = make(map[uint64]resolvFn)
for _, t := range sg.conf.Tables {
err = sg.initRemotes(t)
@ -36,7 +36,8 @@ func (sg *SuperGraph) initResolvers() error {
}
func (sg *SuperGraph) initRemotes(t Table) error {
h := xxhash.New()
h := maphash.Hash{}
h.SetSeed(sg.hashSeed)
for _, r := range t.Remotes {
// defines the table column to be used as an id in the
@ -75,17 +76,18 @@ func (sg *SuperGraph) initRemotes(t Table) error {
path = append(path, []byte(p))
}
rf := &resolvFn{
rf := resolvFn{
IDField: []byte(idk),
Path: path,
Fn: fn,
}
// index resolver obj by parent and child names
sg.rmap[mkkey(h, r.Name, t.Name)] = rf
sg.rmap[mkkey(&h, r.Name, t.Name)] = rf
// index resolver obj by IDField
sg.rmap[xxhash.Sum64(rf.IDField)] = rf
_, _ = h.Write(rf.IDField)
sg.rmap[h.Sum64()] = rf
}
return nil

View File

@ -1,11 +1,9 @@
package core
import (
"github.com/cespare/xxhash/v2"
)
import "hash/maphash"
// nolint: errcheck
func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
func mkkey(h *maphash.Hash, k1 string, k2 string) uint64 {
h.WriteString(k1)
h.WriteString(k2)
v := h.Sum64()

View File

@ -55,6 +55,30 @@ query {
}
```
### Fragments
Fragments make it easy to build large, complex queries from small, composable and reusable fragment blocks.
```graphql
query {
users {
...userFields2
...userFields1
picture_url
}
}
fragment userFields1 on user {
id
email
}
fragment userFields2 on user {
first_name
last_name
}
```
### Sorting
To sort or order results just use the `order_by` argument. It can be combined with `where`, `search`, etc. to build complex queries that fit your needs.

View File

@ -4,6 +4,8 @@ title: Introduction
sidebar_label: Introduction
---
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
Super Graph is a service that instantly and without code gives you a high-performance, secure GraphQL API. Your GraphQL queries are auto-translated into a single fast SQL query. No more spending weeks or months writing backend API code. Just make the query you need and Super Graph will do the rest.
Super Graph has a rich feature set: integration with your existing Ruby on Rails apps, joining your DB with data from remote APIs, role- and attribute-based access control, support for JWT tokens, DB migrations, seeding and a lot more.
@ -134,3 +136,9 @@ mutation {
}
}
```
### Built-in GraphQL Editor
Quickly craft and test your queries with a full-featured GraphQL editor. Auto-complete and schema documentation are built in.
<img alt="Zipkin Traces" src={useBaseUrl("img/webui.jpg")} />

View File

@ -95,7 +95,7 @@ auth:
type: jwt
jwt:
# the two providers are 'auth0' and 'none'
# valid providers are auth0, firebase and none
provider: auth0
secret: abc335bfcfdb04e50db5bb0a4d67ab9
public_key_file: /secrets/public_key.pem
@ -108,6 +108,19 @@ We can get the JWT token either from the `authorization` header where we expect
For validation, a `secret` or a public key (ECDSA or RSA) is required. When using public keys they have to be in a PEM-format file.
### Firebase Auth
```yaml
auth:
type: jwt
jwt:
provider: firebase
audience: <firebase-project-id>
```
Firebase auth also uses JWT; the keys are auto-fetched from Google and rotated according to Firebase's documented mechanism. The `audience` config value needs to be set to your project id and everything else is taken care of for you.
### HTTP Headers
```yaml

View File

@ -0,0 +1,13 @@
---
id: webui
title: Web UI / GraphQL Editor
sidebar_label: Web UI
---
import useBaseUrl from '@docusaurus/useBaseUrl'; // Add to the top of the file below the front matter.
<img alt="Zipkin Traces" src={useBaseUrl("img/webui.jpg")} />
Super Graph comes with a built-in GraphQL editor that only runs in development. Use it to craft your queries and copy-paste them into your app once you're ready. The editor supports auto-completion and schema documentation. This makes it easy to craft and test your query in one go without knowing anything about the underlying database structure.
You can even set query variables or HTTP headers as required. To simulate an authenticated user, set the HTTP header `X-USER-ID` to the id of the user you want to test with, e.g. `"X-USER-ID": 5`.
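For reference, here is a minimal sketch in Go of exercising the endpoint the way the editor does, with the `X-USER-ID` header set. The endpoint path and payload shape are illustrative assumptions, not the documented API:

```go
package sketch

import (
	"bytes"
	"net/http"
)

// queryAsUser posts a GraphQL query with X-USER-ID set to simulate
// an authenticated user, as the dev editor does.
func queryAsUser() (*http.Response, error) {
	body := bytes.NewBufferString(`{"query":"{ users { id } }"}`)
	req, err := http.NewRequest("POST", "http://localhost:8080/api/v1/graphql", body)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", "application/json")
	req.Header.Set("X-USER-ID", "5")
	return http.DefaultClient.Do(req)
}
```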

View File

@ -36,8 +36,8 @@ module.exports = {
position: "left",
},
{
label: "Art Compute",
href: "https://artcompute.com/s/super-graph",
label: "AbtCode",
href: "https://abtcode.com/s/super-graph",
position: "left",
},
],

View File

@ -3,6 +3,7 @@ module.exports = {
Docusaurus: [
"home",
"intro",
"webui",
"start",
"why",
"graphql",

Binary file not shown.

After

Width:  |  Height:  |  Size: 117 KiB

View File

@ -1805,11 +1805,6 @@ asynckit@^0.4.0:
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
at-least-node@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
atob@^2.1.2:
version "2.1.2"
resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
@ -2323,7 +2318,7 @@ ccount@^1.0.0, ccount@^1.0.3:
resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.0.5.tgz#ac82a944905a65ce204eb03023157edf29425c17"
integrity sha512-MOli1W+nfbPLlKEhInaxhRdp7KVLFxLN5ykwzHgLsLI3H3gs5jjFAK4Eoj3OzzcxCtumDaI8onoVDeQyWaNTkw==
chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2:
chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2:
version "2.4.2"
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
@ -2522,15 +2517,6 @@ cliui@^5.0.0:
strip-ansi "^5.2.0"
wrap-ansi "^5.1.0"
cliui@^6.0.0:
version "6.0.0"
resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
dependencies:
string-width "^4.2.0"
strip-ansi "^6.0.0"
wrap-ansi "^6.2.0"
coa@^2.0.2:
version "2.0.2"
resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3"
@ -3216,11 +3202,6 @@ depd@~1.1.2:
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=
dependency-graph@^0.9.0:
version "0.9.0"
resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318"
integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w==
des.js@^1.0.0:
version "1.0.1"
resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843"
@ -3830,7 +3811,7 @@ fast-glob@^2.0.2:
merge2 "^1.2.3"
micromatch "^3.1.10"
fast-glob@^3.0.3, fast-glob@^3.1.1:
fast-glob@^3.0.3:
version "3.2.2"
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.2.tgz#ade1a9d91148965d4bf7c51f72e1ca662d32e63d"
integrity sha512-UDV82o4uQyljznxwMxyVRJgZZt3O5wENYojjzbaGEGZgeOxkLFf+V4cnUD+krzb2F72E18RhamkMZ7AdeggF7A==
@ -3970,7 +3951,7 @@ find-cache-dir@^3.0.0, find-cache-dir@^3.3.1:
make-dir "^3.0.2"
pkg-dir "^4.1.0"
find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0:
find-up@4.1.0, find-up@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
@ -4084,16 +4065,6 @@ fs-extra@^8.0.0, fs-extra@^8.1.0:
jsonfile "^4.0.0"
universalify "^0.1.0"
fs-extra@^9.0.0:
version "9.0.0"
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.0.0.tgz#b6afc31036e247b2466dc99c29ae797d5d4580a3"
integrity sha512-pmEYSk3vYsG/bF651KPUXZ+hvjpgWYw/Gc7W9NFUe3ZVLczKKWIij3IKpOrQcdw4TILtibFslZ0UmR8Vvzig4g==
dependencies:
at-least-node "^1.0.0"
graceful-fs "^4.2.0"
jsonfile "^6.0.1"
universalify "^1.0.0"
fs-minipass@^2.0.0:
version "2.1.0"
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
@ -4149,11 +4120,6 @@ get-own-enumerable-property-symbols@^3.0.0:
resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==
get-stdin@^7.0.0:
version "7.0.0"
resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-7.0.0.tgz#8d5de98f15171a125c5e516643c7a6d0ea8a96f6"
integrity sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==
get-stream@^4.0.0:
version "4.1.0"
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
@ -4275,18 +4241,6 @@ globby@^10.0.1:
merge2 "^1.2.3"
slash "^3.0.0"
globby@^11.0.0:
version "11.0.0"
resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.0.tgz#56fd0e9f0d4f8fb0c456f1ab0dee96e1380bc154"
integrity sha512-iuehFnR3xu5wBBtm4xi0dMe92Ob87ufyu/dHwpDYfbcpYpIbrO5OnS8M1vWvrBhSGEJ3/Ecj7gnX76P8YxpPEg==
dependencies:
array-union "^2.1.0"
dir-glob "^3.0.1"
fast-glob "^3.1.1"
ignore "^5.1.4"
merge2 "^1.3.0"
slash "^3.0.0"
globby@^6.1.0:
version "6.1.0"
resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"
@ -4743,7 +4697,7 @@ ignore@^3.3.5:
resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==
ignore@^5.1.1, ignore@^5.1.4:
ignore@^5.1.1:
version "5.1.4"
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.4.tgz#84b7b3dbe64552b6ef0eca99f6743dbec6d97adf"
integrity sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==
@ -5382,15 +5336,6 @@ jsonfile@^4.0.0:
optionalDependencies:
graceful-fs "^4.1.6"
jsonfile@^6.0.1:
version "6.0.1"
resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.0.1.tgz#98966cba214378c8c84b82e085907b40bf614179"
integrity sha512-jR2b5v7d2vIOust+w3wtFKZIfpC2pnRmFAhAC/BuweZFQR8qZzxH1OyrQ10HmdVYiXWkYUqPVsz91cG7EL2FBg==
dependencies:
universalify "^1.0.0"
optionalDependencies:
graceful-fs "^4.1.6"
jsprim@^1.2.2:
version "1.4.1"
resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"
@ -5656,13 +5601,6 @@ lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
log-symbols@^2.2.0:
version "2.2.0"
resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a"
integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==
dependencies:
chalk "^2.0.1"
loglevel@^1.6.8:
version "1.6.8"
resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.8.tgz#8a25fb75d092230ecd4457270d80b54e28011171"
@ -6645,7 +6583,7 @@ picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1:
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
pify@^2.0.0, pify@^2.3.0:
pify@^2.0.0:
version "2.3.0"
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=
@ -6731,24 +6669,6 @@ postcss-calc@^7.0.1:
postcss-selector-parser "^6.0.2"
postcss-value-parser "^4.0.2"
postcss-cli@^7.1.1:
version "7.1.1"
resolved "https://registry.yarnpkg.com/postcss-cli/-/postcss-cli-7.1.1.tgz#260f9546be260b2149bf32e28d785a0d79c9aab8"
integrity sha512-bYQy5ydAQJKCMSpvaMg0ThPBeGYqhQXumjbFOmWnL4u65CYXQ16RfS6afGQpit0dGv/fNzxbdDtx8dkqOhhIbg==
dependencies:
chalk "^4.0.0"
chokidar "^3.3.0"
dependency-graph "^0.9.0"
fs-extra "^9.0.0"
get-stdin "^7.0.0"
globby "^11.0.0"
postcss "^7.0.0"
postcss-load-config "^2.0.0"
postcss-reporter "^6.0.0"
pretty-hrtime "^1.0.3"
read-cache "^1.0.0"
yargs "^15.0.2"
postcss-color-functional-notation@^2.0.1:
version "2.0.1"
resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz#5efd37a88fbabeb00a2966d1e53d98ced93f74e0"
@ -7288,16 +7208,6 @@ postcss-replace-overflow-wrap@^3.0.0:
dependencies:
postcss "^7.0.2"
postcss-reporter@^6.0.0:
version "6.0.1"
resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-6.0.1.tgz#7c055120060a97c8837b4e48215661aafb74245f"
integrity sha512-LpmQjfRWyabc+fRygxZjpRxfhRf9u/fdlKf4VHG4TSPbV2XNsuISzYW1KL+1aQzx53CAppa1bKG4APIB/DOXXw==
dependencies:
chalk "^2.4.1"
lodash "^4.17.11"
log-symbols "^2.2.0"
postcss "^7.0.7"
postcss-selector-matches@^4.0.0:
version "4.0.0"
resolved "https://registry.yarnpkg.com/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz#71c8248f917ba2cc93037c9637ee09c64436fcff"
@ -7397,7 +7307,7 @@ postcss@^6.0.9:
source-map "^0.6.1"
supports-color "^5.4.0"
postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6, postcss@^7.0.7:
postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6:
version "7.0.30"
resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.30.tgz#cc9378beffe46a02cbc4506a0477d05fcea9a8e2"
integrity sha512-nu/0m+NtIzoubO+xdAlwZl/u5S5vi/y6BCsoL8D+8IxsD3XvBS8X4YEADNIVXKVuQvduiucnRv+vPIqj56EGMQ==
@ -7692,6 +7602,11 @@ react-helmet@^6.0.0-beta:
react-fast-compare "^2.0.4"
react-side-effect "^2.1.0"
react-hook-sticky@^0.2.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/react-hook-sticky/-/react-hook-sticky-0.2.0.tgz#0dcc40a2afb1856e53764af9b231f1146e3de576"
integrity sha512-J92F5H6PJQlMBgZ2tv58GeVlTZtEhpZ9bYLdoV2+5fVSJScszuY+TDZY3enQEAPIgJsLteFglGGuf8/TB9L72Q==
react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1:
version "16.13.1"
resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
@ -7785,13 +7700,6 @@ react@^16.8.4:
object-assign "^4.1.1"
prop-types "^15.6.2"
read-cache@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774"
integrity sha1-5mTvMRYRZsl1HNvo28+GtftY93Q=
dependencies:
pify "^2.3.0"
"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
version "2.3.7"
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
@ -8709,7 +8617,7 @@ string-width@^3.0.0, string-width@^3.1.0:
is-fullwidth-code-point "^2.0.0"
strip-ansi "^5.1.0"
string-width@^4.1.0, string-width@^4.2.0:
string-width@^4.1.0:
version "4.2.0"
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
@ -9305,11 +9213,6 @@ universalify@^0.1.0:
resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==
universalify@^1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/universalify/-/universalify-1.0.0.tgz#b61a1da173e8435b2fe3c67d29b9adf8594bd16d"
integrity sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==
unpipe@1.0.0, unpipe@~1.0.0:
version "1.0.0"
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
@ -9723,7 +9626,7 @@ wrap-ansi@^5.1.0:
string-width "^3.0.0"
strip-ansi "^5.0.0"
wrap-ansi@^6.0.0, wrap-ansi@^6.2.0:
wrap-ansi@^6.0.0:
version "6.2.0"
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
@ -9784,14 +9687,6 @@ yargs-parser@^13.1.2:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs-parser@^18.1.1:
version "18.1.3"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
dependencies:
camelcase "^5.0.0"
decamelize "^1.2.0"
yargs@^13.3.2:
version "13.3.2"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
@ -9808,23 +9703,6 @@ yargs@^13.3.2:
y18n "^4.0.0"
yargs-parser "^13.1.2"
yargs@^15.0.2:
version "15.3.1"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b"
integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==
dependencies:
cliui "^6.0.0"
decamelize "^1.2.0"
find-up "^4.1.0"
get-caller-file "^2.0.1"
require-directory "^2.1.1"
require-main-filename "^2.0.0"
set-blocking "^2.0.0"
string-width "^4.2.0"
which-module "^2.0.0"
y18n "^4.0.0"
yargs-parser "^18.1.1"
zepto@^1.2.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/zepto/-/zepto-1.2.0.tgz#e127bd9e66fd846be5eab48c1394882f7c0e4f98"

1
go.mod
View File

@ -12,7 +12,6 @@ require (
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
github.com/brianvoe/gofakeit/v5 v5.2.0
github.com/cespare/xxhash/v2 v2.1.1
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a
github.com/daaku/go.zipexe v1.0.1 // indirect
github.com/dgrijalva/jwt-go v3.2.0+incompatible

7
go.sum
View File

@ -35,7 +35,9 @@ github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3 h1:+qz9Ga6l6lKw6fgv
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3/go.mod h1:FlkD11RtgMTYjVuBnb7cxoHmQGqvPpCsr2atC88nl/M=
github.com/akavel/rsrc v0.8.0 h1:zjWn7ukO9Kc5Q62DOJCcxGpXC18RawVtYAGdz2aLlfw=
github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
github.com/aws/aws-sdk-go v1.15.27 h1:i75BxN4Es/8rTVQbEKAP1WCiIhhz635xTNeDdZJRAXQ=
@ -53,8 +55,6 @@ github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a h1:WVu7r2vwlrBVmunbSSU+9/3M3AgsQyhE49CKDjHiFq4=
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a/go.mod h1:wQjjxFMFyMlsWh4Z3nMuHQtevD4Ul9UVQSnz1JOLuP8=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
@ -220,6 +220,7 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
@ -319,6 +320,7 @@ github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeV
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
@ -543,6 +545,7 @@ google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ij
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
google.golang.org/grpc v1.23.1 h1:q4XQuHFC6I28BKZpo6IYyb3mNO+l7lSOxRuYTCiDfXk=
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=

View File

@ -156,7 +156,7 @@ func cmdVersion(cmd *cobra.Command, args []string) {
func BuildDetails() string {
if len(version) == 0 {
return fmt.Sprintf(`
return `
Super Graph (unknown version)
For documentation, visit https://supergraph.dev
@ -166,7 +166,7 @@ To build with version information please use the Makefile
Licensed under the Apache Public License 2.0
Copyright 2020, Vikram Rangnekar
`)
`
}
return fmt.Sprintf(`

View File

@ -82,8 +82,6 @@ func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[st
if v, ok := opt["user_id"]; ok && len(v) != 0 {
ct = context.WithValue(ct, core.UserIDKey, v)
} else {
ct = context.WithValue(ct, core.UserIDKey, "-1")
}
// var role string

View File

@ -66,7 +66,7 @@ func newViper(configPath, configFile string) *viper.Viper {
vi.SetDefault("host_port", "0.0.0.0:8080")
vi.SetDefault("web_ui", false)
vi.SetDefault("enable_tracing", false)
vi.SetDefault("auth_fail_block", "always")
vi.SetDefault("auth_fail_block", false)
vi.SetDefault("seed_file", "seed.js")
vi.SetDefault("default_block", true)

View File

@ -32,6 +32,7 @@ type Auth struct {
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
Audience string `mapstructure:"audience"`
}
Header struct {

View File

@ -2,19 +2,32 @@ package auth
import (
"context"
"encoding/json"
"io/ioutil"
"net/http"
"strconv"
"strings"
"time"
jwt "github.com/dgrijalva/jwt-go"
"github.com/dosco/super-graph/core"
)
const (
authHeader = "Authorization"
jwtAuth0 int = iota + 1
authHeader = "Authorization"
jwtAuth0 int = iota + 1
jwtFirebase int = iota + 2
firebasePKEndpoint = "https://www.googleapis.com/robot/v1/metadata/x509/securetoken@system.gserviceaccount.com"
firebaseIssuerPrefix = "https://securetoken.google.com/"
)
type firebasePKCache struct {
PublicKeys map[string]string
Expiration time.Time
}
var firebasePublicKeys firebasePKCache
func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
var key interface{}
var jwtProvider int
@ -23,6 +36,8 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
if ac.JWT.Provider == "auth0" {
jwtProvider = jwtAuth0
} else if ac.JWT.Provider == "firebase" {
jwtProvider = jwtFirebase
}
secret := ac.JWT.Secret
@ -56,6 +71,7 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
}
return func(w http.ResponseWriter, r *http.Request) {
var tok string
if len(cookie) != 0 {
@ -74,9 +90,16 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
tok = ah[7:]
}
token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, func(token *jwt.Token) (interface{}, error) {
return key, nil
})
var keyFunc jwt.Keyfunc
if jwtProvider == jwtFirebase {
keyFunc = firebaseKeyFunction
} else {
keyFunc = func(token *jwt.Token) (interface{}, error) {
return key, nil
}
}
token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, keyFunc)
if err != nil {
next.ServeHTTP(w, r)
@ -86,12 +109,20 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
if claims, ok := token.Claims.(*jwt.StandardClaims); ok {
ctx := r.Context()
if ac.JWT.Audience != "" && claims.Audience != ac.JWT.Audience {
next.ServeHTTP(w, r)
return
}
if jwtProvider == jwtAuth0 {
sub := strings.Split(claims.Subject, "|")
if len(sub) != 2 {
ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
}
} else if jwtProvider == jwtFirebase &&
claims.Issuer == firebaseIssuerPrefix+ac.JWT.Audience {
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
} else {
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
}
@ -103,3 +134,92 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
next.ServeHTTP(w, r)
}, nil
}
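The handler above selects a `jwt.Keyfunc` per provider: static-secret and public-key providers hand back one preloaded key, while the firebase branch resolves keys per token by `kid`. A minimal sketch of the static case, assuming the `dgrijalva/jwt-go` package already imported here:

```go
package sketch

import jwt "github.com/dgrijalva/jwt-go"

// staticKeyFunc builds a jwt.Keyfunc that always returns one preloaded
// key (an HMAC secret or a parsed public key), matching the
// non-firebase branch above.
func staticKeyFunc(key interface{}) jwt.Keyfunc {
	return func(t *jwt.Token) (interface{}, error) {
		return key, nil
	}
}
```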
type firebaseKeyError struct {
Err error
Message string
}
func (e *firebaseKeyError) Error() string {
if e.Err == nil {
return e.Message
}
return e.Message + ": " + e.Err.Error()
}
func firebaseKeyFunction(token *jwt.Token) (interface{}, error) {
kid, ok := token.Header["kid"]
if !ok {
return nil, &firebaseKeyError{
Message: "Error 'kid' header not found in token",
}
}
if firebasePublicKeys.Expiration.Before(time.Now()) {
resp, err := http.Get(firebasePKEndpoint)
if err != nil {
return nil, &firebaseKeyError{
Message: "Error connecting to firebase certificate server",
Err: err,
}
}
defer resp.Body.Close()
data, err := ioutil.ReadAll(resp.Body)
if err != nil {
return nil, &firebaseKeyError{
Message: "Error reading firebase certificate server response",
Err: err,
}
}
cachePolicy := resp.Header.Get("cache-control")
ageIndex := strings.Index(cachePolicy, "max-age=")
if ageIndex < 0 {
return nil, &firebaseKeyError{
Message: "Error parsing cache-control header: 'max-age=' not found",
}
}
ageToEnd := cachePolicy[ageIndex+8:]
endIndex := strings.Index(ageToEnd, ",")
if endIndex < 0 {
endIndex = len(ageToEnd)
}
ageString := ageToEnd[:endIndex]
age, err := strconv.ParseInt(ageString, 10, 64)
if err != nil {
return nil, &firebaseKeyError{
Message: "Error parsing max-age cache policy",
Err: err,
}
}
expiration := time.Now().Add(time.Duration(age) * time.Second)
err = json.Unmarshal(data, &firebasePublicKeys.PublicKeys)
if err != nil {
firebasePublicKeys = firebasePKCache{}
return nil, &firebaseKeyError{
Message: "Error unmarshalling firebase public key json",
Err: err,
}
}
firebasePublicKeys.Expiration = expiration
}
if key, found := firebasePublicKeys.PublicKeys[kid.(string)]; found {
k, err := jwt.ParseRSAPublicKeyFromPEM([]byte(key))
return k, err
}
return nil, &firebaseKeyError{
Message: "Error no matching public key for kid supplied in jwt",
}
}
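The key cache above expires according to the `max-age` directive Google sends in the `Cache-Control` header. That parsing step can be isolated; a minimal sketch with illustrative names:

```go
package sketch

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// maxAge pulls the max-age value (in seconds) out of a Cache-Control
// header such as "public, max-age=19008, must-revalidate".
func maxAge(cacheControl string) (time.Duration, error) {
	i := strings.Index(cacheControl, "max-age=")
	if i < 0 {
		return 0, fmt.Errorf("no max-age in %q", cacheControl)
	}
	v := cacheControl[i+len("max-age="):]
	if j := strings.Index(v, ","); j >= 0 {
		v = v[:j]
	}
	secs, err := strconv.ParseInt(strings.TrimSpace(v), 10, 64)
	if err != nil {
		return 0, err
	}
	return time.Duration(secs) * time.Second, nil
}
```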

View File

@ -6,9 +6,11 @@ import (
"database/sql"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"text/template"
@ -105,39 +107,40 @@ func (defaultMigratorFS) Glob(pattern string) ([]string, error) {
func FindMigrationsEx(path string, fs MigratorFS) ([]string, error) {
path = strings.TrimRight(path, string(filepath.Separator))
fileInfos, err := fs.ReadDir(path)
files, err := ioutil.ReadDir(path)
if err != nil {
return nil, err
log.Fatal(err)
}
paths := make([]string, 0, len(fileInfos))
for _, fi := range fileInfos {
fm := make(map[int]string, len(files))
keys := make([]int, 0, len(files))
for _, fi := range files {
if fi.IsDir() {
continue
}
matches := migrationPattern.FindStringSubmatch(fi.Name())
if len(matches) != 2 {
continue
}
n, err := strconv.ParseInt(matches[1], 10, 32)
n, err := strconv.Atoi(matches[1])
if err != nil {
// The regexp already validated that the prefix is all digits so this *should* never fail
return nil, err
}
mcount := len(paths)
fm[n] = filepath.Join(path, fi.Name())
keys = append(keys, n)
}
if n < int64(mcount) {
return nil, fmt.Errorf("Duplicate migration %d", n)
}
sort.Ints(keys)
if int64(mcount) < n {
return nil, fmt.Errorf("Missing migration %d", mcount)
}
paths = append(paths, filepath.Join(path, fi.Name()))
paths := make([]string, 0, len(keys))
for _, k := range keys {
paths = append(paths, fm[k])
}
return paths, nil
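The rewritten `FindMigrationsEx` orders migrations by collecting numeric filename prefixes into a map and sorting the keys. A standalone sketch of that map-then-sort-keys approach; the filename pattern here assumes the common `<number>_<name>.sql` convention and may differ from the package's real `migrationPattern`:

```go
package sketch

import (
	"regexp"
	"sort"
	"strconv"
)

// pattern assumes "<number>_<name>.sql" migration filenames.
var pattern = regexp.MustCompile(`\A(\d+)_.+\.sql\z`)

// orderMigrations sorts migration filenames by their numeric prefix.
func orderMigrations(names []string) []string {
	fm := make(map[int]string, len(names))
	keys := make([]int, 0, len(names))
	for _, name := range names {
		m := pattern.FindStringSubmatch(name)
		if len(m) != 2 {
			continue
		}
		n, err := strconv.Atoi(m[1])
		if err != nil {
			continue // the regexp guarantees digits; defensive anyway
		}
		fm[n] = name
		keys = append(keys, n)
	}
	sort.Ints(keys)
	out := make([]string, 0, len(keys))
	for _, k := range keys {
		out = append(out, fm[k])
	}
	return out
}
```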

File diff suppressed because one or more lines are too long

View File

@ -11,9 +11,9 @@
// opt-in, read http://bit.ly/CRA-PWA
const isLocalhost = Boolean(
window.location.hostname === 'localhost' ||
window.location.hostname === "localhost" ||
// [::1] is the IPv6 localhost address.
window.location.hostname === '[::1]' ||
window.location.hostname === "[::1]" ||
// 127.0.0.1/8 is considered localhost for IPv4.
window.location.hostname.match(
/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
@ -21,7 +21,7 @@ const isLocalhost = Boolean(
);
export function register(config) {
if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) {
// The URL constructor is available in all browsers that support SW.
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
if (publicUrl.origin !== window.location.origin) {
@ -31,7 +31,7 @@ export function register(config) {
return;
}
window.addEventListener('load', () => {
window.addEventListener("load", () => {
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
if (isLocalhost) {
@ -42,8 +42,8 @@ export function register(config) {
// service worker/PWA documentation.
navigator.serviceWorker.ready.then(() => {
console.log(
'This web app is being served cache-first by a service ' +
'worker. To learn more, visit http://bit.ly/CRA-PWA'
"This web app is being served cache-first by a service " +
"worker. To learn more, visit http://bit.ly/CRA-PWA"
);
});
} else {
@ -57,21 +57,21 @@ export function register(config) {
function registerValidSW(swUrl, config) {
navigator.serviceWorker
.register(swUrl)
.then(registration => {
.then((registration) => {
registration.onupdatefound = () => {
const installingWorker = registration.installing;
if (installingWorker == null) {
return;
}
installingWorker.onstatechange = () => {
if (installingWorker.state === 'installed') {
if (installingWorker.state === "installed") {
if (navigator.serviceWorker.controller) {
// At this point, the updated precached content has been fetched,
// but the previous service worker will still serve the older
// content until all client tabs are closed.
console.log(
'New content is available and will be used when all ' +
'tabs for this page are closed. See http://bit.ly/CRA-PWA.'
"New content is available and will be used when all " +
"tabs for this page are closed. See http://bit.ly/CRA-PWA."
);
// Execute callback
@ -82,7 +82,7 @@ function registerValidSW(swUrl, config) {
// At this point, everything has been precached.
// It's the perfect time to display a
// "Content is cached for offline use." message.
console.log('Content is cached for offline use.');
console.log("Content is cached for offline use.");
// Execute callback
if (config && config.onSuccess) {
@ -93,23 +93,23 @@ function registerValidSW(swUrl, config) {
};
};
})
.catch(error => {
console.error('Error during service worker registration:', error);
.catch((error) => {
console.error("Error during service worker registration:", error);
});
}
function checkValidServiceWorker(swUrl, config) {
// Check if the service worker can be found. If it can't reload the page.
fetch(swUrl)
.then(response => {
.then((response) => {
// Ensure service worker exists, and that we really are getting a JS file.
const contentType = response.headers.get('content-type');
const contentType = response.headers.get("content-type");
if (
response.status === 404 ||
(contentType != null && contentType.indexOf('javascript') === -1)
(contentType != null && contentType.indexOf("javascript") === -1)
) {
// No service worker found. Probably a different app. Reload the page.
navigator.serviceWorker.ready.then(registration => {
navigator.serviceWorker.ready.then((registration) => {
registration.unregister().then(() => {
window.location.reload();
});
@ -121,14 +121,14 @@ function checkValidServiceWorker(swUrl, config) {
})
.catch(() => {
console.log(
'No internet connection found. App is running in offline mode.'
"No internet connection found. App is running in offline mode."
);
});
}
export function unregister() {
if ('serviceWorker' in navigator) {
navigator.serviceWorker.ready.then(registration => {
if ("serviceWorker" in navigator) {
navigator.serviceWorker.ready.then((registration) => {
registration.unregister();
});
}

13
jsn/bench.1 Normal file
View File

@ -0,0 +1,13 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/jsn
BenchmarkGet
BenchmarkGet-16 13898 85293 ns/op 3328 B/op 2 allocs/op
BenchmarkFilter
BenchmarkFilter-16 189328 6341 ns/op 448 B/op 1 allocs/op
BenchmarkStrip
BenchmarkStrip-16 219765 5543 ns/op 224 B/op 1 allocs/op
BenchmarkReplace
BenchmarkReplace-16 100899 12022 ns/op 416 B/op 1 allocs/op
PASS
ok github.com/dosco/super-graph/jsn 6.029s

View File

@ -3,6 +3,7 @@ package jsn
import (
"bytes"
"encoding/json"
"errors"
"io"
)
@ -68,7 +69,12 @@ func Clear(w *bytes.Buffer, v []byte) error {
}
io := int(dec.InputOffset())
w.Write(v[io-len(v1)-2 : io])
s := io - len(v1) - 2
if io <= s || s <= 0 {
return errors.New("invalid json")
}
w.Write(v[s:io])
w.WriteString(`:`)
isValue = true

View File

@ -2,17 +2,19 @@ package jsn
import (
"bytes"
"github.com/cespare/xxhash/v2"
"hash/maphash"
)
// Filter function filters the JSON keeping only the provided keys and removing all others
func Filter(w *bytes.Buffer, b []byte, keys []string) error {
var err error
kmap := make(map[uint64]struct{}, len(keys))
h := maphash.Hash{}
for i := range keys {
kmap[xxhash.Sum64String(keys[i])] = struct{}{}
_, _ = h.WriteString(keys[i])
kmap[h.Sum64()] = struct{}{}
h.Reset()
}
// is an list
@ -132,7 +134,11 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
cb := b[s:(e + 1)]
e = 0
if _, ok := kmap[xxhash.Sum64(k)]; !ok {
_, _ = h.Write(k)
_, ok := kmap[h.Sum64()]
h.Reset()
if !ok {
continue
}

View File

@ -8,6 +8,8 @@ import (
"github.com/dosco/super-graph/jsn"
)
var ret int
func TestFuzzCrashers(t *testing.T) {
var crashers = []string{
"00\"0000\"0{",
@ -56,9 +58,16 @@ func TestFuzzCrashers(t *testing.T) {
"0000\"0\"{",
"000\"000\"{",
"\"00000000\"{",
`0000"00"00000000"000000000"00"000000000000000"00000"00000": "00"0"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
`0000"000000000000000000000000000000000000"00000000"000000000"00"000000000000000"00000"00000": "00000000000000"00000"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
`00"__twitter_id":[{ "name": "hello" }, { "name": "world"}]`,
"\"\xb0\xef\xbd\xe3\xbd\xef\x99\xe3\xbd\xef\xbd\xef\xbd\xef\xbd\xe5\x99\xe3\xbd" +
"\xef\x99\xe3\"",
"\"\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xe3\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xef\xe3" +
"\xe3\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\"",
}
for _, f := range crashers {
_ = jsn.Fuzz([]byte(f))
ret = jsn.Fuzz([]byte(f))
}
}

View File

@ -1,7 +1,7 @@
package jsn
import (
"github.com/cespare/xxhash/v2"
"hash/maphash"
)
const (
@ -41,9 +41,12 @@ func Value(b []byte) []byte {
// Keys function fetches values for the provided keys
func Get(b []byte, keys [][]byte) []Field {
kmap := make(map[uint64]struct{}, len(keys))
h := maphash.Hash{}
for i := range keys {
kmap[xxhash.Sum64(keys[i])] = struct{}{}
_, _ = h.Write(keys[i])
kmap[h.Sum64()] = struct{}{}
h.Reset()
}
res := make([]Field, 0, 20)
@ -141,7 +144,9 @@ func Get(b []byte, keys [][]byte) []Field {
}
if e != 0 {
_, ok := kmap[xxhash.Sum64(k)]
_, _ = h.Write(k)
_, ok := kmap[h.Sum64()]
h.Reset()
if ok {
res = append(res, Field{k, b[s:(e + 1)]})

View File

@ -3,8 +3,7 @@ package jsn
import (
"bytes"
"errors"
"github.com/cespare/xxhash/v2"
"hash/maphash"
)
// Replace function replaces key-value pairs provided in the `from` argument with those in the `to` argument
@ -18,7 +17,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
return err
}
h := xxhash.New()
h := maphash.Hash{}
tmap := make(map[uint64]int, len(from))
for i, f := range from {
@ -133,9 +132,18 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
if e != 0 {
e++
if e <= s {
return errors.New("invalid json")
}
if _, err := h.Write(b[s:e]); err != nil {
return err
}
if (we + 1) <= ws {
return errors.New("invalid json")
}
n, ok := tmap[h.Sum64()]
h.Reset()