Compare commits

9 Commits:
33f3fefbf3
a775f9475b
bd157290f6
82cc712a93
0ce129de14
1a15e433ba
816121fbcf
e82e97a9d7
6102f1d66e
Dockerfile
@@ -12,8 +12,7 @@ FROM golang:1.14-alpine as go-build
 RUN apk update && \
     apk add --no-cache make && \
     apk add --no-cache git && \
-    apk add --no-cache jq && \
-    apk add --no-cache upx=3.95-r2
+    apk add --no-cache jq

 RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc

core/api.go (15 changes)
@@ -49,6 +49,7 @@ import (
 	"crypto/sha256"
 	"database/sql"
 	"encoding/json"
+	"hash/maphash"
 	_log "log"
 	"os"
@@ -83,10 +84,11 @@ type SuperGraph struct {
 	schema      *psql.DBSchema
 	allowList   *allow.List
 	encKey      [32]byte
-	prepared    map[string]*preparedItem
+	hashSeed    maphash.Seed
+	queries     map[uint64]query
 	roles       map[string]*Role
 	getRole     *sql.Stmt
-	rmap        map[uint64]*resolvFn
+	rmap        map[uint64]resolvFn
 	abacEnabled bool
 	anonExists  bool
 	qc          *qcode.Compiler
@@ -107,10 +109,11 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph, error) {
 	}

 	sg := &SuperGraph{
 		conf:     conf,
 		db:       db,
 		dbinfo:   dbinfo,
 		log:      _log.New(os.Stdout, "", 0),
+		hashSeed: maphash.MakeSeed(),
 	}

 	if err := sg.initConfig(); err != nil {

@@ -12,7 +12,8 @@ import (
 // to a prepared statement.

 func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
-	vars := make([]interface{}, len(md.Params))
+	params := md.Params()
+	vars := make([]interface{}, len(params))

 	var fields map[string]json.RawMessage
 	var err error
@@ -25,7 +26,7 @@ func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
 		}
 	}

-	for i, p := range md.Params {
+	for i, p := range params {
 		switch p.Name {
 		case "user_id":
 			if v := c.Value(UserIDKey); v != nil {

core/bench.11 (new file, 41 lines)
@@ -0,0 +1,41 @@
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core
BenchmarkGraphQL-16 INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
INF roles_query not defined: attribute based access control disabled
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
105048 10398 ns/op 18342 B/op 55 allocs/op
PASS
ok github.com/dosco/super-graph/core 1.328s
PASS
ok github.com/dosco/super-graph/core/internal/allow 0.088s
? github.com/dosco/super-graph/core/internal/crypto [no test files]
? github.com/dosco/super-graph/core/internal/integration_tests [no test files]
PASS
ok github.com/dosco/super-graph/core/internal/integration_tests/cockroachdb 0.121s
PASS
ok github.com/dosco/super-graph/core/internal/integration_tests/postgresql 0.118s
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/psql
BenchmarkCompile-16 79845 14428 ns/op 4584 B/op 39 allocs/op
BenchmarkCompileParallel-16 326205 3918 ns/op 4633 B/op 39 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/psql 2.696s
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16 146953 8049 ns/op 3756 B/op 28 allocs/op
BenchmarkQCompileP-16 475936 2447 ns/op 3790 B/op 28 allocs/op
BenchmarkParse-16 140811 8163 ns/op 3902 B/op 18 allocs/op
BenchmarkParseP-16 571345 2041 ns/op 3903 B/op 18 allocs/op
BenchmarkSchemaParse-16 230715 5012 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP-16 802426 1565 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 8.427s
? github.com/dosco/super-graph/core/internal/util [no test files]

@@ -88,6 +88,7 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {

 	stmts := make([]stmt, 0, len(sg.conf.Roles))
 	w := &bytes.Buffer{}
+	md := psql.Metadata{}

 	for i := 0; i < len(sg.conf.Roles); i++ {
 		role := &sg.conf.Roles[i]
@@ -105,16 +106,18 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 		stmts = append(stmts, stmt{role: role, qc: qc})
 		s := &stmts[len(stmts)-1]

-		s.md, err = sg.pc.Compile(w, qc, psql.Variables(vm))
+		md, err = sg.pc.CompileWithMetadata(w, qc, psql.Variables(vm), md)
 		if err != nil {
 			return nil, err
 		}

 		s.sql = w.String()
+		s.md = md

 		w.Reset()
 	}

-	sql, err := sg.renderUserQuery(stmts)
+	sql, err := sg.renderUserQuery(md, stmts)
 	if err != nil {
 		return nil, err
 	}
@@ -124,7 +127,7 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
 }

 //nolint: errcheck
-func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
+func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, error) {
 	w := &bytes.Buffer{}

 	io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)
@@ -142,7 +145,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
 	}

 	io.WriteString(w, `END) FROM (SELECT (CASE WHEN EXISTS (`)
-	io.WriteString(w, sg.conf.RolesQuery)
+	md.RenderVar(w, sg.conf.RolesQuery)
 	io.WriteString(w, `) THEN `)

 	io.WriteString(w, `(SELECT (CASE`)
@@ -158,7 +161,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
 	}

 	io.WriteString(w, ` ELSE 'user' END) FROM (`)
-	io.WriteString(w, sg.conf.RolesQuery)
+	md.RenderVar(w, sg.conf.RolesQuery)
 	io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
 	io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler") AS "_sg_auth_info"(role) LIMIT 1; `)

core/core.go (34 changes)
@@ -5,6 +5,7 @@ import (
 	"database/sql"
 	"encoding/json"
 	"fmt"
+	"hash/maphash"
 	"time"

 	"github.com/dosco/super-graph/core/internal/psql"
@@ -124,7 +125,7 @@ func (c *scontext) execQuery() ([]byte, error) {
 		return nil, err
 	}

-	if len(data) == 0 || st.md.Skipped == 0 {
+	if len(data) == 0 || st.md.Skipped() == 0 {
 		return data, nil
 	}

@@ -165,32 +166,43 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {

 	} else {
 		role = c.role

 	}

 	c.res.role = role

-	ps, ok := c.sg.prepared[stmtHash(c.res.name, role)]
+	h := maphash.Hash{}
+	h.SetSeed(c.sg.hashSeed)
+
+	q, ok := c.sg.queries[queryID(&h, c.res.name, role)]
 	if !ok {
 		return nil, nil, errNotFound
 	}
-	c.res.sql = ps.st.sql
+
+	if q.sd == nil {
+		q.Do(func() { c.sg.prepare(&q, role) })
+
+		if q.err != nil {
+			return nil, nil, err
+		}
+	}
+
+	c.res.sql = q.st.sql

 	var root []byte
 	var row *sql.Row

-	varsList, err := c.argList(ps.st.md)
+	varsList, err := c.argList(q.st.md)
 	if err != nil {
 		return nil, nil, err
 	}

 	if useTx {
-		row = tx.Stmt(ps.sd).QueryRow(varsList...)
+		row = tx.Stmt(q.sd).QueryRow(varsList...)
 	} else {
-		row = ps.sd.QueryRow(varsList...)
 	}

-	if ps.roleArg {
+	row = q.sd.QueryRow(varsList...)
+	}

+	if q.roleArg {
 		err = row.Scan(&role, &root)
 	} else {
 		err = row.Scan(&root)
@@ -204,15 +216,15 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {

 	if useTx {
 		if err := tx.Commit(); err != nil {
-			return nil, nil, err
+			return nil, nil, q.err
 		}
 	}

-	if root, err = c.sg.encryptCursor(ps.st.qc, root); err != nil {
+	if root, err = c.sg.encryptCursor(q.st.qc, root); err != nil {
 		return nil, nil, err
 	}

-	return root, &ps.st, nil
+	return root, &q.st, nil
 }

 func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
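
Note: the statement cache above moves from string keys (stmtHash) to maphash-based uint64 keys, and preparation now happens lazily behind sync.Once (q.Do) on first use instead of eagerly at startup. Neither the query struct nor the queryID helper appears in this diff, so the following is only a sketch of their assumed shape, not the actual implementation:

package main

import (
	"database/sql"
	"hash/maphash"
	"sync"
)

// Assumed shape of a cache entry: embedding sync.Once lets q.Do prepare
// the statement exactly once, even with racing goroutines.
type query struct {
	sync.Once
	sd      *sql.Stmt // prepared lazily on first execution
	err     error
	roleArg bool
}

// queryID hashes the operation name and role into the uint64 key used by
// the queries map; the hash must be seeded with sg.hashSeed so keys are
// stable within one process.
func queryID(h *maphash.Hash, name, role string) uint64 {
	h.WriteString(name)
	h.WriteString(role)
	v := h.Sum64()
	h.Reset()
	return v
}

func main() {
	h := maphash.Hash{}
	h.SetSeed(maphash.MakeSeed())
	_ = queryID(&h, "getUsers", "anon")
}

hash/maphash shipped in Go 1.14, matching the golang:1.14-alpine base image in the Dockerfile above; its seed is per-process, which is fine here because the keys are never persisted.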

core/init.go (21 changes)
@@ -74,14 +74,23 @@ func (sg *SuperGraph) initConfig() error {
 	}

 	if c.RolesQuery == "" {
-		sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
+		sg.log.Printf("INF roles_query not defined: attribute based access control disabled")
+	} else {
+		n := 0
+		for k, v := range sg.roles {
+			if k == "user" || k == "anon" {
+				n++
+			} else if v.Match != "" {
+				n++
+			}
+		}
+		sg.abacEnabled = (n > 2)
+
+		if !sg.abacEnabled {
+			sg.log.Printf("WRN attribute based access control disabled: no custom roles found (with 'match' defined)")
+		}
 	}

-	_, userExists := sg.roles["user"]
-	_, sg.anonExists = sg.roles["anon"]
-
-	sg.abacEnabled = userExists && c.RolesQuery != ""
-
 	return nil
 }
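
The rewritten check counts the built-in user and anon roles plus any custom role that defines a match attribute, and enables attribute-based access control only when that count exceeds two, i.e. when at least one usable custom role exists alongside roles_query. A standalone restatement of the rule (the role type here is illustrative, not the one in core):

package main

import "fmt"

type role struct{ Match string }

// abacEnabled mirrors the counting loop in initConfig: "user" and "anon"
// always count, other roles count only when they define a match rule.
func abacEnabled(roles map[string]role) bool {
	n := 0
	for k, v := range roles {
		if k == "user" || k == "anon" || v.Match != "" {
			n++
		}
	}
	return n > 2
}

func main() {
	rs := map[string]role{"user": {}, "anon": {}}
	fmt.Println(abacEnabled(rs)) // false: no custom role with a match rule

	rs["admin"] = role{Match: "id = 1"}
	fmt.Println(abacEnabled(rs)) // true
}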

@@ -6,6 +6,7 @@ import (
 	"errors"
 	"fmt"
 	"io/ioutil"
+	"log"
 	"os"
 	"sort"
 	"strings"
@@ -35,6 +36,7 @@ type List struct {
 type Config struct {
 	CreateIfNotExists bool
 	Persist           bool
+	Log               *log.Logger
 }

 func New(filename string, conf Config) (*List, error) {
@@ -80,6 +82,12 @@ func New(filename string, conf Config) (*List, error) {
 	} else {
 		al.filepath = filename
 	}

+	if file, err := os.OpenFile(al.filepath, os.O_RDONLY|os.O_CREATE, 0644); err != nil {
+		return nil, err
+	} else {
+		file.Close()
+	}
 	}

 	var err error
@@ -89,8 +97,10 @@ func New(filename string, conf Config) (*List, error) {

 	go func() {
 		for v := range al.saveChan {
-			if err = al.save(v); err != nil {
-				break
+			err := al.save(v)
+
+			if err != nil && conf.Log != nil {
+				conf.Log.Println("WRN allow list save:", err)
 			}
 		}
 	}()
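
Config.Log gives callers a way to surface async save failures that the goroutine above previously swallowed with a silent break. A minimal construction sketch; the file path is hypothetical, and since the allow package is internal this only compiles from within the module:

package main

import (
	"log"
	"os"

	"github.com/dosco/super-graph/core/internal/allow"
)

func main() {
	list, err := allow.New("./config/allow.list", allow.Config{ // hypothetical path
		CreateIfNotExists: true,
		Persist:           true,
		Log:               log.New(os.Stdout, "", 0), // receives "WRN allow list save: ..." lines
	})
	if err != nil {
		log.Fatal(err)
	}
	_ = list
}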

@@ -1,4 +1,3 @@
-//nolint:errcheck
 package psql

 import (
@@ -112,15 +111,15 @@ func (c *compilerContext) renderColumnSearchRank(sel *qcode.Select, ti *DBTableInfo,
 	c.renderComma(columnsRendered)
 	//fmt.Fprintf(w, `ts_rank("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
 	//c.sel.Name, cn, arg.Val, col.Name)
-	io.WriteString(c.w, `ts_rank(`)
+	_, _ = io.WriteString(c.w, `ts_rank(`)
 	colWithTable(c.w, ti.Name, cn)
 	if c.schema.ver >= 110000 {
-		io.WriteString(c.w, `, websearch_to_tsquery(`)
+		_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
 	} else {
-		io.WriteString(c.w, `, to_tsquery(`)
+		_, _ = io.WriteString(c.w, `, to_tsquery(`)
 	}
-	c.renderValueExp(Param{Name: arg.Val, Type: "string"})
-	io.WriteString(c.w, `))`)
+	c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
+	_, _ = io.WriteString(c.w, `))`)
 	alias(c.w, col.Name)

 	return nil
@@ -137,15 +136,15 @@ func (c *compilerContext) renderColumnSearchHeadline(sel *qcode.Select, ti *DBTableInfo,
 	c.renderComma(columnsRendered)
 	//fmt.Fprintf(w, `ts_headline("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
 	//c.sel.Name, cn, arg.Val, col.Name)
-	io.WriteString(c.w, `ts_headline(`)
+	_, _ = io.WriteString(c.w, `ts_headline(`)
 	colWithTable(c.w, ti.Name, cn)
 	if c.schema.ver >= 110000 {
-		io.WriteString(c.w, `, websearch_to_tsquery(`)
+		_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
 	} else {
-		io.WriteString(c.w, `, to_tsquery(`)
+		_, _ = io.WriteString(c.w, `, to_tsquery(`)
 	}
-	c.renderValueExp(Param{Name: arg.Val, Type: "string"})
-	io.WriteString(c.w, `))`)
+	c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
+	_, _ = io.WriteString(c.w, `))`)
 	alias(c.w, col.Name)

 	return nil
@@ -157,9 +156,9 @@ func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInfo,
 	}

 	c.renderComma(columnsRendered)
-	io.WriteString(c.w, `(`)
+	_, _ = io.WriteString(c.w, `(`)
 	squoted(c.w, ti.Name)
-	io.WriteString(c.w, ` :: text)`)
+	_, _ = io.WriteString(c.w, ` :: text)`)
 	alias(c.w, col.Name)

 	return nil
@@ -169,9 +168,9 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInfo,
 	pl := funcPrefixLen(c.schema.fm, col.Name)
 	// if pl == 0 {
 	// 	//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
-	// 	io.WriteString(c.w, `'`)
-	// 	io.WriteString(c.w, col.Name)
-	// 	io.WriteString(c.w, ` not defined'`)
+	// 	_, _ = io.WriteString(c.w, `'`)
+	// 	_, _ = io.WriteString(c.w, col.Name)
+	// 	_, _ = io.WriteString(c.w, ` not defined'`)
 	// 	alias(c.w, col.Name)
 	// }

@@ -190,10 +189,10 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInfo,
 	c.renderComma(columnsRendered)

 	//fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Name, cn, col.Name)
-	io.WriteString(c.w, fn)
-	io.WriteString(c.w, `(`)
+	_, _ = io.WriteString(c.w, fn)
+	_, _ = io.WriteString(c.w, `(`)
 	colWithTable(c.w, ti.Name, cn)
-	io.WriteString(c.w, `)`)
+	_, _ = io.WriteString(c.w, `)`)
 	alias(c.w, col.Name)

 	return nil
@@ -201,7 +200,7 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInfo,

 func (c *compilerContext) renderComma(columnsRendered int) {
 	if columnsRendered != 0 {
-		io.WriteString(c.w, `, `)
+		_, _ = io.WriteString(c.w, `, `)
 	}
 }

@@ -11,7 +11,7 @@ import (
 var (
 	qcompileTest, _ = qcode.NewCompiler(qcode.Config{})

-	schema = GetTestSchema()
+	schema, _ = GetTestSchema()

 	vars = map[string]string{
 		"admin_account_id": "5",
@@ -25,6 +25,37 @@ var (

 // FuzzerEntrypoint for Fuzzbuzz
 func Fuzz(data []byte) int {
+	err1 := query(data)
+	err2 := insert(data)
+	err3 := update(data)
+	err4 := delete(data)
+
+	if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
+		return 0
+	}
+
+	return 1
+}
+
+func query(data []byte) error {
+	gql := data
+
+	qc, err1 := qcompileTest.Compile(gql, "user")
+
+	vars := map[string]json.RawMessage{
+		"data": json.RawMessage(data),
+	}
+
+	_, _, err2 := pcompileTest.CompileEx(qc, vars)
+
+	if err1 != nil {
+		return err1
+	} else {
+		return err2
+	}
+}
+
+func insert(data []byte) error {
 	gql := `mutation {
 		product(insert: $data) {
 			id
@@ -47,9 +78,57 @@ func Fuzz(data []byte) int {
 	}

 	_, _, err = pcompileTest.CompileEx(qc, vars)
+	return err
+}
+
+func update(data []byte) error {
+	gql := `mutation {
+		product(insert: $data) {
+			id
+			name
+			user {
+				id
+				full_name
+				email
+			}
+		}
+	}`
+
+	qc, err := qcompileTest.Compile([]byte(gql), "user")
 	if err != nil {
-		return 0
+		panic("qcompile can't fail")
 	}

-	return 1
+	vars := map[string]json.RawMessage{
+		"data": json.RawMessage(data),
+	}
+
+	_, _, err = pcompileTest.CompileEx(qc, vars)
+	return err
+}
+
+func delete(data []byte) error {
+	gql := `mutation {
+		product(insert: $data) {
+			id
+			name
+			user {
+				id
+				full_name
+				email
+			}
+		}
+	}`
+
+	qc, err := qcompileTest.Compile([]byte(gql), "user")
+	if err != nil {
+		panic("qcompile can't fail")
+	}
+
+	vars := map[string]json.RawMessage{
+		"data": json.RawMessage(data),
+	}
+
+	_, _, err = pcompileTest.CompileEx(qc, vars)
+	return err
 }

core/internal/psql/fuzz_test.go (new file, 20 lines)
@@ -0,0 +1,20 @@
// +build gofuzz

package psql

import (
	"testing"
)

var ret int

func TestFuzzCrashers(t *testing.T) {
	var crashers = []string{
		"{\"connect\":{}}",
		"q(q{q{q{q{q{q{q{q{",
	}

	for _, f := range crashers {
		ret = Fuzz([]byte(f))
	}
}

@@ -25,7 +25,7 @@ func (c *compilerContext) renderInsert(
 	if insert[0] == '[' {
 		io.WriteString(c.w, `json_array_elements(`)
 	}
-	c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
+	c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
 	io.WriteString(c.w, ` :: json`)
 	if insert[0] == '[' {
 		io.WriteString(c.w, `)`)

core/internal/psql/metadata.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package psql

import (
	"io"
)

func (md *Metadata) RenderVar(w io.Writer, vv string) {
	f, s := -1, 0

	for i := range vv {
		v := vv[i]
		switch {
		case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
			if (i - s) > 0 {
				_, _ = io.WriteString(w, vv[s:i])
			}
			f = i

		case (v < 'a' && v > 'z') &&
			(v < 'A' && v > 'Z') &&
			(v < '0' && v > '9') &&
			v != '_' &&
			f != -1 &&
			(i-f) > 1:
			md.renderValueExp(w, Param{Name: vv[f+1 : i]})
			s = i
			f = -1
		}
	}

	if f != -1 && (len(vv)-f) > 1 {
		md.renderValueExp(w, Param{Name: vv[f+1:]})
	} else {
		_, _ = io.WriteString(w, vv[s:])
	}
}

func (md *Metadata) renderValueExp(w io.Writer, p Param) {
	_, _ = io.WriteString(w, `$`)
	if v, ok := md.pindex[p.Name]; ok {
		int32String(w, int32(v))

	} else {
		md.params = append(md.params, p)
		n := len(md.params)

		if md.pindex == nil {
			md.pindex = make(map[string]int)
		}
		md.pindex[p.Name] = n
		int32String(w, int32(n))
	}
}

func (md Metadata) Skipped() uint32 {
	return md.skipped
}

func (md Metadata) Params() []Param {
	return md.params
}
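
RenderVar is what renderUserQuery now calls on conf.RolesQuery: it copies the SQL through to the writer, and each $var is handed to renderValueExp, which assigns the next free positional index and records a Param so argList can later bind values in order. A small illustration of the exported surface (it compiles only inside this module, since psql is internal):

package main

import (
	"bytes"
	"fmt"

	"github.com/dosco/super-graph/core/internal/psql"
)

func main() {
	md := psql.Metadata{}
	w := &bytes.Buffer{}

	// The literal prefix is written through; the trailing $user_id is
	// registered as a Param and rendered as the positional placeholder $1.
	md.RenderVar(w, `SELECT role FROM users WHERE id = $user_id`)

	fmt.Println(w.String()) // SELECT role FROM users WHERE id = $1
	for _, p := range md.Params() {
		fmt.Println(p.Name) // user_id
	}
}

Repeating a variable reuses its existing index via the pindex map, so the same $var never consumes two placeholders.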

@@ -432,11 +432,11 @@ func (c *compilerContext) renderInsertUpdateColumns(
 			val := root.PresetMap[cn]
 			switch {
 			case ok && len(val) > 1 && val[0] == '$':
-				c.renderValueExp(Param{Name: val[1:], Type: col.Type})
+				c.md.renderValueExp(c.w, Param{Name: val[1:], Type: col.Type})

 			case ok && strings.HasPrefix(val, "sql:"):
 				io.WriteString(c.w, `(`)
-				c.renderVar(val[4:], c.renderValueExp)
+				c.md.RenderVar(c.w, val[4:])
 				io.WriteString(c.w, `)`)

 			case ok:
@@ -542,6 +542,10 @@ func (c *compilerContext) renderConnectStmt(qc *qcode.QCode, w io.Writer,

 	rel := item.relPC

+	if rel == nil {
+		return errors.New("invalid connect value")
+	}
+
 	// Render only for parent-to-child relationship of one-to-one
 	// For this to work the child needs to found first so it's primary key
 	// can be set in the related column on the parent object.
|
|||||||
}
|
}
|
||||||
|
|
||||||
type Metadata struct {
|
type Metadata struct {
|
||||||
Skipped uint32
|
skipped uint32
|
||||||
Params []Param
|
params []Param
|
||||||
pindex map[string]int
|
pindex map[string]int
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -80,22 +80,30 @@ func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (Metadata, []byte
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
||||||
|
return co.CompileWithMetadata(w, qc, vars, Metadata{})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (co *Compiler) CompileWithMetadata(w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {
|
||||||
|
md.skipped = 0
|
||||||
|
|
||||||
|
if qc == nil {
|
||||||
|
return md, fmt.Errorf("qcode is nil")
|
||||||
|
}
|
||||||
|
|
||||||
switch qc.Type {
|
switch qc.Type {
|
||||||
case qcode.QTQuery:
|
case qcode.QTQuery:
|
||||||
return co.compileQuery(w, qc, vars)
|
return co.compileQueryWithMetadata(w, qc, vars, md)
|
||||||
|
|
||||||
case qcode.QTInsert,
|
case qcode.QTInsert,
|
||||||
qcode.QTUpdate,
|
qcode.QTUpdate,
|
||||||
qcode.QTDelete,
|
qcode.QTDelete,
|
||||||
qcode.QTUpsert:
|
qcode.QTUpsert:
|
||||||
return co.compileMutation(w, qc, vars)
|
return co.compileMutation(w, qc, vars)
|
||||||
|
|
||||||
|
default:
|
||||||
|
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
|
||||||
}
|
}
|
||||||
|
|
||||||
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
|
|
||||||
}
|
|
||||||
|
|
||||||
func (co *Compiler) compileQuery(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
|
||||||
return co.compileQueryWithMetadata(w, qc, vars, Metadata{})
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func (co *Compiler) compileQueryWithMetadata(
|
func (co *Compiler) compileQueryWithMetadata(
|
||||||
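
Threading the returned Metadata back into the next call is the point of CompileWithMetadata: buildMultiStmt uses it so all per-role statements agree on one placeholder numbering. A compile-only sketch of that loop (the function and variable names here are illustrative, not from the diff):

package sketch

import (
	"bytes"

	"github.com/dosco/super-graph/core/internal/psql"
	"github.com/dosco/super-graph/core/internal/qcode"
)

// compileRoles renders one statement per role into w, threading a single
// Metadata through so every statement shares the same $n parameter index.
func compileRoles(co *psql.Compiler, qcs []*qcode.QCode, vars psql.Variables) (psql.Metadata, error) {
	w := &bytes.Buffer{}
	md := psql.Metadata{}

	for _, qc := range qcs {
		var err error
		if md, err = co.CompileWithMetadata(w, qc, vars, md); err != nil {
			return md, err
		}
		// w now holds this role's SQL; buildMultiStmt snapshots it with
		// w.String() and calls w.Reset() before the next iteration.
	}
	return md, nil
}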
@@ -172,7 +180,7 @@ func (co *Compiler) compileQueryWithMetadata(
 	}

 	for _, cid := range sel.Children {
-		if hasBit(c.md.Skipped, uint32(cid)) {
+		if hasBit(c.md.skipped, uint32(cid)) {
 			continue
 		}
 		child := &c.s[cid]
@@ -350,7 +358,7 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables)
 		if _, ok := colmap[rel.Left.Col]; !ok {
 			cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
 			colmap[rel.Left.Col] = struct{}{}
-			c.md.Skipped |= (1 << uint(id))
+			c.md.skipped |= (1 << uint(id))
 		}

 	default:
@@ -618,7 +626,7 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
 	i := colsRendered

 	for _, id := range sel.Children {
-		if hasBit(c.md.Skipped, uint32(id)) {
+		if hasBit(c.md.skipped, uint32(id)) {
 			continue
 		}
 		childSel := &c.s[id]
@@ -800,7 +808,7 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
 		quoted(c.w, ob.Col)
 	}
 	io.WriteString(c.w, ` FROM string_to_array(`)
-	c.renderValueExp(Param{Name: "cursor", Type: "json"})
+	c.md.renderValueExp(c.w, Param{Name: "cursor", Type: "json"})
 	io.WriteString(c.w, `, ',') as a) `)
 	return nil
 }
@@ -1098,7 +1106,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
 	} else {
 		io.WriteString(c.w, `) @@ to_tsquery(`)
 	}
-	c.renderValueExp(Param{Name: ex.Val, Type: "string"})
+	c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: "string"})
 	io.WriteString(c.w, `))`)

 	return nil
@@ -1187,7 +1195,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *DBColumn) {
 	switch {
 	case ok && strings.HasPrefix(val, "sql:"):
 		io.WriteString(c.w, `(`)
-		c.renderVar(val[4:], c.renderValueExp)
+		c.md.RenderVar(c.w, val[4:])
 		io.WriteString(c.w, `)`)

 	case ok:
@@ -1195,7 +1203,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *DBColumn) {

 	case ex.Op == qcode.OpIn || ex.Op == qcode.OpNotIn:
 		io.WriteString(c.w, `(ARRAY(SELECT json_array_elements_text(`)
-		c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: true})
+		c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: true})
 		io.WriteString(c.w, `))`)

 		io.WriteString(c.w, ` :: `)
@@ -1204,7 +1212,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *DBColumn) {
 		return

 	default:
-		c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: false})
+		c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: false})
 	}

 case qcode.ValRef:
@@ -1218,54 +1226,6 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *DBColumn) {
 		io.WriteString(c.w, col.Type)
 	}
 }

-func (c *compilerContext) renderValueExp(p Param) {
-	io.WriteString(c.w, `$`)
-	if v, ok := c.md.pindex[p.Name]; ok {
-		int32String(c.w, int32(v))
-
-	} else {
-		c.md.Params = append(c.md.Params, p)
-		n := len(c.md.Params)
-
-		if c.md.pindex == nil {
-			c.md.pindex = make(map[string]int)
-		}
-		c.md.pindex[p.Name] = n
-		int32String(c.w, int32(n))
-	}
-}
-
-func (c *compilerContext) renderVar(vv string, fn func(Param)) {
-	f, s := -1, 0
-
-	for i := range vv {
-		v := vv[i]
-		switch {
-		case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
-			if (i - s) > 0 {
-				io.WriteString(c.w, vv[s:i])
-			}
-			f = i
-
-		case (v < 'a' && v > 'z') &&
-			(v < 'A' && v > 'Z') &&
-			(v < '0' && v > '9') &&
-			v != '_' &&
-			f != -1 &&
-			(i-f) > 1:
-			fn(Param{Name: vv[f+1 : i]})
-			s = i
-			f = -1
-		}
-	}
-
-	if f != -1 && (len(vv)-f) > 1 {
-		fn(Param{Name: vv[f+1:]})
-	} else {
-		io.WriteString(c.w, vv[s:])
-	}
-}
-
 func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
 	switch {
 	case strings.HasPrefix(fn, "avg_"):
@@ -1353,8 +1313,6 @@ func squoted(w io.Writer, identifier string) {
 	io.WriteString(w, `'`)
 }

-const charset = "0123456789"
-
 func int32String(w io.Writer, val int32) {
 	io.WriteString(w, strconv.FormatInt(int64(val), 10))
 }

@@ -307,6 +307,80 @@ func multiRoot(t *testing.T) {
 	compileGQLToPSQL(t, gql, nil, "user")
 }

+func withFragment1(t *testing.T) {
+	gql := `
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}`
+
+	compileGQLToPSQL(t, gql, nil, "anon")
+}
+
+func withFragment2(t *testing.T) {
+	gql := `
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}`
+
+	compileGQLToPSQL(t, gql, nil, "anon")
+}
+
+func withFragment3(t *testing.T) {
+	gql := `
+
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}
+
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+	`
+
+	compileGQLToPSQL(t, gql, nil, "anon")
+}
+
 func withCursor(t *testing.T) {
 	gql := `query {
 		Products(
@@ -400,6 +474,9 @@ func TestCompileQuery(t *testing.T) {
 	t.Run("queryWithVariables", queryWithVariables)
 	t.Run("withWhereOnRelations", withWhereOnRelations)
 	t.Run("multiRoot", multiRoot)
+	t.Run("withFragment1", withFragment1)
+	t.Run("withFragment2", withFragment2)
+	t.Run("withFragment3", withFragment3)
 	t.Run("jsonColumnAsTable", jsonColumnAsTable)
 	t.Run("withCursor", withCursor)
 	t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)

@@ -86,6 +86,12 @@
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/multiRoot
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment1
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment2
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withFragment3
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/jsonColumnAsTable
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withCursor

@@ -117,6 +123,9 @@
|
--- PASS: TestCompileQuery/queryWithVariables (0.00s)
|
||||||
--- PASS: TestCompileQuery/withWhereOnRelations (0.00s)
|
--- PASS: TestCompileQuery/withWhereOnRelations (0.00s)
|
||||||
--- PASS: TestCompileQuery/multiRoot (0.00s)
|
--- PASS: TestCompileQuery/multiRoot (0.00s)
|
||||||
|
--- PASS: TestCompileQuery/withFragment1 (0.00s)
|
|
|
|
--- PASS: TestCompileQuery/jsonColumnAsTable (0.00s)
|
||||||
--- PASS: TestCompileQuery/withCursor (0.00s)
|
--- PASS: TestCompileQuery/withCursor (0.00s)
|
||||||
--- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s)
|
--- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s)
|
||||||
@ -151,4 +160,4 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALU
|
|||||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
|
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
-ok github.com/dosco/super-graph/core/internal/psql (cached)
+ok github.com/dosco/super-graph/core/internal/psql 0.374s
|||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
|
io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
|
||||||
c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
|
c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
|
||||||
// io.WriteString(c.w, qc.ActionVar)
|
// io.WriteString(c.w, qc.ActionVar)
|
||||||
io.WriteString(c.w, ` :: json AS j)`)
|
io.WriteString(c.w, ` :: json AS j)`)
|
||||||
|
|
||||||
|
@ -11,15 +11,18 @@ import (
|
|||||||
var (
|
var (
|
||||||
queryToken = []byte("query")
|
queryToken = []byte("query")
|
||||||
mutationToken = []byte("mutation")
|
mutationToken = []byte("mutation")
|
||||||
|
fragmentToken = []byte("fragment")
|
||||||
subscriptionToken = []byte("subscription")
|
subscriptionToken = []byte("subscription")
|
||||||
|
onToken = []byte("on")
|
||||||
trueToken = []byte("true")
|
trueToken = []byte("true")
|
||||||
falseToken = []byte("false")
|
falseToken = []byte("false")
|
||||||
quotesToken = []byte(`'"`)
|
quotesToken = []byte(`'"`)
|
||||||
signsToken = []byte(`+-`)
|
signsToken = []byte(`+-`)
|
||||||
punctuatorToken = []byte(`!():=[]{|}`)
|
|
||||||
spreadToken = []byte(`...`)
|
spreadToken = []byte(`...`)
|
||||||
digitToken = []byte(`0123456789`)
|
digitToken = []byte(`0123456789`)
|
||||||
dotToken = []byte(`.`)
|
dotToken = []byte(`.`)
|
||||||
|
|
||||||
|
punctuatorToken = `!():=[]{|}`
|
||||||
)
|
)
|
||||||
|
|
||||||
// Pos represents a byte position in the original input text from which
|
// Pos represents a byte position in the original input text from which
|
||||||
@ -43,6 +46,8 @@ const (
|
|||||||
itemName
|
itemName
|
||||||
itemQuery
|
itemQuery
|
||||||
itemMutation
|
itemMutation
|
||||||
|
itemFragment
|
||||||
|
itemOn
|
||||||
itemSub
|
itemSub
|
||||||
itemPunctuator
|
itemPunctuator
|
||||||
itemArgsOpen
|
itemArgsOpen
|
||||||
@ -263,11 +268,11 @@ func lexRoot(l *lexer) stateFn {
|
|||||||
l.backup()
|
l.backup()
|
||||||
return lexString
|
return lexString
|
||||||
case r == '.':
|
case r == '.':
|
||||||
if len(l.input) >= 3 {
|
l.acceptRun(dotToken)
|
||||||
if equals(l.input, 0, 3, spreadToken) {
|
s, e := l.current()
|
||||||
l.emit(itemSpread)
|
if equals(l.input, s, e, spreadToken) {
|
||||||
return lexRoot
|
l.emit(itemSpread)
|
||||||
}
|
return lexRoot
|
||||||
}
|
}
|
||||||
fallthrough // '.' can start a number.
|
fallthrough // '.' can start a number.
|
||||||
case r == '+' || r == '-' || ('0' <= r && r <= '9'):
|
case r == '+' || r == '-' || ('0' <= r && r <= '9'):
|
||||||
@ -299,10 +304,14 @@ func lexName(l *lexer) stateFn {
|
|||||||
switch {
|
switch {
|
||||||
case equals(l.input, s, e, queryToken):
|
case equals(l.input, s, e, queryToken):
|
||||||
l.emitL(itemQuery)
|
l.emitL(itemQuery)
|
||||||
|
case equals(l.input, s, e, fragmentToken):
|
||||||
|
l.emitL(itemFragment)
|
||||||
case equals(l.input, s, e, mutationToken):
|
case equals(l.input, s, e, mutationToken):
|
||||||
l.emitL(itemMutation)
|
l.emitL(itemMutation)
|
||||||
case equals(l.input, s, e, subscriptionToken):
|
case equals(l.input, s, e, subscriptionToken):
|
||||||
l.emitL(itemSub)
|
l.emitL(itemSub)
|
||||||
|
case equals(l.input, s, e, onToken):
|
||||||
|
l.emitL(itemOn)
|
||||||
case equals(l.input, s, e, trueToken):
|
case equals(l.input, s, e, trueToken):
|
||||||
l.emitL(itemBoolVal)
|
l.emitL(itemBoolVal)
|
||||||
case equals(l.input, s, e, falseToken):
|
case equals(l.input, s, e, falseToken):
|
||||||
@ -396,31 +405,11 @@ func isAlphaNumeric(r rune) bool {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func equals(b []byte, s Pos, e Pos, val []byte) bool {
|
func equals(b []byte, s Pos, e Pos, val []byte) bool {
|
||||||
n := 0
|
return bytes.EqualFold(b[s:e], val)
|
||||||
for i := s; i < e; i++ {
|
|
||||||
if n >= len(val) {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
switch {
|
|
||||||
case b[i] >= 'A' && b[i] <= 'Z' && ('a'+(b[i]-'A')) != val[n]:
|
|
||||||
return false
|
|
||||||
case b[i] != val[n]:
|
|
||||||
return false
|
|
||||||
}
|
|
||||||
n++
|
|
||||||
}
|
|
||||||
return true
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func contains(b []byte, s Pos, e Pos, val []byte) bool {
|
func contains(b []byte, s Pos, e Pos, chars string) bool {
|
||||||
for i := s; i < e; i++ {
|
return bytes.ContainsAny(b[s:e], chars)
|
||||||
for n := 0; n < len(val); n++ {
|
|
||||||
if b[i] == val[n] {
|
|
||||||
return true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false
|
|
||||||
}
|
}
|
||||||
|
|
||||||
func lowercase(b []byte, s Pos, e Pos) {
|
func lowercase(b []byte, s Pos, e Pos) {
|
||||||
|
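The hand-rolled `equals` and `contains` loops above collapse into single stdlib calls. A minimal standalone sketch (illustrative values, not project code) of why the replacements closely match what the old ASCII-only loops did:

```go
package main

import (
	"bytes"
	"fmt"
)

func main() {
	input := []byte("FRAGMENT userFields on user")

	// equals(b, s, e, val) now reduces to a case-insensitive compare of the
	// b[s:e] window against the token, much like the old byte-by-byte loop.
	fmt.Println(bytes.EqualFold(input[0:8], []byte("fragment"))) // true

	// contains(b, s, e, chars) now reduces to "does any byte of chars occur
	// in b[s:e]" — note punctuatorToken is a plain string after this change.
	fmt.Println(bytes.ContainsAny(input[0:8], `!():=[]{|}`)) // false
}
```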
@ -3,10 +3,9 @@ package qcode
 import (
 	"errors"
 	"fmt"
+	"hash/maphash"
 	"sync"
 	"unsafe"

-	"github.com/dosco/super-graph/core/internal/util"
 )

 var (
@ -35,8 +34,7 @@ const (
 	NodeVar
 )

-type Operation struct {
-	Type parserType
+type SelectionSet struct {
 	Name string
 	Args []Arg
 	argsA [10]Arg
@ -44,12 +42,29 @@ type Operation struct {
 	fieldsA [10]Field
 }

+type Operation struct {
+	Type parserType
+	SelectionSet
+}
+
 var zeroOperation = Operation{}

 func (o *Operation) Reset() {
 	*o = zeroOperation
 }

+type Fragment struct {
+	Name string
+	On   string
+	SelectionSet
+}
+
+var zeroFragment = Fragment{}
+
+func (f *Fragment) Reset() {
+	*f = zeroFragment
+}
+
 type Field struct {
 	ID       int32
 	ParentID int32
@ -82,6 +97,8 @@ func (n *Node) Reset() {
 }

 type Parser struct {
+	frags map[uint64]*Fragment
+	h     maphash.Hash
 	input []byte // the string being scanned
 	pos   int
 	items []item
@ -96,12 +113,194 @@ var opPool = sync.Pool{
 	New: func() interface{} { return new(Operation) },
 }

+var fragPool = sync.Pool{
+	New: func() interface{} { return new(Fragment) },
+}
+
 var lexPool = sync.Pool{
 	New: func() interface{} { return new(lexer) },
 }

 func Parse(gql []byte) (*Operation, error) {
-	return parseSelectionSet(gql)
+	var err error
+
+	if len(gql) == 0 {
+		return nil, errors.New("blank query")
+	}
+
+	l := lexPool.Get().(*lexer)
+	l.Reset()
+	defer lexPool.Put(l)
+
+	if err = lex(l, gql); err != nil {
+		return nil, err
+	}
+
+	p := &Parser{
+		input: l.input,
+		pos:   -1,
+		items: l.items,
+	}
+
+	op := opPool.Get().(*Operation)
+	op.Reset()
+	op.Fields = op.fieldsA[:0]
+
+	s := -1
+	qf := false
+
+	for {
+		if p.peek(itemEOF) {
+			p.ignore()
+			break
+		}
+
+		if p.peek(itemFragment) {
+			p.ignore()
+			if err = p.parseFragment(op); err != nil {
+				return nil, err
+			}
+		} else {
+			if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
+				s = p.pos
+				qf = true
+			}
+			p.ignore()
+		}
+	}
+
+	p.reset(s)
+	if err := p.parseOp(op); err != nil {
+		return nil, err
+	}
+
+	return op, nil
+}
+
+func (p *Parser) parseFragment(op *Operation) error {
+	frag := fragPool.Get().(*Fragment)
+	frag.Reset()
+
+	frag.Fields = frag.fieldsA[:0]
+	frag.Args = frag.argsA[:0]
+
+	if p.peek(itemName) {
+		frag.Name = p.val(p.next())
+	}
+
+	if p.peek(itemOn) {
+		p.ignore()
+	} else {
+		return errors.New("fragment: missing 'on' keyword")
+	}
+
+	if p.peek(itemName) {
+		frag.On = p.vall(p.next())
+	} else {
+		return errors.New("fragment: missing table name after 'on' keyword")
+	}
+
+	if p.peek(itemObjOpen) {
+		p.ignore()
+	} else {
+		return fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
+	}
+
+	if err := p.parseSelectionSet(&frag.SelectionSet); err != nil {
+		return fmt.Errorf("fragment: %v", err)
+	}
+
+	if p.frags == nil {
+		p.frags = make(map[uint64]*Fragment)
+	}
+
+	_, _ = p.h.WriteString(frag.Name)
+	k := p.h.Sum64()
+	p.h.Reset()
+
+	p.frags[k] = frag
+
+	return nil
+}
+
+func (p *Parser) parseOp(op *Operation) error {
+	var err error
+	var typeSet bool
+
+	if p.peek(itemQuery, itemMutation, itemSub) {
+		err = p.parseOpTypeAndArgs(op)
+
+		if err != nil {
+			return fmt.Errorf("%s: %v", op.Type, err)
+		}
+		typeSet = true
+	}
+
+	if p.peek(itemObjOpen) {
+		p.ignore()
+		if !typeSet {
+			op.Type = opQuery
+		}
+
+		for {
+			if p.peek(itemEOF, itemFragment) {
+				p.ignore()
+				break
+			}
+
+			err = p.parseSelectionSet(&op.SelectionSet)
+			if err != nil {
+				return fmt.Errorf("%s: %v", op.Type, err)
+			}
+		}
+	} else {
+		return fmt.Errorf("expecting a query, mutation or subscription, got: %s", p.next())
+	}
+
+	return nil
+}
+
+func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
+	item := p.next()
+
+	switch item._type {
+	case itemQuery:
+		op.Type = opQuery
+	case itemMutation:
+		op.Type = opMutate
+	case itemSub:
+		op.Type = opSub
+	}
+
+	op.Args = op.argsA[:0]
+
+	var err error
+
+	if p.peek(itemName) {
+		op.Name = p.val(p.next())
+	}
+
+	if p.peek(itemArgsOpen) {
+		p.ignore()
+
+		op.Args, err = p.parseOpParams(op.Args)
+		if err != nil {
+			return err
+		}
+	}
+
+	return nil
+}
+
+func (p *Parser) parseSelectionSet(selset *SelectionSet) error {
+	var err error
+
+	selset.Fields, err = p.parseFields(selset.Fields)
+	if err != nil {
+		return err
+	}
+
+	return nil
 }

 func ParseArgValue(argVal string) (*Node, error) {
@ -123,215 +322,107 @@ func ParseArgValue(argVal string) (*Node, error) {
 	return op, err
 }

-func parseSelectionSet(gql []byte) (*Operation, error) {
-	var err error
-
-	if len(gql) == 0 {
-		return nil, errors.New("blank query")
-	}
-
-	l := lexPool.Get().(*lexer)
-	l.Reset()
-
-	if err = lex(l, gql); err != nil {
-		return nil, err
-	}
-
-	p := &Parser{
-		input: l.input,
-		pos:   -1,
-		items: l.items,
-	}
-
-	var op *Operation
-
-	if p.peek(itemObjOpen) {
-		p.ignore()
-		op, err = p.parseQueryOp()
-	} else {
-		op, err = p.parseOp()
-	}
-
-	if err != nil {
-		return nil, err
-	}
-
-	if p.peek(itemObjClose) {
-		p.ignore()
-	} else {
-		return nil, fmt.Errorf("operation missing closing '}'")
-	}
-
-	if !p.peek(itemEOF) {
-		p.ignore()
-		return nil, fmt.Errorf("invalid '%s' found after closing '}'", p.current())
-	}
-
-	lexPool.Put(l)
-
-	return op, err
-}
-
-func (p *Parser) next() item {
-	n := p.pos + 1
-	if n >= len(p.items) {
-		p.err = errEOT
-		return item{_type: itemEOF}
-	}
-	p.pos = n
-	return p.items[p.pos]
-}
-
-func (p *Parser) ignore() {
-	n := p.pos + 1
-	if n >= len(p.items) {
-		p.err = errEOT
-		return
-	}
-	p.pos = n
-}
-
-func (p *Parser) current() string {
-	item := p.items[p.pos]
-	return b2s(p.input[item.pos:item.end])
-}
-
-func (p *Parser) peek(types ...itemType) bool {
-	n := p.pos + 1
-	// if p.items[n]._type == itemEOF {
-	// 	return false
-	// }
-	if n >= len(p.items) {
-		return false
-	}
-	for i := 0; i < len(types); i++ {
-		if p.items[n]._type == types[i] {
-			return true
-		}
-	}
-	return false
-}
-
-func (p *Parser) parseOp() (*Operation, error) {
-	if !p.peek(itemQuery, itemMutation, itemSub) {
-		err := errors.New("expecting a query, mutation or subscription")
-		return nil, err
-	}
-	item := p.next()
-
-	op := opPool.Get().(*Operation)
-	op.Reset()
-
-	switch item._type {
-	case itemQuery:
-		op.Type = opQuery
-	case itemMutation:
-		op.Type = opMutate
-	case itemSub:
-		op.Type = opSub
-	}
-
-	op.Fields = op.fieldsA[:0]
-	op.Args = op.argsA[:0]
-
-	var err error
-
-	if p.peek(itemName) {
-		op.Name = p.val(p.next())
-	}
-
-	if p.peek(itemArgsOpen) {
-		p.ignore()
-
-		op.Args, err = p.parseOpParams(op.Args)
-		if err != nil {
-			return nil, err
-		}
-	}
-
-	if p.peek(itemObjOpen) {
-		p.ignore()
-
-		for n := 0; n < 10; n++ {
-			if !p.peek(itemName) {
-				break
-			}
-
-			op.Fields, err = p.parseFields(op.Fields)
-			if err != nil {
-				return nil, err
-			}
-		}
-	}
-
-	return op, nil
-}
-
-func (p *Parser) parseQueryOp() (*Operation, error) {
-	op := opPool.Get().(*Operation)
-	op.Reset()
-
-	op.Type = opQuery
-	op.Fields = op.fieldsA[:0]
-	op.Args = op.argsA[:0]
-
-	var err error
-
-	for n := 0; n < 10; n++ {
-		if !p.peek(itemName) {
-			break
-		}
-
-		op.Fields, err = p.parseFields(op.Fields)
-		if err != nil {
-			return nil, err
-		}
-	}
-
-	return op, nil
-}
-
 func (p *Parser) parseFields(fields []Field) ([]Field, error) {
-	st := util.NewStack()
+	st := NewStack()
+
+	if !p.peek(itemName, itemSpread) {
+		return nil, fmt.Errorf("unexpected token: %s", p.peekNext())
+	}

 	for {
-		if len(fields) >= maxFields {
-			return nil, fmt.Errorf("too many fields (max %d)", maxFields)
+		if p.peek(itemEOF) {
+			p.ignore()
+			return nil, errors.New("invalid query")
 		}

 		if p.peek(itemObjClose) {
 			p.ignore()
-			st.Pop()
-
-			if st.Len() == 0 {
-				break
-			} else {
+			if st.Len() != 0 {
+				st.Pop()
 				continue
+			} else {
+				break
 			}
 		}

+		if len(fields) >= maxFields {
+			return nil, fmt.Errorf("too many fields (max %d)", maxFields)
+		}
+
+		isFrag := false
+
+		if p.peek(itemSpread) {
+			p.ignore()
+			isFrag = true
+		}
+
 		if !p.peek(itemName) {
-			return nil, errors.New("expecting an alias or field name")
+			if isFrag {
+				return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
+			} else {
+				return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
+			}
 		}

-		fields = append(fields, Field{ID: int32(len(fields))})
+		var f *Field

-		f := &fields[(len(fields) - 1)]
-		f.Args = f.argsA[:0]
-		f.Children = f.childrenA[:0]
+		if isFrag {
+			name := p.val(p.next())
+			p.h.WriteString(name)
+			k := p.h.Sum64()
+			p.h.Reset()

-		// Parse the inside of the the fields () parentheses
-		// in short parse the args like id, where, etc
-		if err := p.parseField(f); err != nil {
-			return nil, err
-		}
+			fr, ok := p.frags[k]
+			if !ok {
+				return nil, fmt.Errorf("no fragment named '%s' defined", name)
+			}
+
+			n := int32(len(fields))
+			fields = append(fields, fr.Fields...)
+
+			for i := int(n); i < len(fields); i++ {
+				f := &fields[i]
+				f.ID = int32(i)
+
+				// If this is the top-level point the parent to the parent of the
+				// previous field.
+				if f.ParentID == -1 {
+					pid := st.Peek()
+					f.ParentID = pid
+					if f.ParentID != -1 {
+						fields[pid].Children = append(fields[f.ParentID].Children, f.ID)
+					}
+					// Update all the other parents id's by our new place in this new array
+				} else {
+					f.ParentID += n
+				}
+
+				// Update all the children which is needed.
+				for j := range f.Children {
+					f.Children[j] += n
+				}
+			}

-		intf := st.Peek()
-		if pid, ok := intf.(int32); ok {
-			f.ParentID = pid
-			fields[pid].Children = append(fields[pid].Children, f.ID)
 		} else {
-			f.ParentID = -1
+			fields = append(fields, Field{ID: int32(len(fields))})
+
+			f = &fields[(len(fields) - 1)]
+			f.Args = f.argsA[:0]
+			f.Children = f.childrenA[:0]
+
+			// Parse the field
+			if err := p.parseField(f); err != nil {
+				return nil, err
+			}
+
+			if st.Len() == 0 {
+				f.ParentID = -1
+			} else {
+				pid := st.Peek()
+				f.ParentID = pid
+				fields[pid].Children = append(fields[pid].Children, f.ID)
+			}
 		}

 		// The first opening curley brackets after this
@ -339,13 +430,6 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
 		if p.peek(itemObjOpen) {
 			p.ignore()
 			st.Push(f.ID)
-
-		} else if p.peek(itemObjClose) {
-			if st.Len() == 0 {
-				break
-			} else {
-				continue
-			}
 		}
 	}

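The fragment branch of the new `parseFields` splices a fragment's pre-parsed fields into the operation's field list, which is why every ID stored inside the copied fields has to be shifted by the insertion offset. A simplified standalone sketch of that re-indexing idea (types cut down for illustration; the real code also attaches top-level fragment fields to the enclosing selection via the stack):

```go
package main

import "fmt"

// Field is a cut-down stand-in for the parser's Field type.
type Field struct {
	ID, ParentID int32
	Children     []int32
}

// splice appends frag's fields to dst, rebasing the indices they carry by the
// insertion offset n so parent/child links keep pointing at the right slots.
func splice(dst, frag []Field) []Field {
	n := int32(len(dst))
	for _, f := range frag {
		f.ID += n
		if f.ParentID != -1 {
			f.ParentID += n
		}
		kids := make([]int32, len(f.Children))
		for i, c := range f.Children {
			kids[i] = c + n
		}
		f.Children = kids
		dst = append(dst, f)
	}
	return dst
}

func main() {
	op := []Field{{ID: 0, ParentID: -1}}
	frag := []Field{
		{ID: 0, ParentID: -1, Children: []int32{1}},
		{ID: 1, ParentID: 0},
	}
	fmt.Println(splice(op, frag))
	// [{0 -1 []} {1 -1 [2]} {2 1 []}]
}
```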
@ -385,7 +469,7 @@ func (p *Parser) parseOpParams(args []Arg) ([]Arg, error) {
 			return nil, fmt.Errorf("too many args (max %d)", maxArgs)
 		}

-		if p.peek(itemArgsClose) {
+		if p.peek(itemEOF, itemArgsClose) {
 			p.ignore()
 			break
 		}
@ -403,7 +487,7 @@ func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
 			return nil, fmt.Errorf("too many args (max %d)", maxArgs)
 		}

-		if p.peek(itemArgsClose) {
+		if p.peek(itemEOF, itemArgsClose) {
 			p.ignore()
 			break
 		}
@ -470,7 +554,7 @@ func (p *Parser) parseObj() (*Node, error) {
 	parent.Reset()

 	for {
-		if p.peek(itemObjClose) {
+		if p.peek(itemEOF, itemObjClose) {
 			p.ignore()
 			break
 		}
@ -545,6 +629,62 @@ func (p *Parser) vall(v item) string {
 	return b2s(p.input[v.pos:v.end])
 }

+func (p *Parser) peek(types ...itemType) bool {
+	n := p.pos + 1
+	l := len(types)
+	// if p.items[n]._type == itemEOF {
+	// 	return false
+	// }
+
+	if n >= len(p.items) {
+		return types[0] == itemEOF
+	}
+
+	if l == 1 {
+		return p.items[n]._type == types[0]
+	}
+
+	for i := 0; i < l; i++ {
+		if p.items[n]._type == types[i] {
+			return true
+		}
+	}
+	return false
+}
+
+func (p *Parser) next() item {
+	n := p.pos + 1
+	if n >= len(p.items) {
+		p.err = errEOT
+		return item{_type: itemEOF}
+	}
+	p.pos = n
+	return p.items[p.pos]
+}
+
+func (p *Parser) ignore() {
+	n := p.pos + 1
+	if n >= len(p.items) {
+		p.err = errEOT
+		return
+	}
+	p.pos = n
+}
+
+func (p *Parser) peekCurrent() string {
+	item := p.items[p.pos]
+	return b2s(p.input[item.pos:item.end])
+}
+
+func (p *Parser) peekNext() string {
+	item := p.items[p.pos+1]
+	return b2s(p.input[item.pos:item.end])
+}
+
+func (p *Parser) reset(to int) {
+	p.pos = to
+}
+
 func b2s(b []byte) string {
 	return *(*string)(unsafe.Pointer(&b))
 }
@ -578,7 +718,7 @@ func (t parserType) String() string {
 	case NodeList:
 		v = "node-list"
 	}
-	return fmt.Sprintf("<%s>", v)
+	return v
 }

 // type Frees struct {
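Taken together, the parser changes above implement a two-pass scheme: the first pass over the token stream registers fragment definitions and remembers where the operation starts, and the second pass rewinds (`p.reset(s)`) and parses the operation, so fragments may be declared before or after the query that spreads them. A minimal standalone sketch of that idea (names are illustrative, not the super-graph API):

```go
package main

import "fmt"

type token struct{ kind, val string }

func main() {
	toks := []token{
		{"fragment", "userFields"},
		{"query", "getUsers"},
	}

	opStart := -1
	frags := map[string]int{}

	// Pass 1: register fragments, note where the operation begins.
	for i, t := range toks {
		switch t.kind {
		case "fragment":
			frags[t.val] = i
		case "query", "mutation", "subscription":
			if opStart == -1 {
				opStart = i
			}
		}
	}

	// Pass 2: "reset" to the operation and parse it, resolving spreads
	// against the fragment registry built in pass 1.
	fmt.Println("fragments:", frags, "operation starts at:", opStart)
}
```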
@ -2,8 +2,9 @@ package qcode

 import (
 	"errors"
-	"github.com/chirino/graphql/schema"
 	"testing"
+
+	"github.com/chirino/graphql/schema"
 )

 func TestCompile1(t *testing.T) {
@ -120,7 +121,7 @@ updateThread {
 		}
 	}
 }
-}`
+}}`
 	qcompile, _ := NewCompiler(Config{})
 	_, err := qcompile.Compile([]byte(gql), "anon")

@ -130,6 +131,93 @@ updateThread {

 }

+func TestFragmentsCompile1(t *testing.T) {
+	gql := `
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}
+`
+	qcompile, _ := NewCompiler(Config{})
+	_, err := qcompile.Compile([]byte(gql), "user")
+
+	if err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestFragmentsCompile2(t *testing.T) {
+	gql := `
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}`
+	qcompile, _ := NewCompiler(Config{})
+	_, err := qcompile.Compile([]byte(gql), "user")
+
+	if err != nil {
+		t.Fatal(err)
+	}
+}
+
+func TestFragmentsCompile3(t *testing.T) {
+	gql := `
+	fragment userFields1 on user {
+		id
+		email
+	}
+
+	fragment userFields2 on user {
+		first_name
+		last_name
+	}
+
+	query {
+		users {
+			...userFields2
+
+			created_at
+			...userFields1
+		}
+	}
+
+`
+	qcompile, _ := NewCompiler(Config{})
+	_, err := qcompile.Compile([]byte(gql), "user")
+
+	if err != nil {
+		t.Fatal(err)
+	}
+}
+
 var gql = []byte(`
 	{products(
 		# returns only 30 items
@ -184,7 +272,6 @@ func BenchmarkQCompileP(b *testing.B) {
 }

 func BenchmarkParse(b *testing.B) {
-
 	b.ResetTimer()
 	b.ReportAllocs()
 	for n := 0; n < b.N; n++ {
177 core/prepare.go
@ -2,126 +2,94 @@ package core

 import (
 	"bytes"
-	"context"
-	"crypto/sha256"
 	"database/sql"
-	"encoding/hex"
 	"fmt"
+	"hash/maphash"
 	"io"
 	"strings"
+	"sync"

 	"github.com/dosco/super-graph/core/internal/allow"
 	"github.com/dosco/super-graph/core/internal/qcode"
 )

-type preparedItem struct {
+type query struct {
+	sync.Once
 	sd *sql.Stmt
+	ai allow.Item
+	qt qcode.QType
+	err error
 	st stmt
 	roleArg bool
 }

-func (sg *SuperGraph) initPrepared() error {
-	ct := context.Background()
+func (sg *SuperGraph) prepare(q *query, role string) {
+	var stmts []stmt
+	var err error
+
+	qb := []byte(q.ai.Query)
+
+	switch q.qt {
+	case qcode.QTQuery:
+		if sg.abacEnabled {
+			stmts, err = sg.buildMultiStmt(qb, q.ai.Vars)
+		} else {
+			stmts, err = sg.buildRoleStmt(qb, q.ai.Vars, role)
+		}
+
+	case qcode.QTMutation:
+		stmts, err = sg.buildRoleStmt(qb, q.ai.Vars, role)
+	}
+
+	if err != nil {
+		sg.log.Printf("WRN %s %s: %v", q.qt, q.ai.Name, err)
+	}
+
+	q.st = stmts[0]
+	q.roleArg = len(stmts) > 1
+
+	q.sd, err = sg.db.Prepare(q.st.sql)
+	if err != nil {
+		q.err = fmt.Errorf("prepare failed: %v: %s", err, q.st.sql)
+	}
+}
+
+func (sg *SuperGraph) initPrepared() error {
 	if sg.allowList.IsPersist() {
 		return nil
 	}
-	sg.prepared = make(map[string]*preparedItem)

-	tx, err := sg.db.BeginTx(ct, nil)
-	if err != nil {
-		return err
-	}
-	defer tx.Rollback() //nolint: errcheck
-
-	if err = sg.prepareRoleStmt(tx); err != nil {
-		return fmt.Errorf("prepareRoleStmt: %w", err)
+	if err := sg.prepareRoleStmt(); err != nil {
+		return fmt.Errorf("role query: %w", err)
 	}

-	if err := tx.Commit(); err != nil {
-		return err
-	}
-
-	success := 0
+	sg.queries = make(map[uint64]query)

 	list, err := sg.allowList.Load()
 	if err != nil {
 		return err
 	}

+	h := maphash.Hash{}
+	h.SetSeed(sg.hashSeed)
+
 	for _, v := range list {
 		if len(v.Query) == 0 {
 			continue
 		}
+		qt := qcode.GetQType(v.Query)

-		err := sg.prepareStmt(v)
-		if err != nil {
-			sg.log.Printf("WRN %s: %v", v.Name, err)
-		} else {
-			success++
-		}
-	}
-
-	sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))
-
-	return nil
-}
-
-func (sg *SuperGraph) prepareStmt(item allow.Item) error {
-	query := item.Query
-	qb := []byte(query)
-	vars := item.Vars
-
-	qt := qcode.GetQType(query)
-	ct := context.Background()
-
-	switch qt {
-	case qcode.QTQuery:
-		var stmts1 []stmt
-		var err error
-
-		if sg.abacEnabled {
-			stmts1, err = sg.buildMultiStmt(qb, vars)
-		} else {
-			stmts1, err = sg.buildRoleStmt(qb, vars, "user")
-		}
-
-		if err != nil {
-			return err
-		}
-
-		//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
-
-		err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
-		if err != nil {
-			return err
-		}
-
-		if sg.anonExists {
-			// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
-
-			stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
-			if err != nil {
-				return err
-			}
-
-			err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
-			if err != nil {
-				return err
-			}
-		}
-
-	case qcode.QTMutation:
-		for _, role := range sg.conf.Roles {
-			// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
-
-			stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
-			if err != nil {
-				return err
-			}
-
-			err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
-			if err != nil {
-				return err
+		switch qt {
+		case qcode.QTQuery:
+			sg.queries[queryID(&h, v.Name, "user")] = query{ai: v, qt: qt}
+
+			if sg.anonExists {
+				sg.queries[queryID(&h, v.Name, "anon")] = query{ai: v, qt: qt}
+			}
+
+		case qcode.QTMutation:
+			for _, role := range sg.conf.Roles {
+				sg.queries[queryID(&h, v.Name, role.Name)] = query{ai: v, qt: qt}
 			}
 		}
 	}
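Note the `sync.Once` embedded in the new `query` struct and the fact that `prepare` now stores its error on the struct instead of returning it: instead of preparing every allow-list statement inside one startup transaction, each statement can be compiled and prepared lazily, exactly once, on first use. A minimal sketch of that general pattern (illustrative names, not the exact super-graph wiring):

```go
package main

import (
	"fmt"
	"sync"
)

type query struct {
	sync.Once
	sql string
}

// prepare stands in for the real work (building SQL, sg.db.Prepare, ...).
func (q *query) prepare() {
	fmt.Println("prepared:", q.sql)
}

func main() {
	q := &query{sql: "SELECT 1"}
	for i := 0; i < 3; i++ {
		q.Do(q.prepare) // the body runs exactly once; later calls are no-ops
	}
}
```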
@ -129,22 +97,8 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
 	return nil
 }

-func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
-	sd, err := sg.db.PrepareContext(ct, st[0].sql)
-	if err != nil {
-		return fmt.Errorf("prepare failed: %v: %s", err, st[0].sql)
-	}
-
-	sg.prepared[key] = &preparedItem{
-		sd:      sd,
-		st:      st[0],
-		roleArg: len(st) > 1,
-	}
-	return nil
-}
-
 // nolint: errcheck
-func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
+func (sg *SuperGraph) prepareRoleStmt() error {
 	var err error

 	if !sg.abacEnabled {
@ -171,11 +125,11 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
 	}

 	io.WriteString(w, ` ELSE $2 END) FROM (`)
-	io.WriteString(w, sg.conf.RolesQuery)
+	io.WriteString(w, rq)
 	io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
 	io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `)

-	sg.getRole, err = tx.Prepare(w.String())
+	sg.getRole, err = sg.db.Prepare(w.String())
 	if err != nil {
 		return err
 	}
@ -187,15 +141,14 @@ func (sg *SuperGraph) initAllowList() error {
 	var ac allow.Config
 	var err error

-	if len(sg.conf.AllowListFile) == 0 {
-		sg.conf.UseAllowList = false
-		sg.log.Printf("WRN allow list disabled no file specified")
+	if sg.conf.AllowListFile == "" {
+		sg.conf.AllowListFile = "allow.list"
 	}

 	// When list is not eabled it is still created and
 	// and new queries are saved to it.
 	if !sg.conf.UseAllowList {
-		ac = allow.Config{CreateIfNotExists: true, Persist: true}
+		ac = allow.Config{CreateIfNotExists: true, Persist: true, Log: sg.log}
 	}

 	sg.allowList, err = allow.New(sg.conf.AllowListFile, ac)
@ -207,9 +160,11 @@ func (sg *SuperGraph) initAllowList() error {
 }

 // nolint: errcheck
-func stmtHash(name string, role string) string {
-	h := sha256.New()
-	io.WriteString(h, strings.ToLower(name))
-	io.WriteString(h, role)
-	return hex.EncodeToString(h.Sum(nil))
+func queryID(h *maphash.Hash, name string, role string) uint64 {
+	h.WriteString(name)
+	h.WriteString(role)
+	v := h.Sum64()
+	h.Reset()
+
+	return v
 }
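`queryID` replaces the hex-encoded sha256 key with a seeded `hash/maphash` sum, so the map key becomes a cheap `uint64` instead of an allocated string. The one thing to get right is the seed: every `maphash.Hash` that computes these IDs must share the same seed (here `sg.hashSeed`), otherwise lookups miss. A small standalone sketch:

```go
package main

import (
	"fmt"
	"hash/maphash"
)

// queryID mirrors the shape of the new helper: hash name+role, take the
// 64-bit sum, then Reset so the Hash can be reused (the seed is kept).
func queryID(h *maphash.Hash, name, role string) uint64 {
	h.WriteString(name)
	h.WriteString(role)
	v := h.Sum64()
	h.Reset()
	return v
}

func main() {
	seed := maphash.MakeSeed() // one per process, like sg.hashSeed

	var a, b maphash.Hash
	a.SetSeed(seed)
	b.SetSeed(seed)

	// Same seed + same inputs => same key on any Hash instance.
	fmt.Println(queryID(&a, "getProducts", "user") == queryID(&b, "getProducts", "user")) // true
}
```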
@ -4,10 +4,10 @@ import (
 	"bytes"
 	"errors"
 	"fmt"
+	"hash/maphash"
 	"net/http"
 	"sync"

-	"github.com/cespare/xxhash/v2"
 	"github.com/dosco/super-graph/core/internal/qcode"
 	"github.com/dosco/super-graph/jsn"
 )
@ -16,12 +16,13 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
 	var err error

 	sel := st.qc.Selects
-	h := xxhash.New()
+	h := maphash.Hash{}
+	h.SetSeed(sg.hashSeed)

 	// fetch the field name used within the db response json
 	// that are used to mark insertion points and the mapping between
 	// those field names and their select objects
-	fids, sfmap := sg.parentFieldIds(h, sel, st.md.Skipped)
+	fids, sfmap := sg.parentFieldIds(&h, sel, st.md.Skipped())

 	// fetch the field values of the marked insertion points
 	// these values contain the id to be used with fetching remote data
@ -30,10 +31,10 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]

 	switch {
 	case len(from) == 1:
-		to, err = sg.resolveRemote(hdr, h, from[0], sel, sfmap)
+		to, err = sg.resolveRemote(hdr, &h, from[0], sel, sfmap)

 	case len(from) > 1:
-		to, err = sg.resolveRemotes(hdr, h, from, sel, sfmap)
+		to, err = sg.resolveRemotes(hdr, &h, from, sel, sfmap)

 	default:
 		return nil, errors.New("something wrong no remote ids found in db response")
@ -55,7 +56,7 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]

 func (sg *SuperGraph) resolveRemote(
 	hdr http.Header,
-	h *xxhash.Digest,
+	h *maphash.Hash,
 	field jsn.Field,
 	sel []qcode.Select,
 	sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@ -66,7 +67,8 @@ func (sg *SuperGraph) resolveRemote(
 	to := toA[:1]

 	// use the json key to find the related Select object
-	k1 := xxhash.Sum64(field.Key)
+	_, _ = h.Write(field.Key)
+	k1 := h.Sum64()

 	s, ok := sfmap[k1]
 	if !ok {
@ -117,7 +119,7 @@ func (sg *SuperGraph) resolveRemote(

 func (sg *SuperGraph) resolveRemotes(
 	hdr http.Header,
-	h *xxhash.Digest,
+	h *maphash.Hash,
 	from []jsn.Field,
 	sel []qcode.Select,
 	sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@ -134,7 +136,8 @@ func (sg *SuperGraph) resolveRemotes(
 	for i, id := range from {

 		// use the json key to find the related Select object
-		k1 := xxhash.Sum64(id.Key)
+		_, _ = h.Write(id.Key)
+		k1 := h.Sum64()

 		s, ok := sfmap[k1]
 		if !ok {
@ -192,7 +195,7 @@ func (sg *SuperGraph) resolveRemotes(
 	return to, cerr
 }

-func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
+func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skipped uint32) (
 	[][]byte,
 	map[uint64]*qcode.Select) {

@ -227,8 +230,8 @@ func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipp
 			fm[n] = r.IDField
 			n++

-			k := xxhash.Sum64(r.IDField)
-			sm[k] = s
+			_, _ = h.Write(r.IDField)
+			sm[h.Sum64()] = s
 		}
 	}

@ -2,11 +2,11 @@ package core

 import (
 	"fmt"
+	"hash/maphash"
 	"io/ioutil"
 	"net/http"
 	"strings"

-	"github.com/cespare/xxhash/v2"
 	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/jsn"
 )
@ -19,7 +19,7 @@ type resolvFn struct {

 func (sg *SuperGraph) initResolvers() error {
 	var err error
-	sg.rmap = make(map[uint64]*resolvFn)
+	sg.rmap = make(map[uint64]resolvFn)

 	for _, t := range sg.conf.Tables {
 		err = sg.initRemotes(t)
@ -36,7 +36,8 @@ func (sg *SuperGraph) initResolvers() error {
 }

 func (sg *SuperGraph) initRemotes(t Table) error {
-	h := xxhash.New()
+	h := maphash.Hash{}
+	h.SetSeed(sg.hashSeed)

 	for _, r := range t.Remotes {
 		// defines the table column to be used as an id in the
@ -75,17 +76,18 @@ func (sg *SuperGraph) initRemotes(t Table) error {
 			path = append(path, []byte(p))
 		}

-		rf := &resolvFn{
+		rf := resolvFn{
 			IDField: []byte(idk),
 			Path:    path,
 			Fn:      fn,
 		}

 		// index resolver obj by parent and child names
-		sg.rmap[mkkey(h, r.Name, t.Name)] = rf
+		sg.rmap[mkkey(&h, r.Name, t.Name)] = rf

 		// index resolver obj by IDField
-		sg.rmap[xxhash.Sum64(rf.IDField)] = rf
+		_, _ = h.Write(rf.IDField)
+		sg.rmap[h.Sum64()] = rf
 	}

 	return nil
@ -1,11 +1,9 @@
 package core

-import (
-	"github.com/cespare/xxhash/v2"
-)
+import "hash/maphash"

 // nolint: errcheck
-func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
+func mkkey(h *maphash.Hash, k1 string, k2 string) uint64 {
 	h.WriteString(k1)
 	h.WriteString(k2)
 	v := h.Sum64()
@ -36,8 +36,8 @@ module.exports = {
           position: "left",
         },
         {
-          label: "Art Compute",
-          href: "https://artcompute.com/s/super-graph",
+          label: "AbtCode",
+          href: "https://abtcode.com/s/super-graph",
           position: "left",
         },
       ],
@ -1805,11 +1805,6 @@ asynckit@^0.4.0:
   resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
   integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=

-at-least-node@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
-  integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
-
 atob@^2.1.2:
   version "2.1.2"
   resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
@ -2323,7 +2318,7 @@ ccount@^1.0.0, ccount@^1.0.3:
   resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.0.5.tgz#ac82a944905a65ce204eb03023157edf29425c17"
   integrity sha512-MOli1W+nfbPLlKEhInaxhRdp7KVLFxLN5ykwzHgLsLI3H3gs5jjFAK4Eoj3OzzcxCtumDaI8onoVDeQyWaNTkw==

-chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2:
+chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2:
   version "2.4.2"
   resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
   integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
@ -2522,15 +2517,6 @@ cliui@^5.0.0:
     strip-ansi "^5.2.0"
     wrap-ansi "^5.1.0"

-cliui@^6.0.0:
-  version "6.0.0"
-  resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
-  integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
-  dependencies:
-    string-width "^4.2.0"
-    strip-ansi "^6.0.0"
-    wrap-ansi "^6.2.0"
-
 coa@^2.0.2:
   version "2.0.2"
   resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3"
@ -3216,11 +3202,6 @@ depd@~1.1.2:
   resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
   integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=

-dependency-graph@^0.9.0:
-  version "0.9.0"
-  resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318"
-  integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w==
-
 des.js@^1.0.0:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843"
@ -3830,7 +3811,7 @@ fast-glob@^2.0.2:
     merge2 "^1.2.3"
     micromatch "^3.1.10"

-fast-glob@^3.0.3, fast-glob@^3.1.1:
+fast-glob@^3.0.3:
   version "3.2.2"
   resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.2.tgz#ade1a9d91148965d4bf7c51f72e1ca662d32e63d"
   integrity sha512-UDV82o4uQyljznxwMxyVRJgZZt3O5wENYojjzbaGEGZgeOxkLFf+V4cnUD+krzb2F72E18RhamkMZ7AdeggF7A==
@ -3970,7 +3951,7 @@ find-cache-dir@^3.0.0, find-cache-dir@^3.3.1:
     make-dir "^3.0.2"
     pkg-dir "^4.1.0"

-find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0:
+find-up@4.1.0, find-up@^4.0.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
   integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
@ -4084,16 +4065,6 @@ fs-extra@^8.0.0, fs-extra@^8.1.0:
     jsonfile "^4.0.0"
     universalify "^0.1.0"

-fs-extra@^9.0.0:
-  version "9.0.0"
-  resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.0.0.tgz#b6afc31036e247b2466dc99c29ae797d5d4580a3"
-  integrity sha512-pmEYSk3vYsG/bF651KPUXZ+hvjpgWYw/Gc7W9NFUe3ZVLczKKWIij3IKpOrQcdw4TILtibFslZ0UmR8Vvzig4g==
-  dependencies:
-    at-least-node "^1.0.0"
-    graceful-fs "^4.2.0"
-    jsonfile "^6.0.1"
-    universalify "^1.0.0"
-
 fs-minipass@^2.0.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
@ -4149,11 +4120,6 @@ get-own-enumerable-property-symbols@^3.0.0:
   resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
   integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==

-get-stdin@^7.0.0:
-  version "7.0.0"
-  resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-7.0.0.tgz#8d5de98f15171a125c5e516643c7a6d0ea8a96f6"
-  integrity sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==
-
 get-stream@^4.0.0:
   version "4.1.0"
   resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
@ -4275,18 +4241,6 @@ globby@^10.0.1:
     merge2 "^1.2.3"
     slash "^3.0.0"

-globby@^11.0.0:
-  version "11.0.0"
-  resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.0.tgz#56fd0e9f0d4f8fb0c456f1ab0dee96e1380bc154"
-  integrity sha512-iuehFnR3xu5wBBtm4xi0dMe92Ob87ufyu/dHwpDYfbcpYpIbrO5OnS8M1vWvrBhSGEJ3/Ecj7gnX76P8YxpPEg==
-  dependencies:
-    array-union "^2.1.0"
-    dir-glob "^3.0.1"
-    fast-glob "^3.1.1"
-    ignore "^5.1.4"
-    merge2 "^1.3.0"
-    slash "^3.0.0"
-
 globby@^6.1.0:
   version "6.1.0"
   resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"
@ -4743,7 +4697,7 @@ ignore@^3.3.5:
   resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
   integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==

-ignore@^5.1.1, ignore@^5.1.4:
+ignore@^5.1.1:
   version "5.1.4"
   resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.4.tgz#84b7b3dbe64552b6ef0eca99f6743dbec6d97adf"
   integrity sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==
@ -5382,15 +5336,6 @@ jsonfile@^4.0.0:
   optionalDependencies:
     graceful-fs "^4.1.6"

-jsonfile@^6.0.1:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.0.1.tgz#98966cba214378c8c84b82e085907b40bf614179"
-  integrity sha512-jR2b5v7d2vIOust+w3wtFKZIfpC2pnRmFAhAC/BuweZFQR8qZzxH1OyrQ10HmdVYiXWkYUqPVsz91cG7EL2FBg==
-  dependencies:
-    universalify "^1.0.0"
-  optionalDependencies:
-    graceful-fs "^4.1.6"
-
 jsprim@^1.2.2:
   version "1.4.1"
   resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"
@ -5656,13 +5601,6 @@ lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
   integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==

-log-symbols@^2.2.0:
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a"
-  integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==
-  dependencies:
-    chalk "^2.0.1"
-
 loglevel@^1.6.8:
   version "1.6.8"
   resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.8.tgz#8a25fb75d092230ecd4457270d80b54e28011171"
@ -6645,7 +6583,7 @@ picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1:
   resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
   integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==

-pify@^2.0.0, pify@^2.3.0:
+pify@^2.0.0:
   version "2.3.0"
   resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
   integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=
@ -6731,24 +6669,6 @@ postcss-calc@^7.0.1:
     postcss-selector-parser "^6.0.2"
     postcss-value-parser "^4.0.2"

-postcss-cli@^7.1.1:
-  version "7.1.1"
-  resolved "https://registry.yarnpkg.com/postcss-cli/-/postcss-cli-7.1.1.tgz#260f9546be260b2149bf32e28d785a0d79c9aab8"
-  integrity sha512-bYQy5ydAQJKCMSpvaMg0ThPBeGYqhQXumjbFOmWnL4u65CYXQ16RfS6afGQpit0dGv/fNzxbdDtx8dkqOhhIbg==
-  dependencies:
-    chalk "^4.0.0"
-    chokidar "^3.3.0"
-    dependency-graph "^0.9.0"
-    fs-extra "^9.0.0"
-    get-stdin "^7.0.0"
-    globby "^11.0.0"
-    postcss "^7.0.0"
-    postcss-load-config "^2.0.0"
-    postcss-reporter "^6.0.0"
-    pretty-hrtime "^1.0.3"
-    read-cache "^1.0.0"
-    yargs "^15.0.2"
-
 postcss-color-functional-notation@^2.0.1:
   version "2.0.1"
   resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz#5efd37a88fbabeb00a2966d1e53d98ced93f74e0"
@ -7288,16 +7208,6 @@ postcss-replace-overflow-wrap@^3.0.0:
   dependencies:
     postcss "^7.0.2"

-postcss-reporter@^6.0.0:
-  version "6.0.1"
-  resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-6.0.1.tgz#7c055120060a97c8837b4e48215661aafb74245f"
-  integrity sha512-LpmQjfRWyabc+fRygxZjpRxfhRf9u/fdlKf4VHG4TSPbV2XNsuISzYW1KL+1aQzx53CAppa1bKG4APIB/DOXXw==
-  dependencies:
-    chalk "^2.4.1"
-    lodash "^4.17.11"
-    log-symbols "^2.2.0"
-    postcss "^7.0.7"
-
 postcss-selector-matches@^4.0.0:
   version "4.0.0"
   resolved "https://registry.yarnpkg.com/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz#71c8248f917ba2cc93037c9637ee09c64436fcff"
@ -7397,7 +7307,7 @@ postcss@^6.0.9:
     source-map "^0.6.1"
     supports-color "^5.4.0"

-postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6, postcss@^7.0.7:
+postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6:
   version "7.0.30"
   resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.30.tgz#cc9378beffe46a02cbc4506a0477d05fcea9a8e2"
   integrity sha512-nu/0m+NtIzoubO+xdAlwZl/u5S5vi/y6BCsoL8D+8IxsD3XvBS8X4YEADNIVXKVuQvduiucnRv+vPIqj56EGMQ==
@ -7692,6 +7602,11 @@ react-helmet@^6.0.0-beta:
     react-fast-compare "^2.0.4"
     react-side-effect "^2.1.0"

+react-hook-sticky@^0.2.0:
+  version "0.2.0"
+  resolved "https://registry.yarnpkg.com/react-hook-sticky/-/react-hook-sticky-0.2.0.tgz#0dcc40a2afb1856e53764af9b231f1146e3de576"
+  integrity sha512-J92F5H6PJQlMBgZ2tv58GeVlTZtEhpZ9bYLdoV2+5fVSJScszuY+TDZY3enQEAPIgJsLteFglGGuf8/TB9L72Q==
+
 react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1:
   version "16.13.1"
   resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
@ -7785,13 +7700,6 @@ react@^16.8.4:
     object-assign "^4.1.1"
     prop-types "^15.6.2"

-read-cache@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774"
-  integrity sha1-5mTvMRYRZsl1HNvo28+GtftY93Q=
-  dependencies:
-    pify "^2.3.0"
-
 "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
   version "2.3.7"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
|
||||||
@ -8709,7 +8617,7 @@ string-width@^3.0.0, string-width@^3.1.0:
|
|||||||
is-fullwidth-code-point "^2.0.0"
|
is-fullwidth-code-point "^2.0.0"
|
||||||
strip-ansi "^5.1.0"
|
strip-ansi "^5.1.0"
|
||||||
|
|
||||||
string-width@^4.1.0, string-width@^4.2.0:
|
string-width@^4.1.0:
|
||||||
version "4.2.0"
|
version "4.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
|
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
|
||||||
integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
|
integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
|
||||||
@ -9305,11 +9213,6 @@ universalify@^0.1.0:
|
|||||||
resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
|
resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
|
||||||
integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==
|
integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==
|
||||||
|
|
||||||
universalify@^1.0.0:
|
|
||||||
version "1.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/universalify/-/universalify-1.0.0.tgz#b61a1da173e8435b2fe3c67d29b9adf8594bd16d"
|
|
||||||
integrity sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==
|
|
||||||
|
|
||||||
unpipe@1.0.0, unpipe@~1.0.0:
|
unpipe@1.0.0, unpipe@~1.0.0:
|
||||||
version "1.0.0"
|
version "1.0.0"
|
||||||
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
|
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
|
||||||
@ -9723,7 +9626,7 @@ wrap-ansi@^5.1.0:
|
|||||||
string-width "^3.0.0"
|
string-width "^3.0.0"
|
||||||
strip-ansi "^5.0.0"
|
strip-ansi "^5.0.0"
|
||||||
|
|
||||||
wrap-ansi@^6.0.0, wrap-ansi@^6.2.0:
|
wrap-ansi@^6.0.0:
|
||||||
version "6.2.0"
|
version "6.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
|
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
|
||||||
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
|
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
|
||||||
@ -9784,14 +9687,6 @@ yargs-parser@^13.1.2:
|
|||||||
camelcase "^5.0.0"
|
camelcase "^5.0.0"
|
||||||
decamelize "^1.2.0"
|
decamelize "^1.2.0"
|
||||||
|
|
||||||
yargs-parser@^18.1.1:
|
|
||||||
version "18.1.3"
|
|
||||||
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
|
|
||||||
integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
|
|
||||||
dependencies:
|
|
||||||
camelcase "^5.0.0"
|
|
||||||
decamelize "^1.2.0"
|
|
||||||
|
|
||||||
yargs@^13.3.2:
|
yargs@^13.3.2:
|
||||||
version "13.3.2"
|
version "13.3.2"
|
||||||
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
|
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
|
||||||
@ -9808,23 +9703,6 @@ yargs@^13.3.2:
|
|||||||
y18n "^4.0.0"
|
y18n "^4.0.0"
|
||||||
yargs-parser "^13.1.2"
|
yargs-parser "^13.1.2"
|
||||||
|
|
||||||
yargs@^15.0.2:
|
|
||||||
version "15.3.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b"
|
|
||||||
integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==
|
|
||||||
dependencies:
|
|
||||||
cliui "^6.0.0"
|
|
||||||
decamelize "^1.2.0"
|
|
||||||
find-up "^4.1.0"
|
|
||||||
get-caller-file "^2.0.1"
|
|
||||||
require-directory "^2.1.1"
|
|
||||||
require-main-filename "^2.0.0"
|
|
||||||
set-blocking "^2.0.0"
|
|
||||||
string-width "^4.2.0"
|
|
||||||
which-module "^2.0.0"
|
|
||||||
y18n "^4.0.0"
|
|
||||||
yargs-parser "^18.1.1"
|
|
||||||
|
|
||||||
zepto@^1.2.0:
|
zepto@^1.2.0:
|
||||||
version "1.2.0"
|
version "1.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/zepto/-/zepto-1.2.0.tgz#e127bd9e66fd846be5eab48c1394882f7c0e4f98"
|
resolved "https://registry.yarnpkg.com/zepto/-/zepto-1.2.0.tgz#e127bd9e66fd846be5eab48c1394882f7c0e4f98"
|
||||||
go.mod
@@ -12,7 +12,6 @@ require (
 	github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
 	github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
 	github.com/brianvoe/gofakeit/v5 v5.2.0
-	github.com/cespare/xxhash/v2 v2.1.1
 	github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a
 	github.com/daaku/go.zipexe v1.0.1 // indirect
 	github.com/dgrijalva/jwt-go v3.2.0+incompatible
go.sum
@@ -35,7 +35,9 @@ github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3 h1:+qz9Ga6l6lKw6fgv
 github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3/go.mod h1:FlkD11RtgMTYjVuBnb7cxoHmQGqvPpCsr2atC88nl/M=
 github.com/akavel/rsrc v0.8.0 h1:zjWn7ukO9Kc5Q62DOJCcxGpXC18RawVtYAGdz2aLlfw=
 github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
+github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
+github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
 github.com/aws/aws-sdk-go v1.15.27 h1:i75BxN4Es/8rTVQbEKAP1WCiIhhz635xTNeDdZJRAXQ=
@@ -53,8 +55,6 @@ github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA
 github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
 github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
-github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
-github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
 github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a h1:WVu7r2vwlrBVmunbSSU+9/3M3AgsQyhE49CKDjHiFq4=
 github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a/go.mod h1:wQjjxFMFyMlsWh4Z3nMuHQtevD4Ul9UVQSnz1JOLuP8=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
@@ -220,6 +220,7 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V
 github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
 github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
 github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
+github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
 github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
 github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
 github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
@@ -319,6 +320,7 @@ github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeV
 github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
 github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
 github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
+github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=
 github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
 github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
@@ -543,6 +545,7 @@ google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ij
 google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
 google.golang.org/grpc v1.23.1 h1:q4XQuHFC6I28BKZpo6IYyb3mNO+l7lSOxRuYTCiDfXk=
 google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
+gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
 gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
 gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
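Taken together, the go.mod and go.sum hunks drop exactly one dependency, github.com/cespare/xxhash/v2; the jsn hunks further down replace it with the standard library's hash/maphash (available since Go 1.14). The added go.sum lines only fill in previously missing h1: hashes for modules the project already required.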
@@ -156,7 +156,7 @@ func cmdVersion(cmd *cobra.Command, args []string) {
 
 func BuildDetails() string {
 	if len(version) == 0 {
-		return fmt.Sprintf(`
+		return `
 Super Graph (unknown version)
 For documentation, visit https://supergraph.dev
 
@@ -166,7 +166,7 @@ To build with version information please use the Makefile
 
 Licensed under the Apache Public License 2.0
 Copyright 2020, Vikram Rangnekar
-`)
+`
 	}
 
 	return fmt.Sprintf(`
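The change above drops a fmt.Sprintf wrapped around a literal that contains no format verbs; the call scans and allocates for nothing, and linters such as gosimple flag the pattern. A minimal sketch of the rule, with banner as a made-up stand-in for BuildDetails:

package main

import "fmt"

// banner is a hypothetical stand-in for BuildDetails.
func banner(version string) string {
	if version == "" {
		// Before: return fmt.Sprintf(`...`); with no verbs, Sprintf adds nothing.
		return "Super Graph (unknown version)"
	}
	// Sprintf is warranted only once there are verbs to fill in.
	return fmt.Sprintf("Super Graph %s", version)
}

func main() {
	fmt.Println(banner(""))
	fmt.Println(banner("v0.14"))
}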
@@ -82,8 +82,6 @@ func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[st
 
 	if v, ok := opt["user_id"]; ok && len(v) != 0 {
 		ct = context.WithValue(ct, core.UserIDKey, v)
-	} else {
-		ct = context.WithValue(ct, core.UserIDKey, "-1")
 	}
 
 	// var role string
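Net effect of this hunk: an internal GraphQL call that does not supply user_id no longer impersonates a synthetic user "-1". The context is simply left without core.UserIDKey, so the call is presumably resolved under the anonymous role downstream.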
@@ -32,6 +32,7 @@ type Auth struct {
 		Secret     string
 		PubKeyFile string `mapstructure:"public_key_file"`
 		PubKeyType string `mapstructure:"public_key_type"`
+		Audience   string `mapstructure:"audience"`
 	}
 
 	Header struct {
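The new Audience field only needs its mapstructure tag to become configurable. A small self-contained sketch of how such a tag is picked up when decoding a config map with github.com/mitchellh/mapstructure; the map keys and values below are invented for illustration, and Super Graph's actual config loading is not shown:

package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// JWTConfig mirrors the struct fields in the hunk above; only the
// `audience` tag is new.
type JWTConfig struct {
	Secret     string
	PubKeyFile string `mapstructure:"public_key_file"`
	PubKeyType string `mapstructure:"public_key_type"`
	Audience   string `mapstructure:"audience"`
}

func main() {
	// Invented config values, standing in for whatever the loader
	// hands to the decoder.
	raw := map[string]interface{}{
		"secret":   "xyz",
		"audience": "my-firebase-project",
	}

	var jc JWTConfig
	if err := mapstructure.Decode(raw, &jc); err != nil {
		panic(err)
	}
	fmt.Println(jc.Audience) // my-firebase-project
}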
@@ -2,19 +2,32 @@ package auth
 
 import (
 	"context"
+	"encoding/json"
 	"io/ioutil"
 	"net/http"
+	"strconv"
 	"strings"
+	"time"
 
 	jwt "github.com/dgrijalva/jwt-go"
 	"github.com/dosco/super-graph/core"
 )
 
 const (
 	authHeader = "Authorization"
 	jwtAuth0 int = iota + 1
+	jwtFirebase int = iota + 2
+	firebasePKEndpoint = "https://www.googleapis.com/robot/v1/metadata/x509/securetoken@system.gserviceaccount.com"
+	firebaseIssuerPrefix = "https://securetoken.google.com/"
 )
 
+type firebasePKCache struct {
+	PublicKeys map[string]string
+	Expiration time.Time
+}
+
+var firebasePublicKeys firebasePKCache
+
 func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	var key interface{}
 	var jwtProvider int
@@ -23,6 +36,8 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 
 	if ac.JWT.Provider == "auth0" {
 		jwtProvider = jwtAuth0
+	} else if ac.JWT.Provider == "firebase" {
+		jwtProvider = jwtFirebase
 	}
 
 	secret := ac.JWT.Secret
@@ -56,6 +71,7 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	}
 
 	return func(w http.ResponseWriter, r *http.Request) {
+
 		var tok string
 
 		if len(cookie) != 0 {
@@ -74,9 +90,16 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 			tok = ah[7:]
 		}
 
-		token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, func(token *jwt.Token) (interface{}, error) {
-			return key, nil
-		})
+		var keyFunc jwt.Keyfunc
+		if jwtProvider == jwtFirebase {
+			keyFunc = firebaseKeyFunction
+		} else {
+			keyFunc = func(token *jwt.Token) (interface{}, error) {
+				return key, nil
+			}
+		}
+
+		token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, keyFunc)
 
 		if err != nil {
 			next.ServeHTTP(w, r)
@@ -86,12 +109,20 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		if claims, ok := token.Claims.(*jwt.StandardClaims); ok {
 			ctx := r.Context()
 
+			if ac.JWT.Audience != "" && claims.Audience != ac.JWT.Audience {
+				next.ServeHTTP(w, r)
+				return
+			}
+
 			if jwtProvider == jwtAuth0 {
 				sub := strings.Split(claims.Subject, "|")
 				if len(sub) != 2 {
 					ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
 					ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
 				}
+			} else if jwtProvider == jwtFirebase &&
+				claims.Issuer == firebaseIssuerPrefix+ac.JWT.Audience {
+				ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
 			} else {
 				ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
 			}
@@ -103,3 +134,92 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 		next.ServeHTTP(w, r)
 	}, nil
 }
+
+type firebaseKeyError struct {
+	Err     error
+	Message string
+}
+
+func (e *firebaseKeyError) Error() string {
+	return e.Message + " " + e.Err.Error()
+}
+
+func firebaseKeyFunction(token *jwt.Token) (interface{}, error) {
+	kid, ok := token.Header["kid"]
+
+	if !ok {
+		return nil, &firebaseKeyError{
+			Message: "Error 'kid' header not found in token",
+		}
+	}
+
+	if firebasePublicKeys.Expiration.Before(time.Now()) {
+		resp, err := http.Get(firebasePKEndpoint)
+
+		if err != nil {
+			return nil, &firebaseKeyError{
+				Message: "Error connecting to firebase certificate server",
+				Err:     err,
+			}
+		}
+
+		defer resp.Body.Close()
+
+		data, err := ioutil.ReadAll(resp.Body)
+
+		if err != nil {
+			return nil, &firebaseKeyError{
+				Message: "Error reading firebase certificate server response",
+				Err:     err,
+			}
+		}
+
+		cachePolicy := resp.Header.Get("cache-control")
+		ageIndex := strings.Index(cachePolicy, "max-age=")
+
+		if ageIndex < 0 {
+			return nil, &firebaseKeyError{
+				Message: "Error parsing cache-control header: 'max-age=' not found",
+			}
+		}
+
+		ageToEnd := cachePolicy[ageIndex+8:]
+		endIndex := strings.Index(ageToEnd, ",")
+		if endIndex < 0 {
+			endIndex = len(ageToEnd) - 1
+		}
+		ageString := ageToEnd[:endIndex]
+
+		age, err := strconv.ParseInt(ageString, 10, 64)
+
+		if err != nil {
+			return nil, &firebaseKeyError{
+				Message: "Error parsing max-age cache policy",
+				Err:     err,
+			}
+		}
+
+		expiration := time.Now().Add(time.Duration(time.Duration(age) * time.Second))
+
+		err = json.Unmarshal(data, &firebasePublicKeys.PublicKeys)
+
+		if err != nil {
+			firebasePublicKeys = firebasePKCache{}
+			return nil, &firebaseKeyError{
+				Message: "Error unmarshalling firebase public key json",
+				Err:     err,
+			}
+		}
+
+		firebasePublicKeys.Expiration = expiration
+	}
+
+	if key, found := firebasePublicKeys.PublicKeys[kid.(string)]; found {
+		k, err := jwt.ParseRSAPublicKeyFromPEM([]byte(key))
+		return k, err
+	}
+
+	return nil, &firebaseKeyError{
+		Message: "Error no matching public key for kid supplied in jwt",
+	}
+}
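The least obvious part of firebaseKeyFunction is deriving a cache lifetime from Google's Cache-Control response header. A standalone sketch of that extraction, with maxAge as a helper name invented here:

package main

import (
	"fmt"
	"strconv"
	"strings"
	"time"
)

// maxAge mirrors the extraction done inline by firebaseKeyFunction above:
// find "max-age=", take digits up to the next directive, parse as seconds.
func maxAge(cacheControl string) (time.Duration, error) {
	i := strings.Index(cacheControl, "max-age=")
	if i < 0 {
		return 0, fmt.Errorf("max-age not found in %q", cacheControl)
	}
	rest := cacheControl[i+len("max-age="):]
	if j := strings.Index(rest, ","); j >= 0 {
		rest = rest[:j] // stop at the next directive
	}
	secs, err := strconv.ParseInt(rest, 10, 64)
	if err != nil {
		return 0, err
	}
	return time.Duration(secs) * time.Second, nil
}

func main() {
	d, err := maxAge("public, max-age=22973, must-revalidate")
	fmt.Println(d, err) // 6h22m53s <nil>
}

One difference from the hunk is deliberate: when no comma follows the value, the committed code keeps only len(ageToEnd) - 1 characters, so a header ending exactly in max-age=600 would parse as 60, while this sketch slices to the end. In practice the endpoint's responses carry trailing directives, which is presumably why the truncation goes unnoticed.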
@@ -6,9 +6,11 @@ import (
 	"database/sql"
 	"fmt"
 	"io/ioutil"
+	"log"
 	"os"
 	"path/filepath"
 	"regexp"
+	"sort"
 	"strconv"
 	"strings"
 	"text/template"
@@ -105,39 +107,40 @@ func (defaultMigratorFS) Glob(pattern string) ([]string, error) {
 func FindMigrationsEx(path string, fs MigratorFS) ([]string, error) {
 	path = strings.TrimRight(path, string(filepath.Separator))
 
-	fileInfos, err := fs.ReadDir(path)
+	files, err := ioutil.ReadDir(path)
 	if err != nil {
-		return nil, err
+		log.Fatal(err)
 	}
 
-	paths := make([]string, 0, len(fileInfos))
-	for _, fi := range fileInfos {
+	fm := make(map[int]string, len(files))
+	keys := make([]int, 0, len(files))
+
+	for _, fi := range files {
 		if fi.IsDir() {
 			continue
 		}
 
 		matches := migrationPattern.FindStringSubmatch(fi.Name())
+
 		if len(matches) != 2 {
 			continue
 		}
 
-		n, err := strconv.ParseInt(matches[1], 10, 32)
+		n, err := strconv.Atoi(matches[1])
 		if err != nil {
 			// The regexp already validated that the prefix is all digits so this *should* never fail
 			return nil, err
 		}
 
-		mcount := len(paths)
-
-		if n < int64(mcount) {
-			return nil, fmt.Errorf("Duplicate migration %d", n)
-		}
-
-		if int64(mcount) < n {
-			return nil, fmt.Errorf("Missing migration %d", mcount)
-		}
-
-		paths = append(paths, filepath.Join(path, fi.Name()))
+		fm[n] = filepath.Join(path, fi.Name())
+		keys = append(keys, n)
+	}
+
+	sort.Ints(keys)
+
+	paths := make([]string, 0, len(keys))
+	for _, k := range keys {
+		paths = append(paths, fm[k])
 	}
 
 	return paths, nil
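The rewrite changes FindMigrationsEx's contract: instead of insisting on a gap-free 0, 1, 2, ... sequence (the old Duplicate/Missing errors), it orders files by their numeric prefix and accepts gaps. A compact sketch of the new ordering rule in isolation, with invented file names:

package main

import (
	"fmt"
	"sort"
)

// orderMigrations keys each file by its numeric prefix, sorts the keys,
// and emits paths in that order, as the hunk above now does.
func orderMigrations(byNumber map[int]string) []string {
	keys := make([]int, 0, len(byNumber))
	for k := range byNumber {
		keys = append(keys, k)
	}
	sort.Ints(keys)

	paths := make([]string, 0, len(keys))
	for _, k := range keys {
		paths = append(paths, byNumber[k])
	}
	return paths
}

func main() {
	fmt.Println(orderMigrations(map[int]string{
		5: "5_rename_column.sql",
		1: "1_init.sql",
		2: "2_seed.sql",
	}))
	// Output: [1_init.sql 2_seed.sql 5_rename_column.sql]
}

Two behavioral notes: gaps in the sequence (1, 2, 5) are now accepted, and a failure to read the directory now calls log.Fatal instead of returning the error, which terminates the process rather than letting the caller handle it.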
File diff suppressed because one or more lines are too long
@@ -11,9 +11,9 @@
 // opt-in, read http://bit.ly/CRA-PWA
 
 const isLocalhost = Boolean(
-  window.location.hostname === 'localhost' ||
+  window.location.hostname === "localhost" ||
     // [::1] is the IPv6 localhost address.
-    window.location.hostname === '[::1]' ||
+    window.location.hostname === "[::1]" ||
     // 127.0.0.1/8 is considered localhost for IPv4.
     window.location.hostname.match(
       /^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
@@ -21,7 +21,7 @@ const isLocalhost = Boolean(
 );
 
 export function register(config) {
-  if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
+  if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) {
     // The URL constructor is available in all browsers that support SW.
     const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
     if (publicUrl.origin !== window.location.origin) {
@@ -31,7 +31,7 @@ export function register(config) {
       return;
     }
 
-    window.addEventListener('load', () => {
+    window.addEventListener("load", () => {
       const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
 
       if (isLocalhost) {
@@ -42,8 +42,8 @@ export function register(config) {
         // service worker/PWA documentation.
         navigator.serviceWorker.ready.then(() => {
           console.log(
-            'This web app is being served cache-first by a service ' +
-              'worker. To learn more, visit http://bit.ly/CRA-PWA'
+            "This web app is being served cache-first by a service " +
+              "worker. To learn more, visit http://bit.ly/CRA-PWA"
           );
         });
       } else {
@@ -57,21 +57,21 @@ export function register(config) {
 function registerValidSW(swUrl, config) {
   navigator.serviceWorker
     .register(swUrl)
-    .then(registration => {
+    .then((registration) => {
       registration.onupdatefound = () => {
         const installingWorker = registration.installing;
         if (installingWorker == null) {
           return;
         }
         installingWorker.onstatechange = () => {
-          if (installingWorker.state === 'installed') {
+          if (installingWorker.state === "installed") {
             if (navigator.serviceWorker.controller) {
               // At this point, the updated precached content has been fetched,
               // but the previous service worker will still serve the older
               // content until all client tabs are closed.
               console.log(
-                'New content is available and will be used when all ' +
-                  'tabs for this page are closed. See http://bit.ly/CRA-PWA.'
+                "New content is available and will be used when all " +
+                  "tabs for this page are closed. See http://bit.ly/CRA-PWA."
               );
 
               // Execute callback
@@ -82,7 +82,7 @@ function registerValidSW(swUrl, config) {
             // At this point, everything has been precached.
             // It's the perfect time to display a
             // "Content is cached for offline use." message.
-            console.log('Content is cached for offline use.');
+            console.log("Content is cached for offline use.");
 
             // Execute callback
             if (config && config.onSuccess) {
@@ -93,23 +93,23 @@ function registerValidSW(swUrl, config) {
       };
     };
   })
-    .catch(error => {
-      console.error('Error during service worker registration:', error);
+    .catch((error) => {
+      console.error("Error during service worker registration:", error);
     });
 }
 
 function checkValidServiceWorker(swUrl, config) {
   // Check if the service worker can be found. If it can't reload the page.
   fetch(swUrl)
-    .then(response => {
+    .then((response) => {
       // Ensure service worker exists, and that we really are getting a JS file.
-      const contentType = response.headers.get('content-type');
+      const contentType = response.headers.get("content-type");
       if (
         response.status === 404 ||
-        (contentType != null && contentType.indexOf('javascript') === -1)
+        (contentType != null && contentType.indexOf("javascript") === -1)
       ) {
         // No service worker found. Probably a different app. Reload the page.
-        navigator.serviceWorker.ready.then(registration => {
+        navigator.serviceWorker.ready.then((registration) => {
           registration.unregister().then(() => {
             window.location.reload();
           });
@@ -121,14 +121,14 @@ function checkValidServiceWorker(swUrl, config) {
     })
     .catch(() => {
       console.log(
-        'No internet connection found. App is running in offline mode.'
+        "No internet connection found. App is running in offline mode."
       );
     });
 }
 
 export function unregister() {
-  if ('serviceWorker' in navigator) {
-    navigator.serviceWorker.ready.then(registration => {
+  if ("serviceWorker" in navigator) {
+    navigator.serviceWorker.ready.then((registration) => {
       registration.unregister();
     });
   }
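The churn in this file is purely mechanical: single quotes become double quotes and single-parameter arrow functions gain parentheses. That combination matches what a formatter such as Prettier 2.x produces with its default settings; no behavior changes.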
jsn/bench.1 (new file)
@@ -0,0 +1,13 @@
+goos: darwin
+goarch: amd64
+pkg: github.com/dosco/super-graph/jsn
+BenchmarkGet
+BenchmarkGet-16        	   13898	     85293 ns/op	    3328 B/op	       2 allocs/op
+BenchmarkFilter
+BenchmarkFilter-16     	  189328	      6341 ns/op	     448 B/op	       1 allocs/op
+BenchmarkStrip
+BenchmarkStrip-16      	  219765	      5543 ns/op	     224 B/op	       1 allocs/op
+BenchmarkReplace
+BenchmarkReplace-16    	  100899	     12022 ns/op	     416 B/op	       1 allocs/op
+PASS
+ok  	github.com/dosco/super-graph/jsn	6.029s
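bench.1 is captured output from Go's benchmark runner (along the lines of go test -bench . -benchmem in the jsn package): the -16 suffix is the GOMAXPROCS value, ns/op is time per iteration, and the B/op and allocs/op columns come from -benchmem.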
@@ -3,6 +3,7 @@ package jsn
 import (
 	"bytes"
 	"encoding/json"
+	"errors"
 	"io"
 )
@@ -68,7 +69,12 @@ func Clear(w *bytes.Buffer, v []byte) error {
 			}
 
 			io := int(dec.InputOffset())
-			w.Write(v[io-len(v1)-2 : io])
+			s := io - len(v1) - 2
+			if io <= s || s <= 0 {
+				return errors.New("invalid json")
+			}
+
+			w.Write(v[s:io])
 			w.WriteString(`:`)
 			isValue = true
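The guard makes Clear fail with an error instead of panicking: on malformed input the decoder's offset arithmetic can make s non-positive or place it at or beyond io, and slicing v[s:io] would then panic with an out-of-range error. The crasher strings added to the fuzz test below exercise inputs of this shape.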
@@ -2,17 +2,19 @@ package jsn
 
 import (
 	"bytes"
+	"hash/maphash"
-
-	"github.com/cespare/xxhash/v2"
 )
 
 // Filter function filters the JSON keeping only the provided keys and removing all others
 func Filter(w *bytes.Buffer, b []byte, keys []string) error {
 	var err error
 	kmap := make(map[uint64]struct{}, len(keys))
+	h := maphash.Hash{}
 
 	for i := range keys {
-		kmap[xxhash.Sum64String(keys[i])] = struct{}{}
+		_, _ = h.WriteString(keys[i])
+		kmap[h.Sum64()] = struct{}{}
+		h.Reset()
 	}
 
 	// is an list
@@ -132,7 +134,11 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
 			cb := b[s:(e + 1)]
 			e = 0
 
-			if _, ok := kmap[xxhash.Sum64(k)]; !ok {
+			_, _ = h.Write(k)
+			_, ok := kmap[h.Sum64()]
+			h.Reset()
+
+			if !ok {
 				continue
 			}
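The pattern adopted here and in the other jsn hunks is the hash/maphash equivalent of xxhash.Sum64: one Hash value is reused for every key, with Reset in between. Reset clears the state but keeps the Hash's randomly chosen seed, so hashes built while indexing and hashes computed during lookup remain comparable within a call. A minimal sketch of the idiom:

package main

import (
	"fmt"
	"hash/maphash"
)

func main() {
	// A zero maphash.Hash picks a random seed on first use; Reset keeps it.
	var h maphash.Hash

	kmap := make(map[uint64]struct{})
	for _, k := range []string{"id", "name"} {
		_, _ = h.WriteString(k) // WriteString never returns an error
		kmap[h.Sum64()] = struct{}{}
		h.Reset()
	}

	// Lookup side: hash the candidate key the same way.
	_, _ = h.WriteString("name")
	_, ok := kmap[h.Sum64()]
	h.Reset()
	fmt.Println(ok) // true
}

Unlike xxhash.Sum64, these values change from process to process, which is exactly what you want for a throwaway in-memory lookup table and exactly what you cannot use for anything persisted.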
@@ -8,6 +8,8 @@ import (
 	"github.com/dosco/super-graph/jsn"
 )
 
+var ret int
+
 func TestFuzzCrashers(t *testing.T) {
 	var crashers = []string{
 		"00\"0000\"0{",
@@ -56,9 +58,16 @@ func TestFuzzCrashers(t *testing.T) {
 		"0000\"0\"{",
 		"000\"000\"{",
 		"\"00000000\"{",
+		`0000"00"00000000"000000000"00"000000000000000"00000"00000": "00"0"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
+		`0000"000000000000000000000000000000000000"00000000"000000000"00"000000000000000"00000"00000": "00000000000000"00000"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
+		`00"__twitter_id":[{ "name": "hello" }, { "name": "world"}]`,
+		"\"\xb0\xef\xbd\xe3\xbd\xef\x99\xe3\xbd\xef\xbd\xef\xbd\xef\xbd\xe5\x99\xe3\xbd" +
+			"\xef\x99\xe3\"",
+		"\"\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xe3\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xef\xe3" +
+			"\xe3\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\"",
 	}
 
 	for _, f := range crashers {
-		_ = jsn.Fuzz([]byte(f))
+		ret = jsn.Fuzz([]byte(f))
 	}
 }
jsn/get.go
@@ -1,7 +1,7 @@
 package jsn
 
 import (
-	"github.com/cespare/xxhash/v2"
+	"hash/maphash"
 )
 
 const (
@@ -41,9 +41,12 @@ func Value(b []byte) []byte {
 // Keys function fetches values for the provided keys
 func Get(b []byte, keys [][]byte) []Field {
 	kmap := make(map[uint64]struct{}, len(keys))
+	h := maphash.Hash{}
 
 	for i := range keys {
-		kmap[xxhash.Sum64(keys[i])] = struct{}{}
+		_, _ = h.Write(keys[i])
+		kmap[h.Sum64()] = struct{}{}
+		h.Reset()
 	}
 
 	res := make([]Field, 0, 20)
@@ -141,7 +144,9 @@ func Get(b []byte, keys [][]byte) []Field {
 		}
 
 		if e != 0 {
-			_, ok := kmap[xxhash.Sum64(k)]
+			_, _ = h.Write(k)
+			_, ok := kmap[h.Sum64()]
+			h.Reset()
 
 			if ok {
 				res = append(res, Field{k, b[s:(e + 1)]})
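For orientation, a hypothetical use of jsn.Get after this change; the API and behavior are unchanged, only the internal hashing moved. The field names Key and Value are inferred from the positional Field{k, b[s:(e + 1)]} literal above, not confirmed by this diff:

package main

import (
	"fmt"

	"github.com/dosco/super-graph/jsn"
)

func main() {
	doc := []byte(`{"id": 1, "full_name": "Ada", "email": "ada@example.com"}`)

	// Fetch just the two keys we care about; order follows the document.
	for _, f := range jsn.Get(doc, [][]byte{[]byte("id"), []byte("full_name")}) {
		fmt.Printf("%s = %s\n", f.Key, f.Value)
	}
}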
@@ -3,8 +3,7 @@ package jsn
 import (
 	"bytes"
 	"errors"
+	"hash/maphash"
-
-	"github.com/cespare/xxhash/v2"
 )
 
 // Replace function replaces key-value pairs provided in the `from` argument with those in the `to` argument
@@ -18,7 +17,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
 		return err
 	}
 
-	h := xxhash.New()
+	h := maphash.Hash{}
 	tmap := make(map[uint64]int, len(from))
 
 	for i, f := range from {
@@ -133,9 +132,18 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
 		if e != 0 {
 			e++
 
+			if e <= s {
+				return errors.New("invalid json")
+			}
+
 			if _, err := h.Write(b[s:e]); err != nil {
 				return err
 			}
 
+			if (we + 1) <= ws {
+				return errors.New("invalid json")
+			}
+
 			n, ok := tmap[h.Sum64()]
 			h.Reset()
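As in Clear and Filter, the two new guards in Replace trade panics for errors: if the computed key span or write span collapses on adversarial input, slicing b[s:e] would panic, so the function now returns "invalid json" instead. Together with the fuzz-test crashers above, this is the common thread of the jsn changes in this commit set: standard-library hashing plus bounds checks hardened against fuzzer-found inputs.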