Compare commits

13 Commits

SHA1: 33f3fefbf3, a775f9475b, bd157290f6, 82cc712a93, 0ce129de14, 1a15e433ba, 816121fbcf, e82e97a9d7, 6102f1d66e, 701b2f3bfd, bac89d8301, b3dfb2bc7b, 1fb7f0e6c8
@@ -12,8 +12,7 @@ FROM golang:1.14-alpine as go-build

RUN apk update && \
    apk add --no-cache make && \
    apk add --no-cache git && \
    apk add --no-cache jq && \
    apk add --no-cache upx=3.95-r2
    apk add --no-cache jq

RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc

@@ -29,9 +28,9 @@ COPY --from=react-build /web/build/ ./internal/serv/web/build

RUN go mod vendor
RUN make build
RUN echo "Compressing binary, will take a bit of time..." && \
    upx --ultra-brute -qq super-graph && \
    upx -t super-graph
# RUN echo "Compressing binary, will take a bit of time..." && \
#    upx --ultra-brute -qq super-graph && \
#    upx -t super-graph
@@ -49,6 +49,7 @@ import (
	"crypto/sha256"
	"database/sql"
	"encoding/json"
	"hash/maphash"
	_log "log"
	"os"

@@ -83,10 +84,11 @@ type SuperGraph struct {
	schema      *psql.DBSchema
	allowList   *allow.List
	encKey      [32]byte
	prepared    map[string]*preparedItem
	hashSeed    maphash.Seed
	queries     map[uint64]query
	roles       map[string]*Role
	getRole     *sql.Stmt
	rmap        map[uint64]*resolvFn
	rmap        map[uint64]resolvFn
	abacEnabled bool
	anonExists  bool
	qc          *qcode.Compiler

@@ -111,6 +113,7 @@ func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph,
		db:       db,
		dbinfo:   dbinfo,
		log:      _log.New(os.Stdout, "", 0),
		hashSeed: maphash.MakeSeed(),
	}

	if err := sg.initConfig(); err != nil {
@@ -12,7 +12,8 @@ import (
// to a prepared statement.

func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
	vars := make([]interface{}, len(md.Params))
	params := md.Params()
	vars := make([]interface{}, len(params))

	var fields map[string]json.RawMessage
	var err error
@@ -25,7 +26,7 @@ func (c *scontext) argList(md psql.Metadata) ([]interface{}, error) {
		}
	}

	for i, p := range md.Params {
	for i, p := range params {
		switch p.Name {
		case "user_id":
			if v := c.Value(UserIDKey); v != nil {
core/bench.11 (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
INF roles_query not defined: attribute based access control disabled
|
||||
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
|
||||
goos: darwin
|
||||
goarch: amd64
|
||||
pkg: github.com/dosco/super-graph/core
|
||||
BenchmarkGraphQL-16 INF roles_query not defined: attribute based access control disabled
|
||||
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
|
||||
INF roles_query not defined: attribute based access control disabled
|
||||
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
|
||||
INF roles_query not defined: attribute based access control disabled
|
||||
all expectations were already fulfilled, call to Query 'SELECT jsonb_build_object('users', "__sj_0"."json", 'products', "__sj_1"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "__sj_2"."json" AS "customers", "__sj_3"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_1" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "users_3"."full_name" AS "full_name", "users_3"."phone" AS "phone", "users_3"."email" AS "email" FROM (SELECT "users"."full_name", "users"."phone", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_1"."user_id"))) LIMIT ('1') :: integer) AS "users_3") AS "__sr_3") AS "__sj_3" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_1"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1") AS "__sj_1", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."name" AS "name" FROM (SELECT "users"."id" FROM "users" GROUP BY "users"."id" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"' with args [] was not expected
|
||||
105048 10398 ns/op 18342 B/op 55 allocs/op
|
||||
PASS
|
||||
ok github.com/dosco/super-graph/core 1.328s
|
||||
PASS
|
||||
ok github.com/dosco/super-graph/core/internal/allow 0.088s
|
||||
? github.com/dosco/super-graph/core/internal/crypto [no test files]
|
||||
? github.com/dosco/super-graph/core/internal/integration_tests [no test files]
|
||||
PASS
|
||||
ok github.com/dosco/super-graph/core/internal/integration_tests/cockroachdb 0.121s
|
||||
PASS
|
||||
ok github.com/dosco/super-graph/core/internal/integration_tests/postgresql 0.118s
|
||||
goos: darwin
|
||||
goarch: amd64
|
||||
pkg: github.com/dosco/super-graph/core/internal/psql
|
||||
BenchmarkCompile-16 79845 14428 ns/op 4584 B/op 39 allocs/op
|
||||
BenchmarkCompileParallel-16 326205 3918 ns/op 4633 B/op 39 allocs/op
|
||||
PASS
|
||||
ok github.com/dosco/super-graph/core/internal/psql 2.696s
|
||||
goos: darwin
|
||||
goarch: amd64
|
||||
pkg: github.com/dosco/super-graph/core/internal/qcode
|
||||
BenchmarkQCompile-16 146953 8049 ns/op 3756 B/op 28 allocs/op
|
||||
BenchmarkQCompileP-16 475936 2447 ns/op 3790 B/op 28 allocs/op
|
||||
BenchmarkParse-16 140811 8163 ns/op 3902 B/op 18 allocs/op
|
||||
BenchmarkParseP-16 571345 2041 ns/op 3903 B/op 18 allocs/op
|
||||
BenchmarkSchemaParse-16 230715 5012 ns/op 3968 B/op 57 allocs/op
|
||||
BenchmarkSchemaParseP-16 802426 1565 ns/op 3968 B/op 57 allocs/op
|
||||
PASS
|
||||
ok github.com/dosco/super-graph/core/internal/qcode 8.427s
|
||||
? github.com/dosco/super-graph/core/internal/util [no test files]
|
@@ -88,6 +88,7 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {

	stmts := make([]stmt, 0, len(sg.conf.Roles))
	w := &bytes.Buffer{}
	md := psql.Metadata{}

	for i := 0; i < len(sg.conf.Roles); i++ {
		role := &sg.conf.Roles[i]
@@ -105,16 +106,18 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
		stmts = append(stmts, stmt{role: role, qc: qc})
		s := &stmts[len(stmts)-1]

		s.md, err = sg.pc.Compile(w, qc, psql.Variables(vm))
		md, err = sg.pc.CompileWithMetadata(w, qc, psql.Variables(vm), md)
		if err != nil {
			return nil, err
		}

		s.sql = w.String()
		s.md = md

		w.Reset()
	}

	sql, err := sg.renderUserQuery(stmts)
	sql, err := sg.renderUserQuery(md, stmts)
	if err != nil {
		return nil, err
	}
@@ -124,7 +127,7 @@ func (sg *SuperGraph) buildMultiStmt(query, vars []byte) ([]stmt, error) {
}

//nolint: errcheck
func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
func (sg *SuperGraph) renderUserQuery(md psql.Metadata, stmts []stmt) (string, error) {
	w := &bytes.Buffer{}

	io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)
@@ -142,7 +145,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
	}

	io.WriteString(w, `END) FROM (SELECT (CASE WHEN EXISTS (`)
	io.WriteString(w, sg.conf.RolesQuery)
	md.RenderVar(w, sg.conf.RolesQuery)
	io.WriteString(w, `) THEN `)

	io.WriteString(w, `(SELECT (CASE`)
@@ -158,7 +161,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
	}

	io.WriteString(w, ` ELSE 'user' END) FROM (`)
	io.WriteString(w, sg.conf.RolesQuery)
	md.RenderVar(w, sg.conf.RolesQuery)
	io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
	io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler") AS "_sg_auth_info"(role) LIMIT 1; `)
@@ -30,12 +30,10 @@ type Config struct {
	// or other database functions
	SetUserID bool `mapstructure:"set_user_id"`

	// DefaultAllow reverses the blocked by default behaviour for queries in
	// anonymous mode. (anon role)
	// For example if the table `users` is not listed under the anon role then
	// access to it would by default for unauthenticated queries this reverses
	// this behavior (!!! Use with caution !!!!)
	DefaultAllow bool `mapstructure:"default_allow"`
	// DefaultBlock ensures that in anonymous mode (role 'anon') all tables
	// are blocked from queries and mutations. To open access to tables in
	// anonymous mode they have to be added to the 'anon' role config.
	DefaultBlock bool `mapstructure:"default_block"`

	// Vars is a map of hardcoded variables that can be leveraged in your
	// queries (eg variable admin_id will be $admin_id in the query)
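Not part of the commit, just a reading aid: a minimal sketch of how the new DefaultBlock flag and the anon role are meant to combine, using only the Config, Role, RoleTable and Query types defined in this file; the table name and the limit are invented for illustration.

package main

import "github.com/dosco/super-graph/core"

// With DefaultBlock set, anonymous (role "anon") access is denied to every
// table except the ones explicitly listed under the anon role.
var conf = core.Config{
	DefaultBlock: true,
	Roles: []core.Role{
		{
			Name: "anon",
			Tables: []core.RoleTable{
				// RoleTable.Query is a *Query after this change.
				{Name: "products", Query: &core.Query{Limit: 20}},
			},
		},
	},
}

func main() {}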
@@ -57,6 +55,9 @@ type Config struct {
	// Roles contains all the configuration for all the roles you want to support
	// `user` and `anon` are two default roles. User role is for when a user ID is
	// available and Anon when it's not.
	//
	// If you're using the RolesQuery config to enable atribute based acess control then
	// you can add more custom roles.
	Roles []Role

	// Inflections is to add additionally singular to plural mappings
@@ -108,12 +109,12 @@ type Role struct {
// RoleTable struct contains role specific access control values for a database table
type RoleTable struct {
	Name     string
	ReadOnly *bool `mapstructure:"read_only"`
	ReadOnly bool `mapstructure:"read_only"`

	Query  Query
	Insert Insert
	Update Update
	Delete Delete
	Query  *Query
	Insert *Insert
	Update *Update
	Delete *Delete
}

// Query struct contains access control values for query operations
@@ -122,7 +123,7 @@ type Query struct {
	Filters          []string
	Columns          []string
	DisableFunctions bool `mapstructure:"disable_functions"`
	Block            *bool
	Block            bool
}

// Insert struct contains access control values for insert operations
@@ -130,7 +131,7 @@ type Insert struct {
	Filters []string
	Columns []string
	Presets map[string]string
	Block   *bool
	Block   bool
}

// Insert struct contains access control values for update operations
@@ -138,14 +139,59 @@ type Update struct {
	Filters []string
	Columns []string
	Presets map[string]string
	Block   *bool
	Block   bool
}

// Delete struct contains access control values for delete operations
type Delete struct {
	Filters []string
	Columns []string
	Block   *bool
	Block   bool
}

// AddRoleTable function is a helper function to make it easy to add per-table
// row-level config
func (c *Config) AddRoleTable(role string, table string, conf interface{}) error {
	var r *Role

	for i := range c.Roles {
		if strings.EqualFold(c.Roles[i].Name, role) {
			r = &c.Roles[i]
			break
		}
	}
	if r == nil {
		nr := Role{Name: role}
		c.Roles = append(c.Roles, nr)
		r = &nr
	}

	var t *RoleTable
	for i := range r.Tables {
		if strings.EqualFold(r.Tables[i].Name, table) {
			t = &r.Tables[i]
			break
		}
	}
	if t == nil {
		nt := RoleTable{Name: table}
		r.Tables = append(r.Tables, nt)
		t = &nt
	}

	switch v := conf.(type) {
	case Query:
		t.Query = &v
	case Insert:
		t.Insert = &v
	case Update:
		t.Update = &v
	case Delete:
		t.Delete = &v
	default:
		return fmt.Errorf("unsupported object type: %t", v)
	}
	return nil
}

// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
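Likewise a hedged sketch of how the new AddRoleTable helper above might be called, assuming the same github.com/dosco/super-graph/core import as the previous sketch; the wrapper function name, role, table and limit are invented.

// addAnonProducts asks the helper to find or create the "anon" role and its
// "products" entry, then store the query config behind a pointer
// (RoleTable.Query is *Query after this change).
func addAnonProducts(conf *core.Config) error {
	return conf.AddRoleTable("anon", "products", core.Query{Limit: 20})
}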
core/core.go (35 changed lines)
@@ -5,6 +5,7 @@ import (
|
||||
"database/sql"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"time"
|
||||
|
||||
"github.com/dosco/super-graph/core/internal/psql"
|
||||
@ -90,6 +91,7 @@ func (sg *SuperGraph) initCompilers() error {
|
||||
}
|
||||
|
||||
sg.qc, err = qcode.NewCompiler(qcode.Config{
|
||||
DefaultBlock: sg.conf.DefaultBlock,
|
||||
Blocklist: sg.conf.Blocklist,
|
||||
})
|
||||
if err != nil {
|
||||
@ -123,7 +125,7 @@ func (c *scontext) execQuery() ([]byte, error) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if len(data) == 0 || st.md.Skipped == 0 {
|
||||
if len(data) == 0 || st.md.Skipped() == 0 {
|
||||
return data, nil
|
||||
}
|
||||
|
||||
@ -164,32 +166,43 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
|
||||
|
||||
} else {
|
||||
role = c.role
|
||||
|
||||
}
|
||||
|
||||
c.res.role = role
|
||||
|
||||
ps, ok := c.sg.prepared[stmtHash(c.res.name, role)]
|
||||
h := maphash.Hash{}
|
||||
h.SetSeed(c.sg.hashSeed)
|
||||
|
||||
q, ok := c.sg.queries[queryID(&h, c.res.name, role)]
|
||||
if !ok {
|
||||
return nil, nil, errNotFound
|
||||
}
|
||||
c.res.sql = ps.st.sql
|
||||
|
||||
if q.sd == nil {
|
||||
q.Do(func() { c.sg.prepare(&q, role) })
|
||||
|
||||
if q.err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
}
|
||||
|
||||
c.res.sql = q.st.sql
|
||||
|
||||
var root []byte
|
||||
var row *sql.Row
|
||||
|
||||
varsList, err := c.argList(ps.st.md)
|
||||
varsList, err := c.argList(q.st.md)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if useTx {
|
||||
row = tx.Stmt(ps.sd).QueryRow(varsList...)
|
||||
row = tx.Stmt(q.sd).QueryRow(varsList...)
|
||||
} else {
|
||||
row = ps.sd.QueryRow(varsList...)
|
||||
row = q.sd.QueryRow(varsList...)
|
||||
}
|
||||
|
||||
if ps.roleArg {
|
||||
if q.roleArg {
|
||||
err = row.Scan(&role, &root)
|
||||
} else {
|
||||
err = row.Scan(&root)
|
||||
@ -203,15 +216,15 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
|
||||
|
||||
if useTx {
|
||||
if err := tx.Commit(); err != nil {
|
||||
return nil, nil, err
|
||||
return nil, nil, q.err
|
||||
}
|
||||
}
|
||||
|
||||
if root, err = c.sg.encryptCursor(ps.st.qc, root); err != nil {
|
||||
if root, err = c.sg.encryptCursor(q.st.qc, root); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return root, &ps.st, nil
|
||||
return root, &q.st, nil
|
||||
}
|
||||
|
||||
func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
|
||||
|
core/init.go (84 changed lines)
@@ -74,13 +74,22 @@ func (sg *SuperGraph) initConfig() error {
|
||||
}
|
||||
|
||||
if c.RolesQuery == "" {
|
||||
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
||||
sg.log.Printf("INF roles_query not defined: attribute based access control disabled")
|
||||
} else {
|
||||
n := 0
|
||||
for k, v := range sg.roles {
|
||||
if k == "user" || k == "anon" {
|
||||
n++
|
||||
} else if v.Match != "" {
|
||||
n++
|
||||
}
|
||||
}
|
||||
sg.abacEnabled = (n > 2)
|
||||
|
||||
_, userExists := sg.roles["user"]
|
||||
_, sg.anonExists = sg.roles["anon"]
|
||||
|
||||
sg.abacEnabled = userExists && c.RolesQuery != ""
|
||||
if !sg.abacEnabled {
|
||||
sg.log.Printf("WRN attribute based access control disabled: no custom roles found (with 'match' defined)")
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
@ -196,7 +205,7 @@ func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
|
||||
func addRoles(c *Config, qc *qcode.Compiler) error {
|
||||
for _, r := range c.Roles {
|
||||
for _, t := range r.Tables {
|
||||
if err := addRole(qc, r, t, c.DefaultAllow); err != nil {
|
||||
if err := addRole(qc, r, t, c.DefaultBlock); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
@ -205,67 +214,56 @@ func addRoles(c *Config, qc *qcode.Compiler) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultAllow bool) error {
|
||||
ro := true // read-only
|
||||
func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultBlock bool) error {
|
||||
ro := false // read-only
|
||||
|
||||
if defaultAllow {
|
||||
ro = false
|
||||
if defaultBlock && r.Name == "anon" {
|
||||
ro = true
|
||||
}
|
||||
|
||||
if r.Name != "anon" {
|
||||
ro = false
|
||||
if t.ReadOnly {
|
||||
ro = true
|
||||
}
|
||||
|
||||
if t.ReadOnly != nil {
|
||||
ro = *t.ReadOnly
|
||||
}
|
||||
query := qcode.QueryConfig{Block: false}
|
||||
insert := qcode.InsertConfig{Block: ro}
|
||||
update := qcode.UpdateConfig{Block: ro}
|
||||
del := qcode.DeleteConfig{Block: ro}
|
||||
|
||||
blocked := struct {
|
||||
query bool
|
||||
insert bool
|
||||
update bool
|
||||
delete bool
|
||||
}{false, ro, ro, ro}
|
||||
|
||||
if t.Query.Block != nil {
|
||||
blocked.query = *t.Query.Block
|
||||
}
|
||||
if t.Insert.Block != nil {
|
||||
blocked.insert = *t.Insert.Block
|
||||
}
|
||||
if t.Update.Block != nil {
|
||||
blocked.update = *t.Update.Block
|
||||
}
|
||||
if t.Delete.Block != nil {
|
||||
blocked.delete = *t.Delete.Block
|
||||
}
|
||||
|
||||
query := qcode.QueryConfig{
|
||||
if t.Query != nil {
|
||||
query = qcode.QueryConfig{
|
||||
Limit: t.Query.Limit,
|
||||
Filters: t.Query.Filters,
|
||||
Columns: t.Query.Columns,
|
||||
DisableFunctions: t.Query.DisableFunctions,
|
||||
Block: blocked.query,
|
||||
Block: t.Query.Block,
|
||||
}
|
||||
}
|
||||
|
||||
insert := qcode.InsertConfig{
|
||||
if t.Insert != nil {
|
||||
insert = qcode.InsertConfig{
|
||||
Filters: t.Insert.Filters,
|
||||
Columns: t.Insert.Columns,
|
||||
Presets: t.Insert.Presets,
|
||||
Block: blocked.insert,
|
||||
Block: t.Insert.Block,
|
||||
}
|
||||
}
|
||||
|
||||
update := qcode.UpdateConfig{
|
||||
if t.Update != nil {
|
||||
update = qcode.UpdateConfig{
|
||||
Filters: t.Update.Filters,
|
||||
Columns: t.Update.Columns,
|
||||
Presets: t.Update.Presets,
|
||||
Block: blocked.update,
|
||||
Block: t.Update.Block,
|
||||
}
|
||||
}
|
||||
|
||||
del := qcode.DeleteConfig{
|
||||
if t.Delete != nil {
|
||||
del = qcode.DeleteConfig{
|
||||
Filters: t.Delete.Filters,
|
||||
Columns: t.Delete.Columns,
|
||||
Block: blocked.delete,
|
||||
Block: t.Delete.Block,
|
||||
}
|
||||
}
|
||||
|
||||
return qc.AddRole(r.Name, t.Name, qcode.TRConfig{
|
||||
|
@ -6,6 +6,7 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"log"
|
||||
"os"
|
||||
"sort"
|
||||
"strings"
|
||||
@ -35,6 +36,7 @@ type List struct {
|
||||
type Config struct {
|
||||
CreateIfNotExists bool
|
||||
Persist bool
|
||||
Log *log.Logger
|
||||
}
|
||||
|
||||
func New(filename string, conf Config) (*List, error) {
|
||||
@ -80,6 +82,12 @@ func New(filename string, conf Config) (*List, error) {
|
||||
} else {
|
||||
al.filepath = filename
|
||||
}
|
||||
|
||||
if file, err := os.OpenFile(al.filepath, os.O_RDONLY|os.O_CREATE, 0644); err != nil {
|
||||
return nil, err
|
||||
} else {
|
||||
file.Close()
|
||||
}
|
||||
}
|
||||
|
||||
var err error
|
||||
@@ -89,8 +97,10 @@ func New(filename string, conf Config) (*List, error) {

	go func() {
		for v := range al.saveChan {
			if err = al.save(v); err != nil {
				break
			err := al.save(v)

			if err != nil && conf.Log != nil {
				conf.Log.Println("WRN allow list save:", err)
			}
		}
	}()
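A hedged sketch of what the new Config.Log hook means for callers of this package: failures in the background save goroutine are now reported through the supplied logger rather than silently ending the loop. It assumes imports of log and os plus this allow package (which is internal to the module); the wrapper function name is invented, the filename matches the integration test config.

func openAllowList() (*allow.List, error) {
	return allow.New("./allow.list", allow.Config{
		CreateIfNotExists: true,
		Persist:           true,
		// With the change above, errors from the background save
		// goroutine are logged here instead of stopping the loop.
		Log: log.New(os.Stdout, "allow ", log.LstdFlags),
	})
}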
@ -55,19 +55,6 @@ func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
|
||||
config.AllowListFile = "./allow.list"
|
||||
config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`
|
||||
|
||||
blockFalse := false
|
||||
|
||||
config.Roles = []core.Role{
|
||||
core.Role{
|
||||
Name: "anon",
|
||||
Tables: []core.RoleTable{
|
||||
core.RoleTable{Name: "users", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
|
||||
core.RoleTable{Name: "product", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
|
||||
core.RoleTable{Name: "line_item", ReadOnly: &blockFalse, Query: core.Query{Limit: 100}},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
sg, err := core.NewSuperGraph(&config, db)
|
||||
require.NoError(t, err)
|
||||
ctx := context.Background()
|
||||
|
@ -1,4 +1,3 @@
|
||||
//nolint:errcheck
|
||||
package psql
|
||||
|
||||
import (
|
||||
@ -112,15 +111,15 @@ func (c *compilerContext) renderColumnSearchRank(sel *qcode.Select, ti *DBTableI
|
||||
c.renderComma(columnsRendered)
|
||||
//fmt.Fprintf(w, `ts_rank("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
|
||||
//c.sel.Name, cn, arg.Val, col.Name)
|
||||
io.WriteString(c.w, `ts_rank(`)
|
||||
_, _ = io.WriteString(c.w, `ts_rank(`)
|
||||
colWithTable(c.w, ti.Name, cn)
|
||||
if c.schema.ver >= 110000 {
|
||||
io.WriteString(c.w, `, websearch_to_tsquery(`)
|
||||
_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
|
||||
} else {
|
||||
io.WriteString(c.w, `, to_tsquery(`)
|
||||
_, _ = io.WriteString(c.w, `, to_tsquery(`)
|
||||
}
|
||||
c.renderValueExp(Param{Name: arg.Val, Type: "string"})
|
||||
io.WriteString(c.w, `))`)
|
||||
c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
|
||||
_, _ = io.WriteString(c.w, `))`)
|
||||
alias(c.w, col.Name)
|
||||
|
||||
return nil
|
||||
@ -137,15 +136,15 @@ func (c *compilerContext) renderColumnSearchHeadline(sel *qcode.Select, ti *DBTa
|
||||
c.renderComma(columnsRendered)
|
||||
//fmt.Fprintf(w, `ts_headline("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
|
||||
//c.sel.Name, cn, arg.Val, col.Name)
|
||||
io.WriteString(c.w, `ts_headline(`)
|
||||
_, _ = io.WriteString(c.w, `ts_headline(`)
|
||||
colWithTable(c.w, ti.Name, cn)
|
||||
if c.schema.ver >= 110000 {
|
||||
io.WriteString(c.w, `, websearch_to_tsquery(`)
|
||||
_, _ = io.WriteString(c.w, `, websearch_to_tsquery(`)
|
||||
} else {
|
||||
io.WriteString(c.w, `, to_tsquery(`)
|
||||
_, _ = io.WriteString(c.w, `, to_tsquery(`)
|
||||
}
|
||||
c.renderValueExp(Param{Name: arg.Val, Type: "string"})
|
||||
io.WriteString(c.w, `))`)
|
||||
c.md.renderValueExp(c.w, Param{Name: arg.Val, Type: "string"})
|
||||
_, _ = io.WriteString(c.w, `))`)
|
||||
alias(c.w, col.Name)
|
||||
|
||||
return nil
|
||||
@ -157,9 +156,9 @@ func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInf
|
||||
}
|
||||
|
||||
c.renderComma(columnsRendered)
|
||||
io.WriteString(c.w, `(`)
|
||||
_, _ = io.WriteString(c.w, `(`)
|
||||
squoted(c.w, ti.Name)
|
||||
io.WriteString(c.w, ` :: text)`)
|
||||
_, _ = io.WriteString(c.w, ` :: text)`)
|
||||
alias(c.w, col.Name)
|
||||
|
||||
return nil
|
||||
@ -169,9 +168,9 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
|
||||
pl := funcPrefixLen(c.schema.fm, col.Name)
|
||||
// if pl == 0 {
|
||||
// //fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
|
||||
// io.WriteString(c.w, `'`)
|
||||
// io.WriteString(c.w, col.Name)
|
||||
// io.WriteString(c.w, ` not defined'`)
|
||||
// _, _ = io.WriteString(c.w, `'`)
|
||||
// _, _ = io.WriteString(c.w, col.Name)
|
||||
// _, _ = io.WriteString(c.w, ` not defined'`)
|
||||
// alias(c.w, col.Name)
|
||||
// }
|
||||
|
||||
@ -190,10 +189,10 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
|
||||
c.renderComma(columnsRendered)
|
||||
|
||||
//fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Name, cn, col.Name)
|
||||
io.WriteString(c.w, fn)
|
||||
io.WriteString(c.w, `(`)
|
||||
_, _ = io.WriteString(c.w, fn)
|
||||
_, _ = io.WriteString(c.w, `(`)
|
||||
colWithTable(c.w, ti.Name, cn)
|
||||
io.WriteString(c.w, `)`)
|
||||
_, _ = io.WriteString(c.w, `)`)
|
||||
alias(c.w, col.Name)
|
||||
|
||||
return nil
|
||||
@ -201,7 +200,7 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
|
||||
|
||||
func (c *compilerContext) renderComma(columnsRendered int) {
|
||||
if columnsRendered != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
_, _ = io.WriteString(c.w, `, `)
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -11,7 +11,7 @@ import (
|
||||
var (
|
||||
qcompileTest, _ = qcode.NewCompiler(qcode.Config{})
|
||||
|
||||
schema = GetTestSchema()
|
||||
schema, _ = GetTestSchema()
|
||||
|
||||
vars = map[string]string{
|
||||
"admin_account_id": "5",
|
||||
@ -25,6 +25,37 @@ var (
|
||||
|
||||
// FuzzerEntrypoint for Fuzzbuzz
|
||||
func Fuzz(data []byte) int {
|
||||
err1 := query(data)
|
||||
err2 := insert(data)
|
||||
err3 := update(data)
|
||||
err4 := delete(data)
|
||||
|
||||
if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
|
||||
return 0
|
||||
}
|
||||
|
||||
return 1
|
||||
}
|
||||
|
||||
func query(data []byte) error {
|
||||
gql := data
|
||||
|
||||
qc, err1 := qcompileTest.Compile(gql, "user")
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(data),
|
||||
}
|
||||
|
||||
_, _, err2 := pcompileTest.CompileEx(qc, vars)
|
||||
|
||||
if err1 != nil {
|
||||
return err1
|
||||
} else {
|
||||
return err2
|
||||
}
|
||||
}
|
||||
|
||||
func insert(data []byte) error {
|
||||
gql := `mutation {
|
||||
product(insert: $data) {
|
||||
id
|
||||
@ -47,9 +78,57 @@ func Fuzz(data []byte) int {
|
||||
}
|
||||
|
||||
_, _, err = pcompileTest.CompileEx(qc, vars)
|
||||
if err != nil {
|
||||
return 0
|
||||
return err
|
||||
}
|
||||
|
||||
return 1
|
||||
func update(data []byte) error {
|
||||
gql := `mutation {
|
||||
product(insert: $data) {
|
||||
id
|
||||
name
|
||||
user {
|
||||
id
|
||||
full_name
|
||||
email
|
||||
}
|
||||
}
|
||||
}`
|
||||
|
||||
qc, err := qcompileTest.Compile([]byte(gql), "user")
|
||||
if err != nil {
|
||||
panic("qcompile can't fail")
|
||||
}
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(data),
|
||||
}
|
||||
|
||||
_, _, err = pcompileTest.CompileEx(qc, vars)
|
||||
return err
|
||||
}
|
||||
|
||||
func delete(data []byte) error {
|
||||
gql := `mutation {
|
||||
product(insert: $data) {
|
||||
id
|
||||
name
|
||||
user {
|
||||
id
|
||||
full_name
|
||||
email
|
||||
}
|
||||
}
|
||||
}`
|
||||
|
||||
qc, err := qcompileTest.Compile([]byte(gql), "user")
|
||||
if err != nil {
|
||||
panic("qcompile can't fail")
|
||||
}
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(data),
|
||||
}
|
||||
|
||||
_, _, err = pcompileTest.CompileEx(qc, vars)
|
||||
return err
|
||||
}
|
||||
|
core/internal/psql/fuzz_test.go (new file, 20 lines)
@@ -0,0 +1,20 @@
// +build gofuzz

package psql

import (
	"testing"
)

var ret int

func TestFuzzCrashers(t *testing.T) {
	var crashers = []string{
		"{\"connect\":{}}",
		"q(q{q{q{q{q{q{q{q{",
	}

	for _, f := range crashers {
		ret = Fuzz([]byte(f))
	}
}
@@ -25,7 +25,7 @@ func (c *compilerContext) renderInsert(
	if insert[0] == '[' {
		io.WriteString(c.w, `json_array_elements(`)
	}
	c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
	c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
	io.WriteString(c.w, ` :: json`)
	if insert[0] == '[' {
		io.WriteString(c.w, `)`)
core/internal/psql/metadata.go (new file, 61 lines)
@@ -0,0 +1,61 @@
package psql

import (
	"io"
)

func (md *Metadata) RenderVar(w io.Writer, vv string) {
	f, s := -1, 0

	for i := range vv {
		v := vv[i]
		switch {
		case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
			if (i - s) > 0 {
				_, _ = io.WriteString(w, vv[s:i])
			}
			f = i

		case (v < 'a' && v > 'z') &&
			(v < 'A' && v > 'Z') &&
			(v < '0' && v > '9') &&
			v != '_' &&
			f != -1 &&
			(i-f) > 1:
			md.renderValueExp(w, Param{Name: vv[f+1 : i]})
			s = i
			f = -1
		}
	}

	if f != -1 && (len(vv)-f) > 1 {
		md.renderValueExp(w, Param{Name: vv[f+1:]})
	} else {
		_, _ = io.WriteString(w, vv[s:])
	}
}

func (md *Metadata) renderValueExp(w io.Writer, p Param) {
	_, _ = io.WriteString(w, `$`)
	if v, ok := md.pindex[p.Name]; ok {
		int32String(w, int32(v))

	} else {
		md.params = append(md.params, p)
		n := len(md.params)

		if md.pindex == nil {
			md.pindex = make(map[string]int)
		}
		md.pindex[p.Name] = n
		int32String(w, int32(n))
	}
}

func (md Metadata) Skipped() uint32 {
	return md.skipped
}

func (md Metadata) Params() []Param {
	return md.params
}
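To make the new Metadata API easier to follow, here is a hedged sketch of how RenderVar and Params fit together (it assumes bytes and fmt imports inside this package; the function name is invented and the query string is the roles_query used in the integration test config). The outputs in the comments are my reading of the scanner above rather than anything the commit asserts.

func exampleRenderVar() {
	md := Metadata{}
	w := &bytes.Buffer{}

	// Same call shape as renderUserQuery in core/build.go: a named $var in
	// the roles query becomes a positional placeholder and is recorded.
	md.RenderVar(w, `SELECT * FROM users WHERE id = $user_id`)

	fmt.Println(w.String()) // SELECT * FROM users WHERE id = $1
	for _, p := range md.Params() {
		fmt.Println(p.Name) // user_id
	}
}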
@ -432,11 +432,11 @@ func (c *compilerContext) renderInsertUpdateColumns(
|
||||
val := root.PresetMap[cn]
|
||||
switch {
|
||||
case ok && len(val) > 1 && val[0] == '$':
|
||||
c.renderValueExp(Param{Name: val[1:], Type: col.Type})
|
||||
c.md.renderValueExp(c.w, Param{Name: val[1:], Type: col.Type})
|
||||
|
||||
case ok && strings.HasPrefix(val, "sql:"):
|
||||
io.WriteString(c.w, `(`)
|
||||
c.renderVar(val[4:], c.renderValueExp)
|
||||
c.md.RenderVar(c.w, val[4:])
|
||||
io.WriteString(c.w, `)`)
|
||||
|
||||
case ok:
|
||||
@ -542,6 +542,10 @@ func (c *compilerContext) renderConnectStmt(qc *qcode.QCode, w io.Writer,
|
||||
|
||||
rel := item.relPC
|
||||
|
||||
if rel == nil {
|
||||
return errors.New("invalid connect value")
|
||||
}
|
||||
|
||||
// Render only for parent-to-child relationship of one-to-one
|
||||
// For this to work the child needs to found first so it's primary key
|
||||
// can be set in the related column on the parent object.
|
||||
|
@ -7,6 +7,7 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"strconv"
|
||||
"strings"
|
||||
|
||||
"github.com/dosco/super-graph/core/internal/qcode"
|
||||
@ -24,8 +25,8 @@ type Param struct {
|
||||
}
|
||||
|
||||
type Metadata struct {
|
||||
Skipped uint32
|
||||
Params []Param
|
||||
skipped uint32
|
||||
params []Param
|
||||
pindex map[string]int
|
||||
}
|
||||
|
||||
@ -79,22 +80,30 @@ func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (Metadata, []byte
|
||||
}
|
||||
|
||||
func (co *Compiler) Compile(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
||||
return co.CompileWithMetadata(w, qc, vars, Metadata{})
|
||||
}
|
||||
|
||||
func (co *Compiler) CompileWithMetadata(w io.Writer, qc *qcode.QCode, vars Variables, md Metadata) (Metadata, error) {
|
||||
md.skipped = 0
|
||||
|
||||
if qc == nil {
|
||||
return md, fmt.Errorf("qcode is nil")
|
||||
}
|
||||
|
||||
switch qc.Type {
|
||||
case qcode.QTQuery:
|
||||
return co.compileQuery(w, qc, vars)
|
||||
return co.compileQueryWithMetadata(w, qc, vars, md)
|
||||
|
||||
case qcode.QTInsert,
|
||||
qcode.QTUpdate,
|
||||
qcode.QTDelete,
|
||||
qcode.QTUpsert:
|
||||
return co.compileMutation(w, qc, vars)
|
||||
}
|
||||
|
||||
default:
|
||||
return Metadata{}, fmt.Errorf("Unknown operation type %d", qc.Type)
|
||||
}
|
||||
|
||||
func (co *Compiler) compileQuery(w io.Writer, qc *qcode.QCode, vars Variables) (Metadata, error) {
|
||||
return co.compileQueryWithMetadata(w, qc, vars, Metadata{})
|
||||
}
|
||||
|
||||
func (co *Compiler) compileQueryWithMetadata(
|
||||
@ -171,7 +180,7 @@ func (co *Compiler) compileQueryWithMetadata(
|
||||
}
|
||||
|
||||
for _, cid := range sel.Children {
|
||||
if hasBit(c.md.Skipped, uint32(cid)) {
|
||||
if hasBit(c.md.skipped, uint32(cid)) {
|
||||
continue
|
||||
}
|
||||
child := &c.s[cid]
|
||||
@ -349,7 +358,7 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
|
||||
if _, ok := colmap[rel.Left.Col]; !ok {
|
||||
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Left.Col, FieldName: rel.Right.Col})
|
||||
colmap[rel.Left.Col] = struct{}{}
|
||||
c.md.Skipped |= (1 << uint(id))
|
||||
c.md.skipped |= (1 << uint(id))
|
||||
}
|
||||
|
||||
default:
|
||||
@ -617,7 +626,7 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
|
||||
i := colsRendered
|
||||
|
||||
for _, id := range sel.Children {
|
||||
if hasBit(c.md.Skipped, uint32(id)) {
|
||||
if hasBit(c.md.skipped, uint32(id)) {
|
||||
continue
|
||||
}
|
||||
childSel := &c.s[id]
|
||||
@ -799,7 +808,7 @@ func (c *compilerContext) renderCursorCTE(sel *qcode.Select) error {
|
||||
quoted(c.w, ob.Col)
|
||||
}
|
||||
io.WriteString(c.w, ` FROM string_to_array(`)
|
||||
c.renderValueExp(Param{Name: "cursor", Type: "json"})
|
||||
c.md.renderValueExp(c.w, Param{Name: "cursor", Type: "json"})
|
||||
io.WriteString(c.w, `, ',') as a) `)
|
||||
return nil
|
||||
}
|
||||
@ -1097,7 +1106,7 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
|
||||
} else {
|
||||
io.WriteString(c.w, `) @@ to_tsquery(`)
|
||||
}
|
||||
c.renderValueExp(Param{Name: ex.Val, Type: "string"})
|
||||
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: "string"})
|
||||
io.WriteString(c.w, `))`)
|
||||
|
||||
return nil
|
||||
@ -1186,7 +1195,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
|
||||
switch {
|
||||
case ok && strings.HasPrefix(val, "sql:"):
|
||||
io.WriteString(c.w, `(`)
|
||||
c.renderVar(val[4:], c.renderValueExp)
|
||||
c.md.RenderVar(c.w, val[4:])
|
||||
io.WriteString(c.w, `)`)
|
||||
|
||||
case ok:
|
||||
@ -1194,7 +1203,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
|
||||
|
||||
case ex.Op == qcode.OpIn || ex.Op == qcode.OpNotIn:
|
||||
io.WriteString(c.w, `(ARRAY(SELECT json_array_elements_text(`)
|
||||
c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: true})
|
||||
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: true})
|
||||
io.WriteString(c.w, `))`)
|
||||
|
||||
io.WriteString(c.w, ` :: `)
|
||||
@ -1203,7 +1212,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
|
||||
return
|
||||
|
||||
default:
|
||||
c.renderValueExp(Param{Name: ex.Val, Type: col.Type, IsArray: false})
|
||||
c.md.renderValueExp(c.w, Param{Name: ex.Val, Type: col.Type, IsArray: false})
|
||||
}
|
||||
|
||||
case qcode.ValRef:
|
||||
@ -1217,54 +1226,6 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
|
||||
io.WriteString(c.w, col.Type)
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderValueExp(p Param) {
|
||||
io.WriteString(c.w, `$`)
|
||||
if v, ok := c.md.pindex[p.Name]; ok {
|
||||
int32String(c.w, int32(v))
|
||||
|
||||
} else {
|
||||
c.md.Params = append(c.md.Params, p)
|
||||
n := len(c.md.Params)
|
||||
|
||||
if c.md.pindex == nil {
|
||||
c.md.pindex = make(map[string]int)
|
||||
}
|
||||
c.md.pindex[p.Name] = n
|
||||
int32String(c.w, int32(n))
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderVar(vv string, fn func(Param)) {
|
||||
f, s := -1, 0
|
||||
|
||||
for i := range vv {
|
||||
v := vv[i]
|
||||
switch {
|
||||
case (i > 0 && vv[i-1] != '\\' && v == '$') || v == '$':
|
||||
if (i - s) > 0 {
|
||||
io.WriteString(c.w, vv[s:i])
|
||||
}
|
||||
f = i
|
||||
|
||||
case (v < 'a' && v > 'z') &&
|
||||
(v < 'A' && v > 'Z') &&
|
||||
(v < '0' && v > '9') &&
|
||||
v != '_' &&
|
||||
f != -1 &&
|
||||
(i-f) > 1:
|
||||
fn(Param{Name: vv[f+1 : i]})
|
||||
s = i
|
||||
f = -1
|
||||
}
|
||||
}
|
||||
|
||||
if f != -1 && (len(vv)-f) > 1 {
|
||||
fn(Param{Name: vv[f+1:]})
|
||||
} else {
|
||||
io.WriteString(c.w, vv[s:])
|
||||
}
|
||||
}
|
||||
|
||||
func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
|
||||
switch {
|
||||
case strings.HasPrefix(fn, "avg_"):
|
||||
@ -1352,26 +1313,6 @@ func squoted(w io.Writer, identifier string) {
|
||||
io.WriteString(w, `'`)
|
||||
}
|
||||
|
||||
const charset = "0123456789"
|
||||
|
||||
func int32String(w io.Writer, val int32) {
|
||||
if val < 10 {
|
||||
w.Write([]byte{charset[val]})
|
||||
return
|
||||
}
|
||||
|
||||
temp := int32(0)
|
||||
val2 := val
|
||||
for val2 > 0 {
|
||||
temp *= 10
|
||||
temp += val2 % 10
|
||||
val2 = int32(float64(val2 / 10))
|
||||
}
|
||||
|
||||
val3 := temp
|
||||
for val3 > 0 {
|
||||
d := val3 % 10
|
||||
val3 /= 10
|
||||
w.Write([]byte{charset[d]})
|
||||
}
|
||||
io.WriteString(w, strconv.FormatInt(int64(val), 10))
|
||||
}
|
||||
|
@ -307,6 +307,80 @@ func multiRoot(t *testing.T) {
|
||||
compileGQLToPSQL(t, gql, nil, "user")
|
||||
}
|
||||
|
||||
func withFragment1(t *testing.T) {
|
||||
gql := `
|
||||
fragment userFields1 on user {
|
||||
id
|
||||
email
|
||||
}
|
||||
|
||||
query {
|
||||
users {
|
||||
...userFields2
|
||||
|
||||
created_at
|
||||
...userFields1
|
||||
}
|
||||
}
|
||||
|
||||
fragment userFields2 on user {
|
||||
first_name
|
||||
last_name
|
||||
}`
|
||||
|
||||
compileGQLToPSQL(t, gql, nil, "anon")
|
||||
}
|
||||
|
||||
func withFragment2(t *testing.T) {
|
||||
gql := `
|
||||
query {
|
||||
users {
|
||||
...userFields2
|
||||
|
||||
created_at
|
||||
...userFields1
|
||||
}
|
||||
}
|
||||
|
||||
fragment userFields1 on user {
|
||||
id
|
||||
email
|
||||
}
|
||||
|
||||
fragment userFields2 on user {
|
||||
first_name
|
||||
last_name
|
||||
}`
|
||||
|
||||
compileGQLToPSQL(t, gql, nil, "anon")
|
||||
}
|
||||
|
||||
func withFragment3(t *testing.T) {
|
||||
gql := `
|
||||
|
||||
fragment userFields1 on user {
|
||||
id
|
||||
email
|
||||
}
|
||||
|
||||
fragment userFields2 on user {
|
||||
first_name
|
||||
last_name
|
||||
}
|
||||
|
||||
query {
|
||||
users {
|
||||
...userFields2
|
||||
|
||||
created_at
|
||||
...userFields1
|
||||
}
|
||||
}
|
||||
`
|
||||
|
||||
compileGQLToPSQL(t, gql, nil, "anon")
|
||||
}
|
||||
|
||||
func withCursor(t *testing.T) {
|
||||
gql := `query {
|
||||
Products(
|
||||
@ -400,6 +474,9 @@ func TestCompileQuery(t *testing.T) {
|
||||
t.Run("queryWithVariables", queryWithVariables)
|
||||
t.Run("withWhereOnRelations", withWhereOnRelations)
|
||||
t.Run("multiRoot", multiRoot)
|
||||
t.Run("withFragment1", withFragment1)
|
||||
t.Run("withFragment2", withFragment2)
|
||||
t.Run("withFragment3", withFragment3)
|
||||
t.Run("jsonColumnAsTable", jsonColumnAsTable)
|
||||
t.Run("withCursor", withCursor)
|
||||
t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)
|
||||
|
@ -86,6 +86,12 @@ SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT t
|
||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||
=== RUN TestCompileQuery/multiRoot
|
||||
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
|
||||
=== RUN TestCompileQuery/withFragment1
|
||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||
=== RUN TestCompileQuery/withFragment2
|
||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||
=== RUN TestCompileQuery/withFragment3
|
||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."first_name" AS "first_name", "users_0"."last_name" AS "last_name", "users_0"."created_at" AS "created_at", "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT , "users"."created_at", "users"."id", "users"."email" FROM "users" GROUP BY "users"."created_at", "users"."id", "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||
=== RUN TestCompileQuery/jsonColumnAsTable
|
||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||
=== RUN TestCompileQuery/withCursor
|
||||
@ -117,6 +123,9 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
|
||||
--- PASS: TestCompileQuery/queryWithVariables (0.00s)
|
||||
--- PASS: TestCompileQuery/withWhereOnRelations (0.00s)
|
||||
--- PASS: TestCompileQuery/multiRoot (0.00s)
|
||||
--- PASS: TestCompileQuery/withFragment1 (0.00s)
|
||||
--- PASS: TestCompileQuery/withFragment2 (0.00s)
|
||||
--- PASS: TestCompileQuery/withFragment3 (0.00s)
|
||||
--- PASS: TestCompileQuery/jsonColumnAsTable (0.00s)
|
||||
--- PASS: TestCompileQuery/withCursor (0.00s)
|
||||
--- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s)
|
||||
@ -151,4 +160,4 @@ WITH "_sg_input" AS (SELECT $1 :: json AS j), "_x_users" AS (SELECT * FROM (VALU
|
||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
|
||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
|
||||
PASS
|
||||
ok github.com/dosco/super-graph/core/internal/psql (cached)
|
||||
ok github.com/dosco/super-graph/core/internal/psql 0.374s
|
||||
|
@@ -22,7 +22,7 @@ func (c *compilerContext) renderUpdate(
	}

	io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
	c.renderValueExp(Param{Name: qc.ActionVar, Type: "json"})
	c.md.renderValueExp(c.w, Param{Name: qc.ActionVar, Type: "json"})
	// io.WriteString(c.w, qc.ActionVar)
	io.WriteString(c.w, ` :: json AS j)`)
core/internal/qcode/bench.10 (new file, 11 lines)
@@ -0,0 +1,11 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile-16        120888    9236 ns/op   3755 B/op   28 allocs/op
BenchmarkQCompileP-16       502248    2620 ns/op   3795 B/op   28 allocs/op
BenchmarkParse-16           128370    9294 ns/op   3902 B/op   18 allocs/op
BenchmarkParseP-16          575752    2340 ns/op   3903 B/op   18 allocs/op
BenchmarkSchemaParse-16     212048    5779 ns/op   3968 B/op   57 allocs/op
BenchmarkSchemaParseP-16    630918    1686 ns/op   3968 B/op   57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 7.710s
@@ -6,6 +6,7 @@ import (
)

type Config struct {
	DefaultBlock bool
	Blocklist    []string
}
@ -11,15 +11,18 @@ import (
|
||||
var (
|
||||
queryToken = []byte("query")
|
||||
mutationToken = []byte("mutation")
|
||||
fragmentToken = []byte("fragment")
|
||||
subscriptionToken = []byte("subscription")
|
||||
onToken = []byte("on")
|
||||
trueToken = []byte("true")
|
||||
falseToken = []byte("false")
|
||||
quotesToken = []byte(`'"`)
|
||||
signsToken = []byte(`+-`)
|
||||
punctuatorToken = []byte(`!():=[]{|}`)
|
||||
spreadToken = []byte(`...`)
|
||||
digitToken = []byte(`0123456789`)
|
||||
dotToken = []byte(`.`)
|
||||
|
||||
punctuatorToken = `!():=[]{|}`
|
||||
)
|
||||
|
||||
// Pos represents a byte position in the original input text from which
|
||||
@ -43,6 +46,8 @@ const (
|
||||
itemName
|
||||
itemQuery
|
||||
itemMutation
|
||||
itemFragment
|
||||
itemOn
|
||||
itemSub
|
||||
itemPunctuator
|
||||
itemArgsOpen
|
||||
@ -263,12 +268,12 @@ func lexRoot(l *lexer) stateFn {
|
||||
l.backup()
|
||||
return lexString
|
||||
case r == '.':
|
||||
if len(l.input) >= 3 {
|
||||
if equals(l.input, 0, 3, spreadToken) {
|
||||
l.acceptRun(dotToken)
|
||||
s, e := l.current()
|
||||
if equals(l.input, s, e, spreadToken) {
|
||||
l.emit(itemSpread)
|
||||
return lexRoot
|
||||
}
|
||||
}
|
||||
fallthrough // '.' can start a number.
|
||||
case r == '+' || r == '-' || ('0' <= r && r <= '9'):
|
||||
l.backup()
|
||||
@ -299,10 +304,14 @@ func lexName(l *lexer) stateFn {
|
||||
switch {
|
||||
case equals(l.input, s, e, queryToken):
|
||||
l.emitL(itemQuery)
|
||||
case equals(l.input, s, e, fragmentToken):
|
||||
l.emitL(itemFragment)
|
||||
case equals(l.input, s, e, mutationToken):
|
||||
l.emitL(itemMutation)
|
||||
case equals(l.input, s, e, subscriptionToken):
|
||||
l.emitL(itemSub)
|
||||
case equals(l.input, s, e, onToken):
|
||||
l.emitL(itemOn)
|
||||
case equals(l.input, s, e, trueToken):
|
||||
l.emitL(itemBoolVal)
|
||||
case equals(l.input, s, e, falseToken):
|
||||
@ -396,31 +405,11 @@ func isAlphaNumeric(r rune) bool {
|
||||
}
|
||||
|
||||
func equals(b []byte, s Pos, e Pos, val []byte) bool {
|
||||
n := 0
|
||||
for i := s; i < e; i++ {
|
||||
if n >= len(val) {
|
||||
return true
|
||||
}
|
||||
switch {
|
||||
case b[i] >= 'A' && b[i] <= 'Z' && ('a'+(b[i]-'A')) != val[n]:
|
||||
return false
|
||||
case b[i] != val[n]:
|
||||
return false
|
||||
}
|
||||
n++
|
||||
}
|
||||
return true
|
||||
return bytes.EqualFold(b[s:e], val)
|
||||
}
|
||||
|
||||
func contains(b []byte, s Pos, e Pos, val []byte) bool {
|
||||
for i := s; i < e; i++ {
|
||||
for n := 0; n < len(val); n++ {
|
||||
if b[i] == val[n] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
}
|
||||
return false
|
||||
func contains(b []byte, s Pos, e Pos, chars string) bool {
|
||||
return bytes.ContainsAny(b[s:e], chars)
|
||||
}
|
||||
|
||||
func lowercase(b []byte, s Pos, e Pos) {
|
||||
|
@ -3,10 +3,9 @@ package qcode
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"sync"
|
||||
"unsafe"
|
||||
|
||||
"github.com/dosco/super-graph/core/internal/util"
|
||||
)
|
||||
|
||||
var (
|
||||
@ -35,8 +34,7 @@ const (
|
||||
NodeVar
|
||||
)
|
||||
|
||||
type Operation struct {
|
||||
Type parserType
|
||||
type SelectionSet struct {
|
||||
Name string
|
||||
Args []Arg
|
||||
argsA [10]Arg
|
||||
@ -44,12 +42,29 @@ type Operation struct {
|
||||
fieldsA [10]Field
|
||||
}
|
||||
|
||||
type Operation struct {
|
||||
Type parserType
|
||||
SelectionSet
|
||||
}
|
||||
|
||||
var zeroOperation = Operation{}
|
||||
|
||||
func (o *Operation) Reset() {
|
||||
*o = zeroOperation
|
||||
}
|
||||
|
||||
type Fragment struct {
|
||||
Name string
|
||||
On string
|
||||
SelectionSet
|
||||
}
|
||||
|
||||
var zeroFragment = Fragment{}
|
||||
|
||||
func (f *Fragment) Reset() {
|
||||
*f = zeroFragment
|
||||
}
|
||||
|
||||
type Field struct {
|
||||
ID int32
|
||||
ParentID int32
|
||||
@ -82,6 +97,8 @@ func (n *Node) Reset() {
|
||||
}
|
||||
|
||||
type Parser struct {
|
||||
frags map[uint64]*Fragment
|
||||
h maphash.Hash
|
||||
input []byte // the string being scanned
|
||||
pos int
|
||||
items []item
|
||||
@ -96,12 +113,194 @@ var opPool = sync.Pool{
|
||||
New: func() interface{} { return new(Operation) },
|
||||
}
|
||||
|
||||
var fragPool = sync.Pool{
|
||||
New: func() interface{} { return new(Fragment) },
|
||||
}
|
||||
|
||||
var lexPool = sync.Pool{
|
||||
New: func() interface{} { return new(lexer) },
|
||||
}
|
||||
|
||||
func Parse(gql []byte) (*Operation, error) {
|
||||
return parseSelectionSet(gql)
|
||||
var err error
|
||||
|
||||
if len(gql) == 0 {
|
||||
return nil, errors.New("blank query")
|
||||
}
|
||||
|
||||
l := lexPool.Get().(*lexer)
|
||||
l.Reset()
|
||||
defer lexPool.Put(l)
|
||||
|
||||
if err = lex(l, gql); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := &Parser{
|
||||
input: l.input,
|
||||
pos: -1,
|
||||
items: l.items,
|
||||
}
|
||||
|
||||
op := opPool.Get().(*Operation)
|
||||
op.Reset()
|
||||
op.Fields = op.fieldsA[:0]
|
||||
|
||||
s := -1
|
||||
qf := false
|
||||
|
||||
for {
|
||||
if p.peek(itemEOF) {
|
||||
p.ignore()
|
||||
break
|
||||
}
|
||||
|
||||
if p.peek(itemFragment) {
|
||||
p.ignore()
|
||||
if err = p.parseFragment(op); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
} else {
|
||||
if !qf && p.peek(itemQuery, itemMutation, itemSub, itemObjOpen) {
|
||||
s = p.pos
|
||||
qf = true
|
||||
}
|
||||
p.ignore()
|
||||
}
|
||||
}
|
||||
|
||||
p.reset(s)
|
||||
if err := p.parseOp(op); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return op, nil
|
||||
}
|
||||
|
||||
func (p *Parser) parseFragment(op *Operation) error {
|
||||
frag := fragPool.Get().(*Fragment)
|
||||
frag.Reset()
|
||||
|
||||
frag.Fields = frag.fieldsA[:0]
|
||||
frag.Args = frag.argsA[:0]
|
||||
|
||||
if p.peek(itemName) {
|
||||
frag.Name = p.val(p.next())
|
||||
}
|
||||
|
||||
if p.peek(itemOn) {
|
||||
p.ignore()
|
||||
} else {
|
||||
return errors.New("fragment: missing 'on' keyword")
|
||||
}
|
||||
|
||||
if p.peek(itemName) {
|
||||
frag.On = p.vall(p.next())
|
||||
} else {
|
||||
return errors.New("fragment: missing table name after 'on' keyword")
|
||||
}
|
||||
|
||||
if p.peek(itemObjOpen) {
|
||||
p.ignore()
|
||||
} else {
|
||||
return fmt.Errorf("fragment: expecting a '{', got: %s", p.next())
|
||||
}
|
||||
|
||||
if err := p.parseSelectionSet(&frag.SelectionSet); err != nil {
|
||||
return fmt.Errorf("fragment: %v", err)
|
||||
}
|
||||
|
||||
if p.frags == nil {
|
||||
p.frags = make(map[uint64]*Fragment)
|
||||
}
|
||||
|
||||
_, _ = p.h.WriteString(frag.Name)
|
||||
k := p.h.Sum64()
|
||||
p.h.Reset()
|
||||
|
||||
p.frags[k] = frag
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) parseOp(op *Operation) error {
|
||||
var err error
|
||||
var typeSet bool
|
||||
|
||||
if p.peek(itemQuery, itemMutation, itemSub) {
|
||||
err = p.parseOpTypeAndArgs(op)
|
||||
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s: %v", op.Type, err)
|
||||
}
|
||||
typeSet = true
|
||||
}
|
||||
|
||||
if p.peek(itemObjOpen) {
|
||||
p.ignore()
|
||||
if !typeSet {
|
||||
op.Type = opQuery
|
||||
}
|
||||
|
||||
for {
|
||||
if p.peek(itemEOF, itemFragment) {
|
||||
p.ignore()
|
||||
break
|
||||
}
|
||||
|
||||
err = p.parseSelectionSet(&op.SelectionSet)
|
||||
if err != nil {
|
||||
return fmt.Errorf("%s: %v", op.Type, err)
|
||||
}
|
||||
}
|
||||
} else {
|
||||
return fmt.Errorf("expecting a query, mutation or subscription, got: %s", p.next())
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) parseOpTypeAndArgs(op *Operation) error {
|
||||
item := p.next()
|
||||
|
||||
switch item._type {
|
||||
case itemQuery:
|
||||
op.Type = opQuery
|
||||
case itemMutation:
|
||||
op.Type = opMutate
|
||||
case itemSub:
|
||||
op.Type = opSub
|
||||
}
|
||||
|
||||
op.Args = op.argsA[:0]
|
||||
|
||||
var err error
|
||||
|
||||
if p.peek(itemName) {
|
||||
op.Name = p.val(p.next())
|
||||
}
|
||||
|
||||
if p.peek(itemArgsOpen) {
|
||||
p.ignore()
|
||||
|
||||
op.Args, err = p.parseOpParams(op.Args)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (p *Parser) parseSelectionSet(selset *SelectionSet) error {
|
||||
var err error
|
||||
|
||||
selset.Fields, err = p.parseFields(selset.Fields)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func ParseArgValue(argVal string) (*Node, error) {
|
||||
@ -123,215 +322,107 @@ func ParseArgValue(argVal string) (*Node, error) {
|
||||
return op, err
|
||||
}
|
||||
|
||||
func parseSelectionSet(gql []byte) (*Operation, error) {
|
||||
var err error
|
||||
func (p *Parser) parseFields(fields []Field) ([]Field, error) {
|
||||
st := NewStack()
|
||||
|
||||
if len(gql) == 0 {
|
||||
return nil, errors.New("blank query")
|
||||
if !p.peek(itemName, itemSpread) {
|
||||
return nil, fmt.Errorf("unexpected token: %s", p.peekNext())
|
||||
}
|
||||
|
||||
l := lexPool.Get().(*lexer)
|
||||
l.Reset()
|
||||
|
||||
if err = lex(l, gql); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
p := &Parser{
|
||||
input: l.input,
|
||||
pos: -1,
|
||||
items: l.items,
|
||||
}
|
||||
|
||||
var op *Operation
|
||||
|
||||
if p.peek(itemObjOpen) {
|
||||
for {
|
||||
if p.peek(itemEOF) {
|
||||
p.ignore()
|
||||
op, err = p.parseQueryOp()
|
||||
} else {
|
||||
op, err = p.parseOp()
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return nil, errors.New("invalid query")
|
||||
}
|
||||
|
||||
if p.peek(itemObjClose) {
|
||||
p.ignore()
|
||||
|
||||
if st.Len() != 0 {
|
||||
st.Pop()
|
||||
continue
|
||||
} else {
|
||||
return nil, fmt.Errorf("operation missing closing '}'")
|
||||
}
|
||||
|
||||
if !p.peek(itemEOF) {
|
||||
p.ignore()
|
||||
return nil, fmt.Errorf("invalid '%s' found after closing '}'", p.current())
|
||||
}
|
||||
|
||||
lexPool.Put(l)
|
||||
|
||||
return op, err
|
||||
}
|
||||
|
||||
func (p *Parser) next() item {
|
||||
n := p.pos + 1
|
||||
if n >= len(p.items) {
|
||||
p.err = errEOT
|
||||
return item{_type: itemEOF}
|
||||
}
|
||||
p.pos = n
|
||||
return p.items[p.pos]
|
||||
}
|
||||
|
||||
func (p *Parser) ignore() {
|
||||
n := p.pos + 1
|
||||
if n >= len(p.items) {
|
||||
p.err = errEOT
|
||||
return
|
||||
}
|
||||
p.pos = n
|
||||
}
|
||||
|
||||
func (p *Parser) current() string {
|
||||
item := p.items[p.pos]
|
||||
return b2s(p.input[item.pos:item.end])
|
||||
}
|
||||
|
||||
func (p *Parser) peek(types ...itemType) bool {
|
||||
n := p.pos + 1
|
||||
// if p.items[n]._type == itemEOF {
|
||||
// return false
|
||||
// }
|
||||
if n >= len(p.items) {
|
||||
return false
|
||||
}
|
||||
for i := 0; i < len(types); i++ {
|
||||
if p.items[n]._type == types[i] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (p *Parser) parseOp() (*Operation, error) {
|
||||
if !p.peek(itemQuery, itemMutation, itemSub) {
|
||||
err := errors.New("expecting a query, mutation or subscription")
|
||||
return nil, err
|
||||
}
|
||||
item := p.next()
|
||||
|
||||
op := opPool.Get().(*Operation)
|
||||
op.Reset()
|
||||
|
||||
switch item._type {
|
||||
case itemQuery:
|
||||
op.Type = opQuery
|
||||
case itemMutation:
|
||||
op.Type = opMutate
|
||||
case itemSub:
|
||||
op.Type = opSub
|
||||
}
|
||||
|
||||
op.Fields = op.fieldsA[:0]
|
||||
op.Args = op.argsA[:0]
|
||||
|
||||
var err error
|
||||
|
||||
if p.peek(itemName) {
|
||||
op.Name = p.val(p.next())
|
||||
}
|
||||
|
||||
if p.peek(itemArgsOpen) {
|
||||
p.ignore()
|
||||
|
||||
op.Args, err = p.parseOpParams(op.Args)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
if p.peek(itemObjOpen) {
|
||||
p.ignore()
|
||||
|
||||
for n := 0; n < 10; n++ {
|
||||
if !p.peek(itemName) {
|
||||
break
|
||||
}
|
||||
|
||||
op.Fields, err = p.parseFields(op.Fields)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return op, nil
|
||||
}
|
||||
|
||||
func (p *Parser) parseQueryOp() (*Operation, error) {
|
||||
op := opPool.Get().(*Operation)
|
||||
op.Reset()
|
||||
|
||||
op.Type = opQuery
|
||||
op.Fields = op.fieldsA[:0]
|
||||
op.Args = op.argsA[:0]
|
||||
|
||||
var err error
|
||||
|
||||
for n := 0; n < 10; n++ {
|
||||
if !p.peek(itemName) {
|
||||
break
|
||||
}
|
||||
|
||||
op.Fields, err = p.parseFields(op.Fields)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
return op, nil
|
||||
}
|
||||
|
||||
func (p *Parser) parseFields(fields []Field) ([]Field, error) {
|
||||
st := util.NewStack()
|
||||
|
||||
for {
|
||||
if len(fields) >= maxFields {
|
||||
return nil, fmt.Errorf("too many fields (max %d)", maxFields)
|
||||
}
|
||||
|
||||
if p.peek(itemObjClose) {
|
||||
p.ignore()
|
||||
st.Pop()
|
||||
isFrag := false
|
||||
|
||||
if st.Len() == 0 {
|
||||
break
|
||||
} else {
|
||||
continue
|
||||
}
|
||||
if p.peek(itemSpread) {
|
||||
p.ignore()
|
||||
isFrag = true
|
||||
}
|
||||
|
||||
if !p.peek(itemName) {
|
||||
return nil, errors.New("expecting an alias or field name")
|
||||
if isFrag {
|
||||
return nil, fmt.Errorf("expecting a fragment name, got: %s", p.next())
|
||||
} else {
|
||||
return nil, fmt.Errorf("expecting an alias or field name, got: %s", p.next())
|
||||
}
|
||||
}
|
||||
|
||||
var f *Field
|
||||
|
||||
if isFrag {
|
||||
name := p.val(p.next())
|
||||
p.h.WriteString(name)
|
||||
k := p.h.Sum64()
|
||||
p.h.Reset()
|
||||
|
||||
fr, ok := p.frags[k]
|
||||
if !ok {
|
||||
return nil, fmt.Errorf("no fragment named '%s' defined", name)
|
||||
}
|
||||
|
||||
n := int32(len(fields))
|
||||
fields = append(fields, fr.Fields...)
|
||||
|
||||
for i := int(n); i < len(fields); i++ {
|
||||
f := &fields[i]
|
||||
f.ID = int32(i)
|
||||
|
||||
// If this is the top-level point the parent to the parent of the
|
||||
// previous field.
|
||||
if f.ParentID == -1 {
|
||||
pid := st.Peek()
|
||||
f.ParentID = pid
|
||||
if f.ParentID != -1 {
|
||||
fields[pid].Children = append(fields[f.ParentID].Children, f.ID)
|
||||
}
|
||||
// Update all the other parents id's by our new place in this new array
|
||||
} else {
|
||||
f.ParentID += n
|
||||
}
|
||||
|
||||
// Update all the children which is needed.
|
||||
for j := range f.Children {
|
||||
f.Children[j] += n
|
||||
}
|
||||
}
|
||||
|
||||
} else {
|
||||
fields = append(fields, Field{ID: int32(len(fields))})
|
||||
|
||||
f := &fields[(len(fields) - 1)]
|
||||
f = &fields[(len(fields) - 1)]
|
||||
f.Args = f.argsA[:0]
|
||||
f.Children = f.childrenA[:0]
|
||||
|
||||
// Parse the inside of the the fields () parentheses
|
||||
// in short parse the args like id, where, etc
|
||||
// Parse the field
|
||||
if err := p.parseField(f); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
intf := st.Peek()
|
||||
if pid, ok := intf.(int32); ok {
|
||||
if st.Len() == 0 {
|
||||
f.ParentID = -1
|
||||
} else {
|
||||
pid := st.Peek()
|
||||
f.ParentID = pid
|
||||
fields[pid].Children = append(fields[pid].Children, f.ID)
|
||||
} else {
|
||||
f.ParentID = -1
|
||||
}
|
||||
}
|
||||
|
||||
// The first opening curley brackets after this
|
||||
@ -339,13 +430,6 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
|
||||
if p.peek(itemObjOpen) {
|
||||
p.ignore()
|
||||
st.Push(f.ID)
|
||||
|
||||
} else if p.peek(itemObjClose) {
|
||||
if st.Len() == 0 {
|
||||
break
|
||||
} else {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -385,7 +469,7 @@ func (p *Parser) parseOpParams(args []Arg) ([]Arg, error) {
|
||||
return nil, fmt.Errorf("too many args (max %d)", maxArgs)
|
||||
}
|
||||
|
||||
if p.peek(itemArgsClose) {
|
||||
if p.peek(itemEOF, itemArgsClose) {
|
||||
p.ignore()
|
||||
break
|
||||
}
|
||||
@ -403,7 +487,7 @@ func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
|
||||
return nil, fmt.Errorf("too many args (max %d)", maxArgs)
|
||||
}
|
||||
|
||||
if p.peek(itemArgsClose) {
|
||||
if p.peek(itemEOF, itemArgsClose) {
|
||||
p.ignore()
|
||||
break
|
||||
}
|
||||
@ -470,7 +554,7 @@ func (p *Parser) parseObj() (*Node, error) {
|
||||
parent.Reset()
|
||||
|
||||
for {
|
||||
if p.peek(itemObjClose) {
|
||||
if p.peek(itemEOF, itemObjClose) {
|
||||
p.ignore()
|
||||
break
|
||||
}
|
||||
@ -545,6 +629,62 @@ func (p *Parser) vall(v item) string {
|
||||
return b2s(p.input[v.pos:v.end])
|
||||
}
|
||||
|
||||
func (p *Parser) peek(types ...itemType) bool {
|
||||
n := p.pos + 1
|
||||
l := len(types)
|
||||
// if p.items[n]._type == itemEOF {
|
||||
// return false
|
||||
// }
|
||||
|
||||
if n >= len(p.items) {
|
||||
return types[0] == itemEOF
|
||||
}
|
||||
|
||||
if l == 1 {
|
||||
return p.items[n]._type == types[0]
|
||||
}
|
||||
|
||||
for i := 0; i < l; i++ {
|
||||
if p.items[n]._type == types[i] {
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
func (p *Parser) next() item {
|
||||
n := p.pos + 1
|
||||
if n >= len(p.items) {
|
||||
p.err = errEOT
|
||||
return item{_type: itemEOF}
|
||||
}
|
||||
p.pos = n
|
||||
return p.items[p.pos]
|
||||
}
|
||||
|
||||
func (p *Parser) ignore() {
|
||||
n := p.pos + 1
|
||||
if n >= len(p.items) {
|
||||
p.err = errEOT
|
||||
return
|
||||
}
|
||||
p.pos = n
|
||||
}
|
||||
|
||||
func (p *Parser) peekCurrent() string {
|
||||
item := p.items[p.pos]
|
||||
return b2s(p.input[item.pos:item.end])
|
||||
}
|
||||
|
||||
func (p *Parser) peekNext() string {
|
||||
item := p.items[p.pos+1]
|
||||
return b2s(p.input[item.pos:item.end])
|
||||
}
|
||||
|
||||
func (p *Parser) reset(to int) {
|
||||
p.pos = to
|
||||
}
|
||||
|
||||
func b2s(b []byte) string {
|
||||
return *(*string)(unsafe.Pointer(&b))
|
||||
}
|
||||
@ -578,7 +718,7 @@ func (t parserType) String() string {
|
||||
case NodeList:
|
||||
v = "node-list"
|
||||
}
|
||||
return fmt.Sprintf("<%s>", v)
|
||||
return v
|
||||
}
|
||||
|
||||
// type Frees struct {
|
||||
|
@ -2,8 +2,9 @@ package qcode
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"github.com/chirino/graphql/schema"
|
||||
"testing"
|
||||
|
||||
"github.com/chirino/graphql/schema"
|
||||
)
|
||||
|
||||
func TestCompile1(t *testing.T) {
|
||||
@ -120,7 +121,7 @@ updateThread {
|
||||
}
|
||||
}
|
||||
}
|
||||
}`
|
||||
}}`
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
_, err := qcompile.Compile([]byte(gql), "anon")
|
||||
|
||||
@ -130,6 +131,93 @@ updateThread {
|
||||
|
||||
}
|
||||
|
||||
func TestFragmentsCompile1(t *testing.T) {
|
||||
gql := `
|
||||
fragment userFields1 on user {
|
||||
id
|
||||
email
|
||||
}
|
||||
|
||||
query {
|
||||
users {
|
||||
...userFields2
|
||||
|
||||
created_at
|
||||
...userFields1
|
||||
}
|
||||
}
|
||||
|
||||
fragment userFields2 on user {
|
||||
first_name
|
||||
last_name
|
||||
}
|
||||
`
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
_, err := qcompile.Compile([]byte(gql), "user")
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFragmentsCompile2(t *testing.T) {
|
||||
gql := `
|
||||
query {
|
||||
users {
|
||||
...userFields2
|
||||
|
||||
created_at
|
||||
...userFields1
|
||||
}
|
||||
}
|
||||
|
||||
fragment userFields1 on user {
|
||||
id
|
||||
email
|
||||
}
|
||||
|
||||
fragment userFields2 on user {
|
||||
first_name
|
||||
last_name
|
||||
}`
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
_, err := qcompile.Compile([]byte(gql), "user")
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
func TestFragmentsCompile3(t *testing.T) {
|
||||
gql := `
|
||||
fragment userFields1 on user {
|
||||
id
|
||||
email
|
||||
}
|
||||
|
||||
fragment userFields2 on user {
|
||||
first_name
|
||||
last_name
|
||||
}
|
||||
|
||||
query {
|
||||
users {
|
||||
...userFields2
|
||||
|
||||
created_at
|
||||
...userFields1
|
||||
}
|
||||
}
|
||||
|
||||
`
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
_, err := qcompile.Compile([]byte(gql), "user")
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
||||
|
||||
var gql = []byte(`
|
||||
{products(
|
||||
# returns only 30 items
|
||||
@ -184,7 +272,6 @@ func BenchmarkQCompileP(b *testing.B) {
|
||||
}
|
||||
|
||||
func BenchmarkParse(b *testing.B) {
|
||||
|
||||
b.ResetTimer()
|
||||
b.ReportAllocs()
|
||||
for n := 0; n < b.N; n++ {
|
||||
|
@ -172,6 +172,8 @@ const (
|
||||
type Compiler struct {
|
||||
tr map[string]map[string]*trval
|
||||
bl map[string]struct{}
|
||||
|
||||
defBlock bool
|
||||
}
|
||||
|
||||
var expPool = sync.Pool{
|
||||
@ -179,7 +181,7 @@ var expPool = sync.Pool{
|
||||
}
|
||||
|
||||
func NewCompiler(c Config) (*Compiler, error) {
|
||||
co := &Compiler{}
|
||||
co := &Compiler{defBlock: c.DefaultBlock}
|
||||
co.tr = make(map[string]map[string]*trval)
|
||||
co.bl = make(map[string]struct{}, len(c.Blocklist))
|
||||
|
||||
@ -358,7 +360,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
}
|
||||
|
||||
} else if role == "anon" {
|
||||
skipRender = true
|
||||
skipRender = com.defBlock
|
||||
}
|
||||
|
||||
selects = append(selects, Select{
|
||||
|
core/prepare.go
@ -2,126 +2,94 @@ package core
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"crypto/sha256"
|
||||
"database/sql"
|
||||
"encoding/hex"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"io"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"github.com/dosco/super-graph/core/internal/allow"
|
||||
"github.com/dosco/super-graph/core/internal/qcode"
|
||||
)
|
||||
|
||||
type preparedItem struct {
|
||||
type query struct {
|
||||
sync.Once
|
||||
sd *sql.Stmt
|
||||
ai allow.Item
|
||||
qt qcode.QType
|
||||
err error
|
||||
st stmt
|
||||
roleArg bool
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) initPrepared() error {
|
||||
ct := context.Background()
|
||||
func (sg *SuperGraph) prepare(q *query, role string) {
|
||||
var stmts []stmt
|
||||
var err error
|
||||
|
||||
qb := []byte(q.ai.Query)
|
||||
|
||||
switch q.qt {
|
||||
case qcode.QTQuery:
|
||||
if sg.abacEnabled {
|
||||
stmts, err = sg.buildMultiStmt(qb, q.ai.Vars)
|
||||
} else {
|
||||
stmts, err = sg.buildRoleStmt(qb, q.ai.Vars, role)
|
||||
}
|
||||
|
||||
case qcode.QTMutation:
|
||||
stmts, err = sg.buildRoleStmt(qb, q.ai.Vars, role)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
sg.log.Printf("WRN %s %s: %v", q.qt, q.ai.Name, err)
|
||||
}
|
||||
|
||||
q.st = stmts[0]
|
||||
q.roleArg = len(stmts) > 1
|
||||
|
||||
q.sd, err = sg.db.Prepare(q.st.sql)
|
||||
if err != nil {
|
||||
q.err = fmt.Errorf("prepare failed: %v: %s", err, q.st.sql)
|
||||
}
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) initPrepared() error {
|
||||
if sg.allowList.IsPersist() {
|
||||
return nil
|
||||
}
|
||||
sg.prepared = make(map[string]*preparedItem)
|
||||
|
||||
tx, err := sg.db.BeginTx(ct, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer tx.Rollback() //nolint: errcheck
|
||||
|
||||
if err = sg.prepareRoleStmt(tx); err != nil {
|
||||
return fmt.Errorf("prepareRoleStmt: %w", err)
|
||||
if err := sg.prepareRoleStmt(); err != nil {
|
||||
return fmt.Errorf("role query: %w", err)
|
||||
}
|
||||
|
||||
if err := tx.Commit(); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
success := 0
|
||||
sg.queries = make(map[uint64]query)
|
||||
|
||||
list, err := sg.allowList.Load()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
h := maphash.Hash{}
|
||||
h.SetSeed(sg.hashSeed)
|
||||
|
||||
for _, v := range list {
|
||||
if len(v.Query) == 0 {
|
||||
continue
|
||||
}
|
||||
qt := qcode.GetQType(v.Query)
|
||||
|
||||
err := sg.prepareStmt(v)
|
||||
if err != nil {
|
||||
sg.log.Printf("WRN %s: %v", v.Name, err)
|
||||
} else {
|
||||
success++
|
||||
}
|
||||
}
|
||||
|
||||
sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
||||
query := item.Query
|
||||
qb := []byte(query)
|
||||
vars := item.Vars
|
||||
|
||||
qt := qcode.GetQType(query)
|
||||
ct := context.Background()
|
||||
switch qt {
|
||||
case qcode.QTQuery:
|
||||
var stmts1 []stmt
|
||||
var err error
|
||||
|
||||
if sg.abacEnabled {
|
||||
stmts1, err = sg.buildMultiStmt(qb, vars)
|
||||
} else {
|
||||
stmts1, err = sg.buildRoleStmt(qb, vars, "user")
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
|
||||
|
||||
err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sg.queries[queryID(&h, v.Name, "user")] = query{ai: v, qt: qt}
|
||||
|
||||
if sg.anonExists {
|
||||
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
|
||||
|
||||
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sg.queries[queryID(&h, v.Name, "anon")] = query{ai: v, qt: qt}
|
||||
}
|
||||
|
||||
case qcode.QTMutation:
|
||||
for _, role := range sg.conf.Roles {
|
||||
// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
|
||||
|
||||
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
|
||||
if err != nil {
|
||||
return err
|
||||
sg.queries[queryID(&h, v.Name, role.Name)] = query{ai: v, qt: qt}
|
||||
}
|
||||
}
|
||||
}
|
||||
@ -129,22 +97,8 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
|
||||
sd, err := sg.db.PrepareContext(ct, st[0].sql)
|
||||
if err != nil {
|
||||
return fmt.Errorf("prepare failed: %v: %s", err, st[0].sql)
|
||||
}
|
||||
|
||||
sg.prepared[key] = &preparedItem{
|
||||
sd: sd,
|
||||
st: st[0],
|
||||
roleArg: len(st) > 1,
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
// nolint: errcheck
|
||||
func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
||||
func (sg *SuperGraph) prepareRoleStmt() error {
|
||||
var err error
|
||||
|
||||
if !sg.abacEnabled {
|
||||
@ -171,11 +125,11 @@ func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
||||
}
|
||||
|
||||
io.WriteString(w, ` ELSE $2 END) FROM (`)
|
||||
io.WriteString(w, sg.conf.RolesQuery)
|
||||
io.WriteString(w, rq)
|
||||
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
|
||||
io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler" LIMIT 1; `)
|
||||
|
||||
sg.getRole, err = tx.Prepare(w.String())
|
||||
sg.getRole, err = sg.db.Prepare(w.String())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -187,15 +141,14 @@ func (sg *SuperGraph) initAllowList() error {
|
||||
var ac allow.Config
|
||||
var err error
|
||||
|
||||
if len(sg.conf.AllowListFile) == 0 {
|
||||
sg.conf.UseAllowList = false
|
||||
sg.log.Printf("WRN allow list disabled no file specified")
|
||||
if sg.conf.AllowListFile == "" {
|
||||
sg.conf.AllowListFile = "allow.list"
|
||||
}
|
||||
|
||||
// When the allow list is not enabled it is still created and
// new queries are saved to it.
|
||||
if !sg.conf.UseAllowList {
|
||||
ac = allow.Config{CreateIfNotExists: true, Persist: true}
|
||||
ac = allow.Config{CreateIfNotExists: true, Persist: true, Log: sg.log}
|
||||
}
|
||||
|
||||
sg.allowList, err = allow.New(sg.conf.AllowListFile, ac)
|
||||
@ -207,9 +160,11 @@ func (sg *SuperGraph) initAllowList() error {
|
||||
}
|
||||
|
||||
// nolint: errcheck
|
||||
func stmtHash(name string, role string) string {
|
||||
h := sha256.New()
|
||||
io.WriteString(h, strings.ToLower(name))
|
||||
io.WriteString(h, role)
|
||||
return hex.EncodeToString(h.Sum(nil))
|
||||
func queryID(h *maphash.Hash, name string, role string) uint64 {
|
||||
h.WriteString(name)
|
||||
h.WriteString(role)
|
||||
v := h.Sum64()
|
||||
h.Reset()
|
||||
|
||||
return v
|
||||
}
|
||||
|
@ -4,10 +4,10 @@ import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"net/http"
|
||||
"sync"
|
||||
|
||||
"github.com/cespare/xxhash/v2"
|
||||
"github.com/dosco/super-graph/core/internal/qcode"
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
)
|
||||
@ -16,12 +16,13 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
|
||||
var err error
|
||||
|
||||
sel := st.qc.Selects
|
||||
h := xxhash.New()
|
||||
h := maphash.Hash{}
|
||||
h.SetSeed(sg.hashSeed)
|
||||
|
||||
// fetch the field name used within the db response json
|
||||
// that are used to mark insertion points and the mapping between
|
||||
// those field names and their select objects
|
||||
fids, sfmap := sg.parentFieldIds(h, sel, st.md.Skipped)
|
||||
fids, sfmap := sg.parentFieldIds(&h, sel, st.md.Skipped())
|
||||
|
||||
// fetch the field values of the marked insertion points
|
||||
// these values contain the id to be used with fetching remote data
|
||||
@ -30,10 +31,10 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
|
||||
|
||||
switch {
|
||||
case len(from) == 1:
|
||||
to, err = sg.resolveRemote(hdr, h, from[0], sel, sfmap)
|
||||
to, err = sg.resolveRemote(hdr, &h, from[0], sel, sfmap)
|
||||
|
||||
case len(from) > 1:
|
||||
to, err = sg.resolveRemotes(hdr, h, from, sel, sfmap)
|
||||
to, err = sg.resolveRemotes(hdr, &h, from, sel, sfmap)
|
||||
|
||||
default:
|
||||
return nil, errors.New("something wrong no remote ids found in db response")
|
||||
@ -55,7 +56,7 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
|
||||
|
||||
func (sg *SuperGraph) resolveRemote(
|
||||
hdr http.Header,
|
||||
h *xxhash.Digest,
|
||||
h *maphash.Hash,
|
||||
field jsn.Field,
|
||||
sel []qcode.Select,
|
||||
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
|
||||
@ -66,7 +67,8 @@ func (sg *SuperGraph) resolveRemote(
|
||||
to := toA[:1]
|
||||
|
||||
// use the json key to find the related Select object
|
||||
k1 := xxhash.Sum64(field.Key)
|
||||
_, _ = h.Write(field.Key)
|
||||
k1 := h.Sum64()
|
||||
|
||||
s, ok := sfmap[k1]
|
||||
if !ok {
|
||||
@ -117,7 +119,7 @@ func (sg *SuperGraph) resolveRemote(
|
||||
|
||||
func (sg *SuperGraph) resolveRemotes(
|
||||
hdr http.Header,
|
||||
h *xxhash.Digest,
|
||||
h *maphash.Hash,
|
||||
from []jsn.Field,
|
||||
sel []qcode.Select,
|
||||
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
|
||||
@ -134,7 +136,8 @@ func (sg *SuperGraph) resolveRemotes(
|
||||
for i, id := range from {
|
||||
|
||||
// use the json key to find the related Select object
|
||||
k1 := xxhash.Sum64(id.Key)
|
||||
_, _ = h.Write(id.Key)
|
||||
k1 := h.Sum64()
|
||||
|
||||
s, ok := sfmap[k1]
|
||||
if !ok {
|
||||
@ -192,7 +195,7 @@ func (sg *SuperGraph) resolveRemotes(
|
||||
return to, cerr
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
|
||||
func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skipped uint32) (
|
||||
[][]byte,
|
||||
map[uint64]*qcode.Select) {
|
||||
|
||||
@ -227,8 +230,8 @@ func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipp
|
||||
fm[n] = r.IDField
|
||||
n++
|
||||
|
||||
k := xxhash.Sum64(r.IDField)
|
||||
sm[k] = s
|
||||
_, _ = h.Write(r.IDField)
|
||||
sm[h.Sum64()] = s
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2,11 +2,11 @@ package core
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"hash/maphash"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/cespare/xxhash/v2"
|
||||
"github.com/dosco/super-graph/core/internal/psql"
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
)
|
||||
@ -19,7 +19,7 @@ type resolvFn struct {
|
||||
|
||||
func (sg *SuperGraph) initResolvers() error {
|
||||
var err error
|
||||
sg.rmap = make(map[uint64]*resolvFn)
|
||||
sg.rmap = make(map[uint64]resolvFn)
|
||||
|
||||
for _, t := range sg.conf.Tables {
|
||||
err = sg.initRemotes(t)
|
||||
@ -36,7 +36,8 @@ func (sg *SuperGraph) initResolvers() error {
|
||||
}
|
||||
|
||||
func (sg *SuperGraph) initRemotes(t Table) error {
|
||||
h := xxhash.New()
|
||||
h := maphash.Hash{}
|
||||
h.SetSeed(sg.hashSeed)
|
||||
|
||||
for _, r := range t.Remotes {
|
||||
// defines the table column to be used as an id in the
|
||||
@ -75,17 +76,18 @@ func (sg *SuperGraph) initRemotes(t Table) error {
|
||||
path = append(path, []byte(p))
|
||||
}
|
||||
|
||||
rf := &resolvFn{
|
||||
rf := resolvFn{
|
||||
IDField: []byte(idk),
|
||||
Path: path,
|
||||
Fn: fn,
|
||||
}
|
||||
|
||||
// index resolver obj by parent and child names
|
||||
sg.rmap[mkkey(h, r.Name, t.Name)] = rf
|
||||
sg.rmap[mkkey(&h, r.Name, t.Name)] = rf
|
||||
|
||||
// index resolver obj by IDField
|
||||
sg.rmap[xxhash.Sum64(rf.IDField)] = rf
|
||||
_, _ = h.Write(rf.IDField)
|
||||
sg.rmap[h.Sum64()] = rf
|
||||
}
|
||||
|
||||
return nil
|
||||
|
@ -1,11 +1,9 @@
|
||||
package core
|
||||
|
||||
import (
|
||||
"github.com/cespare/xxhash/v2"
|
||||
)
|
||||
import "hash/maphash"
|
||||
|
||||
// nolint: errcheck
|
||||
func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
|
||||
func mkkey(h *maphash.Hash, k1 string, k2 string) uint64 {
|
||||
h.WriteString(k1)
|
||||
h.WriteString(k2)
|
||||
v := h.Sum64()
|
||||
|
docs/website/docs/seed.md (new file)
@ -0,0 +1,249 @@
|
||||
---
|
||||
id: seed
|
||||
title: Database Seeding
|
||||
sidebar_label: Seed Scripts
|
||||
---
|
||||
|
||||
While developing it's often useful to have fake data available in the database. Fake data can help with building the UI and save you time when trying to get the GraphQL query correct. Super Graph has the ability to do this for you. All you have to do is write a seed script `config/seed.js` (in JavaScript) and use the `db:seed` command line option. Below are examples of the kinds of things you can do in a seed script.
|
||||
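A seed script is plain JavaScript run from top to bottom; helpers like `graphql`, `fake`, `util.make_slug` and `import_csv` (all used in the examples below) are injected into the script's scope. A minimal sketch, assuming your schema has a `users` table with an `email` column:

```javascript
// config/seed.js -- minimal sketch; the "users" table and "email" column are assumptions.
var res = graphql(" \
  mutation { \
    user(insert: $data) { \
      id \
    } \
  }", { data: { email: fake.email() } });

// res.user.id now holds the id of the newly inserted row.
```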
|
||||
## Creating fake users
|
||||
|
||||
Since all mutations and queries are in standard GraphQL you can use all the features available in Super Graph GraphQL.
|
||||
|
||||
```javascript
|
||||
var users = [];
|
||||
|
||||
for (i = 0; i < 20; i++) {
|
||||
var data = {
|
||||
slug: util.make_slug(fake.first_name() + "-" + fake.last_name()),
|
||||
first_name: fake.first_name(),
|
||||
last_name: fake.last_name(),
|
||||
picture_url: fake.avatar_url(),
|
||||
email: fake.email(),
|
||||
bio: fake.sentence(10),
|
||||
};
|
||||
|
||||
var res = graphql(" \
|
||||
mutation { \
|
||||
user(insert: $data) { \
|
||||
id \
|
||||
} \
|
||||
}", { data: data });
|
||||
|
||||
users.push(res.user);
|
||||
}
|
||||
```
|
||||
|
||||
## Inserting the users fake blog posts
|
||||
|
||||
Another example highlighting how the `connect` syntax of Super Graph GraphQL can be used to connect inserted posts
to random users that were previously created. For further details check out the [seed script](/seed) documentation.
|
||||
|
||||
```javascript
|
||||
var posts = [];
|
||||
|
||||
for (i = 0; i < 1500; i++) {
|
||||
var user = users[Math.floor(Math.random() * users.length)];
|
||||
|
||||
var data = {
|
||||
slug: util.make_slug(fake.sentence(3) + i),
|
||||
body: fake.sentence(100),
|
||||
published: true,
|
||||
thread: {
|
||||
connect: { user: user.id }
|
||||
}
|
||||
}
|
||||
|
||||
var res = graphql(" \
|
||||
mutation { \
|
||||
post(insert: $data) { \
|
||||
id \
|
||||
} \
|
||||
}",
|
||||
{ data: data },
|
||||
{ user_id: user.id })
|
||||
|
||||
posts.push(res.post.slug)
|
||||
}
|
||||
```
|
||||
|
||||
## Insert a large number of rows efficiently
|
||||
|
||||
This feature uses the `COPY` functionality available in Postgres, which is the best way to
insert a large number of rows into a table. The `import_csv` function reads in a CSV file, using the first
line of the file as column names.
|
||||
|
||||
```javascript
|
||||
import_csv("post_tags", "./tags.csv");
|
||||
```
|
||||
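For example, assuming a hypothetical `./tags.csv` whose first line names the columns of the `post_tags` table (the file contents below are made up for illustration), the result can be assigned to a variable just like the `post_count` example elsewhere in these docs:

```javascript
// Hypothetical ./tags.csv (first line supplies the column names):
//
//   post_id,tag
//   1,graphql
//   2,postgres
//
// Bulk-load it into the post_tags table using Postgres COPY.
var tag_count = import_csv("post_tags", "./tags.csv");
```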
|
||||
## A list of fake data functions available to you.
|
||||
|
||||
```
|
||||
person
|
||||
name
|
||||
name_prefix
|
||||
name_suffix
|
||||
first_name
|
||||
last_name
|
||||
gender
|
||||
ssn
|
||||
contact
|
||||
email
|
||||
phone
|
||||
phone_formatted
|
||||
username
|
||||
password
|
||||
|
||||
// Address
|
||||
address
|
||||
city
|
||||
country
|
||||
country_abr
|
||||
state
|
||||
state_abr
|
||||
street
|
||||
street_name
|
||||
street_number
|
||||
street_prefix
|
||||
street_suffix
|
||||
zip
|
||||
latitude
|
||||
latitude_in_range
|
||||
longitude
|
||||
longitude_in_range
|
||||
|
||||
// Beer
|
||||
beer_alcohol
|
||||
beer_hop
|
||||
beer_ibu
|
||||
beer_blg
|
||||
beer_malt
|
||||
beer_name
|
||||
beer_style
|
||||
beer_yeast
|
||||
|
||||
// Cars
|
||||
car
|
||||
car_type
|
||||
car_maker
|
||||
car_model
|
||||
|
||||
// Text
|
||||
word
|
||||
sentence
|
||||
paragraph
|
||||
question
|
||||
quote
|
||||
|
||||
// Misc
|
||||
generate
|
||||
boolean
|
||||
uuid
|
||||
|
||||
// Colors
|
||||
color
|
||||
hex_color
|
||||
rgb_color
|
||||
safe_color
|
||||
|
||||
// Internet
|
||||
url
|
||||
image_url
|
||||
avatar_url
|
||||
domain_name
|
||||
domain_suffix
|
||||
ipv4_address
|
||||
ipv6_address
|
||||
http_method
|
||||
user_agent
|
||||
user_agent_firefox
|
||||
user_agent_chrome
|
||||
user_agent_opera
|
||||
user_agent_safari
|
||||
|
||||
// Date / Time
|
||||
date
|
||||
date_range
|
||||
nano_second
|
||||
second
|
||||
minute
|
||||
hour
|
||||
month
|
||||
day
|
||||
weekday
|
||||
year
|
||||
timezone
|
||||
timezone_abv
|
||||
timezone_full
|
||||
timezone_offset
|
||||
|
||||
// Payment
|
||||
price
|
||||
credit_card
|
||||
credit_card_cvv
|
||||
credit_card_number
|
||||
credit_card_type
|
||||
currency
|
||||
currency_long
|
||||
currency_short
|
||||
|
||||
// Company
|
||||
bs
|
||||
buzzword
|
||||
company
|
||||
company_suffix
|
||||
job
|
||||
job_description
|
||||
job_level
|
||||
job_title
|
||||
|
||||
// Hacker
|
||||
hacker_abbreviation
|
||||
hacker_adjective
|
||||
hacker_noun
|
||||
hacker_phrase
|
||||
hacker_verb
|
||||
|
||||
//Hipster
|
||||
hipster_word
|
||||
hipster_paragraph
|
||||
hipster_sentence
|
||||
|
||||
// File
|
||||
file_extension
|
||||
file_mine_type
|
||||
|
||||
// Numbers
|
||||
number
|
||||
numerify
|
||||
int8
|
||||
int16
|
||||
int32
|
||||
int64
|
||||
uint8
|
||||
uint16
|
||||
uint32
|
||||
uint64
|
||||
float32
|
||||
float32_range
|
||||
float64
|
||||
float64_range
|
||||
shuffle_ints
|
||||
mac_address
|
||||
|
||||
// String
|
||||
digit
|
||||
letter
|
||||
lexify
|
||||
rand_string
|
||||
numerify
|
||||
```
|
||||
|
||||
## Some more utility functions
|
||||
|
||||
```
|
||||
shuffle_strings(string_array)
|
||||
make_slug(text)
|
||||
make_slug_lang(text, lang)
|
||||
```
|
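A quick sketch of how these combine with the fake data helpers (the `"de"` language code is just an example value; the namespaces follow the earlier examples, where `make_slug` lives under `util` and `shuffle_strings` under `fake`):

```javascript
// Shuffle a list of candidate tags and build slugs from fake sentences.
var tags = fake.shuffle_strings(["graphql", "postgres", "golang"]);
var slug = util.make_slug(fake.sentence(3));
var slug_de = util.make_slug_lang(fake.sentence(3), "de");
```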
@ -96,179 +96,6 @@ var post_count = import_csv("posts", "posts.csv");
|
||||
|
||||
You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example `fake.image_url()` will generate a fake image url or `fake.shuffle_strings(['hello', 'world', 'cool'])` will generate a randomly shuffled version of that array of strings or `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided.
|
||||
|
||||
```
|
||||
// Person
|
||||
person
|
||||
name
|
||||
name_prefix
|
||||
name_suffix
|
||||
first_name
|
||||
last_name
|
||||
gender
|
||||
ssn
|
||||
contact
|
||||
email
|
||||
phone
|
||||
phone_formatted
|
||||
username
|
||||
password
|
||||
|
||||
// Address
|
||||
address
|
||||
city
|
||||
country
|
||||
country_abr
|
||||
state
|
||||
state_abr
|
||||
status_code
|
||||
street
|
||||
street_name
|
||||
street_number
|
||||
street_prefix
|
||||
street_suffix
|
||||
zip
|
||||
latitude
|
||||
latitude_in_range
|
||||
longitude
|
||||
longitude_in_range
|
||||
|
||||
// Beer
|
||||
beer_alcohol
|
||||
beer_hop
|
||||
beer_ibu
|
||||
beer_blg
|
||||
beer_malt
|
||||
beer_name
|
||||
beer_style
|
||||
beer_yeast
|
||||
|
||||
// Cars
|
||||
car
|
||||
car_type
|
||||
car_maker
|
||||
car_model
|
||||
|
||||
// Text
|
||||
word
|
||||
sentence
|
||||
paragraph
|
||||
question
|
||||
quote
|
||||
|
||||
// Misc
|
||||
generate
|
||||
boolean
|
||||
uuid
|
||||
|
||||
// Colors
|
||||
color
|
||||
hex_color
|
||||
rgb_color
|
||||
safe_color
|
||||
|
||||
// Internet
|
||||
url
|
||||
image_url
|
||||
domain_name
|
||||
domain_suffix
|
||||
ipv4_address
|
||||
ipv6_address
|
||||
simple_status_code
|
||||
http_method
|
||||
user_agent
|
||||
user_agent_firefox
|
||||
user_agent_chrome
|
||||
user_agent_opera
|
||||
user_agent_safari
|
||||
|
||||
// Date / Time
|
||||
date
|
||||
date_range
|
||||
nano_second
|
||||
second
|
||||
minute
|
||||
hour
|
||||
month
|
||||
day
|
||||
weekday
|
||||
year
|
||||
timezone
|
||||
timezone_abv
|
||||
timezone_full
|
||||
timezone_offset
|
||||
|
||||
// Payment
|
||||
price
|
||||
credit_card
|
||||
credit_card_cvv
|
||||
credit_card_number
|
||||
credit_card_number_luhn
|
||||
credit_card_type
|
||||
currency
|
||||
currency_long
|
||||
currency_short
|
||||
|
||||
// Company
|
||||
bs
|
||||
buzzword
|
||||
company
|
||||
company_suffix
|
||||
job
|
||||
job_description
|
||||
job_level
|
||||
job_title
|
||||
|
||||
// Hacker
|
||||
hacker_abbreviation
|
||||
hacker_adjective
|
||||
hacker_ingverb
|
||||
hacker_noun
|
||||
hacker_phrase
|
||||
hacker_verb
|
||||
|
||||
//Hipster
|
||||
hipster_word
|
||||
hipster_paragraph
|
||||
hipster_sentence
|
||||
|
||||
// File
|
||||
file_extension
|
||||
file_mine_type
|
||||
|
||||
// Numbers
|
||||
number
|
||||
numerify
|
||||
int8
|
||||
int16
|
||||
int32
|
||||
int64
|
||||
uint8
|
||||
uint16
|
||||
uint32
|
||||
uint64
|
||||
float32
|
||||
float32_range
|
||||
float64
|
||||
float64_range
|
||||
shuffle_ints
|
||||
mac_address
|
||||
|
||||
//String
|
||||
digit
|
||||
letter
|
||||
lexify
|
||||
shuffle_strings
|
||||
numerify
|
||||
```
|
||||
|
||||
Other utility functions
|
||||
|
||||
```
|
||||
shuffle_strings(string_array)
|
||||
make_slug(text)
|
||||
make_slug_lang(text, lang)
|
||||
```
|
||||
|
||||
### Migrations
|
||||
|
||||
Easy database migrations are essential when building products backed by a relational database. We make it super easy to manage and migrate your database.
|
||||
|
@ -36,8 +36,8 @@ module.exports = {
|
||||
position: "left",
|
||||
},
|
||||
{
|
||||
label: "Art Compute",
|
||||
href: "https://artcompute.com/s/super-graph",
|
||||
label: "AbtCode",
|
||||
href: "https://abtcode.com/s/super-graph",
|
||||
position: "left",
|
||||
},
|
||||
],
|
||||
|
@ -11,6 +11,7 @@ module.exports = {
|
||||
"security",
|
||||
"telemetry",
|
||||
"config",
|
||||
"seed",
|
||||
"deploy",
|
||||
"internals",
|
||||
],
|
||||
|
@ -1805,11 +1805,6 @@ asynckit@^0.4.0:
|
||||
resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
|
||||
integrity sha1-x57Zf380y48robyXkLzDZkdLS3k=
|
||||
|
||||
at-least-node@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/at-least-node/-/at-least-node-1.0.0.tgz#602cd4b46e844ad4effc92a8011a3c46e0238dc2"
|
||||
integrity sha512-+q/t7Ekv1EDY2l6Gda6LLiX14rU9TV20Wa3ofeQmwPFZbOMo9DXrLbOjFaaclkXKWidIaopwAObQDqwWtGUjqg==
|
||||
|
||||
atob@^2.1.2:
|
||||
version "2.1.2"
|
||||
resolved "https://registry.yarnpkg.com/atob/-/atob-2.1.2.tgz#6d9517eb9e030d2436666651e86bd9f6f13533c9"
|
||||
@ -2323,7 +2318,7 @@ ccount@^1.0.0, ccount@^1.0.3:
|
||||
resolved "https://registry.yarnpkg.com/ccount/-/ccount-1.0.5.tgz#ac82a944905a65ce204eb03023157edf29425c17"
|
||||
integrity sha512-MOli1W+nfbPLlKEhInaxhRdp7KVLFxLN5ykwzHgLsLI3H3gs5jjFAK4Eoj3OzzcxCtumDaI8onoVDeQyWaNTkw==
|
||||
|
||||
chalk@2.4.2, chalk@^2.0.0, chalk@^2.0.1, chalk@^2.4.1, chalk@^2.4.2:
|
||||
chalk@2.4.2, chalk@^2.0.0, chalk@^2.4.1, chalk@^2.4.2:
|
||||
version "2.4.2"
|
||||
resolved "https://registry.yarnpkg.com/chalk/-/chalk-2.4.2.tgz#cd42541677a54333cf541a49108c1432b44c9424"
|
||||
integrity sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==
|
||||
@ -2522,15 +2517,6 @@ cliui@^5.0.0:
|
||||
strip-ansi "^5.2.0"
|
||||
wrap-ansi "^5.1.0"
|
||||
|
||||
cliui@^6.0.0:
|
||||
version "6.0.0"
|
||||
resolved "https://registry.yarnpkg.com/cliui/-/cliui-6.0.0.tgz#511d702c0c4e41ca156d7d0e96021f23e13225b1"
|
||||
integrity sha512-t6wbgtoCXvAzst7QgXxJYqPt0usEfbgQdftEPbLL/cvv6HPE5VgvqCuAIDR0NgU52ds6rFwqrgakNLrHEjCbrQ==
|
||||
dependencies:
|
||||
string-width "^4.2.0"
|
||||
strip-ansi "^6.0.0"
|
||||
wrap-ansi "^6.2.0"
|
||||
|
||||
coa@^2.0.2:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/coa/-/coa-2.0.2.tgz#43f6c21151b4ef2bf57187db0d73de229e3e7ec3"
|
||||
@ -3216,11 +3202,6 @@ depd@~1.1.2:
|
||||
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
|
||||
integrity sha1-m81S4UwJd2PnSbJ0xDRu0uVgtak=
|
||||
|
||||
dependency-graph@^0.9.0:
|
||||
version "0.9.0"
|
||||
resolved "https://registry.yarnpkg.com/dependency-graph/-/dependency-graph-0.9.0.tgz#11aed7e203bc8b00f48356d92db27b265c445318"
|
||||
integrity sha512-9YLIBURXj4DJMFALxXw9K3Y3rwb5Fk0X5/8ipCzaN84+gKxoHK43tVKRNakCQbiEx07E8Uwhuq21BpUagFhZ8w==
|
||||
|
||||
des.js@^1.0.0:
|
||||
version "1.0.1"
|
||||
resolved "https://registry.yarnpkg.com/des.js/-/des.js-1.0.1.tgz#5382142e1bdc53f85d86d53e5f4aa7deb91e0843"
|
||||
@ -3830,7 +3811,7 @@ fast-glob@^2.0.2:
|
||||
merge2 "^1.2.3"
|
||||
micromatch "^3.1.10"
|
||||
|
||||
fast-glob@^3.0.3, fast-glob@^3.1.1:
|
||||
fast-glob@^3.0.3:
|
||||
version "3.2.2"
|
||||
resolved "https://registry.yarnpkg.com/fast-glob/-/fast-glob-3.2.2.tgz#ade1a9d91148965d4bf7c51f72e1ca662d32e63d"
|
||||
integrity sha512-UDV82o4uQyljznxwMxyVRJgZZt3O5wENYojjzbaGEGZgeOxkLFf+V4cnUD+krzb2F72E18RhamkMZ7AdeggF7A==
|
||||
@ -3970,7 +3951,7 @@ find-cache-dir@^3.0.0, find-cache-dir@^3.3.1:
|
||||
make-dir "^3.0.2"
|
||||
pkg-dir "^4.1.0"
|
||||
|
||||
find-up@4.1.0, find-up@^4.0.0, find-up@^4.1.0:
|
||||
find-up@4.1.0, find-up@^4.0.0:
|
||||
version "4.1.0"
|
||||
resolved "https://registry.yarnpkg.com/find-up/-/find-up-4.1.0.tgz#97afe7d6cdc0bc5928584b7c8d7b16e8a9aa5d19"
|
||||
integrity sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==
|
||||
@ -4084,16 +4065,6 @@ fs-extra@^8.0.0, fs-extra@^8.1.0:
|
||||
jsonfile "^4.0.0"
|
||||
universalify "^0.1.0"
|
||||
|
||||
fs-extra@^9.0.0:
|
||||
version "9.0.0"
|
||||
resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-9.0.0.tgz#b6afc31036e247b2466dc99c29ae797d5d4580a3"
|
||||
integrity sha512-pmEYSk3vYsG/bF651KPUXZ+hvjpgWYw/Gc7W9NFUe3ZVLczKKWIij3IKpOrQcdw4TILtibFslZ0UmR8Vvzig4g==
|
||||
dependencies:
|
||||
at-least-node "^1.0.0"
|
||||
graceful-fs "^4.2.0"
|
||||
jsonfile "^6.0.1"
|
||||
universalify "^1.0.0"
|
||||
|
||||
fs-minipass@^2.0.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
|
||||
@ -4149,11 +4120,6 @@ get-own-enumerable-property-symbols@^3.0.0:
|
||||
resolved "https://registry.yarnpkg.com/get-own-enumerable-property-symbols/-/get-own-enumerable-property-symbols-3.0.2.tgz#b5fde77f22cbe35f390b4e089922c50bce6ef664"
|
||||
integrity sha512-I0UBV/XOz1XkIJHEUDMZAbzCThU/H8DxmSfmdGcKPnVhu2VfFqr34jr9777IyaTYvxjedWhqVIilEDsCdP5G6g==
|
||||
|
||||
get-stdin@^7.0.0:
|
||||
version "7.0.0"
|
||||
resolved "https://registry.yarnpkg.com/get-stdin/-/get-stdin-7.0.0.tgz#8d5de98f15171a125c5e516643c7a6d0ea8a96f6"
|
||||
integrity sha512-zRKcywvrXlXsA0v0i9Io4KDRaAw7+a1ZpjRwl9Wox8PFlVCCHra7E9c4kqXCoCM9nR5tBkaTTZRBoCm60bFqTQ==
|
||||
|
||||
get-stream@^4.0.0:
|
||||
version "4.1.0"
|
||||
resolved "https://registry.yarnpkg.com/get-stream/-/get-stream-4.1.0.tgz#c1b255575f3dc21d59bfc79cd3d2b46b1c3a54b5"
|
||||
@ -4275,18 +4241,6 @@ globby@^10.0.1:
|
||||
merge2 "^1.2.3"
|
||||
slash "^3.0.0"
|
||||
|
||||
globby@^11.0.0:
|
||||
version "11.0.0"
|
||||
resolved "https://registry.yarnpkg.com/globby/-/globby-11.0.0.tgz#56fd0e9f0d4f8fb0c456f1ab0dee96e1380bc154"
|
||||
integrity sha512-iuehFnR3xu5wBBtm4xi0dMe92Ob87ufyu/dHwpDYfbcpYpIbrO5OnS8M1vWvrBhSGEJ3/Ecj7gnX76P8YxpPEg==
|
||||
dependencies:
|
||||
array-union "^2.1.0"
|
||||
dir-glob "^3.0.1"
|
||||
fast-glob "^3.1.1"
|
||||
ignore "^5.1.4"
|
||||
merge2 "^1.3.0"
|
||||
slash "^3.0.0"
|
||||
|
||||
globby@^6.1.0:
|
||||
version "6.1.0"
|
||||
resolved "https://registry.yarnpkg.com/globby/-/globby-6.1.0.tgz#f5a6d70e8395e21c858fb0489d64df02424d506c"
|
||||
@ -4743,7 +4697,7 @@ ignore@^3.3.5:
|
||||
resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
|
||||
integrity sha512-Pgs951kaMm5GXP7MOvxERINe3gsaVjUWFm+UZPSq9xYriQAksyhg0csnS0KXSNRD5NmNdapXEpjxG49+AKh/ug==
|
||||
|
||||
ignore@^5.1.1, ignore@^5.1.4:
|
||||
ignore@^5.1.1:
|
||||
version "5.1.4"
|
||||
resolved "https://registry.yarnpkg.com/ignore/-/ignore-5.1.4.tgz#84b7b3dbe64552b6ef0eca99f6743dbec6d97adf"
|
||||
integrity sha512-MzbUSahkTW1u7JpKKjY7LCARd1fU5W2rLdxlM4kdkayuCwZImjkpluF9CM1aLewYJguPDqewLam18Y6AU69A8A==
|
||||
@ -5382,15 +5336,6 @@ jsonfile@^4.0.0:
|
||||
optionalDependencies:
|
||||
graceful-fs "^4.1.6"
|
||||
|
||||
jsonfile@^6.0.1:
|
||||
version "6.0.1"
|
||||
resolved "https://registry.yarnpkg.com/jsonfile/-/jsonfile-6.0.1.tgz#98966cba214378c8c84b82e085907b40bf614179"
|
||||
integrity sha512-jR2b5v7d2vIOust+w3wtFKZIfpC2pnRmFAhAC/BuweZFQR8qZzxH1OyrQ10HmdVYiXWkYUqPVsz91cG7EL2FBg==
|
||||
dependencies:
|
||||
universalify "^1.0.0"
|
||||
optionalDependencies:
|
||||
graceful-fs "^4.1.6"
|
||||
|
||||
jsprim@^1.2.2:
|
||||
version "1.4.1"
|
||||
resolved "https://registry.yarnpkg.com/jsprim/-/jsprim-1.4.1.tgz#313e66bc1e5cc06e438bc1b7499c2e5c56acb6a2"
|
||||
@ -5656,13 +5601,6 @@ lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17
|
||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
|
||||
integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
|
||||
|
||||
log-symbols@^2.2.0:
|
||||
version "2.2.0"
|
||||
resolved "https://registry.yarnpkg.com/log-symbols/-/log-symbols-2.2.0.tgz#5740e1c5d6f0dfda4ad9323b5332107ef6b4c40a"
|
||||
integrity sha512-VeIAFslyIerEJLXHziedo2basKbMKtTw3vfn5IzG0XTjhAVEJyNHnL2p7vc+wBDSdQuUpNw3M2u6xb9QsAY5Eg==
|
||||
dependencies:
|
||||
chalk "^2.0.1"
|
||||
|
||||
loglevel@^1.6.8:
|
||||
version "1.6.8"
|
||||
resolved "https://registry.yarnpkg.com/loglevel/-/loglevel-1.6.8.tgz#8a25fb75d092230ecd4457270d80b54e28011171"
|
||||
@ -6645,7 +6583,7 @@ picomatch@^2.0.4, picomatch@^2.0.5, picomatch@^2.2.1:
|
||||
resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.2.2.tgz#21f333e9b6b8eaff02468f5146ea406d345f4dad"
|
||||
integrity sha512-q0M/9eZHzmr0AulXyPwNfZjtwZ/RBZlbN3K3CErVrk50T2ASYI7Bye0EvekFY3IP1Nt2DHu0re+V2ZHIpMkuWg==
|
||||
|
||||
pify@^2.0.0, pify@^2.3.0:
|
||||
pify@^2.0.0:
|
||||
version "2.3.0"
|
||||
resolved "https://registry.yarnpkg.com/pify/-/pify-2.3.0.tgz#ed141a6ac043a849ea588498e7dca8b15330e90c"
|
||||
integrity sha1-7RQaasBDqEnqWISY59yosVMw6Qw=
|
||||
@ -6731,24 +6669,6 @@ postcss-calc@^7.0.1:
|
||||
postcss-selector-parser "^6.0.2"
|
||||
postcss-value-parser "^4.0.2"
|
||||
|
||||
postcss-cli@^7.1.1:
|
||||
version "7.1.1"
|
||||
resolved "https://registry.yarnpkg.com/postcss-cli/-/postcss-cli-7.1.1.tgz#260f9546be260b2149bf32e28d785a0d79c9aab8"
|
||||
integrity sha512-bYQy5ydAQJKCMSpvaMg0ThPBeGYqhQXumjbFOmWnL4u65CYXQ16RfS6afGQpit0dGv/fNzxbdDtx8dkqOhhIbg==
|
||||
dependencies:
|
||||
chalk "^4.0.0"
|
||||
chokidar "^3.3.0"
|
||||
dependency-graph "^0.9.0"
|
||||
fs-extra "^9.0.0"
|
||||
get-stdin "^7.0.0"
|
||||
globby "^11.0.0"
|
||||
postcss "^7.0.0"
|
||||
postcss-load-config "^2.0.0"
|
||||
postcss-reporter "^6.0.0"
|
||||
pretty-hrtime "^1.0.3"
|
||||
read-cache "^1.0.0"
|
||||
yargs "^15.0.2"
|
||||
|
||||
postcss-color-functional-notation@^2.0.1:
|
||||
version "2.0.1"
|
||||
resolved "https://registry.yarnpkg.com/postcss-color-functional-notation/-/postcss-color-functional-notation-2.0.1.tgz#5efd37a88fbabeb00a2966d1e53d98ced93f74e0"
|
||||
@ -7288,16 +7208,6 @@ postcss-replace-overflow-wrap@^3.0.0:
|
||||
dependencies:
|
||||
postcss "^7.0.2"
|
||||
|
||||
postcss-reporter@^6.0.0:
|
||||
version "6.0.1"
|
||||
resolved "https://registry.yarnpkg.com/postcss-reporter/-/postcss-reporter-6.0.1.tgz#7c055120060a97c8837b4e48215661aafb74245f"
|
||||
integrity sha512-LpmQjfRWyabc+fRygxZjpRxfhRf9u/fdlKf4VHG4TSPbV2XNsuISzYW1KL+1aQzx53CAppa1bKG4APIB/DOXXw==
|
||||
dependencies:
|
||||
chalk "^2.4.1"
|
||||
lodash "^4.17.11"
|
||||
log-symbols "^2.2.0"
|
||||
postcss "^7.0.7"
|
||||
|
||||
postcss-selector-matches@^4.0.0:
|
||||
version "4.0.0"
|
||||
resolved "https://registry.yarnpkg.com/postcss-selector-matches/-/postcss-selector-matches-4.0.0.tgz#71c8248f917ba2cc93037c9637ee09c64436fcff"
|
||||
@ -7397,7 +7307,7 @@ postcss@^6.0.9:
|
||||
source-map "^0.6.1"
|
||||
supports-color "^5.4.0"
|
||||
|
||||
postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6, postcss@^7.0.7:
|
||||
postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.16, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.2, postcss@^7.0.21, postcss@^7.0.27, postcss@^7.0.30, postcss@^7.0.5, postcss@^7.0.6:
|
||||
version "7.0.30"
|
||||
resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.30.tgz#cc9378beffe46a02cbc4506a0477d05fcea9a8e2"
|
||||
integrity sha512-nu/0m+NtIzoubO+xdAlwZl/u5S5vi/y6BCsoL8D+8IxsD3XvBS8X4YEADNIVXKVuQvduiucnRv+vPIqj56EGMQ==
|
||||
@ -7692,6 +7602,11 @@ react-helmet@^6.0.0-beta:
|
||||
react-fast-compare "^2.0.4"
|
||||
react-side-effect "^2.1.0"
|
||||
|
||||
react-hook-sticky@^0.2.0:
|
||||
version "0.2.0"
|
||||
resolved "https://registry.yarnpkg.com/react-hook-sticky/-/react-hook-sticky-0.2.0.tgz#0dcc40a2afb1856e53764af9b231f1146e3de576"
|
||||
integrity sha512-J92F5H6PJQlMBgZ2tv58GeVlTZtEhpZ9bYLdoV2+5fVSJScszuY+TDZY3enQEAPIgJsLteFglGGuf8/TB9L72Q==
|
||||
|
||||
react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1:
|
||||
version "16.13.1"
|
||||
resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
|
||||
@ -7785,13 +7700,6 @@ react@^16.8.4:
|
||||
object-assign "^4.1.1"
|
||||
prop-types "^15.6.2"
|
||||
|
||||
read-cache@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/read-cache/-/read-cache-1.0.0.tgz#e664ef31161166c9751cdbe8dbcf86b5fb58f774"
|
||||
integrity sha1-5mTvMRYRZsl1HNvo28+GtftY93Q=
|
||||
dependencies:
|
||||
pify "^2.3.0"
|
||||
|
||||
"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
|
||||
version "2.3.7"
|
||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
|
||||
@ -8709,7 +8617,7 @@ string-width@^3.0.0, string-width@^3.1.0:
|
||||
is-fullwidth-code-point "^2.0.0"
|
||||
strip-ansi "^5.1.0"
|
||||
|
||||
string-width@^4.1.0, string-width@^4.2.0:
|
||||
string-width@^4.1.0:
|
||||
version "4.2.0"
|
||||
resolved "https://registry.yarnpkg.com/string-width/-/string-width-4.2.0.tgz#952182c46cc7b2c313d1596e623992bd163b72b5"
|
||||
integrity sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==
|
||||
@ -9305,11 +9213,6 @@ universalify@^0.1.0:
|
||||
resolved "https://registry.yarnpkg.com/universalify/-/universalify-0.1.2.tgz#b646f69be3942dabcecc9d6639c80dc105efaa66"
|
||||
integrity sha512-rBJeI5CXAlmy1pV+617WB9J63U6XcazHHF2f2dbJix4XzpUF0RS3Zbj0FGIOCAva5P/d/GBOYaACQ1w+0azUkg==
|
||||
|
||||
universalify@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/universalify/-/universalify-1.0.0.tgz#b61a1da173e8435b2fe3c67d29b9adf8594bd16d"
|
||||
integrity sha512-rb6X1W158d7pRQBg5gkR8uPaSfiids68LTJQYOtEUhoJUWBdaQHsuT/EUduxXYxcrt4r5PJ4fuHW1MHT6p0qug==
|
||||
|
||||
unpipe@1.0.0, unpipe@~1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/unpipe/-/unpipe-1.0.0.tgz#b2bf4ee8514aae6165b4817829d21b2ef49904ec"
|
||||
@ -9723,7 +9626,7 @@ wrap-ansi@^5.1.0:
|
||||
string-width "^3.0.0"
|
||||
strip-ansi "^5.0.0"
|
||||
|
||||
wrap-ansi@^6.0.0, wrap-ansi@^6.2.0:
|
||||
wrap-ansi@^6.0.0:
|
||||
version "6.2.0"
|
||||
resolved "https://registry.yarnpkg.com/wrap-ansi/-/wrap-ansi-6.2.0.tgz#e9393ba07102e6c91a3b221478f0257cd2856e53"
|
||||
integrity sha512-r6lPcBGxZXlIcymEu7InxDMhdW0KDxpLgoFLcguasxCaJ/SOIZwINatK9KY/tf+ZrlywOKU0UDj3ATXUBfxJXA==
|
||||
@ -9784,14 +9687,6 @@ yargs-parser@^13.1.2:
|
||||
camelcase "^5.0.0"
|
||||
decamelize "^1.2.0"
|
||||
|
||||
yargs-parser@^18.1.1:
|
||||
version "18.1.3"
|
||||
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-18.1.3.tgz#be68c4975c6b2abf469236b0c870362fab09a7b0"
|
||||
integrity sha512-o50j0JeToy/4K6OZcaQmW6lyXXKhq7csREXcDwk2omFPJEwUNOVtJKvmDr9EI1fAJZUyZcRF7kxGBWmRXudrCQ==
|
||||
dependencies:
|
||||
camelcase "^5.0.0"
|
||||
decamelize "^1.2.0"
|
||||
|
||||
yargs@^13.3.2:
|
||||
version "13.3.2"
|
||||
resolved "https://registry.yarnpkg.com/yargs/-/yargs-13.3.2.tgz#ad7ffefec1aa59565ac915f82dccb38a9c31a2dd"
|
||||
@ -9808,23 +9703,6 @@ yargs@^13.3.2:
|
||||
y18n "^4.0.0"
|
||||
yargs-parser "^13.1.2"
|
||||
|
||||
yargs@^15.0.2:
|
||||
version "15.3.1"
|
||||
resolved "https://registry.yarnpkg.com/yargs/-/yargs-15.3.1.tgz#9505b472763963e54afe60148ad27a330818e98b"
|
||||
integrity sha512-92O1HWEjw27sBfgmXiixJWT5hRBp2eobqXicLtPBIDBhYB+1HpwZlXmbW2luivBJHBzki+7VyCLRtAkScbTBQA==
|
||||
dependencies:
|
||||
cliui "^6.0.0"
|
||||
decamelize "^1.2.0"
|
||||
find-up "^4.1.0"
|
||||
get-caller-file "^2.0.1"
|
||||
require-directory "^2.1.1"
|
||||
require-main-filename "^2.0.0"
|
||||
set-blocking "^2.0.0"
|
||||
string-width "^4.2.0"
|
||||
which-module "^2.0.0"
|
||||
y18n "^4.0.0"
|
||||
yargs-parser "^18.1.1"
|
||||
|
||||
zepto@^1.2.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/zepto/-/zepto-1.2.0.tgz#e127bd9e66fd846be5eab48c1394882f7c0e4f98"
|
||||
|
go.mod
@ -12,7 +12,6 @@ require (
|
||||
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
|
||||
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
|
||||
github.com/brianvoe/gofakeit/v5 v5.2.0
|
||||
github.com/cespare/xxhash/v2 v2.1.1
|
||||
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a
|
||||
github.com/daaku/go.zipexe v1.0.1 // indirect
|
||||
github.com/dgrijalva/jwt-go v3.2.0+incompatible
|
||||
|
go.sum
@ -35,7 +35,9 @@ github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3 h1:+qz9Ga6l6lKw6fgv
|
||||
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3/go.mod h1:FlkD11RtgMTYjVuBnb7cxoHmQGqvPpCsr2atC88nl/M=
|
||||
github.com/akavel/rsrc v0.8.0 h1:zjWn7ukO9Kc5Q62DOJCcxGpXC18RawVtYAGdz2aLlfw=
|
||||
github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc h1:cAKDfWh5VpdgMhJosfJnn5/FoN2SRZ4p7fJNX58YPaU=
|
||||
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
|
||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf h1:qet1QNfXsQxTZqLG4oE62mJzwPIB8+Tee4RNCL9ulrY=
|
||||
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
|
||||
github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
|
||||
github.com/aws/aws-sdk-go v1.15.27 h1:i75BxN4Es/8rTVQbEKAP1WCiIhhz635xTNeDdZJRAXQ=
|
||||
@ -53,8 +55,6 @@ github.com/census-instrumentation/opencensus-proto v0.2.1 h1:glEXhBS5PSLLv4IXzLA
|
||||
github.com/census-instrumentation/opencensus-proto v0.2.1/go.mod h1:f6KPmirojxKA12rnyqOA5BBL4O983OfeGPqjHWSTneU=
|
||||
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
|
||||
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
||||
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
|
||||
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a h1:WVu7r2vwlrBVmunbSSU+9/3M3AgsQyhE49CKDjHiFq4=
|
||||
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a/go.mod h1:wQjjxFMFyMlsWh4Z3nMuHQtevD4Ul9UVQSnz1JOLuP8=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
@ -220,6 +220,7 @@ github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7V
|
||||
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
@ -319,6 +320,7 @@ github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeV
|
||||
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
|
||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
||||
github.com/sirupsen/logrus v1.4.2 h1:SPIRibHv4MatM3XXNO2BJeFLZwZ2LvZgfQ5+UNI2im4=
|
||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
|
||||
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||
@ -543,6 +545,7 @@ google.golang.org/grpc v1.21.0/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ij
|
||||
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
|
||||
google.golang.org/grpc v1.23.1 h1:q4XQuHFC6I28BKZpo6IYyb3mNO+l7lSOxRuYTCiDfXk=
|
||||
google.golang.org/grpc v1.23.1/go.mod h1:Y5yQAOtifL1yxbo5wqy6BxZv8vAUGQwXBOALyacEbxg=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6 h1:jMFz6MfLP0/4fUyZle81rXUoxOBFi19VUFKVDOQfozc=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+SDo693bJlVdllGtEeKM=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
|
@@ -156,7 +156,7 @@ func cmdVersion(cmd *cobra.Command, args []string) {

func BuildDetails() string {
if len(version) == 0 {
return fmt.Sprintf(`
return `
Super Graph (unknown version)
For documentation, visit https://supergraph.dev

@@ -166,7 +166,7 @@ To build with version information please use the Makefile

Licensed under the Apache Public License 2.0
Copyright 2020, Vikram Rangnekar
`)
`
}

return fmt.Sprintf(`
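The hunks above make BuildDetails return the fallback banner as a plain string literal instead of passing it through fmt.Sprintf when no version was compiled in. As a hedged illustration of where that `version` value comes from (the exact package and flag names are assumptions, not taken from this diff), it is typically a package-level variable stamped in at build time:

// Illustrative sketch only: how a version string ends up non-empty at runtime.
// Build with something like: go build -ldflags "-X main.version=v0.14.0"
package main

import "fmt"

var version string // stamped in by -ldflags; empty for a plain `go build`

func main() {
	if len(version) == 0 {
		fmt.Println("Super Graph (unknown version)")
		return
	}
	fmt.Println("Super Graph", version)
}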
@@ -17,6 +17,8 @@ import (
"github.com/dop251/goja"
"github.com/dosco/super-graph/core"
"github.com/gosimple/slug"
"github.com/jackc/pgx/v4"
"github.com/jackc/pgx/v4/stdlib"
"github.com/spf13/cobra"
)

@@ -27,6 +29,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
log.Fatalf("ERR failed to read config: %s", err)
}
conf.Production = false
conf.DefaultBlock = false

db, err = initDB(conf, true, false)
if err != nil {
@@ -51,7 +54,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {

vm := goja.New()
vm.Set("graphql", graphQLFn)
//vm.Set("import_csv", importCSV)
vm.Set("import_csv", importCSV)

console := vm.NewObject()
console.Set("log", logFunc) //nolint: errcheck
@@ -79,8 +82,6 @@ func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[st

if v, ok := opt["user_id"]; ok && len(v) != 0 {
ct = context.WithValue(ct, core.UserIDKey, v)
} else {
ct = context.WithValue(ct, core.UserIDKey, "-1")
}

// var role string
@@ -181,34 +182,42 @@ func (c *csvSource) Err() error {
return nil
}

// func importCSV(table, filename string) int64 {
// if filename[0] != '/' {
// filename = path.Join(conf.ConfigPathUsed(), filename)
// }
func importCSV(table, filename string) int64 {
if filename[0] != '/' {
filename = path.Join(confPath, filename)
}

// s, err := NewCSVSource(filename)
// if err != nil {
// log.Fatalf("ERR %s", err)
// }
s, err := NewCSVSource(filename)
if err != nil {
log.Fatalf("ERR %v", err)
}

// var cols []string
// colval, _ := s.Values()
var cols []string
colval, _ := s.Values()

// for _, c := range colval {
// cols = append(cols, c.(string))
// }
for _, c := range colval {
cols = append(cols, c.(string))
}

// n, err := db.Exec(fmt.Sprintf("COPY %s FROM STDIN WITH "),
// cols,
// s)
conn, err := stdlib.AcquireConn(db)
if err != nil {
log.Fatalf("ERR %v", err)
}
//nolint: errcheck
defer stdlib.ReleaseConn(db, conn)

// if err != nil {
// err = fmt.Errorf("%w (line no %d)", err, s.i)
// log.Fatalf("ERR %s", err)
// }
n, err := conn.CopyFrom(
context.Background(),
pgx.Identifier{table},
cols,
s)

// return n
// }
if err != nil {
log.Fatalf("ERR %v", fmt.Errorf("%w (line no %d)", err, s.i))
}

return n
}

//nolint: errcheck
func logFunc(args ...interface{}) {
@@ -377,11 +386,6 @@ func setFakeFuncs(f *goja.Object) {
f.Set("hipster_paragraph", gofakeit.HipsterParagraph)
f.Set("hipster_sentence", gofakeit.HipsterSentence)

//Languages
//f.Set("language", gofakeit.Language)
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)

// File
f.Set("file_extension", gofakeit.FileExtension)
f.Set("file_mine_type", gofakeit.FileMimeType)
@@ -410,8 +414,6 @@ func setFakeFuncs(f *goja.Object) {
f.Set("lexify", gofakeit.Lexify)
f.Set("rand_string", getRandValue)
f.Set("numerify", gofakeit.Numerify)

//f.Set("programming_language", gofakeit.ProgrammingLanguage)
}

//nolint: errcheck
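The revived importCSV above borrows the pgx connection underlying the database/sql pool and streams rows into Postgres with CopyFrom, using the csvSource type as the row source. A minimal standalone sketch of the same pattern, assuming a plain CSV file whose header row names the target columns (the package name, function name and table are placeholders, not the project's code):

// Sketch only: bulk-load a CSV file with pgx's COPY support, mirroring the
// stdlib.AcquireConn + CopyFrom pattern used above.
package csvcopy

import (
	"context"
	"database/sql"
	"encoding/csv"
	"io"
	"os"

	"github.com/jackc/pgx/v4"
	"github.com/jackc/pgx/v4/stdlib"
)

// copyCSV assumes the first CSV row holds the column names of the target table.
func copyCSV(db *sql.DB, table, filename string) (int64, error) {
	f, err := os.Open(filename)
	if err != nil {
		return 0, err
	}
	defer f.Close()

	r := csv.NewReader(f)
	header, err := r.Read()
	if err != nil {
		return 0, err
	}

	var rows [][]interface{}
	for {
		rec, err := r.Read()
		if err == io.EOF {
			break
		}
		if err != nil {
			return 0, err
		}
		row := make([]interface{}, len(rec))
		for i, v := range rec {
			row[i] = v
		}
		rows = append(rows, row)
	}

	// Borrow the underlying pgx connection from the database/sql pool.
	conn, err := stdlib.AcquireConn(db)
	if err != nil {
		return 0, err
	}
	defer stdlib.ReleaseConn(db, conn) //nolint: errcheck

	return conn.CopyFrom(context.Background(),
		pgx.Identifier{table}, header, pgx.CopyFromRows(rows))
}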
@@ -69,6 +69,8 @@ func newViper(configPath, configFile string) *viper.Viper {
vi.SetDefault("auth_fail_block", "always")
vi.SetDefault("seed_file", "seed.js")

vi.SetDefault("default_block", true)

vi.SetDefault("database.type", "postgres")
vi.SetDefault("database.host", "localhost")
vi.SetDefault("database.port", 5432)
@@ -15,7 +15,6 @@ import (
"contrib.go.opencensus.io/integrations/ocsql"
"github.com/jackc/pgx/v4"
"github.com/jackc/pgx/v4/stdlib"
//_ "github.com/jackc/pgx/v4/stdlib"
)

const (
@@ -32,6 +32,7 @@ type Auth struct {
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
Audience string `mapstructure:"audience"`
}

Header struct {
@@ -2,9 +2,12 @@ package auth

import (
"context"
"encoding/json"
"io/ioutil"
"net/http"
"strconv"
"strings"
"time"

jwt "github.com/dgrijalva/jwt-go"
"github.com/dosco/super-graph/core"
@@ -13,8 +16,18 @@ import (
const (
authHeader = "Authorization"
jwtAuth0 int = iota + 1
jwtFirebase int = iota + 2
firebasePKEndpoint = "https://www.googleapis.com/robot/v1/metadata/x509/securetoken@system.gserviceaccount.com"
firebaseIssuerPrefix = "https://securetoken.google.com/"
)

type firebasePKCache struct {
PublicKeys map[string]string
Expiration time.Time
}

var firebasePublicKeys firebasePKCache

func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
var key interface{}
var jwtProvider int
@@ -23,6 +36,8 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {

if ac.JWT.Provider == "auth0" {
jwtProvider = jwtAuth0
} else if ac.JWT.Provider == "firebase" {
jwtProvider = jwtFirebase
}

secret := ac.JWT.Secret
@@ -56,6 +71,7 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
}

return func(w http.ResponseWriter, r *http.Request) {

var tok string

if len(cookie) != 0 {
@@ -74,9 +90,16 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
tok = ah[7:]
}

token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, func(token *jwt.Token) (interface{}, error) {
var keyFunc jwt.Keyfunc
if jwtProvider == jwtFirebase {
keyFunc = firebaseKeyFunction
} else {
keyFunc = func(token *jwt.Token) (interface{}, error) {
return key, nil
})
}
}

token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, keyFunc)

if err != nil {
next.ServeHTTP(w, r)
@@ -86,12 +109,20 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
if claims, ok := token.Claims.(*jwt.StandardClaims); ok {
ctx := r.Context()

if ac.JWT.Audience != "" && claims.Audience != ac.JWT.Audience {
next.ServeHTTP(w, r)
return
}

if jwtProvider == jwtAuth0 {
sub := strings.Split(claims.Subject, "|")
if len(sub) != 2 {
ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
}
} else if jwtProvider == jwtFirebase &&
claims.Issuer == firebaseIssuerPrefix+ac.JWT.Audience {
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
} else {
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
}
@@ -103,3 +134,92 @@ func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
next.ServeHTTP(w, r)
}, nil
}

type firebaseKeyError struct {
Err error
Message string
}

func (e *firebaseKeyError) Error() string {
return e.Message + " " + e.Err.Error()
}

func firebaseKeyFunction(token *jwt.Token) (interface{}, error) {
kid, ok := token.Header["kid"]

if !ok {
return nil, &firebaseKeyError{
Message: "Error 'kid' header not found in token",
}
}

if firebasePublicKeys.Expiration.Before(time.Now()) {
resp, err := http.Get(firebasePKEndpoint)

if err != nil {
return nil, &firebaseKeyError{
Message: "Error connecting to firebase certificate server",
Err: err,
}
}

defer resp.Body.Close()

data, err := ioutil.ReadAll(resp.Body)

if err != nil {
return nil, &firebaseKeyError{
Message: "Error reading firebase certificate server response",
Err: err,
}
}

cachePolicy := resp.Header.Get("cache-control")
ageIndex := strings.Index(cachePolicy, "max-age=")

if ageIndex < 0 {
return nil, &firebaseKeyError{
Message: "Error parsing cache-control header: 'max-age=' not found",
}
}

ageToEnd := cachePolicy[ageIndex+8:]
endIndex := strings.Index(ageToEnd, ",")
if endIndex < 0 {
endIndex = len(ageToEnd) - 1
}
ageString := ageToEnd[:endIndex]

age, err := strconv.ParseInt(ageString, 10, 64)

if err != nil {
return nil, &firebaseKeyError{
Message: "Error parsing max-age cache policy",
Err: err,
}
}

expiration := time.Now().Add(time.Duration(time.Duration(age) * time.Second))

err = json.Unmarshal(data, &firebasePublicKeys.PublicKeys)

if err != nil {
firebasePublicKeys = firebasePKCache{}
return nil, &firebaseKeyError{
Message: "Error unmarshalling firebase public key json",
Err: err,
}
}

firebasePublicKeys.Expiration = expiration
}

if key, found := firebasePublicKeys.PublicKeys[kid.(string)]; found {
k, err := jwt.ParseRSAPublicKeyFromPEM([]byte(key))
return k, err
}

return nil, &firebaseKeyError{
Message: "Error no matching public key for kid supplied in jwt",
}
}
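firebaseKeyFunction above caches Google's x509 certificates and only re-fetches them once the max-age from the Cache-Control header has expired; the certificate whose key id matches the token's `kid` header is then used to verify the signature. A rough usage sketch of that keyfunc with dgrijalva/jwt-go, assuming the imports of the file above and a Firebase project ID of "my-project" (both the project ID and the function name are placeholders):

// Sketch: verify a Firebase ID token with a jwt.Keyfunc like the one above.
func verifyFirebaseToken(tok string) (string, error) {
	claims := jwt.StandardClaims{}

	// ParseWithClaims calls firebaseKeyFunction to pick the right public key
	// and also checks the standard expiry claims.
	if _, err := jwt.ParseWithClaims(tok, &claims, firebaseKeyFunction); err != nil {
		return "", err
	}

	if claims.Audience != "my-project" ||
		claims.Issuer != "https://securetoken.google.com/my-project" {
		return "", errors.New("unexpected audience or issuer")
	}

	// For Firebase the subject claim carries the user's UID.
	return claims.Subject, nil
}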
@@ -6,9 +6,11 @@ import (
"database/sql"
"fmt"
"io/ioutil"
"log"
"os"
"path/filepath"
"regexp"
"sort"
"strconv"
"strings"
"text/template"
@@ -105,39 +107,40 @@ func (defaultMigratorFS) Glob(pattern string) ([]string, error) {
func FindMigrationsEx(path string, fs MigratorFS) ([]string, error) {
path = strings.TrimRight(path, string(filepath.Separator))

fileInfos, err := fs.ReadDir(path)
files, err := ioutil.ReadDir(path)
if err != nil {
return nil, err
log.Fatal(err)
}

paths := make([]string, 0, len(fileInfos))
for _, fi := range fileInfos {
fm := make(map[int]string, len(files))
keys := make([]int, 0, len(files))

for _, fi := range files {
if fi.IsDir() {
continue
}

matches := migrationPattern.FindStringSubmatch(fi.Name())

if len(matches) != 2 {
continue
}

n, err := strconv.ParseInt(matches[1], 10, 32)
n, err := strconv.Atoi(matches[1])
if err != nil {
// The regexp already validated that the prefix is all digits so this *should* never fail
return nil, err
}

mcount := len(paths)

if n < int64(mcount) {
return nil, fmt.Errorf("Duplicate migration %d", n)
fm[n] = filepath.Join(path, fi.Name())
keys = append(keys, n)
}

if int64(mcount) < n {
return nil, fmt.Errorf("Missing migration %d", mcount)
}
sort.Ints(keys)

paths = append(paths, filepath.Join(path, fi.Name()))
paths := make([]string, 0, len(keys))
for _, k := range keys {
paths = append(paths, fm[k])
}

return paths, nil
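The rewritten FindMigrationsEx above no longer insists that migration files form a dense 0..N sequence: it maps each numeric prefix to its path, sorts the prefixes, and returns the paths in that order, so gaps in the numbering are tolerated. A small self-contained sketch of the same idea, assuming file names like `100_create_users.sql` (the package name, regexp and function name here are assumptions, not the project's own migrationPattern):

// Sketch: order migration files by their numeric prefix, tolerating gaps.
package migrate

import (
	"io/ioutil"
	"path/filepath"
	"regexp"
	"sort"
	"strconv"
)

var migrationPattern = regexp.MustCompile(`\A(\d+)_.+\.sql\z`)

func findMigrations(dir string) ([]string, error) {
	files, err := ioutil.ReadDir(dir)
	if err != nil {
		return nil, err
	}

	byNum := make(map[int]string, len(files))
	nums := make([]int, 0, len(files))

	for _, fi := range files {
		if fi.IsDir() {
			continue
		}
		m := migrationPattern.FindStringSubmatch(fi.Name())
		if len(m) != 2 {
			continue
		}
		n, err := strconv.Atoi(m[1])
		if err != nil {
			return nil, err // the prefix is all digits, so this should not happen
		}
		byNum[n] = filepath.Join(dir, fi.Name())
		nums = append(nums, n)
	}

	sort.Ints(nums)

	paths := make([]string, 0, len(nums))
	for _, n := range nums {
		paths = append(paths, byNum[n])
	}
	return paths, nil
}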
File diff suppressed because one or more lines are too long
@ -11,9 +11,9 @@
|
||||
// opt-in, read http://bit.ly/CRA-PWA
|
||||
|
||||
const isLocalhost = Boolean(
|
||||
window.location.hostname === 'localhost' ||
|
||||
window.location.hostname === "localhost" ||
|
||||
// [::1] is the IPv6 localhost address.
|
||||
window.location.hostname === '[::1]' ||
|
||||
window.location.hostname === "[::1]" ||
|
||||
// 127.0.0.1/8 is considered localhost for IPv4.
|
||||
window.location.hostname.match(
|
||||
/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
|
||||
@ -21,7 +21,7 @@ const isLocalhost = Boolean(
|
||||
);
|
||||
|
||||
export function register(config) {
|
||||
if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
|
||||
if (process.env.NODE_ENV === "production" && "serviceWorker" in navigator) {
|
||||
// The URL constructor is available in all browsers that support SW.
|
||||
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
|
||||
if (publicUrl.origin !== window.location.origin) {
|
||||
@ -31,7 +31,7 @@ export function register(config) {
|
||||
return;
|
||||
}
|
||||
|
||||
window.addEventListener('load', () => {
|
||||
window.addEventListener("load", () => {
|
||||
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
|
||||
|
||||
if (isLocalhost) {
|
||||
@ -42,8 +42,8 @@ export function register(config) {
|
||||
// service worker/PWA documentation.
|
||||
navigator.serviceWorker.ready.then(() => {
|
||||
console.log(
|
||||
'This web app is being served cache-first by a service ' +
|
||||
'worker. To learn more, visit http://bit.ly/CRA-PWA'
|
||||
"This web app is being served cache-first by a service " +
|
||||
"worker. To learn more, visit http://bit.ly/CRA-PWA"
|
||||
);
|
||||
});
|
||||
} else {
|
||||
@ -57,21 +57,21 @@ export function register(config) {
|
||||
function registerValidSW(swUrl, config) {
|
||||
navigator.serviceWorker
|
||||
.register(swUrl)
|
||||
.then(registration => {
|
||||
.then((registration) => {
|
||||
registration.onupdatefound = () => {
|
||||
const installingWorker = registration.installing;
|
||||
if (installingWorker == null) {
|
||||
return;
|
||||
}
|
||||
installingWorker.onstatechange = () => {
|
||||
if (installingWorker.state === 'installed') {
|
||||
if (installingWorker.state === "installed") {
|
||||
if (navigator.serviceWorker.controller) {
|
||||
// At this point, the updated precached content has been fetched,
|
||||
// but the previous service worker will still serve the older
|
||||
// content until all client tabs are closed.
|
||||
console.log(
|
||||
'New content is available and will be used when all ' +
|
||||
'tabs for this page are closed. See http://bit.ly/CRA-PWA.'
|
||||
"New content is available and will be used when all " +
|
||||
"tabs for this page are closed. See http://bit.ly/CRA-PWA."
|
||||
);
|
||||
|
||||
// Execute callback
|
||||
@ -82,7 +82,7 @@ function registerValidSW(swUrl, config) {
|
||||
// At this point, everything has been precached.
|
||||
// It's the perfect time to display a
|
||||
// "Content is cached for offline use." message.
|
||||
console.log('Content is cached for offline use.');
|
||||
console.log("Content is cached for offline use.");
|
||||
|
||||
// Execute callback
|
||||
if (config && config.onSuccess) {
|
||||
@ -93,23 +93,23 @@ function registerValidSW(swUrl, config) {
|
||||
};
|
||||
};
|
||||
})
|
||||
.catch(error => {
|
||||
console.error('Error during service worker registration:', error);
|
||||
.catch((error) => {
|
||||
console.error("Error during service worker registration:", error);
|
||||
});
|
||||
}
|
||||
|
||||
function checkValidServiceWorker(swUrl, config) {
|
||||
// Check if the service worker can be found. If it can't reload the page.
|
||||
fetch(swUrl)
|
||||
.then(response => {
|
||||
.then((response) => {
|
||||
// Ensure service worker exists, and that we really are getting a JS file.
|
||||
const contentType = response.headers.get('content-type');
|
||||
const contentType = response.headers.get("content-type");
|
||||
if (
|
||||
response.status === 404 ||
|
||||
(contentType != null && contentType.indexOf('javascript') === -1)
|
||||
(contentType != null && contentType.indexOf("javascript") === -1)
|
||||
) {
|
||||
// No service worker found. Probably a different app. Reload the page.
|
||||
navigator.serviceWorker.ready.then(registration => {
|
||||
navigator.serviceWorker.ready.then((registration) => {
|
||||
registration.unregister().then(() => {
|
||||
window.location.reload();
|
||||
});
|
||||
@ -121,14 +121,14 @@ function checkValidServiceWorker(swUrl, config) {
|
||||
})
|
||||
.catch(() => {
|
||||
console.log(
|
||||
'No internet connection found. App is running in offline mode.'
|
||||
"No internet connection found. App is running in offline mode."
|
||||
);
|
||||
});
|
||||
}
|
||||
|
||||
export function unregister() {
|
||||
if ('serviceWorker' in navigator) {
|
||||
navigator.serviceWorker.ready.then(registration => {
|
||||
if ("serviceWorker" in navigator) {
|
||||
navigator.serviceWorker.ready.then((registration) => {
|
||||
registration.unregister();
|
||||
});
|
||||
}
|
||||
|
13
jsn/bench.1
Normal file
@@ -0,0 +1,13 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/jsn
BenchmarkGet
BenchmarkGet-16 13898 85293 ns/op 3328 B/op 2 allocs/op
BenchmarkFilter
BenchmarkFilter-16 189328 6341 ns/op 448 B/op 1 allocs/op
BenchmarkStrip
BenchmarkStrip-16 219765 5543 ns/op 224 B/op 1 allocs/op
BenchmarkReplace
BenchmarkReplace-16 100899 12022 ns/op 416 B/op 1 allocs/op
PASS
ok github.com/dosco/super-graph/jsn 6.029s
@@ -3,6 +3,7 @@ package jsn
import (
"bytes"
"encoding/json"
"errors"
"io"
)

@@ -68,7 +69,12 @@ func Clear(w *bytes.Buffer, v []byte) error {
}

io := int(dec.InputOffset())
w.Write(v[io-len(v1)-2 : io])
s := io - len(v1) - 2
if io <= s || s <= 0 {
return errors.New("invalid json")
}

w.Write(v[s:io])
w.WriteString(`:`)
isValue = true
@@ -2,17 +2,19 @@ package jsn

import (
"bytes"

"github.com/cespare/xxhash/v2"
"hash/maphash"
)

// Filter function filters the JSON keeping only the provided keys and removing all others
func Filter(w *bytes.Buffer, b []byte, keys []string) error {
var err error
kmap := make(map[uint64]struct{}, len(keys))
h := maphash.Hash{}

for i := range keys {
kmap[xxhash.Sum64String(keys[i])] = struct{}{}
_, _ = h.WriteString(keys[i])
kmap[h.Sum64()] = struct{}{}
h.Reset()
}

// is an list
@@ -132,7 +134,11 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
cb := b[s:(e + 1)]
e = 0

if _, ok := kmap[xxhash.Sum64(k)]; !ok {
_, _ = h.Write(k)
_, ok := kmap[h.Sum64()]
h.Reset()

if !ok {
continue
}
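This change (and the matching ones in get.go and replace.go below) swaps github.com/cespare/xxhash for the standard library's hash/maphash when building the key lookup set. The detail that makes it work is that a single maphash.Hash value, and therefore a single seed, is reused for both inserting keys into kmap and probing it, with Reset called between writes. A compact sketch of that pattern, assuming string keys (the type and function names are placeholders):

// Sketch: membership test on a small key set using hash/maphash.
// One maphash.Hash is reused so inserts and lookups share the same seed.
package keyset

import "hash/maphash"

type keySet struct {
	h maphash.Hash
	m map[uint64]struct{}
}

func newKeySet(keys []string) *keySet {
	ks := &keySet{m: make(map[uint64]struct{}, len(keys))}
	for _, k := range keys {
		_, _ = ks.h.WriteString(k)
		ks.m[ks.h.Sum64()] = struct{}{}
		ks.h.Reset()
	}
	return ks
}

func (ks *keySet) has(k []byte) bool {
	_, _ = ks.h.Write(k)
	sum := ks.h.Sum64()
	ks.h.Reset() // Reset clears the state but keeps the seed
	_, ok := ks.m[sum]
	return ok
}

Within Filter above the same steps are simply inlined, which is why the hash is Reset after every insert and every lookup.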
51
jsn/fuzz.go
@@ -2,10 +2,55 @@

package jsn

import (
"bytes"
"errors"
)

func Fuzz(data []byte) int {
if err := unifiedTest(data); err != nil {
return 0
c := 0

if err := Validate(string(data)); err == nil {
c = 1
}

return 1
if err := fuzzTest(data); err == nil {
c = 1
}

return c
}

func fuzzTest(data []byte) error {
err1 := Validate(string(data))

var b1 bytes.Buffer
err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})

path1 := [][]byte{[]byte("data"), []byte("users")}
Strip(data, path1)

from := []Field{
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
{[]byte("__twitter_id"), []byte(`"ABC123"`)},
}

to := []Field{
{[]byte("__twitter_id"), []byte(`"1234567890"`)},
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
}

var b2 bytes.Buffer
err3 := Replace(&b2, data, from, to)

Keys(data)

var b3 bytes.Buffer
err4 := Clear(&b3, data)

if err1 != nil || err2 != nil || err3 != nil || err4 != nil {
return errors.New("there was an error")
}

return nil
}
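The reworked Fuzz above returns 1 only when the input either validates or survives the whole fuzzTest pipeline, and 0 otherwise, which matches the go-fuzz convention (1 asks the fuzzer to prioritise the input, 0 marks it as uninteresting). A small hedged sketch of driving the entry point by hand, assuming fuzz.go is guarded by the same gofuzz build tag that fuzz_test.go declares below (run with `go test -tags gofuzz`):

// +build gofuzz

// Sketch: exercising the go-fuzz entry point from a regular test.
package jsn_test

import (
	"testing"

	"github.com/dosco/super-graph/jsn"
)

func TestFuzzSmoke(t *testing.T) {
	// A well-formed document should pass Validate and fuzzTest, so Fuzz should return 1.
	if got := jsn.Fuzz([]byte(`{"id": 1, "full_name": "Ada"}`)); got != 1 {
		t.Errorf("expected 1 for valid JSON, got %d", got)
	}
	// Malformed input is expected to return 0; the exact behaviour depends on Validate.
	t.Logf("truncated input => %d", jsn.Fuzz([]byte(`{"id":`)))
}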
@ -1,9 +1,15 @@
|
||||
package jsn
|
||||
// +build gofuzz
|
||||
|
||||
package jsn_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
)
|
||||
|
||||
var ret int
|
||||
|
||||
func TestFuzzCrashers(t *testing.T) {
|
||||
var crashers = []string{
|
||||
"00\"0000\"0{",
|
||||
@ -52,9 +58,16 @@ func TestFuzzCrashers(t *testing.T) {
|
||||
"0000\"0\"{",
|
||||
"000\"000\"{",
|
||||
"\"00000000\"{",
|
||||
`0000"00"00000000"000000000"00"000000000000000"00000"00000": "00"0"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
|
||||
`0000"000000000000000000000000000000000000"00000000"000000000"00"000000000000000"00000"00000": "00000000000000"00000"__twitter_id": [{ "name": "hello" }, { "name": "world"}]`,
|
||||
`00"__twitter_id":[{ "name": "hello" }, { "name": "world"}]`,
|
||||
"\"\xb0\xef\xbd\xe3\xbd\xef\x99\xe3\xbd\xef\xbd\xef\xbd\xef\xbd\xe5\x99\xe3\xbd" +
|
||||
"\xef\x99\xe3\"",
|
||||
"\"\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xe3\xef\xe3\xef\xe3\xe3\xe3\xef\xe3\xef\xe3" +
|
||||
"\xe3\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\xef\xe5\xe3\xef\xe3\xc6\xef\xef\"",
|
||||
}
|
||||
|
||||
for _, f := range crashers {
|
||||
_ = unifiedTest([]byte(f))
|
||||
ret = jsn.Fuzz([]byte(f))
|
||||
}
|
||||
}
|
||||
|
11
jsn/get.go
@@ -1,7 +1,7 @@
package jsn

import (
"github.com/cespare/xxhash/v2"
"hash/maphash"
)

const (
@@ -41,9 +41,12 @@ func Value(b []byte) []byte {
// Keys function fetches values for the provided keys
func Get(b []byte, keys [][]byte) []Field {
kmap := make(map[uint64]struct{}, len(keys))
h := maphash.Hash{}

for i := range keys {
kmap[xxhash.Sum64(keys[i])] = struct{}{}
_, _ = h.Write(keys[i])
kmap[h.Sum64()] = struct{}{}
h.Reset()
}

res := make([]Field, 0, 20)
@@ -141,7 +144,9 @@ func Get(b []byte, keys [][]byte) []Field {
}

if e != 0 {
_, ok := kmap[xxhash.Sum64(k)]
_, _ = h.Write(k)
_, ok := kmap[h.Sum64()]
h.Reset()

if ok {
res = append(res, Field{k, b[s:(e + 1)]})
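Get keeps the same external behaviour after the maphash switch: it walks the JSON once and returns a Field (raw key and raw value) for every requested key it encounters, at any nesting level. A short hedged usage sketch (the document and key names are placeholders):

// Sketch: extracting raw values for selected keys with jsn.Get.
package main

import (
	"fmt"

	"github.com/dosco/super-graph/jsn"
)

func main() {
	doc := []byte(`{"id": 1, "user": {"full_name": "Ada", "email": "ada@example.com"}}`)

	fields := jsn.Get(doc, [][]byte{[]byte("id"), []byte("email")})
	for _, f := range fields {
		fmt.Printf("%s => %s\n", f.Key, f.Value)
	}
	// Values are returned as raw JSON, so string values keep their quotes:
	// id => 1
	// email => "ada@example.com"
}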
@ -1,4 +1,4 @@
|
||||
package jsn
|
||||
package jsn_test
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
@ -6,6 +6,8 @@ import (
|
||||
"io/ioutil"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
)
|
||||
|
||||
var (
|
||||
@ -171,13 +173,13 @@ var (
|
||||
)
|
||||
|
||||
func TestGet(t *testing.T) {
|
||||
values := Get([]byte(input1), [][]byte{
|
||||
values := jsn.Get([]byte(input1), [][]byte{
|
||||
[]byte("test_1a"),
|
||||
[]byte("__twitter_id"),
|
||||
[]byte("work_email"),
|
||||
})
|
||||
|
||||
expected := []Field{
|
||||
expected := []jsn.Field{
|
||||
{[]byte("test_1a"), []byte(`{ "__twitter_id": "ABCD" }`)},
|
||||
{[]byte("__twitter_id"), []byte(`"ABCD"`)},
|
||||
{[]byte("__twitter_id"), []byte(`"2048666903444506956"`)},
|
||||
@ -214,11 +216,11 @@ func TestGet(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestGet1(t *testing.T) {
|
||||
values := Get([]byte(input5), [][]byte{
|
||||
values := jsn.Get([]byte(input5), [][]byte{
|
||||
[]byte("thread_slug"),
|
||||
})
|
||||
|
||||
expected := []Field{
|
||||
expected := []jsn.Field{
|
||||
{[]byte("thread_slug"), []byte(`"in-september-2018-slovak-police-stated-that-kuciak-7929"`)},
|
||||
}
|
||||
|
||||
@ -238,11 +240,11 @@ func TestGet1(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestGet2(t *testing.T) {
|
||||
values := Get([]byte(input6), [][]byte{
|
||||
values := jsn.Get([]byte(input6), [][]byte{
|
||||
[]byte("users_cursor"), []byte("threads_cursor"),
|
||||
})
|
||||
|
||||
expected := []Field{
|
||||
expected := []jsn.Field{
|
||||
{[]byte("threads_cursor"), []byte(`null`)},
|
||||
{[]byte("threads_cursor"), []byte(`25`)},
|
||||
{[]byte("users_cursor"), []byte(`3`)},
|
||||
@ -264,7 +266,7 @@ func TestGet2(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestGet3(t *testing.T) {
|
||||
values := Get(input7, [][]byte{[]byte("data")})
|
||||
values := jsn.Get(input7, [][]byte{[]byte("data")})
|
||||
v := values[0].Value
|
||||
|
||||
if !bytes.Equal(v[len(v)-11:], []byte(`Rangnekar"}`)) {
|
||||
@ -277,7 +279,7 @@ func TestGet4(t *testing.T) {
|
||||
|
||||
exp = strings.ReplaceAll(exp, "@", "`")
|
||||
|
||||
values := Get(input8, [][]byte{[]byte("body")})
|
||||
values := jsn.Get(input8, [][]byte{[]byte("body")})
|
||||
|
||||
if string(values[0].Key) != "body" {
|
||||
t.Fatal("unexpected key")
|
||||
@ -291,29 +293,29 @@ func TestGet4(t *testing.T) {
|
||||
|
||||
func TestValue(t *testing.T) {
|
||||
v1 := []byte("12345")
|
||||
if !bytes.Equal(Value(v1), v1) {
|
||||
if !bytes.Equal(jsn.Value(v1), v1) {
|
||||
t.Fatal("Number value invalid")
|
||||
}
|
||||
|
||||
v2 := []byte(`"12345"`)
|
||||
if !bytes.Equal(Value(v2), []byte(`12345`)) {
|
||||
if !bytes.Equal(jsn.Value(v2), []byte(`12345`)) {
|
||||
t.Fatal("String value invalid")
|
||||
}
|
||||
|
||||
v3 := []byte(`{ "hello": "world" }`)
|
||||
if Value(v3) != nil {
|
||||
t.Fatal("Object value is not nil", Value(v3))
|
||||
if jsn.Value(v3) != nil {
|
||||
t.Fatal("Object value is not nil", jsn.Value(v3))
|
||||
}
|
||||
|
||||
v4 := []byte(`[ "hello", "world" ]`)
|
||||
if Value(v4) != nil {
|
||||
if jsn.Value(v4) != nil {
|
||||
t.Fatal("List value is not nil")
|
||||
}
|
||||
}
|
||||
|
||||
func TestFilter1(t *testing.T) {
|
||||
var b bytes.Buffer
|
||||
err := Filter(&b, []byte(input2), []string{"id", "full_name", "embed"})
|
||||
err := jsn.Filter(&b, []byte(input2), []string{"id", "full_name", "embed"})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
@ -329,7 +331,7 @@ func TestFilter2(t *testing.T) {
|
||||
value := `[{"id":1,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":100,"amount_refunded":0,"date":"01/01/2019","application":null,"billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}, {"id":2,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":0,"date":"02/18/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}},{"id":3,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":50,"date":"03/21/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}]`
|
||||
|
||||
var b bytes.Buffer
|
||||
err := Filter(&b, []byte(value), []string{"id"})
|
||||
err := jsn.Filter(&b, []byte(value), []string{"id"})
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
@ -343,7 +345,7 @@ func TestFilter2(t *testing.T) {
|
||||
|
||||
func TestStrip(t *testing.T) {
|
||||
path1 := [][]byte{[]byte("data"), []byte("users")}
|
||||
value1 := Strip([]byte(input3), path1)
|
||||
value1 := jsn.Strip([]byte(input3), path1)
|
||||
|
||||
expected := []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)
|
||||
|
||||
@ -353,7 +355,7 @@ func TestStrip(t *testing.T) {
|
||||
}
|
||||
|
||||
path2 := [][]byte{[]byte("boo"), []byte("hoo")}
|
||||
value2 := Strip([]byte(input3), path2)
|
||||
value2 := jsn.Strip([]byte(input3), path2)
|
||||
|
||||
if !bytes.Equal(value2, []byte(input3)) {
|
||||
t.Log(value2)
|
||||
@ -364,7 +366,7 @@ func TestStrip(t *testing.T) {
|
||||
func TestValidateTrue(t *testing.T) {
|
||||
json := []byte(` [{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)
|
||||
|
||||
err := Validate(string(json))
|
||||
err := jsn.Validate(string(json))
|
||||
if err != nil {
|
||||
t.Error(err)
|
||||
}
|
||||
@ -373,7 +375,7 @@ func TestValidateTrue(t *testing.T) {
|
||||
func TestValidateFalse(t *testing.T) {
|
||||
json := []byte(` [{ "hello": 123"<html>}]`)
|
||||
|
||||
err := Validate(string(json))
|
||||
err := jsn.Validate(string(json))
|
||||
if err == nil {
|
||||
t.Error("JSON validation failed to detect invalid json")
|
||||
}
|
||||
@ -382,12 +384,12 @@ func TestValidateFalse(t *testing.T) {
|
||||
func TestReplace(t *testing.T) {
|
||||
var buf bytes.Buffer
|
||||
|
||||
from := []Field{
|
||||
from := []jsn.Field{
|
||||
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
|
||||
{[]byte("__twitter_id"), []byte(`"ABC123"`)},
|
||||
}
|
||||
|
||||
to := []Field{
|
||||
to := []jsn.Field{
|
||||
{[]byte("__twitter_id"), []byte(`"1234567890"`)},
|
||||
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
|
||||
}
|
||||
@ -412,7 +414,7 @@ func TestReplace(t *testing.T) {
|
||||
"__twitter_id":"1234567890"
|
||||
}] }`
|
||||
|
||||
err := Replace(&buf, []byte(input4), from, to)
|
||||
err := jsn.Replace(&buf, []byte(input4), from, to)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -428,7 +430,7 @@ func TestReplaceEmpty(t *testing.T) {
|
||||
|
||||
json := `{ "users" : [{"id":1,"full_name":"Sidney St[1]roman","email":"user0@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":2,"full_name":"Jerry Dickinson","email":"user1@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":3,"full_name":"Kenna Cassin","email":"user2@demo.com","__users_twitter_id":"2048666903444506956"}, {"id":4,"full_name":"Mr. Pat Parisian","email":"rodney@kautzer.biz","__users_twitter_id":"2048666903444506956"}, {"id":5,"full_name":"Bette Ebert","email":"janeenrath@goyette.com","__users_twitter_id":"2048666903444506956"}, {"id":6,"full_name":"Everett Kiehn","email":"michael@bartoletti.com","__users_twitter_id":"2048666903444506956"}, {"id":7,"full_name":"Katrina Cronin","email":"loretaklocko@framivolkman.org","__users_twitter_id":"2048666903444506956"}, {"id":8,"full_name":"Caroll Orn Sr.","email":"joannarau@hegmann.io","__users_twitter_id":"2048666903444506956"}, {"id":9,"full_name":"Gwendolyn Ziemann","email":"renaytoy@rutherford.co","__users_twitter_id":"2048666903444506956"}, {"id":10,"full_name":"Mrs. Rosann Fritsch","email":"holliemosciski@thiel.org","__users_twitter_id":"2048666903444506956"}, {"id":11,"full_name":"Arden Koss","email":"cristobalankunding@howewelch.org","__users_twitter_id":"2048666903444506956"}, {"id":12,"full_name":"Brenton Bauch PhD","email":"renee@miller.co","__users_twitter_id":"2048666903444506956"}, {"id":13,"full_name":"Daine Gleichner","email":"andrea@nienow.co","__users_twitter_id":"2048666903444506956"}] }`
|
||||
|
||||
err := Replace(&buf, []byte(json), []Field{}, []Field{})
|
||||
err := jsn.Replace(&buf, []byte(json), []jsn.Field{}, []jsn.Field{})
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -442,7 +444,7 @@ func TestReplaceEmpty(t *testing.T) {
|
||||
func TestKeys1(t *testing.T) {
|
||||
json := `[{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]},{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]},{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]}]`
|
||||
|
||||
fields := Keys([]byte(json))
|
||||
fields := jsn.Keys([]byte(json))
|
||||
|
||||
exp := []string{
|
||||
"id", "posts", "title", "description", "full_name", "email", "books", "name", "description",
|
||||
@ -462,7 +464,7 @@ func TestKeys1(t *testing.T) {
|
||||
func TestKeys2(t *testing.T) {
|
||||
json := `{"id":1,"posts": [{"title":"PT1-1","description":"PD1-1"}, {"title":"PT1-2","description":"PD1-2"}], "full_name":"FN1","email":"E1","books": [{"name":"BN1-1","description":"BD1-1"},{"name":"BN1-2","description":"BD1-2"},{"name":"BN1-2","description":"BD1-2"}]}`
|
||||
|
||||
fields := Keys([]byte(json))
|
||||
fields := jsn.Keys([]byte(json))
|
||||
|
||||
exp := []string{
|
||||
"id", "posts", "title", "description", "full_name", "email", "books", "name", "description",
|
||||
@ -491,7 +493,7 @@ func TestKeys3(t *testing.T) {
|
||||
"user": 123
|
||||
}`
|
||||
|
||||
fields := Keys([]byte(json))
|
||||
fields := jsn.Keys([]byte(json))
|
||||
|
||||
exp := []string{
|
||||
"insert", "created_at", "test_1a", "type1", "type2", "name", "updated_at", "description",
|
||||
@ -526,7 +528,7 @@ func TestClear(t *testing.T) {
|
||||
|
||||
expected := `{"insert":{"created_at":"","test_1a":{"type1":"","type2":[{"a":0.0}]},"name":"","updated_at":"","description":""},"user":0.0,"tags":[]}`
|
||||
|
||||
err := Clear(&buf, []byte(json))
|
||||
err := jsn.Clear(&buf, []byte(json))
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -541,7 +543,7 @@ func BenchmarkGet(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
for n := 0; n < b.N; n++ {
|
||||
Get([]byte(input1), [][]byte{[]byte("__twitter_id")})
|
||||
jsn.Get([]byte(input1), [][]byte{[]byte("__twitter_id")})
|
||||
}
|
||||
}
|
||||
|
||||
@ -553,7 +555,7 @@ func BenchmarkFilter(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
for n := 0; n < b.N; n++ {
|
||||
err := Filter(&buf, []byte(input2), keys)
|
||||
err := jsn.Filter(&buf, []byte(input2), keys)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
@ -566,19 +568,19 @@ func BenchmarkStrip(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
for n := 0; n < b.N; n++ {
|
||||
Strip([]byte(input3), path)
|
||||
jsn.Strip([]byte(input3), path)
|
||||
}
|
||||
}
|
||||
|
||||
func BenchmarkReplace(b *testing.B) {
|
||||
var buf bytes.Buffer
|
||||
|
||||
from := []Field{
|
||||
from := []jsn.Field{
|
||||
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
|
||||
{[]byte("__twitter_id"), []byte(`"ABC123"`)},
|
||||
}
|
||||
|
||||
to := []Field{
|
||||
to := []jsn.Field{
|
||||
{[]byte("__twitter_id"), []byte(`"1234567890"`)},
|
||||
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
|
||||
}
|
||||
@ -587,7 +589,7 @@ func BenchmarkReplace(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
for n := 0; n < b.N; n++ {
|
||||
err := Replace(&buf, []byte(input4), from, to)
|
||||
err := jsn.Replace(&buf, []byte(input4), from, to)
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
|
@@ -3,8 +3,7 @@ package jsn
import (
"bytes"
"errors"

"github.com/cespare/xxhash/v2"
"hash/maphash"
)

// Replace function replaces key-value pairs provided in the `from` argument with those in the `to` argument
@@ -18,7 +17,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
return err
}

h := xxhash.New()
h := maphash.Hash{}
tmap := make(map[uint64]int, len(from))

for i, f := range from {
@@ -133,9 +132,18 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
if e != 0 {
e++

if e <= s {
return errors.New("invalid json")
}

if _, err := h.Write(b[s:e]); err != nil {
return err
}

if (we + 1) <= ws {
return errors.New("invalid json")
}

n, ok := tmap[h.Sum64()]
h.Reset()
37
jsn/test.go
@@ -1,37 +0,0 @@
package jsn

import (
"bytes"
"errors"
)

func unifiedTest(data []byte) error {
err1 := Validate(string(data))

var b1 bytes.Buffer
err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})

path1 := [][]byte{[]byte("data"), []byte("users")}
Strip(data, path1)

from := []Field{
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
{[]byte("__twitter_id"), []byte(`"ABC123"`)},
}

to := []Field{
{[]byte("__twitter_id"), []byte(`"1234567890"`)},
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
}

var b2 bytes.Buffer
err3 := Replace(&b2, data, from, to)

Keys(data)

if err1 != nil || err2 != nil || err3 != nil {
return errors.New("there was an error")
}

return nil
}