Add ability to set filters per operation / action

This commit is contained in:
Vikram Rangnekar 2019-10-06 16:28:10 -04:00
parent 450d77ccbd
commit 9299855d8a
17 changed files with 446 additions and 225 deletions

View File

@ -16,6 +16,111 @@ mutation {
}
}
variables {
"data": {
"product_id": 5
}
}
mutation {
products(id: $product_id, delete: true) {
id
name
}
}
variables {
"data": [
{
"name": "Gumbo1",
"created_at": "now",
"updated_at": "now"
},
{
"name": "Gumbo2",
"created_at": "now",
"updated_at": "now"
}
]
}
mutation {
products(id: 199, delete: true) {
id
name
}
}
variables {
"data": [
{
"name": "Gumbo1",
"created_at": "now",
"updated_at": "now"
},
{
"name": "Gumbo2",
"created_at": "now",
"updated_at": "now"
}
]
}
query {
products {
id
name
}
}
variables {
"update": {
"name": "Helloo",
"description": "World \u003c\u003e"
},
"user": 123
}
mutation {
products(id: 5, update: $update) {
id
name
description
}
}
variables {
"data": [
{
"name": "Gumbo1",
"created_at": "now",
"updated_at": "now"
},
{
"name": "Gumbo2",
"created_at": "now",
"updated_at": "now"
}
]
}
query {
product {
id
name
}
}
query {
me {
id
email
full_name
}
}
variables {
"data": {
"email": "gfk@myspace.com",
@ -31,19 +136,6 @@ mutation {
}
}
variables {
"data": {
"product_id": 5
}
}
mutation {
products(id: $product_id, delete: true) {
id
name
}
}
query {
users {
id
@ -79,26 +171,3 @@ mutation {
}
}
query {
me {
id
email
full_name
}
}
variables {
"update": {
"name": "Helloo",
"description": "World \u003c\u003e"
},
"user": 123
}
mutation {
products(id: 5, update: $update) {
id
name
description
}
}

View File

@ -111,10 +111,13 @@ database:
- encrypted
- token
tables:
tables:
- name: users
# This filter will overwrite defaults.filter
# filter: ["{ id: { eq: $user_id } }"]
# filter_query: ["{ id: { eq: $user_id } }"]
filter_update: ["{ id: { eq: $user_id } }"]
filter_delete: ["{ id: { eq: $user_id } }"]
# - name: products
# # Multiple filters are AND'd together

View File

@ -105,10 +105,13 @@ database:
- encrypted
- token
tables:
tables:
- name: users
# This filter will overwrite defaults.filter
filter: ["{ id: { eq: $user_id } }"]
# filter: ["{ id: { eq: $user_id } }"]
# filter_query: ["{ id: { eq: $user_id } }"]
filter_update: ["{ id: { eq: $user_id } }"]
filter_delete: ["{ id: { eq: $user_id } }"]
- name: products
# Multiple filters are AND'd together

View File

@ -51,7 +51,7 @@ func (co *Compiler) compileMutation(qc *qcode.QCode, w *bytes.Buffer, vars Varia
}
default:
return 0, errors.New("valid mutations are 'insert' and 'update'")
return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
}
io.WriteString(c.w, ` RETURNING *) `)

View File

@ -36,10 +36,10 @@ func singleInsert(t *testing.T) {
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description, user_id) SELECT name, description, user_id FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
"insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc", "user_id": 5 }`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
@ -132,7 +132,7 @@ func singleUpdate(t *testing.T) {
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),

View File

@ -25,7 +25,8 @@ func TestMain(m *testing.M) {
DefaultFilter: []string{
`{ user_id: { _eq: $user_id } }`,
},
FilterMap: map[string][]string{
FilterMap: qcode.Filters{
All: map[string][]string{
"users": []string{
"{ id: { eq: $user_id } }",
},
@ -38,6 +39,15 @@ func TestMain(m *testing.M) {
"{ id: { eq: $user_id } }",
},
},
Query: map[string][]string{
"users": []string{},
},
Update: map[string][]string{
"products": []string{
"{ user_id: { eq: $user_id } }",
},
},
},
Blocklist: []string{
"secret",
"password",
@ -306,7 +316,7 @@ func oneToMany(t *testing.T) {
}
}`
sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email", "users"."id" FROM "users" WHERE ((("users"."id") = {{user_id}})) LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {

View File

@ -145,16 +145,30 @@ const (
OrderDescNullsLast
)
// Filters holds per-table filter expressions keyed by table name.
// Operation-specific maps (Query/Insert/Update/Delete) take precedence
// for their action; All is the fallback when no operation-specific
// entry exists for the table. Multiple expressions in a list are AND'd
// together when compiled.
type Filters struct {
All map[string][]string
Query map[string][]string
Insert map[string][]string
Update map[string][]string
Delete map[string][]string
}
type Config struct {
DefaultFilter []string
FilterMap map[string][]string
FilterMap Filters
Blocklist []string
KeepArgs bool
}
type Compiler struct {
fl *Exp
fm map[string]*Exp
df *Exp
fm struct {
all map[string]*Exp
query map[string]*Exp
insert map[string]*Exp
update map[string]*Exp
delete map[string]*Exp
}
bl map[string]struct{}
ka bool
}
@ -169,20 +183,59 @@ var expPool = sync.Pool{
}
func NewCompiler(c Config) (*Compiler, error) {
bl := make(map[string]struct{}, len(c.Blocklist))
var err error
co := &Compiler{ka: c.KeepArgs}
co.bl = make(map[string]struct{}, len(c.Blocklist))
for i := range c.Blocklist {
bl[c.Blocklist[i]] = struct{}{}
co.bl[c.Blocklist[i]] = struct{}{}
}
fl, err := compileFilter(c.DefaultFilter)
co.df, err = compileFilter(c.DefaultFilter)
if err != nil {
return nil, err
}
fm := make(map[string]*Exp, len(c.FilterMap))
co.fm.all, err = buildFilters(c.FilterMap.All)
if err != nil {
return nil, err
}
for k, v := range c.FilterMap {
co.fm.query, err = buildFilters(c.FilterMap.Query)
if err != nil {
return nil, err
}
co.fm.insert, err = buildFilters(c.FilterMap.Insert)
if err != nil {
return nil, err
}
co.fm.update, err = buildFilters(c.FilterMap.Update)
if err != nil {
return nil, err
}
co.fm.delete, err = buildFilters(c.FilterMap.Delete)
if err != nil {
return nil, err
}
seedExp := [100]Exp{}
for i := range seedExp {
seedExp[i].doFree = true
expPool.Put(&seedExp[i])
}
return co, nil
}
func buildFilters(filMap map[string][]string) (map[string]*Exp, error) {
fm := make(map[string]*Exp, len(filMap))
for k, v := range filMap {
fil, err := compileFilter(v)
if err != nil {
return nil, err
@ -194,13 +247,7 @@ func NewCompiler(c Config) (*Compiler, error) {
fm[plural] = fil
}
seedExp := [100]Exp{}
for i := range seedExp {
seedExp[i].doFree = true
expPool.Put(&seedExp[i])
}
return &Compiler{fl, fm, bl, c.KeepArgs}, nil
return fm, nil
}
func (com *Compiler) Compile(query []byte) (*QCode, error) {
@ -308,15 +355,37 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
id++
}
var ok bool
if id == 0 {
return nil, errors.New("invalid query")
}
var fil *Exp
if id > 0 {
root := &selects[0]
fil, ok = com.fm[root.Table]
if !ok || fil == nil {
fil = com.fl
switch op.Type {
case opQuery:
fil, _ = com.fm.query[root.Table]
case opMutate:
switch root.Action {
case ActionInsert:
fil, _ = com.fm.insert[root.Table]
case ActionUpdate:
fil, _ = com.fm.update[root.Table]
case ActionDelete:
fil, _ = com.fm.delete[root.Table]
case ActionUpsert:
fil, _ = com.fm.insert[root.Table]
}
}
if fil == nil {
fil, _ = com.fm.all[root.Table]
}
if fil == nil {
fil = com.df
}
if fil != nil && fil.Op != OpNop {
@ -334,10 +403,6 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
}
}
} else {
return nil, errors.New("invalid query")
}
return selects[:id], nil
}

View File

@ -124,6 +124,13 @@ e.g. db:migrate -+1
Run: cmdNew,
})
rootCmd.AddCommand(&cobra.Command{
Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
Short: "Dump config to file",
Long: "Dump current config to a file in the selected format",
Run: cmdConfDump,
})
rootCmd.Flags().StringVar(&confPath,
"path", "./config", "path to config files")
@ -144,35 +151,7 @@ func initLog() *zerolog.Logger {
}
func initConf() (*config, error) {
vi := viper.New()
vi.SetEnvPrefix("SG")
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
vi.AutomaticEnv()
vi.AddConfigPath(confPath)
vi.AddConfigPath("./config")
vi.SetConfigName(getConfigName())
vi.SetDefault("host_port", "0.0.0.0:8080")
vi.SetDefault("web_ui", false)
vi.SetDefault("enable_tracing", false)
vi.SetDefault("auth_fail_block", "always")
vi.SetDefault("seed_file", "seed.js")
vi.SetDefault("database.type", "postgres")
vi.SetDefault("database.host", "localhost")
vi.SetDefault("database.port", 5432)
vi.SetDefault("database.user", "postgres")
vi.SetDefault("database.schema", "public")
vi.SetDefault("env", "development")
vi.BindEnv("env", "GO_ENV")
vi.BindEnv("HOST", "HOST")
vi.BindEnv("PORT", "PORT")
vi.SetDefault("auth.rails.max_idle", 80)
vi.SetDefault("auth.rails.max_active", 12000)
vi := newConfig()
if err := vi.ReadInConfig(); err != nil {
return nil, err
@ -184,12 +163,16 @@ func initConf() (*config, error) {
return nil, fmt.Errorf("unable to decode config, %v", err)
}
if len(c.Tables) == 0 {
c.Tables = c.DB.Tables
}
for k, v := range c.Inflections {
flect.AddPlural(k, v)
}
for i := range c.DB.Tables {
t := c.DB.Tables[i]
for i := range c.Tables {
t := c.Tables[i]
t.Name = flect.Pluralize(strings.ToLower(t.Name))
}

29
serv/cmd_conf.go Normal file
View File

@ -0,0 +1,29 @@
package serv
import (
"fmt"
"os"
"github.com/spf13/cobra"
)
// cmdConfDump loads the current configuration (using the same search
// paths and defaults as the server via newConfig) and writes it to a
// file named "<config name>.<format>" in the working directory. The
// single required argument selects the output format (one of viper's
// supported extensions).
func cmdConfDump(cmd *cobra.Command, args []string) {
	// Exactly one argument (the format) is required.
	if len(args) != 1 {
		cmd.Help()
		os.Exit(1)
	}

	// Output file name, e.g. "dev.yaml".
	fname := fmt.Sprintf("%s.%s", getConfigName(), args[0])

	vi := newConfig()

	if err := vi.ReadInConfig(); err != nil {
		logger.Fatal().Err(err).Send()
	}

	if err := vi.WriteConfigAs(fname); err != nil {
		logger.Fatal().Err(err).Send()
	}

	logger.Info().Msgf("config dumped to ./%s", fname)
}

View File

@ -245,6 +245,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
// }
// os.Exit(1)
}
logger.Info().Msg("migration done")
}

View File

@ -141,5 +141,6 @@ func ifNotExists(filePath string, doFn func(string) error) {
if err != nil {
logger.Fatal().Err(err).Msgf("unable to create '%s'", filePath)
}
logger.Info().Msgf("created '%s'", filePath)
}

View File

@ -52,6 +52,8 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
if err != nil {
logger.Fatal().Err(err).Msg("failed to execute script")
}
logger.Info().Msg("seed script done")
}
//func runFunc(call goja.FunctionCall) {

View File

@ -3,7 +3,7 @@ package serv
import (
"strings"
"github.com/gobuffalo/flect"
"github.com/spf13/viper"
)
type config struct {
@ -69,11 +69,17 @@ type config struct {
Tables []configTable
} `mapstructure:"database"`
Tables []configTable
}
type configTable struct {
Name string
Filter []string
FilterQuery []string `mapstructure:"filter_query"`
FilterInsert []string `mapstructure:"filter_insert"`
FilterUpdate []string `mapstructure:"filter_update"`
FilterDelete []string `mapstructure:"filter_delete"`
Table string
Blocklist []string
Remotes []configRemote
@ -92,6 +98,40 @@ type configRemote struct {
} `mapstructure:"set_headers"`
}
// newConfig builds the viper instance used everywhere config is read:
// environment-variable overrides (SG_ prefix, "." in keys mapped to
// "_"), the config file search path (confPath, then ./config, file
// name from getConfigName), and defaults for the HTTP server, database
// connection and rails auth settings. Callers must still invoke
// ReadInConfig on the returned instance.
func newConfig() *viper.Viper {
	vi := viper.New()

	// Env overrides, e.g. SG_DATABASE_HOST -> database.host.
	vi.SetEnvPrefix("SG")
	vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	vi.AutomaticEnv()

	vi.AddConfigPath(confPath)
	vi.AddConfigPath("./config")
	vi.SetConfigName(getConfigName())

	// Server defaults.
	vi.SetDefault("host_port", "0.0.0.0:8080")
	vi.SetDefault("web_ui", false)
	vi.SetDefault("enable_tracing", false)
	vi.SetDefault("auth_fail_block", "always")
	vi.SetDefault("seed_file", "seed.js")

	// Database defaults.
	vi.SetDefault("database.type", "postgres")
	vi.SetDefault("database.host", "localhost")
	vi.SetDefault("database.port", 5432)
	vi.SetDefault("database.user", "postgres")
	vi.SetDefault("database.schema", "public")

	vi.SetDefault("env", "development")
	// Also honor conventional env vars alongside the SG_ prefixed ones.
	vi.BindEnv("env", "GO_ENV")
	vi.BindEnv("HOST", "HOST")
	vi.BindEnv("PORT", "PORT")

	vi.SetDefault("auth.rails.max_idle", 80)
	vi.SetDefault("auth.rails.max_active", 12000)

	return vi
}
func (c *config) getVariables() map[string]string {
vars := make(map[string]string, len(c.DB.vars))
@ -113,10 +153,10 @@ func (c *config) getVariables() map[string]string {
}
func (c *config) getAliasMap() map[string][]string {
m := make(map[string][]string, len(c.DB.Tables))
m := make(map[string][]string, len(c.Tables))
for i := range c.DB.Tables {
t := c.DB.Tables[i]
for i := range c.Tables {
t := c.Tables[i]
if len(t.Table) == 0 {
continue
@ -127,28 +167,3 @@ func (c *config) getAliasMap() map[string][]string {
}
return m
}
func (c *config) getFilterMap() map[string][]string {
m := make(map[string][]string, len(c.DB.Tables))
for i := range c.DB.Tables {
t := c.DB.Tables[i]
if len(t.Filter) == 0 {
continue
}
singular := flect.Singularize(t.Name)
plural := flect.Pluralize(t.Name)
if t.Filter[0] == "none" {
m[singular] = []string{}
m[plural] = []string{}
} else {
m[singular] = t.Filter
m[plural] = t.Filter
}
}
return m
}

View File

@ -25,7 +25,7 @@ type resolvFn struct {
func initResolvers() error {
rmap = make(map[uint64]*resolvFn)
for _, t := range conf.DB.Tables {
for _, t := range conf.Tables {
err := initRemotes(t)
if err != nil {
return err

View File

@ -12,6 +12,7 @@ import (
rice "github.com/GeertJohan/go.rice"
"github.com/dosco/super-graph/psql"
"github.com/dosco/super-graph/qcode"
"github.com/gobuffalo/flect"
)
func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
@ -20,13 +21,46 @@ func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
return nil, nil, err
}
qc, err := qcode.NewCompiler(qcode.Config{
conf := qcode.Config{
DefaultFilter: c.DB.Defaults.Filter,
FilterMap: c.getFilterMap(),
FilterMap: qcode.Filters{
All: make(map[string][]string, len(c.Tables)),
Query: make(map[string][]string, len(c.Tables)),
Insert: make(map[string][]string, len(c.Tables)),
Update: make(map[string][]string, len(c.Tables)),
Delete: make(map[string][]string, len(c.Tables)),
},
Blocklist: c.DB.Defaults.Blocklist,
KeepArgs: false,
})
}
// Populate the per-operation filter maps from each table's config.
// Both the singular and plural forms of the table name are registered
// so GraphQL fields like "product" and "products" resolve to the same
// filter.
for i := range c.Tables {
	t := c.Tables[i]

	singular := flect.Singularize(t.Name)
	plural := flect.Pluralize(t.Name)

	// setFilter registers fil under both name forms in fm. A list whose
	// first entry is "none" or empty explicitly disables filtering for
	// the table (empty slice), which is distinct from "no filter
	// configured" (key absent, falls through to All / the default).
	setFilter := func(fm map[string][]string, fil []string) {
		switch {
		case len(fil) == 0:
			return
		case fil[0] == "none" || len(fil[0]) == 0:
			fm[singular] = []string{}
			fm[plural] = []string{}
		default:
			// BUG FIX: this previously assigned t.Filter regardless of
			// which list was passed in, so filter_query/filter_insert/
			// filter_update/filter_delete were silently overwritten by
			// the generic "filter" value. Use the argument instead.
			fm[singular] = fil
			fm[plural] = fil
		}
	}

	setFilter(conf.FilterMap.All, t.Filter)
	setFilter(conf.FilterMap.Query, t.FilterQuery)
	setFilter(conf.FilterMap.Insert, t.FilterInsert)
	setFilter(conf.FilterMap.Update, t.FilterUpdate)
	setFilter(conf.FilterMap.Delete, t.FilterDelete)
}
qc, err := qcode.NewCompiler(conf)
if err != nil {
return nil, nil, err
}

View File

@ -115,6 +115,9 @@ database:
- name: users
# This filter will overwrite defaults.filter
# filter: ["{ id: { eq: $user_id } }"]
# filter_query: ["{ id: { eq: $user_id } }"]
filter_update: ["{ id: { eq: $user_id } }"]
filter_delete: ["{ id: { eq: $user_id } }"]
# - name: products
# # Multiple filters are AND'd together

View File

@ -108,7 +108,10 @@ database:
tables:
- name: users
# This filter will overwrite defaults.filter
filter: ["{ id: { eq: $user_id } }"]
# filter: ["{ id: { eq: $user_id } }"]
# filter_query: ["{ id: { eq: $user_id } }"]
filter_update: ["{ id: { eq: $user_id } }"]
filter_delete: ["{ id: { eq: $user_id } }"]
- name: products
# Multiple filters are AND'd together