diff --git a/config/allow.list b/config/allow.list index c0cd9b3..a17a562 100644 --- a/config/allow.list +++ b/config/allow.list @@ -5,7 +5,7 @@ variables { "name": "Wu-Tang", "description": "No description needed" }, - "product_id": 1 + "product_id": 1 } mutation { @@ -16,6 +16,111 @@ mutation { } } +variables { + "data": { + "product_id": 5 + } +} + +mutation { + products(id: $product_id, delete: true) { + id + name + } +} + +variables { + "data": [ + { + "name": "Gumbo1", + "created_at": "now", + "updated_at": "now" + }, + { + "name": "Gumbo2", + "created_at": "now", + "updated_at": "now" + } + ] +} + +mutation { + products(id: 199, delete: true) { + id + name + } +} + +variables { + "data": [ + { + "name": "Gumbo1", + "created_at": "now", + "updated_at": "now" + }, + { + "name": "Gumbo2", + "created_at": "now", + "updated_at": "now" + } + ] +} + +query { + products { + id + name + } +} + + +variables { + "update": { + "name": "Helloo", + "description": "World \u003c\u003e" + }, + "user": 123 +} + +mutation { + products(id: 5, update: $update) { + id + name + description + } +} + +variables { + "data": [ + { + "name": "Gumbo1", + "created_at": "now", + "updated_at": "now" + }, + { + "name": "Gumbo2", + "created_at": "now", + "updated_at": "now" + } + ] +} + +query { + product { + id + name + } +} + + +query { + me { + id + email + full_name + } +} + variables { "data": { "email": "gfk@myspace.com", @@ -31,19 +136,6 @@ mutation { } } -variables { - "data": { - "product_id": 5 - } -} - -mutation { - products(id: $product_id, delete: true) { - id - name - } -} - query { users { id @@ -79,26 +171,3 @@ mutation { } } -query { - me { - id - email - full_name - } -} - -variables { - "update": { - "name": "Helloo", - "description": "World \u003c\u003e" - }, - "user": 123 -} - -mutation { - products(id: 5, update: $update) { - id - name - description - } -} \ No newline at end of file diff --git a/config/dev.yml b/config/dev.yml index 80550b4..7c3ba46 100644 --- a/config/dev.yml +++ b/config/dev.yml @@ -111,42 +111,45 @@ database: - encrypted - token - tables: - - name: users - # This filter will overwrite defaults.filter - # filter: ["{ id: { eq: $user_id } }"] +tables: + - name: users + # This filter will overwrite defaults.filter + # filter: ["{ id: { eq: $user_id } }"] + # filter_query: ["{ id: { eq: $user_id } }"] + filter_update: ["{ id: { eq: $user_id } }"] + filter_delete: ["{ id: { eq: $user_id } }"] - # - name: products - # # Multiple filters are AND'd together - # filter: [ - # "{ price: { gt: 0 } }", - # "{ price: { lt: 8 } }" - # ] + # - name: products + # # Multiple filters are AND'd together + # filter: [ + # "{ price: { gt: 0 } }", + # "{ price: { lt: 8 } }" + # ] - - name: customers - # No filter is used for this field not - # even defaults.filter - filter: none + - name: customers + # No filter is used for this field not + # even defaults.filter + filter: none - remotes: - - name: payments - id: stripe_id - url: http://rails_app:3000/stripe/$id - path: data - # debug: true - pass_headers: - - cookie - set_headers: - - name: Host - value: 0.0.0.0 - # - name: Authorization - # value: Bearer + remotes: + - name: payments + id: stripe_id + url: http://rails_app:3000/stripe/$id + path: data + # debug: true + pass_headers: + - cookie + set_headers: + - name: Host + value: 0.0.0.0 + # - name: Authorization + # value: Bearer - - # You can create new fields that have a - # real db table backing them - name: me - table: users - filter: ["{ id: { eq: $user_id } }"] + - # You can create 
new fields that have a + # real db table backing them + name: me + table: users + filter: ["{ id: { eq: $user_id } }"] - # - name: posts - # filter: ["{ account_id: { _eq: $account_id } }"] \ No newline at end of file + # - name: posts + # filter: ["{ account_id: { _eq: $account_id } }"] \ No newline at end of file diff --git a/config/prod.yml b/config/prod.yml index 908fc8f..fa5f932 100644 --- a/config/prod.yml +++ b/config/prod.yml @@ -105,40 +105,43 @@ database: - encrypted - token - tables: - - name: users - # This filter will overwrite defaults.filter - filter: ["{ id: { eq: $user_id } }"] +tables: + - name: users + # This filter will overwrite defaults.filter + # filter: ["{ id: { eq: $user_id } }"] + # filter_query: ["{ id: { eq: $user_id } }"] + filter_update: ["{ id: { eq: $user_id } }"] + filter_delete: ["{ id: { eq: $user_id } }"] - - name: products - # Multiple filters are AND'd together - filter: [ - "{ price: { gt: 0 } }", - "{ price: { lt: 8 } }" - ] + - name: products + # Multiple filters are AND'd together + filter: [ + "{ price: { gt: 0 } }", + "{ price: { lt: 8 } }" + ] - - name: customers - # No filter is used for this field not - # even defaults.filter - filter: none + - name: customers + # No filter is used for this field not + # even defaults.filter + filter: none - # remotes: - # - name: payments - # id: stripe_id - # url: http://rails_app:3000/stripe/$id - # path: data - # # pass_headers: - # # - cookie - # # - host - # set_headers: - # - name: Authorization - # value: Bearer + # remotes: + # - name: payments + # id: stripe_id + # url: http://rails_app:3000/stripe/$id + # path: data + # # pass_headers: + # # - cookie + # # - host + # set_headers: + # - name: Authorization + # value: Bearer - - # You can create new fields that have a - # real db table backing them - name: me - table: users - filter: ["{ id: { eq: $user_id } }"] + - # You can create new fields that have a + # real db table backing them + name: me + table: users + filter: ["{ id: { eq: $user_id } }"] - # - name: posts - # filter: ["{ account_id: { _eq: $account_id } }"] \ No newline at end of file + # - name: posts + # filter: ["{ account_id: { _eq: $account_id } }"] \ No newline at end of file diff --git a/psql/insert.go b/psql/insert.go index bb66d7e..027588e 100644 --- a/psql/insert.go +++ b/psql/insert.go @@ -51,7 +51,7 @@ func (co *Compiler) compileMutation(qc *qcode.QCode, w *bytes.Buffer, vars Varia } default: - return 0, errors.New("valid mutations are 'insert' and 'update'") + return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'") } io.WriteString(c.w, ` RETURNING *) `) diff --git a/psql/insert_test.go b/psql/insert_test.go index a9f866c..bd03d7f 100644 --- a/psql/insert_test.go +++ b/psql/insert_test.go @@ -36,10 +36,10 @@ func singleInsert(t *testing.T) { } }` - sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";` + sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description, user_id) SELECT name, description, user_id FROM input i, 
json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";` vars := map[string]json.RawMessage{ - "insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`), + "insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc", "user_id": 5 }`), } resSQL, err := compileGQLToPSQL(gql, vars) @@ -132,7 +132,7 @@ func singleUpdate(t *testing.T) { } }` - sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";` + sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";` vars := map[string]json.RawMessage{ "update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`), diff --git a/psql/select_test.go b/psql/select_test.go index 1bdaa48..553f576 100644 --- a/psql/select_test.go +++ b/psql/select_test.go @@ -25,17 +25,27 @@ func TestMain(m *testing.M) { DefaultFilter: []string{ `{ user_id: { _eq: $user_id } }`, }, - FilterMap: map[string][]string{ - "users": []string{ - "{ id: { eq: $user_id } }", + FilterMap: qcode.Filters{ + All: map[string][]string{ + "users": []string{ + "{ id: { eq: $user_id } }", + }, + "products": []string{ + "{ price: { gt: 0 } }", + "{ price: { lt: 8 } }", + }, + "customers": []string{}, + "mes": []string{ + "{ id: { eq: $user_id } }", + }, }, - "products": []string{ - "{ price: { gt: 0 } }", - "{ price: { lt: 8 } }", + Query: map[string][]string{ + "users": []string{}, }, - "customers": []string{}, - "mes": []string{ - "{ id: { eq: $user_id } }", + Update: map[string][]string{ + "products": []string{ + "{ user_id: { eq: $user_id } }", + }, }, }, Blocklist: []string{ @@ -306,7 +316,7 @@ func oneToMany(t *testing.T) { } }` - sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email", "users"."id" FROM "users" WHERE ((("users"."id") = {{user_id}})) LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS 
"products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";` + sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";` resSQL, err := compileGQLToPSQL(gql, nil) if err != nil { diff --git a/qcode/qcode.go b/qcode/qcode.go index 5b5b081..bafe3e4 100644 --- a/qcode/qcode.go +++ b/qcode/qcode.go @@ -145,16 +145,30 @@ const ( OrderDescNullsLast ) +type Filters struct { + All map[string][]string + Query map[string][]string + Insert map[string][]string + Update map[string][]string + Delete map[string][]string +} + type Config struct { DefaultFilter []string - FilterMap map[string][]string + FilterMap Filters Blocklist []string KeepArgs bool } type Compiler struct { - fl *Exp - fm map[string]*Exp + df *Exp + fm struct { + all map[string]*Exp + query map[string]*Exp + insert map[string]*Exp + update map[string]*Exp + delete map[string]*Exp + } bl map[string]struct{} ka bool } @@ -169,20 +183,59 @@ var expPool = sync.Pool{ } func NewCompiler(c Config) (*Compiler, error) { - bl := make(map[string]struct{}, len(c.Blocklist)) + var err error + co := &Compiler{ka: c.KeepArgs} + + co.bl = make(map[string]struct{}, len(c.Blocklist)) for i := range c.Blocklist { - bl[c.Blocklist[i]] = struct{}{} + co.bl[c.Blocklist[i]] = struct{}{} } - fl, err := compileFilter(c.DefaultFilter) + co.df, err = compileFilter(c.DefaultFilter) if err != nil { return nil, err } - fm := make(map[string]*Exp, len(c.FilterMap)) + co.fm.all, err = buildFilters(c.FilterMap.All) + if err != nil { + return nil, err + } - for k, v := range c.FilterMap { + co.fm.query, err = buildFilters(c.FilterMap.Query) + if err != nil { + return nil, err + } + + co.fm.insert, err = buildFilters(c.FilterMap.Insert) + if err != nil { + return nil, err + } + + co.fm.update, err = buildFilters(c.FilterMap.Update) + if err != nil { + return nil, err + } + + co.fm.delete, err = buildFilters(c.FilterMap.Delete) + if err != nil { + return nil, err + } + + seedExp := [100]Exp{} + + for i := range seedExp { + seedExp[i].doFree = true + expPool.Put(&seedExp[i]) + } + + return co, nil +} + +func buildFilters(filMap map[string][]string) (map[string]*Exp, error) { + fm := make(map[string]*Exp, len(filMap)) + + for k, v := range filMap { fil, err := compileFilter(v) if err != nil { return nil, err @@ -194,13 +247,7 @@ func NewCompiler(c Config) 
(*Compiler, error) { fm[plural] = fil } - seedExp := [100]Exp{} - for i := range seedExp { - seedExp[i].doFree = true - expPool.Put(&seedExp[i]) - } - - return &Compiler{fl, fm, bl, c.KeepArgs}, nil + return fm, nil } func (com *Compiler) Compile(query []byte) (*QCode, error) { @@ -308,34 +355,52 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) { id++ } - var ok bool + if id == 0 { + return nil, errors.New("invalid query") + } + var fil *Exp - if id > 0 { - root := &selects[0] - fil, ok = com.fm[root.Table] + root := &selects[0] - if !ok || fil == nil { - fil = com.fl + switch op.Type { + case opQuery: + fil, _ = com.fm.query[root.Table] + + case opMutate: + switch root.Action { + case ActionInsert: + fil, _ = com.fm.insert[root.Table] + case ActionUpdate: + fil, _ = com.fm.update[root.Table] + case ActionDelete: + fil, _ = com.fm.delete[root.Table] + case ActionUpsert: + fil, _ = com.fm.insert[root.Table] } + } - if fil != nil && fil.Op != OpNop { - if root.Where != nil { - ow := root.Where + if fil == nil { + fil, _ = com.fm.all[root.Table] + } - root.Where = expPool.Get().(*Exp) - root.Where.Reset() - root.Where.Op = OpAnd - root.Where.Children = root.Where.childrenA[:2] - root.Where.Children[0] = fil - root.Where.Children[1] = ow - } else { - root.Where = fil - } + if fil == nil { + fil = com.df + } + + if fil != nil && fil.Op != OpNop { + if root.Where != nil { + ow := root.Where + + root.Where = expPool.Get().(*Exp) + root.Where.Reset() + root.Where.Op = OpAnd + root.Where.Children = root.Where.childrenA[:2] + root.Where.Children[0] = fil + root.Where.Children[1] = ow + } else { + root.Where = fil } - - } else { - return nil, errors.New("invalid query") } return selects[:id], nil diff --git a/serv/cmd.go b/serv/cmd.go index 0b894eb..87c1660 100644 --- a/serv/cmd.go +++ b/serv/cmd.go @@ -124,6 +124,13 @@ e.g. 
db:migrate -+1 Run: cmdNew, }) + rootCmd.AddCommand(&cobra.Command{ + Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")), + Short: "Dump config to file", + Long: "Dump current config to a file in the selected format", + Run: cmdConfDump, + }) + rootCmd.Flags().StringVar(&confPath, "path", "./config", "path to config files") @@ -144,35 +151,7 @@ func initLog() *zerolog.Logger { } func initConf() (*config, error) { - vi := viper.New() - - vi.SetEnvPrefix("SG") - vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_")) - vi.AutomaticEnv() - - vi.AddConfigPath(confPath) - vi.AddConfigPath("./config") - vi.SetConfigName(getConfigName()) - - vi.SetDefault("host_port", "0.0.0.0:8080") - vi.SetDefault("web_ui", false) - vi.SetDefault("enable_tracing", false) - vi.SetDefault("auth_fail_block", "always") - vi.SetDefault("seed_file", "seed.js") - - vi.SetDefault("database.type", "postgres") - vi.SetDefault("database.host", "localhost") - vi.SetDefault("database.port", 5432) - vi.SetDefault("database.user", "postgres") - vi.SetDefault("database.schema", "public") - - vi.SetDefault("env", "development") - vi.BindEnv("env", "GO_ENV") - vi.BindEnv("HOST", "HOST") - vi.BindEnv("PORT", "PORT") - - vi.SetDefault("auth.rails.max_idle", 80) - vi.SetDefault("auth.rails.max_active", 12000) + vi := newConfig() if err := vi.ReadInConfig(); err != nil { return nil, err @@ -184,12 +163,16 @@ func initConf() (*config, error) { return nil, fmt.Errorf("unable to decode config, %v", err) } + if len(c.Tables) == 0 { + c.Tables = c.DB.Tables + } + for k, v := range c.Inflections { flect.AddPlural(k, v) } - for i := range c.DB.Tables { - t := c.DB.Tables[i] + for i := range c.Tables { + t := c.Tables[i] t.Name = flect.Pluralize(strings.ToLower(t.Name)) } diff --git a/serv/cmd_conf.go b/serv/cmd_conf.go new file mode 100644 index 0000000..6c182ac --- /dev/null +++ b/serv/cmd_conf.go @@ -0,0 +1,29 @@ +package serv + +import ( + "fmt" + "os" + + "github.com/spf13/cobra" +) + +func cmdConfDump(cmd *cobra.Command, args []string) { + if len(args) != 1 { + cmd.Help() + os.Exit(1) + } + + fname := fmt.Sprintf("%s.%s", getConfigName(), args[0]) + + vi := newConfig() + + if err := vi.ReadInConfig(); err != nil { + logger.Fatal().Err(err).Send() + } + + if err := vi.WriteConfigAs(fname); err != nil { + logger.Fatal().Err(err).Send() + } + + logger.Info().Msgf("config dumped to ./%s", fname) +} diff --git a/serv/cmd_migrate.go b/serv/cmd_migrate.go index ada93dd..44283d9 100644 --- a/serv/cmd_migrate.go +++ b/serv/cmd_migrate.go @@ -245,6 +245,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) { // } // os.Exit(1) } + logger.Info().Msg("migration done") } diff --git a/serv/cmd_new.go b/serv/cmd_new.go index 76caa36..44a851c 100644 --- a/serv/cmd_new.go +++ b/serv/cmd_new.go @@ -141,5 +141,6 @@ func ifNotExists(filePath string, doFn func(string) error) { if err != nil { logger.Fatal().Err(err).Msgf("unable to create '%s'", filePath) } + logger.Info().Msgf("created '%s'", filePath) } diff --git a/serv/cmd_seed.go b/serv/cmd_seed.go index 27e1fd8..f0cc2d4 100644 --- a/serv/cmd_seed.go +++ b/serv/cmd_seed.go @@ -52,6 +52,8 @@ func cmdDBSeed(cmd *cobra.Command, args []string) { if err != nil { logger.Fatal().Err(err).Msg("failed to execute script") } + + logger.Info().Msg("seed script done") } //func runFunc(call goja.FunctionCall) { diff --git a/serv/config.go b/serv/config.go index 71c8d94..1fb7f63 100644 --- a/serv/config.go +++ b/serv/config.go @@ -3,7 +3,7 @@ package serv import ( "strings" - 
"github.com/gobuffalo/flect" + "github.com/spf13/viper" ) type config struct { @@ -69,14 +69,20 @@ type config struct { Tables []configTable } `mapstructure:"database"` + + Tables []configTable } type configTable struct { - Name string - Filter []string - Table string - Blocklist []string - Remotes []configRemote + Name string + Filter []string + FilterQuery []string `mapstructure:"filter_query"` + FilterInsert []string `mapstructure:"filter_insert"` + FilterUpdate []string `mapstructure:"filter_update"` + FilterDelete []string `mapstructure:"filter_delete"` + Table string + Blocklist []string + Remotes []configRemote } type configRemote struct { @@ -92,6 +98,40 @@ type configRemote struct { } `mapstructure:"set_headers"` } +func newConfig() *viper.Viper { + vi := viper.New() + + vi.SetEnvPrefix("SG") + vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_")) + vi.AutomaticEnv() + + vi.AddConfigPath(confPath) + vi.AddConfigPath("./config") + vi.SetConfigName(getConfigName()) + + vi.SetDefault("host_port", "0.0.0.0:8080") + vi.SetDefault("web_ui", false) + vi.SetDefault("enable_tracing", false) + vi.SetDefault("auth_fail_block", "always") + vi.SetDefault("seed_file", "seed.js") + + vi.SetDefault("database.type", "postgres") + vi.SetDefault("database.host", "localhost") + vi.SetDefault("database.port", 5432) + vi.SetDefault("database.user", "postgres") + vi.SetDefault("database.schema", "public") + + vi.SetDefault("env", "development") + vi.BindEnv("env", "GO_ENV") + vi.BindEnv("HOST", "HOST") + vi.BindEnv("PORT", "PORT") + + vi.SetDefault("auth.rails.max_idle", 80) + vi.SetDefault("auth.rails.max_active", 12000) + + return vi +} + func (c *config) getVariables() map[string]string { vars := make(map[string]string, len(c.DB.vars)) @@ -113,10 +153,10 @@ func (c *config) getVariables() map[string]string { } func (c *config) getAliasMap() map[string][]string { - m := make(map[string][]string, len(c.DB.Tables)) + m := make(map[string][]string, len(c.Tables)) - for i := range c.DB.Tables { - t := c.DB.Tables[i] + for i := range c.Tables { + t := c.Tables[i] if len(t.Table) == 0 { continue @@ -127,28 +167,3 @@ func (c *config) getAliasMap() map[string][]string { } return m } - -func (c *config) getFilterMap() map[string][]string { - m := make(map[string][]string, len(c.DB.Tables)) - - for i := range c.DB.Tables { - t := c.DB.Tables[i] - - if len(t.Filter) == 0 { - continue - } - singular := flect.Singularize(t.Name) - plural := flect.Pluralize(t.Name) - - if t.Filter[0] == "none" { - m[singular] = []string{} - m[plural] = []string{} - - } else { - m[singular] = t.Filter - m[plural] = t.Filter - } - } - - return m -} diff --git a/serv/reso.go b/serv/reso.go index 402d381..6e132df 100644 --- a/serv/reso.go +++ b/serv/reso.go @@ -25,7 +25,7 @@ type resolvFn struct { func initResolvers() error { rmap = make(map[uint64]*resolvFn) - for _, t := range conf.DB.Tables { + for _, t := range conf.Tables { err := initRemotes(t) if err != nil { return err diff --git a/serv/serv.go b/serv/serv.go index 9713bb6..1006520 100644 --- a/serv/serv.go +++ b/serv/serv.go @@ -12,6 +12,7 @@ import ( rice "github.com/GeertJohan/go.rice" "github.com/dosco/super-graph/psql" "github.com/dosco/super-graph/qcode" + "github.com/gobuffalo/flect" ) func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) { @@ -20,13 +21,46 @@ func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) { return nil, nil, err } - qc, err := qcode.NewCompiler(qcode.Config{ + conf := qcode.Config{ DefaultFilter: 
c.DB.Defaults.Filter,
-		FilterMap:     c.getFilterMap(),
-		Blocklist:     c.DB.Defaults.Blocklist,
-		KeepArgs:      false,
-	})
+		FilterMap: qcode.Filters{
+			All:    make(map[string][]string, len(c.Tables)),
+			Query:  make(map[string][]string, len(c.Tables)),
+			Insert: make(map[string][]string, len(c.Tables)),
+			Update: make(map[string][]string, len(c.Tables)),
+			Delete: make(map[string][]string, len(c.Tables)),
+		},
+		Blocklist: c.DB.Defaults.Blocklist,
+		KeepArgs:  false,
+	}
+
+	for i := range c.Tables {
+		t := c.Tables[i]
+
+		singular := flect.Singularize(t.Name)
+		plural := flect.Pluralize(t.Name)
+
+		setFilter := func(fm map[string][]string, fil []string) {
+			switch {
+			case len(fil) == 0:
+				return
+			case fil[0] == "none" || len(fil[0]) == 0:
+				fm[singular] = []string{}
+				fm[plural] = []string{}
+			default:
+				fm[singular] = fil
+				fm[plural] = fil
+			}
+		}
+
+		setFilter(conf.FilterMap.All, t.Filter)
+		setFilter(conf.FilterMap.Query, t.FilterQuery)
+		setFilter(conf.FilterMap.Insert, t.FilterInsert)
+		setFilter(conf.FilterMap.Update, t.FilterUpdate)
+		setFilter(conf.FilterMap.Delete, t.FilterDelete)
+	}
+
+	qc, err := qcode.NewCompiler(conf)
 	if err != nil {
 		return nil, nil, err
 	}
diff --git a/tmpl/dev.yml b/tmpl/dev.yml
index 8a5bee6..b53a4d5 100644
--- a/tmpl/dev.yml
+++ b/tmpl/dev.yml
@@ -115,6 +115,9 @@ database:
     - name: users
       # This filter will overwrite defaults.filter
       # filter: ["{ id: { eq: $user_id } }"]
+      # filter_query: ["{ id: { eq: $user_id } }"]
+      filter_update: ["{ id: { eq: $user_id } }"]
+      filter_delete: ["{ id: { eq: $user_id } }"]
 
     # - name: products
     #   # Multiple filters are AND'd together
diff --git a/tmpl/prod.yml b/tmpl/prod.yml
index 3f60b5c..9597d7a 100644
--- a/tmpl/prod.yml
+++ b/tmpl/prod.yml
@@ -108,7 +108,10 @@ database:
   tables:
     - name: users
       # This filter will overwrite defaults.filter
-      filter: ["{ id: { eq: $user_id } }"]
+      # filter: ["{ id: { eq: $user_id } }"]
+      # filter_query: ["{ id: { eq: $user_id } }"]
+      filter_update: ["{ id: { eq: $user_id } }"]
+      filter_delete: ["{ id: { eq: $user_id } }"]
 
     - name: products
       # Multiple filters are AND'd together
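
Taken together, the changes above replace the single `FilterMap map[string][]string` with per-action filter maps (`All`, `Query`, `Insert`, `Update`, `Delete`), which `compileQuery` now resolves in order: the action-specific filter first, then `All`, then `DefaultFilter`. The sketch below shows how a caller might exercise the new `qcode` API directly; the type and function names (`qcode.Config`, `qcode.Filters`, `qcode.NewCompiler`, `Compile`) and the import path come from the diff, while the `main` wrapper, the table name, and the filter strings are illustrative assumptions.

```go
package main

import (
	"log"

	"github.com/dosco/super-graph/qcode"
)

func main() {
	// Per-action filters: an entry in Update only applies to update
	// mutations; operations with no action-specific entry fall back to
	// All, and tables with no entry at all fall back to DefaultFilter.
	qc, err := qcode.NewCompiler(qcode.Config{
		DefaultFilter: []string{`{ user_id: { eq: $user_id } }`},
		FilterMap: qcode.Filters{
			All: map[string][]string{
				"products": []string{`{ price: { gt: 0 } }`},
			},
			Update: map[string][]string{
				"products": []string{`{ user_id: { eq: $user_id } }`},
			},
		},
	})
	if err != nil {
		log.Fatal(err)
	}

	// An update mutation on products should pick up the Update filter
	// (AND'd with its own arguments), rather than only the All filter.
	_, err = qc.Compile([]byte(`
	mutation {
		products(id: 5, update: $update) {
			id
			name
		}
	}`))
	if err != nil {
		log.Fatal(err)
	}
	log.Println("update mutation compiled with per-action filters")
}
```

This mirrors the `qcode.Filters` setup added to `psql/select_test.go` and the per-table `setFilter` wiring in `serv/serv.go`, which map the `filter`, `filter_query`, `filter_insert`, `filter_update`, and `filter_delete` keys from the table config into these maps.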