diff --git a/.gitignore b/.gitignore
index 5c88189..fdcd5db 100644
--- a/.gitignore
+++ b/.gitignore
@@ -28,3 +28,4 @@ main
 .DS_Store
 .swp
 main
+super-graph
diff --git a/.wtc.yaml b/.wtc.yaml
new file mode 100644
index 0000000..215573f
--- /dev/null
+++ b/.wtc.yaml
@@ -0,0 +1,13 @@
+no_trace: false
+debounce: 300 # if rule has no debounce, this will be used instead
+ignore: \.git/
+trig: [start, run] # will run on start
+rules:
+  - name: start
+  - name: run
+    match: \.go$
+    ignore: web|examples|docs|_test\.go$
+    command: go run main.go serv
+  - name: test
+    match: _test\.go$
+    command: go test -cover {PKG}
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
index c2487cc..805cc0d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -11,9 +11,8 @@ RUN apk update && \
     apk add --no-cache git && \
     apk add --no-cache upx=3.95-r2
 
-RUN go get -u github.com/shanzi/wu && \
-    go install github.com/shanzi/wu && \
-    go get github.com/GeertJohan/go.rice/rice
+RUN go get -u github.com/rafaelsq/wtc && \
+    go get -u github.com/GeertJohan/go.rice/rice
 
 WORKDIR /app
 COPY . /app
diff --git a/README.md b/README.md
index ec614f4..e6fbee3 100644
--- a/README.md
+++ b/README.md
@@ -46,6 +46,9 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun
 
 ## Contact me
 
+I'm happy to help you deploy Super Graph so feel free to reach out over
+Twitter or Discord.
+
 [twitter/dosco](https://twitter.com/dosco)
 
 [chat/super-graph](https://discord.gg/6pSWCTZ)
diff --git a/config/dev.yml b/config/dev.yml
index ade488b..ffe9f64 100644
--- a/config/dev.yml
+++ b/config/dev.yml
@@ -100,7 +100,7 @@ database:
 
   # Define defaults to for the field key and values below
   defaults:
-    # filter: ["{ user_id: { eq: $user_id } }"]
+    # filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
   blocklist:
@@ -112,17 +112,6 @@ database:
     - token
 
 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-
-  # - name: products
-  #   # Multiple filters are AND'd together
-  #   filter: [
-  #     "{ price: { gt: 0 } }",
-  #     "{ price: { lt: 8 } }"
-  #   ]
-
   - name: customers
     remotes:
       - name: payments
@@ -168,24 +157,23 @@ roles:
     tables:
       - name: users
         query:
-          filter: ["{ id: { _eq: $user_id } }"]
+          filters: ["{ id: { _eq: $user_id } }"]
 
       - name: products
-
         query:
           limit: 50
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
           columns: ["id", "name", "description" ]
           disable_aggregation: false
 
         insert:
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
           columns: ["id", "name", "description" ]
           set:
-          - created_at: "now"
+            - created_at: "now"
 
         update:
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
           columns:
             - id
             - name
@@ -199,6 +187,5 @@ roles:
     match: id = 1
     tables:
       - name: users
-
-      # select:
-      #   filter: ["{ account_id: { _eq: $account_id } }"]
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]
diff --git a/config/prod.yml b/config/prod.yml
index a52af3d..95abfb7 100644
--- a/config/prod.yml
+++ b/config/prod.yml
@@ -90,7 +90,7 @@ database:
 
   # Define defaults to for the field key and values below
   defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
+    filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
   blocklist:
@@ -102,25 +102,7 @@ database:
     - token
 
 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-    # filter_query: ["{ id: { eq: $user_id } }"]
-    filter_update: ["{ id: { eq: $user_id } }"]
-    filter_delete: ["{ id: { eq: $user_id } }"]
-
-  - name: products
-    # Multiple filters are AND'd together
-    filter: [
-      "{ price: { gt: 0 } }",
-      "{ price: { lt: 8 } }"
-    ]
-
   - name: customers
-    # No filter is used for this field not
-    # even defaults.filter
-    filter: none
-
     # remotes:
     #   - name: payments
     #     id: stripe_id
@@ -137,7 +119,61 @@ tables:
     # real db table backing them
     name: me
     table: users
-    filter: ["{ id: { eq: $user_id } }"]
 
-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
\ No newline at end of file
+roles_query: "SELECT * FROM users as usr WHERE id = $user_id"
+
+roles:
+  - name: anon
+    tables:
+      - name: products
+        limit: 10
+
+        query:
+          columns: ["id", "name", "description" ]
+          aggregation: false
+
+        insert:
+          allow: false
+
+        update:
+          allow: false
+
+        delete:
+          allow: false
+
+  - name: user
+    tables:
+      - name: users
+        query:
+          filters: ["{ id: { _eq: $user_id } }"]
+
+      - name: products
+        query:
+          limit: 50
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          disable_aggregation: false
+
+        insert:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          set:
+            - created_at: "now"
+
+        update:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns:
+            - id
+            - name
+          set:
+            - updated_at: "now"
+
+        delete:
+          deny: true
+
+  - name: admin
+    match: id = 1
+    tables:
+      - name: users
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]
diff --git a/docker-compose.yml b/docker-compose.yml
index d41e9f5..b3beb6e 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -34,7 +34,7 @@ services:
     volumes:
       - .:/app
     working_dir: /app
-    command: wu -pattern="*.go" go run main.go serv
+    command: wtc
    depends_on:
       - db
       - rails_app
diff --git a/docs/guide.md b/docs/guide.md
index 13d6338..55f0b43 100644
--- a/docs/guide.md
+++ b/docs/guide.md
@@ -1043,26 +1043,35 @@ We're tried to ensure that the config file is self documenting and easy to work
 app_name: "Super Graph Development"
 host_port: 0.0.0.0:8080
 web_ui: true
 
-debug_level: 1
-# debug, info, warn, error, fatal, panic, disable
-log_level: "info"
+# debug, info, warn, error, fatal, panic
+log_level: "debug"
 
 # Disable this in development to get a list of
 # queries used. When enabled super graph
 # will only allow queries from this list
 # List saved to ./config/allow.list
-use_allow_list: true
+use_allow_list: false
 
 # Throw a 401 on auth failure for queries that need auth
 # valid values: always, per_query, never
-auth_fail_block: always
+auth_fail_block: never
 
 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the
 # response
 enable_tracing: true
 
+# Watch the config folder and reload Super Graph
+# with the new configs when a change is detected
+reload_on_config_change: true
+
+# File that points to the database seeding script
+# seed_file: seed.js
+
+# Path pointing to where the migrations can be found
+migrations_path: ./config/migrations
+
 # Postgres related environment Variables
 # SG_DATABASE_HOST
 # SG_DATABASE_PORT
@@ -1086,7 +1095,7 @@ auth:
 
   # Comment this out if you want to disable setting
   # the user_id via a header. Good for testing
-  header: X-User-ID
+  creds_in_header: true
 
   rails:
     # Rails version this is used for reading the
@@ -1097,10 +1106,10 @@ auth:
     secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566
 
     # Remote cookie store. (memcache or redis)
-    # url: redis://127.0.0.1:6379
-    # password: test
-    # max_idle: 80,
-    # max_active: 12000,
+    # url: redis://redis:6379
+    # password: ""
+    # max_idle: 80
+    # max_active: 12000
 
     # In most cases you don't need these
     # salt: "encrypted cookie"
@@ -1120,20 +1129,23 @@ database:
   dbname: app_development
   user: postgres
   password: ''
-  # pool_size: 10
-  # max_retries: 0
-  # log_level: "debug"
+
+  #schema: "public"
+  #pool_size: 10
+  #max_retries: 0
+  #log_level: "debug"
 
   # Define variables here that you want to use in filters
+  # sub-queries must be wrapped in ()
   variables:
-    account_id: "select account_id from users where id = $user_id"
+    account_id: "(select account_id from users where id = $user_id)"
 
   # Define defaults to for the field key and values below
   defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
+    # filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
-  blacklist:
+  blocklist:
    - ar_internal_metadata
    - schema_migrations
    - secret
@@ -1141,46 +1153,85 @@ database:
    - encrypted
    - token
 
-  tables:
-    - name: users
-      # This filter will overwrite defaults.filter
-      # filter: ["{ id: { eq: $user_id } }"]
-      # filter_query: ["{ id: { eq: $user_id } }"]
-      filter_update: ["{ id: { eq: $user_id } }"]
-      filter_delete: ["{ id: { eq: $user_id } }"]
+tables:
+  - name: customers
+    remotes:
+      - name: payments
+        id: stripe_id
+        url: http://rails_app:3000/stripe/$id
+        path: data
+        # debug: true
+        pass_headers:
+          - cookie
+        set_headers:
+          - name: Host
+            value: 0.0.0.0
+          # - name: Authorization
+          #   value: Bearer
 
-    - name: products
-      # Multiple filters are AND'd together
-      filter: [
-        "{ price: { gt: 0 } }",
-        "{ price: { lt: 8 } }"
-      ]
+  - # You can create new fields that have a
+    # real db table backing them
+    name: me
+    table: users
 
-    - name: customers
-      # No filter is used for this field not
-      # even defaults.filter
-      filter: none
+roles_query: "SELECT * FROM users as usr WHERE id = $user_id"
 
-      remotes:
-        - name: payments
-          id: stripe_id
-          url: http://rails_app:3000/stripe/$id
-          path: data
-          # pass_headers:
-          #   - cookie
-          #   - host
-          set_headers:
-            - name: Authorization
-              value: Bearer
+roles:
+  - name: anon
+    tables:
+      - name: products
+        limit: 10
 
-    - # You can create new fields that have a
-      # real db table backing them
-      name: me
-      table: users
-      filter: ["{ id: { eq: $user_id } }"]
+        query:
+          columns: ["id", "name", "description" ]
+          aggregation: false
+
+        insert:
+          allow: false
+
+        update:
+          allow: false
+
+        delete:
+          allow: false
+
+  - name: user
+    tables:
+      - name: users
+        query:
+          filters: ["{ id: { _eq: $user_id } }"]
+
+      - name: products
+        query:
+          limit: 50
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          disable_aggregation: false
+
+        insert:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          set:
+            - created_at: "now"
+
+        update:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns:
+            - id
+            - name
+          set:
+            - updated_at: "now"
+
+        delete:
+          deny: true
+
+  - name: admin
+    match: id = 1
+    tables:
+      - name: users
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]
 
-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
 ```
 
 If deploying into environments like Kubernetes it's useful to be able to configure things like secrets and hosts though environment variables therfore we expose the below environment variables. This is escpecially useful for secrets since they are usually injected in via a secrets management framework ie. Kubernetes Secrets
diff --git a/psql/mutate_test.go b/psql/mutate_test.go
index 8555f74..390c301 100644
--- a/psql/mutate_test.go
+++ b/psql/mutate_test.go
@@ -12,7 +12,7 @@ func simpleInsert(t *testing.T) {
        }
    }`
 
-   sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "users" (full_name, email) SELECT full_name, email FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id" FROM "users") AS "users_0") AS "done_1337";`
+   sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "users" (full_name, email) SELECT full_name, email FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id" FROM "users") AS "users_0") AS "done_1337"`
 
    vars := map[string]json.RawMessage{
        "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
@@ -36,7 +36,7 @@ func singleInsert(t *testing.T) {
        }
    }`
 
-   sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description, user_id) SELECT name, description, user_id FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+   sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description, user_id) SELECT name, description, user_id FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
    vars := map[string]json.RawMessage{
        "insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc", "user_id": 5 }`),
@@ -60,7 +60,7 @@ func bulkInsert(t *testing.T) {
        }
    }`
 
-   sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+   sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
    vars := map[string]json.RawMessage{
        "insert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
@@ -84,7 +84,7 @@ func singleUpsert(t *testing.T) {
        }
    }`
 
-   sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+   sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
    vars := map[string]json.RawMessage{
        "upsert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
@@ -108,7 +108,7 @@ func bulkUpsert(t *testing.T) {
        }
    }`
 
-   sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+   sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
    vars := map[string]json.RawMessage{
        "upsert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
@@ -132,7 +132,7 @@ func singleUpdate(t *testing.T) {
        }
    }`
 
-   sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+   sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
    vars := map[string]json.RawMessage{
        "update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
@@ -156,7 +156,7 @@ func delete(t *testing.T) {
        }
    }`
 
-   sql := `WITH "products" AS (DELETE FROM "products" WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+   sql := `WITH "products" AS (DELETE FROM "products" WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
    vars := map[string]json.RawMessage{
        "update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
diff --git a/psql/query_test.go b/psql/query_test.go
index c24b439..78330b6 100644
--- a/psql/query_test.go
+++ b/psql/query_test.go
@@ -32,16 +32,16 @@ func TestMain(m *testing.M) {
    qcompile.AddRole("user", "product", qcode.TRConfig{
        Query: qcode.QueryConfig{
            Columns: []string{"id", "name", "price", "users", "customers"},
-           Filter: []string{
+           Filters: []string{
                "{ price: { gt: 0 } }",
                "{ price: { lt: 8 } }",
            },
        },
        Update: qcode.UpdateConfig{
-           Filter: []string{"{ user_id: { eq: $user_id } }"},
+           Filters: []string{"{ user_id: { eq: $user_id } }"},
        },
        Delete: qcode.DeleteConfig{
-           Filter: []string{
+           Filters: []string{
                "{ price: { gt: 0 } }",
                "{ price: { lt: 8 } }",
            },
@@ -70,7 +70,7 @@ func TestMain(m *testing.M) {
    qcompile.AddRole("user", "mes", qcode.TRConfig{
        Query: qcode.QueryConfig{
            Columns: []string{"id", "full_name", "avatar"},
-           Filter: []string{
+           Filters: []string{
                "{ id: { eq: $user_id } }",
            },
        },
diff --git a/qcode/config.go b/qcode/config.go
index b2ab9c4..c68a3d1 100644
--- a/qcode/config.go
+++ b/qcode/config.go
@@ -7,25 +7,25 @@ type Config struct {
 
 type QueryConfig struct {
    Limit            int
-   Filter           []string
+   Filters          []string
    Columns          []string
    DisableFunctions bool
 }
 
 type InsertConfig struct {
-   Filter  []string
+   Filters []string
    Columns []string
    Set     map[string]string
 }
 
 type UpdateConfig struct {
-   Filter  []string
+   Filters []string
    Columns []string
    Set     map[string]string
 }
 
 type DeleteConfig struct {
-   Filter  []string
+   Filters []string
    Columns []string
 }
 
diff --git a/qcode/qcode.go b/qcode/qcode.go
index a1d55e3..8c90d93 100644
--- a/qcode/qcode.go
+++ b/qcode/qcode.go
@@ -190,7 +190,7 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
    }
 
    // query config
-   trv.query.fil, err = compileFilter(trc.Query.Filter)
+   trv.query.fil, err = compileFilter(trc.Query.Filters)
    if err != nil {
        return err
    }
@@ -201,20 +201,20 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
    trv.query.disable.funcs = trc.Query.DisableFunctions
 
    // insert config
-   if trv.insert.fil, err = compileFilter(trc.Insert.Filter); err != nil {
+   if trv.insert.fil, err = compileFilter(trc.Insert.Filters); err != nil {
        return err
    }
    trv.insert.cols = toMap(trc.Insert.Columns)
 
    // update config
-   if trv.update.fil, err = compileFilter(trc.Update.Filter); err != nil {
+   if trv.update.fil, err = compileFilter(trc.Update.Filters); err != nil {
        return err
    }
    trv.insert.cols = toMap(trc.Insert.Columns)
    trv.insert.set = trc.Insert.Set
 
    // delete config
-   if trv.delete.fil, err = compileFilter(trc.Delete.Filter); err != nil {
+   if trv.delete.fil, err = compileFilter(trc.Delete.Filters); err != nil {
        return err
    }
    trv.delete.cols = toMap(trc.Delete.Columns)
diff --git a/serv/allow.go b/serv/allow.go
index f960fc0..f8f02ad 100644
--- a/serv/allow.go
+++ b/serv/allow.go
@@ -26,7 +26,8 @@ type allowItem struct {
 var _allowList allowList
 
 type allowList struct {
-   list     map[string]*allowItem
+   list     []*allowItem
+   index    map[string]int
    filepath string
    saveChan chan *allowItem
    active   bool
@@ -34,7 +35,7 @@ type allowList struct {
 
 func initAllowList(cpath string) {
    _allowList = allowList{
-       list:     make(map[string]*allowItem),
+       index:    make(map[string]int),
        saveChan: make(chan *allowItem),
        active:   true,
    }
@@ -172,19 +173,21 @@ func (al *allowList) load() {
        if c == 0 {
            if ty == AL_QUERY {
                q := string(b[s:(e + 1)])
+               key := gqlHash(q, varBytes, "")
 
-               item := &allowItem{
-                   uri: uri,
-                   gql: q,
-               }
-
-               if len(varBytes) != 0 {
+               if idx, ok := al.index[key]; !ok {
+                   al.list = append(al.list, &allowItem{
+                       uri:  uri,
+                       gql:  q,
+                       vars: varBytes,
+                   })
+                   al.index[key] = len(al.list) - 1
+               } else {
+                   item := al.list[idx]
+                   item.gql = q
                    item.vars = varBytes
                }
 
-               //fmt.Println("%%", item.gql, string(item.vars))
-
-               al.list[gqlHash(q, varBytes, "")] = item
                varBytes = nil
 
            } else if ty == AL_VARS {
@@ -205,11 +208,15 @@ func (al *allowList) save(item *allowItem) {
    if al.active == false {
        return
    }
-   h := gqlHash(item.gql, item.vars, "")
-   if _, ok := al.list[h]; ok {
-       return
+
+   key := gqlHash(item.gql, item.vars, "")
+
+   if idx, ok := al.index[key]; ok {
+       al.list[idx] = item
+   } else {
+       al.list = append(al.list, item)
+       al.index[key] = len(al.list) - 1
    }
-   al.list[gqlHash(item.gql, item.vars, "")] = item
 
    f, err := os.Create(al.filepath)
    if err != nil {
diff --git a/serv/config.go b/serv/config.go
index 39e3d53..160cd4b 100644
--- a/serv/config.go
+++ b/serv/config.go
@@ -65,7 +65,7 @@ type config struct {
    Vars map[string]string `mapstructure:"variables"`
 
    Defaults struct {
-       Filter    []string
+       Filters   []string
        Blocklist []string
    }
 
@@ -106,28 +106,28 @@ type configRole struct {
 
    Query struct {
        Limit              int
-       Filter             []string
+       Filters            []string
        Columns            []string
        DisableAggregation bool `mapstructure:"disable_aggregation"`
        Deny               bool
    }
 
    Insert struct {
-       Filter  []string
+       Filters []string
        Columns []string
        Set     map[string]string
        Deny    bool
    }
 
    Update struct {
-       Filter  []string
+       Filters []string
        Columns []string
        Set     map[string]string
        Deny    bool
    }
 
    Delete struct {
-       Filter  []string
+       Filters []string
        Columns []string
        Deny    bool
    }
diff --git a/serv/serv.go b/serv/serv.go
index e03b9c2..a98c16e 100644
--- a/serv/serv.go
+++ b/serv/serv.go
@@ -34,25 +34,25 @@ func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
        for _, t := range r.Tables {
            query := qcode.QueryConfig{
                Limit:            t.Query.Limit,
-               Filter:           t.Query.Filter,
+               Filters:          t.Query.Filters,
                Columns:          t.Query.Columns,
                DisableFunctions: t.Query.DisableAggregation,
            }
 
            insert := qcode.InsertConfig{
-               Filter:  t.Insert.Filter,
+               Filters: t.Insert.Filters,
                Columns: t.Insert.Columns,
                Set:     t.Insert.Set,
            }
 
            update := qcode.UpdateConfig{
-               Filter:  t.Insert.Filter,
+               Filters: t.Insert.Filters,
                Columns: t.Insert.Columns,
                Set:     t.Insert.Set,
            }
 
            delete := qcode.DeleteConfig{
-               Filter:  t.Insert.Filter,
+               Filters: t.Insert.Filters,
                Columns: t.Insert.Columns,
            }
 
diff --git a/slides/overview.slide b/slides/overview.slide
index 7888781..e52ff40 100644
--- a/slides/overview.slide
+++ b/slides/overview.slide
@@ -80,7 +80,7 @@ SQL Output
   account_id: "select account_id from users where id = $user_id"
 
   defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
+    Filters: ["{ user_id: { eq: $user_id } }"]
 
   blacklist:
     - password
@@ -88,14 +88,14 @@ SQL Output
   fields:
     - name: users
-      filter: ["{ id: { eq: $user_id } }"]
+      Filters: ["{ id: { eq: $user_id } }"]
 
     - name: products
-      filter: [
+      Filters: [
        "{ price: { gt: 0 } }",
        "{ price: { lt: 8 } }"
       ]
 
    - name: me
      table: users
-      filter: ["{ id: { eq: $user_id } }"]
+      Filters: ["{ id: { eq: $user_id } }"]
diff --git a/tmpl/dev.yml b/tmpl/dev.yml
index b53a4d5..ffe9f64 100644
--- a/tmpl/dev.yml
+++ b/tmpl/dev.yml
@@ -1,4 +1,4 @@
-app_name: "{% app_name %} Development"
+app_name: "Super Graph Development"
 host_port: 0.0.0.0:8080
 web_ui: true
 
@@ -53,7 +53,7 @@ auth:
 
   # Comment this out if you want to disable setting
   # the user_id via a header. Good for testing
-  header: X-User-ID
+  creds_in_header: true
 
   rails:
     # Rails version this is used for reading the
@@ -84,7 +84,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {% app_name_slug %}_development
+  dbname: app_development
   user: postgres
   password: ''
 
@@ -100,7 +100,7 @@ database:
 
   # Define defaults to for the field key and values below
   defaults:
-    # filter: ["{ user_id: { eq: $user_id } }"]
+    # filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
   blocklist:
@@ -111,45 +111,81 @@ database:
    - encrypted
    - token
 
-  tables:
-    - name: users
-      # This filter will overwrite defaults.filter
-      # filter: ["{ id: { eq: $user_id } }"]
-      # filter_query: ["{ id: { eq: $user_id } }"]
-      filter_update: ["{ id: { eq: $user_id } }"]
-      filter_delete: ["{ id: { eq: $user_id } }"]
+tables:
+  - name: customers
+    remotes:
+      - name: payments
+        id: stripe_id
+        url: http://rails_app:3000/stripe/$id
+        path: data
+        # debug: true
+        pass_headers:
+          - cookie
+        set_headers:
+          - name: Host
+            value: 0.0.0.0
+          # - name: Authorization
+          #   value: Bearer
 
-    # - name: products
-    #   # Multiple filters are AND'd together
-    #   filter: [
-    #     "{ price: { gt: 0 } }",
-    #     "{ price: { lt: 8 } }"
-    #   ]
+  - # You can create new fields that have a
+    # real db table backing them
+    name: me
+    table: users
 
-    - name: customers
-      # No filter is used for this field not
-      # even defaults.filter
-      filter: none
+roles_query: "SELECT * FROM users as usr WHERE id = $user_id"
 
-      remotes:
-        - name: payments
-          id: stripe_id
-          url: http://rails_app:3000/stripe/$id
-          path: data
-          # debug: true
-          pass_headers:
-            - cookie
-          set_headers:
-            - name: Host
-              value: 0.0.0.0
-            # - name: Authorization
-            #   value: Bearer
+roles:
+  - name: anon
+    tables:
+      - name: products
+        limit: 10
 
-    - # You can create new fields that have a
-      # real db table backing them
-      name: me
-      table: users
-      filter: ["{ id: { eq: $user_id } }"]
+        query:
+          columns: ["id", "name", "description" ]
+          aggregation: false
 
-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
\ No newline at end of file
+        insert:
+          allow: false
+
+        update:
+          allow: false
+
+        delete:
+          allow: false
+
+  - name: user
+    tables:
+      - name: users
+        query:
+          filters: ["{ id: { _eq: $user_id } }"]
+
+      - name: products
+        query:
+          limit: 50
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          disable_aggregation: false
+
+        insert:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          set:
+            - created_at: "now"
+
+        update:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns:
+            - id
+            - name
+          set:
+            - updated_at: "now"
+
+        delete:
+          deny: true
+
+  - name: admin
+    match: id = 1
+    tables:
+      - name: users
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]
diff --git a/tmpl/prod.yml b/tmpl/prod.yml
index 29c6b45..95abfb7 100644
--- a/tmpl/prod.yml
+++ b/tmpl/prod.yml
@@ -1,4 +1,4 @@
-app_name: "{% app_name %} Production"
+app_name: "Super Graph Production"
 host_port: 0.0.0.0:8080
 web_ui: false
 
@@ -76,7 +76,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {% app_name_slug %}_production
+  dbname: {{app_name_slug}}_development
   user: postgres
   password: ''
   #pool_size: 10
@@ -90,7 +90,7 @@ database:
 
   # Define defaults to for the field key and values below
   defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
+    filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
   blocklist:
@@ -101,32 +101,19 @@ database:
    - encrypted
    - token
 
-  tables:
-    - name: users
-      # This filter will overwrite defaults.filter
-      # filter: ["{ id: { eq: $user_id } }"]
-
-    # - name: products
-    #   # Multiple filters are AND'd together
-    #   filter: [
-    #     "{ price: { gt: 0 } }",
-    #     "{ price: { lt: 8 } }"
-    #   ]
-
+tables:
   - name: customers
-    remotes:
-      - name: payments
-        id: stripe_id
-        url: http://rails_app:3000/stripe/$id
-        path: data
-        # debug: true
-        pass_headers:
-          - cookie
-        set_headers:
-          - name: Host
-            value: 0.0.0.0
-          # - name: Authorization
-          #   value: Bearer
+    # remotes:
+    #   - name: payments
+    #     id: stripe_id
+    #     url: http://rails_app:3000/stripe/$id
+    #     path: data
+    #     # pass_headers:
+    #     #   - cookie
+    #     #   - host
+    #     set_headers:
+    #       - name: Authorization
+    #         value: Bearer
 
   - # You can create new fields that have a
     # real db table backing them
@@ -158,24 +145,23 @@ roles:
     tables:
       - name: users
         query:
-          filter: ["{ id: { _eq: $user_id } }"]
+          filters: ["{ id: { _eq: $user_id } }"]
 
       - name: products
-
        query:
          limit: 50
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
          columns: ["id", "name", "description" ]
          disable_aggregation: false
 
        insert:
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
          columns: ["id", "name", "description" ]
          set:
-          - created_at: "now"
+            - created_at: "now"
 
        update:
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
          columns:
            - id
            - name
@@ -189,6 +175,5 @@ roles:
     match: id = 1
     tables:
       - name: users
-
-      # select:
-      #   filter: ["{ account_id: { _eq: $account_id } }"]
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]
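
The serv/allow.go hunks above replace the allow-list's single map with an ordered slice plus a hash-keyed index, so entries keep their insertion order in allow.list while duplicates of the same query are updated in place. The standalone Go sketch below illustrates that slice-plus-index pattern in isolation; the type and helper names (item, allowList, hash, upsert) are illustrative stand-ins and not Super Graph's actual identifiers.

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// item mirrors the rough shape of an allow-list entry.
type item struct {
	uri  string
	gql  string
	vars []byte
}

// allowList keeps items in insertion order (list) while deduplicating by a
// content hash (index) -- the slice-plus-index idea the patch introduces.
type allowList struct {
	list  []*item
	index map[string]int
}

// hash is a stand-in for Super Graph's gqlHash helper.
func hash(gql string, vars []byte) string {
	h := sha256.Sum256(append([]byte(gql), vars...))
	return hex.EncodeToString(h[:])
}

// upsert replaces an existing entry with the same hash or appends a new one,
// so repeated saves of the same query do not grow the list.
func (al *allowList) upsert(it *item) {
	key := hash(it.gql, it.vars)
	if idx, ok := al.index[key]; ok {
		al.list[idx] = it
		return
	}
	al.list = append(al.list, it)
	al.index[key] = len(al.list) - 1
}

func main() {
	al := &allowList{index: make(map[string]int)}
	al.upsert(&item{uri: "/one", gql: "{ products { id } }"})
	al.upsert(&item{uri: "/two", gql: "{ users { id } }"})
	al.upsert(&item{uri: "/three", gql: "{ products { id } }"}) // same hash, replaces the first entry

	for _, it := range al.list {
		fmt.Println(it.uri, it.gql)
	}
}
```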
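
The config rename from `filter` to `filters` keeps the plural semantics the old comments described ("Multiple filters are AND'd together"), which also shows up in the generated SQL in psql/mutate_test.go, e.g. `WHERE (("products"."price") > 0) AND (("products"."price") < 8)`. The following sketch only demonstrates how a list of conditions combines into one AND'd predicate at the string level; `andFilters` is a hypothetical helper, not Super Graph's compiler.

```go
package main

import (
	"fmt"
	"strings"
)

// andFilters joins each condition, parenthesized, with AND -- mirroring the
// shape of the WHERE clauses visible in the test SQL above.
func andFilters(conds []string) string {
	if len(conds) == 0 {
		return ""
	}
	parts := make([]string, len(conds))
	for i, c := range conds {
		parts[i] = "(" + c + ")"
	}
	return "WHERE " + strings.Join(parts, " AND ")
}

func main() {
	// Conditions analogous to filters: ["{ price: { gt: 0 } }", "{ price: { lt: 8 } }"]
	fmt.Println(andFilters([]string{
		`("products"."price") > 0`,
		`("products"."price") < 8`,
	}))
	// Output: WHERE (("products"."price") > 0) AND (("products"."price") < 8)
}
```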
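
The guide.md hunk also wraps the `account_id` variable's sub-query in parentheses and adds the comment "sub-queries must be wrapped in ()". A minimal sketch of why the parentheses matter once such a variable is spliced into a larger SQL expression; this only shows the string-level effect and assumes nothing about how Super Graph performs the substitution.

```go
package main

import "fmt"

func main() {
	// The variable value as defined in the config, without and with parentheses.
	unwrapped := "select account_id from users where id = $1"
	wrapped := "(select account_id from users where id = $1)"

	// When the variable is substituted into a comparison, only the wrapped
	// form yields a valid scalar sub-query expression in Postgres.
	fmt.Println("WHERE account_id = " + unwrapped) // syntax error once embedded
	fmt.Println("WHERE account_id = " + wrapped)   // valid: compares against the sub-query result
}
```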