Preserve allow.list ordering on save

commit cabd2d81ae
parent 4edc15eb98
Author: Vikram Rangnekar
Date:   2019-10-25 01:39:59 -04:00

18 changed files with 341 additions and 223 deletions

.gitignore

@@ -28,3 +28,4 @@ main
 .DS_Store
 .swp
 main
+super-graph

.wtc.yaml

@@ -0,0 +1,13 @@
+no_trace: false
+debounce: 300 # if rule has no debounce, this will be used instead
+ignore: \.git/
+trig: [start, run] # will run on start
+rules:
+  - name: start
+  - name: run
+    match: \.go$
+    ignore: web|examples|docs|_test\.go$
+    command: go run main.go serv
+  - name: test
+    match: _test\.go$
+    command: go test -cover {PKG}

@@ -11,9 +11,8 @@ RUN apk update && \
     apk add --no-cache git && \
     apk add --no-cache upx=3.95-r2
 
-RUN go get -u github.com/shanzi/wu && \
-    go install github.com/shanzi/wu && \
-    go get github.com/GeertJohan/go.rice/rice
+RUN go get -u github.com/rafaelsq/wtc && \
+    go get -u github.com/GeertJohan/go.rice/rice
 
 WORKDIR /app
 COPY . /app

@@ -46,6 +46,9 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun
 ## Contact me
 
+I'm happy to help you deploy Super Graph so feel free to reach out over
+Twitter or Discord.
+
 [twitter/dosco](https://twitter.com/dosco)
 [chat/super-graph](https://discord.gg/6pSWCTZ)

@@ -100,7 +100,7 @@ database:
   # Define defaults to for the field key and values below
   defaults:
-    # filter: ["{ user_id: { eq: $user_id } }"]
+    # filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
   blocklist:
@@ -112,17 +112,6 @@ database:
   - token
 
 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-
-  # - name: products
-  #   # Multiple filters are AND'd together
-  #   filter: [
-  #     "{ price: { gt: 0 } }",
-  #     "{ price: { lt: 8 } }"
-  #   ]
-
   - name: customers
     remotes:
       - name: payments
@@ -168,24 +157,23 @@ roles:
     tables:
       - name: users
         query:
-          filter: ["{ id: { _eq: $user_id } }"]
+          filters: ["{ id: { _eq: $user_id } }"]
 
       - name: products
         query:
           limit: 50
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
           columns: ["id", "name", "description" ]
           disable_aggregation: false
 
         insert:
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
          columns: ["id", "name", "description" ]
          set:
            - created_at: "now"
 
         update:
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
          columns:
            - id
            - name
@@ -199,6 +187,5 @@ roles:
     match: id = 1
     tables:
       - name: users
-        # query:
-        #   select:
-        #     filter: ["{ account_id: { _eq: $account_id } }"]
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]

@@ -90,7 +90,7 @@ database:
   # Define defaults to for the field key and values below
   defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
+    filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
   blocklist:
@@ -102,25 +102,7 @@ database:
   - token
 
 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-    # filter_query: ["{ id: { eq: $user_id } }"]
-    filter_update: ["{ id: { eq: $user_id } }"]
-    filter_delete: ["{ id: { eq: $user_id } }"]
-
-  - name: products
-    # Multiple filters are AND'd together
-    filter: [
-      "{ price: { gt: 0 } }",
-      "{ price: { lt: 8 } }"
-    ]
-
   - name: customers
-    # No filter is used for this field not
-    # even defaults.filter
-    filter: none
 
   # remotes:
   #   - name: payments
   #     id: stripe_id
@@ -137,7 +119,61 @@ tables:
     # real db table backing them
     name: me
     table: users
-    filter: ["{ id: { eq: $user_id } }"]
-
-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
+
+roles_query: "SELECT * FROM users as usr WHERE id = $user_id"
+
+roles:
+  - name: anon
+    tables:
+      - name: products
+        limit: 10
+
+        query:
+          columns: ["id", "name", "description" ]
+          aggregation: false
+
+        insert:
+          allow: false
+
+        update:
+          allow: false
+
+        delete:
+          allow: false
+
+  - name: user
+    tables:
+      - name: users
+        query:
+          filters: ["{ id: { _eq: $user_id } }"]
+
+      - name: products
+        query:
+          limit: 50
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          disable_aggregation: false
+
+        insert:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          set:
+            - created_at: "now"
+
+        update:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns:
+            - id
+            - name
+          set:
+            - updated_at: "now"
+
+        delete:
+          deny: true
+
+  - name: admin
+    match: id = 1
+    tables:
+      - name: users
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]

@@ -34,7 +34,7 @@ services:
     volumes:
       - .:/app
     working_dir: /app
-    command: wu -pattern="*.go" go run main.go serv
+    command: wtc
     depends_on:
       - db
       - rails_app

@@ -1043,26 +1043,35 @@ We're tried to ensure that the config file is self documenting and easy to work
 app_name: "Super Graph Development"
 host_port: 0.0.0.0:8080
 web_ui: true
-debug_level: 1
 
-# debug, info, warn, error, fatal, panic, disable
-log_level: "info"
+# debug, info, warn, error, fatal, panic
+log_level: "debug"
 
 # Disable this in development to get a list of
 # queries used. When enabled super graph
 # will only allow queries from this list
 # List saved to ./config/allow.list
-use_allow_list: true
+use_allow_list: false
 
 # Throw a 401 on auth failure for queries that need auth
 # valid values: always, per_query, never
-auth_fail_block: always
+auth_fail_block: never
 
 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the
 # response
 enable_tracing: true
 
+# Watch the config folder and reload Super Graph
+# with the new configs when a change is detected
+reload_on_config_change: true
+
+# File that points to the database seeding script
+# seed_file: seed.js
+
+# Path pointing to where the migrations can be found
+migrations_path: ./config/migrations
+
 # Postgres related environment Variables
 # SG_DATABASE_HOST
 # SG_DATABASE_PORT
@@ -1086,7 +1095,7 @@ auth:
 
   # Comment this out if you want to disable setting
   # the user_id via a header. Good for testing
-  header: X-User-ID
+  creds_in_header: true
 
 rails:
   # Rails version this is used for reading the
@@ -1097,10 +1106,10 @@ auth:
     secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566
 
     # Remote cookie store. (memcache or redis)
-    # url: redis://127.0.0.1:6379
-    # password: test
-    # max_idle: 80,
-    # max_active: 12000,
+    # url: redis://redis:6379
+    # password: ""
+    # max_idle: 80
+    # max_active: 12000
 
     # In most cases you don't need these
     # salt: "encrypted cookie"
@@ -1120,20 +1129,23 @@ database:
   dbname: app_development
   user: postgres
   password: ''
+  #schema: "public"
   #pool_size: 10
   #max_retries: 0
   #log_level: "debug"
 
   # Define variables here that you want to use in filters
+  # sub-queries must be wrapped in ()
   variables:
-    account_id: "select account_id from users where id = $user_id"
+    account_id: "(select account_id from users where id = $user_id)"
 
   # Define defaults to for the field key and values below
   defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
+    # filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
-  blacklist:
+  blocklist:
     - ar_internal_metadata
     - schema_migrations
     - secret
@@ -1142,45 +1154,84 @@ database:
   - token
 
 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-    # filter_query: ["{ id: { eq: $user_id } }"]
-    filter_update: ["{ id: { eq: $user_id } }"]
-    filter_delete: ["{ id: { eq: $user_id } }"]
-
-  - name: products
-    # Multiple filters are AND'd together
-    filter: [
-      "{ price: { gt: 0 } }",
-      "{ price: { lt: 8 } }"
-    ]
-
   - name: customers
-    # No filter is used for this field not
-    # even defaults.filter
-    filter: none
     remotes:
       - name: payments
         id: stripe_id
         url: http://rails_app:3000/stripe/$id
         path: data
-        # pass_headers:
-        #   - cookie
-        #   - host
+        # debug: true
+        pass_headers:
+          - cookie
         set_headers:
-          - name: Authorization
-            value: Bearer <stripe_api_key>
+          - name: Host
+            value: 0.0.0.0
+          # - name: Authorization
+          #   value: Bearer <stripe_api_key>
 
   - # You can create new fields that have a
     # real db table backing them
     name: me
     table: users
-    filter: ["{ id: { eq: $user_id } }"]
-
-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
+
+roles_query: "SELECT * FROM users as usr WHERE id = $user_id"
+
+roles:
+  - name: anon
+    tables:
+      - name: products
+        limit: 10
+
+        query:
+          columns: ["id", "name", "description" ]
+          aggregation: false
+
+        insert:
+          allow: false
+
+        update:
+          allow: false
+
+        delete:
+          allow: false
+
+  - name: user
+    tables:
+      - name: users
+        query:
+          filters: ["{ id: { _eq: $user_id } }"]
+
+      - name: products
+        query:
+          limit: 50
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          disable_aggregation: false
+
+        insert:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          set:
+            - created_at: "now"
+
+        update:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns:
+            - id
+            - name
+          set:
+            - updated_at: "now"
+
+        delete:
+          deny: true
+
+  - name: admin
+    match: id = 1
+    tables:
+      - name: users
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]
 ```
 
 If deploying into environments like Kubernetes it's useful to be able to configure things like secrets and hosts though environment variables therfore we expose the below environment variables. This is escpecially useful for secrets since they are usually injected in via a secrets management framework ie. Kubernetes Secrets

@@ -12,7 +12,7 @@ func simpleInsert(t *testing.T) {
     }
     }`
 
-    sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "users" (full_name, email) SELECT full_name, email FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id" FROM "users") AS "users_0") AS "done_1337";`
+    sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "users" (full_name, email) SELECT full_name, email FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id" FROM "users") AS "users_0") AS "done_1337"`
 
     vars := map[string]json.RawMessage{
         "data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
@@ -36,7 +36,7 @@ func singleInsert(t *testing.T) {
     }
     }`
 
-    sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description, user_id) SELECT name, description, user_id FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+    sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description, user_id) SELECT name, description, user_id FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
     vars := map[string]json.RawMessage{
         "insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc", "user_id": 5 }`),
@@ -60,7 +60,7 @@ func bulkInsert(t *testing.T) {
     }
     }`
 
-    sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+    sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
     vars := map[string]json.RawMessage{
         "insert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
@@ -84,7 +84,7 @@ func singleUpsert(t *testing.T) {
     }
     }`
 
-    sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+    sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
     vars := map[string]json.RawMessage{
         "upsert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
@@ -108,7 +108,7 @@ func bulkUpsert(t *testing.T) {
     }
     }`
 
-    sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+    sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
     vars := map[string]json.RawMessage{
         "upsert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
@@ -132,7 +132,7 @@ func singleUpdate(t *testing.T) {
     }
     }`
 
-    sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+    sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
     vars := map[string]json.RawMessage{
         "update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
@@ -156,7 +156,7 @@ func delete(t *testing.T) {
     }
     }`
 
-    sql := `WITH "products" AS (DELETE FROM "products" WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
+    sql := `WITH "products" AS (DELETE FROM "products" WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
 
     vars := map[string]json.RawMessage{
         "update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),

@@ -32,16 +32,16 @@ func TestMain(m *testing.M) {
     qcompile.AddRole("user", "product", qcode.TRConfig{
         Query: qcode.QueryConfig{
             Columns: []string{"id", "name", "price", "users", "customers"},
-            Filter: []string{
+            Filters: []string{
                 "{ price: { gt: 0 } }",
                 "{ price: { lt: 8 } }",
             },
         },
         Update: qcode.UpdateConfig{
-            Filter: []string{"{ user_id: { eq: $user_id } }"},
+            Filters: []string{"{ user_id: { eq: $user_id } }"},
         },
         Delete: qcode.DeleteConfig{
-            Filter: []string{
+            Filters: []string{
                 "{ price: { gt: 0 } }",
                 "{ price: { lt: 8 } }",
             },
@@ -70,7 +70,7 @@ func TestMain(m *testing.M) {
     qcompile.AddRole("user", "mes", qcode.TRConfig{
         Query: qcode.QueryConfig{
             Columns: []string{"id", "full_name", "avatar"},
-            Filter: []string{
+            Filters: []string{
                 "{ id: { eq: $user_id } }",
             },
         },

@@ -7,25 +7,25 @@ type Config struct {
 
 type QueryConfig struct {
     Limit            int
-    Filter           []string
+    Filters          []string
     Columns          []string
     DisableFunctions bool
 }
 
 type InsertConfig struct {
-    Filter  []string
+    Filters []string
     Columns []string
     Set     map[string]string
 }
 
 type UpdateConfig struct {
-    Filter  []string
+    Filters []string
     Columns []string
     Set     map[string]string
 }
 
 type DeleteConfig struct {
-    Filter  []string
+    Filters []string
     Columns []string
 }

@@ -190,7 +190,7 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
     }
 
     // query config
-    trv.query.fil, err = compileFilter(trc.Query.Filter)
+    trv.query.fil, err = compileFilter(trc.Query.Filters)
     if err != nil {
         return err
     }
@@ -201,20 +201,20 @@ func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
     trv.query.disable.funcs = trc.Query.DisableFunctions
 
     // insert config
-    if trv.insert.fil, err = compileFilter(trc.Insert.Filter); err != nil {
+    if trv.insert.fil, err = compileFilter(trc.Insert.Filters); err != nil {
         return err
     }
     trv.insert.cols = toMap(trc.Insert.Columns)
 
     // update config
-    if trv.update.fil, err = compileFilter(trc.Update.Filter); err != nil {
+    if trv.update.fil, err = compileFilter(trc.Update.Filters); err != nil {
         return err
     }
     trv.insert.cols = toMap(trc.Insert.Columns)
     trv.insert.set = trc.Insert.Set
 
     // delete config
-    if trv.delete.fil, err = compileFilter(trc.Delete.Filter); err != nil {
+    if trv.delete.fil, err = compileFilter(trc.Delete.Filters); err != nil {
         return err
     }
     trv.delete.cols = toMap(trc.Delete.Columns)

@@ -26,7 +26,8 @@ type allowItem struct {
 var _allowList allowList
 
 type allowList struct {
-    list     map[string]*allowItem
+    list     []*allowItem
+    index    map[string]int
     filepath string
     saveChan chan *allowItem
     active   bool
@@ -34,7 +35,7 @@ type allowList struct {
 
 func initAllowList(cpath string) {
     _allowList = allowList{
-        list:     make(map[string]*allowItem),
+        index:    make(map[string]int),
         saveChan: make(chan *allowItem),
         active:   true,
     }
@@ -172,19 +173,21 @@ func (al *allowList) load() {
         if c == 0 {
             if ty == AL_QUERY {
                 q := string(b[s:(e + 1)])
+                key := gqlHash(q, varBytes, "")
 
-                item := &allowItem{
-                    uri: uri,
-                    gql: q,
-                }
-
-                if len(varBytes) != 0 {
-                    item.vars = varBytes
-                }
-                //fmt.Println("%%", item.gql, string(item.vars))
-                al.list[gqlHash(q, varBytes, "")] = item
+                if idx, ok := al.index[key]; !ok {
+                    al.list = append(al.list, &allowItem{
+                        uri:  uri,
+                        gql:  q,
+                        vars: varBytes,
+                    })
+                    al.index[key] = len(al.list) - 1
+                } else {
+                    item := al.list[idx]
+                    item.gql = q
+                    item.vars = varBytes
+                }
 
                 varBytes = nil
 
             } else if ty == AL_VARS {
@@ -205,11 +208,15 @@ func (al *allowList) save(item *allowItem) {
     if al.active == false {
         return
     }
-    h := gqlHash(item.gql, item.vars, "")
 
-    if _, ok := al.list[h]; ok {
-        return
+    key := gqlHash(item.gql, item.vars, "")
+
+    if idx, ok := al.index[key]; ok {
+        al.list[idx] = item
+    } else {
+        al.list = append(al.list, item)
+        al.index[key] = len(al.list) - 1
     }
-    al.list[gqlHash(item.gql, item.vars, "")] = item
 
     f, err := os.Create(al.filepath)
     if err != nil {
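
This is the heart of the commit: the allow list moves from a map keyed by query hash (whose iteration order Go deliberately randomizes) to an append-only slice plus a hash-to-index map, so entries keep their original position when allow.list is rewritten. A minimal, self-contained sketch of that pattern, with simplified names and a stand-in hash instead of the real gqlHash:

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// item plays the role of allowItem: a stored query plus its variables.
type item struct {
	gql  string
	vars []byte
}

// orderedList pairs a slice (which fixes the output order) with an
// index map (which keeps de-duplication at O(1) by hash).
type orderedList struct {
	list  []*item
	index map[string]int
}

// hashKey is a stand-in for gqlHash.
func hashKey(gql string, vars []byte) string {
	h := sha256.Sum256(append([]byte(gql), vars...))
	return hex.EncodeToString(h[:])
}

// upsert updates an existing entry in place or appends a new one,
// so saving the list back to disk preserves insertion order.
func (ol *orderedList) upsert(it *item) {
	key := hashKey(it.gql, it.vars)
	if idx, ok := ol.index[key]; ok {
		ol.list[idx] = it
	} else {
		ol.list = append(ol.list, it)
		ol.index[key] = len(ol.list) - 1
	}
}

func main() {
	ol := &orderedList{index: make(map[string]int)}
	ol.upsert(&item{gql: "query { users { id } }"})
	ol.upsert(&item{gql: "query { products { id name } }"})
	ol.upsert(&item{gql: "query { users { id } }"}) // replaced in place, keeps slot 0

	for i, it := range ol.list {
		fmt.Println(i, it.gql)
	}
}
```

The trade-off mirrors the diff above: lookups stay constant-time through the index map, while only the slice decides the order in which entries are written back out.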

@@ -65,7 +65,7 @@ type config struct {
     Vars map[string]string `mapstructure:"variables"`
 
     Defaults struct {
-        Filter    []string
+        Filters   []string
         Blocklist []string
     }
@@ -106,28 +106,28 @@ type configRole struct {
     Query struct {
         Limit   int
-        Filter  []string
+        Filters []string
         Columns []string
         DisableAggregation bool `mapstructure:"disable_aggregation"`
         Deny    bool
     }
 
     Insert struct {
-        Filter  []string
+        Filters []string
         Columns []string
         Set     map[string]string
         Deny    bool
     }
 
     Update struct {
-        Filter  []string
+        Filters []string
         Columns []string
         Set     map[string]string
         Deny    bool
     }
 
     Delete struct {
-        Filter  []string
+        Filters []string
         Columns []string
         Deny    bool
     }
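
The renamed fields here pair with the filters: keys used in the YAML shown earlier. The config loader itself is not part of this diff, but the mapstructure tags suggest the usual name-based decoding, where untagged fields are matched to keys case-insensitively, which is why the YAML key and the Go field have to change together. An illustrative sketch using github.com/mitchellh/mapstructure directly (the real code may well go through viper or another wrapper):

```go
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// A trimmed-down copy of the configRole query struct from the diff above.
type queryConfig struct {
	Limit              int
	Filters            []string
	Columns            []string
	DisableAggregation bool `mapstructure:"disable_aggregation"`
	Deny               bool
}

func main() {
	// Roughly what a parsed YAML block such as
	//   query:
	//     limit: 50
	//     filters: ["{ user_id: { eq: $user_id } }"]
	// looks like once the YAML parser has produced a map.
	raw := map[string]interface{}{
		"limit":   50,
		"filters": []string{"{ user_id: { eq: $user_id } }"},
		"columns": []string{"id", "name", "description"},
	}

	var qc queryConfig
	// Untagged fields are matched by (case-insensitive) name, so a YAML
	// key still named filter would no longer land anywhere after the rename.
	if err := mapstructure.Decode(raw, &qc); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", qc)
}
```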

@@ -34,25 +34,25 @@ func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
     for _, t := range r.Tables {
         query := qcode.QueryConfig{
             Limit:            t.Query.Limit,
-            Filter:           t.Query.Filter,
+            Filters:          t.Query.Filters,
             Columns:          t.Query.Columns,
             DisableFunctions: t.Query.DisableAggregation,
         }
 
         insert := qcode.InsertConfig{
-            Filter:  t.Insert.Filter,
+            Filters: t.Insert.Filters,
             Columns: t.Insert.Columns,
             Set:     t.Insert.Set,
         }
 
         update := qcode.UpdateConfig{
-            Filter:  t.Insert.Filter,
+            Filters: t.Insert.Filters,
             Columns: t.Insert.Columns,
             Set:     t.Insert.Set,
         }
 
         delete := qcode.DeleteConfig{
-            Filter:  t.Insert.Filter,
+            Filters: t.Insert.Filters,
             Columns: t.Insert.Columns,
         }

@@ -80,7 +80,7 @@ SQL Output
   account_id: "select account_id from users where id = $user_id"
 
 defaults:
-  filter: ["{ user_id: { eq: $user_id } }"]
+  Filters: ["{ user_id: { eq: $user_id } }"]
 
 blacklist:
   - password
@@ -88,14 +88,14 @@ SQL Output
 
 fields:
   - name: users
-    filter: ["{ id: { eq: $user_id } }"]
+    Filters: ["{ id: { eq: $user_id } }"]
 
   - name: products
-    filter: [
+    Filters: [
       "{ price: { gt: 0 } }",
       "{ price: { lt: 8 } }"
     ]
 
   - name: me
     table: users
-    filter: ["{ id: { eq: $user_id } }"]
+    Filters: ["{ id: { eq: $user_id } }"]

@@ -1,4 +1,4 @@
-app_name: "{% app_name %} Development"
+app_name: "Super Graph Development"
 host_port: 0.0.0.0:8080
 web_ui: true
@@ -53,7 +53,7 @@ auth:
 
   # Comment this out if you want to disable setting
   # the user_id via a header. Good for testing
-  header: X-User-ID
+  creds_in_header: true
 
 rails:
   # Rails version this is used for reading the
@@ -84,7 +84,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {% app_name_slug %}_development
+  dbname: app_development
   user: postgres
   password: ''
@@ -100,7 +100,7 @@ database:
   # Define defaults to for the field key and values below
   defaults:
-    # filter: ["{ user_id: { eq: $user_id } }"]
+    # filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
   blocklist:
@@ -112,25 +112,7 @@ database:
   - token
 
 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-    # filter_query: ["{ id: { eq: $user_id } }"]
-    filter_update: ["{ id: { eq: $user_id } }"]
-    filter_delete: ["{ id: { eq: $user_id } }"]
-
-  # - name: products
-  #   # Multiple filters are AND'd together
-  #   filter: [
-  #     "{ price: { gt: 0 } }",
-  #     "{ price: { lt: 8 } }"
-  #   ]
-
   - name: customers
-    # No filter is used for this field not
-    # even defaults.filter
-    filter: none
     remotes:
       - name: payments
         id: stripe_id
@@ -149,7 +131,61 @@ database:
     # real db table backing them
     name: me
     table: users
-    filter: ["{ id: { eq: $user_id } }"]
-
-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
+
+roles_query: "SELECT * FROM users as usr WHERE id = $user_id"
+
+roles:
+  - name: anon
+    tables:
+      - name: products
+        limit: 10
+
+        query:
+          columns: ["id", "name", "description" ]
+          aggregation: false
+
+        insert:
+          allow: false
+
+        update:
+          allow: false
+
+        delete:
+          allow: false
+
+  - name: user
+    tables:
+      - name: users
+        query:
+          filters: ["{ id: { _eq: $user_id } }"]
+
+      - name: products
+        query:
+          limit: 50
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          disable_aggregation: false
+
+        insert:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          set:
+            - created_at: "now"
+
+        update:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns:
+            - id
+            - name
+          set:
+            - updated_at: "now"
+
+        delete:
+          deny: true
+
+  - name: admin
+    match: id = 1
+    tables:
+      - name: users
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]

@@ -1,4 +1,4 @@
-app_name: "{% app_name %} Production"
+app_name: "Super Graph Production"
 host_port: 0.0.0.0:8080
 web_ui: false
@@ -76,7 +76,7 @@ database:
   type: postgres
   host: db
   port: 5432
-  dbname: {% app_name_slug %}_production
+  dbname: {{app_name_slug}}_development
   user: postgres
   password: ''
   #pool_size: 10
@@ -90,7 +90,7 @@ database:
   # Define defaults to for the field key and values below
   defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
+    filters: ["{ user_id: { eq: $user_id } }"]
 
   # Field and table names that you wish to block
   blocklist:
@@ -102,29 +102,16 @@ database:
   - token
 
 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-
-  # - name: products
-  #   # Multiple filters are AND'd together
-  #   filter: [
-  #     "{ price: { gt: 0 } }",
-  #     "{ price: { lt: 8 } }"
-  #   ]
-
   - name: customers
-    remotes:
-      - name: payments
-        id: stripe_id
-        url: http://rails_app:3000/stripe/$id
-        path: data
-        # debug: true
-        pass_headers:
-          - cookie
-        set_headers:
-          - name: Host
-            value: 0.0.0.0
+    # remotes:
+    #   - name: payments
+    #     id: stripe_id
+    #     url: http://rails_app:3000/stripe/$id
+    #     path: data
+    #     # pass_headers:
+    #     #   - cookie
+    #     #   - host
+    #     set_headers:
         # - name: Authorization
         #   value: Bearer <stripe_api_key>
@@ -158,24 +145,23 @@ roles:
     tables:
       - name: users
         query:
-          filter: ["{ id: { _eq: $user_id } }"]
+          filters: ["{ id: { _eq: $user_id } }"]
 
       - name: products
        query:
          limit: 50
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
          columns: ["id", "name", "description" ]
          disable_aggregation: false
 
        insert:
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
          columns: ["id", "name", "description" ]
          set:
            - created_at: "now"
 
        update:
-          filter: ["{ user_id: { eq: $user_id } }"]
+          filters: ["{ user_id: { eq: $user_id } }"]
          columns:
            - id
            - name
@@ -189,6 +175,5 @@ roles:
     match: id = 1
     tables:
       - name: users
-        # query:
-        #   select:
-        #     filter: ["{ account_id: { _eq: $account_id } }"]
+        # query:
+        #   filters: ["{ account_id: { _eq: $account_id } }"]