Cleanup and redesign config files
parent 8acc3ed08d
commit e3660473cc
85  dev.yml

@@ -15,47 +15,37 @@ auth_fail_block: never
 # SG_DATABASE_PASSWORD

 # Auth related environment Variables
-# SG_AUTH_SECRET_KEY_BASE
-# SG_AUTH_PUBLIC_KEY_FILE
-# SG_AUTH_URL
-# SG_AUTH_PASSWORD
+# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
+# SG_AUTH_RAILS_REDIS_URL
+# SG_AUTH_RAILS_REDIS_PASSWORD
+# SG_AUTH_JWT_PUBLIC_KEY_FILE

 # inflections:
 #   person: people
 #   sheep: sheep

 auth:
   type: header
-  field_name: X-User-ID
+  cookie: _app_session
+  header: X-User-ID

-# auth:
-#   type: rails
-#   cookie: _app_session
-#   store: cookie
-#   secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...
+  # rails_cookie:
+  #   secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...

-# auth:
-#   type: rails
-#   cookie: _app_session
-#   store: memcache
-#   host: 127.0.0.1
+  # rails_memcache:
+  #   host: 127.0.0.1

-# auth:
-#   type: rails
-#   cookie: _app_session
-#   store: redis
-#   max_idle: 80,
-#   max_active: 12000,
-#   url: redis://127.0.0.1:6379
-#   password: ""
+  # rails_redis:
+  #   url: redis://127.0.0.1:6379
+  #   password: ""
+  #   max_idle: 80,
+  #   max_active: 12000,

-# auth:
-#   type: jwt
-#   provider: auth0
-#   cookie: _app_session
-#   secret: abc335bfcfdb04e50db5bb0a4d67ab9
-#   public_key_file: /secrets/public_key.pem
-#   public_key_type: ecdsa #rsa
+  # jwt:
+  #   provider: auth0
+  #   secret: abc335bfcfdb04e50db5bb0a4d67ab9
+  #   public_key_file: /secrets/public_key.pem
+  #   public_key_type: ecdsa #rsa

 database:
   type: postgres

@@ -72,18 +62,27 @@ database:
   variables:
     account_id: "select account_id from users where id = $user_id"

-  # Used to add access to tables
-  filters:
-    users: "{ id: { _eq: $user_id } }"
-    posts: "{ account_id: { _eq: $account_id } }"
+  # Define defaults to for the field key and values below
+  defaults:
+    filter: ["{ id: { _eq: $user_id } }"]
+
+    # Fields and table names that you wish to block
+    blacklist:
+      - ar_internal_metadata
+      - schema_migrations
+      - secret
+      - password
+      - encrypted
+      - token

-  # Fields and table names that you wish to block
-  blacklist:
-    - ar_internal_metadata
-    - schema_migrations
-    - secret
-    - password
-    - encrypted
-    - token
+  fields:
+    - name: users
+      filter: ["{ id: { _eq: $user_id } }"]
+
+    # - name: posts
+    #   filter: ["{ account_id: { _eq: $account_id } }"]
+
+    - name: my_products
+      table: products
+      filter: ["{ id: { _eq: $user_id } }"]
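The redesigned `database.fields` section above attaches filters per field entry (optionally pointing at another table via `table:`). A minimal, self-contained Go sketch of how such entries could be folded into a lookup map keyed by lower-cased name, mirroring what `initCompilers` in serv/serv.go does later in this commit; the `field` type here is a stand-in, not the project's exported API:

```go
// Sketch only: stand-in types, not the super-graph packages.
package main

import (
	"fmt"
	"strings"
)

// field mirrors one entry under database.fields in dev.yml.
type field struct {
	Name   string
	Table  string
	Filter []string
}

func main() {
	fields := []field{
		{Name: "users", Filter: []string{"{ id: { _eq: $user_id } }"}},
		{Name: "my_products", Table: "products", Filter: []string{"{ id: { _eq: $user_id } }"}},
	}

	// Fold the list into a lookup map, lower-casing names the same way
	// the compiler setup in this commit does.
	fm := make(map[string][]string, len(fields))
	for i := range fields {
		fm[strings.ToLower(fields[i].Name)] = fields[i].Filter
	}

	fmt.Println(fm["users"], fm["my_products"])
}
```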
137  docs/guide.md

@@ -335,33 +335,36 @@ Super Graph can handle all these variations including the old and new session fo
 ```yaml
 auth:
-  type: rails
+  type: rails_cookie
   cookie: _app_session
-  store: cookie
-  secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...
+
+  rails_cookie:
+    secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...
 ```

 #### Memcache session store

 ```yaml
 auth:
-  type: rails
+  type: rails_memcache
   cookie: _app_session
-  store: memcache
-  host: 127.0.0.1
+
+  rails_memcache:
+    host: 127.0.0.1
 ```

 #### Redis session store

 ```yaml
 auth:
-  type: rails
+  type: rails_redis
   cookie: _app_session
-  store: redis
-  max_idle: 80,
-  max_active: 12000,
-  url: redis://127.0.0.1:6379
-  password: ""
+
+  rails_redis:
+    url: redis://127.0.0.1:6379
+    password: ""
+    max_idle: 80
+    max_active: 12000
 ```

 ### JWT Token Auth

@@ -369,11 +372,13 @@ auth:
 ```yaml
 auth:
   type: jwt
-  provider: auth0 #none
   cookie: _app_session
-  secret: abc335bfcfdb04e50db5bb0a4d67ab9
-  public_key_file: /secrets/public_key.pem
-  public_key_type: ecdsa #rsa
+
+  jwt:
+    provider: auth0 #none
+    secret: abc335bfcfdb04e50db5bb0a4d67ab9
+    public_key_file: /secrets/public_key.pem
+    public_key_type: ecdsa #rsa
 ```

 For JWT tokens we currently support tokens from a provider like Auth0

@@ -389,11 +394,13 @@ For validation a `secret` or a public key (ecdsa or rsa) is required. When using
 Configuration files can either be in YAML or JSON their names are derived from the `GO_ENV` variable, for example `GO_ENV=prod` will cause the `prod.yaml` config file to be used. or `GO_ENV=dev` will use the `dev.yaml`. A path to look for the config files in can be specified using the `-path <folder>` command line argument.

 ```yaml
+title: Super Graph Development
 host_port: 0.0.0.0:8080
 web_ui: true
 debug_level: 1
+enable_tracing: false

-# When to throw a 401 on auth failure
+# Throw a 401 on auth failure for queries that need auth
 # valid values: always, per_query, never
 auth_fail_block: never

@@ -404,46 +411,37 @@ auth_fail_block: never
 # SG_DATABASE_PASSWORD

 # Auth related environment Variables
-# SG_AUTH_SECRET_KEY_BASE
-# SG_AUTH_PUBLIC_KEY_FILE
-# SG_AUTH_URL
-# SG_AUTH_PASSWORD
+# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
+# SG_AUTH_RAILS_REDIS_URL
+# SG_AUTH_RAILS_REDIS_PASSWORD
+# SG_AUTH_JWT_PUBLIC_KEY_FILE

 # inflections:
 #   person: people
 #   sheep: sheep

 auth:
   type: header
-  field_name: X-User-ID
+  cookie: _app_session
+  header: X-User-ID

-# auth:
-#   type: rails
-#   cookie: _app_session
-#   store: cookie
-#   secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...
+  # rails_cookie:
+  #   secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...

-# auth:
-#   type: rails
-#   cookie: _app_session
-#   store: memcache
-#   host: 127.0.0.1
+  # rails_memcache:
+  #   host: 127.0.0.1

-# auth:
-#   type: rails
-#   cookie: _app_session
-#   store: redis
-#   max_idle: 80,
-#   max_active: 12000,
-#   url: redis://127.0.0.1:6379
-#   password: ""
+  # rails_redis:
+  #   url: redis://127.0.0.1:6379
+  #   password: ""
+  #   max_idle: 80,
+  #   max_active: 12000,

-# auth:
-#   type: jwt
-#   cookie: _app_session
-#   secret: abc335bfcfdb04e50db5bb0a4d67ab9
-#   public_key_file: /secrets/public_key.pem
-#   public_key_type: ecdsa #rsa
+  # jwt:
+  #   provider: auth0
+  #   secret: abc335bfcfdb04e50db5bb0a4d67ab9
+  #   public_key_file: /secrets/public_key.pem
+  #   public_key_type: ecdsa #rsa

 database:
   type: postgres

@@ -460,21 +458,37 @@ database:
   variables:
     account_id: "select account_id from users where id = $user_id"

-  # Used to add access to tables
-  filters:
-    users: "{ id: { _eq: $user_id } }"
-    posts: "{ account_id: { _eq: $account_id } }"
+  # Define defaults to for the field key and values below
+  defaults:
+    filter: ["{ id: { _eq: $user_id } }"]
+
+    # Fields and table names that you wish to block
+    blacklist:
+      - ar_internal_metadata
+      - schema_migrations
+      - secret
+      - password
+      - encrypted
+      - token
+
+  fields:
+    - name: users
+      filter: ["{ id: { _eq: $user_id } }"]
+
+    # - name: posts
+    #   filter: ["{ account_id: { _eq: $account_id } }"]
+
+    - name: my_products
+      table: products
+      filter: ["{ id: { _eq: $user_id } }"]

-  # Fields and table names that you wish to block
-  blacklist:
-    - secret
-    - password
-    - encrypted
-    - token
 ```

 If deploying into environments like Kubernetes it's useful to be able to configure things like secrets and hosts though environment variables therfore we expose the below environment variables. This is escpecially useful for secrets since they are usually injected in via a secrets management framework ie. Kubernetes Secrets

+Keep in mind any value can be overwritten using environment variables for example `auth.jwt.public_key_type` converts to `SG_AUTH_JWT_PUBLIC_KEY_TYPE`. In short prefix `SG_`, upper case and all `.` should changed to `_`.
+
 #### Postgres environment variables
 ```bash
 SG_DATABASE_HOST

@@ -485,13 +499,12 @@ SG_DATABASE_PASSWORD

 #### Auth environment variables
 ```bash
-SG_AUTH_SECRET_KEY_BASE
-SG_AUTH_PUBLIC_KEY_FILE
-SG_AUTH_URL
-SG_AUTH_PASSWORD
+SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
+SG_AUTH_RAILS_REDIS_URL
+SG_AUTH_RAILS_REDIS_PASSWORD
+SG_AUTH_JWT_PUBLIC_KEY_FILE
 ```

 ## Deploying Super Graph

 How do I deploy the Super Graph service with my existing rails app? You have several options here. Esentially you need to ensure your app's session cookie will be passed to this service.
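The guide's new note spells out the override convention: prefix `SG_`, upper-case the key, and turn every `.` into `_`. A small Go sketch of that mapping using spf13/viper, the same calls (`SetEnvPrefix`, `SetEnvKeyReplacer`, `AutomaticEnv`) that `initConf` in serv/serv.go adopts in this commit:

```go
// Sketch of the SG_ environment-variable override convention, e.g.
// auth.jwt.public_key_type -> SG_AUTH_JWT_PUBLIC_KEY_TYPE.
package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/viper"
)

func main() {
	os.Setenv("SG_AUTH_JWT_PUBLIC_KEY_TYPE", "ecdsa")

	vi := viper.New()
	vi.SetEnvPrefix("SG")                                // prepend SG_
	vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_")) // dots become underscores
	vi.AutomaticEnv()                                    // look up env vars on demand

	// The dotted config key resolves to the environment variable when it is set.
	fmt.Println(vi.GetString("auth.jwt.public_key_type")) // "ecdsa"
}
```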
82  prod.yml

@@ -3,6 +3,9 @@ host_port: 0.0.0.0:8080
 web_ui: false
 debug_level: 0
 enable_tracing: false

+# Throw a 401 on auth failure for queries that need auth
+# valid values: always, per_query, never
 auth_fail_block: always

 # Postgres related environment Variables

@@ -12,43 +15,36 @@ auth_fail_block: always
 # SG_DATABASE_PASSWORD

 # Auth related environment Variables
-# SG_AUTH_SECRET_KEY_BASE
-# SG_AUTH_PUBLIC_KEY_FILE
-# SG_AUTH_URL
-# SG_AUTH_PASSWORD
+# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
+# SG_AUTH_RAILS_REDIS_URL
+# SG_AUTH_RAILS_REDIS_PASSWORD
+# SG_AUTH_JWT_PUBLIC_KEY_FILE

 # inflections:
 #   person: people
 #   sheep: sheep

 auth:
-  type: rails
+  type: cookie
   cookie: _app_session
-  store: cookie
-  secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...

-# auth:
-#   type: rails
-#   cookie: _app_session
-#   store: memcache
-#   host: 127.0.0.1
+  rails_cookie:
+    secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...

-# auth:
-#   type: rails
-#   cookie: _app_session
-#   store: redis
-#   max_idle: 80,
-#   max_active: 12000,
-#   url: redis://127.0.0.1:6379
-#   password: ""
+  # rails_memcache:
+  #   host: 127.0.0.1

-# auth:
-#   type: jwt
-#   provider: auth0
-#   cookie: _app_session
-#   secret: abc335bfcfdb04e50db5bb0a4d67ab9
-#   public_key_file: /secrets/public_key.pem
-#   public_key_type: ecdsa #rsa
+  # rails_redis:
+  #   url: redis://127.0.0.1:6379
+  #   password: ""
+  #   max_idle: 80,
+  #   max_active: 12000,
+
+  # jwt:
+  #   provider: auth0
+  #   secret: abc335bfcfdb04e50db5bb0a4d67ab9
+  #   public_key_file: /secrets/public_key.pem
+  #   public_key_type: ecdsa #rsa

 database:
   type: postgres

@@ -56,24 +52,28 @@ database:
   port: 5432
   dbname: app_development
   user: postgres
-  password: "too many secrets"
+  password: ''
   #pool_size: 10
   #max_retries: 0
   #log_level: "debug"

+  # Define variables here that you want to use in filters
   variables:
     account_id: "select account_id from users where id = $user_id"

-  filters:
-    users: "{ id: { _eq: $user_id } }"
-    #posts: "{ account_id: { _eq: $account_id } }"
+  # Define defaults to for the field key and values below
+  defaults:
+    filter: ["{ id: { _eq: $user_id } }"]
+
+    # Fields and table names that you wish to block
+    blacklist:
+      - ar_internal_metadata
+      - schema_migrations
+      - secret
+      - password
+      - encrypted
+      - token

-  blacklist:
-    - ar_internal_metadata
-    - schema_migrations
-    - secret
-    - password
-    - encrypted
-    - token
+  fields:
+    - name: users
+      filter: ["{ id: { _eq: $user_id } }"]
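The prod.yml database block now leaves `password` empty and expects it to arrive via `SG_DATABASE_PASSWORD`. A minimal sketch of how that block maps onto connection options, assuming the go-pg client (import path `github.com/go-pg/pg`) that the server code in this commit uses; it mirrors `initDB` in serv/serv.go and only builds the options, no live database is needed:

```go
// Sketch only: values are the ones shown in prod.yml, not production secrets.
package main

import (
	"fmt"
	"strings"

	"github.com/go-pg/pg"
)

func main() {
	host, port := "localhost", "5432"
	opt := &pg.Options{
		Addr:     strings.Join([]string{host, port}, ":"),
		User:     "postgres",
		Password: "", // expected to come from SG_DATABASE_PASSWORD instead
		Database: "app_development",
	}

	// go-pg connects lazily, so constructing the client is enough here.
	db := pg.Connect(opt)
	defer db.Close()

	fmt.Println("configured address:", opt.Addr)
}
```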
13  psql/psql.go

@@ -10,15 +10,18 @@ import (
 	"github.com/dosco/super-graph/util"
 )

-type Variables map[string]string
+type Config struct {
+	Schema *DBSchema
+	Vars   map[string]string
+}

 type Compiler struct {
 	schema *DBSchema
-	vars   Variables
+	vars   map[string]string
 }

-func NewCompiler(schema *DBSchema, vars Variables) *Compiler {
-	return &Compiler{schema, vars}
+func NewCompiler(conf Config) *Compiler {
+	return &Compiler{conf.Schema, conf.Vars}
 }

 func (c *Compiler) Compile(w io.Writer, qc *qcode.QCode) error {

@@ -607,7 +610,7 @@ func renderList(w io.Writer, ex *qcode.Exp) {
 	io.WriteString(w, `)`)
 }

-func renderVal(w io.Writer, ex *qcode.Exp, vars Variables) {
+func renderVal(w io.Writer, ex *qcode.Exp, vars map[string]string) {
 	io.WriteString(w, ` (`)
 	switch ex.Type {
 	case qcode.ValBool, qcode.ValInt, qcode.ValFloat:
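The psql change replaces the positional `NewCompiler(schema, vars)` constructor with a single `Config` options struct. A self-contained sketch of that refactor pattern; the types below are stand-ins for illustration, not the real psql package:

```go
// Sketch of the options-struct constructor this diff introduces in psql.
package main

import "fmt"

type DBSchema struct{ Tables []string } // stand-in for psql.DBSchema

type Config struct {
	Schema *DBSchema
	Vars   map[string]string
}

type Compiler struct {
	schema *DBSchema
	vars   map[string]string
}

// NewCompiler takes one Config value instead of two positional arguments,
// so new fields can be added later without breaking every call site.
func NewCompiler(conf Config) *Compiler {
	return &Compiler{conf.Schema, conf.Vars}
}

func main() {
	pc := NewCompiler(Config{
		Schema: &DBSchema{Tables: []string{"users", "products"}},
		Vars: map[string]string{
			"account_id": "select account_id from users where id = $user_id",
		},
	})
	fmt.Println(len(pc.schema.Tables), len(pc.vars))
}
```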
(qcode package, file name not captured in the page)

@@ -179,25 +179,41 @@ const (
 	OrderDescNullsLast
 )

-type FilterMap map[string]*Exp
-type Blacklist map[string]struct{}
+type Config struct {
+	Filter    []string
+	FilterMap map[string][]string
+	Blacklist []string
+}

-func CompileFilter(filter string) (*Exp, error) {
-	node, err := ParseArgValue(filter)
-	if err != nil {
-		return nil, err
-	}
-	return (&Compiler{}).compileArgNode(node)
-}
+type Compiler struct {
+	fl *Exp
+	fm map[string]*Exp
+	bl map[string]struct{}
+}

-type Compiler struct {
-	fm FilterMap
-	bl Blacklist
-}
+func NewCompiler(conf Config) (*Compiler, error) {
+	bl := make(map[string]struct{}, len(conf.Blacklist))
+
+	for i := range conf.Blacklist {
+		bl[strings.ToLower(conf.Blacklist[i])] = struct{}{}
+	}
+
+	fl, err := compileFilter(conf.Filter)
+	if err != nil {
+		return nil, err
+	}
+
+	fm := make(map[string]*Exp, len(conf.FilterMap))
+
+	for k, v := range conf.FilterMap {
+		fil, err := compileFilter(v)
+		if err != nil {
+			return nil, err
+		}
+		fm[strings.ToLower(k)] = fil
+	}

-func NewCompiler(fm FilterMap, bl Blacklist) *Compiler {
-	return &Compiler{fm, bl}
+	return &Compiler{fl, fm, bl}, nil
 }

 func (com *Compiler) CompileQuery(query string) (*QCode, error) {

@@ -767,3 +783,25 @@ func pushChildren(st *util.Stack, ex *Exp, node *Node) {
 		st.Push(&expT{ex, node.Children[i]})
 	}
 }
+
+func compileFilter(filter []string) (*Exp, error) {
+	var fl *Exp
+	com := &Compiler{}
+
+	for i := range filter {
+		node, err := ParseArgValue(filter[i])
+		if err != nil {
+			return nil, err
+		}
+		f, err := com.compileArgNode(node)
+		if err != nil {
+			return nil, err
+		}
+		if fl == nil {
+			fl = f
+		} else {
+			fl = &Exp{Op: OpAnd, Children: []*Exp{fl, f}}
+		}
+	}
+	return fl, nil
+}
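The new `compileFilter` accepts a list of filter strings and joins the compiled expressions under a single AND node. A self-contained sketch of that combining logic; `Exp` and the parser below are stand-ins for the qcode types, kept just to show the shape of the result:

```go
// Sketch of how a slice of filters collapses into one AND expression.
package main

import "fmt"

type Op int

const OpAnd Op = iota

type Exp struct {
	Op       Op
	Raw      string
	Children []*Exp
}

// parse is a stand-in for ParseArgValue + compileArgNode in qcode.
func parse(filter string) *Exp { return &Exp{Raw: filter} }

func compileFilter(filters []string) *Exp {
	var fl *Exp
	for i := range filters {
		f := parse(filters[i])
		if fl == nil {
			fl = f // first filter becomes the root
		} else {
			// every further filter is ANDed onto what we have so far
			fl = &Exp{Op: OpAnd, Children: []*Exp{fl, f}}
		}
	}
	return fl
}

func main() {
	fl := compileFilter([]string{
		"{ id: { _eq: $user_id } }",
		"{ account_id: { _eq: $account_id } }",
	})
	fmt.Println(len(fl.Children)) // 2: both filters hang off one AND node
}
```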
(qcode package, deleted file)

@@ -1,28 +0,0 @@
-package qcode
-
-import (
-	"strings"
-)
-
-func NewBlacklist(list []string) Blacklist {
-	bl := make(map[string]struct{}, len(list))
-
-	for i := range list {
-		bl[strings.ToLower(list[i])] = struct{}{}
-	}
-	return bl
-}
-
-func NewFilterMap(filters map[string]string) FilterMap {
-	fm := make(FilterMap)
-
-	for k, v := range filters {
-		fil, err := CompileFilter(v)
-		if err != nil {
-			panic(err)
-		}
-		key := strings.ToLower(k)
-		fm[key] = fil
-	}
-	return fm
-}
30  serv/auth.go

@@ -4,7 +4,6 @@ import (
 	"context"
 	"errors"
 	"net/http"
-	"strings"
 )

 const (

@@ -21,9 +20,9 @@ var (
 )

 func headerHandler(next http.HandlerFunc) http.HandlerFunc {
-	fn := conf.GetString("auth.field_name")
+	fn := conf.Auth.Header
 	if len(fn) == 0 {
-		panic(errors.New("no auth.field_name defined"))
+		panic(errors.New("no auth.header defined"))
 	}

 	return func(w http.ResponseWriter, r *http.Request) {

@@ -39,33 +38,26 @@ func headerHandler(next http.HandlerFunc) http.HandlerFunc {
 }

 func withAuth(next http.HandlerFunc) http.HandlerFunc {
-	atype := strings.ToLower(conf.GetString("auth.type"))
-	if len(atype) == 0 {
-		return next
-	}
-	store := strings.ToLower(conf.GetString("auth.store"))
+	at := conf.Auth.Type

-	switch atype {
+	switch at {
 	case "header":
 		return headerHandler(next)

-	case "rails":
-		switch store {
-		case "memcache":
-			return railsMemcacheHandler(next)
+	case "rails_cookie":
+		return railsCookieHandler(next)

-		case "redis":
-			return railsRedisHandler(next)
+	case "rails_memcache":
+		return railsMemcacheHandler(next)

-		default:
-			return railsCookieHandler(next)
-		}
+	case "rails_redis":
+		return railsRedisHandler(next)

 	case "jwt":
 		return jwtHandler(next)

 	default:
-		panic(errors.New("unknown auth.type"))
+		return next
 	}

 	return next
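With the reworked `withAuth`, the auth type alone picks the wrapping handler, and an empty or unknown type now falls through to the unwrapped handler instead of panicking. A minimal, self-contained sketch of that wiring; the handler names and header value here are illustrative, not the package's exports:

```go
// Sketch of auth-type-based middleware selection around an API handler.
package main

import (
	"fmt"
	"net/http"
	"net/http/httptest"
)

// headerHandler rejects requests that lack the configured user-id header.
func headerHandler(next http.HandlerFunc, header string) http.HandlerFunc {
	return func(w http.ResponseWriter, r *http.Request) {
		if r.Header.Get(header) == "" {
			http.Error(w, "unauthorized", http.StatusUnauthorized)
			return
		}
		next(w, r)
	}
}

// withAuth picks a wrapper by auth type; unknown types pass through unchanged.
func withAuth(next http.HandlerFunc, authType string) http.HandlerFunc {
	switch authType {
	case "header":
		return headerHandler(next, "X-User-ID")
	default:
		return next
	}
}

func main() {
	api := func(w http.ResponseWriter, r *http.Request) { fmt.Fprint(w, "ok") }
	h := withAuth(api, "header")

	r := httptest.NewRequest("GET", "/api/v1/graphql", nil)
	r.Header.Set("X-User-ID", "1")
	w := httptest.NewRecorder()
	h(w, r)
	fmt.Println(w.Code) // 200
}
```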
(serv package, file name not captured in the page)

@@ -18,18 +18,14 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
 	var key interface{}
 	var jwtProvider int

-	cookie := conf.GetString("auth.cookie")
+	cookie := conf.Auth.Cookie

-	provider := conf.GetString("auth.provider")
-	if provider == "auth0" {
+	if conf.Auth.JWT.Provider == "auth0" {
 		jwtProvider = jwtAuth0
 	}

-	conf.BindEnv("auth.secret", "SG_AUTH_SECRET")
-	secret := conf.GetString("auth.secret")
-
-	conf.BindEnv("auth.public_key_file", "SG_AUTH_PUBLIC_KEY_FILE")
-	publicKeyFile := conf.GetString("auth.public_key_file")
+	secret := conf.Auth.JWT.Secret
+	publicKeyFile := conf.Auth.JWT.PubKeyFile

 	switch {
 	case len(secret) != 0:

@@ -41,7 +37,7 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
 			panic(err)
 		}

-		switch conf.GetString("auth.public_key_type") {
+		switch conf.Auth.JWT.PubKeyType {
 		case "ecdsa":
 			key, err = jwt.ParseECPublicKeyFromPEM(kd)
@@ -14,31 +14,26 @@ import (
 )

 func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
-	cookie := conf.GetString("auth.cookie")
+	cookie := conf.Auth.Cookie
 	if len(cookie) == 0 {
 		panic(errors.New("no auth.cookie defined"))
 	}

-	conf.BindEnv("auth.url", "SG_AUTH_URL")
-	authURL := conf.GetString("auth.url")
+	authURL := conf.Auth.RailsRedis.URL
 	if len(authURL) == 0 {
-		panic(errors.New("no auth.url defined"))
+		panic(errors.New("no auth.rails_redis.url defined"))
 	}

-	conf.SetDefault("auth.max_idle", 80)
-	conf.SetDefault("auth.max_active", 12000)
-
 	rp := &redis.Pool{
-		MaxIdle:   conf.GetInt("auth.max_idle"),
-		MaxActive: conf.GetInt("auth.max_active"),
+		MaxIdle:   conf.Auth.RailsRedis.MaxIdle,
+		MaxActive: conf.Auth.RailsRedis.MaxActive,
 		Dial: func() (redis.Conn, error) {
 			c, err := redis.DialURL(authURL)
 			if err != nil {
 				panic(err)
 			}

-			conf.BindEnv("auth.password", "SG_AUTH_PASSWORD")
-			pwd := conf.GetString("auth.password")
+			pwd := conf.Auth.RailsRedis.Password
 			if len(pwd) != 0 {
 				if _, err := c.Do("AUTH", pwd); err != nil {
 					panic(err)

@@ -74,14 +69,14 @@ func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
 }

 func railsMemcacheHandler(next http.HandlerFunc) http.HandlerFunc {
-	cookie := conf.GetString("auth.cookie")
+	cookie := conf.Auth.Cookie
 	if len(cookie) == 0 {
 		panic(errors.New("no auth.cookie defined"))
 	}

-	host := conf.GetString("auth.host")
+	host := conf.Auth.RailsMemcache.Host
 	if len(host) == 0 {
-		panic(errors.New("no auth.host defined"))
+		panic(errors.New("no auth.rails_memcache.host defined"))
 	}

 	mc := memcache.New(host)

@@ -112,15 +107,14 @@ func railsMemcacheHandler(next http.HandlerFunc) http.HandlerFunc {
 }

 func railsCookieHandler(next http.HandlerFunc) http.HandlerFunc {
-	cookie := conf.GetString("auth.cookie")
+	cookie := conf.Auth.Cookie
 	if len(cookie) == 0 {
 		panic(errors.New("no auth.cookie defined"))
 	}

-	conf.BindEnv("auth.secret_key_base", "SG_AUTH_SECRET_KEY_BASE")
-	secret := conf.GetString("auth.secret_key_base")
+	secret := conf.Auth.RailsCookie.SecretKeyBase
 	if len(secret) == 0 {
-		panic(errors.New("no auth.secret_key_base defined"))
+		panic(errors.New("no auth.rails_cookie.secret_key_base defined"))
 	}

 	return func(w http.ResponseWriter, r *http.Request) {
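The rails_redis handler now takes its pool sizing straight from the typed config, with defaults of 80 idle and 12000 active connections set in `initConf`. A sketch of building that session pool, assuming the redigo client (import path assumed to be `github.com/gomodule/redigo/redis`); the pool dials lazily, so constructing it needs no running Redis:

```go
// Sketch of the Redis session pool shape used for rails_redis auth.
package main

import (
	"fmt"

	"github.com/gomodule/redigo/redis"
)

func newSessionPool(url, password string, maxIdle, maxActive int) *redis.Pool {
	return &redis.Pool{
		MaxIdle:   maxIdle,   // auth.rails_redis.max_idle
		MaxActive: maxActive, // auth.rails_redis.max_active
		Dial: func() (redis.Conn, error) {
			c, err := redis.DialURL(url)
			if err != nil {
				return nil, err
			}
			// Authenticate only when a password is configured.
			if password != "" {
				if _, err := c.Do("AUTH", password); err != nil {
					c.Close()
					return nil, err
				}
			}
			return c, nil
		},
	}
}

func main() {
	rp := newSessionPool("redis://127.0.0.1:6379", "", 80, 12000)
	fmt.Println(rp.MaxIdle, rp.MaxActive) // connections are dialed on first use
}
```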
@@ -123,8 +123,9 @@ func apiv1Http(w http.ResponseWriter, r *http.Request) {
 		errorResp(w, err)
 		return
 	}

 	finalSQL := sqlStmt.String()
-	if debug > 0 {
+	if conf.DebugLevel > 0 {
 		fmt.Println(finalSQL)
 	}
 	st := time.Now()

@@ -140,7 +141,7 @@ func apiv1Http(w http.ResponseWriter, r *http.Request) {
 	et := time.Now()
 	resp := gqlResp{}

-	if tracing {
+	if conf.EnableTracing {
 		resp.Extensions = &extensions{newTrace(st, et, qc)}
 	}
288  serv/serv.go

@@ -4,6 +4,7 @@ import (
 	"errors"
 	"flag"
 	"fmt"
+	"log"
 	"net/http"
 	"os"
 	"strings"

@@ -26,142 +27,253 @@ const (
 var (
 	logger        *logrus.Logger
-	debug         int
-	conf          *viper.Viper
+	conf          *config
 	db            *pg.DB
 	pcompile      *psql.Compiler
 	qcompile      *qcode.Compiler
 	authFailBlock int
-	tracing       bool
 )

-func initLog() {
-	logger = logrus.New()
-	logger.Formatter = new(logrus.TextFormatter)
-	logger.Formatter.(*logrus.TextFormatter).DisableColors = false
-	logger.Formatter.(*logrus.TextFormatter).DisableTimestamp = true
-	logger.Level = logrus.TraceLevel
-	logger.Out = os.Stdout
+type config struct {
+	Env           string
+	HostPort      string `mapstructure:"host_port"`
+	WebUI         bool   `mapstructure:"web_ui"`
+	DebugLevel    int    `mapstructure:"debug_level"`
+	EnableTracing bool   `mapstructure:"enable_tracing"`
+	AuthFailBlock string `mapstructure:"auth_fail_block"`
+	Inflections   map[string]string
+
+	Auth struct {
+		Type   string
+		Cookie string
+		Header string
+
+		RailsCookie struct {
+			SecretKeyBase string `mapstructure:"secret_key_base"`
+		}
+
+		RailsMemcache struct {
+			Host string
+		}
+
+		RailsRedis struct {
+			URL       string
+			Password  string
+			MaxIdle   int `mapstructure:"max_idle"`
+			MaxActive int `mapstructure:"max_active"`
+		}
+
+		JWT struct {
+			Provider   string
+			Secret     string
+			PubKeyFile string `mapstructure:"public_key_file"`
+			PubKeyType string `mapstructure:"public_key_type"`
+		}
+	}
+
+	DB struct {
+		Type     string
+		Host     string
+		Port     string
+		DBName   string
+		User     string
+		Password string
+
+		PoolSize   int    `mapstructure:"pool_size"`
+		MaxRetries int    `mapstructure:"max_retries"`
+		LogLevel   string `mapstructure:"log_level"`
+
+		Variables map[string]string
+
+		Defaults struct {
+			Filter    []string
+			Blacklist []string
+		}
+
+		Fields []struct {
+			Name      string
+			Filter    []string
+			Table     string
+			Blacklist []string
+		}
+	} `mapstructure:"database"`
 }

-func initConf() {
-	conf = viper.New()
+func initLog() *logrus.Logger {
+	log := logrus.New()
+	log.Formatter = new(logrus.TextFormatter)
+	log.Formatter.(*logrus.TextFormatter).DisableColors = false
+	log.Formatter.(*logrus.TextFormatter).DisableTimestamp = true
+	log.Level = logrus.TraceLevel
+	log.Out = os.Stdout

-	cPath := flag.String("path", ".", "Path to folder that contains config files")
+	return log
+}
+
+func initConf() (*config, error) {
+	vi := viper.New()
+
+	path := flag.String("path", "./", "Path to config files")
 	flag.Parse()

-	conf.AddConfigPath(*cPath)
+	vi.SetEnvPrefix("SG")
+	vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
+	vi.AutomaticEnv()

-	switch os.Getenv("GO_ENV") {
-	case "production", "prod":
-		conf.SetConfigName("prod")
-	case "staging", "stage":
-		conf.SetConfigName("stage")
-	default:
-		conf.SetConfigName("dev")
-	}
+	vi.AddConfigPath(*path)
+	vi.AddConfigPath("./conf")
+	vi.SetConfigName(getConfigName())

-	err := conf.ReadInConfig()
-	if err != nil {
-		logger.Fatal(err)
-	}
+	vi.SetDefault("host_port", "0.0.0.0:8080")
+	vi.SetDefault("web_ui", false)
+	vi.SetDefault("debug_level", 0)
+	vi.SetDefault("enable_tracing", false)

-	debug = conf.GetInt("debug_level")
+	vi.SetDefault("database.type", "postgres")
+	vi.SetDefault("database.host", "localhost")
+	vi.SetDefault("database.port", 5432)
+	vi.SetDefault("database.user", "postgres")
+	vi.SetDefault("database.password", "")

-	for k, v := range conf.GetStringMapString("inflections") {
-		flect.AddPlural(k, v)
-	}
+	vi.SetDefault("env", "development")
+	vi.BindEnv("env", "GO_ENV")

-	conf.SetDefault("host_port", "0.0.0.0:8080")
-	conf.SetDefault("web_ui", false)
-	conf.SetDefault("debug_level", 0)
-	conf.SetDefault("enable_tracing", false)
+	vi.SetDefault("auth.rails_redis.max_idle", 80)
+	vi.SetDefault("auth.rails_redis.max_active", 12000)

-	conf.SetDefault("database.type", "postgres")
-	conf.SetDefault("database.host", "localhost")
-	conf.SetDefault("database.port", 5432)
-	conf.SetDefault("database.user", "postgres")
-	conf.SetDefault("database.password", "")
+	if err := vi.ReadInConfig(); err != nil {
+		return nil, err
+	}

-	conf.SetDefault("env", "development")
-	conf.BindEnv("env", "GO_ENV")
+	c := &config{}

-	tracing = conf.GetBool("enable_tracing")
+	if err := vi.Unmarshal(c); err != nil {
+		return nil, fmt.Errorf("unable to decode config, %v", err)
+	}

-	switch conf.GetString("auth_fail_block") {
-	case "always":
-		authFailBlock = authFailBlockAlways
-	case "per_query", "perquery", "query":
-		authFailBlock = authFailBlockPerQuery
-	case "never", "false":
-		authFailBlock = authFailBlockNever
-	default:
-		authFailBlock = authFailBlockAlways
+	for k, v := range c.Inflections {
+		flect.AddPlural(k, v)
 	}
+
+	authFailBlock = getAuthFailBlock(c)
+
+	//fmt.Printf("%#v", c)
+
+	return c, nil
 }

-func initDB() {
-	conf.BindEnv("database.host", "SG_DATABASE_HOST")
-	conf.BindEnv("database.port", "SG_DATABASE_PORT")
-	conf.BindEnv("database.user", "SG_DATABASE_USER")
-	conf.BindEnv("database.password", "SG_DATABASE_PASSWORD")
-
-	hostport := strings.Join([]string{
-		conf.GetString("database.host"), conf.GetString("database.port")}, ":")
-
+func initDB(c *config) (*pg.DB, error) {
 	opt := &pg.Options{
-		Addr:     hostport,
-		User:     conf.GetString("database.user"),
-		Password: conf.GetString("database.password"),
-		Database: conf.GetString("database.dbname"),
+		Addr:     strings.Join([]string{c.DB.Host, c.DB.Port}, ":"),
+		User:     c.DB.User,
+		Password: c.DB.Password,
+		Database: c.DB.DBName,
 	}

-	if conf.IsSet("database.pool_size") {
-		opt.PoolSize = conf.GetInt("database.pool_size")
+	if c.DB.PoolSize != 0 {
+		opt.PoolSize = conf.DB.PoolSize
 	}

-	if conf.IsSet("database.max_retries") {
-		opt.MaxRetries = conf.GetInt("database.max_retries")
+	if c.DB.MaxRetries != 0 {
+		opt.MaxRetries = c.DB.MaxRetries
 	}

-	if db = pg.Connect(opt); db == nil {
-		logger.Fatal(errors.New("failed to connect to postgres db"))
+	db := pg.Connect(opt)
+	if db == nil {
+		return nil, errors.New("failed to connect to postgres db")
 	}
+
+	return db, nil
 }

-func initCompilers() {
-	filters := conf.GetStringMapString("database.filters")
-	blacklist := conf.GetStringSlice("database.blacklist")
+func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
+	cdb := c.DB

-	fm := qcode.NewFilterMap(filters)
-	bl := qcode.NewBlacklist(blacklist)
-	qcompile = qcode.NewCompiler(fm, bl)
+	fm := make(map[string][]string, len(cdb.Fields))
+	for i := range cdb.Fields {
+		f := cdb.Fields[i]
+		fm[strings.ToLower(f.Name)] = f.Filter
+	}
+
+	qc, err := qcode.NewCompiler(qcode.Config{
+		Filter:    cdb.Defaults.Filter,
+		FilterMap: fm,
+		Blacklist: cdb.Defaults.Blacklist,
+	})
+	if err != nil {
+		return nil, nil, err
+	}

 	schema, err := psql.NewDBSchema(db)
 	if err != nil {
-		logger.Fatal(err)
+		return nil, nil, err
 	}

-	varlist := conf.GetStringMapString("database.variables")
-	vars := psql.NewVariables(varlist)
+	pc := psql.NewCompiler(psql.Config{
+		Schema: schema,
+		Vars:   cdb.Variables,
+	})

-	pcompile = psql.NewCompiler(schema, vars)
+	return qc, pc, nil
 }

 func InitAndListen() {
-	initLog()
-	initConf()
-	initDB()
-	initCompilers()
+	var err error
+
+	logger = initLog()
+
+	conf, err = initConf()
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	db, err = initDB(conf)
+	if err != nil {
+		log.Fatal(err)
+	}
+
+	qcompile, pcompile, err = initCompilers(conf)
+	if err != nil {
+		log.Fatal(err)
+	}

 	http.HandleFunc("/api/v1/graphql", withAuth(apiv1Http))

-	if conf.GetBool("web_ui") {
+	if conf.WebUI {
 		http.Handle("/", http.FileServer(_escFS(false)))
 	}

-	hp := conf.GetString("host_port")
-	fmt.Printf("Super-Graph listening on %s (%s)\n", hp, conf.GetString("env"))
+	fmt.Printf("Super-Graph listening on %s (%s)\n",
+		conf.HostPort, conf.Env)

-	logger.Fatal(http.ListenAndServe(hp, nil))
+	logger.Fatal(http.ListenAndServe(conf.HostPort, nil))
+}
+
+func getConfigName() string {
+	ge := strings.ToLower(os.Getenv("GO_ENV"))
+
+	switch {
+	case strings.HasPrefix(ge, "pro"):
+		return "prod"
+
+	case strings.HasPrefix(ge, "sta"):
+		return "stage"
+
+	case strings.HasPrefix(ge, "tes"):
+		return "test"
+	}
+
+	return "dev"
+}
+
+func getAuthFailBlock(c *config) int {
+	switch c.AuthFailBlock {
+	case "always":
+		return authFailBlockAlways
+	case "per_query", "perquery", "query":
+		return authFailBlockPerQuery
+	case "never", "false":
+		return authFailBlockNever
+	}
+
+	return authFailBlockAlways
 }
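The new `getConfigName` decides which config file viper loads purely from a prefix match on `GO_ENV`. A self-contained sketch of that mapping, lifted from the logic this commit adds to serv/serv.go and runnable on its own:

```go
// Sketch of the GO_ENV -> config-file-name mapping introduced in serv.go:
// values starting with "pro" pick prod, "sta" stage, "tes" test, else dev.
package main

import (
	"fmt"
	"os"
	"strings"
)

func getConfigName() string {
	ge := strings.ToLower(os.Getenv("GO_ENV"))
	switch {
	case strings.HasPrefix(ge, "pro"):
		return "prod"
	case strings.HasPrefix(ge, "sta"):
		return "stage"
	case strings.HasPrefix(ge, "tes"):
		return "test"
	}
	return "dev"
}

func main() {
	for _, env := range []string{"production", "staging", "test", "development", ""} {
		os.Setenv("GO_ENV", env)
		fmt.Printf("GO_ENV=%q -> %s.yml\n", env, getConfigName())
	}
}
```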