Cleanup and redesign config files
parent 8acc3ed08d
commit e3660473cc

dev.yml (51 lines changed)
@@ -15,10 +15,10 @@ auth_fail_block: never
# SG_DATABASE_PASSWORD

# Auth related environment Variables
# SG_AUTH_SECRET_KEY_BASE
# SG_AUTH_PUBLIC_KEY_FILE
# SG_AUTH_URL
# SG_AUTH_PASSWORD
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE

# inflections:
# person: people

@@ -26,33 +26,23 @@ auth_fail_block: never

auth:
  type: header
  field_name: X-User-ID
  cookie: _app_session
  header: X-User-ID

# auth:
# type: rails
# cookie: _app_session
# store: cookie
# rails_cookie:
# secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...

# auth:
# type: rails
# cookie: _app_session
# store: memcache
# rails_memcache:
# host: 127.0.0.1

# auth:
# type: rails
# cookie: _app_session
# store: redis
# max_idle: 80,
# max_active: 12000,
# rails_redis:
# url: redis://127.0.0.1:6379
# password: ""
# max_idle: 80,
# max_active: 12000,

# auth:
# type: jwt
# jwt:
# provider: auth0
# cookie: _app_session
# secret: abc335bfcfdb04e50db5bb0a4d67ab9
# public_key_file: /secrets/public_key.pem
# public_key_type: ecdsa #rsa

@@ -72,10 +62,9 @@ database:
  variables:
    account_id: "select account_id from users where id = $user_id"

  # Used to add access to tables
  filters:
    users: "{ id: { _eq: $user_id } }"
    posts: "{ account_id: { _eq: $account_id } }"
  # Define defaults to for the field key and values below
  defaults:
    filter: ["{ id: { _eq: $user_id } }"]

  # Fields and table names that you wish to block
  blacklist:

@@ -86,4 +75,14 @@ database:
    - encrypted
    - token

  fields:
    - name: users
      filter: ["{ id: { _eq: $user_id } }"]

    # - name: posts
    # filter: ["{ account_id: { _eq: $account_id } }"]

    - name: my_products
      table: products
      filter: ["{ id: { _eq: $user_id } }"]

@@ -335,9 +335,10 @@ Super Graph can handle all these variations including the old and new session formats

```yaml
auth:
  type: rails
  type: rails_cookie
  cookie: _app_session
  store: cookie

  rails_cookie:
    secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...
```

@@ -345,9 +346,10 @@ auth:

```yaml
auth:
  type: rails
  type: rails_memcache
  cookie: _app_session
  store: memcache

  rails_memcache:
    host: 127.0.0.1
```

@@ -355,13 +357,14 @@ auth:

```yaml
auth:
  type: rails
  type: rails_redis
  cookie: _app_session
  store: redis
  max_idle: 80,
  max_active: 12000,

  rails_redis:
    url: redis://127.0.0.1:6379
    password: ""
    max_idle: 80
    max_active: 12000
```

### JWT Token Auth

@@ -369,8 +372,10 @@ auth:
```yaml
auth:
  type: jwt
  provider: auth0 #none
  cookie: _app_session

  jwt:
    provider: auth0 #none
    secret: abc335bfcfdb04e50db5bb0a4d67ab9
    public_key_file: /secrets/public_key.pem
    public_key_type: ecdsa #rsa

@@ -389,11 +394,13 @@ For validation a `secret` or a public key (ecdsa or rsa) is required. When using

Configuration files can be either YAML or JSON; their names are derived from the `GO_ENV` variable. For example, `GO_ENV=prod` will cause the `prod.yaml` config file to be used, and `GO_ENV=dev` will use `dev.yaml`. The path to look for config files in can be specified with the `-path <folder>` command line argument.
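As an aside, the `GO_ENV` to file-name mapping matches the `getConfigName` helper added to `serv/serv.go` further down in this commit. A minimal, hypothetical standalone sketch of that lookup:

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// configName mirrors getConfigName from serv/serv.go: GO_ENV values starting
// with "pro", "sta" or "tes" map to prod/stage/test, everything else to dev.
func configName(goEnv string) string {
	ge := strings.ToLower(goEnv)
	switch {
	case strings.HasPrefix(ge, "pro"):
		return "prod"
	case strings.HasPrefix(ge, "sta"):
		return "stage"
	case strings.HasPrefix(ge, "tes"):
		return "test"
	}
	return "dev"
}

func main() {
	// e.g. GO_ENV=production prints "prod"; an unset GO_ENV prints "dev"
	fmt.Println(configName(os.Getenv("GO_ENV")))
}
```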

```yaml
title: Super Graph Development
host_port: 0.0.0.0:8080
web_ui: true
debug_level: 1
enable_tracing: false

# When to throw a 401 on auth failure
# Throw a 401 on auth failure for queries that need auth
# valid values: always, per_query, never
auth_fail_block: never

@@ -404,10 +411,10 @@ auth_fail_block: never
# SG_DATABASE_PASSWORD

# Auth related environment Variables
# SG_AUTH_SECRET_KEY_BASE
# SG_AUTH_PUBLIC_KEY_FILE
# SG_AUTH_URL
# SG_AUTH_PASSWORD
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE

# inflections:
# person: people

@@ -415,32 +422,23 @@ auth_fail_block: never

auth:
  type: header
  field_name: X-User-ID
  cookie: _app_session
  header: X-User-ID

# auth:
# type: rails
# cookie: _app_session
# store: cookie
# rails_cookie:
# secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...

# auth:
# type: rails
# cookie: _app_session
# store: memcache
# rails_memcache:
# host: 127.0.0.1

# auth:
# type: rails
# cookie: _app_session
# store: redis
# max_idle: 80,
# max_active: 12000,
# rails_redis:
# url: redis://127.0.0.1:6379
# password: ""
# max_idle: 80,
# max_active: 12000,

# auth:
# type: jwt
# cookie: _app_session
# jwt:
# provider: auth0
# secret: abc335bfcfdb04e50db5bb0a4d67ab9
# public_key_file: /secrets/public_key.pem
# public_key_type: ecdsa #rsa

@@ -460,21 +458,37 @@ database:
  variables:
    account_id: "select account_id from users where id = $user_id"

  # Used to add access to tables
  filters:
    users: "{ id: { _eq: $user_id } }"
    posts: "{ account_id: { _eq: $account_id } }"
  # Define defaults to for the field key and values below
  defaults:
    filter: ["{ id: { _eq: $user_id } }"]

  # Fields and table names that you wish to block
  blacklist:
    - ar_internal_metadata
    - schema_migrations
    - secret
    - password
    - encrypted
    - token

  fields:
    - name: users
      filter: ["{ id: { _eq: $user_id } }"]

    # - name: posts
    # filter: ["{ account_id: { _eq: $account_id } }"]

    - name: my_products
      table: products
      filter: ["{ id: { _eq: $user_id } }"]

```

If you're deploying into environments like Kubernetes, it's useful to be able to configure things like secrets and hosts through environment variables; therefore we expose the environment variables listed below. This is especially useful for secrets, since they are usually injected via a secrets management framework, i.e. Kubernetes Secrets.

Keep in mind any config value can be overridden using environment variables; for example, `auth.jwt.public_key_type` converts to `SG_AUTH_JWT_PUBLIC_KEY_TYPE`. In short: prefix with `SG_`, upper-case the key, and change all `.` to `_`.
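For illustration, a minimal sketch of that key-to-variable conversion; the server itself relies on viper's `SetEnvPrefix("SG")` and `SetEnvKeyReplacer` (see `initConf` in `serv/serv.go` below), so this helper is purely hypothetical:

```go
package main

import (
	"fmt"
	"strings"
)

// envName converts a config key such as "auth.jwt.public_key_type"
// into its environment variable form "SG_AUTH_JWT_PUBLIC_KEY_TYPE".
func envName(key string) string {
	return "SG_" + strings.ToUpper(strings.ReplaceAll(key, ".", "_"))
}

func main() {
	fmt.Println(envName("auth.jwt.public_key_type")) // SG_AUTH_JWT_PUBLIC_KEY_TYPE
	fmt.Println(envName("database.password"))        // SG_DATABASE_PASSWORD
}
```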

#### Postgres environment variables
```bash
SG_DATABASE_HOST

@@ -485,13 +499,12 @@ SG_DATABASE_PASSWORD

#### Auth environment variables
```bash
SG_AUTH_SECRET_KEY_BASE
SG_AUTH_PUBLIC_KEY_FILE
SG_AUTH_URL
SG_AUTH_PASSWORD
SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
SG_AUTH_RAILS_REDIS_URL
SG_AUTH_RAILS_REDIS_PASSWORD
SG_AUTH_JWT_PUBLIC_KEY_FILE
```

## Deploying Super Graph

How do I deploy the Super Graph service with my existing Rails app? You have several options here. Essentially, you need to ensure your app's session cookie is passed to this service.

prod.yml (48 lines changed)
@@ -3,6 +3,9 @@ host_port: 0.0.0.0:8080
web_ui: false
debug_level: 0
enable_tracing: false

# Throw a 401 on auth failure for queries that need auth
# valid values: always, per_query, never
auth_fail_block: always

# Postgres related environment Variables

@@ -12,40 +15,33 @@ auth_fail_block: always
# SG_DATABASE_PASSWORD

# Auth related environment Variables
# SG_AUTH_SECRET_KEY_BASE
# SG_AUTH_PUBLIC_KEY_FILE
# SG_AUTH_URL
# SG_AUTH_PASSWORD
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE

# inflections:
# person: people
# sheep: sheep

auth:
  type: rails
  type: cookie
  cookie: _app_session
  store: cookie

  rails_cookie:
    secret_key_base: caf335bfcfdb04e50db5bb0a4d67ab9...

# auth:
# type: rails
# cookie: _app_session
# store: memcache
# rails_memcache:
# host: 127.0.0.1

# auth:
# type: rails
# cookie: _app_session
# store: redis
# max_idle: 80,
# max_active: 12000,
# rails_redis:
# url: redis://127.0.0.1:6379
# password: ""
# max_idle: 80,
# max_active: 12000,

# auth:
# type: jwt
# jwt:
# provider: auth0
# cookie: _app_session
# secret: abc335bfcfdb04e50db5bb0a4d67ab9
# public_key_file: /secrets/public_key.pem
# public_key_type: ecdsa #rsa

@@ -56,18 +52,20 @@ database:
  port: 5432
  dbname: app_development
  user: postgres
  password: "too many secrets"
  password: ''
  #pool_size: 10
  #max_retries: 0
  #log_level: "debug"

  # Define variables here that you want to use in filters
  variables:
    account_id: "select account_id from users where id = $user_id"

  filters:
    users: "{ id: { _eq: $user_id } }"
    #posts: "{ account_id: { _eq: $account_id } }"
  # Define defaults to for the field key and values below
  defaults:
    filter: ["{ id: { _eq: $user_id } }"]

  # Fields and table names that you wish to block
  blacklist:
    - ar_internal_metadata
    - schema_migrations

@@ -76,4 +74,6 @@ database:
    - encrypted
    - token

  fields:
    - name: users
      filter: ["{ id: { _eq: $user_id } }"]

psql/psql.go (13 lines changed)
@@ -10,15 +10,18 @@ import (
	"github.com/dosco/super-graph/util"
)

type Variables map[string]string
type Config struct {
	Schema *DBSchema
	Vars map[string]string
}

type Compiler struct {
	schema *DBSchema
	vars Variables
	vars map[string]string
}

func NewCompiler(schema *DBSchema, vars Variables) *Compiler {
	return &Compiler{schema, vars}
func NewCompiler(conf Config) *Compiler {
	return &Compiler{conf.Schema, conf.Vars}
}

func (c *Compiler) Compile(w io.Writer, qc *qcode.QCode) error {

@@ -607,7 +610,7 @@ func renderList(w io.Writer, ex *qcode.Exp) {
	io.WriteString(w, `)`)
}

func renderVal(w io.Writer, ex *qcode.Exp, vars Variables) {
func renderVal(w io.Writer, ex *qcode.Exp, vars map[string]string) {
	io.WriteString(w, ` (`)
	switch ex.Type {
	case qcode.ValBool, qcode.ValInt, qcode.ValFloat:

@@ -179,25 +179,41 @@ const (
	OrderDescNullsLast
)

type FilterMap map[string]*Exp
type Blacklist map[string]struct{}
type Config struct {
	Filter []string
	FilterMap map[string][]string
	Blacklist []string
}

func CompileFilter(filter string) (*Exp, error) {
	node, err := ParseArgValue(filter)
type Compiler struct {
	fl *Exp
	fm map[string]*Exp
	bl map[string]struct{}
}

func NewCompiler(conf Config) (*Compiler, error) {
	bl := make(map[string]struct{}, len(conf.Blacklist))

	for i := range conf.Blacklist {
		bl[strings.ToLower(conf.Blacklist[i])] = struct{}{}
	}

	fl, err := compileFilter(conf.Filter)
	if err != nil {
		return nil, err
	}

	return (&Compiler{}).compileArgNode(node)
	fm := make(map[string]*Exp, len(conf.FilterMap))

	for k, v := range conf.FilterMap {
		fil, err := compileFilter(v)
		if err != nil {
			return nil, err
		}
		fm[strings.ToLower(k)] = fil
	}

type Compiler struct {
	fm FilterMap
	bl Blacklist
}

func NewCompiler(fm FilterMap, bl Blacklist) *Compiler {
	return &Compiler{fm, bl}
	return &Compiler{fl, fm, bl}, nil
}

func (com *Compiler) CompileQuery(query string) (*QCode, error) {

@@ -767,3 +783,25 @@ func pushChildren(st *util.Stack, ex *Exp, node *Node) {
		st.Push(&expT{ex, node.Children[i]})
	}
}

func compileFilter(filter []string) (*Exp, error) {
	var fl *Exp
	com := &Compiler{}

	for i := range filter {
		node, err := ParseArgValue(filter[i])
		if err != nil {
			return nil, err
		}
		f, err := com.compileArgNode(node)
		if err != nil {
			return nil, err
		}
		if fl == nil {
			fl = f
		} else {
			fl = &Exp{Op: OpAnd, Children: []*Exp{fl, f}}
		}
	}
	return fl, nil
}

@@ -1,28 +0,0 @@
package qcode

import (
	"strings"
)

func NewBlacklist(list []string) Blacklist {
	bl := make(map[string]struct{}, len(list))

	for i := range list {
		bl[strings.ToLower(list[i])] = struct{}{}
	}
	return bl
}

func NewFilterMap(filters map[string]string) FilterMap {
	fm := make(FilterMap)

	for k, v := range filters {
		fil, err := CompileFilter(v)
		if err != nil {
			panic(err)
		}
		key := strings.ToLower(k)
		fm[key] = fil
	}
	return fm
}
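These removed helpers are superseded by the `Config`-based `qcode.NewCompiler` shown above. A minimal sketch of the new construction path, assuming the package layout in this commit (the real call site is `initCompilers` in `serv/serv.go` below):

```go
package main

import (
	"log"

	"github.com/dosco/super-graph/qcode"
)

func main() {
	// One Config-driven constructor replaces NewBlacklist, NewFilterMap and
	// the old two-argument NewCompiler.
	qc, err := qcode.NewCompiler(qcode.Config{
		Filter:    []string{`{ id: { _eq: $user_id } }`},
		FilterMap: map[string][]string{"users": {`{ id: { _eq: $user_id } }`}},
		Blacklist: []string{"secret", "password", "token"},
	})
	if err != nil {
		log.Fatal(err)
	}
	_ = qc // the next step would be qc.CompileQuery(query)
}
```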
serv/auth.go (28 lines changed)
@@ -4,7 +4,6 @@ import (
	"context"
	"errors"
	"net/http"
	"strings"
)

const (

@@ -21,9 +20,9 @@ var (
)

func headerHandler(next http.HandlerFunc) http.HandlerFunc {
	fn := conf.GetString("auth.field_name")
	fn := conf.Auth.Header
	if len(fn) == 0 {
		panic(errors.New("no auth.field_name defined"))
		panic(errors.New("no auth.header defined"))
	}

	return func(w http.ResponseWriter, r *http.Request) {

@@ -39,33 +38,26 @@ func headerHandler(next http.HandlerFunc) http.HandlerFunc {
}

func withAuth(next http.HandlerFunc) http.HandlerFunc {
	atype := strings.ToLower(conf.GetString("auth.type"))
	if len(atype) == 0 {
		return next
	}
	store := strings.ToLower(conf.GetString("auth.store"))
	at := conf.Auth.Type

	switch atype {
	switch at {
	case "header":
		return headerHandler(next)

	case "rails":
		switch store {
		case "memcache":
	case "rails_cookie":
		return railsCookieHandler(next)

	case "rails_memcache":
		return railsMemcacheHandler(next)

		case "redis":
	case "rails_redis":
		return railsRedisHandler(next)

		default:
			return railsCookieHandler(next)
		}

	case "jwt":
		return jwtHandler(next)

	default:
		panic(errors.New("unknown auth.type"))
		return next
	}

	return next

@@ -18,18 +18,14 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
	var key interface{}
	var jwtProvider int

	cookie := conf.GetString("auth.cookie")
	cookie := conf.Auth.Cookie

	provider := conf.GetString("auth.provider")
	if provider == "auth0" {
	if conf.Auth.JWT.Provider == "auth0" {
		jwtProvider = jwtAuth0
	}

	conf.BindEnv("auth.secret", "SG_AUTH_SECRET")
	secret := conf.GetString("auth.secret")

	conf.BindEnv("auth.public_key_file", "SG_AUTH_PUBLIC_KEY_FILE")
	publicKeyFile := conf.GetString("auth.public_key_file")
	secret := conf.Auth.JWT.Secret
	publicKeyFile := conf.Auth.JWT.PubKeyFile

	switch {
	case len(secret) != 0:

@@ -41,7 +37,7 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
		panic(err)
	}

	switch conf.GetString("auth.public_key_type") {
	switch conf.Auth.JWT.PubKeyType {
	case "ecdsa":
		key, err = jwt.ParseECPublicKeyFromPEM(kd)

@@ -14,31 +14,26 @@ import (
)

func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
	cookie := conf.GetString("auth.cookie")
	cookie := conf.Auth.Cookie
	if len(cookie) == 0 {
		panic(errors.New("no auth.cookie defined"))
	}

	conf.BindEnv("auth.url", "SG_AUTH_URL")
	authURL := conf.GetString("auth.url")
	authURL := conf.Auth.RailsRedis.URL
	if len(authURL) == 0 {
		panic(errors.New("no auth.url defined"))
		panic(errors.New("no auth.rails_redis.url defined"))
	}

	conf.SetDefault("auth.max_idle", 80)
	conf.SetDefault("auth.max_active", 12000)

	rp := &redis.Pool{
		MaxIdle: conf.GetInt("auth.max_idle"),
		MaxActive: conf.GetInt("auth.max_active"),
		MaxIdle: conf.Auth.RailsRedis.MaxIdle,
		MaxActive: conf.Auth.RailsRedis.MaxActive,
		Dial: func() (redis.Conn, error) {
			c, err := redis.DialURL(authURL)
			if err != nil {
				panic(err)
			}

			conf.BindEnv("auth.password", "SG_AUTH_PASSWORD")
			pwd := conf.GetString("auth.password")
			pwd := conf.Auth.RailsRedis.Password
			if len(pwd) != 0 {
				if _, err := c.Do("AUTH", pwd); err != nil {
					panic(err)

@@ -74,14 +69,14 @@ func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
}

func railsMemcacheHandler(next http.HandlerFunc) http.HandlerFunc {
	cookie := conf.GetString("auth.cookie")
	cookie := conf.Auth.Cookie
	if len(cookie) == 0 {
		panic(errors.New("no auth.cookie defined"))
	}

	host := conf.GetString("auth.host")
	host := conf.Auth.RailsMemcache.Host
	if len(host) == 0 {
		panic(errors.New("no auth.host defined"))
		panic(errors.New("no auth.rails_memcache.host defined"))
	}

	mc := memcache.New(host)

@@ -112,15 +107,14 @@ func railsMemcacheHandler(next http.HandlerFunc) http.HandlerFunc {
}

func railsCookieHandler(next http.HandlerFunc) http.HandlerFunc {
	cookie := conf.GetString("auth.cookie")
	cookie := conf.Auth.Cookie
	if len(cookie) == 0 {
		panic(errors.New("no auth.cookie defined"))
	}

	conf.BindEnv("auth.secret_key_base", "SG_AUTH_SECRET_KEY_BASE")
	secret := conf.GetString("auth.secret_key_base")
	secret := conf.Auth.RailsCookie.SecretKeyBase
	if len(secret) == 0 {
		panic(errors.New("no auth.secret_key_base defined"))
		panic(errors.New("no auth.rails_cookie.secret_key_base defined"))
	}

	return func(w http.ResponseWriter, r *http.Request) {

@@ -123,8 +123,9 @@ func apiv1Http(w http.ResponseWriter, r *http.Request) {
		errorResp(w, err)
		return
	}

	finalSQL := sqlStmt.String()
	if debug > 0 {
	if conf.DebugLevel > 0 {
		fmt.Println(finalSQL)
	}
	st := time.Now()

@@ -140,7 +141,7 @@ func apiv1Http(w http.ResponseWriter, r *http.Request) {
	et := time.Now()
	resp := gqlResp{}

	if tracing {
	if conf.EnableTracing {
		resp.Extensions = &extensions{newTrace(st, et, qc)}
	}

serv/serv.go (290 lines changed)
@@ -4,6 +4,7 @@ import (
	"errors"
	"flag"
	"fmt"
	"log"
	"net/http"
	"os"
	"strings"

@@ -26,142 +27,253 @@ const (

var (
	logger *logrus.Logger
	debug int
	conf *viper.Viper
	conf *config
	db *pg.DB
	pcompile *psql.Compiler
	qcompile *qcode.Compiler
	authFailBlock int
	tracing bool
)

func initLog() {
	logger = logrus.New()
	logger.Formatter = new(logrus.TextFormatter)
	logger.Formatter.(*logrus.TextFormatter).DisableColors = false
	logger.Formatter.(*logrus.TextFormatter).DisableTimestamp = true
	logger.Level = logrus.TraceLevel
	logger.Out = os.Stdout
type config struct {
	Env string
	HostPort string `mapstructure:"host_port"`
	WebUI bool `mapstructure:"web_ui"`
	DebugLevel int `mapstructure:"debug_level"`
	EnableTracing bool `mapstructure:"enable_tracing"`
	AuthFailBlock string `mapstructure:"auth_fail_block"`
	Inflections map[string]string

	Auth struct {
		Type string
		Cookie string
		Header string

		RailsCookie struct {
			SecretKeyBase string `mapstructure:"secret_key_base"`
		}

func initConf() {
	conf = viper.New()
		RailsMemcache struct {
			Host string
		}

	cPath := flag.String("path", ".", "Path to folder that contains config files")
		RailsRedis struct {
			URL string
			Password string
			MaxIdle int `mapstructure:"max_idle"`
			MaxActive int `mapstructure:"max_active"`
		}

		JWT struct {
			Provider string
			Secret string
			PubKeyFile string `mapstructure:"public_key_file"`
			PubKeyType string `mapstructure:"public_key_type"`
		}
	}

	DB struct {
		Type string
		Host string
		Port string
		DBName string
		User string
		Password string
		PoolSize int `mapstructure:"pool_size"`
		MaxRetries int `mapstructure:"max_retries"`
		LogLevel string `mapstructure:"log_level"`

		Variables map[string]string

		Defaults struct {
			Filter []string
			Blacklist []string
		}

		Fields []struct {
			Name string
			Filter []string
			Table string
			Blacklist []string
		}
	} `mapstructure:"database"`
}

func initLog() *logrus.Logger {
	log := logrus.New()
	log.Formatter = new(logrus.TextFormatter)
	log.Formatter.(*logrus.TextFormatter).DisableColors = false
	log.Formatter.(*logrus.TextFormatter).DisableTimestamp = true
	log.Level = logrus.TraceLevel
	log.Out = os.Stdout

	return log
}

func initConf() (*config, error) {
	vi := viper.New()

	path := flag.String("path", "./", "Path to config files")
	flag.Parse()

	conf.AddConfigPath(*cPath)
	vi.SetEnvPrefix("SG")
	vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	vi.AutomaticEnv()

	switch os.Getenv("GO_ENV") {
	case "production", "prod":
		conf.SetConfigName("prod")
	case "staging", "stage":
		conf.SetConfigName("stage")
	default:
		conf.SetConfigName("dev")
	vi.AddConfigPath(*path)
	vi.AddConfigPath("./conf")
	vi.SetConfigName(getConfigName())

	vi.SetDefault("host_port", "0.0.0.0:8080")
	vi.SetDefault("web_ui", false)
	vi.SetDefault("debug_level", 0)
	vi.SetDefault("enable_tracing", false)

	vi.SetDefault("database.type", "postgres")
	vi.SetDefault("database.host", "localhost")
	vi.SetDefault("database.port", 5432)
	vi.SetDefault("database.user", "postgres")
	vi.SetDefault("database.password", "")

	vi.SetDefault("env", "development")
	vi.BindEnv("env", "GO_ENV")

	vi.SetDefault("auth.rails_redis.max_idle", 80)
	vi.SetDefault("auth.rails_redis.max_active", 12000)

	if err := vi.ReadInConfig(); err != nil {
		return nil, err
	}

	err := conf.ReadInConfig()
	if err != nil {
		logger.Fatal(err)
	c := &config{}

	if err := vi.Unmarshal(c); err != nil {
		return nil, fmt.Errorf("unable to decode config, %v", err)
	}

	debug = conf.GetInt("debug_level")

	for k, v := range conf.GetStringMapString("inflections") {
	for k, v := range c.Inflections {
		flect.AddPlural(k, v)
	}

	conf.SetDefault("host_port", "0.0.0.0:8080")
	conf.SetDefault("web_ui", false)
	conf.SetDefault("debug_level", 0)
	conf.SetDefault("enable_tracing", false)
	authFailBlock = getAuthFailBlock(c)

	conf.SetDefault("database.type", "postgres")
	conf.SetDefault("database.host", "localhost")
	conf.SetDefault("database.port", 5432)
	conf.SetDefault("database.user", "postgres")
	conf.SetDefault("database.password", "")
	//fmt.Printf("%#v", c)

	conf.SetDefault("env", "development")
	conf.BindEnv("env", "GO_ENV")

	tracing = conf.GetBool("enable_tracing")

	switch conf.GetString("auth_fail_block") {
	case "always":
		authFailBlock = authFailBlockAlways
	case "per_query", "perquery", "query":
		authFailBlock = authFailBlockPerQuery
	case "never", "false":
		authFailBlock = authFailBlockNever
	default:
		authFailBlock = authFailBlockAlways
	}
	return c, nil
}

func initDB() {
	conf.BindEnv("database.host", "SG_DATABASE_HOST")
	conf.BindEnv("database.port", "SG_DATABASE_PORT")
	conf.BindEnv("database.user", "SG_DATABASE_USER")
	conf.BindEnv("database.password", "SG_DATABASE_PASSWORD")

	hostport := strings.Join([]string{
		conf.GetString("database.host"), conf.GetString("database.port")}, ":")

func initDB(c *config) (*pg.DB, error) {
	opt := &pg.Options{
		Addr: hostport,
		User: conf.GetString("database.user"),
		Password: conf.GetString("database.password"),
		Database: conf.GetString("database.dbname"),
		Addr: strings.Join([]string{c.DB.Host, c.DB.Port}, ":"),
		User: c.DB.User,
		Password: c.DB.Password,
		Database: c.DB.DBName,
	}

	if conf.IsSet("database.pool_size") {
		opt.PoolSize = conf.GetInt("database.pool_size")
	if c.DB.PoolSize != 0 {
		opt.PoolSize = conf.DB.PoolSize
	}

	if conf.IsSet("database.max_retries") {
		opt.MaxRetries = conf.GetInt("database.max_retries")
	if c.DB.MaxRetries != 0 {
		opt.MaxRetries = c.DB.MaxRetries
	}

	if db = pg.Connect(opt); db == nil {
		logger.Fatal(errors.New("failed to connect to postgres db"))
	}
	db := pg.Connect(opt)
	if db == nil {
		return nil, errors.New("failed to connect to postgres db")
	}

func initCompilers() {
	filters := conf.GetStringMapString("database.filters")
	blacklist := conf.GetStringSlice("database.blacklist")
	return db, nil
}

	fm := qcode.NewFilterMap(filters)
	bl := qcode.NewBlacklist(blacklist)
	qcompile = qcode.NewCompiler(fm, bl)
func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
	cdb := c.DB

	fm := make(map[string][]string, len(cdb.Fields))
	for i := range cdb.Fields {
		f := cdb.Fields[i]
		fm[strings.ToLower(f.Name)] = f.Filter
	}

	qc, err := qcode.NewCompiler(qcode.Config{
		Filter: cdb.Defaults.Filter,
		FilterMap: fm,
		Blacklist: cdb.Defaults.Blacklist,
	})
	if err != nil {
		return nil, nil, err
	}

	schema, err := psql.NewDBSchema(db)
	if err != nil {
		logger.Fatal(err)
		return nil, nil, err
	}

	varlist := conf.GetStringMapString("database.variables")
	vars := psql.NewVariables(varlist)
	pc := psql.NewCompiler(psql.Config{
		Schema: schema,
		Vars: cdb.Variables,
	})

	pcompile = psql.NewCompiler(schema, vars)
	return qc, pc, nil
}

func InitAndListen() {
	initLog()
	initConf()
	initDB()
	initCompilers()
	var err error

	logger = initLog()

	conf, err = initConf()
	if err != nil {
		log.Fatal(err)
	}

	db, err = initDB(conf)
	if err != nil {
		log.Fatal(err)
	}

	qcompile, pcompile, err = initCompilers(conf)
	if err != nil {
		log.Fatal(err)
	}

	http.HandleFunc("/api/v1/graphql", withAuth(apiv1Http))

	if conf.GetBool("web_ui") {
	if conf.WebUI {
		http.Handle("/", http.FileServer(_escFS(false)))
	}

	hp := conf.GetString("host_port")
	fmt.Printf("Super-Graph listening on %s (%s)\n", hp, conf.GetString("env"))
	fmt.Printf("Super-Graph listening on %s (%s)\n",
		conf.HostPort, conf.Env)

	logger.Fatal(http.ListenAndServe(hp, nil))
	logger.Fatal(http.ListenAndServe(conf.HostPort, nil))
}

func getConfigName() string {
	ge := strings.ToLower(os.Getenv("GO_ENV"))

	switch {
	case strings.HasPrefix(ge, "pro"):
		return "prod"

	case strings.HasPrefix(ge, "sta"):
		return "stage"

	case strings.HasPrefix(ge, "tes"):
		return "test"
	}

	return "dev"
}

func getAuthFailBlock(c *config) int {
	switch c.AuthFailBlock {
	case "always":
		return authFailBlockAlways
	case "per_query", "perquery", "query":
		return authFailBlockPerQuery
	case "never", "false":
		return authFailBlockNever
	}

	return authFailBlockAlways
}