Block unauthorized requests when 'anon' role is not defined
parent 0deb3596c5
commit 77a51924a7
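In short: auth_fail_block becomes a plain boolean instead of the old always / per_query / never setting, and when no 'anon' role is present in the config, Super Graph now logs a warning and forces AuthFailBlock on, so unauthenticated requests get a 401 instead of silently running under an auto-added anon role. A minimal config sketch of the new behaviour (role names are illustrative; the shape of the roles key is assumed from the Roles []configRole / Name fields in serv/config.go below):

# auth_fail_block is now a boolean; the old values always / per_query / never are gone
auth_fail_block: false

# assumed key layout, based on configRole in serv/config.go;
# if no role named "anon" exists, AuthFailBlock is forced to true at startup
roles:
  - name: anon
  - name: user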
@@ -159,7 +159,6 @@ query {
   }
 }

-
 variables {
   "data": {
     "email": "gfk@myspace.com",
@@ -272,4 +271,20 @@ query {
   }
 }

+query {
+  users {
+    id
+    email
+    picture: avatar
+    password
+    full_name
+    products(limit: 2, where: {price: {gt: 10}}) {
+      id
+      name
+      description
+      price
+    }
+  }
+}
+
@@ -12,8 +12,7 @@ log_level: "debug"
 use_allow_list: false

 # Throw a 401 on auth failure for queries that need auth
-# valid values: always, per_query, never
-auth_fail_block: never
+auth_fail_block: false

 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the
@@ -16,8 +16,7 @@ log_level: "info"
 use_allow_list: true

 # Throw a 401 on auth failure for queries that need auth
-# valid values: always, per_query, never
-auth_fail_block: always
+auth_fail_block: true

 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the
@@ -2,7 +2,6 @@ package psql

 import (
 	"encoding/json"
-	"fmt"
 	"testing"
 )

@@ -261,8 +260,6 @@ func simpleUpdateWithPresets(t *testing.T) {
 		t.Fatal(err)
 	}

-	fmt.Println(string(resSQL))
-
 	if string(resSQL) != sql {
 		t.Fatal(errNotExpected)
 	}
@@ -211,13 +211,13 @@ func (al *allowList) save(item *allowItem) {

 	key := gqlHash(item.gql, item.vars, "")

-	if idx, ok := al.index[key]; ok {
-		al.list[idx] = item
-	} else {
-		al.list = append(al.list, item)
-		al.index[key] = len(al.list) - 1
-	}
+	if _, ok := al.index[key]; ok {
+		return
+	}
+
+	al.list = append(al.list, item)
+	al.index[key] = len(al.list) - 1

 	f, err := os.Create(al.filepath)
 	if err != nil {
 		logger.Warn().Err(err).Msgf("Failed to write allow list: %s", al.filepath)
@@ -121,7 +121,7 @@ func railsCookieHandler(next http.HandlerFunc) http.HandlerFunc {

 	return func(w http.ResponseWriter, r *http.Request) {
 		ck, err := r.Cookie(cookie)
-		if err != nil {
+		if err != nil || len(ck.Value) == 0 {
 			logger.Warn().Err(err).Msg("rails cookie missing")
 			next.ServeHTTP(w, r)
 			return
serv/cmd.go (19 changed lines)
@@ -19,20 +19,15 @@ import (

 const (
 	serverName = "Super Graph"
-
-	authFailBlockAlways = iota + 1
-	authFailBlockPerQuery
-	authFailBlockNever
 )

 var (
-	logger        *zerolog.Logger
-	conf          *config
-	confPath      string
-	db            *pgxpool.Pool
-	qcompile      *qcode.Compiler
-	pcompile      *psql.Compiler
-	authFailBlock int
+	logger   *zerolog.Logger
+	conf     *config
+	confPath string
+	db       *pgxpool.Pool
+	qcompile *qcode.Compiler
+	pcompile *psql.Compiler
 )

 func Init() {
@@ -179,8 +174,6 @@ func initConf() (*config, error) {
 		return nil, fmt.Errorf("unable to decode config, %v", err)
 	}

-	authFailBlock = getAuthFailBlock(c)
-
 	logLevel, err := zerolog.ParseLevel(c.LogLevel)
 	if err != nil {
 		logger.Error().Err(err).Msg("error setting log_level")
@@ -24,7 +24,7 @@ type config struct {
 	EnableTracing  bool   `mapstructure:"enable_tracing"`
 	UseAllowList   bool   `mapstructure:"use_allow_list"`
 	WatchAndReload bool   `mapstructure:"reload_on_config_change"`
-	AuthFailBlock  string `mapstructure:"auth_fail_block"`
+	AuthFailBlock  bool   `mapstructure:"auth_fail_block"`
 	SeedFile       string `mapstructure:"seed_file"`
 	MigrationsPath string `mapstructure:"migrations_path"`
@@ -103,36 +103,41 @@ type configRemote struct {
 	} `mapstructure:"set_headers"`
 }

+type configQuery struct {
+	Limit            int
+	Filters          []string
+	Columns          []string
+	DisableFunctions bool `mapstructure:"disable_functions"`
+	Block            bool
+}
+
+type configInsert struct {
+	Filters []string
+	Columns []string
+	Presets map[string]string
+	Block   bool
+}
+
+type configUpdate struct {
+	Filters []string
+	Columns []string
+	Presets map[string]string
+	Block   bool
+}
+
+type configDelete struct {
+	Filters []string
+	Columns []string
+	Block   bool
+}
+
 type configRoleTable struct {
 	Name string

-	Query struct {
-		Limit            int
-		Filters          []string
-		Columns          []string
-		DisableFunctions bool `mapstructure:"disable_functions"`
-		Block            bool
-	}
-
-	Insert struct {
-		Filters []string
-		Columns []string
-		Presets map[string]string
-		Block   bool
-	}
-
-	Update struct {
-		Filters []string
-		Columns []string
-		Presets map[string]string
-		Block   bool
-	}
-
-	Delete struct {
-		Filters []string
-		Columns []string
-		Block   bool
-	}
+	Query  configQuery
+	Insert configInsert
+	Update configUpdate
+	Delete configDelete
 }

 type configRole struct {
@@ -213,7 +218,7 @@ func (c *config) Init(vi *viper.Viper) error {
 	rolesMap := make(map[string]struct{})

 	for i := range c.Roles {
-		role := &c.Roles[i]
+		role := c.Roles[i]

 		if _, ok := rolesMap[role.Name]; ok {
 			logger.Fatal().Msgf("duplicate role '%s' found", role.Name)
@@ -228,7 +233,8 @@ func (c *config) Init(vi *viper.Viper) error {
 	}

 	if _, ok := rolesMap["anon"]; !ok {
-		c.Roles = append(c.Roles, configRole{Name: "anon"})
+		logger.Warn().Msg("unauthenticated requests will be blocked. no role 'anon' defined")
+		c.AuthFailBlock = true
 	}

 	c.validate()
@@ -208,10 +208,8 @@ func (c *coreContext) resolveSQL() ([]byte, uint32, error) {
 	buf := &bytes.Buffer{}
 	_, err = t.ExecuteFunc(buf, varMap(c))

-	if err == errNoUserID &&
-		authFailBlock == authFailBlockPerQuery &&
-		authCheck(c) == false {
-		return nil, 0, errUnauthorized
+	if err == errNoUserID {
+		logger.Warn().Msg("no user id found. query requires an authenticated request")
 	}

 	if err != nil {
@@ -70,10 +70,9 @@ type resolver struct {
 func apiv1Http(w http.ResponseWriter, r *http.Request) {
 	ctx := &coreContext{Context: r.Context()}

-	if authFailBlock == authFailBlockAlways && authCheck(ctx) == false {
-		err := "Not authorized"
-		logger.Debug().Msg(err)
-		http.Error(w, err, 401)
+	if conf.AuthFailBlock && authCheck(ctx) == false {
+		w.WriteHeader(http.StatusUnauthorized)
+		json.NewEncoder(w).Encode(gqlResp{Error: errUnauthorized.Error()})
 		return
 	}

@@ -107,6 +107,13 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
 		case event := <-watcher.Events:
 			// Ensure that we use the correct events, as they are not uniform across
 			// platforms. See https://github.com/fsnotify/fsnotify/issues/74
+
+			if conf.UseAllowList == false && strings.HasSuffix(event.Name, "/allow.list") {
+				continue
+			}
+
+			logger.Info().Msgf("Reloading, file changed detected '%s'", event)
+
 			var trigger bool
 			switch runtime.GOOS {
 			case "darwin", "freebsd", "openbsd", "netbsd", "dragonfly":
serv/serv.go (13 changed lines)
@@ -204,16 +204,3 @@ func getConfigName() string {

 	return ge
 }
-
-func getAuthFailBlock(c *config) int {
-	switch c.AuthFailBlock {
-	case "always":
-		return authFailBlockAlways
-	case "per_query", "perquery", "query":
-		return authFailBlockPerQuery
-	case "never", "false":
-		return authFailBlockNever
-	}
-
-	return authFailBlockAlways
-}
@@ -12,8 +12,7 @@ log_level: "debug"
 use_allow_list: false

 # Throw a 401 on auth failure for queries that need auth
-# valid values: always, per_query, never
-auth_fail_block: never
+auth_fail_block: false

 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the
@@ -16,8 +16,7 @@ log_level: "info"
 use_allow_list: true

 # Throw a 401 on auth failure for queries that need auth
-# valid values: always, per_query, never
-auth_fail_block: always
+auth_fail_block: true

 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the