Remove config package
This commit is contained in:
parent
7831d27345
commit
a266517d17
2
Makefile
2
Makefile
|
@ -77,7 +77,7 @@ clean:
|
||||||
run: clean
|
run: clean
|
||||||
@go run $(BUILD_FLAGS) main.go $(ARGS)
|
@go run $(BUILD_FLAGS) main.go $(ARGS)
|
||||||
|
|
||||||
install: gen
|
install:
|
||||||
@echo $(GOPATH)
|
@echo $(GOPATH)
|
||||||
@echo "Commit Hash: `git rev-parse HEAD`"
|
@echo "Commit Hash: `git rev-parse HEAD`"
|
||||||
@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
|
@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
|
||||||
|
|
|
@ -3,13 +3,11 @@ package serv
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
type actionFn func(w http.ResponseWriter, r *http.Request) error
|
type actionFn func(w http.ResponseWriter, r *http.Request) error
|
||||||
|
|
||||||
func newAction(a *config.Action) (http.Handler, error) {
|
func newAction(a *Action) (http.Handler, error) {
|
||||||
var fn actionFn
|
var fn actionFn
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
|
@ -32,7 +30,7 @@ func newAction(a *config.Action) (http.Handler, error) {
|
||||||
return http.HandlerFunc(httpFn), nil
|
return http.HandlerFunc(httpFn), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func newSQLAction(a *config.Action) (actionFn, error) {
|
func newSQLAction(a *Action) (actionFn, error) {
|
||||||
fn := func(w http.ResponseWriter, r *http.Request) error {
|
fn := func(w http.ResponseWriter, r *http.Request) error {
|
||||||
_, err := db.ExecContext(r.Context(), a.SQL)
|
_, err := db.ExecContext(r.Context(), a.SQL)
|
||||||
return err
|
return err
|
||||||
|
|
|
@ -0,0 +1,106 @@
|
||||||
|
package serv
|
||||||
|
|
||||||
|
import (
|
||||||
|
"time"
|
||||||
|
|
||||||
|
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
||||||
|
"github.com/dosco/super-graph/core"
|
||||||
|
|
||||||
|
"github.com/spf13/viper"
|
||||||
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
LogLevelNone int = iota
|
||||||
|
LogLevelInfo
|
||||||
|
LogLevelWarn
|
||||||
|
LogLevelError
|
||||||
|
LogLevelDebug
|
||||||
|
)
|
||||||
|
|
||||||
|
type Core = core.Config
|
||||||
|
|
||||||
|
// Config struct holds the Super Graph config values
|
||||||
|
type Config struct {
|
||||||
|
Core `mapstructure:",squash"`
|
||||||
|
Serv `mapstructure:",squash"`
|
||||||
|
|
||||||
|
cpath string
|
||||||
|
vi *viper.Viper
|
||||||
|
}
|
||||||
|
|
||||||
|
// Serv struct contains config values used by the Super Graph service
|
||||||
|
type Serv struct {
|
||||||
|
AppName string `mapstructure:"app_name"`
|
||||||
|
Production bool
|
||||||
|
LogLevel string `mapstructure:"log_level"`
|
||||||
|
HostPort string `mapstructure:"host_port"`
|
||||||
|
Host string
|
||||||
|
Port string
|
||||||
|
HTTPGZip bool `mapstructure:"http_compress"`
|
||||||
|
WebUI bool `mapstructure:"web_ui"`
|
||||||
|
EnableTracing bool `mapstructure:"enable_tracing"`
|
||||||
|
WatchAndReload bool `mapstructure:"reload_on_config_change"`
|
||||||
|
AuthFailBlock bool `mapstructure:"auth_fail_block"`
|
||||||
|
SeedFile string `mapstructure:"seed_file"`
|
||||||
|
MigrationsPath string `mapstructure:"migrations_path"`
|
||||||
|
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
|
||||||
|
DebugCORS bool `mapstructure:"cors_debug"`
|
||||||
|
|
||||||
|
Auth auth.Auth
|
||||||
|
Auths []auth.Auth
|
||||||
|
|
||||||
|
DB struct {
|
||||||
|
Type string
|
||||||
|
Host string
|
||||||
|
Port uint16
|
||||||
|
DBName string
|
||||||
|
User string
|
||||||
|
Password string
|
||||||
|
Schema string
|
||||||
|
PoolSize int32 `mapstructure:"pool_size"`
|
||||||
|
MaxRetries int `mapstructure:"max_retries"`
|
||||||
|
PingTimeout time.Duration `mapstructure:"ping_timeout"`
|
||||||
|
} `mapstructure:"database"`
|
||||||
|
|
||||||
|
Actions []Action
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auth struct contains authentication related config values used by the Super Graph service
|
||||||
|
type Auth struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
Cookie string
|
||||||
|
CredsInHeader bool `mapstructure:"creds_in_header"`
|
||||||
|
|
||||||
|
Rails struct {
|
||||||
|
Version string
|
||||||
|
SecretKeyBase string `mapstructure:"secret_key_base"`
|
||||||
|
URL string
|
||||||
|
Password string
|
||||||
|
MaxIdle int `mapstructure:"max_idle"`
|
||||||
|
MaxActive int `mapstructure:"max_active"`
|
||||||
|
Salt string
|
||||||
|
SignSalt string `mapstructure:"sign_salt"`
|
||||||
|
AuthSalt string `mapstructure:"auth_salt"`
|
||||||
|
}
|
||||||
|
|
||||||
|
JWT struct {
|
||||||
|
Provider string
|
||||||
|
Secret string
|
||||||
|
PubKeyFile string `mapstructure:"public_key_file"`
|
||||||
|
PubKeyType string `mapstructure:"public_key_type"`
|
||||||
|
}
|
||||||
|
|
||||||
|
Header struct {
|
||||||
|
Name string
|
||||||
|
Value string
|
||||||
|
Exists bool
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Action struct contains config values for a Super Graph service action
|
||||||
|
type Action struct {
|
||||||
|
Name string
|
||||||
|
SQL string
|
||||||
|
AuthName string `mapstructure:"auth_name"`
|
||||||
|
}
|
|
@ -6,11 +6,8 @@ import (
|
||||||
_log "log"
|
_log "log"
|
||||||
"os"
|
"os"
|
||||||
"runtime"
|
"runtime"
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
"github.com/spf13/viper"
|
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -29,12 +26,13 @@ var (
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
var (
|
||||||
log *_log.Logger // logger
|
log *_log.Logger // logger
|
||||||
zlog *zap.Logger // fast logger
|
zlog *zap.Logger // fast logger
|
||||||
conf *config.Config // parsed config
|
logLevel int // log level
|
||||||
confPath string // path to the config file
|
conf *Config // parsed config
|
||||||
db *sql.DB // database connection pool
|
confPath string // path to the config file
|
||||||
secretKey [32]byte // encryption key
|
db *sql.DB // database connection pool
|
||||||
|
secretKey [32]byte // encryption key
|
||||||
)
|
)
|
||||||
|
|
||||||
func Cmd() {
|
func Cmd() {
|
||||||
|
@ -132,12 +130,12 @@ e.g. db:migrate -+1
|
||||||
Run: cmdNew,
|
Run: cmdNew,
|
||||||
})
|
})
|
||||||
|
|
||||||
rootCmd.AddCommand(&cobra.Command{
|
// rootCmd.AddCommand(&cobra.Command{
|
||||||
Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
|
// Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
|
||||||
Short: "Dump config to file",
|
// Short: "Dump config to file",
|
||||||
Long: "Dump current config to a file in the selected format",
|
// Long: "Dump current config to a file in the selected format",
|
||||||
Run: cmdConfDump,
|
// Run: cmdConfDump,
|
||||||
})
|
// })
|
||||||
|
|
||||||
rootCmd.AddCommand(&cobra.Command{
|
rootCmd.AddCommand(&cobra.Command{
|
||||||
Use: "version",
|
Use: "version",
|
||||||
|
|
|
@ -1,29 +1,21 @@
|
||||||
package serv
|
package serv
|
||||||
|
|
||||||
import (
|
// func cmdConfDump(cmd *cobra.Command, args []string) {
|
||||||
"fmt"
|
// if len(args) != 1 {
|
||||||
"os"
|
// cmd.Help() //nolint: errcheck
|
||||||
|
// os.Exit(1)
|
||||||
|
// }
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
// fname := fmt.Sprintf("%s.%s", config.GetConfigName(), args[0])
|
||||||
"github.com/spf13/cobra"
|
|
||||||
)
|
|
||||||
|
|
||||||
func cmdConfDump(cmd *cobra.Command, args []string) {
|
// conf, err := initConf()
|
||||||
if len(args) != 1 {
|
// if err != nil {
|
||||||
cmd.Help() //nolint: errcheck
|
// log.Fatalf("ERR failed to read config: %s", err)
|
||||||
os.Exit(1)
|
// }
|
||||||
}
|
|
||||||
|
|
||||||
fname := fmt.Sprintf("%s.%s", config.GetConfigName(), args[0])
|
// if err := conf.WriteConfigAs(fname); err != nil {
|
||||||
|
// log.Fatalf("ERR failed to write config: %s", err)
|
||||||
|
// }
|
||||||
|
|
||||||
conf, err := initConf()
|
// log.Printf("INF config dumped to ./%s", fname)
|
||||||
if err != nil {
|
// }
|
||||||
log.Fatalf("ERR failed to read config: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := conf.WriteConfigAs(fname); err != nil {
|
|
||||||
log.Fatalf("ERR failed to write config: %s", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
log.Printf("INF config dumped to ./%s", fname)
|
|
||||||
}
|
|
||||||
|
|
|
@ -26,7 +26,7 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
|
||||||
cmdDBCreate(cmd, []string{})
|
cmdDBCreate(cmd, []string{})
|
||||||
cmdDBMigrate(cmd, []string{"up"})
|
cmdDBMigrate(cmd, []string{"up"})
|
||||||
|
|
||||||
sfile := path.Join(conf.ConfigPathUsed(), conf.SeedFile)
|
sfile := path.Join(conf.cpath, conf.SeedFile)
|
||||||
_, err := os.Stat(sfile)
|
_, err := os.Stat(sfile)
|
||||||
|
|
||||||
if err == nil {
|
if err == nil {
|
||||||
|
@ -144,7 +144,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
|
||||||
|
|
||||||
m.Data = getMigrationVars()
|
m.Data = getMigrationVars()
|
||||||
|
|
||||||
err = m.LoadMigrations(path.Join(conf.ConfigPathUsed(), conf.MigrationsPath))
|
err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to load migrations: %s", err)
|
log.Fatalf("ERR failed to load migrations: %s", err)
|
||||||
}
|
}
|
||||||
|
|
|
@ -33,14 +33,14 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
|
||||||
log.Fatalf("ERR failed to connect to database: %s", err)
|
log.Fatalf("ERR failed to connect to database: %s", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
sfile := path.Join(conf.ConfigPathUsed(), conf.SeedFile)
|
sfile := path.Join(conf.cpath, conf.SeedFile)
|
||||||
|
|
||||||
b, err := ioutil.ReadFile(sfile)
|
b, err := ioutil.ReadFile(sfile)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to read seed file %s: %s", sfile, err)
|
log.Fatalf("ERR failed to read seed file %s: %s", sfile, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
sg, err = core.NewSuperGraph(conf, db)
|
sg, err = core.NewSuperGraph(&conf.Core, db)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to initialize Super Graph: %s", err)
|
log.Fatalf("ERR failed to initialize Super Graph: %s", err)
|
||||||
}
|
}
|
||||||
|
|
|
@ -28,7 +28,7 @@ func cmdServ(cmd *cobra.Command, args []string) {
|
||||||
// initResolvers()
|
// initResolvers()
|
||||||
// }
|
// }
|
||||||
|
|
||||||
sg, err = core.NewSuperGraph(conf, db)
|
sg, err = core.NewSuperGraph(&conf.Core, db)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fatalInProd(err, "failed to initialize Super Graph")
|
fatalInProd(err, "failed to initialize Super Graph")
|
||||||
}
|
}
|
||||||
|
|
|
@ -0,0 +1,115 @@
|
||||||
|
package serv
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"os"
|
||||||
|
"path"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/spf13/viper"
|
||||||
|
)
|
||||||
|
|
||||||
|
// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
|
||||||
|
// environment variable. This is the best way to create a new Super Graph config.
|
||||||
|
func ReadInConfig(configFile string) (*Config, error) {
|
||||||
|
cpath := path.Dir(configFile)
|
||||||
|
cfile := path.Base(configFile)
|
||||||
|
vi := newViper(cpath, cfile)
|
||||||
|
|
||||||
|
if err := vi.ReadInConfig(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
inherits := vi.GetString("inherits")
|
||||||
|
|
||||||
|
if len(inherits) != 0 {
|
||||||
|
vi = newViper(cpath, inherits)
|
||||||
|
|
||||||
|
if err := vi.ReadInConfig(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if vi.IsSet("inherits") {
|
||||||
|
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
|
||||||
|
inherits,
|
||||||
|
vi.GetString("inherits"))
|
||||||
|
}
|
||||||
|
|
||||||
|
vi.SetConfigName(cfile)
|
||||||
|
|
||||||
|
if err := vi.MergeInConfig(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
c := &Config{cpath: cpath, vi: vi}
|
||||||
|
|
||||||
|
if err := vi.Unmarshal(&c); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to decode config, %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(c.Core.AllowListFile) == 0 {
|
||||||
|
c.Core.AllowListFile = path.Join(cpath, "allow.list")
|
||||||
|
}
|
||||||
|
|
||||||
|
return c, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func newViper(configPath, configFile string) *viper.Viper {
|
||||||
|
vi := viper.New()
|
||||||
|
|
||||||
|
vi.SetEnvPrefix("SG")
|
||||||
|
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
|
||||||
|
vi.AutomaticEnv()
|
||||||
|
|
||||||
|
vi.AddConfigPath(configPath)
|
||||||
|
vi.SetConfigName(configFile)
|
||||||
|
vi.AddConfigPath("./config")
|
||||||
|
|
||||||
|
vi.SetDefault("host_port", "0.0.0.0:8080")
|
||||||
|
vi.SetDefault("web_ui", false)
|
||||||
|
vi.SetDefault("enable_tracing", false)
|
||||||
|
vi.SetDefault("auth_fail_block", "always")
|
||||||
|
vi.SetDefault("seed_file", "seed.js")
|
||||||
|
|
||||||
|
vi.SetDefault("database.type", "postgres")
|
||||||
|
vi.SetDefault("database.host", "localhost")
|
||||||
|
vi.SetDefault("database.port", 5432)
|
||||||
|
vi.SetDefault("database.user", "postgres")
|
||||||
|
vi.SetDefault("database.schema", "public")
|
||||||
|
|
||||||
|
vi.SetDefault("env", "development")
|
||||||
|
|
||||||
|
vi.BindEnv("env", "GO_ENV") //nolint: errcheck
|
||||||
|
vi.BindEnv("host", "HOST") //nolint: errcheck
|
||||||
|
vi.BindEnv("port", "PORT") //nolint: errcheck
|
||||||
|
|
||||||
|
vi.SetDefault("auth.rails.max_idle", 80)
|
||||||
|
vi.SetDefault("auth.rails.max_active", 12000)
|
||||||
|
|
||||||
|
return vi
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetConfigName() string {
|
||||||
|
if len(os.Getenv("GO_ENV")) == 0 {
|
||||||
|
return "dev"
|
||||||
|
}
|
||||||
|
|
||||||
|
ge := strings.ToLower(os.Getenv("GO_ENV"))
|
||||||
|
|
||||||
|
switch {
|
||||||
|
case strings.HasPrefix(ge, "pro"):
|
||||||
|
return "prod"
|
||||||
|
|
||||||
|
case strings.HasPrefix(ge, "sta"):
|
||||||
|
return "stage"
|
||||||
|
|
||||||
|
case strings.HasPrefix(ge, "tes"):
|
||||||
|
return "test"
|
||||||
|
|
||||||
|
case strings.HasPrefix(ge, "dev"):
|
||||||
|
return "dev"
|
||||||
|
}
|
||||||
|
|
||||||
|
return ge
|
||||||
|
}
|
|
@ -9,7 +9,6 @@ import (
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
"github.com/rs/cors"
|
"github.com/rs/cors"
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
|
@ -83,7 +82,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
|
||||||
|
|
||||||
res, err := sg.GraphQL(ct, req.Query, req.Vars)
|
res, err := sg.GraphQL(ct, req.Query, req.Vars)
|
||||||
|
|
||||||
if conf.LogLevel() >= config.LogLevelDebug {
|
if logLevel >= LogLevelDebug {
|
||||||
log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
|
log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -94,7 +93,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
|
||||||
|
|
||||||
json.NewEncoder(w).Encode(res)
|
json.NewEncoder(w).Encode(res)
|
||||||
|
|
||||||
if conf.LogLevel() >= config.LogLevelInfo {
|
if logLevel >= LogLevelInfo {
|
||||||
zlog.Info("success",
|
zlog.Info("success",
|
||||||
zap.String("op", res.Operation()),
|
zap.String("op", res.Operation()),
|
||||||
zap.String("name", res.QueryName()),
|
zap.String("name", res.QueryName()),
|
||||||
|
@ -111,7 +110,7 @@ func renderErr(w http.ResponseWriter, err error, res *core.Result) {
|
||||||
|
|
||||||
json.NewEncoder(w).Encode(&errorResp{err})
|
json.NewEncoder(w).Encode(&errorResp{err})
|
||||||
|
|
||||||
if conf.LogLevel() >= config.LogLevelError {
|
if logLevel >= LogLevelError {
|
||||||
if res != nil {
|
if res != nil {
|
||||||
zlog.Error(err.Error(),
|
zlog.Error(err.Error(),
|
||||||
zap.String("op", res.Operation()),
|
zap.String("op", res.Operation()),
|
||||||
|
|
|
@ -3,17 +3,82 @@ package serv
|
||||||
import (
|
import (
|
||||||
"database/sql"
|
"database/sql"
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"path"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
_ "github.com/jackc/pgx/v4/stdlib"
|
_ "github.com/jackc/pgx/v4/stdlib"
|
||||||
)
|
)
|
||||||
|
|
||||||
func initConf() (*config.Config, error) {
|
func initConf() (*Config, error) {
|
||||||
return config.NewConfigWithLogger(confPath, log)
|
c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
switch c.LogLevel {
|
||||||
|
case "debug":
|
||||||
|
logLevel = LogLevelDebug
|
||||||
|
case "error":
|
||||||
|
logLevel = LogLevelError
|
||||||
|
case "warn":
|
||||||
|
logLevel = LogLevelWarn
|
||||||
|
case "info":
|
||||||
|
logLevel = LogLevelInfo
|
||||||
|
default:
|
||||||
|
logLevel = LogLevelNone
|
||||||
|
}
|
||||||
|
|
||||||
|
// Auths: validate and sanitize
|
||||||
|
am := make(map[string]struct{})
|
||||||
|
|
||||||
|
for i := 0; i < len(c.Auths); i++ {
|
||||||
|
a := &c.Auths[i]
|
||||||
|
a.Name = sanitize(a.Name)
|
||||||
|
|
||||||
|
if _, ok := am[a.Name]; ok {
|
||||||
|
c.Auths = append(c.Auths[:i], c.Auths[i+1:]...)
|
||||||
|
log.Printf("WRN duplicate auth found: %s", a.Name)
|
||||||
|
}
|
||||||
|
am[a.Name] = struct{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
// Actions: validate and sanitize
|
||||||
|
axm := make(map[string]struct{})
|
||||||
|
|
||||||
|
for i := 0; i < len(c.Actions); i++ {
|
||||||
|
a := &c.Actions[i]
|
||||||
|
a.Name = sanitize(a.Name)
|
||||||
|
a.AuthName = sanitize(a.AuthName)
|
||||||
|
|
||||||
|
if _, ok := axm[a.Name]; ok {
|
||||||
|
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
|
||||||
|
log.Printf("WRN duplicate action found: %s", a.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
if _, ok := am[a.AuthName]; !ok {
|
||||||
|
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
|
||||||
|
log.Printf("WRN invalid auth_name '%s' for auth: %s", a.AuthName, a.Name)
|
||||||
|
}
|
||||||
|
axm[a.Name] = struct{}{}
|
||||||
|
}
|
||||||
|
|
||||||
|
var anonFound bool
|
||||||
|
|
||||||
|
for _, r := range c.Roles {
|
||||||
|
if sanitize(r.Name) == "anon" {
|
||||||
|
anonFound = true
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if !anonFound {
|
||||||
|
log.Printf("WRN unauthenticated requests will be blocked. no role 'anon' defined")
|
||||||
|
c.AuthFailBlock = false
|
||||||
|
}
|
||||||
|
|
||||||
|
return c, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func initDB(c *config.Config) (*sql.DB, error) {
|
func initDB(c *Config) (*sql.DB, error) {
|
||||||
var db *sql.DB
|
var db *sql.DB
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
|
|
|
@ -5,11 +5,43 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"net/http"
|
"net/http"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
)
|
)
|
||||||
|
|
||||||
func SimpleHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
|
// Auth struct contains authentication related config values used by the Super Graph service
|
||||||
|
type Auth struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
Cookie string
|
||||||
|
CredsInHeader bool `mapstructure:"creds_in_header"`
|
||||||
|
|
||||||
|
Rails struct {
|
||||||
|
Version string
|
||||||
|
SecretKeyBase string `mapstructure:"secret_key_base"`
|
||||||
|
URL string
|
||||||
|
Password string
|
||||||
|
MaxIdle int `mapstructure:"max_idle"`
|
||||||
|
MaxActive int `mapstructure:"max_active"`
|
||||||
|
Salt string
|
||||||
|
SignSalt string `mapstructure:"sign_salt"`
|
||||||
|
AuthSalt string `mapstructure:"auth_salt"`
|
||||||
|
}
|
||||||
|
|
||||||
|
JWT struct {
|
||||||
|
Provider string
|
||||||
|
Secret string
|
||||||
|
PubKeyFile string `mapstructure:"public_key_file"`
|
||||||
|
PubKeyType string `mapstructure:"public_key_type"`
|
||||||
|
}
|
||||||
|
|
||||||
|
Header struct {
|
||||||
|
Name string
|
||||||
|
Value string
|
||||||
|
Exists bool
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
|
||||||
return func(w http.ResponseWriter, r *http.Request) {
|
return func(w http.ResponseWriter, r *http.Request) {
|
||||||
ctx := r.Context()
|
ctx := r.Context()
|
||||||
|
|
||||||
|
@ -32,7 +64,7 @@ func SimpleHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error)
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func HeaderHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
|
func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
|
||||||
hdr := ac.Header
|
hdr := ac.Header
|
||||||
|
|
||||||
if len(hdr.Name) == 0 {
|
if len(hdr.Name) == 0 {
|
||||||
|
@ -64,7 +96,7 @@ func HeaderHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error)
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func WithAuth(next http.Handler, ac *config.Auth) (http.Handler, error) {
|
func WithAuth(next http.Handler, ac *Auth) (http.Handler, error) {
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if ac.CredsInHeader {
|
if ac.CredsInHeader {
|
||||||
|
|
|
@ -7,7 +7,6 @@ import (
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
jwt "github.com/dgrijalva/jwt-go"
|
jwt "github.com/dgrijalva/jwt-go"
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -16,7 +15,7 @@ const (
|
||||||
jwtAuth0 int = iota + 1
|
jwtAuth0 int = iota + 1
|
||||||
)
|
)
|
||||||
|
|
||||||
func JwtHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
|
func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
|
||||||
var key interface{}
|
var key interface{}
|
||||||
var jwtProvider int
|
var jwtProvider int
|
||||||
|
|
||||||
|
|
|
@ -9,13 +9,12 @@ import (
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/bradfitz/gomemcache/memcache"
|
"github.com/bradfitz/gomemcache/memcache"
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/dosco/super-graph/core"
|
|
||||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/rails"
|
"github.com/dosco/super-graph/cmd/internal/serv/internal/rails"
|
||||||
|
"github.com/dosco/super-graph/core"
|
||||||
"github.com/garyburd/redigo/redis"
|
"github.com/garyburd/redigo/redis"
|
||||||
)
|
)
|
||||||
|
|
||||||
func RailsHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
|
func RailsHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
|
||||||
ru := ac.Rails.URL
|
ru := ac.Rails.URL
|
||||||
|
|
||||||
if strings.HasPrefix(ru, "memcache:") {
|
if strings.HasPrefix(ru, "memcache:") {
|
||||||
|
@ -29,7 +28,7 @@ func RailsHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error)
|
||||||
return RailsCookieHandler(ac, next)
|
return RailsCookieHandler(ac, next)
|
||||||
}
|
}
|
||||||
|
|
||||||
func RailsRedisHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
|
func RailsRedisHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
|
||||||
cookie := ac.Cookie
|
cookie := ac.Cookie
|
||||||
|
|
||||||
if len(cookie) == 0 {
|
if len(cookie) == 0 {
|
||||||
|
@ -85,7 +84,7 @@ func RailsRedisHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, er
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func RailsMemcacheHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
|
func RailsMemcacheHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
|
||||||
cookie := ac.Cookie
|
cookie := ac.Cookie
|
||||||
|
|
||||||
if len(cookie) == 0 {
|
if len(cookie) == 0 {
|
||||||
|
@ -128,7 +127,7 @@ func RailsMemcacheHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc,
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func RailsCookieHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
|
func RailsCookieHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
|
||||||
cookie := ac.Cookie
|
cookie := ac.Cookie
|
||||||
if len(cookie) == 0 {
|
if len(cookie) == 0 {
|
||||||
return nil, fmt.Errorf("no auth.cookie defined")
|
return nil, fmt.Errorf("no auth.cookie defined")
|
||||||
|
@ -159,7 +158,7 @@ func RailsCookieHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, e
|
||||||
}, nil
|
}, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func railsAuth(ac *config.Auth) (*rails.Auth, error) {
|
func railsAuth(ac *Auth) (*rails.Auth, error) {
|
||||||
secret := ac.Rails.SecretKeyBase
|
secret := ac.Rails.SecretKeyBase
|
||||||
if len(secret) == 0 {
|
if len(secret) == 0 {
|
||||||
return nil, errors.New("no auth.rails.secret_key_base defined")
|
return nil, errors.New("no auth.rails.secret_key_base defined")
|
||||||
|
|
|
@ -12,11 +12,10 @@ import (
|
||||||
rice "github.com/GeertJohan/go.rice"
|
rice "github.com/GeertJohan/go.rice"
|
||||||
"github.com/NYTimes/gziphandler"
|
"github.com/NYTimes/gziphandler"
|
||||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func initWatcher() {
|
func initWatcher() {
|
||||||
cpath := conf.ConfigPathUsed()
|
cpath := conf.cpath
|
||||||
if conf != nil && !conf.WatchAndReload {
|
if conf != nil && !conf.WatchAndReload {
|
||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
@ -170,7 +169,7 @@ func setActionRoutes(routes map[string]http.Handler) error {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func findAuth(name string) *config.Auth {
|
func findAuth(name string) *auth.Auth {
|
||||||
for _, a := range conf.Auths {
|
for _, a := range conf.Auths {
|
||||||
if strings.EqualFold(a.Name, name) {
|
if strings.EqualFold(a.Name, name) {
|
||||||
return &a
|
return &a
|
||||||
|
|
|
@ -113,12 +113,12 @@ func al(b byte) bool {
|
||||||
func fatalInProd(err error, msg string) {
|
func fatalInProd(err error, msg string) {
|
||||||
var wg sync.WaitGroup
|
var wg sync.WaitGroup
|
||||||
|
|
||||||
if !isDev() {
|
if isDev() {
|
||||||
|
log.Printf("ERR %s: %s", msg, err)
|
||||||
|
} else {
|
||||||
log.Fatalf("ERR %s: %s", msg, err)
|
log.Fatalf("ERR %s: %s", msg, err)
|
||||||
}
|
}
|
||||||
|
|
||||||
log.Printf("ERR %s: %s", msg, err)
|
|
||||||
|
|
||||||
wg.Add(1)
|
wg.Add(1)
|
||||||
wg.Wait()
|
wg.Wait()
|
||||||
}
|
}
|
||||||
|
@ -126,3 +126,7 @@ func fatalInProd(err error, msg string) {
|
||||||
func isDev() bool {
|
func isDev() bool {
|
||||||
return strings.HasPrefix(os.Getenv("GO_ENV"), "dev")
|
return strings.HasPrefix(os.Getenv("GO_ENV"), "dev")
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func sanitize(value string) string {
|
||||||
|
return strings.ToLower(strings.TrimSpace(value))
|
||||||
|
}
|
||||||
|
|
|
@ -7,7 +7,7 @@
|
||||||
/coverage
|
/coverage
|
||||||
|
|
||||||
# production
|
# production
|
||||||
/build
|
# /build
|
||||||
|
|
||||||
# development
|
# development
|
||||||
/src/components/dataviz/core/*.js.map
|
/src/components/dataviz/core/*.js.map
|
||||||
|
|
|
@ -0,0 +1,30 @@
|
||||||
|
{
|
||||||
|
"files": {
|
||||||
|
"main.css": "/static/css/main.c6b5c55c.chunk.css",
|
||||||
|
"main.js": "/static/js/main.04d74040.chunk.js",
|
||||||
|
"main.js.map": "/static/js/main.04d74040.chunk.js.map",
|
||||||
|
"runtime-main.js": "/static/js/runtime-main.4aea9da3.js",
|
||||||
|
"runtime-main.js.map": "/static/js/runtime-main.4aea9da3.js.map",
|
||||||
|
"static/js/2.03370bd3.chunk.js": "/static/js/2.03370bd3.chunk.js",
|
||||||
|
"static/js/2.03370bd3.chunk.js.map": "/static/js/2.03370bd3.chunk.js.map",
|
||||||
|
"index.html": "/index.html",
|
||||||
|
"precache-manifest.e33bc3c7c6774d7032c490820c96901d.js": "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js",
|
||||||
|
"service-worker.js": "/service-worker.js",
|
||||||
|
"static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
|
||||||
|
"static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
|
||||||
|
"static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
|
||||||
|
"static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
|
||||||
|
"static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
|
||||||
|
"static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
|
||||||
|
"static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
|
||||||
|
"static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
|
||||||
|
"static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
|
||||||
|
"static/media/logo.png": "/static/media/logo.57ee3b60.png"
|
||||||
|
},
|
||||||
|
"entrypoints": [
|
||||||
|
"static/js/runtime-main.4aea9da3.js",
|
||||||
|
"static/js/2.03370bd3.chunk.js",
|
||||||
|
"static/css/main.c6b5c55c.chunk.css",
|
||||||
|
"static/js/main.04d74040.chunk.js"
|
||||||
|
]
|
||||||
|
}
|
Binary file not shown.
After Width: | Height: | Size: 15 KiB |
|
@ -0,0 +1 @@
|
||||||
|
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="manifest" href="/manifest.json"/><link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet"><title>Super Graph - GraphQL API for Rails</title><link href="/static/css/main.c6b5c55c.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(i){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],l=0,f=[];l<n.length;l++)t=n[l],Object.prototype.hasOwnProperty.call(p,t)&&p[t]&&f.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(i[r]=o[r]);for(s&&s(e);f.length;)f.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=l(l.s=t[0]))}return e}var t={},p={1:0},c=[];function l(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return i[e].call(r.exports,r,r.exports,l),r.l=!0,r.exports}l.m=i,l.c=t,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(r,e){if(1&e&&(r=l(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)l.d(t,n,function(e){return r[e]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var 
r=this.webpackJsonpweb=this.webpackJsonpweb||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/2.03370bd3.chunk.js"></script><script src="/static/js/main.04d74040.chunk.js"></script></body></html>
|
|
@ -0,0 +1,15 @@
|
||||||
|
{
|
||||||
|
"short_name": "Super Graph",
|
||||||
|
"name": "Super Graph - GraphQL API for Rails",
|
||||||
|
"icons": [
|
||||||
|
{
|
||||||
|
"src": "favicon.ico",
|
||||||
|
"sizes": "64x64 32x32 24x24 16x16",
|
||||||
|
"type": "image/x-icon"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"start_url": ".",
|
||||||
|
"display": "standalone",
|
||||||
|
"theme_color": "#000000",
|
||||||
|
"background_color": "#ffffff"
|
||||||
|
}
|
|
@ -0,0 +1,58 @@
|
||||||
|
self.__precacheManifest = (self.__precacheManifest || []).concat([
|
||||||
|
{
|
||||||
|
"revision": "ecdae64182d05c64e7f7f200ed03a4ed",
|
||||||
|
"url": "/index.html"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "6e9467dc213a3e2b84ea",
|
||||||
|
"url": "/static/css/main.c6b5c55c.chunk.css"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "c156a125990ddf5dcc51",
|
||||||
|
"url": "/static/js/2.03370bd3.chunk.js"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "6e9467dc213a3e2b84ea",
|
||||||
|
"url": "/static/js/main.04d74040.chunk.js"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "427262b6771d3f49a7c5",
|
||||||
|
"url": "/static/js/runtime-main.4aea9da3.js"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "5ab204b9b95c06640dbefae9a65b1db2",
|
||||||
|
"url": "/static/media/GraphQLLanguageService.js.5ab204b9.flow"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "4ce7ba191f7ebee4426768f246b2f0e0",
|
||||||
|
"url": "/static/media/autocompleteUtils.js.4ce7ba19.flow"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "7f98f032085704c8943ec2d1925c7c84",
|
||||||
|
"url": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "4dbec62f1d8e8417afb9cbd19f1268c3",
|
||||||
|
"url": "/static/media/getDefinition.js.4dbec62f.flow"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "65b0979ac23feca49e4411883fd8eaab",
|
||||||
|
"url": "/static/media/getDiagnostics.js.65b0979a.flow"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "d94118379d362fc161aa1246bcc14d43",
|
||||||
|
"url": "/static/media/getHoverInformation.js.d9411837.flow"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "c04e3998712b37a96f0bfd283fa06b52",
|
||||||
|
"url": "/static/media/getOutline.js.c04e3998.flow"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "02c24280c5e4a7eb3c6cfcb079a8f1e3",
|
||||||
|
"url": "/static/media/index.js.02c24280.flow"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"revision": "57ee3b6084cb9d3c754cc12d25a98035",
|
||||||
|
"url": "/static/media/logo.57ee3b60.png"
|
||||||
|
}
|
||||||
|
]);
|
|
@ -0,0 +1,39 @@
|
||||||
|
/**
|
||||||
|
* Welcome to your Workbox-powered service worker!
|
||||||
|
*
|
||||||
|
* You'll need to register this file in your web app and you should
|
||||||
|
* disable HTTP caching for this file too.
|
||||||
|
* See https://goo.gl/nhQhGp
|
||||||
|
*
|
||||||
|
* The rest of the code is auto-generated. Please don't update this file
|
||||||
|
* directly; instead, make changes to your Workbox build configuration
|
||||||
|
* and re-run your build process.
|
||||||
|
* See https://goo.gl/2aRDsh
|
||||||
|
*/
|
||||||
|
|
||||||
|
importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");
|
||||||
|
|
||||||
|
importScripts(
|
||||||
|
"/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js"
|
||||||
|
);
|
||||||
|
|
||||||
|
self.addEventListener('message', (event) => {
|
||||||
|
if (event.data && event.data.type === 'SKIP_WAITING') {
|
||||||
|
self.skipWaiting();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
workbox.core.clientsClaim();
|
||||||
|
|
||||||
|
/**
|
||||||
|
* The workboxSW.precacheAndRoute() method efficiently caches and responds to
|
||||||
|
* requests for URLs in the manifest.
|
||||||
|
* See https://goo.gl/S9QRab
|
||||||
|
*/
|
||||||
|
self.__precacheManifest = [].concat(self.__precacheManifest || []);
|
||||||
|
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
|
||||||
|
|
||||||
|
workbox.routing.registerNavigationRoute(workbox.precaching.getCacheKeyForURL("/index.html"), {
|
||||||
|
|
||||||
|
blacklist: [/^\/_/,/\/[^/?]+\.[^/]+$/],
|
||||||
|
});
|
|
@ -0,0 +1,2 @@
|
||||||
|
body{margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica Neue,sans-serif;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;background-color:#0f202d}code{font-family:source-code-pro,Menlo,Monaco,Consolas,Courier New,monospace}.playground>div:nth-child(2){height:calc(100vh - 131px)}
|
||||||
|
/*# sourceMappingURL=main.c6b5c55c.chunk.css.map */
|
|
@ -0,0 +1 @@
|
||||||
|
{"version":3,"sources":["index.css"],"names":[],"mappings":"AAAA,KACE,QAAS,CACT,SAAU,CACV,mIAEY,CACZ,kCAAmC,CACnC,iCAAkC,CAClC,wBACF,CAEA,KACE,uEAEF,CAEA,6BACE,0BACF","file":"main.c6b5c55c.chunk.css","sourcesContent":["body {\n margin: 0;\n padding: 0;\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", \"Roboto\", \"Oxygen\",\n \"Ubuntu\", \"Cantarell\", \"Fira Sans\", \"Droid Sans\", \"Helvetica Neue\",\n sans-serif;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n background-color: #0f202d;\n}\n\ncode {\n font-family: source-code-pro, Menlo, Monaco, Consolas, \"Courier New\",\n monospace;\n}\n\n.playground > div:nth-child(2) {\n height: calc(100vh - 131px);\n}\n"]}
|
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,2 @@
|
||||||
|
(this.webpackJsonpweb=this.webpackJsonpweb||[]).push([[0],{163:function(e,t,n){var r={".":61,"./":61,"./GraphQLLanguageService":117,"./GraphQLLanguageService.js":117,"./GraphQLLanguageService.js.flow":315,"./autocompleteUtils":91,"./autocompleteUtils.js":91,"./autocompleteUtils.js.flow":316,"./getAutocompleteSuggestions":77,"./getAutocompleteSuggestions.js":77,"./getAutocompleteSuggestions.js.flow":317,"./getDefinition":92,"./getDefinition.js":92,"./getDefinition.js.flow":318,"./getDiagnostics":94,"./getDiagnostics.js":94,"./getDiagnostics.js.flow":319,"./getHoverInformation":95,"./getHoverInformation.js":95,"./getHoverInformation.js.flow":320,"./getOutline":116,"./getOutline.js":116,"./getOutline.js.flow":321,"./index":61,"./index.js":61,"./index.js.flow":322};function o(e){var t=a(e);return n(t)}function a(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}o.keys=function(){return Object.keys(r)},o.resolve=a,e.exports=o,o.id=163},190:function(e,t,n){"use strict";(function(e){var r=n(100),o=n(101),a=n(201),i=n(191),s=n(202),l=n(5),c=n.n(l),u=n(20),g=n(130),f=(n(441),window.fetch);window.fetch=function(){return arguments[1].credentials="include",Promise.resolve(f.apply(e,arguments))};var p=function(e){function t(){return Object(r.a)(this,t),Object(a.a)(this,Object(i.a)(t).apply(this,arguments))}return Object(s.a)(t,e),Object(o.a)(t,[{key:"render",value:function(){return c.a.createElement("div",null,c.a.createElement("header",{style:{background:"#09141b",color:"#03a9f4",letterSpacing:"0.15rem",height:"65px",display:"flex",alignItems:"center"}},c.a.createElement("h3",{style:{textDecoration:"none",margin:"0px",fontSize:"18px"}},c.a.createElement("span",{style:{textTransform:"uppercase",marginLeft:"20px",paddingRight:"10px",borderRight:"1px solid #fff"}},"Super Graph"),c.a.createElement("span",{style:{fontSize:"16px",marginLeft:"10px",color:"#fff"}},"Instant 
GraphQL"))),c.a.createElement(u.Provider,{store:g.store},c.a.createElement(g.Playground,{endpoint:"/api/v1/graphql",settings:"{ 'schema.polling.enable': false, 'request.credentials': 'include', 'general.betaUpdates': true, 'editor.reuseHeaders': true, 'editor.theme': 'dark' }"})))}}]),t}(l.Component);t.a=p}).call(this,n(32))},205:function(e,t,n){e.exports=n(206)},206:function(e,t,n){"use strict";n.r(t);var r=n(5),o=n.n(r),a=n(52),i=n.n(a),s=n(190);i.a.render(o.a.createElement(s.a,null),document.getElementById("root"))},441:function(e,t,n){}},[[205,1,2]]]);
|
||||||
|
//# sourceMappingURL=main.04d74040.chunk.js.map
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,2 @@
|
||||||
|
!function(e){function r(r){for(var n,l,f=r[0],i=r[1],a=r[2],c=0,s=[];c<f.length;c++)l=f[c],Object.prototype.hasOwnProperty.call(o,l)&&o[l]&&s.push(o[l][0]),o[l]=0;for(n in i)Object.prototype.hasOwnProperty.call(i,n)&&(e[n]=i[n]);for(p&&p(r);s.length;)s.shift()();return u.push.apply(u,a||[]),t()}function t(){for(var e,r=0;r<u.length;r++){for(var t=u[r],n=!0,f=1;f<t.length;f++){var i=t[f];0!==o[i]&&(n=!1)}n&&(u.splice(r--,1),e=l(l.s=t[0]))}return e}var n={},o={1:0},u=[];function l(r){if(n[r])return n[r].exports;var t=n[r]={i:r,l:!1,exports:{}};return e[r].call(t.exports,t,t.exports,l),t.l=!0,t.exports}l.m=e,l.c=n,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!==typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(e,r){if(1&r&&(e=l(e)),8&r)return e;if(4&r&&"object"===typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var n in e)l.d(t,n,function(r){return e[r]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var f=this.webpackJsonpweb=this.webpackJsonpweb||[],i=f.push.bind(f);f.push=r,f=f.slice();for(var a=0;a<f.length;a++)r(f[a]);var p=i;t()}([]);
|
||||||
|
//# sourceMappingURL=runtime-main.4aea9da3.js.map
|
File diff suppressed because one or more lines are too long
|
@ -0,0 +1,328 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {
|
||||||
|
DocumentNode,
|
||||||
|
FragmentSpreadNode,
|
||||||
|
FragmentDefinitionNode,
|
||||||
|
OperationDefinitionNode,
|
||||||
|
TypeDefinitionNode,
|
||||||
|
NamedTypeNode,
|
||||||
|
} from 'graphql';
|
||||||
|
import type {
|
||||||
|
CompletionItem,
|
||||||
|
DefinitionQueryResult,
|
||||||
|
Diagnostic,
|
||||||
|
GraphQLCache,
|
||||||
|
GraphQLConfig,
|
||||||
|
GraphQLProjectConfig,
|
||||||
|
Uri,
|
||||||
|
} from 'graphql-language-service-types';
|
||||||
|
import type {Position} from 'graphql-language-service-utils';
|
||||||
|
import type {Hover} from 'vscode-languageserver-types';
|
||||||
|
|
||||||
|
import {Kind, parse, print} from 'graphql';
|
||||||
|
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
|
||||||
|
import {getHoverInformation} from './getHoverInformation';
|
||||||
|
import {validateQuery, getRange, SEVERITY} from './getDiagnostics';
|
||||||
|
import {
|
||||||
|
getDefinitionQueryResultForFragmentSpread,
|
||||||
|
getDefinitionQueryResultForDefinitionNode,
|
||||||
|
getDefinitionQueryResultForNamedType,
|
||||||
|
} from './getDefinition';
|
||||||
|
import {getASTNodeAtPosition} from 'graphql-language-service-utils';
|
||||||
|
|
||||||
|
const {
|
||||||
|
FRAGMENT_DEFINITION,
|
||||||
|
OBJECT_TYPE_DEFINITION,
|
||||||
|
INTERFACE_TYPE_DEFINITION,
|
||||||
|
ENUM_TYPE_DEFINITION,
|
||||||
|
UNION_TYPE_DEFINITION,
|
||||||
|
SCALAR_TYPE_DEFINITION,
|
||||||
|
INPUT_OBJECT_TYPE_DEFINITION,
|
||||||
|
SCALAR_TYPE_EXTENSION,
|
||||||
|
OBJECT_TYPE_EXTENSION,
|
||||||
|
INTERFACE_TYPE_EXTENSION,
|
||||||
|
UNION_TYPE_EXTENSION,
|
||||||
|
ENUM_TYPE_EXTENSION,
|
||||||
|
INPUT_OBJECT_TYPE_EXTENSION,
|
||||||
|
DIRECTIVE_DEFINITION,
|
||||||
|
FRAGMENT_SPREAD,
|
||||||
|
OPERATION_DEFINITION,
|
||||||
|
NAMED_TYPE,
|
||||||
|
} = Kind;
|
||||||
|
|
||||||
|
export class GraphQLLanguageService {
|
||||||
|
_graphQLCache: GraphQLCache;
|
||||||
|
_graphQLConfig: GraphQLConfig;
|
||||||
|
|
||||||
|
constructor(cache: GraphQLCache) {
|
||||||
|
this._graphQLCache = cache;
|
||||||
|
this._graphQLConfig = cache.getGraphQLConfig();
|
||||||
|
}
|
||||||
|
|
||||||
|
async getDiagnostics(
|
||||||
|
query: string,
|
||||||
|
uri: Uri,
|
||||||
|
isRelayCompatMode?: boolean,
|
||||||
|
): Promise<Array<Diagnostic>> {
|
||||||
|
// Perform syntax diagnostics first, as this doesn't require
|
||||||
|
// schema/fragment definitions, even the project configuration.
|
||||||
|
let queryHasExtensions = false;
|
||||||
|
const projectConfig = this._graphQLConfig.getConfigForFile(uri);
|
||||||
|
const schemaPath = projectConfig.schemaPath;
|
||||||
|
try {
|
||||||
|
const queryAST = parse(query);
|
||||||
|
if (!schemaPath || uri !== schemaPath) {
|
||||||
|
queryHasExtensions = queryAST.definitions.some(definition => {
|
||||||
|
switch (definition.kind) {
|
||||||
|
case OBJECT_TYPE_DEFINITION:
|
||||||
|
case INTERFACE_TYPE_DEFINITION:
|
||||||
|
case ENUM_TYPE_DEFINITION:
|
||||||
|
case UNION_TYPE_DEFINITION:
|
||||||
|
case SCALAR_TYPE_DEFINITION:
|
||||||
|
case INPUT_OBJECT_TYPE_DEFINITION:
|
||||||
|
case SCALAR_TYPE_EXTENSION:
|
||||||
|
case OBJECT_TYPE_EXTENSION:
|
||||||
|
case INTERFACE_TYPE_EXTENSION:
|
||||||
|
case UNION_TYPE_EXTENSION:
|
||||||
|
case ENUM_TYPE_EXTENSION:
|
||||||
|
case INPUT_OBJECT_TYPE_EXTENSION:
|
||||||
|
case DIRECTIVE_DEFINITION:
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
return false;
|
||||||
|
});
|
||||||
|
}
|
||||||
|
} catch (error) {
|
||||||
|
const range = getRange(error.locations[0], query);
|
||||||
|
return [
|
||||||
|
{
|
||||||
|
severity: SEVERITY.ERROR,
|
||||||
|
message: error.message,
|
||||||
|
source: 'GraphQL: Syntax',
|
||||||
|
range,
|
||||||
|
},
|
||||||
|
];
|
||||||
|
}
|
||||||
|
|
||||||
|
// If there's a matching config, proceed to prepare to run validation
|
||||||
|
let source = query;
|
||||||
|
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
|
||||||
|
projectConfig,
|
||||||
|
);
|
||||||
|
const fragmentDependencies = await this._graphQLCache.getFragmentDependencies(
|
||||||
|
query,
|
||||||
|
fragmentDefinitions,
|
||||||
|
);
|
||||||
|
const dependenciesSource = fragmentDependencies.reduce(
|
||||||
|
(prev, cur) => `${prev} ${print(cur.definition)}`,
|
||||||
|
'',
|
||||||
|
);
|
||||||
|
|
||||||
|
source = `${source} ${dependenciesSource}`;
|
||||||
|
|
||||||
|
let validationAst = null;
|
||||||
|
try {
|
||||||
|
validationAst = parse(source);
|
||||||
|
} catch (error) {
|
||||||
|
// the query string is already checked to be parsed properly - errors
|
||||||
|
// from this parse must be from corrupted fragment dependencies.
|
||||||
|
// For IDEs we don't care for errors outside of the currently edited
|
||||||
|
// query, so we return an empty array here.
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Check if there are custom validation rules to be used
|
||||||
|
let customRules;
|
||||||
|
const customRulesModulePath =
|
||||||
|
projectConfig.extensions.customValidationRules;
|
||||||
|
if (customRulesModulePath) {
|
||||||
|
/* eslint-disable no-implicit-coercion */
|
||||||
|
const rulesPath = require.resolve(`${customRulesModulePath}`);
|
||||||
|
if (rulesPath) {
|
||||||
|
customRules = require(`${rulesPath}`)(this._graphQLConfig);
|
||||||
|
}
|
||||||
|
/* eslint-enable no-implicit-coercion */
|
||||||
|
}
|
||||||
|
|
||||||
|
const schema = await this._graphQLCache
|
||||||
|
.getSchema(projectConfig.projectName, queryHasExtensions)
|
||||||
|
.catch(() => null);
|
||||||
|
|
||||||
|
if (!schema) {
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
return validateQuery(validationAst, schema, customRules, isRelayCompatMode);
|
||||||
|
}
|
||||||
|
|
||||||
|
async getAutocompleteSuggestions(
|
||||||
|
query: string,
|
||||||
|
position: Position,
|
||||||
|
filePath: Uri,
|
||||||
|
): Promise<Array<CompletionItem>> {
|
||||||
|
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
|
||||||
|
const schema = await this._graphQLCache
|
||||||
|
.getSchema(projectConfig.projectName)
|
||||||
|
.catch(() => null);
|
||||||
|
|
||||||
|
if (schema) {
|
||||||
|
return getAutocompleteSuggestions(schema, query, position);
|
||||||
|
}
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
async getHoverInformation(
|
||||||
|
query: string,
|
||||||
|
position: Position,
|
||||||
|
filePath: Uri,
|
||||||
|
): Promise<Hover.contents> {
|
||||||
|
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
|
||||||
|
const schema = await this._graphQLCache
|
||||||
|
.getSchema(projectConfig.projectName)
|
||||||
|
.catch(() => null);
|
||||||
|
|
||||||
|
if (schema) {
|
||||||
|
return getHoverInformation(schema, query, position);
|
||||||
|
}
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
|
async getDefinition(
|
||||||
|
query: string,
|
||||||
|
position: Position,
|
||||||
|
filePath: Uri,
|
||||||
|
): Promise<?DefinitionQueryResult> {
|
||||||
|
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
|
||||||
|
|
||||||
|
let ast;
|
||||||
|
try {
|
||||||
|
ast = parse(query);
|
||||||
|
} catch (error) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const node = getASTNodeAtPosition(query, ast, position);
|
||||||
|
if (node) {
|
||||||
|
switch (node.kind) {
|
||||||
|
case FRAGMENT_SPREAD:
|
||||||
|
return this._getDefinitionForFragmentSpread(
|
||||||
|
query,
|
||||||
|
ast,
|
||||||
|
node,
|
||||||
|
filePath,
|
||||||
|
projectConfig,
|
||||||
|
);
|
||||||
|
case FRAGMENT_DEFINITION:
|
||||||
|
case OPERATION_DEFINITION:
|
||||||
|
return getDefinitionQueryResultForDefinitionNode(
|
||||||
|
filePath,
|
||||||
|
query,
|
||||||
|
(node: FragmentDefinitionNode | OperationDefinitionNode),
|
||||||
|
);
|
||||||
|
case NAMED_TYPE:
|
||||||
|
return this._getDefinitionForNamedType(
|
||||||
|
query,
|
||||||
|
ast,
|
||||||
|
node,
|
||||||
|
filePath,
|
||||||
|
projectConfig,
|
||||||
|
);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
async _getDefinitionForNamedType(
|
||||||
|
query: string,
|
||||||
|
ast: DocumentNode,
|
||||||
|
node: NamedTypeNode,
|
||||||
|
filePath: Uri,
|
||||||
|
projectConfig: GraphQLProjectConfig,
|
||||||
|
): Promise<?DefinitionQueryResult> {
|
||||||
|
const objectTypeDefinitions = await this._graphQLCache.getObjectTypeDefinitions(
|
||||||
|
projectConfig,
|
||||||
|
);
|
||||||
|
|
||||||
|
const dependencies = await this._graphQLCache.getObjectTypeDependenciesForAST(
|
||||||
|
ast,
|
||||||
|
objectTypeDefinitions,
|
||||||
|
);
|
||||||
|
|
||||||
|
const localObjectTypeDefinitions = ast.definitions.filter(
|
||||||
|
definition =>
|
||||||
|
definition.kind === OBJECT_TYPE_DEFINITION ||
|
||||||
|
definition.kind === INPUT_OBJECT_TYPE_DEFINITION ||
|
||||||
|
definition.kind === ENUM_TYPE_DEFINITION,
|
||||||
|
);
|
||||||
|
|
||||||
|
const typeCastedDefs = ((localObjectTypeDefinitions: any): Array<
|
||||||
|
TypeDefinitionNode,
|
||||||
|
>);
|
||||||
|
|
||||||
|
const localOperationDefinationInfos = typeCastedDefs.map(
|
||||||
|
(definition: TypeDefinitionNode) => ({
|
||||||
|
filePath,
|
||||||
|
content: query,
|
||||||
|
definition,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const result = await getDefinitionQueryResultForNamedType(
|
||||||
|
query,
|
||||||
|
node,
|
||||||
|
dependencies.concat(localOperationDefinationInfos),
|
||||||
|
);
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
|
||||||
|
async _getDefinitionForFragmentSpread(
|
||||||
|
query: string,
|
||||||
|
ast: DocumentNode,
|
||||||
|
node: FragmentSpreadNode,
|
||||||
|
filePath: Uri,
|
||||||
|
projectConfig: GraphQLProjectConfig,
|
||||||
|
): Promise<?DefinitionQueryResult> {
|
||||||
|
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
|
||||||
|
projectConfig,
|
||||||
|
);
|
||||||
|
|
||||||
|
const dependencies = await this._graphQLCache.getFragmentDependenciesForAST(
|
||||||
|
ast,
|
||||||
|
fragmentDefinitions,
|
||||||
|
);
|
||||||
|
|
||||||
|
const localFragDefinitions = ast.definitions.filter(
|
||||||
|
definition => definition.kind === FRAGMENT_DEFINITION,
|
||||||
|
);
|
||||||
|
|
||||||
|
const typeCastedDefs = ((localFragDefinitions: any): Array<
|
||||||
|
FragmentDefinitionNode,
|
||||||
|
>);
|
||||||
|
|
||||||
|
const localFragInfos = typeCastedDefs.map(
|
||||||
|
(definition: FragmentDefinitionNode) => ({
|
||||||
|
filePath,
|
||||||
|
content: query,
|
||||||
|
definition,
|
||||||
|
}),
|
||||||
|
);
|
||||||
|
|
||||||
|
const result = await getDefinitionQueryResultForFragmentSpread(
|
||||||
|
query,
|
||||||
|
node,
|
||||||
|
dependencies.concat(localFragInfos),
|
||||||
|
);
|
||||||
|
|
||||||
|
return result;
|
||||||
|
}
|
||||||
|
}
|
|
@ -0,0 +1,204 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {GraphQLField, GraphQLSchema, GraphQLType} from 'graphql';
|
||||||
|
import {isCompositeType} from 'graphql';
|
||||||
|
import {
|
||||||
|
SchemaMetaFieldDef,
|
||||||
|
TypeMetaFieldDef,
|
||||||
|
TypeNameMetaFieldDef,
|
||||||
|
} from 'graphql/type/introspection';
|
||||||
|
import type {
|
||||||
|
CompletionItem,
|
||||||
|
ContextToken,
|
||||||
|
State,
|
||||||
|
TypeInfo,
|
||||||
|
} from 'graphql-language-service-types';
|
||||||
|
|
||||||
|
// Utility for returning the state representing the Definition this token state
|
||||||
|
// is within, if any.
|
||||||
|
export function getDefinitionState(tokenState: State): ?State {
|
||||||
|
let definitionState;
|
||||||
|
|
||||||
|
forEachState(tokenState, state => {
|
||||||
|
switch (state.kind) {
|
||||||
|
case 'Query':
|
||||||
|
case 'ShortQuery':
|
||||||
|
case 'Mutation':
|
||||||
|
case 'Subscription':
|
||||||
|
case 'FragmentDefinition':
|
||||||
|
definitionState = state;
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return definitionState;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Gets the field definition given a type and field name
|
||||||
|
export function getFieldDef(
|
||||||
|
schema: GraphQLSchema,
|
||||||
|
type: GraphQLType,
|
||||||
|
fieldName: string,
|
||||||
|
): ?GraphQLField<*, *> {
|
||||||
|
if (fieldName === SchemaMetaFieldDef.name && schema.getQueryType() === type) {
|
||||||
|
return SchemaMetaFieldDef;
|
||||||
|
}
|
||||||
|
if (fieldName === TypeMetaFieldDef.name && schema.getQueryType() === type) {
|
||||||
|
return TypeMetaFieldDef;
|
||||||
|
}
|
||||||
|
if (fieldName === TypeNameMetaFieldDef.name && isCompositeType(type)) {
|
||||||
|
return TypeNameMetaFieldDef;
|
||||||
|
}
|
||||||
|
if (type.getFields && typeof type.getFields === 'function') {
|
||||||
|
return (type.getFields()[fieldName]: any);
|
||||||
|
}
|
||||||
|
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Utility for iterating through a CodeMirror parse state stack bottom-up.
|
||||||
|
export function forEachState(
|
||||||
|
stack: State,
|
||||||
|
fn: (state: State) => ?TypeInfo,
|
||||||
|
): void {
|
||||||
|
const reverseStateStack = [];
|
||||||
|
let state = stack;
|
||||||
|
while (state && state.kind) {
|
||||||
|
reverseStateStack.push(state);
|
||||||
|
state = state.prevState;
|
||||||
|
}
|
||||||
|
for (let i = reverseStateStack.length - 1; i >= 0; i--) {
|
||||||
|
fn(reverseStateStack[i]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
export function objectValues(object: Object): Array<any> {
|
||||||
|
const keys = Object.keys(object);
|
||||||
|
const len = keys.length;
|
||||||
|
const values = new Array(len);
|
||||||
|
for (let i = 0; i < len; ++i) {
|
||||||
|
values[i] = object[keys[i]];
|
||||||
|
}
|
||||||
|
return values;
|
||||||
|
}
|
||||||
|
|
||||||
|
// Create the expected hint response given a possible list and a token
|
||||||
|
export function hintList(
|
||||||
|
token: ContextToken,
|
||||||
|
list: Array<CompletionItem>,
|
||||||
|
): Array<CompletionItem> {
|
||||||
|
return filterAndSortList(list, normalizeText(token.string));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Given a list of hint entries and currently typed text, sort and filter to
|
||||||
|
// provide a concise list.
|
||||||
|
function filterAndSortList(
|
||||||
|
list: Array<CompletionItem>,
|
||||||
|
text: string,
|
||||||
|
): Array<CompletionItem> {
|
||||||
|
if (!text) {
|
||||||
|
return filterNonEmpty(list, entry => !entry.isDeprecated);
|
||||||
|
}
|
||||||
|
|
||||||
|
const byProximity = list.map(entry => ({
|
||||||
|
proximity: getProximity(normalizeText(entry.label), text),
|
||||||
|
entry,
|
||||||
|
}));
|
||||||
|
|
||||||
|
const conciseMatches = filterNonEmpty(
|
||||||
|
filterNonEmpty(byProximity, pair => pair.proximity <= 2),
|
||||||
|
pair => !pair.entry.isDeprecated,
|
||||||
|
);
|
||||||
|
|
||||||
|
const sortedMatches = conciseMatches.sort(
|
||||||
|
(a, b) =>
|
||||||
|
(a.entry.isDeprecated ? 1 : 0) - (b.entry.isDeprecated ? 1 : 0) ||
|
||||||
|
a.proximity - b.proximity ||
|
||||||
|
a.entry.label.length - b.entry.label.length,
|
||||||
|
);
|
||||||
|
|
||||||
|
return sortedMatches.map(pair => pair.entry);
|
||||||
|
}
|
||||||
|
|
||||||
|
// Filters the array by the predicate, unless it results in an empty array,
|
||||||
|
// in which case return the original array.
|
||||||
|
function filterNonEmpty(
|
||||||
|
array: Array<Object>,
|
||||||
|
predicate: (entry: Object) => boolean,
|
||||||
|
): Array<Object> {
|
||||||
|
const filtered = array.filter(predicate);
|
||||||
|
return filtered.length === 0 ? array : filtered;
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeText(text: string): string {
|
||||||
|
return text.toLowerCase().replace(/\W/g, '');
|
||||||
|
}
|
||||||
|
|
||||||
|
// Determine a numeric proximity for a suggestion based on current text.
|
||||||
|
function getProximity(suggestion: string, text: string): number {
|
||||||
|
// start with lexical distance
|
||||||
|
let proximity = lexicalDistance(text, suggestion);
|
||||||
|
if (suggestion.length > text.length) {
|
||||||
|
// do not penalize long suggestions.
|
||||||
|
proximity -= suggestion.length - text.length - 1;
|
||||||
|
// penalize suggestions not starting with this phrase
|
||||||
|
proximity += suggestion.indexOf(text) === 0 ? 0 : 0.5;
|
||||||
|
}
|
||||||
|
return proximity;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Computes the lexical distance between strings A and B.
|
||||||
|
*
|
||||||
|
* The "distance" between two strings is given by counting the minimum number
|
||||||
|
* of edits needed to transform string A into string B. An edit can be an
|
||||||
|
* insertion, deletion, or substitution of a single character, or a swap of two
|
||||||
|
* adjacent characters.
|
||||||
|
*
|
||||||
|
* This distance can be useful for detecting typos in input or sorting
|
||||||
|
*
|
||||||
|
* @param {string} a
|
||||||
|
* @param {string} b
|
||||||
|
* @return {int} distance in number of edits
|
||||||
|
*/
|
||||||
|
function lexicalDistance(a: string, b: string): number {
|
||||||
|
let i;
|
||||||
|
let j;
|
||||||
|
const d = [];
|
||||||
|
const aLength = a.length;
|
||||||
|
const bLength = b.length;
|
||||||
|
|
||||||
|
for (i = 0; i <= aLength; i++) {
|
||||||
|
d[i] = [i];
|
||||||
|
}
|
||||||
|
|
||||||
|
for (j = 1; j <= bLength; j++) {
|
||||||
|
d[0][j] = j;
|
||||||
|
}
|
||||||
|
|
||||||
|
for (i = 1; i <= aLength; i++) {
|
||||||
|
for (j = 1; j <= bLength; j++) {
|
||||||
|
const cost = a[i - 1] === b[j - 1] ? 0 : 1;
|
||||||
|
|
||||||
|
d[i][j] = Math.min(
|
||||||
|
d[i - 1][j] + 1,
|
||||||
|
d[i][j - 1] + 1,
|
||||||
|
d[i - 1][j - 1] + cost,
|
||||||
|
);
|
||||||
|
|
||||||
|
if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
|
||||||
|
d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return d[aLength][bLength];
|
||||||
|
}
|
|
@ -0,0 +1,665 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {
|
||||||
|
FragmentDefinitionNode,
|
||||||
|
GraphQLDirective,
|
||||||
|
GraphQLSchema,
|
||||||
|
} from 'graphql';
|
||||||
|
import type {
|
||||||
|
CompletionItem,
|
||||||
|
ContextToken,
|
||||||
|
State,
|
||||||
|
TypeInfo,
|
||||||
|
} from 'graphql-language-service-types';
|
||||||
|
import type {Position} from 'graphql-language-service-utils';
|
||||||
|
|
||||||
|
import {
|
||||||
|
GraphQLBoolean,
|
||||||
|
GraphQLEnumType,
|
||||||
|
GraphQLInputObjectType,
|
||||||
|
GraphQLList,
|
||||||
|
SchemaMetaFieldDef,
|
||||||
|
TypeMetaFieldDef,
|
||||||
|
TypeNameMetaFieldDef,
|
||||||
|
assertAbstractType,
|
||||||
|
doTypesOverlap,
|
||||||
|
getNamedType,
|
||||||
|
getNullableType,
|
||||||
|
isAbstractType,
|
||||||
|
isCompositeType,
|
||||||
|
isInputType,
|
||||||
|
} from 'graphql';
|
||||||
|
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
|
||||||
|
import {
|
||||||
|
forEachState,
|
||||||
|
getDefinitionState,
|
||||||
|
getFieldDef,
|
||||||
|
hintList,
|
||||||
|
objectValues,
|
||||||
|
} from './autocompleteUtils';
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Given GraphQLSchema, queryText, and context of the current position within
|
||||||
|
* the source text, provide a list of typeahead entries.
|
||||||
|
*/
|
||||||
|
// Compute typeahead entries for the token at `cursor` in `queryText`.
// Dispatches on the online-parser state kind at the cursor: each branch below
// handles one grammatical position (document top level, field names, argument
// names, input values, fragments, variable types, directives).
export function getAutocompleteSuggestions(
  schema: GraphQLSchema,
  queryText: string,
  cursor: Position,
  contextToken?: ContextToken,
): Array<CompletionItem> {
  const token = contextToken || getTokenAtPosition(queryText, cursor);

  // An 'Invalid' token carries no usable grammar info; fall back to the
  // state that preceded it.
  const state =
    token.state.kind === 'Invalid' ? token.state.prevState : token.state;

  // relieve flow errors by checking if `state` exists
  if (!state) {
    return [];
  }

  const kind = state.kind;
  const step = state.step;
  const typeInfo = getTypeInfo(schema, token.state);

  // Definition kinds
  if (kind === 'Document') {
    return hintList(token, [
      {label: 'query'},
      {label: 'mutation'},
      {label: 'subscription'},
      {label: 'fragment'},
      {label: '{'},
    ]);
  }

  // Field names
  if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
    return getSuggestionsForFieldNames(token, typeInfo, schema);
  }

  // Argument names
  if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
    const argDefs = typeInfo.argDefs;
    if (argDefs) {
      return hintList(
        token,
        argDefs.map(argDef => ({
          label: argDef.name,
          detail: String(argDef.type),
          documentation: argDef.description,
        })),
      );
    }
  }

  // Input Object fields
  if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
    if (typeInfo.objectFieldDefs) {
      const objectFields = objectValues(typeInfo.objectFieldDefs);
      return hintList(
        token,
        objectFields.map(field => ({
          label: field.name,
          detail: String(field.type),
          documentation: field.description,
        })),
      );
    }
  }

  // Input values: Enum and Boolean
  if (
    kind === 'EnumValue' ||
    (kind === 'ListValue' && step === 1) ||
    (kind === 'ObjectField' && step === 2) ||
    (kind === 'Argument' && step === 2)
  ) {
    return getSuggestionsForInputValues(token, typeInfo);
  }

  // Fragment type conditions
  if (
    (kind === 'TypeCondition' && step === 1) ||
    (kind === 'NamedType' &&
      state.prevState != null &&
      state.prevState.kind === 'TypeCondition')
  ) {
    return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
  }

  // Fragment spread names
  if (kind === 'FragmentSpread' && step === 1) {
    return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
  }

  // Variable definition types
  if (
    (kind === 'VariableDefinition' && step === 2) ||
    (kind === 'ListType' && step === 1) ||
    (kind === 'NamedType' &&
      state.prevState &&
      (state.prevState.kind === 'VariableDefinition' ||
        state.prevState.kind === 'ListType'))
  ) {
    return getSuggestionsForVariableDefinition(token, schema);
  }

  // Directive names
  if (kind === 'Directive') {
    return getSuggestionsForDirective(token, state, schema);
  }

  // No grammatical position matched: nothing to suggest.
  return [];
}
|
||||||
|
|
||||||
|
// Helper functions to get suggestions for each kinds
|
||||||
|
function getSuggestionsForFieldNames(
|
||||||
|
token: ContextToken,
|
||||||
|
typeInfo: TypeInfo,
|
||||||
|
schema: GraphQLSchema,
|
||||||
|
): Array<CompletionItem> {
|
||||||
|
if (typeInfo.parentType) {
|
||||||
|
const parentType = typeInfo.parentType;
|
||||||
|
const fields =
|
||||||
|
parentType.getFields instanceof Function
|
||||||
|
? objectValues(parentType.getFields())
|
||||||
|
: [];
|
||||||
|
if (isAbstractType(parentType)) {
|
||||||
|
fields.push(TypeNameMetaFieldDef);
|
||||||
|
}
|
||||||
|
if (parentType === schema.getQueryType()) {
|
||||||
|
fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
|
||||||
|
}
|
||||||
|
return hintList(
|
||||||
|
token,
|
||||||
|
fields.map(field => ({
|
||||||
|
label: field.name,
|
||||||
|
detail: String(field.type),
|
||||||
|
documentation: field.description,
|
||||||
|
isDeprecated: field.isDeprecated,
|
||||||
|
deprecationReason: field.deprecationReason,
|
||||||
|
})),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSuggestionsForInputValues(
|
||||||
|
token: ContextToken,
|
||||||
|
typeInfo: TypeInfo,
|
||||||
|
): Array<CompletionItem> {
|
||||||
|
const namedInputType = getNamedType(typeInfo.inputType);
|
||||||
|
if (namedInputType instanceof GraphQLEnumType) {
|
||||||
|
const values = namedInputType.getValues();
|
||||||
|
return hintList(
|
||||||
|
token,
|
||||||
|
values.map(value => ({
|
||||||
|
label: value.name,
|
||||||
|
detail: String(namedInputType),
|
||||||
|
documentation: value.description,
|
||||||
|
isDeprecated: value.isDeprecated,
|
||||||
|
deprecationReason: value.deprecationReason,
|
||||||
|
})),
|
||||||
|
);
|
||||||
|
} else if (namedInputType === GraphQLBoolean) {
|
||||||
|
return hintList(token, [
|
||||||
|
{
|
||||||
|
label: 'true',
|
||||||
|
detail: String(GraphQLBoolean),
|
||||||
|
documentation: 'Not false.',
|
||||||
|
},
|
||||||
|
{
|
||||||
|
label: 'false',
|
||||||
|
detail: String(GraphQLBoolean),
|
||||||
|
documentation: 'Not true.',
|
||||||
|
},
|
||||||
|
]);
|
||||||
|
}
|
||||||
|
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Suggest type names usable in a fragment's `on <Type>` condition.
// With a known parent type, only types that can overlap it are offered;
// otherwise every composite type in the schema is offered.
function getSuggestionsForFragmentTypeConditions(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  let possibleTypes;
  if (typeInfo.parentType) {
    if (isAbstractType(typeInfo.parentType)) {
      const abstractType = assertAbstractType(typeInfo.parentType);
      // Collect both the possible Object types as well as the interfaces
      // they implement.
      const possibleObjTypes = schema.getPossibleTypes(abstractType);
      // Deduplicate interfaces by name via a prototype-less map.
      const possibleIfaceMap = Object.create(null);
      possibleObjTypes.forEach(type => {
        type.getInterfaces().forEach(iface => {
          possibleIfaceMap[iface.name] = iface;
        });
      });
      possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
    } else {
      // The parent type is a non-abstract Object type, so the only possible
      // type that can be used is that same type.
      possibleTypes = [typeInfo.parentType];
    }
  } else {
    // No parent type known (e.g. top-level fragment definition): offer every
    // composite (object/interface/union) type in the schema.
    const typeMap = schema.getTypeMap();
    possibleTypes = objectValues(typeMap).filter(isCompositeType);
  }
  return hintList(
    token,
    possibleTypes.map(type => {
      const namedType = getNamedType(type);
      return {
        label: String(type),
        documentation: (namedType && namedType.description) || '',
      };
    }),
  );
}
|
||||||
|
|
||||||
|
// Suggest names of fragments (defined anywhere in `queryText`) that could be
// spread at the current position.
function getSuggestionsForFragmentSpread(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
  queryText: string,
): Array<CompletionItem> {
  const typeMap = schema.getTypeMap();
  const defState = getDefinitionState(token.state);
  const fragments = getFragmentDefinitions(queryText);

  // Filter down to only the fragments which may exist here.
  const relevantFrags = fragments.filter(
    frag =>
      // Only include fragments with known types.
      typeMap[frag.typeCondition.name.value] &&
      // Only include fragments which are not cyclic.
      !(
        defState &&
        defState.kind === 'FragmentDefinition' &&
        defState.name === frag.name.value
      ) &&
      // Only include fragments which could possibly be spread here.
      isCompositeType(typeInfo.parentType) &&
      isCompositeType(typeMap[frag.typeCondition.name.value]) &&
      doTypesOverlap(
        schema,
        typeInfo.parentType,
        typeMap[frag.typeCondition.name.value],
      ),
  );

  return hintList(
    token,
    relevantFrags.map(frag => ({
      label: frag.name.value,
      detail: String(typeMap[frag.typeCondition.name.value]),
      // Show the fragment's own header as its documentation string.
      documentation: `fragment ${frag.name.value} on ${
        frag.typeCondition.name.value
      }`,
    })),
  );
}
|
||||||
|
|
||||||
|
function getFragmentDefinitions(
|
||||||
|
queryText: string,
|
||||||
|
): Array<FragmentDefinitionNode> {
|
||||||
|
const fragmentDefs = [];
|
||||||
|
runOnlineParser(queryText, (_, state) => {
|
||||||
|
if (state.kind === 'FragmentDefinition' && state.name && state.type) {
|
||||||
|
fragmentDefs.push({
|
||||||
|
kind: 'FragmentDefinition',
|
||||||
|
name: {
|
||||||
|
kind: 'Name',
|
||||||
|
value: state.name,
|
||||||
|
},
|
||||||
|
selectionSet: {
|
||||||
|
kind: 'SelectionSet',
|
||||||
|
selections: [],
|
||||||
|
},
|
||||||
|
typeCondition: {
|
||||||
|
kind: 'NamedType',
|
||||||
|
name: {
|
||||||
|
kind: 'Name',
|
||||||
|
value: state.type,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
});
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return fragmentDefs;
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSuggestionsForVariableDefinition(
|
||||||
|
token: ContextToken,
|
||||||
|
schema: GraphQLSchema,
|
||||||
|
): Array<CompletionItem> {
|
||||||
|
const inputTypeMap = schema.getTypeMap();
|
||||||
|
const inputTypes = objectValues(inputTypeMap).filter(isInputType);
|
||||||
|
return hintList(
|
||||||
|
token,
|
||||||
|
inputTypes.map(type => ({
|
||||||
|
label: type.name,
|
||||||
|
documentation: type.description,
|
||||||
|
})),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getSuggestionsForDirective(
|
||||||
|
token: ContextToken,
|
||||||
|
state: State,
|
||||||
|
schema: GraphQLSchema,
|
||||||
|
): Array<CompletionItem> {
|
||||||
|
if (state.prevState && state.prevState.kind) {
|
||||||
|
const directives = schema
|
||||||
|
.getDirectives()
|
||||||
|
.filter(directive => canUseDirective(state.prevState, directive));
|
||||||
|
return hintList(
|
||||||
|
token,
|
||||||
|
directives.map(directive => ({
|
||||||
|
label: directive.name,
|
||||||
|
documentation: directive.description || '',
|
||||||
|
})),
|
||||||
|
);
|
||||||
|
}
|
||||||
|
return [];
|
||||||
|
}
|
||||||
|
|
||||||
|
// Locate the parser token at `cursor`, capturing its style/state/string at
// the moment the stream reaches the cursor column on the cursor's line.
export function getTokenAtPosition(
  queryText: string,
  cursor: Position,
): ContextToken {
  let styleAtCursor = null;
  let stateAtCursor = null;
  let stringAtCursor = null;
  const token = runOnlineParser(queryText, (stream, state, style, index) => {
    if (index === cursor.line) {
      if (stream.getCurrentPosition() >= cursor.character) {
        styleAtCursor = style;
        // Shallow-copy: the parser mutates `state` as it advances.
        stateAtCursor = {...state};
        stringAtCursor = stream.current();
        return 'BREAK';
      }
    }
  });

  // Return the state/style of parsed token in case those at cursor aren't
  // available.
  return {
    start: token.start,
    end: token.end,
    string: stringAtCursor || token.string,
    state: stateAtCursor || token.state,
    style: styleAtCursor || token.style,
  };
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Provides an utility function to parse a given query text and construct a
|
||||||
|
* `token` context object.
|
||||||
|
* A token context provides useful information about the token/style that
|
||||||
|
* CharacterStream currently possesses, as well as the end state and style
|
||||||
|
* of the token.
|
||||||
|
*/
|
||||||
|
// Callback invoked per parsed token; returning 'BREAK' stops parsing the
// current line (parsing continues on the next line).
type callbackFnType = (
  stream: CharacterStream,
  state: State,
  style: string,
  index: number,
) => void | 'BREAK';

// Run the online GraphQL parser over `queryText` line by line, invoking
// `callback` for every token, and return a ContextToken describing the last
// token/stream position reached.
function runOnlineParser(
  queryText: string,
  callback: callbackFnType,
): ContextToken {
  const lines = queryText.split('\n');
  const parser = onlineParser();
  let state = parser.startState();
  let style = '';

  let stream: CharacterStream = new CharacterStream('');

  for (let i = 0; i < lines.length; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      style = parser.token(stream, state);
      const code = callback(stream, state, style, i);
      if (code === 'BREAK') {
        break;
      }
    }

    // Above while loop won't run if there is an empty line.
    // Run the callback one more time to catch this.
    callback(stream, state, style, i);

    if (!state.kind) {
      // Parser fell out of any known production; reset for the next line.
      state = parser.startState();
    }
  }

  return {
    start: stream.getStartOfToken(),
    end: stream.getCurrentPosition(),
    string: stream.current(),
    state,
    style,
  };
}
|
||||||
|
|
||||||
|
// Whether `directive` may appear at the grammatical location described by
// `state` (the parser state preceding the directive). Maps each parser state
// kind onto the corresponding DirectiveLocation name and checks membership
// in the directive's declared locations.
function canUseDirective(
  state: $PropertyType<State, 'prevState'>,
  directive: GraphQLDirective,
): boolean {
  if (!state || !state.kind) {
    return false;
  }
  const kind = state.kind;
  const locations = directive.locations;
  switch (kind) {
    case 'Query':
      return locations.indexOf('QUERY') !== -1;
    case 'Mutation':
      return locations.indexOf('MUTATION') !== -1;
    case 'Subscription':
      return locations.indexOf('SUBSCRIPTION') !== -1;
    case 'Field':
    case 'AliasedField':
      return locations.indexOf('FIELD') !== -1;
    case 'FragmentDefinition':
      return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
    case 'FragmentSpread':
      return locations.indexOf('FRAGMENT_SPREAD') !== -1;
    case 'InlineFragment':
      return locations.indexOf('INLINE_FRAGMENT') !== -1;

    // Schema Definitions
    case 'SchemaDef':
      return locations.indexOf('SCHEMA') !== -1;
    case 'ScalarDef':
      return locations.indexOf('SCALAR') !== -1;
    case 'ObjectTypeDef':
      return locations.indexOf('OBJECT') !== -1;
    case 'FieldDef':
      return locations.indexOf('FIELD_DEFINITION') !== -1;
    case 'InterfaceDef':
      return locations.indexOf('INTERFACE') !== -1;
    case 'UnionDef':
      return locations.indexOf('UNION') !== -1;
    case 'EnumDef':
      return locations.indexOf('ENUM') !== -1;
    case 'EnumValue':
      return locations.indexOf('ENUM_VALUE') !== -1;
    case 'InputDef':
      return locations.indexOf('INPUT_OBJECT') !== -1;
    case 'InputValueDef':
      // An input value may belong to either a field's argument list or an
      // input object definition; disambiguate via the previous state.
      const prevStateKind = state.prevState && state.prevState.kind;
      switch (prevStateKind) {
        case 'ArgumentsDef':
          return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
        case 'InputDef':
          return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
      }
  }
  return false;
}
|
||||||
|
|
||||||
|
// Utility for collecting rich type information given any token's state
|
||||||
|
// from the graphql-mode parser.
|
||||||
|
// Utility for collecting rich type information given any token's state
// from the graphql-mode parser.
//
// Walks the parser-state chain from outermost to innermost (via forEachState)
// and threads schema type information through it, mirroring what
// graphql-js's TypeInfo does for an AST. The accumulator variables below are
// mutated case by case; their final values form the returned TypeInfo.
export function getTypeInfo(
  schema: GraphQLSchema,
  tokenState: State,
): TypeInfo {
  let argDef;
  let argDefs;
  let directiveDef;
  let enumValue;
  let fieldDef;
  let inputType;
  let objectFieldDefs;
  let parentType;
  let type;

  forEachState(tokenState, state => {
    switch (state.kind) {
      case 'Query':
      case 'ShortQuery':
        type = schema.getQueryType();
        break;
      case 'Mutation':
        type = schema.getMutationType();
        break;
      case 'Subscription':
        type = schema.getSubscriptionType();
        break;
      case 'InlineFragment':
      case 'FragmentDefinition':
        if (state.type) {
          type = schema.getType(state.type);
        }
        break;
      case 'Field':
      case 'AliasedField':
        // Resolve the field on the current parent type; a miss nulls both
        // the field def and the current type.
        if (!type || !state.name) {
          fieldDef = null;
        } else {
          fieldDef = parentType
            ? getFieldDef(schema, parentType, state.name)
            : null;
          type = fieldDef ? fieldDef.type : null;
        }
        break;
      case 'SelectionSet':
        // Entering a selection set: the (unwrapped) current type becomes the
        // parent for the fields inside it.
        parentType = getNamedType(type);
        break;
      case 'Directive':
        directiveDef = state.name ? schema.getDirective(state.name) : null;
        break;
      case 'Arguments':
        // Argument definitions come from whatever construct precedes the
        // argument list: a field, a directive, or an aliased field.
        if (!state.prevState) {
          argDefs = null;
        } else {
          switch (state.prevState.kind) {
            case 'Field':
              argDefs = fieldDef && fieldDef.args;
              break;
            case 'Directive':
              argDefs = directiveDef && directiveDef.args;
              break;
            case 'AliasedField':
              const name = state.prevState && state.prevState.name;
              if (!name) {
                argDefs = null;
                break;
              }
              const field = parentType
                ? getFieldDef(schema, parentType, name)
                : null;
              if (!field) {
                argDefs = null;
                break;
              }
              argDefs = field.args;
              break;
            default:
              argDefs = null;
              break;
          }
        }
        break;
      case 'Argument':
        if (argDefs) {
          for (let i = 0; i < argDefs.length; i++) {
            if (argDefs[i].name === state.name) {
              argDef = argDefs[i];
              break;
            }
          }
        }
        inputType = argDef && argDef.type;
        break;
      case 'EnumValue':
        const enumType = getNamedType(inputType);
        enumValue =
          enumType instanceof GraphQLEnumType
            ? find(enumType.getValues(), val => val.value === state.name)
            : null;
        break;
      case 'ListValue':
        // Step into the list: the element type becomes the input type.
        const nullableType = getNullableType(inputType);
        inputType =
          nullableType instanceof GraphQLList ? nullableType.ofType : null;
        break;
      case 'ObjectValue':
        const objectType = getNamedType(inputType);
        objectFieldDefs =
          objectType instanceof GraphQLInputObjectType
            ? objectType.getFields()
            : null;
        break;
      case 'ObjectField':
        const objectField =
          state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
        inputType = objectField && objectField.type;
        break;
      case 'NamedType':
        if (state.name) {
          type = schema.getType(state.name);
        }
        break;
    }
  });

  return {
    argDef,
    argDefs,
    directiveDef,
    enumValue,
    fieldDef,
    inputType,
    objectFieldDefs,
    parentType,
    type,
  };
}
|
||||||
|
|
||||||
|
// Returns the first item in the array which causes predicate to return truthy.
|
||||||
|
// Returns the first item in the array which causes predicate to return
// truthy, or null when no item matches.
function find(array, predicate) {
  for (const item of array) {
    if (predicate(item)) {
      return item;
    }
  }
  return null;
}
|
|
@ -0,0 +1,136 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {
|
||||||
|
ASTNode,
|
||||||
|
FragmentSpreadNode,
|
||||||
|
FragmentDefinitionNode,
|
||||||
|
OperationDefinitionNode,
|
||||||
|
NamedTypeNode,
|
||||||
|
TypeDefinitionNode,
|
||||||
|
} from 'graphql';
|
||||||
|
import type {
|
||||||
|
Definition,
|
||||||
|
DefinitionQueryResult,
|
||||||
|
FragmentInfo,
|
||||||
|
Position,
|
||||||
|
Range,
|
||||||
|
Uri,
|
||||||
|
ObjectTypeInfo,
|
||||||
|
} from 'graphql-language-service-types';
|
||||||
|
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
|
||||||
|
import invariant from 'assert';
|
||||||
|
|
||||||
|
export const LANGUAGE = 'GraphQL';
|
||||||
|
|
||||||
|
function getRange(text: string, node: ASTNode): Range {
|
||||||
|
const location = node.loc;
|
||||||
|
invariant(location, 'Expected ASTNode to have a location.');
|
||||||
|
return locToRange(text, location);
|
||||||
|
}
|
||||||
|
|
||||||
|
function getPosition(text: string, node: ASTNode): Position {
|
||||||
|
const location = node.loc;
|
||||||
|
invariant(location, 'Expected ASTNode to have a location.');
|
||||||
|
return offsetToPosition(text, location.start);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getDefinitionQueryResultForNamedType(
|
||||||
|
text: string,
|
||||||
|
node: NamedTypeNode,
|
||||||
|
dependencies: Array<ObjectTypeInfo>,
|
||||||
|
): Promise<DefinitionQueryResult> {
|
||||||
|
const name = node.name.value;
|
||||||
|
const defNodes = dependencies.filter(
|
||||||
|
({definition}) => definition.name && definition.name.value === name,
|
||||||
|
);
|
||||||
|
if (defNodes.length === 0) {
|
||||||
|
process.stderr.write(`Definition not found for GraphQL type ${name}`);
|
||||||
|
return {queryRange: [], definitions: []};
|
||||||
|
}
|
||||||
|
const definitions: Array<Definition> = defNodes.map(
|
||||||
|
({filePath, content, definition}) =>
|
||||||
|
getDefinitionForNodeDefinition(filePath || '', content, definition),
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
definitions,
|
||||||
|
queryRange: definitions.map(_ => getRange(text, node)),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getDefinitionQueryResultForFragmentSpread(
|
||||||
|
text: string,
|
||||||
|
fragment: FragmentSpreadNode,
|
||||||
|
dependencies: Array<FragmentInfo>,
|
||||||
|
): Promise<DefinitionQueryResult> {
|
||||||
|
const name = fragment.name.value;
|
||||||
|
const defNodes = dependencies.filter(
|
||||||
|
({definition}) => definition.name.value === name,
|
||||||
|
);
|
||||||
|
if (defNodes.length === 0) {
|
||||||
|
process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
|
||||||
|
return {queryRange: [], definitions: []};
|
||||||
|
}
|
||||||
|
const definitions: Array<Definition> = defNodes.map(
|
||||||
|
({filePath, content, definition}) =>
|
||||||
|
getDefinitionForFragmentDefinition(filePath || '', content, definition),
|
||||||
|
);
|
||||||
|
return {
|
||||||
|
definitions,
|
||||||
|
queryRange: definitions.map(_ => getRange(text, fragment)),
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export function getDefinitionQueryResultForDefinitionNode(
|
||||||
|
path: Uri,
|
||||||
|
text: string,
|
||||||
|
definition: FragmentDefinitionNode | OperationDefinitionNode,
|
||||||
|
): DefinitionQueryResult {
|
||||||
|
return {
|
||||||
|
definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
|
||||||
|
queryRange: definition.name ? [getRange(text, definition.name)] : [],
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function getDefinitionForFragmentDefinition(
|
||||||
|
path: Uri,
|
||||||
|
text: string,
|
||||||
|
definition: FragmentDefinitionNode | OperationDefinitionNode,
|
||||||
|
): Definition {
|
||||||
|
const name = definition.name;
|
||||||
|
invariant(name, 'Expected ASTNode to have a Name.');
|
||||||
|
return {
|
||||||
|
path,
|
||||||
|
position: getPosition(text, definition),
|
||||||
|
range: getRange(text, definition),
|
||||||
|
name: name.value || '',
|
||||||
|
language: LANGUAGE,
|
||||||
|
// This is a file inside the project root, good enough for now
|
||||||
|
projectRoot: path,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function getDefinitionForNodeDefinition(
|
||||||
|
path: Uri,
|
||||||
|
text: string,
|
||||||
|
definition: TypeDefinitionNode,
|
||||||
|
): Definition {
|
||||||
|
const name = definition.name;
|
||||||
|
invariant(name, 'Expected ASTNode to have a Name.');
|
||||||
|
return {
|
||||||
|
path,
|
||||||
|
position: getPosition(text, definition),
|
||||||
|
range: getRange(text, definition),
|
||||||
|
name: name.value || '',
|
||||||
|
language: LANGUAGE,
|
||||||
|
// This is a file inside the project root, good enough for now
|
||||||
|
projectRoot: path,
|
||||||
|
};
|
||||||
|
}
|
|
@ -0,0 +1,172 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {
|
||||||
|
ASTNode,
|
||||||
|
DocumentNode,
|
||||||
|
GraphQLError,
|
||||||
|
GraphQLSchema,
|
||||||
|
Location,
|
||||||
|
SourceLocation,
|
||||||
|
} from 'graphql';
|
||||||
|
import type {
|
||||||
|
Diagnostic,
|
||||||
|
CustomValidationRule,
|
||||||
|
} from 'graphql-language-service-types';
|
||||||
|
|
||||||
|
import invariant from 'assert';
|
||||||
|
import {findDeprecatedUsages, parse} from 'graphql';
|
||||||
|
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
|
||||||
|
import {
|
||||||
|
Position,
|
||||||
|
Range,
|
||||||
|
validateWithCustomRules,
|
||||||
|
} from 'graphql-language-service-utils';
|
||||||
|
|
||||||
|
// Diagnostic severity levels; the numeric values match the LSP
// DiagnosticSeverity enum so results can be passed straight to editors.
export const SEVERITY = {
  ERROR: 1,
  WARNING: 2,
  INFORMATION: 3,
  HINT: 4,
};
|
||||||
|
|
||||||
|
// Parse `query` and return diagnostics: a single syntax-error diagnostic when
// parsing fails, otherwise the validation/deprecation diagnostics produced by
// validateQuery (which is a no-op without a schema).
export function getDiagnostics(
  query: string,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  let ast = null;
  try {
    ast = parse(query);
  } catch (error) {
    // graphql-js syntax errors carry at least one location; map the first
    // onto a token range in the query text.
    const range = getRange(error.locations[0], query);
    return [
      {
        severity: SEVERITY.ERROR,
        message: error.message,
        source: 'GraphQL: Syntax',
        range,
      },
    ];
  }

  return validateQuery(ast, schema, customRules, isRelayCompatMode);
}
|
||||||
|
|
||||||
|
// Validate a parsed document against `schema`, returning error diagnostics
// for validation failures and warning diagnostics for deprecated usages.
// Returns [] when no schema is provided.
export function validateQuery(
  ast: DocumentNode,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  // We cannot validate the query unless a schema is provided.
  if (!schema) {
    return [];
  }

  const validationErrorAnnotations = mapCat(
    validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
    error => annotations(error, SEVERITY.ERROR, 'Validation'),
  );
  // Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
  // support older versions of graphql-js.
  const deprecationWarningAnnotations = !findDeprecatedUsages
    ? []
    : mapCat(findDeprecatedUsages(schema, ast), error =>
        annotations(error, SEVERITY.WARNING, 'Deprecation'),
      );
  return validationErrorAnnotations.concat(deprecationWarningAnnotations);
}
|
||||||
|
|
||||||
|
// General utility for map-cating (aka flat-mapping).
|
||||||
|
function mapCat<T>(
|
||||||
|
array: Array<T>,
|
||||||
|
mapper: (item: T) => Array<any>,
|
||||||
|
): Array<any> {
|
||||||
|
return Array.prototype.concat.apply([], array.map(mapper));
|
||||||
|
}
|
||||||
|
|
||||||
|
// Convert one GraphQLError into per-node Diagnostics. For each node the error
// touches, the highlighted range starts at the error's first reported
// location and spans the width of the node's name (or variable, or the node
// itself as a fallback).
function annotations(
  error: GraphQLError,
  severity: number,
  type: string,
): Array<Diagnostic> {
  if (!error.nodes) {
    return [];
  }
  return error.nodes.map(node => {
    // Prefer the name node (or variable node) so the highlight covers just
    // the identifier, not the whole construct.
    const highlightNode =
      node.kind !== 'Variable' && node.name
        ? node.name
        : node.variable
        ? node.variable
        : node;

    invariant(error.locations, 'GraphQL validation error requires locations.');
    const loc = error.locations[0];
    const highlightLoc = getLocation(highlightNode);
    const end = loc.column + (highlightLoc.end - highlightLoc.start);
    return {
      source: `GraphQL: ${type}`,
      message: error.message,
      severity,
      // Error locations are 1-based; Range/Position are 0-based.
      range: new Range(
        new Position(loc.line - 1, loc.column - 1),
        new Position(loc.line - 1, end),
      ),
    };
  });
}
|
||||||
|
|
||||||
|
// Map a 1-based SourceLocation (e.g. from a graphql-js syntax error) onto the
// Range of the token at that line, by re-lexing the query up to that line
// with the online parser and reading the final stream position.
export function getRange(location: SourceLocation, queryText: string) {
  const parser = onlineParser();
  const state = parser.startState();
  const lines = queryText.split('\n');

  invariant(
    lines.length >= location.line,
    'Query text must have more lines than where the error happened',
  );

  let stream = null;

  // Lex every line up to and including the error line; the stream for the
  // last line ends on the offending token.
  for (let i = 0; i < location.line; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      const style = parser.token(stream, state);
      if (style === 'invalidchar') {
        break;
      }
    }
  }

  invariant(stream, 'Expected Parser stream to be available.');

  // Convert the 1-based error line to a 0-based Position line.
  const line = location.line - 1;
  const start = stream.getStartOfToken();
  const end = stream.getCurrentPosition();

  return new Range(new Position(line, start), new Position(line, end));
}
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Get location info from a node in a type-safe way.
|
||||||
|
*
|
||||||
|
* The only way a node could not have a location is if we initialized the parser
|
||||||
|
* (and therefore the lexer) with the `noLocation` option, but we always
|
||||||
|
* call `parse` without options above.
|
||||||
|
*/
|
||||||
|
function getLocation(node: any): Location {
|
||||||
|
const typeCastedNode = (node: ASTNode);
|
||||||
|
const location = typeCastedNode.loc;
|
||||||
|
invariant(location, 'Expected ASTNode to have a location.');
|
||||||
|
return location;
|
||||||
|
}
|
|
@ -0,0 +1,186 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
|
||||||
|
/**
|
||||||
|
* Ported from codemirror-graphql
|
||||||
|
* https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {GraphQLSchema} from 'graphql';
|
||||||
|
import type {ContextToken} from 'graphql-language-service-types';
|
||||||
|
import type {Hover} from 'vscode-languageserver-types';
|
||||||
|
import type {Position} from 'graphql-language-service-utils';
|
||||||
|
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
|
||||||
|
import {GraphQLNonNull, GraphQLList} from 'graphql';
|
||||||
|
|
||||||
|
// Produce hover-tooltip contents for the token at `cursor`: a rendered
// signature plus description for the field, directive, argument, enum value,
// or named type under the cursor. Returns [] when no schema/token is
// available; returns undefined when the token matches none of the handled
// kinds.
export function getHoverInformation(
  schema: GraphQLSchema,
  queryText: string,
  cursor: Position,
  contextToken?: ContextToken,
): Hover.contents {
  const token = contextToken || getTokenAtPosition(queryText, cursor);

  if (!schema || !token || !token.state) {
    return [];
  }

  const state = token.state;
  const kind = state.kind;
  const step = state.step;
  const typeInfo = getTypeInfo(schema, token.state);
  const options = {schema};

  // Given a Schema and a Token, produce the contents of an info tooltip.
  // To do this, create a div element that we will render "into" and then pass
  // it to various rendering functions.
  if (
    (kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
    (kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
  ) {
    const into = [];
    renderField(into, typeInfo, options);
    renderDescription(into, options, typeInfo.fieldDef);
    return into.join('').trim();
  } else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
    const into = [];
    renderDirective(into, typeInfo, options);
    renderDescription(into, options, typeInfo.directiveDef);
    return into.join('').trim();
  } else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
    const into = [];
    renderArg(into, typeInfo, options);
    renderDescription(into, options, typeInfo.argDef);
    return into.join('').trim();
  } else if (
    kind === 'EnumValue' &&
    typeInfo.enumValue &&
    typeInfo.enumValue.description
  ) {
    const into = [];
    renderEnumValue(into, typeInfo, options);
    renderDescription(into, options, typeInfo.enumValue);
    return into.join('').trim();
  } else if (
    kind === 'NamedType' &&
    typeInfo.type &&
    typeInfo.type.description
  ) {
    const into = [];
    renderType(into, typeInfo, options, typeInfo.type);
    renderDescription(into, options, typeInfo.type);
    return into.join('').trim();
  }
}
|
||||||
|
|
||||||
|
function renderField(into, typeInfo, options) {
|
||||||
|
renderQualifiedField(into, typeInfo, options);
|
||||||
|
renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderQualifiedField(into, typeInfo, options) {
|
||||||
|
if (!typeInfo.fieldDef) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const fieldName = (typeInfo.fieldDef.name: string);
|
||||||
|
if (fieldName.slice(0, 2) !== '__') {
|
||||||
|
renderType(into, typeInfo, options, typeInfo.parentType);
|
||||||
|
text(into, '.');
|
||||||
|
}
|
||||||
|
text(into, fieldName);
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderDirective(into, typeInfo, options) {
|
||||||
|
if (!typeInfo.directiveDef) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const name = '@' + typeInfo.directiveDef.name;
|
||||||
|
text(into, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderArg(into, typeInfo, options) {
|
||||||
|
if (typeInfo.directiveDef) {
|
||||||
|
renderDirective(into, typeInfo, options);
|
||||||
|
} else if (typeInfo.fieldDef) {
|
||||||
|
renderQualifiedField(into, typeInfo, options);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!typeInfo.argDef) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const name = typeInfo.argDef.name;
|
||||||
|
text(into, '(');
|
||||||
|
text(into, name);
|
||||||
|
renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
|
||||||
|
text(into, ')');
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderTypeAnnotation(into, typeInfo, options, t) {
|
||||||
|
text(into, ': ');
|
||||||
|
renderType(into, typeInfo, options, t);
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderEnumValue(into, typeInfo, options) {
|
||||||
|
if (!typeInfo.enumValue) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const name = typeInfo.enumValue.name;
|
||||||
|
renderType(into, typeInfo, options, typeInfo.inputType);
|
||||||
|
text(into, '.');
|
||||||
|
text(into, name);
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderType(into, typeInfo, options, t) {
|
||||||
|
if (!t) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
if (t instanceof GraphQLNonNull) {
|
||||||
|
renderType(into, typeInfo, options, t.ofType);
|
||||||
|
text(into, '!');
|
||||||
|
} else if (t instanceof GraphQLList) {
|
||||||
|
text(into, '[');
|
||||||
|
renderType(into, typeInfo, options, t.ofType);
|
||||||
|
text(into, ']');
|
||||||
|
} else {
|
||||||
|
text(into, t.name);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderDescription(into, options, def) {
|
||||||
|
if (!def) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const description =
|
||||||
|
typeof def.description === 'string' ? def.description : null;
|
||||||
|
if (description) {
|
||||||
|
text(into, '\n\n');
|
||||||
|
text(into, description);
|
||||||
|
}
|
||||||
|
renderDeprecation(into, options, def);
|
||||||
|
}
|
||||||
|
|
||||||
|
function renderDeprecation(into, options, def) {
|
||||||
|
if (!def) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
const reason =
|
||||||
|
typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
|
||||||
|
if (!reason) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
text(into, '\n\n');
|
||||||
|
text(into, 'Deprecated: ');
|
||||||
|
text(into, reason);
|
||||||
|
}
|
||||||
|
|
||||||
|
function text(into: string[], content: string) {
|
||||||
|
into.push(content);
|
||||||
|
}
|
|
@ -0,0 +1,121 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
|
||||||
|
import type {
|
||||||
|
Outline,
|
||||||
|
TextToken,
|
||||||
|
TokenKind,
|
||||||
|
} from 'graphql-language-service-types';
|
||||||
|
|
||||||
|
import {Kind, parse, visit} from 'graphql';
|
||||||
|
import {offsetToPosition} from 'graphql-language-service-utils';
|
||||||
|
|
||||||
|
const {INLINE_FRAGMENT} = Kind;
|
||||||
|
|
||||||
|
const OUTLINEABLE_KINDS = {
|
||||||
|
Field: true,
|
||||||
|
OperationDefinition: true,
|
||||||
|
Document: true,
|
||||||
|
SelectionSet: true,
|
||||||
|
Name: true,
|
||||||
|
FragmentDefinition: true,
|
||||||
|
FragmentSpread: true,
|
||||||
|
InlineFragment: true,
|
||||||
|
};
|
||||||
|
|
||||||
|
type OutlineTreeConverterType = {[name: string]: Function};
|
||||||
|
|
||||||
|
export function getOutline(queryText: string): ?Outline {
|
||||||
|
let ast;
|
||||||
|
try {
|
||||||
|
ast = parse(queryText);
|
||||||
|
} catch (error) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const visitorFns = outlineTreeConverter(queryText);
|
||||||
|
const outlineTrees = visit(ast, {
|
||||||
|
leave(node) {
|
||||||
|
if (
|
||||||
|
OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
|
||||||
|
visitorFns[node.kind]
|
||||||
|
) {
|
||||||
|
return visitorFns[node.kind](node);
|
||||||
|
}
|
||||||
|
return null;
|
||||||
|
},
|
||||||
|
});
|
||||||
|
return {outlineTrees};
|
||||||
|
}
|
||||||
|
|
||||||
|
function outlineTreeConverter(docText: string): OutlineTreeConverterType {
|
||||||
|
const meta = node => ({
|
||||||
|
representativeName: node.name,
|
||||||
|
startPosition: offsetToPosition(docText, node.loc.start),
|
||||||
|
endPosition: offsetToPosition(docText, node.loc.end),
|
||||||
|
children: node.selectionSet || [],
|
||||||
|
});
|
||||||
|
return {
|
||||||
|
Field: node => {
|
||||||
|
const tokenizedText = node.alias
|
||||||
|
? [buildToken('plain', node.alias), buildToken('plain', ': ')]
|
||||||
|
: [];
|
||||||
|
tokenizedText.push(buildToken('plain', node.name));
|
||||||
|
return {tokenizedText, ...meta(node)};
|
||||||
|
},
|
||||||
|
OperationDefinition: node => ({
|
||||||
|
tokenizedText: [
|
||||||
|
buildToken('keyword', node.operation),
|
||||||
|
buildToken('whitespace', ' '),
|
||||||
|
buildToken('class-name', node.name),
|
||||||
|
],
|
||||||
|
...meta(node),
|
||||||
|
}),
|
||||||
|
Document: node => node.definitions,
|
||||||
|
SelectionSet: node =>
|
||||||
|
concatMap(node.selections, child => {
|
||||||
|
return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
|
||||||
|
}),
|
||||||
|
Name: node => node.value,
|
||||||
|
FragmentDefinition: node => ({
|
||||||
|
tokenizedText: [
|
||||||
|
buildToken('keyword', 'fragment'),
|
||||||
|
buildToken('whitespace', ' '),
|
||||||
|
buildToken('class-name', node.name),
|
||||||
|
],
|
||||||
|
...meta(node),
|
||||||
|
}),
|
||||||
|
FragmentSpread: node => ({
|
||||||
|
tokenizedText: [
|
||||||
|
buildToken('plain', '...'),
|
||||||
|
buildToken('class-name', node.name),
|
||||||
|
],
|
||||||
|
...meta(node),
|
||||||
|
}),
|
||||||
|
InlineFragment: node => node.selectionSet,
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function buildToken(kind: TokenKind, value: string): TextToken {
|
||||||
|
return {kind, value};
|
||||||
|
}
|
||||||
|
|
||||||
|
function concatMap(arr: Array<any>, fn: Function): Array<any> {
|
||||||
|
const res = [];
|
||||||
|
for (let i = 0; i < arr.length; i++) {
|
||||||
|
const x = fn(arr[i], i);
|
||||||
|
if (Array.isArray(x)) {
|
||||||
|
res.push(...x);
|
||||||
|
} else {
|
||||||
|
res.push(x);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return res;
|
||||||
|
}
|
|
@ -0,0 +1,31 @@
|
||||||
|
/**
|
||||||
|
* Copyright (c) Facebook, Inc.
|
||||||
|
* All rights reserved.
|
||||||
|
*
|
||||||
|
* This source code is licensed under the license found in the
|
||||||
|
* LICENSE file in the root directory of this source tree.
|
||||||
|
*
|
||||||
|
* @flow
|
||||||
|
*/
|
||||||
|
|
||||||
|
export {
|
||||||
|
getDefinitionState,
|
||||||
|
getFieldDef,
|
||||||
|
forEachState,
|
||||||
|
objectValues,
|
||||||
|
hintList,
|
||||||
|
} from './autocompleteUtils';
|
||||||
|
|
||||||
|
export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
|
||||||
|
|
||||||
|
export {
|
||||||
|
LANGUAGE,
|
||||||
|
getDefinitionQueryResultForFragmentSpread,
|
||||||
|
getDefinitionQueryResultForDefinitionNode,
|
||||||
|
} from './getDefinition';
|
||||||
|
|
||||||
|
export {getDiagnostics, validateQuery} from './getDiagnostics';
|
||||||
|
export {getOutline} from './getOutline';
|
||||||
|
export {getHoverInformation} from './getHoverInformation';
|
||||||
|
|
||||||
|
export {GraphQLLanguageService} from './GraphQLLanguageService';
|
Binary file not shown.
After Width: | Height: | Size: 31 KiB |
505
config/config.go
505
config/config.go
|
@ -1,505 +0,0 @@
|
||||||
// Package config provides the config values needed for Super Graph
|
|
||||||
// For detailed documentation visit https://supergraph.dev
|
|
||||||
package config
|
|
||||||
|
|
||||||
import (
|
|
||||||
"fmt"
|
|
||||||
"log"
|
|
||||||
"os"
|
|
||||||
"path"
|
|
||||||
"strings"
|
|
||||||
"time"
|
|
||||||
|
|
||||||
"github.com/gobuffalo/flect"
|
|
||||||
"github.com/spf13/viper"
|
|
||||||
)
|
|
||||||
|
|
||||||
const (
|
|
||||||
LogLevelNone int = iota
|
|
||||||
LogLevelInfo
|
|
||||||
LogLevelWarn
|
|
||||||
LogLevelError
|
|
||||||
LogLevelDebug
|
|
||||||
)
|
|
||||||
|
|
||||||
// Config struct holds the Super Graph config values
|
|
||||||
type Config struct {
|
|
||||||
Core `mapstructure:",squash"`
|
|
||||||
Serv `mapstructure:",squash"`
|
|
||||||
|
|
||||||
vi *viper.Viper
|
|
||||||
log *log.Logger
|
|
||||||
logLevel int
|
|
||||||
roles map[string]*Role
|
|
||||||
abacEnabled bool
|
|
||||||
valid bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// Core struct contains core specific config value
|
|
||||||
type Core struct {
|
|
||||||
Env string
|
|
||||||
Production bool
|
|
||||||
LogLevel string `mapstructure:"log_level"`
|
|
||||||
SecretKey string `mapstructure:"secret_key"`
|
|
||||||
SetUserID bool `mapstructure:"set_user_id"`
|
|
||||||
Vars map[string]string `mapstructure:"variables"`
|
|
||||||
Blocklist []string
|
|
||||||
Tables []Table
|
|
||||||
RolesQuery string `mapstructure:"roles_query"`
|
|
||||||
Roles []Role
|
|
||||||
}
|
|
||||||
|
|
||||||
// Serv struct contains config values used by the Super Graph service
|
|
||||||
type Serv struct {
|
|
||||||
AppName string `mapstructure:"app_name"`
|
|
||||||
HostPort string `mapstructure:"host_port"`
|
|
||||||
Host string
|
|
||||||
Port string
|
|
||||||
HTTPGZip bool `mapstructure:"http_compress"`
|
|
||||||
WebUI bool `mapstructure:"web_ui"`
|
|
||||||
EnableTracing bool `mapstructure:"enable_tracing"`
|
|
||||||
UseAllowList bool `mapstructure:"use_allow_list"`
|
|
||||||
WatchAndReload bool `mapstructure:"reload_on_config_change"`
|
|
||||||
AuthFailBlock bool `mapstructure:"auth_fail_block"`
|
|
||||||
SeedFile string `mapstructure:"seed_file"`
|
|
||||||
MigrationsPath string `mapstructure:"migrations_path"`
|
|
||||||
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
|
|
||||||
DebugCORS bool `mapstructure:"cors_debug"`
|
|
||||||
|
|
||||||
Inflections map[string]string
|
|
||||||
|
|
||||||
Auth Auth
|
|
||||||
Auths []Auth
|
|
||||||
|
|
||||||
DB struct {
|
|
||||||
Type string
|
|
||||||
Host string
|
|
||||||
Port uint16
|
|
||||||
DBName string
|
|
||||||
User string
|
|
||||||
Password string
|
|
||||||
Schema string
|
|
||||||
PoolSize int32 `mapstructure:"pool_size"`
|
|
||||||
MaxRetries int `mapstructure:"max_retries"`
|
|
||||||
PingTimeout time.Duration `mapstructure:"ping_timeout"`
|
|
||||||
} `mapstructure:"database"`
|
|
||||||
|
|
||||||
Actions []Action
|
|
||||||
}
|
|
||||||
|
|
||||||
// Auth struct contains authentication related config values used by the Super Graph service
|
|
||||||
type Auth struct {
|
|
||||||
Name string
|
|
||||||
Type string
|
|
||||||
Cookie string
|
|
||||||
CredsInHeader bool `mapstructure:"creds_in_header"`
|
|
||||||
|
|
||||||
Rails struct {
|
|
||||||
Version string
|
|
||||||
SecretKeyBase string `mapstructure:"secret_key_base"`
|
|
||||||
URL string
|
|
||||||
Password string
|
|
||||||
MaxIdle int `mapstructure:"max_idle"`
|
|
||||||
MaxActive int `mapstructure:"max_active"`
|
|
||||||
Salt string
|
|
||||||
SignSalt string `mapstructure:"sign_salt"`
|
|
||||||
AuthSalt string `mapstructure:"auth_salt"`
|
|
||||||
}
|
|
||||||
|
|
||||||
JWT struct {
|
|
||||||
Provider string
|
|
||||||
Secret string
|
|
||||||
PubKeyFile string `mapstructure:"public_key_file"`
|
|
||||||
PubKeyType string `mapstructure:"public_key_type"`
|
|
||||||
}
|
|
||||||
|
|
||||||
Header struct {
|
|
||||||
Name string
|
|
||||||
Value string
|
|
||||||
Exists bool
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Column struct defines a database column
|
|
||||||
type Column struct {
|
|
||||||
Name string
|
|
||||||
Type string
|
|
||||||
ForeignKey string `mapstructure:"related_to"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Table struct defines a database table
|
|
||||||
type Table struct {
|
|
||||||
Name string
|
|
||||||
Table string
|
|
||||||
Blocklist []string
|
|
||||||
Remotes []Remote
|
|
||||||
Columns []Column
|
|
||||||
}
|
|
||||||
|
|
||||||
// Remote struct defines a remote API endpoint
|
|
||||||
type Remote struct {
|
|
||||||
Name string
|
|
||||||
ID string
|
|
||||||
Path string
|
|
||||||
URL string
|
|
||||||
Debug bool
|
|
||||||
PassHeaders []string `mapstructure:"pass_headers"`
|
|
||||||
SetHeaders []struct {
|
|
||||||
Name string
|
|
||||||
Value string
|
|
||||||
} `mapstructure:"set_headers"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// Query struct contains access control values for query operations
|
|
||||||
type Query struct {
|
|
||||||
Limit int
|
|
||||||
Filters []string
|
|
||||||
Columns []string
|
|
||||||
DisableFunctions bool `mapstructure:"disable_functions"`
|
|
||||||
Block bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// Insert struct contains access control values for insert operations
|
|
||||||
type Insert struct {
|
|
||||||
Filters []string
|
|
||||||
Columns []string
|
|
||||||
Presets map[string]string
|
|
||||||
Block bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// Insert struct contains access control values for update operations
|
|
||||||
type Update struct {
|
|
||||||
Filters []string
|
|
||||||
Columns []string
|
|
||||||
Presets map[string]string
|
|
||||||
Block bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// Delete struct contains access control values for delete operations
|
|
||||||
type Delete struct {
|
|
||||||
Filters []string
|
|
||||||
Columns []string
|
|
||||||
Block bool
|
|
||||||
}
|
|
||||||
|
|
||||||
// RoleTable struct contains role specific access control values for a database table
|
|
||||||
type RoleTable struct {
|
|
||||||
Name string
|
|
||||||
|
|
||||||
Query Query
|
|
||||||
Insert Insert
|
|
||||||
Update Update
|
|
||||||
Delete Delete
|
|
||||||
}
|
|
||||||
|
|
||||||
// Role struct contains role specific access control values for for all database tables
|
|
||||||
type Role struct {
|
|
||||||
Name string
|
|
||||||
Match string
|
|
||||||
Tables []RoleTable
|
|
||||||
tablesMap map[string]*RoleTable
|
|
||||||
}
|
|
||||||
|
|
||||||
// Action struct contains config values for a Super Graph service action
|
|
||||||
type Action struct {
|
|
||||||
Name string
|
|
||||||
SQL string
|
|
||||||
AuthName string `mapstructure:"auth_name"`
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewConfig function reads in the config file for the environment specified in the GO_ENV
|
|
||||||
// environment variable. This is the best way to create a new Super Graph config.
|
|
||||||
func NewConfig(path string) (*Config, error) {
|
|
||||||
return NewConfigWithLogger(path, log.New(os.Stdout, "", 0))
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewConfigWithLogger function reads in the config file for the environment specified in the GO_ENV
|
|
||||||
// environment variable. This is the best way to create a new Super Graph config.
|
|
||||||
func NewConfigWithLogger(path string, logger *log.Logger) (*Config, error) {
|
|
||||||
vi := newViper(path, GetConfigName())
|
|
||||||
|
|
||||||
if err := vi.ReadInConfig(); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
inherits := vi.GetString("inherits")
|
|
||||||
|
|
||||||
if len(inherits) != 0 {
|
|
||||||
vi = newViper(path, inherits)
|
|
||||||
|
|
||||||
if err := vi.ReadInConfig(); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
if vi.IsSet("inherits") {
|
|
||||||
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
|
|
||||||
inherits,
|
|
||||||
vi.GetString("inherits"))
|
|
||||||
}
|
|
||||||
|
|
||||||
vi.SetConfigName(GetConfigName())
|
|
||||||
|
|
||||||
if err := vi.MergeInConfig(); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
c := &Config{log: logger, vi: vi}
|
|
||||||
|
|
||||||
if err := vi.Unmarshal(&c); err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to decode config, %v", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
if err := c.init(); err != nil {
|
|
||||||
return nil, fmt.Errorf("failed to initialize config: %w", err)
|
|
||||||
}
|
|
||||||
|
|
||||||
return c, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// NewConfigFrom function initializes a Config struct that you manually created
|
|
||||||
// so it can be used by Super Graph
|
|
||||||
func NewConfigFrom(c *Config, configPath string, logger *log.Logger) (*Config, error) {
|
|
||||||
c.vi = newViper(configPath, GetConfigName())
|
|
||||||
c.log = logger
|
|
||||||
if err := c.init(); err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return c, nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func newViper(configPath, filename string) *viper.Viper {
|
|
||||||
vi := viper.New()
|
|
||||||
|
|
||||||
vi.SetEnvPrefix("SG")
|
|
||||||
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
|
|
||||||
vi.AutomaticEnv()
|
|
||||||
|
|
||||||
vi.SetConfigName(filename)
|
|
||||||
vi.AddConfigPath(configPath)
|
|
||||||
vi.AddConfigPath("./config")
|
|
||||||
|
|
||||||
vi.SetDefault("host_port", "0.0.0.0:8080")
|
|
||||||
vi.SetDefault("web_ui", false)
|
|
||||||
vi.SetDefault("enable_tracing", false)
|
|
||||||
vi.SetDefault("auth_fail_block", "always")
|
|
||||||
vi.SetDefault("seed_file", "seed.js")
|
|
||||||
|
|
||||||
vi.SetDefault("database.type", "postgres")
|
|
||||||
vi.SetDefault("database.host", "localhost")
|
|
||||||
vi.SetDefault("database.port", 5432)
|
|
||||||
vi.SetDefault("database.user", "postgres")
|
|
||||||
vi.SetDefault("database.schema", "public")
|
|
||||||
|
|
||||||
vi.SetDefault("env", "development")
|
|
||||||
|
|
||||||
vi.BindEnv("env", "GO_ENV") //nolint: errcheck
|
|
||||||
vi.BindEnv("host", "HOST") //nolint: errcheck
|
|
||||||
vi.BindEnv("port", "PORT") //nolint: errcheck
|
|
||||||
|
|
||||||
vi.SetDefault("auth.rails.max_idle", 80)
|
|
||||||
vi.SetDefault("auth.rails.max_active", 12000)
|
|
||||||
|
|
||||||
return vi
|
|
||||||
}
|
|
||||||
|
|
||||||
func (c *Config) init() error {
|
|
||||||
switch c.Core.LogLevel {
|
|
||||||
case "debug":
|
|
||||||
c.logLevel = LogLevelDebug
|
|
||||||
case "error":
|
|
||||||
c.logLevel = LogLevelError
|
|
||||||
case "warn":
|
|
||||||
c.logLevel = LogLevelWarn
|
|
||||||
case "info":
|
|
||||||
c.logLevel = LogLevelInfo
|
|
||||||
default:
|
|
||||||
c.logLevel = LogLevelNone
|
|
||||||
}
|
|
||||||
|
|
||||||
if c.UseAllowList {
|
|
||||||
c.Production = true
|
|
||||||
}
|
|
||||||
|
|
||||||
for k, v := range c.Inflections {
|
|
||||||
flect.AddPlural(k, v)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Tables: Validate and sanitize
|
|
||||||
tm := make(map[string]struct{})
|
|
||||||
|
|
||||||
for i := 0; i < len(c.Tables); i++ {
|
|
||||||
t := &c.Tables[i]
|
|
||||||
t.Name = flect.Pluralize(strings.ToLower(t.Name))
|
|
||||||
|
|
||||||
if _, ok := tm[t.Name]; ok {
|
|
||||||
c.Tables = append(c.Tables[:i], c.Tables[i+1:]...)
|
|
||||||
c.log.Printf("WRN duplicate table found: %s", t.Name)
|
|
||||||
}
|
|
||||||
tm[t.Name] = struct{}{}
|
|
||||||
|
|
||||||
t.Table = flect.Pluralize(strings.ToLower(t.Table))
|
|
||||||
}
|
|
||||||
|
|
||||||
// Variables: Validate and sanitize
|
|
||||||
for k, v := range c.Vars {
|
|
||||||
c.Vars[k] = sanitize(v)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Roles: validate and sanitize
|
|
||||||
c.RolesQuery = sanitize(c.RolesQuery)
|
|
||||||
c.roles = make(map[string]*Role)
|
|
||||||
|
|
||||||
for i := 0; i < len(c.Roles); i++ {
|
|
||||||
r := &c.Roles[i]
|
|
||||||
r.Name = strings.ToLower(r.Name)
|
|
||||||
|
|
||||||
if _, ok := c.roles[r.Name]; ok {
|
|
||||||
c.Roles = append(c.Roles[:i], c.Roles[i+1:]...)
|
|
||||||
c.log.Printf("WRN duplicate role found: %s", r.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
r.Match = sanitize(r.Match)
|
|
||||||
r.tablesMap = make(map[string]*RoleTable)
|
|
||||||
|
|
||||||
for n, table := range r.Tables {
|
|
||||||
r.tablesMap[table.Name] = &r.Tables[n]
|
|
||||||
}
|
|
||||||
|
|
||||||
c.roles[r.Name] = r
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := c.roles["user"]; !ok {
|
|
||||||
u := Role{Name: "user"}
|
|
||||||
c.Roles = append(c.Roles, u)
|
|
||||||
c.roles["user"] = &u
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := c.roles["anon"]; !ok {
|
|
||||||
c.log.Printf("WRN unauthenticated requests will be blocked. no role 'anon' defined")
|
|
||||||
c.AuthFailBlock = true
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(c.RolesQuery) == 0 {
|
|
||||||
c.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
|
||||||
}
|
|
||||||
|
|
||||||
if len(c.RolesQuery) == 0 {
|
|
||||||
c.abacEnabled = false
|
|
||||||
} else {
|
|
||||||
switch len(c.Roles) {
|
|
||||||
case 0, 1:
|
|
||||||
c.abacEnabled = false
|
|
||||||
case 2:
|
|
||||||
_, ok1 := c.roles["anon"]
|
|
||||||
_, ok2 := c.roles["user"]
|
|
||||||
c.abacEnabled = !(ok1 && ok2)
|
|
||||||
default:
|
|
||||||
c.abacEnabled = true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Auths: validate and sanitize
|
|
||||||
am := make(map[string]struct{})
|
|
||||||
|
|
||||||
for i := 0; i < len(c.Auths); i++ {
|
|
||||||
a := &c.Auths[i]
|
|
||||||
a.Name = strings.ToLower(a.Name)
|
|
||||||
|
|
||||||
if _, ok := am[a.Name]; ok {
|
|
||||||
c.Auths = append(c.Auths[:i], c.Auths[i+1:]...)
|
|
||||||
c.log.Printf("WRN duplicate auth found: %s", a.Name)
|
|
||||||
}
|
|
||||||
am[a.Name] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Actions: validate and sanitize
|
|
||||||
axm := make(map[string]struct{})
|
|
||||||
|
|
||||||
for i := 0; i < len(c.Actions); i++ {
|
|
||||||
a := &c.Actions[i]
|
|
||||||
a.Name = strings.ToLower(a.Name)
|
|
||||||
a.AuthName = strings.ToLower(a.AuthName)
|
|
||||||
|
|
||||||
if _, ok := axm[a.Name]; ok {
|
|
||||||
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
|
|
||||||
c.log.Printf("WRN duplicate action found: %s", a.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
if _, ok := am[a.AuthName]; !ok {
|
|
||||||
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
|
|
||||||
c.log.Printf("WRN invalid auth_name '%s' for auth: %s", a.AuthName, a.Name)
|
|
||||||
}
|
|
||||||
axm[a.Name] = struct{}{}
|
|
||||||
}
|
|
||||||
|
|
||||||
c.valid = true
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetDBTableAliases function returns a map with database tables as keys
|
|
||||||
// and a list of aliases as values
|
|
||||||
func (c *Config) GetDBTableAliases() map[string][]string {
|
|
||||||
m := make(map[string][]string, len(c.Tables))
|
|
||||||
|
|
||||||
for i := range c.Tables {
|
|
||||||
t := c.Tables[i]
|
|
||||||
|
|
||||||
if len(t.Table) == 0 || len(t.Columns) != 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
|
|
||||||
m[t.Table] = append(m[t.Table], t.Name)
|
|
||||||
}
|
|
||||||
return m
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsABACEnabled function returns true if attribute based access control is enabled
|
|
||||||
func (c *Config) IsABACEnabled() bool {
|
|
||||||
return c.abacEnabled
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsAnonRoleDefined function returns true if the config has configuration for the `anon` role
|
|
||||||
func (c *Config) IsAnonRoleDefined() bool {
|
|
||||||
_, ok := c.roles["anon"]
|
|
||||||
return ok
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetRole function returns returns the Role struct by name
|
|
||||||
func (c *Config) GetRole(name string) *Role {
|
|
||||||
role := c.roles[name]
|
|
||||||
return role
|
|
||||||
}
|
|
||||||
|
|
||||||
// ConfigPathUsed function returns the path to the current config file (excluding filename)
|
|
||||||
func (c *Config) ConfigPathUsed() string {
|
|
||||||
return path.Dir(c.vi.ConfigFileUsed())
|
|
||||||
}
|
|
||||||
|
|
||||||
// WriteConfigAs function writes the config to a file
|
|
||||||
// Format defined by extension (eg: .yml, .json)
|
|
||||||
func (c *Config) WriteConfigAs(fname string) error {
|
|
||||||
return c.vi.WriteConfigAs(fname)
|
|
||||||
}
|
|
||||||
|
|
||||||
// Log function returns the logger
|
|
||||||
func (c *Config) Log() *log.Logger {
|
|
||||||
return c.log
|
|
||||||
}
|
|
||||||
|
|
||||||
// LogLevel function returns the log level
|
|
||||||
func (c *Config) LogLevel() int {
|
|
||||||
return c.logLevel
|
|
||||||
}
|
|
||||||
|
|
||||||
// IsValid function returns true if the Config struct is initialized and valid
|
|
||||||
func (c *Config) IsValid() bool {
|
|
||||||
return c.valid
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetTable function returns the RoleTable struct for a Role by table name
|
|
||||||
func (r *Role) GetTable(name string) *RoleTable {
|
|
||||||
table := r.tablesMap[name]
|
|
||||||
return table
|
|
||||||
}
|
|
|
@ -1,13 +0,0 @@
|
||||||
package config
|
|
||||||
|
|
||||||
import (
|
|
||||||
"testing"
|
|
||||||
)
|
|
||||||
|
|
||||||
func TestInitConf(t *testing.T) {
|
|
||||||
_, err := NewConfig("../examples/rails-app/config/supergraph")
|
|
||||||
|
|
||||||
if err != nil {
|
|
||||||
t.Fatal(err.Error())
|
|
||||||
}
|
|
||||||
}
|
|
|
@ -1,52 +0,0 @@
|
||||||
package config
|
|
||||||
|
|
||||||
import (
|
|
||||||
"os"
|
|
||||||
"regexp"
|
|
||||||
"strings"
|
|
||||||
"unicode"
|
|
||||||
)
|
|
||||||
|
|
||||||
var (
|
|
||||||
varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
|
|
||||||
varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
|
|
||||||
)
|
|
||||||
|
|
||||||
func sanitize(s string) string {
|
|
||||||
s0 := varRe1.ReplaceAllString(s, `{{$1}}`)
|
|
||||||
|
|
||||||
s1 := strings.Map(func(r rune) rune {
|
|
||||||
if unicode.IsSpace(r) {
|
|
||||||
return ' '
|
|
||||||
}
|
|
||||||
return r
|
|
||||||
}, s0)
|
|
||||||
|
|
||||||
return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
|
|
||||||
return strings.ToLower(m)
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
func GetConfigName() string {
|
|
||||||
if len(os.Getenv("GO_ENV")) == 0 {
|
|
||||||
return "dev"
|
|
||||||
}
|
|
||||||
|
|
||||||
ge := strings.ToLower(os.Getenv("GO_ENV"))
|
|
||||||
|
|
||||||
switch {
|
|
||||||
case strings.HasPrefix(ge, "pro"):
|
|
||||||
return "prod"
|
|
||||||
|
|
||||||
case strings.HasPrefix(ge, "sta"):
|
|
||||||
return "stage"
|
|
||||||
|
|
||||||
case strings.HasPrefix(ge, "tes"):
|
|
||||||
return "test"
|
|
||||||
|
|
||||||
case strings.HasPrefix(ge, "dev"):
|
|
||||||
return "dev"
|
|
||||||
}
|
|
||||||
|
|
||||||
return ge
|
|
||||||
}
|
|
50
core/api.go
50
core/api.go
|
@ -9,7 +9,6 @@
|
||||||
"database/sql"
|
"database/sql"
|
||||||
"fmt"
|
"fmt"
|
||||||
"time"
|
"time"
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
_ "github.com/jackc/pgx/v4/stdlib"
|
_ "github.com/jackc/pgx/v4/stdlib"
|
||||||
)
|
)
|
||||||
|
@ -20,7 +19,7 @@
|
||||||
log.Fatalf(err)
|
log.Fatalf(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
conf, err := config.NewConfig("./config")
|
conf, err := core.ReadInConfig("./config/dev.yml")
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf(err)
|
log.Fatalf(err)
|
||||||
}
|
}
|
||||||
|
@ -53,10 +52,9 @@ import (
|
||||||
"crypto/sha256"
|
"crypto/sha256"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
"encoding/json"
|
"encoding/json"
|
||||||
"fmt"
|
_log "log"
|
||||||
"log"
|
"os"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/dosco/super-graph/core/internal/allow"
|
"github.com/dosco/super-graph/core/internal/allow"
|
||||||
"github.com/dosco/super-graph/core/internal/crypto"
|
"github.com/dosco/super-graph/core/internal/crypto"
|
||||||
"github.com/dosco/super-graph/core/internal/psql"
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
|
@ -80,36 +78,32 @@ const (
|
||||||
// SuperGraph struct is an instance of the Super Graph engine it holds all the required information like
|
// SuperGraph struct is an instance of the Super Graph engine it holds all the required information like
|
||||||
// datase schemas, relationships, etc that the GraphQL to SQL compiler would need to do it's job.
|
// datase schemas, relationships, etc that the GraphQL to SQL compiler would need to do it's job.
|
||||||
type SuperGraph struct {
|
type SuperGraph struct {
|
||||||
conf *config.Config
|
conf *Config
|
||||||
db *sql.DB
|
db *sql.DB
|
||||||
schema *psql.DBSchema
|
log *_log.Logger
|
||||||
allowList *allow.List
|
schema *psql.DBSchema
|
||||||
encKey [32]byte
|
allowList *allow.List
|
||||||
prepared map[string]*preparedItem
|
encKey [32]byte
|
||||||
getRole *sql.Stmt
|
prepared map[string]*preparedItem
|
||||||
qc *qcode.Compiler
|
roles map[string]*Role
|
||||||
pc *psql.Compiler
|
getRole *sql.Stmt
|
||||||
}
|
abacEnabled bool
|
||||||
|
anonExists bool
|
||||||
// NewConfig functions initializes config using a config.Core struct
|
qc *qcode.Compiler
|
||||||
func NewConfig(core config.Core, configPath string, logger *log.Logger) (*config.Config, error) {
|
pc *psql.Compiler
|
||||||
c, err := config.NewConfigFrom(&config.Config{Core: core}, configPath, logger)
|
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
return c, nil
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewSuperGraph creates the SuperGraph struct, this involves querying the database to learn its
|
// NewSuperGraph creates the SuperGraph struct, this involves querying the database to learn its
|
||||||
// schemas and relationships
|
// schemas and relationships
|
||||||
func NewSuperGraph(conf *config.Config, db *sql.DB) (*SuperGraph, error) {
|
func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
|
||||||
if !conf.IsValid() {
|
|
||||||
return nil, fmt.Errorf("invalid config")
|
|
||||||
}
|
|
||||||
|
|
||||||
sg := &SuperGraph{
|
sg := &SuperGraph{
|
||||||
conf: conf,
|
conf: conf,
|
||||||
db: db,
|
db: db,
|
||||||
|
log: _log.New(os.Stdout, "", 0),
|
||||||
|
}
|
||||||
|
|
||||||
|
if err := sg.initConfig(); err != nil {
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := sg.initCompilers(); err != nil {
|
if err := sg.initCompilers(); err != nil {
|
||||||
|
|
|
@ -7,13 +7,12 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/dosco/super-graph/core/internal/psql"
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
)
|
)
|
||||||
|
|
||||||
type stmt struct {
|
type stmt struct {
|
||||||
role *config.Role
|
role *Role
|
||||||
qc *qcode.QCode
|
qc *qcode.QCode
|
||||||
skipped uint32
|
skipped uint32
|
||||||
sql string
|
sql string
|
||||||
|
@ -29,7 +28,7 @@ func (sg *SuperGraph) buildStmt(qt qcode.QType, query, vars []byte, role string)
|
||||||
return sg.buildRoleStmt(query, vars, "anon")
|
return sg.buildRoleStmt(query, vars, "anon")
|
||||||
}
|
}
|
||||||
|
|
||||||
if sg.conf.IsABACEnabled() {
|
if sg.abacEnabled {
|
||||||
return sg.buildMultiStmt(query, vars)
|
return sg.buildMultiStmt(query, vars)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -41,8 +40,8 @@ func (sg *SuperGraph) buildStmt(qt qcode.QType, query, vars []byte, role string)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (sg *SuperGraph) buildRoleStmt(query, vars []byte, role string) ([]stmt, error) {
|
func (sg *SuperGraph) buildRoleStmt(query, vars []byte, role string) ([]stmt, error) {
|
||||||
ro := sg.conf.GetRole(role)
|
ro, ok := sg.roles[role]
|
||||||
if ro == nil {
|
if !ok {
|
||||||
return nil, fmt.Errorf(`roles '%s' not defined in c.sg.config`, role)
|
return nil, fmt.Errorf(`roles '%s' not defined in c.sg.config`, role)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -168,7 +167,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
|
||||||
return w.String(), nil
|
return w.String(), nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (sg *SuperGraph) hasTablesWithConfig(qc *qcode.QCode, role *config.Role) bool {
|
func (sg *SuperGraph) hasTablesWithConfig(qc *qcode.QCode, role *Role) bool {
|
||||||
for _, id := range qc.Roots {
|
for _, id := range qc.Roots {
|
||||||
t, err := sg.schema.GetTable(qc.Selects[id].Name)
|
t, err := sg.schema.GetTable(qc.Selects[id].Name)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
|
286
core/config.go
286
core/config.go
|
@ -2,164 +2,162 @@ package core
|
||||||
|
|
||||||
import (
|
import (
|
||||||
"fmt"
|
"fmt"
|
||||||
|
"path"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
"github.com/spf13/viper"
|
||||||
"github.com/dosco/super-graph/core/internal/psql"
|
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
func addTables(c *config.Config, di *psql.DBInfo) error {
|
// Core struct contains core specific config value
|
||||||
for _, t := range c.Tables {
|
type Config struct {
|
||||||
if len(t.Table) == 0 || len(t.Columns) == 0 {
|
SecretKey string `mapstructure:"secret_key"`
|
||||||
continue
|
UseAllowList bool `mapstructure:"use_allow_list"`
|
||||||
|
AllowListFile string `mapstructure:"allow_list_file"`
|
||||||
|
SetUserID bool `mapstructure:"set_user_id"`
|
||||||
|
Vars map[string]string `mapstructure:"variables"`
|
||||||
|
Blocklist []string
|
||||||
|
Tables []Table
|
||||||
|
RolesQuery string `mapstructure:"roles_query"`
|
||||||
|
Roles []Role
|
||||||
|
Inflections map[string]string
|
||||||
|
}
|
||||||
|
|
||||||
|
// Table struct defines a database table
|
||||||
|
type Table struct {
|
||||||
|
Name string
|
||||||
|
Table string
|
||||||
|
Blocklist []string
|
||||||
|
Remotes []Remote
|
||||||
|
Columns []Column
|
||||||
|
}
|
||||||
|
|
||||||
|
// Column struct defines a database column
|
||||||
|
type Column struct {
|
||||||
|
Name string
|
||||||
|
Type string
|
||||||
|
ForeignKey string `mapstructure:"related_to"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Remote struct defines a remote API endpoint
|
||||||
|
type Remote struct {
|
||||||
|
Name string
|
||||||
|
ID string
|
||||||
|
Path string
|
||||||
|
URL string
|
||||||
|
Debug bool
|
||||||
|
PassHeaders []string `mapstructure:"pass_headers"`
|
||||||
|
SetHeaders []struct {
|
||||||
|
Name string
|
||||||
|
Value string
|
||||||
|
} `mapstructure:"set_headers"`
|
||||||
|
}
|
||||||
|
|
||||||
|
// Role struct contains role specific access control values for for all database tables
|
||||||
|
type Role struct {
|
||||||
|
Name string
|
||||||
|
Match string
|
||||||
|
Tables []RoleTable
|
||||||
|
tm map[string]*RoleTable
|
||||||
|
}
|
||||||
|
|
||||||
|
// RoleTable struct contains role specific access control values for a database table
|
||||||
|
type RoleTable struct {
|
||||||
|
Name string
|
||||||
|
|
||||||
|
Query Query
|
||||||
|
Insert Insert
|
||||||
|
Update Update
|
||||||
|
Delete Delete
|
||||||
|
}
|
||||||
|
|
||||||
|
// Query struct contains access control values for query operations
|
||||||
|
type Query struct {
|
||||||
|
Limit int
|
||||||
|
Filters []string
|
||||||
|
Columns []string
|
||||||
|
DisableFunctions bool `mapstructure:"disable_functions"`
|
||||||
|
Block bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// Insert struct contains access control values for insert operations
|
||||||
|
type Insert struct {
|
||||||
|
Filters []string
|
||||||
|
Columns []string
|
||||||
|
Presets map[string]string
|
||||||
|
Block bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// Insert struct contains access control values for update operations
|
||||||
|
type Update struct {
|
||||||
|
Filters []string
|
||||||
|
Columns []string
|
||||||
|
Presets map[string]string
|
||||||
|
Block bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// Delete struct contains access control values for delete operations
|
||||||
|
type Delete struct {
|
||||||
|
Filters []string
|
||||||
|
Columns []string
|
||||||
|
Block bool
|
||||||
|
}
|
||||||
|
|
||||||
|
// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
|
||||||
|
// environment variable. This is the best way to create a new Super Graph config.
|
||||||
|
func ReadInConfig(configFile string) (*Config, error) {
|
||||||
|
cpath := path.Dir(configFile)
|
||||||
|
cfile := path.Base(configFile)
|
||||||
|
vi := newViper(cpath, cfile)
|
||||||
|
|
||||||
|
if err := vi.ReadInConfig(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
inherits := vi.GetString("inherits")
|
||||||
|
|
||||||
|
if len(inherits) != 0 {
|
||||||
|
vi = newViper(cpath, inherits)
|
||||||
|
|
||||||
|
if err := vi.ReadInConfig(); err != nil {
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
if err := addTable(di, t.Columns, t); err != nil {
|
|
||||||
return err
|
if vi.IsSet("inherits") {
|
||||||
|
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
|
||||||
|
inherits,
|
||||||
|
vi.GetString("inherits"))
|
||||||
}
|
}
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func addTable(di *psql.DBInfo, cols []config.Column, t config.Table) error {
|
vi.SetConfigName(cfile)
|
||||||
bc, ok := di.GetColumn(t.Table, t.Name)
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf(
|
|
||||||
"Column '%s' not found on table '%s'",
|
|
||||||
t.Name, t.Table)
|
|
||||||
}
|
|
||||||
|
|
||||||
if bc.Type != "json" && bc.Type != "jsonb" {
|
if err := vi.MergeInConfig(); err != nil {
|
||||||
return fmt.Errorf(
|
return nil, err
|
||||||
"Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
|
|
||||||
t.Name, t.Table, bc.Type)
|
|
||||||
}
|
|
||||||
|
|
||||||
table := psql.DBTable{
|
|
||||||
Name: t.Name,
|
|
||||||
Key: strings.ToLower(t.Name),
|
|
||||||
Type: bc.Type,
|
|
||||||
}
|
|
||||||
|
|
||||||
columns := make([]psql.DBColumn, 0, len(cols))
|
|
||||||
|
|
||||||
for i := range cols {
|
|
||||||
c := cols[i]
|
|
||||||
columns = append(columns, psql.DBColumn{
|
|
||||||
Name: c.Name,
|
|
||||||
Key: strings.ToLower(c.Name),
|
|
||||||
Type: c.Type,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
di.AddTable(table, columns)
|
|
||||||
bc.FKeyTable = t.Name
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func addForeignKeys(c *config.Config, di *psql.DBInfo) error {
|
|
||||||
for _, t := range c.Tables {
|
|
||||||
for _, c := range t.Columns {
|
|
||||||
if len(c.ForeignKey) == 0 {
|
|
||||||
continue
|
|
||||||
}
|
|
||||||
if err := addForeignKey(di, c, t); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func addForeignKey(di *psql.DBInfo, c config.Column, t config.Table) error {
|
|
||||||
c1, ok := di.GetColumn(t.Name, c.Name)
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf(
|
|
||||||
"Invalid table '%s' or column '%s' in config.Config",
|
|
||||||
t.Name, c.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
v := strings.SplitN(c.ForeignKey, ".", 2)
|
|
||||||
if len(v) != 2 {
|
|
||||||
return fmt.Errorf(
|
|
||||||
"Invalid foreign_key in config.Config for table '%s' and column '%s",
|
|
||||||
t.Name, c.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
fkt, fkc := v[0], v[1]
|
|
||||||
c2, ok := di.GetColumn(fkt, fkc)
|
|
||||||
if !ok {
|
|
||||||
return fmt.Errorf(
|
|
||||||
"Invalid foreign_key in config.Config for table '%s' and column '%s",
|
|
||||||
t.Name, c.Name)
|
|
||||||
}
|
|
||||||
|
|
||||||
c1.FKeyTable = fkt
|
|
||||||
c1.FKeyColID = []int16{c2.ID}
|
|
||||||
|
|
||||||
return nil
|
|
||||||
}
|
|
||||||
|
|
||||||
func addRoles(c *config.Config, qc *qcode.Compiler) error {
|
|
||||||
for _, r := range c.Roles {
|
|
||||||
for _, t := range r.Tables {
|
|
||||||
if err := addRole(qc, r, t); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
return nil
|
c := &Config{}
|
||||||
|
|
||||||
|
if err := vi.Unmarshal(&c); err != nil {
|
||||||
|
return nil, fmt.Errorf("failed to decode config, %v", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if len(c.AllowListFile) == 0 {
|
||||||
|
c.AllowListFile = path.Join(cpath, "allow.list")
|
||||||
|
}
|
||||||
|
|
||||||
|
return c, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func addRole(qc *qcode.Compiler, r config.Role, t config.RoleTable) error {
|
func newViper(configPath, configFile string) *viper.Viper {
|
||||||
blockFilter := []string{"false"}
|
vi := viper.New()
|
||||||
|
|
||||||
query := qcode.QueryConfig{
|
vi.SetEnvPrefix("SG")
|
||||||
Limit: t.Query.Limit,
|
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
|
||||||
Filters: t.Query.Filters,
|
vi.AutomaticEnv()
|
||||||
Columns: t.Query.Columns,
|
|
||||||
DisableFunctions: t.Query.DisableFunctions,
|
|
||||||
}
|
|
||||||
|
|
||||||
if t.Query.Block {
|
vi.SetConfigName(configFile)
|
||||||
query.Filters = blockFilter
|
vi.AddConfigPath(configPath)
|
||||||
}
|
vi.AddConfigPath("./config")
|
||||||
|
|
||||||
insert := qcode.InsertConfig{
|
return vi
|
||||||
Filters: t.Insert.Filters,
|
|
||||||
Columns: t.Insert.Columns,
|
|
||||||
Presets: t.Insert.Presets,
|
|
||||||
}
|
|
||||||
|
|
||||||
if t.Insert.Block {
|
|
||||||
insert.Filters = blockFilter
|
|
||||||
}
|
|
||||||
|
|
||||||
update := qcode.UpdateConfig{
|
|
||||||
Filters: t.Update.Filters,
|
|
||||||
Columns: t.Update.Columns,
|
|
||||||
Presets: t.Update.Presets,
|
|
||||||
}
|
|
||||||
|
|
||||||
if t.Update.Block {
|
|
||||||
update.Filters = blockFilter
|
|
||||||
}
|
|
||||||
|
|
||||||
delete := qcode.DeleteConfig{
|
|
||||||
Filters: t.Delete.Filters,
|
|
||||||
Columns: t.Delete.Columns,
|
|
||||||
}
|
|
||||||
|
|
||||||
if t.Delete.Block {
|
|
||||||
delete.Filters = blockFilter
|
|
||||||
}
|
|
||||||
|
|
||||||
return qc.AddRole(r.Name, t.Name, qcode.TRConfig{
|
|
||||||
Query: query,
|
|
||||||
Insert: insert,
|
|
||||||
Update: update,
|
|
||||||
Delete: delete,
|
|
||||||
})
|
|
||||||
}
|
}
|
||||||
|
|
10
core/core.go
10
core/core.go
|
@ -63,7 +63,7 @@ func (sg *SuperGraph) initCompilers() error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
sg.schema, err = psql.NewDBSchema(di, sg.conf.GetDBTableAliases())
|
sg.schema, err = psql.NewDBSchema(di, getDBTableAliases(sg.conf))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
@ -92,7 +92,7 @@ func (c *scontext) execQuery() ([]byte, error) {
|
||||||
// var st *stmt
|
// var st *stmt
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if c.sg.conf.Production {
|
if c.sg.conf.UseAllowList {
|
||||||
data, _, err = c.resolvePreparedSQL()
|
data, _, err = c.resolvePreparedSQL()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
|
@ -115,7 +115,7 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
mutation := (c.res.op == qcode.QTMutation)
|
mutation := (c.res.op == qcode.QTMutation)
|
||||||
useRoleQuery := c.sg.conf.IsABACEnabled() && mutation
|
useRoleQuery := c.sg.abacEnabled && mutation
|
||||||
useTx := useRoleQuery || c.sg.conf.SetUserID
|
useTx := useRoleQuery || c.sg.conf.SetUserID
|
||||||
|
|
||||||
if useTx {
|
if useTx {
|
||||||
|
@ -148,7 +148,7 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
|
||||||
|
|
||||||
c.res.role = role
|
c.res.role = role
|
||||||
|
|
||||||
ps, ok := prepared[stmtHash(c.res.name, role)]
|
ps, ok := c.sg.prepared[stmtHash(c.res.name, role)]
|
||||||
if !ok {
|
if !ok {
|
||||||
return nil, nil, errNotFound
|
return nil, nil, errNotFound
|
||||||
}
|
}
|
||||||
|
@ -198,7 +198,7 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
mutation := (c.res.op == qcode.QTMutation)
|
mutation := (c.res.op == qcode.QTMutation)
|
||||||
useRoleQuery := c.sg.conf.IsABACEnabled() && mutation
|
useRoleQuery := c.sg.abacEnabled && mutation
|
||||||
useTx := useRoleQuery || c.sg.conf.SetUserID
|
useTx := useRoleQuery || c.sg.conf.SetUserID
|
||||||
|
|
||||||
if useTx {
|
if useTx {
|
||||||
|
|
|
@ -0,0 +1,284 @@
|
||||||
|
package core
|
||||||
|
|
||||||
|
import (
|
||||||
|
"fmt"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
"unicode"
|
||||||
|
|
||||||
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
|
"github.com/gobuffalo/flect"
|
||||||
|
)
|
||||||
|
|
||||||
|
func (sg *SuperGraph) initConfig() error {
|
||||||
|
c := sg.conf
|
||||||
|
|
||||||
|
for k, v := range c.Inflections {
|
||||||
|
flect.AddPlural(k, v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Variables: Validate and sanitize
|
||||||
|
for k, v := range c.Vars {
|
||||||
|
c.Vars[k] = sanitizeVars(v)
|
||||||
|
}
|
||||||
|
|
||||||
|
// Tables: Validate and sanitize
|
||||||
|
tm := make(map[string]struct{})
|
||||||
|
|
||||||
|
for i := 0; i < len(c.Tables); i++ {
|
||||||
|
t := &c.Tables[i]
|
||||||
|
t.Name = flect.Pluralize(strings.ToLower(t.Name))
|
||||||
|
|
||||||
|
if _, ok := tm[t.Name]; ok {
|
||||||
|
sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)
|
||||||
|
sg.log.Printf("WRN duplicate table found: %s", t.Name)
|
||||||
|
}
|
||||||
|
tm[t.Name] = struct{}{}
|
||||||
|
|
||||||
|
t.Table = flect.Pluralize(strings.ToLower(t.Table))
|
||||||
|
}
|
||||||
|
|
||||||
|
sg.roles = make(map[string]*Role)
|
||||||
|
|
||||||
|
for i := 0; i < len(c.Roles); i++ {
|
||||||
|
role := &c.Roles[i]
|
||||||
|
role.Name = sanitize(role.Name)
|
||||||
|
|
||||||
|
if _, ok := sg.roles[role.Name]; ok {
|
||||||
|
c.Roles = append(c.Roles[:i], c.Roles[i+1:]...)
|
||||||
|
sg.log.Printf("WRN duplicate role found: %s", role.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
role.Match = sanitize(role.Match)
|
||||||
|
role.tm = make(map[string]*RoleTable)
|
||||||
|
|
||||||
|
for n, table := range role.Tables {
|
||||||
|
role.tm[table.Name] = &role.Tables[n]
|
||||||
|
}
|
||||||
|
|
||||||
|
sg.roles[role.Name] = role
|
||||||
|
}
|
||||||
|
|
||||||
|
// If user role not defined then create it
|
||||||
|
if _, ok := sg.roles["user"]; !ok {
|
||||||
|
ur := Role{
|
||||||
|
Name: "user",
|
||||||
|
tm: make(map[string]*RoleTable),
|
||||||
|
}
|
||||||
|
c.Roles = append(c.Roles, ur)
|
||||||
|
sg.roles["user"] = &ur
|
||||||
|
}
|
||||||
|
|
||||||
|
// Roles: validate and sanitize
|
||||||
|
c.RolesQuery = sanitize(c.RolesQuery)
|
||||||
|
|
||||||
|
if len(c.RolesQuery) == 0 {
|
||||||
|
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
||||||
|
}
|
||||||
|
|
||||||
|
_, userExists := sg.roles["user"]
|
||||||
|
_, sg.anonExists = sg.roles["anon"]
|
||||||
|
|
||||||
|
sg.abacEnabled = userExists && len(c.RolesQuery) != 0
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func getDBTableAliases(c *Config) map[string][]string {
|
||||||
|
m := make(map[string][]string, len(c.Tables))
|
||||||
|
|
||||||
|
for i := range c.Tables {
|
||||||
|
t := c.Tables[i]
|
||||||
|
|
||||||
|
if len(t.Table) == 0 || len(t.Columns) != 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
m[t.Table] = append(m[t.Table], t.Name)
|
||||||
|
}
|
||||||
|
return m
|
||||||
|
}
|
||||||
|
|
||||||
|
func addTables(c *Config, di *psql.DBInfo) error {
|
||||||
|
for _, t := range c.Tables {
|
||||||
|
if len(t.Table) == 0 || len(t.Columns) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if err := addTable(di, t.Columns, t); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func addTable(di *psql.DBInfo, cols []Column, t Table) error {
|
||||||
|
bc, ok := di.GetColumn(t.Table, t.Name)
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf(
|
||||||
|
"Column '%s' not found on table '%s'",
|
||||||
|
t.Name, t.Table)
|
||||||
|
}
|
||||||
|
|
||||||
|
if bc.Type != "json" && bc.Type != "jsonb" {
|
||||||
|
return fmt.Errorf(
|
||||||
|
"Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
|
||||||
|
t.Name, t.Table, bc.Type)
|
||||||
|
}
|
||||||
|
|
||||||
|
table := psql.DBTable{
|
||||||
|
Name: t.Name,
|
||||||
|
Key: strings.ToLower(t.Name),
|
||||||
|
Type: bc.Type,
|
||||||
|
}
|
||||||
|
|
||||||
|
columns := make([]psql.DBColumn, 0, len(cols))
|
||||||
|
|
||||||
|
for i := range cols {
|
||||||
|
c := cols[i]
|
||||||
|
columns = append(columns, psql.DBColumn{
|
||||||
|
Name: c.Name,
|
||||||
|
Key: strings.ToLower(c.Name),
|
||||||
|
Type: c.Type,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
di.AddTable(table, columns)
|
||||||
|
bc.FKeyTable = t.Name
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func addForeignKeys(c *Config, di *psql.DBInfo) error {
|
||||||
|
for _, t := range c.Tables {
|
||||||
|
for _, c := range t.Columns {
|
||||||
|
if len(c.ForeignKey) == 0 {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
if err := addForeignKey(di, c, t); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
|
||||||
|
c1, ok := di.GetColumn(t.Name, c.Name)
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf(
|
||||||
|
"Invalid table '%s' or column '%s' in Config",
|
||||||
|
t.Name, c.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
v := strings.SplitN(c.ForeignKey, ".", 2)
|
||||||
|
if len(v) != 2 {
|
||||||
|
return fmt.Errorf(
|
||||||
|
"Invalid foreign_key in Config for table '%s' and column '%s",
|
||||||
|
t.Name, c.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
fkt, fkc := v[0], v[1]
|
||||||
|
c2, ok := di.GetColumn(fkt, fkc)
|
||||||
|
if !ok {
|
||||||
|
return fmt.Errorf(
|
||||||
|
"Invalid foreign_key in Config for table '%s' and column '%s",
|
||||||
|
t.Name, c.Name)
|
||||||
|
}
|
||||||
|
|
||||||
|
c1.FKeyTable = fkt
|
||||||
|
c1.FKeyColID = []int16{c2.ID}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func addRoles(c *Config, qc *qcode.Compiler) error {
|
||||||
|
for _, r := range c.Roles {
|
||||||
|
for _, t := range r.Tables {
|
||||||
|
if err := addRole(qc, r, t); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func addRole(qc *qcode.Compiler, r Role, t RoleTable) error {
|
||||||
|
blockFilter := []string{"false"}
|
||||||
|
|
||||||
|
query := qcode.QueryConfig{
|
||||||
|
Limit: t.Query.Limit,
|
||||||
|
Filters: t.Query.Filters,
|
||||||
|
Columns: t.Query.Columns,
|
||||||
|
DisableFunctions: t.Query.DisableFunctions,
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.Query.Block {
|
||||||
|
query.Filters = blockFilter
|
||||||
|
}
|
||||||
|
|
||||||
|
insert := qcode.InsertConfig{
|
||||||
|
Filters: t.Insert.Filters,
|
||||||
|
Columns: t.Insert.Columns,
|
||||||
|
Presets: t.Insert.Presets,
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.Insert.Block {
|
||||||
|
insert.Filters = blockFilter
|
||||||
|
}
|
||||||
|
|
||||||
|
update := qcode.UpdateConfig{
|
||||||
|
Filters: t.Update.Filters,
|
||||||
|
Columns: t.Update.Columns,
|
||||||
|
Presets: t.Update.Presets,
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.Update.Block {
|
||||||
|
update.Filters = blockFilter
|
||||||
|
}
|
||||||
|
|
||||||
|
delete := qcode.DeleteConfig{
|
||||||
|
Filters: t.Delete.Filters,
|
||||||
|
Columns: t.Delete.Columns,
|
||||||
|
}
|
||||||
|
|
||||||
|
if t.Delete.Block {
|
||||||
|
delete.Filters = blockFilter
|
||||||
|
}
|
||||||
|
|
||||||
|
return qc.AddRole(r.Name, t.Name, qcode.TRConfig{
|
||||||
|
Query: query,
|
||||||
|
Insert: insert,
|
||||||
|
Update: update,
|
||||||
|
Delete: delete,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
func (r *Role) GetTable(name string) *RoleTable {
|
||||||
|
return r.tm[name]
|
||||||
|
}
|
||||||
|
|
||||||
|
func sanitize(value string) string {
|
||||||
|
return strings.ToLower(strings.TrimSpace(value))
|
||||||
|
}
|
||||||
|
|
||||||
|
var (
|
||||||
|
varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
|
||||||
|
varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
|
||||||
|
)
|
||||||
|
|
||||||
|
func sanitizeVars(s string) string {
|
||||||
|
s0 := varRe1.ReplaceAllString(s, `{{$1}}`)
|
||||||
|
|
||||||
|
s1 := strings.Map(func(r rune) rune {
|
||||||
|
if unicode.IsSpace(r) {
|
||||||
|
return ' '
|
||||||
|
}
|
||||||
|
return r
|
||||||
|
}, s0)
|
||||||
|
|
||||||
|
return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
|
||||||
|
return strings.ToLower(m)
|
||||||
|
})
|
||||||
|
}
|
|
@ -7,7 +7,6 @@ import (
|
||||||
"fmt"
|
"fmt"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"os"
|
"os"
|
||||||
"path"
|
|
||||||
"sort"
|
"sort"
|
||||||
"strings"
|
"strings"
|
||||||
)
|
)
|
||||||
|
@ -35,11 +34,11 @@ type Config struct {
|
||||||
Persist bool
|
Persist bool
|
||||||
}
|
}
|
||||||
|
|
||||||
func New(cpath string, conf Config) (*List, error) {
|
func New(filename string, conf Config) (*List, error) {
|
||||||
al := List{}
|
al := List{}
|
||||||
|
|
||||||
if len(cpath) != 0 {
|
if len(filename) != 0 {
|
||||||
fp := path.Join(cpath, "allow.list")
|
fp := filename
|
||||||
|
|
||||||
if _, err := os.Stat(fp); err == nil {
|
if _, err := os.Stat(fp); err == nil {
|
||||||
al.filepath = fp
|
al.filepath = fp
|
||||||
|
@ -73,10 +72,10 @@ func New(cpath string, conf Config) (*List, error) {
|
||||||
return nil, errors.New("allow.list not found")
|
return nil, errors.New("allow.list not found")
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(cpath) == 0 {
|
if len(filename) == 0 {
|
||||||
al.filepath = "./config/allow.list"
|
al.filepath = "./config/allow.list"
|
||||||
} else {
|
} else {
|
||||||
al.filepath = path.Join(cpath, "allow.list")
|
al.filepath = filename
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|
|
@ -23,17 +23,13 @@ type preparedItem struct {
|
||||||
roleArg bool
|
roleArg bool
|
||||||
}
|
}
|
||||||
|
|
||||||
var (
|
|
||||||
prepared map[string]*preparedItem
|
|
||||||
)
|
|
||||||
|
|
||||||
func (sg *SuperGraph) initPrepared() error {
|
func (sg *SuperGraph) initPrepared() error {
|
||||||
ct := context.Background()
|
ct := context.Background()
|
||||||
|
|
||||||
if sg.allowList.IsPersist() {
|
if sg.allowList.IsPersist() {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
prepared = make(map[string]*preparedItem)
|
sg.prepared = make(map[string]*preparedItem)
|
||||||
|
|
||||||
tx, err := sg.db.BeginTx(ct, nil)
|
tx, err := sg.db.BeginTx(ct, nil)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
|
@ -100,7 +96,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
||||||
var stmts1 []stmt
|
var stmts1 []stmt
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if sg.conf.IsABACEnabled() {
|
if sg.abacEnabled {
|
||||||
stmts1, err = sg.buildMultiStmt(qb, vars)
|
stmts1, err = sg.buildMultiStmt(qb, vars)
|
||||||
} else {
|
} else {
|
||||||
stmts1, err = sg.buildRoleStmt(qb, vars, "user")
|
stmts1, err = sg.buildRoleStmt(qb, vars, "user")
|
||||||
|
@ -117,7 +113,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if sg.conf.IsAnonRoleDefined() {
|
if sg.anonExists {
|
||||||
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
|
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
|
||||||
|
|
||||||
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
|
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
|
||||||
|
@ -184,7 +180,7 @@ func (sg *SuperGraph) prepare(ct context.Context, tx *sql.Tx, st []stmt, key str
|
||||||
func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if !sg.conf.IsABACEnabled() {
|
if !sg.abacEnabled {
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -255,11 +251,16 @@ func (sg *SuperGraph) initAllowList() error {
|
||||||
var ac allow.Config
|
var ac allow.Config
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if !sg.conf.Production {
|
if len(sg.conf.AllowListFile) == 0 {
|
||||||
|
sg.conf.UseAllowList = false
|
||||||
|
sg.log.Printf("WRN allow list disabled no file specified")
|
||||||
|
}
|
||||||
|
|
||||||
|
if sg.conf.UseAllowList {
|
||||||
ac = allow.Config{CreateIfNotExists: true, Persist: true}
|
ac = allow.Config{CreateIfNotExists: true, Persist: true}
|
||||||
}
|
}
|
||||||
|
|
||||||
sg.allowList, err = allow.New(sg.conf.ConfigPathUsed(), ac)
|
sg.allowList, err = allow.New(sg.conf.AllowListFile, ac)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return fmt.Errorf("failed to initialize allow list: %w", err)
|
return fmt.Errorf("failed to initialize allow list: %w", err)
|
||||||
}
|
}
|
||||||
|
|
|
@ -6,7 +6,6 @@ import (
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/config"
|
|
||||||
"github.com/dosco/super-graph/jsn"
|
"github.com/dosco/super-graph/jsn"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -36,7 +35,7 @@ type resolvFn struct {
|
||||||
// }
|
// }
|
||||||
// }
|
// }
|
||||||
|
|
||||||
// func initRemotes(t config.Table) error {
|
// func initRemotes(t Table) error {
|
||||||
// h := xxhash.New()
|
// h := xxhash.New()
|
||||||
|
|
||||||
// for _, r := range t.Remotes {
|
// for _, r := range t.Remotes {
|
||||||
|
@ -92,7 +91,7 @@ type resolvFn struct {
|
||||||
// return nil
|
// return nil
|
||||||
// }
|
// }
|
||||||
|
|
||||||
func buildFn(r config.Remote) func(http.Header, []byte) ([]byte, error) {
|
func buildFn(r Remote) func(http.Header, []byte) ([]byte, error) {
|
||||||
reqURL := strings.Replace(r.URL, "$id", "%s", 1)
|
reqURL := strings.Replace(r.URL, "$id", "%s", 1)
|
||||||
client := &http.Client{}
|
client := &http.Client{}
|
||||||
|
|
||||||
|
|
Loading…
Reference in New Issue