Compare commits

3 commits:

ef50c1957b
41ea6ef6f5
a266517d17
.gitignore (vendored): 1 change
@@ -29,7 +29,6 @@
.release
main
super-graph
supergraph
*-fuzz.zip
crashers
suppressions
@@ -5,6 +5,8 @@ COPY /cmd/internal/serv/web/ ./
RUN yarn
RUN yarn build

# stage: 2
FROM golang:1.14-alpine as go-build
RUN apk update && \
@@ -31,6 +33,8 @@ RUN echo "Compressing binary, will take a bit of time..." && \
upx --ultra-brute -qq super-graph && \
upx -t super-graph

# stage: 3
FROM alpine:latest
WORKDIR /
Makefile: 2 changes

@@ -77,7 +77,7 @@ clean:
run: clean
	@go run $(BUILD_FLAGS) main.go $(ARGS)

-install: gen
+install:
	@echo $(GOPATH)
	@echo "Commit Hash: `git rev-parse HEAD`"
	@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
README.md: 82 changes

@@ -1,26 +1,74 @@
<!-- <a href="https://supergraph.dev"><img src="https://supergraph.dev/hologram.svg" width="100" height="100" align="right" /></a> -->

<img src="docs/.vuepress/public/super-graph.png" width="250" />
<img src="docs/guide/.vuepress/public/super-graph.png" width="250" />

### Build web products faster. Secure high performance GraphQL

[](https://pkg.go.dev/github.com/dosco/super-graph/core?tab=doc)
[](https://discord.gg/6pSWCTZ)

## What's Super Graph?

## What is Super Graph

Designed to 100x your developer productivity. Super Graph will instantly, and without you writing any code, provide you a high performance GraphQL API for your Postgres DB. GraphQL queries are compiled into a single fast SQL query. Super Graph is a Go library and a service; use it in your own code or run it as a separate service.

It is designed to 100x your developer productivity. Super Graph will instantly, and without you writing any code, provide you a high performance and secure GraphQL API for your Postgres DB. GraphQL queries are translated into a single fast SQL query. No more writing API code: as you develop your web frontend, just make the query you need and Super Graph will do the rest.

## Using it as a service

Super Graph has a rich feature set like integrating with your existing Ruby on Rails apps, joining your DB with data from remote APIs, role and attribute based access control, support for JWT tokens, built-in DB mutations and seeding, and a lot more.

```console
git clone https://github.com/dosco/super-graph
cd ./super-graph
make install

super-graph new <app_name>
```

## Using it in your own code

## The story of Super Graph?

```golang
package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	conf, err := core.ReadInConfig("./config/dev.yml")
	if err != nil {
		log.Fatal(err)
	}

	sg, err := core.NewSuperGraph(conf, db)
	if err != nil {
		log.Fatal(err)
	}

	query := `
	query {
		posts {
			id
			title
		}
	}`

	res, err := sg.GraphQL(context.Background(), query, nil)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(string(res.Data))
}
```
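
Both halves of the claim above are inspectable from code: `res.Data` is the JSON result, and the same `Result` exposes the single SQL statement the query was compiled to (the service logs it at the debug log level via `res.SQL()`; see `http.go` further down in this diff). A minimal sketch, assuming the same hypothetical database and config as the example above:

```golang
// Sketch only: print the single SQL statement a GraphQL query compiles to.
package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	conf, err := core.ReadInConfig("./config/dev.yml")
	if err != nil {
		log.Fatal(err)
	}

	sg, err := core.NewSuperGraph(conf, db)
	if err != nil {
		log.Fatal(err)
	}

	res, err := sg.GraphQL(context.Background(), `query { posts { id title } }`, nil)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(res.SQL()) // the one generated SQL statement for the whole query
}
```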

## About Super Graph

After working on several products through my career, I've found that we spend way too much time building API backends. Most APIs also require constant updating; this costs real time and money.

@@ -37,6 +85,7 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun
- Complex nested queries and mutations
- Auto learns database tables and relationships
- Role and Attribute based access control
- Opaque cursor based efficient pagination
- Full text search and aggregations
- JWT tokens supported (Auth0, etc)
- Join database queries with remote REST APIs
@@ -50,15 +99,6 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun
- Database seeding tool
- Works with Postgres and YugabyteDB

## Get started

```
git clone https://github.com/dosco/super-graph
cd ./super-graph
make install

super-graph new <app_name>
```

## Documentation
@@ -3,13 +3,11 @@ package serv

import (
	"fmt"
	"net/http"

-	"github.com/dosco/super-graph/config"
)

type actionFn func(w http.ResponseWriter, r *http.Request) error

-func newAction(a *config.Action) (http.Handler, error) {
+func newAction(a *Action) (http.Handler, error) {
	var fn actionFn
	var err error

@@ -32,7 +30,7 @@ func newAction(a *config.Action) (http.Handler, error) {
	return http.HandlerFunc(httpFn), nil
}

-func newSQLAction(a *config.Action) (actionFn, error) {
+func newSQLAction(a *Action) (actionFn, error) {
	fn := func(w http.ResponseWriter, r *http.Request) error {
		_, err := db.ExecContext(r.Context(), a.SQL)
		return err
cmd/internal/serv/api.go (new file): 106 additions

@@ -0,0 +1,106 @@
package serv

import (
	"time"

	"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
	"github.com/dosco/super-graph/core"

	"github.com/spf13/viper"
)

const (
	LogLevelNone int = iota
	LogLevelInfo
	LogLevelWarn
	LogLevelError
	LogLevelDebug
)

type Core = core.Config

// Config struct holds the Super Graph config values
type Config struct {
	Core `mapstructure:",squash"`
	Serv `mapstructure:",squash"`

	cpath string
	vi    *viper.Viper
}

// Serv struct contains config values used by the Super Graph service
type Serv struct {
	AppName        string `mapstructure:"app_name"`
	Production     bool
	LogLevel       string `mapstructure:"log_level"`
	HostPort       string `mapstructure:"host_port"`
	Host           string
	Port           string
	HTTPGZip       bool     `mapstructure:"http_compress"`
	WebUI          bool     `mapstructure:"web_ui"`
	EnableTracing  bool     `mapstructure:"enable_tracing"`
	WatchAndReload bool     `mapstructure:"reload_on_config_change"`
	AuthFailBlock  bool     `mapstructure:"auth_fail_block"`
	SeedFile       string   `mapstructure:"seed_file"`
	MigrationsPath string   `mapstructure:"migrations_path"`
	AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
	DebugCORS      bool     `mapstructure:"cors_debug"`

	Auth  auth.Auth
	Auths []auth.Auth

	DB struct {
		Type        string
		Host        string
		Port        uint16
		DBName      string
		User        string
		Password    string
		Schema      string
		PoolSize    int32         `mapstructure:"pool_size"`
		MaxRetries  int           `mapstructure:"max_retries"`
		PingTimeout time.Duration `mapstructure:"ping_timeout"`
	} `mapstructure:"database"`

	Actions []Action
}

// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
	Name          string
	Type          string
	Cookie        string
	CredsInHeader bool `mapstructure:"creds_in_header"`

	Rails struct {
		Version       string
		SecretKeyBase string `mapstructure:"secret_key_base"`
		URL           string
		Password      string
		MaxIdle       int `mapstructure:"max_idle"`
		MaxActive     int `mapstructure:"max_active"`
		Salt          string
		SignSalt      string `mapstructure:"sign_salt"`
		AuthSalt      string `mapstructure:"auth_salt"`
	}

	JWT struct {
		Provider   string
		Secret     string
		PubKeyFile string `mapstructure:"public_key_file"`
		PubKeyType string `mapstructure:"public_key_type"`
	}

	Header struct {
		Name   string
		Value  string
		Exists bool
	}
}

// Action struct contains config values for a Super Graph service action
type Action struct {
	Name     string
	SQL      string
	AuthName string `mapstructure:"auth_name"`
}
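
A note on the `mapstructure:",squash"` tags above: they make the embedded `Core` and `Serv` structs decode from the top level of a single config document rather than from nested keys. A minimal standalone sketch of the same mechanism, using hypothetical `Inner`/`Outer` types (mapstructure is the decoder viper uses internally):

```golang
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

type Inner struct {
	AppName string `mapstructure:"app_name"`
}

type Outer struct {
	Inner `mapstructure:",squash"` // Inner's fields are read from the top level
	Port  string
}

func main() {
	// Without squash, "app_name" would have to live under an "inner" key.
	in := map[string]interface{}{"app_name": "demo", "port": "8080"}

	var out Outer
	if err := mapstructure.Decode(in, &out); err != nil {
		panic(err)
	}
	fmt.Println(out.AppName, out.Port) // demo 8080
}
```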
@@ -6,11 +6,8 @@ import (
	_log "log"
	"os"
	"runtime"
	"strings"

-	"github.com/dosco/super-graph/config"
	"github.com/spf13/cobra"
	"github.com/spf13/viper"
	"go.uber.org/zap"
)

@@ -29,12 +26,13 @@ var (
)

var (
-	log       *_log.Logger   // logger
-	zlog      *zap.Logger    // fast logger
-	conf      *config.Config // parsed config
-	confPath  string         // path to the config file
-	db        *sql.DB        // database connection pool
-	secretKey [32]byte       // encryption key
+	log       *_log.Logger // logger
+	zlog      *zap.Logger  // fast logger
+	logLevel  int          // log level
+	conf      *Config      // parsed config
+	confPath  string       // path to the config file
+	db        *sql.DB      // database connection pool
+	secretKey [32]byte     // encryption key
)

func Cmd() {
@@ -132,12 +130,12 @@ e.g. db:migrate -+1
	Run: cmdNew,
})

-	rootCmd.AddCommand(&cobra.Command{
-		Use:   fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
-		Short: "Dump config to file",
-		Long:  "Dump current config to a file in the selected format",
-		Run:   cmdConfDump,
-	})
+	// rootCmd.AddCommand(&cobra.Command{
+	// 	Use:   fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
+	// 	Short: "Dump config to file",
+	// 	Long:  "Dump current config to a file in the selected format",
+	// 	Run:   cmdConfDump,
+	// })

	rootCmd.AddCommand(&cobra.Command{
		Use: "version",
@@ -1,29 +1,21 @@
package serv

-import (
-	"fmt"
-	"os"
-
-	"github.com/dosco/super-graph/config"
-	"github.com/spf13/cobra"
-)
-
-func cmdConfDump(cmd *cobra.Command, args []string) {
-	if len(args) != 1 {
-		cmd.Help() //nolint: errcheck
-		os.Exit(1)
-	}
-
-	fname := fmt.Sprintf("%s.%s", config.GetConfigName(), args[0])
-
-	conf, err := initConf()
-	if err != nil {
-		log.Fatalf("ERR failed to read config: %s", err)
-	}
-
-	if err := conf.WriteConfigAs(fname); err != nil {
-		log.Fatalf("ERR failed to write config: %s", err)
-	}
-
-	log.Printf("INF config dumped to ./%s", fname)
-}
+// func cmdConfDump(cmd *cobra.Command, args []string) {
+// 	if len(args) != 1 {
+// 		cmd.Help() //nolint: errcheck
+// 		os.Exit(1)
+// 	}
+
+// 	fname := fmt.Sprintf("%s.%s", config.GetConfigName(), args[0])
+
+// 	conf, err := initConf()
+// 	if err != nil {
+// 		log.Fatalf("ERR failed to read config: %s", err)
+// 	}
+
+// 	if err := conf.WriteConfigAs(fname); err != nil {
+// 		log.Fatalf("ERR failed to write config: %s", err)
+// 	}
+
+// 	log.Printf("INF config dumped to ./%s", fname)
+// }
@@ -26,7 +26,7 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
	cmdDBCreate(cmd, []string{})
	cmdDBMigrate(cmd, []string{"up"})

-	sfile := path.Join(conf.ConfigPathUsed(), conf.SeedFile)
+	sfile := path.Join(conf.cpath, conf.SeedFile)
	_, err := os.Stat(sfile)

	if err == nil {
@@ -144,7 +144,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {

	m.Data = getMigrationVars()

-	err = m.LoadMigrations(path.Join(conf.ConfigPathUsed(), conf.MigrationsPath))
+	err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
	if err != nil {
		log.Fatalf("ERR failed to load migrations: %s", err)
	}
@@ -33,14 +33,14 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
		log.Fatalf("ERR failed to connect to database: %s", err)
	}

-	sfile := path.Join(conf.ConfigPathUsed(), conf.SeedFile)
+	sfile := path.Join(conf.cpath, conf.SeedFile)

	b, err := ioutil.ReadFile(sfile)
	if err != nil {
		log.Fatalf("ERR failed to read seed file %s: %s", sfile, err)
	}

-	sg, err = core.NewSuperGraph(conf, db)
+	sg, err = core.NewSuperGraph(&conf.Core, db)
	if err != nil {
		log.Fatalf("ERR failed to initialize Super Graph: %s", err)
	}
@@ -28,7 +28,7 @@ func cmdServ(cmd *cobra.Command, args []string) {
	// initResolvers()
	// }

-	sg, err = core.NewSuperGraph(conf, db)
+	sg, err = core.NewSuperGraph(&conf.Core, db)
	if err != nil {
		fatalInProd(err, "failed to initialize Super Graph")
	}
cmd/internal/serv/config.go (new file): 115 additions

@@ -0,0 +1,115 @@
package serv

import (
	"fmt"
	"os"
	"path"
	"strings"

	"github.com/spf13/viper"
)

// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
// environment variable. This is the best way to create a new Super Graph config.
func ReadInConfig(configFile string) (*Config, error) {
	cpath := path.Dir(configFile)
	cfile := path.Base(configFile)
	vi := newViper(cpath, cfile)

	if err := vi.ReadInConfig(); err != nil {
		return nil, err
	}

	inherits := vi.GetString("inherits")

	if len(inherits) != 0 {
		vi = newViper(cpath, inherits)

		if err := vi.ReadInConfig(); err != nil {
			return nil, err
		}

		if vi.IsSet("inherits") {
			return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
				inherits,
				vi.GetString("inherits"))
		}

		vi.SetConfigName(cfile)

		if err := vi.MergeInConfig(); err != nil {
			return nil, err
		}
	}

	c := &Config{cpath: cpath, vi: vi}

	if err := vi.Unmarshal(&c); err != nil {
		return nil, fmt.Errorf("failed to decode config, %v", err)
	}

	if len(c.Core.AllowListFile) == 0 {
		c.Core.AllowListFile = path.Join(cpath, "allow.list")
	}

	return c, nil
}

func newViper(configPath, configFile string) *viper.Viper {
	vi := viper.New()

	vi.SetEnvPrefix("SG")
	vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	vi.AutomaticEnv()

	vi.AddConfigPath(configPath)
	vi.SetConfigName(configFile)
	vi.AddConfigPath("./config")

	vi.SetDefault("host_port", "0.0.0.0:8080")
	vi.SetDefault("web_ui", false)
	vi.SetDefault("enable_tracing", false)
	vi.SetDefault("auth_fail_block", "always")
	vi.SetDefault("seed_file", "seed.js")

	vi.SetDefault("database.type", "postgres")
	vi.SetDefault("database.host", "localhost")
	vi.SetDefault("database.port", 5432)
	vi.SetDefault("database.user", "postgres")
	vi.SetDefault("database.schema", "public")

	vi.SetDefault("env", "development")

	vi.BindEnv("env", "GO_ENV") //nolint: errcheck
	vi.BindEnv("host", "HOST")  //nolint: errcheck
	vi.BindEnv("port", "PORT")  //nolint: errcheck

	vi.SetDefault("auth.rails.max_idle", 80)
	vi.SetDefault("auth.rails.max_active", 12000)

	return vi
}

func GetConfigName() string {
	if len(os.Getenv("GO_ENV")) == 0 {
		return "dev"
	}

	ge := strings.ToLower(os.Getenv("GO_ENV"))

	switch {
	case strings.HasPrefix(ge, "pro"):
		return "prod"

	case strings.HasPrefix(ge, "sta"):
		return "stage"

	case strings.HasPrefix(ge, "tes"):
		return "test"

	case strings.HasPrefix(ge, "dev"):
		return "dev"
	}

	return ge
}
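
For illustration, a minimal sketch of how `GetConfigName` and `ReadInConfig` combine to pick the config file, the lookup flow used by `initConf` in `init.go` below. The `exampleConfigLookup` helper is hypothetical and not part of this diff, and it assumes a `./config/prod.yml` (or `.json`/`.toml`) exists:

```golang
package serv

import (
	"os"
	"path"
)

// exampleConfigLookup is a hypothetical helper: GO_ENV is prefix-matched to a
// file name, so "production" (or just "pro...") resolves to "prod" and viper
// then loads ./config/prod.yml (or .json/.toml). An unset GO_ENV falls back
// to "dev".
func exampleConfigLookup() (*Config, error) {
	os.Setenv("GO_ENV", "production") // GetConfigName() now returns "prod"
	return ReadInConfig(path.Join("./config", GetConfigName()))
}
```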
@@ -9,7 +9,6 @@ import (
	"strings"

	"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
-	"github.com/dosco/super-graph/config"
	"github.com/dosco/super-graph/core"
	"github.com/rs/cors"
	"go.uber.org/zap"
@@ -83,7 +82,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {

	res, err := sg.GraphQL(ct, req.Query, req.Vars)

-	if conf.LogLevel() >= config.LogLevelDebug {
+	if logLevel >= LogLevelDebug {
		log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
	}

@@ -94,7 +93,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {

	json.NewEncoder(w).Encode(res)

-	if conf.LogLevel() >= config.LogLevelInfo {
+	if logLevel >= LogLevelInfo {
		zlog.Info("success",
			zap.String("op", res.Operation()),
			zap.String("name", res.QueryName()),
@@ -111,7 +110,7 @@ func renderErr(w http.ResponseWriter, err error, res *core.Result) {

	json.NewEncoder(w).Encode(&errorResp{err})

-	if conf.LogLevel() >= config.LogLevelError {
+	if logLevel >= LogLevelError {
		if res != nil {
			zlog.Error(err.Error(),
				zap.String("op", res.Operation()),
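
`apiV1` above runs the request through `sg.GraphQL` and writes the `core.Result` back as JSON; the bundled playground (in the web build files further down) calls it at `/api/v1/graphql`, and `host_port` defaults to `0.0.0.0:8080`. A minimal client sketch, assuming a locally running service; the `query`/`variables` body keys follow the usual GraphQL-over-HTTP convention and are an assumption here, since the diff does not show the request struct's JSON tags:

```golang
package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	// Assumed wire format: {"query": "...", "variables": {...}}.
	body, err := json.Marshal(map[string]interface{}{
		"query":     `query { posts { id title } }`,
		"variables": nil,
	})
	if err != nil {
		panic(err)
	}

	resp, err := http.Post("http://localhost:8080/api/v1/graphql",
		"application/json", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	b, err := ioutil.ReadAll(resp.Body)
	if err != nil {
		panic(err)
	}
	fmt.Println(string(b)) // JSON-encoded core.Result
}
```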
@@ -2,78 +2,96 @@ package serv

import (
	"database/sql"
	"fmt"
	"path"
	"time"

	"github.com/dosco/super-graph/config"
	_ "github.com/jackc/pgx/v4/stdlib"
	"github.com/jackc/pgx/v4"
	"github.com/jackc/pgx/v4/stdlib"
	//_ "github.com/jackc/pgx/v4/stdlib"
)

func initConf() (*config.Config, error) {
	return config.NewConfigWithLogger(confPath, log)
}

func initDB(c *config.Config) (*sql.DB, error) {
	var db *sql.DB
	var err error

	cs := fmt.Sprintf("postgres://%s:%s@%s:%d/%s",
		c.DB.User, c.DB.Password,
		c.DB.Host, c.DB.Port, c.DB.DBName)

	for i := 1; i < 10; i++ {
		db, err = sql.Open("pgx", cs)
		if err == nil {
			break
		}
		time.Sleep(time.Duration(i*100) * time.Millisecond)
	}

func initConf() (*Config, error) {
	c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
	if err != nil {
		return nil, err
	}

	return db, nil
	switch c.LogLevel {
	case "debug":
		logLevel = LogLevelDebug
	case "error":
		logLevel = LogLevelError
	case "warn":
		logLevel = LogLevelWarn
	case "info":
		logLevel = LogLevelInfo
	default:
		logLevel = LogLevelNone
	}

	// config, _ := pgxpool.ParseConfig("")
	// config.ConnConfig.Host = c.DB.Host
	// config.ConnConfig.Port = c.DB.Port
	// config.ConnConfig.Database = c.DB.DBName
	// config.ConnConfig.User = c.DB.User
	// config.ConnConfig.Password = c.DB.Password
	// config.ConnConfig.RuntimeParams = map[string]string{
	// 	"application_name": c.AppName,
	// 	"search_path":      c.DB.Schema,
	// }
	// Auths: validate and sanitize
	am := make(map[string]struct{})

	// switch c.LogLevel {
	// case "debug":
	// 	config.ConnConfig.LogLevel = pgx.LogLevelDebug
	// case "info":
	// 	config.ConnConfig.LogLevel = pgx.LogLevelInfo
	// case "warn":
	// 	config.ConnConfig.LogLevel = pgx.LogLevelWarn
	// case "error":
	// 	config.ConnConfig.LogLevel = pgx.LogLevelError
	// default:
	// 	config.ConnConfig.LogLevel = pgx.LogLevelNone
	// }
	for i := 0; i < len(c.Auths); i++ {
		a := &c.Auths[i]
		a.Name = sanitize(a.Name)

		// config.ConnConfig.Logger = NewSQLLogger(logger)
		if _, ok := am[a.Name]; ok {
			c.Auths = append(c.Auths[:i], c.Auths[i+1:]...)
			log.Printf("WRN duplicate auth found: %s", a.Name)
		}
		am[a.Name] = struct{}{}
	}

	// // if c.DB.MaxRetries != 0 {
	// // 	opt.MaxRetries = c.DB.MaxRetries
	// // }
	// Actions: validate and sanitize
	axm := make(map[string]struct{})

	// if c.DB.PoolSize != 0 {
	// 	config.MaxConns = conf.DB.PoolSize
	// }
	for i := 0; i < len(c.Actions); i++ {
		a := &c.Actions[i]
		a.Name = sanitize(a.Name)
		a.AuthName = sanitize(a.AuthName)

		// var db *pgxpool.Pool
		// var err error
		if _, ok := axm[a.Name]; ok {
			c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
			log.Printf("WRN duplicate action found: %s", a.Name)
		}

		if _, ok := am[a.AuthName]; !ok {
			c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
			log.Printf("WRN invalid auth_name '%s' for auth: %s", a.AuthName, a.Name)
		}
		axm[a.Name] = struct{}{}
	}

	var anonFound bool

	for _, r := range c.Roles {
		if sanitize(r.Name) == "anon" {
			anonFound = true
		}
	}

	if !anonFound {
		log.Printf("WRN unauthenticated requests will be blocked. no role 'anon' defined")
		c.AuthFailBlock = false
	}

	return c, nil
}

func initDB(c *Config) (*sql.DB, error) {
	var db *sql.DB
	var err error

	// cs := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s",
	// 	c.DB.Host, c.DB.Port,
	// 	c.DB.User, c.DB.Password,
	// 	c.DB.DBName)

	// fmt.Println(">>", cs)

	// for i := 1; i < 10; i++ {
	// 	db, err = pgxpool.ConnectConfig(context.Background(), config)
	// 	db, err = sql.Open("pgx", cs)
	// 	if err == nil {
	// 		break
	// 	}
@@ -85,4 +103,52 @@ func initDB(c *config.Config) (*sql.DB, error) {
	// }

	// return db, nil

	config, _ := pgx.ParseConfig("")
	config.Host = c.DB.Host
	config.Port = c.DB.Port
	config.Database = c.DB.DBName
	config.User = c.DB.User
	config.Password = c.DB.Password
	config.RuntimeParams = map[string]string{
		"application_name": c.AppName,
		"search_path":      c.DB.Schema,
	}

	// switch c.LogLevel {
	// case "debug":
	// 	config.LogLevel = pgx.LogLevelDebug
	// case "info":
	// 	config.LogLevel = pgx.LogLevelInfo
	// case "warn":
	// 	config.LogLevel = pgx.LogLevelWarn
	// case "error":
	// 	config.LogLevel = pgx.LogLevelError
	// default:
	// 	config.LogLevel = pgx.LogLevelNone
	// }

	//config.Logger = NewSQLLogger(logger)

	// if c.DB.MaxRetries != 0 {
	// 	opt.MaxRetries = c.DB.MaxRetries
	// }

	// if c.DB.PoolSize != 0 {
	// 	config.MaxConns = conf.DB.PoolSize
	// }

	for i := 1; i < 10; i++ {
		db = stdlib.OpenDB(*config)
		if db == nil {
			break
		}
		time.Sleep(time.Duration(i*100) * time.Millisecond)
	}

	if err != nil {
		return nil, err
	}

	return db, nil
}
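
The new `initDB` above configures pgx directly and wraps it with the `stdlib` adapter, so the rest of the service keeps using the `database/sql` interface. One subtlety: `stdlib.OpenDB` only builds the `*sql.DB` handle and never dials, so a `Ping` is the usual way to fail fast (the retry loop above checks for a nil handle, which `OpenDB` does not return). A minimal sketch under assumed local connection details:

```golang
package main

import (
	"log"

	"github.com/jackc/pgx/v4"
	"github.com/jackc/pgx/v4/stdlib"
)

func main() {
	// Assumed local database; substitute your own connection string.
	cfg, err := pgx.ParseConfig("postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}
	cfg.RuntimeParams = map[string]string{"application_name": "example"}

	db := stdlib.OpenDB(*cfg) // returns a *sql.DB backed by pgx; no I/O yet
	if err := db.Ping(); err != nil {
		log.Fatal(err) // Ping forces a real connection attempt
	}
	log.Println("connected")
}
```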
@@ -5,11 +5,43 @@ import (
	"fmt"
	"net/http"

-	"github.com/dosco/super-graph/config"
	"github.com/dosco/super-graph/core"
)

-func SimpleHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
+// Auth struct contains authentication related config values used by the Super Graph service
+type Auth struct {
+	Name          string
+	Type          string
+	Cookie        string
+	CredsInHeader bool `mapstructure:"creds_in_header"`
+
+	Rails struct {
+		Version       string
+		SecretKeyBase string `mapstructure:"secret_key_base"`
+		URL           string
+		Password      string
+		MaxIdle       int `mapstructure:"max_idle"`
+		MaxActive     int `mapstructure:"max_active"`
+		Salt          string
+		SignSalt      string `mapstructure:"sign_salt"`
+		AuthSalt      string `mapstructure:"auth_salt"`
+	}
+
+	JWT struct {
+		Provider   string
+		Secret     string
+		PubKeyFile string `mapstructure:"public_key_file"`
+		PubKeyType string `mapstructure:"public_key_type"`
+	}
+
+	Header struct {
+		Name   string
+		Value  string
+		Exists bool
+	}
+}
+
+func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

@@ -32,7 +64,7 @@ func SimpleHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error)
	}, nil
}

-func HeaderHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
+func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	hdr := ac.Header

	if len(hdr.Name) == 0 {
@@ -64,7 +96,7 @@ func HeaderHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error)
	}, nil
}

-func WithAuth(next http.Handler, ac *config.Auth) (http.Handler, error) {
+func WithAuth(next http.Handler, ac *Auth) (http.Handler, error) {
	var err error

	if ac.CredsInHeader {
@@ -7,7 +7,6 @@ import (
	"strings"

	jwt "github.com/dgrijalva/jwt-go"
-	"github.com/dosco/super-graph/config"
	"github.com/dosco/super-graph/core"
)

@@ -16,7 +15,7 @@ const (
	jwtAuth0 int = iota + 1
)

-func JwtHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
+func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	var key interface{}
	var jwtProvider int
@@ -9,13 +9,12 @@ import (
	"strings"

	"github.com/bradfitz/gomemcache/memcache"
-	"github.com/dosco/super-graph/config"
-	"github.com/dosco/super-graph/core"
+	"github.com/dosco/super-graph/cmd/internal/serv/internal/rails"
+	"github.com/dosco/super-graph/core"
	"github.com/garyburd/redigo/redis"
)

-func RailsHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
+func RailsHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	ru := ac.Rails.URL

	if strings.HasPrefix(ru, "memcache:") {
@@ -29,7 +28,7 @@ func RailsHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error)
	return RailsCookieHandler(ac, next)
}

-func RailsRedisHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
+func RailsRedisHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	cookie := ac.Cookie

	if len(cookie) == 0 {
@@ -85,7 +84,7 @@ func RailsRedisHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, er
	}, nil
}

-func RailsMemcacheHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
+func RailsMemcacheHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	cookie := ac.Cookie

	if len(cookie) == 0 {
@@ -128,7 +127,7 @@ func RailsMemcacheHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc,
	}, nil
}

-func RailsCookieHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, error) {
+func RailsCookieHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	cookie := ac.Cookie
	if len(cookie) == 0 {
		return nil, fmt.Errorf("no auth.cookie defined")
@@ -159,7 +158,7 @@ func RailsCookieHandler(ac *config.Auth, next http.Handler) (http.HandlerFunc, e
	}, nil
}

-func railsAuth(ac *config.Auth) (*rails.Auth, error) {
+func railsAuth(ac *Auth) (*rails.Auth, error) {
	secret := ac.Rails.SecretKeyBase
	if len(secret) == 0 {
		return nil, errors.New("no auth.rails.secret_key_base defined")
@@ -12,11 +12,10 @@ import (
	rice "github.com/GeertJohan/go.rice"
	"github.com/NYTimes/gziphandler"
	"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
-	"github.com/dosco/super-graph/config"
)

func initWatcher() {
-	cpath := conf.ConfigPathUsed()
+	cpath := conf.cpath
	if conf != nil && !conf.WatchAndReload {
		return
	}
@@ -170,7 +169,7 @@ func setActionRoutes(routes map[string]http.Handler) error {
	return nil
}

-func findAuth(name string) *config.Auth {
+func findAuth(name string) *auth.Auth {
	for _, a := range conf.Auths {
		if strings.EqualFold(a.Name, name) {
			return &a
@@ -113,12 +113,12 @@ func al(b byte) bool {
func fatalInProd(err error, msg string) {
	var wg sync.WaitGroup

-	if !isDev() {
+	if isDev() {
+		log.Printf("ERR %s: %s", msg, err)
+	} else {
		log.Fatalf("ERR %s: %s", msg, err)
	}

-	log.Printf("ERR %s: %s", msg, err)
-
	wg.Add(1)
	wg.Wait()
}
@@ -126,3 +126,7 @@ func fatalInProd(err error, msg string) {
func isDev() bool {
	return strings.HasPrefix(os.Getenv("GO_ENV"), "dev")
}
+
+func sanitize(value string) string {
+	return strings.ToLower(strings.TrimSpace(value))
+}
cmd/internal/serv/web/.gitignore (vendored): 2 changes
@@ -7,7 +7,7 @@
/coverage

# production
-/build
+# /build

# development
/src/components/dataviz/core/*.js.map
cmd/internal/serv/web/build/asset-manifest.json (new file): 30 additions
@@ -0,0 +1,30 @@
{
  "files": {
    "main.css": "/static/css/main.c6b5c55c.chunk.css",
    "main.js": "/static/js/main.04d74040.chunk.js",
    "main.js.map": "/static/js/main.04d74040.chunk.js.map",
    "runtime-main.js": "/static/js/runtime-main.4aea9da3.js",
    "runtime-main.js.map": "/static/js/runtime-main.4aea9da3.js.map",
    "static/js/2.03370bd3.chunk.js": "/static/js/2.03370bd3.chunk.js",
    "static/js/2.03370bd3.chunk.js.map": "/static/js/2.03370bd3.chunk.js.map",
    "index.html": "/index.html",
    "precache-manifest.e33bc3c7c6774d7032c490820c96901d.js": "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js",
    "service-worker.js": "/service-worker.js",
    "static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
    "static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
    "static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
    "static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
    "static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
    "static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
    "static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
    "static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
    "static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
    "static/media/logo.png": "/static/media/logo.57ee3b60.png"
  },
  "entrypoints": [
    "static/js/runtime-main.4aea9da3.js",
    "static/js/2.03370bd3.chunk.js",
    "static/css/main.c6b5c55c.chunk.css",
    "static/js/main.04d74040.chunk.js"
  ]
}
cmd/internal/serv/web/build/favicon.ico (executable file, binary)
Binary file not shown. After: 15 KiB.
cmd/internal/serv/web/build/index.html (new file): 1 addition

@@ -0,0 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="manifest" href="/manifest.json"/><link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet"><title>Super Graph - GraphQL API for Rails</title><link href="/static/css/main.c6b5c55c.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(i){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],l=0,f=[];l<n.length;l++)t=n[l],Object.prototype.hasOwnProperty.call(p,t)&&p[t]&&f.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(i[r]=o[r]);for(s&&s(e);f.length;)f.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=l(l.s=t[0]))}return e}var t={},p={1:0},c=[];function l(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return i[e].call(r.exports,r,r.exports,l),r.l=!0,r.exports}l.m=i,l.c=t,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(r,e){if(1&e&&(r=l(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)l.d(t,n,function(e){return r[e]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var r=this.webpackJsonpweb=this.webpackJsonpweb||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/2.03370bd3.chunk.js"></script><script src="/static/js/main.04d74040.chunk.js"></script></body></html>
cmd/internal/serv/web/build/manifest.json (executable file): 15 additions

@@ -0,0 +1,15 @@
{
  "short_name": "Super Graph",
  "name": "Super Graph - GraphQL API for Rails",
  "icons": [
    {
      "src": "favicon.ico",
      "sizes": "64x64 32x32 24x24 16x16",
      "type": "image/x-icon"
    }
  ],
  "start_url": ".",
  "display": "standalone",
  "theme_color": "#000000",
  "background_color": "#ffffff"
}
@@ -0,0 +1,58 @@
self.__precacheManifest = (self.__precacheManifest || []).concat([
  {
    "revision": "ecdae64182d05c64e7f7f200ed03a4ed",
    "url": "/index.html"
  },
  {
    "revision": "6e9467dc213a3e2b84ea",
    "url": "/static/css/main.c6b5c55c.chunk.css"
  },
  {
    "revision": "c156a125990ddf5dcc51",
    "url": "/static/js/2.03370bd3.chunk.js"
  },
  {
    "revision": "6e9467dc213a3e2b84ea",
    "url": "/static/js/main.04d74040.chunk.js"
  },
  {
    "revision": "427262b6771d3f49a7c5",
    "url": "/static/js/runtime-main.4aea9da3.js"
  },
  {
    "revision": "5ab204b9b95c06640dbefae9a65b1db2",
    "url": "/static/media/GraphQLLanguageService.js.5ab204b9.flow"
  },
  {
    "revision": "4ce7ba191f7ebee4426768f246b2f0e0",
    "url": "/static/media/autocompleteUtils.js.4ce7ba19.flow"
  },
  {
    "revision": "7f98f032085704c8943ec2d1925c7c84",
    "url": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow"
  },
  {
    "revision": "4dbec62f1d8e8417afb9cbd19f1268c3",
    "url": "/static/media/getDefinition.js.4dbec62f.flow"
  },
  {
    "revision": "65b0979ac23feca49e4411883fd8eaab",
    "url": "/static/media/getDiagnostics.js.65b0979a.flow"
  },
  {
    "revision": "d94118379d362fc161aa1246bcc14d43",
    "url": "/static/media/getHoverInformation.js.d9411837.flow"
  },
  {
    "revision": "c04e3998712b37a96f0bfd283fa06b52",
    "url": "/static/media/getOutline.js.c04e3998.flow"
  },
  {
    "revision": "02c24280c5e4a7eb3c6cfcb079a8f1e3",
    "url": "/static/media/index.js.02c24280.flow"
  },
  {
    "revision": "57ee3b6084cb9d3c754cc12d25a98035",
    "url": "/static/media/logo.57ee3b60.png"
  }
]);
cmd/internal/serv/web/build/service-worker.js (new file): 39 additions

@@ -0,0 +1,39 @@
/**
 * Welcome to your Workbox-powered service worker!
 *
 * You'll need to register this file in your web app and you should
 * disable HTTP caching for this file too.
 * See https://goo.gl/nhQhGp
 *
 * The rest of the code is auto-generated. Please don't update this file
 * directly; instead, make changes to your Workbox build configuration
 * and re-run your build process.
 * See https://goo.gl/2aRDsh
 */

importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");

importScripts(
  "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js"
);

self.addEventListener('message', (event) => {
  if (event.data && event.data.type === 'SKIP_WAITING') {
    self.skipWaiting();
  }
});

workbox.core.clientsClaim();

/**
 * The workboxSW.precacheAndRoute() method efficiently caches and responds to
 * requests for URLs in the manifest.
 * See https://goo.gl/S9QRab
 */
self.__precacheManifest = [].concat(self.__precacheManifest || []);
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});

workbox.routing.registerNavigationRoute(workbox.precaching.getCacheKeyForURL("/index.html"), {
  blacklist: [/^\/_/,/\/[^/?]+\.[^/]+$/],
});
@@ -0,0 +1,2 @@
body{margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica Neue,sans-serif;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;background-color:#0f202d}code{font-family:source-code-pro,Menlo,Monaco,Consolas,Courier New,monospace}.playground>div:nth-child(2){height:calc(100vh - 131px)}
/*# sourceMappingURL=main.c6b5c55c.chunk.css.map */
@@ -0,0 +1 @@
{"version":3,"sources":["index.css"],"names":[],"mappings":"AAAA,KACE,QAAS,CACT,SAAU,CACV,mIAEY,CACZ,kCAAmC,CACnC,iCAAkC,CAClC,wBACF,CAEA,KACE,uEAEF,CAEA,6BACE,0BACF","file":"main.c6b5c55c.chunk.css","sourcesContent":["body {\n margin: 0;\n padding: 0;\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", \"Roboto\", \"Oxygen\",\n \"Ubuntu\", \"Cantarell\", \"Fira Sans\", \"Droid Sans\", \"Helvetica Neue\",\n sans-serif;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n background-color: #0f202d;\n}\n\ncode {\n font-family: source-code-pro, Menlo, Monaco, Consolas, \"Courier New\",\n monospace;\n}\n\n.playground > div:nth-child(2) {\n height: calc(100vh - 131px);\n}\n"]}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@@ -0,0 +1,2 @@
(this.webpackJsonpweb=this.webpackJsonpweb||[]).push([[0],{163:function(e,t,n){var r={".":61,"./":61,"./GraphQLLanguageService":117,"./GraphQLLanguageService.js":117,"./GraphQLLanguageService.js.flow":315,"./autocompleteUtils":91,"./autocompleteUtils.js":91,"./autocompleteUtils.js.flow":316,"./getAutocompleteSuggestions":77,"./getAutocompleteSuggestions.js":77,"./getAutocompleteSuggestions.js.flow":317,"./getDefinition":92,"./getDefinition.js":92,"./getDefinition.js.flow":318,"./getDiagnostics":94,"./getDiagnostics.js":94,"./getDiagnostics.js.flow":319,"./getHoverInformation":95,"./getHoverInformation.js":95,"./getHoverInformation.js.flow":320,"./getOutline":116,"./getOutline.js":116,"./getOutline.js.flow":321,"./index":61,"./index.js":61,"./index.js.flow":322};function o(e){var t=a(e);return n(t)}function a(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}o.keys=function(){return Object.keys(r)},o.resolve=a,e.exports=o,o.id=163},190:function(e,t,n){"use strict";(function(e){var r=n(100),o=n(101),a=n(201),i=n(191),s=n(202),l=n(5),c=n.n(l),u=n(20),g=n(130),f=(n(441),window.fetch);window.fetch=function(){return arguments[1].credentials="include",Promise.resolve(f.apply(e,arguments))};var p=function(e){function t(){return Object(r.a)(this,t),Object(a.a)(this,Object(i.a)(t).apply(this,arguments))}return Object(s.a)(t,e),Object(o.a)(t,[{key:"render",value:function(){return c.a.createElement("div",null,c.a.createElement("header",{style:{background:"#09141b",color:"#03a9f4",letterSpacing:"0.15rem",height:"65px",display:"flex",alignItems:"center"}},c.a.createElement("h3",{style:{textDecoration:"none",margin:"0px",fontSize:"18px"}},c.a.createElement("span",{style:{textTransform:"uppercase",marginLeft:"20px",paddingRight:"10px",borderRight:"1px solid #fff"}},"Super Graph"),c.a.createElement("span",{style:{fontSize:"16px",marginLeft:"10px",color:"#fff"}},"Instant GraphQL"))),c.a.createElement(u.Provider,{store:g.store},c.a.createElement(g.Playground,{endpoint:"/api/v1/graphql",settings:"{ 'schema.polling.enable': false, 'request.credentials': 'include', 'general.betaUpdates': true, 'editor.reuseHeaders': true, 'editor.theme': 'dark' }"})))}}]),t}(l.Component);t.a=p}).call(this,n(32))},205:function(e,t,n){e.exports=n(206)},206:function(e,t,n){"use strict";n.r(t);var r=n(5),o=n.n(r),a=n(52),i=n.n(a),s=n(190);i.a.render(o.a.createElement(s.a,null),document.getElementById("root"))},441:function(e,t,n){}},[[205,1,2]]]);
//# sourceMappingURL=main.04d74040.chunk.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,2 @@
!function(e){function r(r){for(var n,l,f=r[0],i=r[1],a=r[2],c=0,s=[];c<f.length;c++)l=f[c],Object.prototype.hasOwnProperty.call(o,l)&&o[l]&&s.push(o[l][0]),o[l]=0;for(n in i)Object.prototype.hasOwnProperty.call(i,n)&&(e[n]=i[n]);for(p&&p(r);s.length;)s.shift()();return u.push.apply(u,a||[]),t()}function t(){for(var e,r=0;r<u.length;r++){for(var t=u[r],n=!0,f=1;f<t.length;f++){var i=t[f];0!==o[i]&&(n=!1)}n&&(u.splice(r--,1),e=l(l.s=t[0]))}return e}var n={},o={1:0},u=[];function l(r){if(n[r])return n[r].exports;var t=n[r]={i:r,l:!1,exports:{}};return e[r].call(t.exports,t,t.exports,l),t.l=!0,t.exports}l.m=e,l.c=n,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!==typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(e,r){if(1&r&&(e=l(e)),8&r)return e;if(4&r&&"object"===typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var n in e)l.d(t,n,function(r){return e[r]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var f=this.webpackJsonpweb=this.webpackJsonpweb||[],i=f.push.bind(f);f.push=r,f=f.slice();for(var a=0;a<f.length;a++)r(f[a]);var p=i;t()}([]);
//# sourceMappingURL=runtime-main.4aea9da3.js.map
File diff suppressed because one or more lines are too long
@@ -0,0 +1,328 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  DocumentNode,
  FragmentSpreadNode,
  FragmentDefinitionNode,
  OperationDefinitionNode,
  TypeDefinitionNode,
  NamedTypeNode,
} from 'graphql';
import type {
  CompletionItem,
  DefinitionQueryResult,
  Diagnostic,
  GraphQLCache,
  GraphQLConfig,
  GraphQLProjectConfig,
  Uri,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import type {Hover} from 'vscode-languageserver-types';

import {Kind, parse, print} from 'graphql';
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
import {getHoverInformation} from './getHoverInformation';
import {validateQuery, getRange, SEVERITY} from './getDiagnostics';
import {
  getDefinitionQueryResultForFragmentSpread,
  getDefinitionQueryResultForDefinitionNode,
  getDefinitionQueryResultForNamedType,
} from './getDefinition';
import {getASTNodeAtPosition} from 'graphql-language-service-utils';

const {
  FRAGMENT_DEFINITION,
  OBJECT_TYPE_DEFINITION,
  INTERFACE_TYPE_DEFINITION,
  ENUM_TYPE_DEFINITION,
  UNION_TYPE_DEFINITION,
  SCALAR_TYPE_DEFINITION,
  INPUT_OBJECT_TYPE_DEFINITION,
  SCALAR_TYPE_EXTENSION,
  OBJECT_TYPE_EXTENSION,
  INTERFACE_TYPE_EXTENSION,
  UNION_TYPE_EXTENSION,
  ENUM_TYPE_EXTENSION,
  INPUT_OBJECT_TYPE_EXTENSION,
  DIRECTIVE_DEFINITION,
  FRAGMENT_SPREAD,
  OPERATION_DEFINITION,
  NAMED_TYPE,
} = Kind;

export class GraphQLLanguageService {
  _graphQLCache: GraphQLCache;
  _graphQLConfig: GraphQLConfig;

  constructor(cache: GraphQLCache) {
    this._graphQLCache = cache;
    this._graphQLConfig = cache.getGraphQLConfig();
  }

  async getDiagnostics(
    query: string,
    uri: Uri,
    isRelayCompatMode?: boolean,
  ): Promise<Array<Diagnostic>> {
    // Perform syntax diagnostics first, as this doesn't require
    // schema/fragment definitions, even the project configuration.
    let queryHasExtensions = false;
    const projectConfig = this._graphQLConfig.getConfigForFile(uri);
    const schemaPath = projectConfig.schemaPath;
    try {
      const queryAST = parse(query);
      if (!schemaPath || uri !== schemaPath) {
        queryHasExtensions = queryAST.definitions.some(definition => {
          switch (definition.kind) {
            case OBJECT_TYPE_DEFINITION:
            case INTERFACE_TYPE_DEFINITION:
            case ENUM_TYPE_DEFINITION:
            case UNION_TYPE_DEFINITION:
            case SCALAR_TYPE_DEFINITION:
            case INPUT_OBJECT_TYPE_DEFINITION:
            case SCALAR_TYPE_EXTENSION:
            case OBJECT_TYPE_EXTENSION:
            case INTERFACE_TYPE_EXTENSION:
            case UNION_TYPE_EXTENSION:
            case ENUM_TYPE_EXTENSION:
            case INPUT_OBJECT_TYPE_EXTENSION:
            case DIRECTIVE_DEFINITION:
              return true;
          }
          return false;
        });
      }
    } catch (error) {
      const range = getRange(error.locations[0], query);
      return [
        {
          severity: SEVERITY.ERROR,
          message: error.message,
          source: 'GraphQL: Syntax',
          range,
        },
      ];
    }

    // If there's a matching config, proceed to prepare to run validation
    let source = query;
    const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
      projectConfig,
    );
    const fragmentDependencies = await this._graphQLCache.getFragmentDependencies(
      query,
      fragmentDefinitions,
    );
    const dependenciesSource = fragmentDependencies.reduce(
      (prev, cur) => `${prev} ${print(cur.definition)}`,
      '',
    );

    source = `${source} ${dependenciesSource}`;

    let validationAst = null;
    try {
      validationAst = parse(source);
    } catch (error) {
      // the query string is already checked to be parsed properly - errors
      // from this parse must be from corrupted fragment dependencies.
      // For IDEs we don't care for errors outside of the currently edited
      // query, so we return an empty array here.
      return [];
    }

    // Check if there are custom validation rules to be used
    let customRules;
    const customRulesModulePath =
      projectConfig.extensions.customValidationRules;
    if (customRulesModulePath) {
      /* eslint-disable no-implicit-coercion */
      const rulesPath = require.resolve(`${customRulesModulePath}`);
      if (rulesPath) {
        customRules = require(`${rulesPath}`)(this._graphQLConfig);
      }
      /* eslint-enable no-implicit-coercion */
    }

    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName, queryHasExtensions)
      .catch(() => null);

    if (!schema) {
      return [];
    }

    return validateQuery(validationAst, schema, customRules, isRelayCompatMode);
  }

  async getAutocompleteSuggestions(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<Array<CompletionItem>> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName)
      .catch(() => null);

    if (schema) {
      return getAutocompleteSuggestions(schema, query, position);
    }
    return [];
  }

  async getHoverInformation(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<Hover.contents> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName)
      .catch(() => null);

    if (schema) {
      return getHoverInformation(schema, query, position);
    }
    return '';
  }

  async getDefinition(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<?DefinitionQueryResult> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);

    let ast;
    try {
      ast = parse(query);
    } catch (error) {
      return null;
    }

    const node = getASTNodeAtPosition(query, ast, position);
    if (node) {
      switch (node.kind) {
        case FRAGMENT_SPREAD:
          return this._getDefinitionForFragmentSpread(
            query,
            ast,
            node,
            filePath,
            projectConfig,
          );
        case FRAGMENT_DEFINITION:
        case OPERATION_DEFINITION:
          return getDefinitionQueryResultForDefinitionNode(
            filePath,
            query,
            (node: FragmentDefinitionNode | OperationDefinitionNode),
          );
        case NAMED_TYPE:
          return this._getDefinitionForNamedType(
            query,
            ast,
            node,
            filePath,
            projectConfig,
          );
      }
    }
    return null;
  }

  async _getDefinitionForNamedType(
    query: string,
    ast: DocumentNode,
    node: NamedTypeNode,
    filePath: Uri,
    projectConfig: GraphQLProjectConfig,
  ): Promise<?DefinitionQueryResult> {
    const objectTypeDefinitions = await this._graphQLCache.getObjectTypeDefinitions(
      projectConfig,
    );

    const dependencies = await this._graphQLCache.getObjectTypeDependenciesForAST(
      ast,
      objectTypeDefinitions,
    );

    const localObjectTypeDefinitions = ast.definitions.filter(
      definition =>
        definition.kind === OBJECT_TYPE_DEFINITION ||
        definition.kind === INPUT_OBJECT_TYPE_DEFINITION ||
        definition.kind === ENUM_TYPE_DEFINITION,
    );

    const typeCastedDefs = ((localObjectTypeDefinitions: any): Array<
      TypeDefinitionNode,
    >);

    const localOperationDefinationInfos = typeCastedDefs.map(
      (definition: TypeDefinitionNode) => ({
        filePath,
        content: query,
        definition,
      }),
    );

    const result = await getDefinitionQueryResultForNamedType(
      query,
      node,
      dependencies.concat(localOperationDefinationInfos),
    );

    return result;
  }

  async _getDefinitionForFragmentSpread(
    query: string,
    ast: DocumentNode,
    node: FragmentSpreadNode,
    filePath: Uri,
    projectConfig: GraphQLProjectConfig,
  ): Promise<?DefinitionQueryResult> {
    const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
      projectConfig,
    );

    const dependencies = await this._graphQLCache.getFragmentDependenciesForAST(
      ast,
      fragmentDefinitions,
    );

    const localFragDefinitions = ast.definitions.filter(
      definition => definition.kind === FRAGMENT_DEFINITION,
    );

    const typeCastedDefs = ((localFragDefinitions: any): Array<
      FragmentDefinitionNode,
    >);

    const localFragInfos = typeCastedDefs.map(
      (definition: FragmentDefinitionNode) => ({
        filePath,
        content: query,
        definition,
      }),
    );

    const result = await getDefinitionQueryResultForFragmentSpread(
      query,
      node,
      dependencies.concat(localFragInfos),
    );

    return result;
  }
}
@ -0,0 +1,204 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {GraphQLField, GraphQLSchema, GraphQLType} from 'graphql';
import {isCompositeType} from 'graphql';
import {
  SchemaMetaFieldDef,
  TypeMetaFieldDef,
  TypeNameMetaFieldDef,
} from 'graphql/type/introspection';
import type {
  CompletionItem,
  ContextToken,
  State,
  TypeInfo,
} from 'graphql-language-service-types';

// Utility for returning the state representing the Definition this token state
// is within, if any.
export function getDefinitionState(tokenState: State): ?State {
  let definitionState;

  forEachState(tokenState, state => {
    switch (state.kind) {
      case 'Query':
      case 'ShortQuery':
      case 'Mutation':
      case 'Subscription':
      case 'FragmentDefinition':
        definitionState = state;
        break;
    }
  });

  return definitionState;
}

// Gets the field definition given a type and field name
export function getFieldDef(
  schema: GraphQLSchema,
  type: GraphQLType,
  fieldName: string,
): ?GraphQLField<*, *> {
  if (fieldName === SchemaMetaFieldDef.name && schema.getQueryType() === type) {
    return SchemaMetaFieldDef;
  }
  if (fieldName === TypeMetaFieldDef.name && schema.getQueryType() === type) {
    return TypeMetaFieldDef;
  }
  if (fieldName === TypeNameMetaFieldDef.name && isCompositeType(type)) {
    return TypeNameMetaFieldDef;
  }
  if (type.getFields && typeof type.getFields === 'function') {
    return (type.getFields()[fieldName]: any);
  }

  return null;
}

// Utility for iterating through a CodeMirror parse state stack bottom-up.
export function forEachState(
  stack: State,
  fn: (state: State) => ?TypeInfo,
): void {
  const reverseStateStack = [];
  let state = stack;
  while (state && state.kind) {
    reverseStateStack.push(state);
    state = state.prevState;
  }
  for (let i = reverseStateStack.length - 1; i >= 0; i--) {
    fn(reverseStateStack[i]);
  }
}

export function objectValues(object: Object): Array<any> {
  const keys = Object.keys(object);
  const len = keys.length;
  const values = new Array(len);
  for (let i = 0; i < len; ++i) {
    values[i] = object[keys[i]];
  }
  return values;
}

// Create the expected hint response given a possible list and a token
export function hintList(
  token: ContextToken,
  list: Array<CompletionItem>,
): Array<CompletionItem> {
  return filterAndSortList(list, normalizeText(token.string));
}

// Given a list of hint entries and currently typed text, sort and filter to
// provide a concise list.
function filterAndSortList(
  list: Array<CompletionItem>,
  text: string,
): Array<CompletionItem> {
  if (!text) {
    return filterNonEmpty(list, entry => !entry.isDeprecated);
  }

  const byProximity = list.map(entry => ({
    proximity: getProximity(normalizeText(entry.label), text),
    entry,
  }));

  const conciseMatches = filterNonEmpty(
    filterNonEmpty(byProximity, pair => pair.proximity <= 2),
    pair => !pair.entry.isDeprecated,
  );

  const sortedMatches = conciseMatches.sort(
    (a, b) =>
      (a.entry.isDeprecated ? 1 : 0) - (b.entry.isDeprecated ? 1 : 0) ||
      a.proximity - b.proximity ||
      a.entry.label.length - b.entry.label.length,
  );

  return sortedMatches.map(pair => pair.entry);
}

// Filters the array by the predicate, unless it results in an empty array,
// in which case return the original array.
function filterNonEmpty(
  array: Array<Object>,
  predicate: (entry: Object) => boolean,
): Array<Object> {
  const filtered = array.filter(predicate);
  return filtered.length === 0 ? array : filtered;
}

function normalizeText(text: string): string {
  return text.toLowerCase().replace(/\W/g, '');
}

// Determine a numeric proximity for a suggestion based on current text.
function getProximity(suggestion: string, text: string): number {
  // start with lexical distance
  let proximity = lexicalDistance(text, suggestion);
  if (suggestion.length > text.length) {
    // do not penalize long suggestions.
    proximity -= suggestion.length - text.length - 1;
    // penalize suggestions not starting with this phrase
    proximity += suggestion.indexOf(text) === 0 ? 0 : 0.5;
  }
  return proximity;
}

/**
 * Computes the lexical distance between strings A and B.
 *
 * The "distance" between two strings is given by counting the minimum number
 * of edits needed to transform string A into string B. An edit can be an
 * insertion, deletion, or substitution of a single character, or a swap of two
 * adjacent characters.
 *
 * This distance can be useful for detecting typos in input or sorting
 *
 * @param {string} a
 * @param {string} b
 * @return {int} distance in number of edits
 */
function lexicalDistance(a: string, b: string): number {
  let i;
  let j;
  const d = [];
  const aLength = a.length;
  const bLength = b.length;

  for (i = 0; i <= aLength; i++) {
    d[i] = [i];
  }

  for (j = 1; j <= bLength; j++) {
    d[0][j] = j;
  }

  for (i = 1; i <= aLength; i++) {
    for (j = 1; j <= bLength; j++) {
      const cost = a[i - 1] === b[j - 1] ? 0 : 1;

      d[i][j] = Math.min(
        d[i - 1][j] + 1,
        d[i][j - 1] + 1,
        d[i - 1][j - 1] + cost,
      );

      if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
        d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
      }
    }
  }

  return d[aLength][bLength];
}
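The proximity ranking implemented by `filterAndSortList` and `getProximity` above is easiest to see end-to-end through the exported `hintList` helper. A minimal sketch follows (not part of this diff); the stub token and item labels are invented for illustration, and `hintList` only reads `token.string`:

```javascript
import {hintList} from './autocompleteUtils';

// Hypothetical inputs: hintList only reads `token.string`, so a stub
// token object is enough for illustration.
const token = ({string: 'usr'}: any);
const items = [
  {label: 'users'},
  {label: 'user'},
  {label: 'oldUser', isDeprecated: true},
];

// Both "user" and "users" fall within the proximity cutoff (<= 2); the tie
// is broken by label length, so the shorter "user" ranks first. Deprecated
// entries are dropped unless nothing else matches.
console.log(hintList(token, items).map(item => item.label));
// => ['user', 'users']
```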
@ -0,0 +1,665 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  FragmentDefinitionNode,
  GraphQLDirective,
  GraphQLSchema,
} from 'graphql';
import type {
  CompletionItem,
  ContextToken,
  State,
  TypeInfo,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';

import {
  GraphQLBoolean,
  GraphQLEnumType,
  GraphQLInputObjectType,
  GraphQLList,
  SchemaMetaFieldDef,
  TypeMetaFieldDef,
  TypeNameMetaFieldDef,
  assertAbstractType,
  doTypesOverlap,
  getNamedType,
  getNullableType,
  isAbstractType,
  isCompositeType,
  isInputType,
} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
  forEachState,
  getDefinitionState,
  getFieldDef,
  hintList,
  objectValues,
} from './autocompleteUtils';

/**
 * Given GraphQLSchema, queryText, and context of the current position within
 * the source text, provide a list of typeahead entries.
 */
export function getAutocompleteSuggestions(
  schema: GraphQLSchema,
  queryText: string,
  cursor: Position,
  contextToken?: ContextToken,
): Array<CompletionItem> {
  const token = contextToken || getTokenAtPosition(queryText, cursor);

  const state =
    token.state.kind === 'Invalid' ? token.state.prevState : token.state;

  // relieve flow errors by checking if `state` exists
  if (!state) {
    return [];
  }

  const kind = state.kind;
  const step = state.step;
  const typeInfo = getTypeInfo(schema, token.state);

  // Definition kinds
  if (kind === 'Document') {
    return hintList(token, [
      {label: 'query'},
      {label: 'mutation'},
      {label: 'subscription'},
      {label: 'fragment'},
      {label: '{'},
    ]);
  }

  // Field names
  if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
    return getSuggestionsForFieldNames(token, typeInfo, schema);
  }

  // Argument names
  if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
    const argDefs = typeInfo.argDefs;
    if (argDefs) {
      return hintList(
        token,
        argDefs.map(argDef => ({
          label: argDef.name,
          detail: String(argDef.type),
          documentation: argDef.description,
        })),
      );
    }
  }

  // Input Object fields
  if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
    if (typeInfo.objectFieldDefs) {
      const objectFields = objectValues(typeInfo.objectFieldDefs);
      return hintList(
        token,
        objectFields.map(field => ({
          label: field.name,
          detail: String(field.type),
          documentation: field.description,
        })),
      );
    }
  }

  // Input values: Enum and Boolean
  if (
    kind === 'EnumValue' ||
    (kind === 'ListValue' && step === 1) ||
    (kind === 'ObjectField' && step === 2) ||
    (kind === 'Argument' && step === 2)
  ) {
    return getSuggestionsForInputValues(token, typeInfo);
  }

  // Fragment type conditions
  if (
    (kind === 'TypeCondition' && step === 1) ||
    (kind === 'NamedType' &&
      state.prevState != null &&
      state.prevState.kind === 'TypeCondition')
  ) {
    return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
  }

  // Fragment spread names
  if (kind === 'FragmentSpread' && step === 1) {
    return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
  }

  // Variable definition types
  if (
    (kind === 'VariableDefinition' && step === 2) ||
    (kind === 'ListType' && step === 1) ||
    (kind === 'NamedType' &&
      state.prevState &&
      (state.prevState.kind === 'VariableDefinition' ||
        state.prevState.kind === 'ListType'))
  ) {
    return getSuggestionsForVariableDefinition(token, schema);
  }

  // Directive names
  if (kind === 'Directive') {
    return getSuggestionsForDirective(token, state, schema);
  }

  return [];
}

// Helper functions to get suggestions for each kinds
function getSuggestionsForFieldNames(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  if (typeInfo.parentType) {
    const parentType = typeInfo.parentType;
    const fields =
      parentType.getFields instanceof Function
        ? objectValues(parentType.getFields())
        : [];
    if (isAbstractType(parentType)) {
      fields.push(TypeNameMetaFieldDef);
    }
    if (parentType === schema.getQueryType()) {
      fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
    }
    return hintList(
      token,
      fields.map(field => ({
        label: field.name,
        detail: String(field.type),
        documentation: field.description,
        isDeprecated: field.isDeprecated,
        deprecationReason: field.deprecationReason,
      })),
    );
  }
  return [];
}

function getSuggestionsForInputValues(
  token: ContextToken,
  typeInfo: TypeInfo,
): Array<CompletionItem> {
  const namedInputType = getNamedType(typeInfo.inputType);
  if (namedInputType instanceof GraphQLEnumType) {
    const values = namedInputType.getValues();
    return hintList(
      token,
      values.map(value => ({
        label: value.name,
        detail: String(namedInputType),
        documentation: value.description,
        isDeprecated: value.isDeprecated,
        deprecationReason: value.deprecationReason,
      })),
    );
  } else if (namedInputType === GraphQLBoolean) {
    return hintList(token, [
      {
        label: 'true',
        detail: String(GraphQLBoolean),
        documentation: 'Not false.',
      },
      {
        label: 'false',
        detail: String(GraphQLBoolean),
        documentation: 'Not true.',
      },
    ]);
  }

  return [];
}

function getSuggestionsForFragmentTypeConditions(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  let possibleTypes;
  if (typeInfo.parentType) {
    if (isAbstractType(typeInfo.parentType)) {
      const abstractType = assertAbstractType(typeInfo.parentType);
      // Collect both the possible Object types as well as the interfaces
      // they implement.
      const possibleObjTypes = schema.getPossibleTypes(abstractType);
      const possibleIfaceMap = Object.create(null);
      possibleObjTypes.forEach(type => {
        type.getInterfaces().forEach(iface => {
          possibleIfaceMap[iface.name] = iface;
        });
      });
      possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
    } else {
      // The parent type is a non-abstract Object type, so the only possible
      // type that can be used is that same type.
      possibleTypes = [typeInfo.parentType];
    }
  } else {
    const typeMap = schema.getTypeMap();
    possibleTypes = objectValues(typeMap).filter(isCompositeType);
  }
  return hintList(
    token,
    possibleTypes.map(type => {
      const namedType = getNamedType(type);
      return {
        label: String(type),
        documentation: (namedType && namedType.description) || '',
      };
    }),
  );
}

function getSuggestionsForFragmentSpread(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
  queryText: string,
): Array<CompletionItem> {
  const typeMap = schema.getTypeMap();
  const defState = getDefinitionState(token.state);
  const fragments = getFragmentDefinitions(queryText);

  // Filter down to only the fragments which may exist here.
  const relevantFrags = fragments.filter(
    frag =>
      // Only include fragments with known types.
      typeMap[frag.typeCondition.name.value] &&
      // Only include fragments which are not cyclic.
      !(
        defState &&
        defState.kind === 'FragmentDefinition' &&
        defState.name === frag.name.value
      ) &&
      // Only include fragments which could possibly be spread here.
      isCompositeType(typeInfo.parentType) &&
      isCompositeType(typeMap[frag.typeCondition.name.value]) &&
      doTypesOverlap(
        schema,
        typeInfo.parentType,
        typeMap[frag.typeCondition.name.value],
      ),
  );

  return hintList(
    token,
    relevantFrags.map(frag => ({
      label: frag.name.value,
      detail: String(typeMap[frag.typeCondition.name.value]),
      documentation: `fragment ${frag.name.value} on ${
        frag.typeCondition.name.value
      }`,
    })),
  );
}

function getFragmentDefinitions(
  queryText: string,
): Array<FragmentDefinitionNode> {
  const fragmentDefs = [];
  runOnlineParser(queryText, (_, state) => {
    if (state.kind === 'FragmentDefinition' && state.name && state.type) {
      fragmentDefs.push({
        kind: 'FragmentDefinition',
        name: {
          kind: 'Name',
          value: state.name,
        },
        selectionSet: {
          kind: 'SelectionSet',
          selections: [],
        },
        typeCondition: {
          kind: 'NamedType',
          name: {
            kind: 'Name',
            value: state.type,
          },
        },
      });
    }
  });

  return fragmentDefs;
}

function getSuggestionsForVariableDefinition(
  token: ContextToken,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  const inputTypeMap = schema.getTypeMap();
  const inputTypes = objectValues(inputTypeMap).filter(isInputType);
  return hintList(
    token,
    inputTypes.map(type => ({
      label: type.name,
      documentation: type.description,
    })),
  );
}

function getSuggestionsForDirective(
  token: ContextToken,
  state: State,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  if (state.prevState && state.prevState.kind) {
    const directives = schema
      .getDirectives()
      .filter(directive => canUseDirective(state.prevState, directive));
    return hintList(
      token,
      directives.map(directive => ({
        label: directive.name,
        documentation: directive.description || '',
      })),
    );
  }
  return [];
}

export function getTokenAtPosition(
  queryText: string,
  cursor: Position,
): ContextToken {
  let styleAtCursor = null;
  let stateAtCursor = null;
  let stringAtCursor = null;
  const token = runOnlineParser(queryText, (stream, state, style, index) => {
    if (index === cursor.line) {
      if (stream.getCurrentPosition() >= cursor.character) {
        styleAtCursor = style;
        stateAtCursor = {...state};
        stringAtCursor = stream.current();
        return 'BREAK';
      }
    }
  });

  // Return the state/style of parsed token in case those at cursor aren't
  // available.
  return {
    start: token.start,
    end: token.end,
    string: stringAtCursor || token.string,
    state: stateAtCursor || token.state,
    style: styleAtCursor || token.style,
  };
}

/**
 * Provides an utility function to parse a given query text and construct a
 * `token` context object.
 * A token context provides useful information about the token/style that
 * CharacterStream currently possesses, as well as the end state and style
 * of the token.
 */
type callbackFnType = (
  stream: CharacterStream,
  state: State,
  style: string,
  index: number,
) => void | 'BREAK';

function runOnlineParser(
  queryText: string,
  callback: callbackFnType,
): ContextToken {
  const lines = queryText.split('\n');
  const parser = onlineParser();
  let state = parser.startState();
  let style = '';

  let stream: CharacterStream = new CharacterStream('');

  for (let i = 0; i < lines.length; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      style = parser.token(stream, state);
      const code = callback(stream, state, style, i);
      if (code === 'BREAK') {
        break;
      }
    }

    // Above while loop won't run if there is an empty line.
    // Run the callback one more time to catch this.
    callback(stream, state, style, i);

    if (!state.kind) {
      state = parser.startState();
    }
  }

  return {
    start: stream.getStartOfToken(),
    end: stream.getCurrentPosition(),
    string: stream.current(),
    state,
    style,
  };
}

function canUseDirective(
  state: $PropertyType<State, 'prevState'>,
  directive: GraphQLDirective,
): boolean {
  if (!state || !state.kind) {
    return false;
  }
  const kind = state.kind;
  const locations = directive.locations;
  switch (kind) {
    case 'Query':
      return locations.indexOf('QUERY') !== -1;
    case 'Mutation':
      return locations.indexOf('MUTATION') !== -1;
    case 'Subscription':
      return locations.indexOf('SUBSCRIPTION') !== -1;
    case 'Field':
    case 'AliasedField':
      return locations.indexOf('FIELD') !== -1;
    case 'FragmentDefinition':
      return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
    case 'FragmentSpread':
      return locations.indexOf('FRAGMENT_SPREAD') !== -1;
    case 'InlineFragment':
      return locations.indexOf('INLINE_FRAGMENT') !== -1;

    // Schema Definitions
    case 'SchemaDef':
      return locations.indexOf('SCHEMA') !== -1;
    case 'ScalarDef':
      return locations.indexOf('SCALAR') !== -1;
    case 'ObjectTypeDef':
      return locations.indexOf('OBJECT') !== -1;
    case 'FieldDef':
      return locations.indexOf('FIELD_DEFINITION') !== -1;
    case 'InterfaceDef':
      return locations.indexOf('INTERFACE') !== -1;
    case 'UnionDef':
      return locations.indexOf('UNION') !== -1;
    case 'EnumDef':
      return locations.indexOf('ENUM') !== -1;
    case 'EnumValue':
      return locations.indexOf('ENUM_VALUE') !== -1;
    case 'InputDef':
      return locations.indexOf('INPUT_OBJECT') !== -1;
    case 'InputValueDef':
      const prevStateKind = state.prevState && state.prevState.kind;
      switch (prevStateKind) {
        case 'ArgumentsDef':
          return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
        case 'InputDef':
          return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
      }
  }
  return false;
}

// Utility for collecting rich type information given any token's state
// from the graphql-mode parser.
export function getTypeInfo(
  schema: GraphQLSchema,
  tokenState: State,
): TypeInfo {
  let argDef;
  let argDefs;
  let directiveDef;
  let enumValue;
  let fieldDef;
  let inputType;
  let objectFieldDefs;
  let parentType;
  let type;

  forEachState(tokenState, state => {
    switch (state.kind) {
      case 'Query':
      case 'ShortQuery':
        type = schema.getQueryType();
        break;
      case 'Mutation':
        type = schema.getMutationType();
        break;
      case 'Subscription':
        type = schema.getSubscriptionType();
        break;
      case 'InlineFragment':
      case 'FragmentDefinition':
        if (state.type) {
          type = schema.getType(state.type);
        }
        break;
      case 'Field':
      case 'AliasedField':
        if (!type || !state.name) {
          fieldDef = null;
        } else {
          fieldDef = parentType
            ? getFieldDef(schema, parentType, state.name)
            : null;
          type = fieldDef ? fieldDef.type : null;
        }
        break;
      case 'SelectionSet':
        parentType = getNamedType(type);
        break;
      case 'Directive':
        directiveDef = state.name ? schema.getDirective(state.name) : null;
        break;
      case 'Arguments':
        if (!state.prevState) {
          argDefs = null;
        } else {
          switch (state.prevState.kind) {
            case 'Field':
              argDefs = fieldDef && fieldDef.args;
              break;
            case 'Directive':
              argDefs = directiveDef && directiveDef.args;
              break;
            case 'AliasedField':
              const name = state.prevState && state.prevState.name;
              if (!name) {
                argDefs = null;
                break;
              }
              const field = parentType
                ? getFieldDef(schema, parentType, name)
                : null;
              if (!field) {
                argDefs = null;
                break;
              }
              argDefs = field.args;
              break;
            default:
              argDefs = null;
              break;
          }
        }
        break;
      case 'Argument':
        if (argDefs) {
          for (let i = 0; i < argDefs.length; i++) {
            if (argDefs[i].name === state.name) {
              argDef = argDefs[i];
              break;
            }
          }
        }
        inputType = argDef && argDef.type;
        break;
      case 'EnumValue':
        const enumType = getNamedType(inputType);
        enumValue =
          enumType instanceof GraphQLEnumType
            ? find(enumType.getValues(), val => val.value === state.name)
            : null;
        break;
      case 'ListValue':
        const nullableType = getNullableType(inputType);
        inputType =
          nullableType instanceof GraphQLList ? nullableType.ofType : null;
        break;
      case 'ObjectValue':
        const objectType = getNamedType(inputType);
        objectFieldDefs =
          objectType instanceof GraphQLInputObjectType
            ? objectType.getFields()
            : null;
        break;
      case 'ObjectField':
        const objectField =
          state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
        inputType = objectField && objectField.type;
        break;
      case 'NamedType':
        if (state.name) {
          type = schema.getType(state.name);
        }
        break;
    }
  });

  return {
    argDef,
    argDefs,
    directiveDef,
    enumValue,
    fieldDef,
    inputType,
    objectFieldDefs,
    parentType,
    type,
  };
}

// Returns the first item in the array which causes predicate to return truthy.
function find(array, predicate) {
  for (let i = 0; i < array.length; i++) {
    if (predicate(array[i])) {
      return array[i];
    }
  }
  return null;
}
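As a rough usage sketch (not part of this diff), the entry point above can be exercised against a small schema built with graphql-js; the schema and cursor position below are invented for illustration:

```javascript
import {buildSchema} from 'graphql';
import {Position} from 'graphql-language-service-utils';
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';

// A tiny schema, assumed only for this example.
const schema = buildSchema(`
  type Query {
    user(id: ID!): String
    users: [String]
  }
`);

// Cursor on line 0, character 4: midway through typing "us" in "{ us".
const suggestions = getAutocompleteSuggestions(
  schema,
  '{ us',
  new Position(0, 4),
);

// Field-name suggestions for Query, ranked by proximity to "us".
console.log(suggestions.map(s => s.label)); // e.g. ['user', 'users']
```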
@ -0,0 +1,136 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  ASTNode,
  FragmentSpreadNode,
  FragmentDefinitionNode,
  OperationDefinitionNode,
  NamedTypeNode,
  TypeDefinitionNode,
} from 'graphql';
import type {
  Definition,
  DefinitionQueryResult,
  FragmentInfo,
  Position,
  Range,
  Uri,
  ObjectTypeInfo,
} from 'graphql-language-service-types';
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
import invariant from 'assert';

export const LANGUAGE = 'GraphQL';

function getRange(text: string, node: ASTNode): Range {
  const location = node.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return locToRange(text, location);
}

function getPosition(text: string, node: ASTNode): Position {
  const location = node.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return offsetToPosition(text, location.start);
}

export async function getDefinitionQueryResultForNamedType(
  text: string,
  node: NamedTypeNode,
  dependencies: Array<ObjectTypeInfo>,
): Promise<DefinitionQueryResult> {
  const name = node.name.value;
  const defNodes = dependencies.filter(
    ({definition}) => definition.name && definition.name.value === name,
  );
  if (defNodes.length === 0) {
    process.stderr.write(`Definition not found for GraphQL type ${name}`);
    return {queryRange: [], definitions: []};
  }
  const definitions: Array<Definition> = defNodes.map(
    ({filePath, content, definition}) =>
      getDefinitionForNodeDefinition(filePath || '', content, definition),
  );
  return {
    definitions,
    queryRange: definitions.map(_ => getRange(text, node)),
  };
}

export async function getDefinitionQueryResultForFragmentSpread(
  text: string,
  fragment: FragmentSpreadNode,
  dependencies: Array<FragmentInfo>,
): Promise<DefinitionQueryResult> {
  const name = fragment.name.value;
  const defNodes = dependencies.filter(
    ({definition}) => definition.name.value === name,
  );
  if (defNodes.length === 0) {
    process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
    return {queryRange: [], definitions: []};
  }
  const definitions: Array<Definition> = defNodes.map(
    ({filePath, content, definition}) =>
      getDefinitionForFragmentDefinition(filePath || '', content, definition),
  );
  return {
    definitions,
    queryRange: definitions.map(_ => getRange(text, fragment)),
  };
}

export function getDefinitionQueryResultForDefinitionNode(
  path: Uri,
  text: string,
  definition: FragmentDefinitionNode | OperationDefinitionNode,
): DefinitionQueryResult {
  return {
    definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
    queryRange: definition.name ? [getRange(text, definition.name)] : [],
  };
}

function getDefinitionForFragmentDefinition(
  path: Uri,
  text: string,
  definition: FragmentDefinitionNode | OperationDefinitionNode,
): Definition {
  const name = definition.name;
  invariant(name, 'Expected ASTNode to have a Name.');
  return {
    path,
    position: getPosition(text, definition),
    range: getRange(text, definition),
    name: name.value || '',
    language: LANGUAGE,
    // This is a file inside the project root, good enough for now
    projectRoot: path,
  };
}

function getDefinitionForNodeDefinition(
  path: Uri,
  text: string,
  definition: TypeDefinitionNode,
): Definition {
  const name = definition.name;
  invariant(name, 'Expected ASTNode to have a Name.');
  return {
    path,
    position: getPosition(text, definition),
    range: getRange(text, definition),
    name: name.value || '',
    language: LANGUAGE,
    // This is a file inside the project root, good enough for now
    projectRoot: path,
  };
}
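A small sketch of how the fragment-spread lookup above might be driven directly (not part of this diff; the query text and file path are invented, and the AST is parsed with graphql-js, which attaches the `loc` info that `getRange` relies on):

```javascript
import {parse} from 'graphql';
import {getDefinitionQueryResultForFragmentSpread} from './getDefinition';

const text = `
  fragment Name on Query { name }
  query { ...Name }
`;
const ast = parse(text);
// definitions[1] is the query; its first selection is the ...Name spread.
const spread = (ast.definitions[1]: any).selectionSet.selections[0];

// Each dependency pairs a known fragment definition with its source text
// and a (hypothetical) file path.
getDefinitionQueryResultForFragmentSpread(text, spread, [
  {
    filePath: '/tmp/query.graphql',
    content: text,
    definition: (ast.definitions[0]: any),
  },
]).then(result => {
  // One definition found; its range points at the fragment declaration.
  console.log(result.definitions[0].path, result.definitions[0].range);
});
```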
@ -0,0 +1,172 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  ASTNode,
  DocumentNode,
  GraphQLError,
  GraphQLSchema,
  Location,
  SourceLocation,
} from 'graphql';
import type {
  Diagnostic,
  CustomValidationRule,
} from 'graphql-language-service-types';

import invariant from 'assert';
import {findDeprecatedUsages, parse} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
  Position,
  Range,
  validateWithCustomRules,
} from 'graphql-language-service-utils';

export const SEVERITY = {
  ERROR: 1,
  WARNING: 2,
  INFORMATION: 3,
  HINT: 4,
};

export function getDiagnostics(
  query: string,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  let ast = null;
  try {
    ast = parse(query);
  } catch (error) {
    const range = getRange(error.locations[0], query);
    return [
      {
        severity: SEVERITY.ERROR,
        message: error.message,
        source: 'GraphQL: Syntax',
        range,
      },
    ];
  }

  return validateQuery(ast, schema, customRules, isRelayCompatMode);
}

export function validateQuery(
  ast: DocumentNode,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  // We cannot validate the query unless a schema is provided.
  if (!schema) {
    return [];
  }

  const validationErrorAnnotations = mapCat(
    validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
    error => annotations(error, SEVERITY.ERROR, 'Validation'),
  );
  // Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
  // support older versions of graphql-js.
  const deprecationWarningAnnotations = !findDeprecatedUsages
    ? []
    : mapCat(findDeprecatedUsages(schema, ast), error =>
        annotations(error, SEVERITY.WARNING, 'Deprecation'),
      );
  return validationErrorAnnotations.concat(deprecationWarningAnnotations);
}

// General utility for map-cating (aka flat-mapping).
function mapCat<T>(
  array: Array<T>,
  mapper: (item: T) => Array<any>,
): Array<any> {
  return Array.prototype.concat.apply([], array.map(mapper));
}

function annotations(
  error: GraphQLError,
  severity: number,
  type: string,
): Array<Diagnostic> {
  if (!error.nodes) {
    return [];
  }
  return error.nodes.map(node => {
    const highlightNode =
      node.kind !== 'Variable' && node.name
        ? node.name
        : node.variable
          ? node.variable
          : node;

    invariant(error.locations, 'GraphQL validation error requires locations.');
    const loc = error.locations[0];
    const highlightLoc = getLocation(highlightNode);
    const end = loc.column + (highlightLoc.end - highlightLoc.start);
    return {
      source: `GraphQL: ${type}`,
      message: error.message,
      severity,
      range: new Range(
        new Position(loc.line - 1, loc.column - 1),
        new Position(loc.line - 1, end),
      ),
    };
  });
}

export function getRange(location: SourceLocation, queryText: string) {
  const parser = onlineParser();
  const state = parser.startState();
  const lines = queryText.split('\n');

  invariant(
    lines.length >= location.line,
    'Query text must have more lines than where the error happened',
  );

  let stream = null;

  for (let i = 0; i < location.line; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      const style = parser.token(stream, state);
      if (style === 'invalidchar') {
        break;
      }
    }
  }

  invariant(stream, 'Expected Parser stream to be available.');

  const line = location.line - 1;
  const start = stream.getStartOfToken();
  const end = stream.getCurrentPosition();

  return new Range(new Position(line, start), new Position(line, end));
}

/**
 * Get location info from a node in a type-safe way.
 *
 * The only way a node could not have a location is if we initialized the parser
 * (and therefore the lexer) with the `noLocation` option, but we always
 * call `parse` without options above.
 */
function getLocation(node: any): Location {
  const typeCastedNode = (node: ASTNode);
  const location = typeCastedNode.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return location;
}
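For orientation, a hedged usage sketch (not part of this diff): `getDiagnostics` returns syntax diagnostics when the query fails to parse, and validation or deprecation diagnostics otherwise. The schema below is invented for the example:

```javascript
import {buildSchema} from 'graphql';
import {getDiagnostics} from './getDiagnostics';

const schema = buildSchema(`
  type Query {
    name: String
  }
`);

// "{ nam }" parses fine but fails validation, so this should yield a single
// "GraphQL: Validation" diagnostic; a malformed query such as "{ name"
// would instead yield a "GraphQL: Syntax" one.
for (const diag of getDiagnostics('{ nam }', schema)) {
  console.log(diag.severity, diag.source, diag.message);
}
```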
@ -0,0 +1,186 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

/**
 * Ported from codemirror-graphql
 * https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
 */

import type {GraphQLSchema} from 'graphql';
import type {ContextToken} from 'graphql-language-service-types';
import type {Hover} from 'vscode-languageserver-types';
import type {Position} from 'graphql-language-service-utils';
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
import {GraphQLNonNull, GraphQLList} from 'graphql';

export function getHoverInformation(
  schema: GraphQLSchema,
  queryText: string,
  cursor: Position,
  contextToken?: ContextToken,
): Hover.contents {
  const token = contextToken || getTokenAtPosition(queryText, cursor);

  if (!schema || !token || !token.state) {
    return [];
  }

  const state = token.state;
  const kind = state.kind;
  const step = state.step;
  const typeInfo = getTypeInfo(schema, token.state);
  const options = {schema};

  // Given a Schema and a Token, produce the contents of an info tooltip.
  // To do this, create a div element that we will render "into" and then pass
  // it to various rendering functions.
  if (
    (kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
    (kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
  ) {
    const into = [];
    renderField(into, typeInfo, options);
    renderDescription(into, options, typeInfo.fieldDef);
    return into.join('').trim();
  } else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
    const into = [];
    renderDirective(into, typeInfo, options);
    renderDescription(into, options, typeInfo.directiveDef);
    return into.join('').trim();
  } else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
    const into = [];
    renderArg(into, typeInfo, options);
    renderDescription(into, options, typeInfo.argDef);
    return into.join('').trim();
  } else if (
    kind === 'EnumValue' &&
    typeInfo.enumValue &&
    typeInfo.enumValue.description
  ) {
    const into = [];
    renderEnumValue(into, typeInfo, options);
    renderDescription(into, options, typeInfo.enumValue);
    return into.join('').trim();
  } else if (
    kind === 'NamedType' &&
    typeInfo.type &&
    typeInfo.type.description
  ) {
    const into = [];
    renderType(into, typeInfo, options, typeInfo.type);
    renderDescription(into, options, typeInfo.type);
    return into.join('').trim();
  }
}

function renderField(into, typeInfo, options) {
  renderQualifiedField(into, typeInfo, options);
  renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
}

function renderQualifiedField(into, typeInfo, options) {
  if (!typeInfo.fieldDef) {
    return;
  }
  const fieldName = (typeInfo.fieldDef.name: string);
  if (fieldName.slice(0, 2) !== '__') {
    renderType(into, typeInfo, options, typeInfo.parentType);
    text(into, '.');
  }
  text(into, fieldName);
}

function renderDirective(into, typeInfo, options) {
  if (!typeInfo.directiveDef) {
    return;
  }
  const name = '@' + typeInfo.directiveDef.name;
  text(into, name);
}

function renderArg(into, typeInfo, options) {
  if (typeInfo.directiveDef) {
    renderDirective(into, typeInfo, options);
  } else if (typeInfo.fieldDef) {
    renderQualifiedField(into, typeInfo, options);
  }

  if (!typeInfo.argDef) {
    return;
  }

  const name = typeInfo.argDef.name;
  text(into, '(');
  text(into, name);
  renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
  text(into, ')');
}

function renderTypeAnnotation(into, typeInfo, options, t) {
  text(into, ': ');
  renderType(into, typeInfo, options, t);
}

function renderEnumValue(into, typeInfo, options) {
  if (!typeInfo.enumValue) {
    return;
  }
  const name = typeInfo.enumValue.name;
  renderType(into, typeInfo, options, typeInfo.inputType);
  text(into, '.');
  text(into, name);
}

function renderType(into, typeInfo, options, t) {
  if (!t) {
    return;
  }
  if (t instanceof GraphQLNonNull) {
    renderType(into, typeInfo, options, t.ofType);
    text(into, '!');
  } else if (t instanceof GraphQLList) {
    text(into, '[');
    renderType(into, typeInfo, options, t.ofType);
    text(into, ']');
  } else {
    text(into, t.name);
  }
}

function renderDescription(into, options, def) {
  if (!def) {
    return;
  }
  const description =
    typeof def.description === 'string' ? def.description : null;
  if (description) {
    text(into, '\n\n');
    text(into, description);
  }
  renderDeprecation(into, options, def);
}

function renderDeprecation(into, options, def) {
  if (!def) {
    return;
  }
  const reason =
    typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
  if (!reason) {
    return;
  }
  text(into, '\n\n');
  text(into, 'Deprecated: ');
  text(into, reason);
}

function text(into: string[], content: string) {
  into.push(content);
}
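A brief sketch of calling the hover entry point (not part of this diff; schema and cursor invented for the example):

```javascript
import {buildSchema} from 'graphql';
import {Position} from 'graphql-language-service-utils';
import {getHoverInformation} from './getHoverInformation';

const schema = buildSchema(`
  type Query {
    name: String
  }
`);

// Hovering inside the "name" token should render something like
// "Query.name: String" (the qualified field plus its type annotation).
const hover = getHoverInformation(schema, '{ name }', new Position(0, 3));
console.log(hover);
```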
@ -0,0 +1,121 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  Outline,
  TextToken,
  TokenKind,
} from 'graphql-language-service-types';

import {Kind, parse, visit} from 'graphql';
import {offsetToPosition} from 'graphql-language-service-utils';

const {INLINE_FRAGMENT} = Kind;

const OUTLINEABLE_KINDS = {
  Field: true,
  OperationDefinition: true,
  Document: true,
  SelectionSet: true,
  Name: true,
  FragmentDefinition: true,
  FragmentSpread: true,
  InlineFragment: true,
};

type OutlineTreeConverterType = {[name: string]: Function};

export function getOutline(queryText: string): ?Outline {
  let ast;
  try {
    ast = parse(queryText);
  } catch (error) {
    return null;
  }

  const visitorFns = outlineTreeConverter(queryText);
  const outlineTrees = visit(ast, {
    leave(node) {
      if (
        OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
        visitorFns[node.kind]
      ) {
        return visitorFns[node.kind](node);
      }
      return null;
    },
  });
  return {outlineTrees};
}

function outlineTreeConverter(docText: string): OutlineTreeConverterType {
  const meta = node => ({
    representativeName: node.name,
    startPosition: offsetToPosition(docText, node.loc.start),
    endPosition: offsetToPosition(docText, node.loc.end),
    children: node.selectionSet || [],
  });
  return {
    Field: node => {
      const tokenizedText = node.alias
        ? [buildToken('plain', node.alias), buildToken('plain', ': ')]
        : [];
      tokenizedText.push(buildToken('plain', node.name));
      return {tokenizedText, ...meta(node)};
    },
    OperationDefinition: node => ({
      tokenizedText: [
        buildToken('keyword', node.operation),
        buildToken('whitespace', ' '),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    Document: node => node.definitions,
    SelectionSet: node =>
      concatMap(node.selections, child => {
        return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
      }),
    Name: node => node.value,
    FragmentDefinition: node => ({
      tokenizedText: [
        buildToken('keyword', 'fragment'),
        buildToken('whitespace', ' '),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    FragmentSpread: node => ({
      tokenizedText: [
        buildToken('plain', '...'),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    InlineFragment: node => node.selectionSet,
  };
}

function buildToken(kind: TokenKind, value: string): TextToken {
  return {kind, value};
}

function concatMap(arr: Array<any>, fn: Function): Array<any> {
  const res = [];
  for (let i = 0; i < arr.length; i++) {
    const x = fn(arr[i], i);
    if (Array.isArray(x)) {
      res.push(...x);
    } else {
      res.push(x);
    }
  }
  return res;
}
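And a matching sketch for the outline view (not part of this diff; the query text is invented): `getOutline` returns `null` on a parse error, otherwise a tree of tokenized labels with start/end positions.

```javascript
import {getOutline} from './getOutline';

const outline = getOutline(`
  query Hero {
    hero {
      name
    }
  }
`);

// The operation node's tokenizedText reads [keyword "query",
// whitespace " ", class-name "Hero"]; each node also carries
// startPosition/endPosition computed via offsetToPosition.
console.log(JSON.stringify(outline, null, 2));
```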
@ -0,0 +1,31 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

export {
  getDefinitionState,
  getFieldDef,
  forEachState,
  objectValues,
  hintList,
} from './autocompleteUtils';

export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';

export {
  LANGUAGE,
  getDefinitionQueryResultForFragmentSpread,
  getDefinitionQueryResultForDefinitionNode,
} from './getDefinition';

export {getDiagnostics, validateQuery} from './getDiagnostics';
export {getOutline} from './getOutline';
export {getHoverInformation} from './getHoverInformation';

export {GraphQLLanguageService} from './GraphQLLanguageService';
BIN cmd/internal/serv/web/build/static/media/logo.57ee3b60.png Normal file
Binary file not shown. After Width: | Height: | Size: 31 KiB
755 config/allow.list Normal file
@ -0,0 +1,755 @@
# http://localhost:8080/

variables {
  "data": [
    {
      "name": "Protect Ya Neck",
      "created_at": "now",
      "updated_at": "now"
    },
    {
      "name": "Enter the Wu-Tang",
      "created_at": "now",
      "updated_at": "now"
    }
  ]
}

mutation {
  products(insert: $data) {
    id
    name
  }
}

variables {
  "update": {
    "name": "Wu-Tang",
    "description": "No description needed"
  },
  "product_id": 1
}

mutation {
  products(id: $product_id, update: $update) {
    id
    name
    description
  }
}

query {
  users {
    id
    email
    picture: avatar
    products(limit: 2, where: {price: {gt: 10}}) {
      id
      name
      description
    }
  }
}

variables {
  "data": [
    {
      "name": "Gumbo1",
      "created_at": "now",
      "updated_at": "now"
    },
    {
      "name": "Gumbo2",
      "created_at": "now",
      "updated_at": "now"
    }
  ]
}

mutation {
  products(id: 199, delete: true) {
    id
    name
  }
}

query {
  products {
    id
    name
    user {
      email
    }
  }
}

variables {
  "data": {
    "product_id": 5
  }
}

mutation {
  products(id: $product_id, delete: true) {
    id
    name
  }
}

query {
  products {
    id
    name
    price
    users {
      email
    }
  }
}

variables {
  "data": {
    "email": "gfk@myspace.com",
    "full_name": "Ghostface Killah",
    "created_at": "now",
    "updated_at": "now"
  }
}

mutation {
  user(insert: $data) {
    id
  }
}

variables {
  "update": {
    "name": "Helloo",
    "description": "World \u003c\u003e"
  },
  "user": 123
}

mutation {
  products(id: 5, update: $update) {
    id
    name
    description
  }
}

variables {
  "data": {
    "name": "WOOO",
    "price": 50.5
  }
}

mutation {
  products(insert: $data) {
    id
    name
  }
}

query getProducts {
  products {
    id
    name
    price
    description
  }
}

query {
  deals {
    id
    name
    price
  }
}

variables {
  "beer": "smoke"
}

query beerSearch {
  products(search: $beer) {
    id
    name
    search_rank
    search_headline_description
  }
}

query {
  user {
    id
    full_name
  }
}

variables {
  "data": {
    "email": "goo1@rug.com",
    "full_name": "The Dude",
    "created_at": "now",
    "updated_at": "now",
    "product": {
      "name": "Apple",
      "price": 1.25,
      "created_at": "now",
      "updated_at": "now"
    }
  }
}

mutation {
  user(insert: $data) {
    id
    full_name
    email
    product {
      id
      name
      price
    }
  }
}

variables {
  "data": {
    "email": "goo12@rug.com",
    "full_name": "The Dude",
    "created_at": "now",
    "updated_at": "now",
    "product": [
      {
        "name": "Banana 1",
        "price": 1.1,
        "created_at": "now",
        "updated_at": "now"
      },
      {
        "name": "Banana 2",
        "price": 2.2,
        "created_at": "now",
        "updated_at": "now"
      }
    ]
  }
}

mutation {
  user(insert: $data) {
    id
    full_name
    email
    products {
      id
      name
      price
    }
  }
}

variables {
  "data": {
    "name": "Banana 3",
    "price": 1.1,
    "created_at": "now",
    "updated_at": "now",
    "user": {
      "email": "a2@a.com",
      "full_name": "The Dude",
      "created_at": "now",
      "updated_at": "now"
    }
  }
}

mutation {
  products(insert: $data) {
    id
    name
    price
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "update": {
    "name": "my_name",
    "description": "my_desc"
  }
}

mutation {
  product(id: 15, update: $update, where: {id: {eq: 1}}) {
    id
    name
  }
}

variables {
  "update": {
    "name": "my_name",
    "description": "my_desc"
  }
}

mutation {
  product(update: $update, where: {id: {eq: 1}}) {
    id
    name
  }
}

variables {
  "update": {
    "name": "my_name 2",
    "description": "my_desc 2"
  }
}

mutation {
  product(update: $update, where: {id: {eq: 1}}) {
    id
    name
    description
  }
}

variables {
  "data": {
    "sale_type": "tuutuu",
    "quantity": 5,
    "due_date": "now",
    "customer": {
      "email": "thedude1@rug.com",
      "full_name": "The Dude"
    },
    "product": {
      "name": "Apple",
      "price": 1.25
    }
  }
}

mutation {
  purchase(update: $data, id: 5) {
    sale_type
    quantity
    due_date
    customer {
      id
      full_name
      email
    }
    product {
      id
      name
      price
    }
  }
}

variables {
  "data": {
    "email": "thedude@rug.com",
    "full_name": "The Dude",
    "created_at": "now",
    "updated_at": "now",
    "product": {
      "where": {
        "id": 2
      },
      "name": "Apple",
      "price": 1.25,
      "created_at": "now",
      "updated_at": "now"
    }
  }
}

mutation {
  user(update: $data, where: {id: {eq: 8}}) {
    id
    full_name
    email
    product {
      id
      name
      price
    }
  }
}

variables {
  "data": {
    "email": "thedude@rug.com",
    "full_name": "The Dude",
    "created_at": "now",
    "updated_at": "now",
    "product": {
      "where": {
        "id": 2
      },
      "name": "Apple",
      "price": 1.25,
      "created_at": "now",
      "updated_at": "now"
    }
  }
}

query {
  user(where: {id: {eq: 8}}) {
    id
    product {
      id
      name
      price
    }
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "created_at": "now",
    "updated_at": "now",
    "user": {
      "email": "thedude@rug.com"
    }
  }
}

query {
  user {
    email
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "created_at": "now",
    "updated_at": "now",
    "user": {
      "email": "booboo@demo.com"
    }
  }
}

mutation {
  product(update: $data, id: 6) {
    id
    name
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "created_at": "now",
    "updated_at": "now",
    "user": {
      "email": "booboo@demo.com"
    }
  }
}

query {
  product(id: 6) {
    id
    name
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "data": {
    "email": "thedude123@rug.com",
    "full_name": "The Dude",
    "created_at": "now",
    "updated_at": "now",
    "product": {
      "connect": {
        "id": 7
      },
      "disconnect": {
        "id": 8
      }
    }
  }
}

mutation {
  user(update: $data, id: 6) {
    id
    full_name
    email
    product {
      id
      name
      price
    }
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "created_at": "now",
    "updated_at": "now",
    "user": {
      "connect": {
        "id": 5,
        "email": "test@test.com"
      }
    }
  }
}

mutation {
  product(update: $data, id: 9) {
    id
    name
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "data": {
    "email": "thed44ude@rug.com",
    "full_name": "The Dude",
    "created_at": "now",
    "updated_at": "now",
    "product": {
      "connect": {
        "id": 5
      }
    }
  }
}

mutation {
  user(insert: $data) {
    id
    full_name
    email
    product {
      id
      name
      price
    }
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "created_at": "now",
    "updated_at": "now",
    "user": {
      "connect": {
        "id": 5
      }
    }
  }
}

mutation {
  product(insert: $data) {
    id
    name
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "data": [
    {
      "name": "Apple",
      "price": 1.25,
      "created_at": "now",
      "updated_at": "now",
      "user": {
        "connect": {
          "id": 6
        }
      }
    },
    {
      "name": "Coconut",
      "price": 2.25,
      "created_at": "now",
      "updated_at": "now",
      "user": {
        "connect": {
          "id": 3
        }
      }
    }
  ]
}

mutation {
  products(insert: $data) {
    id
    name
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "data": [
    {
      "name": "Apple",
      "price": 1.25,
      "created_at": "now",
      "updated_at": "now"
    },
    {
      "name": "Coconut",
      "price": 2.25,
      "created_at": "now",
      "updated_at": "now"
    }
  ]
}

mutation {
  products(insert: $data) {
    id
    name
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "user": {
      "connect": {
        "id": 5,
        "email": "test@test.com"
      }
    }
  }
}

mutation {
  product(update: $data, id: 9) {
    id
    name
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "user": {
      "connect": {
        "id": 5
      }
    }
  }
}

mutation {
  product(update: $data, id: 9) {
    id
    name
    user {
      id
      full_name
      email
    }
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "user": {
      "disconnect": {
        "id": 5
      }
    }
  }
}

mutation {
  product(update: $data, id: 9) {
    id
    name
    user_id
  }
}

variables {
  "data": {
    "name": "Apple",
    "price": 1.25,
    "user": {
      "disconnect": {
        "id": 5
      }
    }
  }
}

mutation {
  product(update: $data, id: 2) {
    id
    name
    user_id
  }
}
505 config/config.go
@ -1,505 +0,0 @@
// Package config provides the config values needed for Super Graph
// For detailed documentation visit https://supergraph.dev
package config

import (
	"fmt"
	"log"
	"os"
	"path"
	"strings"
	"time"

	"github.com/gobuffalo/flect"
	"github.com/spf13/viper"
)

const (
	LogLevelNone int = iota
	LogLevelInfo
	LogLevelWarn
	LogLevelError
	LogLevelDebug
)

// Config struct holds the Super Graph config values
type Config struct {
	Core `mapstructure:",squash"`
	Serv `mapstructure:",squash"`

	vi          *viper.Viper
	log         *log.Logger
	logLevel    int
	roles       map[string]*Role
	abacEnabled bool
	valid       bool
}

// Core struct contains core specific config value
type Core struct {
	Env        string
	Production bool
	LogLevel   string            `mapstructure:"log_level"`
	SecretKey  string            `mapstructure:"secret_key"`
	SetUserID  bool              `mapstructure:"set_user_id"`
	Vars       map[string]string `mapstructure:"variables"`
	Blocklist  []string
	Tables     []Table
	RolesQuery string `mapstructure:"roles_query"`
	Roles      []Role
}

// Serv struct contains config values used by the Super Graph service
type Serv struct {
	AppName        string `mapstructure:"app_name"`
	HostPort       string `mapstructure:"host_port"`
	Host           string
	Port           string
	HTTPGZip       bool     `mapstructure:"http_compress"`
	WebUI          bool     `mapstructure:"web_ui"`
	EnableTracing  bool     `mapstructure:"enable_tracing"`
	UseAllowList   bool     `mapstructure:"use_allow_list"`
	WatchAndReload bool     `mapstructure:"reload_on_config_change"`
	AuthFailBlock  bool     `mapstructure:"auth_fail_block"`
	SeedFile       string   `mapstructure:"seed_file"`
	MigrationsPath string   `mapstructure:"migrations_path"`
	AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
	DebugCORS      bool     `mapstructure:"cors_debug"`

	Inflections map[string]string

	Auth  Auth
	Auths []Auth

	DB struct {
		Type        string
		Host        string
		Port        uint16
		DBName      string
		User        string
		Password    string
		Schema      string
		PoolSize    int32         `mapstructure:"pool_size"`
		MaxRetries  int           `mapstructure:"max_retries"`
		PingTimeout time.Duration `mapstructure:"ping_timeout"`
	} `mapstructure:"database"`

	Actions []Action
}

// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
	Name          string
	Type          string
	Cookie        string
	CredsInHeader bool `mapstructure:"creds_in_header"`

	Rails struct {
		Version       string
		SecretKeyBase string `mapstructure:"secret_key_base"`
		URL           string
		Password      string
		MaxIdle       int `mapstructure:"max_idle"`
		MaxActive     int `mapstructure:"max_active"`
		Salt          string
		SignSalt      string `mapstructure:"sign_salt"`
		AuthSalt      string `mapstructure:"auth_salt"`
	}

	JWT struct {
		Provider   string
		Secret     string
		PubKeyFile string `mapstructure:"public_key_file"`
		PubKeyType string `mapstructure:"public_key_type"`
	}

	Header struct {
		Name   string
		Value  string
		Exists bool
	}
}

// Column struct defines a database column
type Column struct {
	Name       string
	Type       string
	ForeignKey string `mapstructure:"related_to"`
}

// Table struct defines a database table
type Table struct {
	Name      string
	Table     string
	Blocklist []string
	Remotes   []Remote
	Columns   []Column
|
||||
}
|
||||
|
||||
// Remote struct defines a remote API endpoint
|
||||
type Remote struct {
|
||||
Name string
|
||||
ID string
|
||||
Path string
|
||||
URL string
|
||||
Debug bool
|
||||
PassHeaders []string `mapstructure:"pass_headers"`
|
||||
SetHeaders []struct {
|
||||
Name string
|
||||
Value string
|
||||
} `mapstructure:"set_headers"`
|
||||
}
|
||||
|
||||
// Query struct contains access control values for query operations
|
||||
type Query struct {
|
||||
Limit int
|
||||
Filters []string
|
||||
Columns []string
|
||||
DisableFunctions bool `mapstructure:"disable_functions"`
|
||||
Block bool
|
||||
}
|
||||
|
||||
// Insert struct contains access control values for insert operations
|
||||
type Insert struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
Presets map[string]string
|
||||
Block bool
|
||||
}
|
||||
|
||||
// Insert struct contains access control values for update operations
|
||||
type Update struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
Presets map[string]string
|
||||
Block bool
|
||||
}
|
||||
|
||||
// Delete struct contains access control values for delete operations
|
||||
type Delete struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
Block bool
|
||||
}
|
||||
|
||||
// RoleTable struct contains role specific access control values for a database table
|
||||
type RoleTable struct {
|
||||
Name string
|
||||
|
||||
Query Query
|
||||
Insert Insert
|
||||
Update Update
|
||||
Delete Delete
|
||||
}
|
||||
|
||||
// Role struct contains role specific access control values for for all database tables
|
||||
type Role struct {
|
||||
Name string
|
||||
Match string
|
||||
Tables []RoleTable
|
||||
tablesMap map[string]*RoleTable
|
||||
}
|
||||
|
||||
// Action struct contains config values for a Super Graph service action
|
||||
type Action struct {
|
||||
Name string
|
||||
SQL string
|
||||
AuthName string `mapstructure:"auth_name"`
|
||||
}
|
||||
|
||||
// NewConfig function reads in the config file for the environment specified in the GO_ENV
|
||||
// environment variable. This is the best way to create a new Super Graph config.
|
||||
func NewConfig(path string) (*Config, error) {
|
||||
return NewConfigWithLogger(path, log.New(os.Stdout, "", 0))
|
||||
}
|
||||
|
||||
// NewConfigWithLogger function reads in the config file for the environment specified in the GO_ENV
|
||||
// environment variable. This is the best way to create a new Super Graph config.
|
||||
func NewConfigWithLogger(path string, logger *log.Logger) (*Config, error) {
|
||||
vi := newViper(path, GetConfigName())
|
||||
|
||||
if err := vi.ReadInConfig(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
inherits := vi.GetString("inherits")
|
||||
|
||||
if len(inherits) != 0 {
|
||||
vi = newViper(path, inherits)
|
||||
|
||||
if err := vi.ReadInConfig(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if vi.IsSet("inherits") {
|
||||
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
|
||||
inherits,
|
||||
vi.GetString("inherits"))
|
||||
}
|
||||
|
||||
vi.SetConfigName(GetConfigName())
|
||||
|
||||
if err := vi.MergeInConfig(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
c := &Config{log: logger, vi: vi}
|
||||
|
||||
if err := vi.Unmarshal(&c); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode config, %v", err)
|
||||
}
|
||||
|
||||
if err := c.init(); err != nil {
|
||||
return nil, fmt.Errorf("failed to initialize config: %w", err)
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
// NewConfigFrom function initializes a Config struct that you manually created
|
||||
// so it can be used by Super Graph
|
||||
func NewConfigFrom(c *Config, configPath string, logger *log.Logger) (*Config, error) {
|
||||
c.vi = newViper(configPath, GetConfigName())
|
||||
c.log = logger
|
||||
if err := c.init(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return c, nil
|
||||
}
|
||||
|
||||
func newViper(configPath, filename string) *viper.Viper {
|
||||
vi := viper.New()
|
||||
|
||||
vi.SetEnvPrefix("SG")
|
||||
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
|
||||
vi.AutomaticEnv()
|
||||
|
||||
vi.SetConfigName(filename)
|
||||
vi.AddConfigPath(configPath)
|
||||
vi.AddConfigPath("./config")
|
||||
|
||||
vi.SetDefault("host_port", "0.0.0.0:8080")
|
||||
vi.SetDefault("web_ui", false)
|
||||
vi.SetDefault("enable_tracing", false)
|
||||
vi.SetDefault("auth_fail_block", "always")
|
||||
vi.SetDefault("seed_file", "seed.js")
|
||||
|
||||
vi.SetDefault("database.type", "postgres")
|
||||
vi.SetDefault("database.host", "localhost")
|
||||
vi.SetDefault("database.port", 5432)
|
||||
vi.SetDefault("database.user", "postgres")
|
||||
vi.SetDefault("database.schema", "public")
|
||||
|
||||
vi.SetDefault("env", "development")
|
||||
|
||||
vi.BindEnv("env", "GO_ENV") //nolint: errcheck
|
||||
vi.BindEnv("host", "HOST") //nolint: errcheck
|
||||
vi.BindEnv("port", "PORT") //nolint: errcheck
|
||||
|
||||
vi.SetDefault("auth.rails.max_idle", 80)
|
||||
vi.SetDefault("auth.rails.max_active", 12000)
|
||||
|
||||
return vi
|
||||
}
|
||||
|
||||
func (c *Config) init() error {
|
||||
switch c.Core.LogLevel {
|
||||
case "debug":
|
||||
c.logLevel = LogLevelDebug
|
||||
case "error":
|
||||
c.logLevel = LogLevelError
|
||||
case "warn":
|
||||
c.logLevel = LogLevelWarn
|
||||
case "info":
|
||||
c.logLevel = LogLevelInfo
|
||||
default:
|
||||
c.logLevel = LogLevelNone
|
||||
}
|
||||
|
||||
if c.UseAllowList {
|
||||
c.Production = true
|
||||
}
|
||||
|
||||
for k, v := range c.Inflections {
|
||||
flect.AddPlural(k, v)
|
||||
}
|
||||
|
||||
// Tables: Validate and sanitize
|
||||
tm := make(map[string]struct{})
|
||||
|
||||
for i := 0; i < len(c.Tables); i++ {
|
||||
t := &c.Tables[i]
|
||||
t.Name = flect.Pluralize(strings.ToLower(t.Name))
|
||||
|
||||
if _, ok := tm[t.Name]; ok {
|
||||
c.Tables = append(c.Tables[:i], c.Tables[i+1:]...)
|
||||
c.log.Printf("WRN duplicate table found: %s", t.Name)
|
||||
}
|
||||
tm[t.Name] = struct{}{}
|
||||
|
||||
t.Table = flect.Pluralize(strings.ToLower(t.Table))
|
||||
}
|
||||
|
||||
// Variables: Validate and sanitize
|
||||
for k, v := range c.Vars {
|
||||
c.Vars[k] = sanitize(v)
|
||||
}
|
||||
|
||||
// Roles: validate and sanitize
|
||||
c.RolesQuery = sanitize(c.RolesQuery)
|
||||
c.roles = make(map[string]*Role)
|
||||
|
||||
for i := 0; i < len(c.Roles); i++ {
|
||||
r := &c.Roles[i]
|
||||
r.Name = strings.ToLower(r.Name)
|
||||
|
||||
if _, ok := c.roles[r.Name]; ok {
|
||||
c.Roles = append(c.Roles[:i], c.Roles[i+1:]...)
|
||||
c.log.Printf("WRN duplicate role found: %s", r.Name)
|
||||
}
|
||||
|
||||
r.Match = sanitize(r.Match)
|
||||
r.tablesMap = make(map[string]*RoleTable)
|
||||
|
||||
for n, table := range r.Tables {
|
||||
r.tablesMap[table.Name] = &r.Tables[n]
|
||||
}
|
||||
|
||||
c.roles[r.Name] = r
|
||||
}
|
||||
|
||||
if _, ok := c.roles["user"]; !ok {
|
||||
u := Role{Name: "user"}
|
||||
c.Roles = append(c.Roles, u)
|
||||
c.roles["user"] = &u
|
||||
}
|
||||
|
||||
if _, ok := c.roles["anon"]; !ok {
|
||||
c.log.Printf("WRN unauthenticated requests will be blocked. no role 'anon' defined")
|
||||
c.AuthFailBlock = true
|
||||
}
|
||||
|
||||
if len(c.RolesQuery) == 0 {
|
||||
c.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
||||
}
|
||||
|
||||
if len(c.RolesQuery) == 0 {
|
||||
c.abacEnabled = false
|
||||
} else {
|
||||
switch len(c.Roles) {
|
||||
case 0, 1:
|
||||
c.abacEnabled = false
|
||||
case 2:
|
||||
_, ok1 := c.roles["anon"]
|
||||
_, ok2 := c.roles["user"]
|
||||
c.abacEnabled = !(ok1 && ok2)
|
||||
default:
|
||||
c.abacEnabled = true
|
||||
}
|
||||
}
|
||||
|
||||
// Auths: validate and sanitize
|
||||
am := make(map[string]struct{})
|
||||
|
||||
for i := 0; i < len(c.Auths); i++ {
|
||||
a := &c.Auths[i]
|
||||
a.Name = strings.ToLower(a.Name)
|
||||
|
||||
if _, ok := am[a.Name]; ok {
|
||||
c.Auths = append(c.Auths[:i], c.Auths[i+1:]...)
|
||||
c.log.Printf("WRN duplicate auth found: %s", a.Name)
|
||||
}
|
||||
am[a.Name] = struct{}{}
|
||||
}
|
||||
|
||||
// Actions: validate and sanitize
|
||||
axm := make(map[string]struct{})
|
||||
|
||||
for i := 0; i < len(c.Actions); i++ {
|
||||
a := &c.Actions[i]
|
||||
a.Name = strings.ToLower(a.Name)
|
||||
a.AuthName = strings.ToLower(a.AuthName)
|
||||
|
||||
if _, ok := axm[a.Name]; ok {
|
||||
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
|
||||
c.log.Printf("WRN duplicate action found: %s", a.Name)
|
||||
}
|
||||
|
||||
if _, ok := am[a.AuthName]; !ok {
|
||||
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
|
||||
c.log.Printf("WRN invalid auth_name '%s' for auth: %s", a.AuthName, a.Name)
|
||||
}
|
||||
axm[a.Name] = struct{}{}
|
||||
}
|
||||
|
||||
c.valid = true
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetDBTableAliases function returns a map with database tables as keys
|
||||
// and a list of aliases as values
|
||||
func (c *Config) GetDBTableAliases() map[string][]string {
|
||||
m := make(map[string][]string, len(c.Tables))
|
||||
|
||||
for i := range c.Tables {
|
||||
t := c.Tables[i]
|
||||
|
||||
if len(t.Table) == 0 || len(t.Columns) != 0 {
|
||||
continue
|
||||
}
|
||||
|
||||
m[t.Table] = append(m[t.Table], t.Name)
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
// IsABACEnabled function returns true if attribute based access control is enabled
|
||||
func (c *Config) IsABACEnabled() bool {
|
||||
return c.abacEnabled
|
||||
}
|
||||
|
||||
// IsAnonRoleDefined function returns true if the config has configuration for the `anon` role
|
||||
func (c *Config) IsAnonRoleDefined() bool {
|
||||
_, ok := c.roles["anon"]
|
||||
return ok
|
||||
}
|
||||
|
||||
// GetRole function returns returns the Role struct by name
|
||||
func (c *Config) GetRole(name string) *Role {
|
||||
role := c.roles[name]
|
||||
return role
|
||||
}
|
||||
|
||||
// ConfigPathUsed function returns the path to the current config file (excluding filename)
|
||||
func (c *Config) ConfigPathUsed() string {
|
||||
return path.Dir(c.vi.ConfigFileUsed())
|
||||
}
|
||||
|
||||
// WriteConfigAs function writes the config to a file
|
||||
// Format defined by extension (eg: .yml, .json)
|
||||
func (c *Config) WriteConfigAs(fname string) error {
|
||||
return c.vi.WriteConfigAs(fname)
|
||||
}
|
||||
|
||||
// Log function returns the logger
|
||||
func (c *Config) Log() *log.Logger {
|
||||
return c.log
|
||||
}
|
||||
|
||||
// LogLevel function returns the log level
|
||||
func (c *Config) LogLevel() int {
|
||||
return c.logLevel
|
||||
}
|
||||
|
||||
// IsValid function returns true if the Config struct is initialized and valid
|
||||
func (c *Config) IsValid() bool {
|
||||
return c.valid
|
||||
}
|
||||
|
||||
// GetTable function returns the RoleTable struct for a Role by table name
|
||||
func (r *Role) GetTable(name string) *RoleTable {
|
||||
table := r.tablesMap[name]
|
||||
return table
|
||||
}
|
@@ -1,13 +0,0 @@
package config

import (
    "testing"
)

func TestInitConf(t *testing.T) {
    _, err := NewConfig("../examples/rails-app/config/supergraph")

    if err != nil {
        t.Fatal(err.Error())
    }
}
226 config/dev.yml Normal file
@@ -0,0 +1,226 @@
app_name: "Super Graph Development"
host_port: 0.0.0.0:8080
web_ui: true

# debug, error, warn, info, none
log_level: "debug"

# enable or disable http compression (uses gzip)
http_compress: true

# When production mode is 'true' only queries
# from the allow list are permitted.
# When it's 'false' all queries are saved to the
# allow list in ./config/allow.list
production: false

# Throw a 401 on auth failure for queries that need auth
auth_fail_block: false

# Latency tracing for database queries and remote joins
# the resulting latency information is returned with the
# response
enable_tracing: true

# Watch the config folder and reload Super Graph
# with the new configs when a change is detected
reload_on_config_change: true

# File that points to the database seeding script
# seed_file: seed.js

# Path pointing to where the migrations can be found
migrations_path: ./config/migrations

# Secret key for general encryption operations like
# encrypting the cursor data
secret_key: supercalifajalistics

# CORS: A list of origins a cross-domain request can be executed from.
# If the special * value is present in the list, all origins will be allowed.
# An origin may contain a wildcard (*) to replace 0 or more
# characters (i.e.: http://*.domain.com).
cors_allowed_origins: ["*"]

# Debug Cross Origin Resource Sharing requests
cors_debug: true

# Postgres related environment variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
# SG_DATABASE_USER
# SG_DATABASE_PASSWORD

# Auth related environment variables
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE

# inflections:
#   person: people
#   sheep: sheep

auth:
  # Can be 'rails' or 'jwt'
  type: rails
  cookie: _app_session

  # Comment this out if you want to disable setting
  # the user_id via a header for testing.
  # Disable in production
  creds_in_header: true

  rails:
    # Rails version this is used for reading the
    # various cookies formats.
    version: 5.2

    # Found in 'Rails.application.config.secret_key_base'
    secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566

    # Remote cookie store. (memcache or redis)
    # url: redis://redis:6379
    # password: ""
    # max_idle: 80
    # max_active: 12000

    # In most cases you don't need these
    # salt: "encrypted cookie"
    # sign_salt: "signed encrypted cookie"
    # auth_salt: "authenticated encrypted cookie"

  # jwt:
  #   provider: auth0
  #   secret: abc335bfcfdb04e50db5bb0a4d67ab9
  #   public_key_file: /secrets/public_key.pem
  #   public_key_type: ecdsa #rsa

database:
  type: postgres
  host: db
  port: 5432
  dbname: app_development
  user: postgres
  password: postgres

  #schema: "public"
  #pool_size: 10
  #max_retries: 0
  #log_level: "debug"

  # Set session variable "user.id" to the user id
  # Enable this if you need the user id in triggers, etc
  set_user_id: false

  # database ping timeout is used for db health checking
  ping_timeout: 1m

# Define additional variables here to be used with filters
variables:
  admin_account_id: "5"

# Field and table names that you wish to block
blocklist:
  - ar_internal_metadata
  - schema_migrations
  - secret
  - password
  - encrypted
  - token

tables:
  - name: customers
    remotes:
      - name: payments
        id: stripe_id
        url: http://rails_app:3000/stripe/$id
        path: data
        # debug: true
        pass_headers:
          - cookie
        set_headers:
          - name: Host
            value: 0.0.0.0
          # - name: Authorization
          #   value: Bearer <stripe_api_key>

  - # You can create new fields that have a
    # real db table backing them
    name: me
    table: users

  - name: deals
    table: products

  - name: users
    columns:
      - name: email
        related_to: products.name

roles_query: "SELECT * FROM users WHERE id = $user_id"

roles:
  - name: anon
    tables:
      - name: products
        query:
          limit: 10
          columns: ["id", "name", "description"]
          aggregation: false

        insert:
          block: false

        update:
          block: false

        delete:
          block: false

      - name: deals
        query:
          limit: 3
          aggregation: false

      - name: purchases
        query:
          limit: 3
          aggregation: false

  - name: user
    tables:
      - name: users
        query:
          filters: ["{ id: { _eq: $user_id } }"]

      - name: products
        query:
          limit: 50
          filters: ["{ user_id: { eq: $user_id } }"]
          disable_functions: false

        insert:
          filters: ["{ user_id: { eq: $user_id } }"]
          presets:
            - user_id: "$user_id"
            - created_at: "now"
            - updated_at: "now"

        update:
          filters: ["{ user_id: { eq: $user_id } }"]
          columns:
            - id
            - name
          presets:
            - updated_at: "now"

        delete:
          block: true

  - name: admin
    match: id = 1000
    tables:
      - name: users
        filters: []
67 config/prod.yml Normal file
@@ -0,0 +1,67 @@
# Inherit config from this other config file
# so I only need to overwrite some values
inherits: dev

app_name: "Super Graph Production"
host_port: 0.0.0.0:8080
web_ui: false

# debug, error, warn, info, none
log_level: "info"

# enable or disable http compression (uses gzip)
http_compress: true

# When production mode is 'true' only queries
# from the allow list are permitted.
# When it's 'false' all queries are saved to the
# allow list in ./config/allow.list
production: true

# Throw a 401 on auth failure for queries that need auth
auth_fail_block: true

# Latency tracing for database queries and remote joins
# the resulting latency information is returned with the
# response
enable_tracing: true

# File that points to the database seeding script
# seed_file: seed.js

# Path pointing to where the migrations can be found
# migrations_path: migrations

# Secret key for general encryption operations like
# encrypting the cursor data
# secret_key: supercalifajalistics

# Postgres related environment variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
# SG_DATABASE_USER
# SG_DATABASE_PASSWORD

# Auth related environment variables
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE

database:
  type: postgres
  host: db
  port: 5432
  dbname: app_production
  user: postgres
  password: postgres
  #pool_size: 10
  #max_retries: 0
  #log_level: "debug"

  # Set session variable "user.id" to the user id
  # Enable this if you need the user id in triggers, etc
  set_user_id: false

  # database ping timeout is used for db health checking
  ping_timeout: 5m
116 config/seed.js Normal file
@@ -0,0 +1,116 @@
var user_count = 10
  customer_count = 100
  product_count = 50
  purchase_count = 100

var users = []
  customers = []
  products = []

for (i = 0; i < user_count; i++) {
  var pwd = fake.password()
  var data = {
    full_name: fake.name(),
    avatar: fake.avatar_url(200),
    phone: fake.phone(),
    email: fake.email(),
    password: pwd,
    password_confirmation: pwd,
    created_at: "now",
    updated_at: "now"
  }

  var res = graphql(" \
  mutation { \
    user(insert: $data) { \
      id \
    } \
  }", { data: data })

  users.push(res.user)
}

for (i = 0; i < product_count; i++) {
  var n = Math.floor(Math.random() * users.length)
  var user = users[n]

  var desc = [
    fake.beer_style(),
    fake.beer_hop(),
    fake.beer_yeast(),
    fake.beer_ibu(),
    fake.beer_alcohol(),
    fake.beer_blg(),
  ].join(", ")

  var data = {
    name: fake.beer_name(),
    description: desc,
    price: fake.price()
    //user_id: user.id,
    //created_at: "now",
    //updated_at: "now"
  }

  var res = graphql(" \
  mutation { \
    product(insert: $data) { \
      id \
    } \
  }", { data: data }, {
    user_id: 5
  })
  products.push(res.product)
}

for (i = 0; i < customer_count; i++) {
  var pwd = fake.password()

  var data = {
    stripe_id: "CUS-" + fake.uuid(),
    full_name: fake.name(),
    phone: fake.phone(),
    email: fake.email(),
    password: pwd,
    password_confirmation: pwd,
    created_at: "now",
    updated_at: "now"
  }

  var res = graphql(" \
  mutation { \
    customer(insert: $data) { \
      id \
    } \
  }", { data: data })
  customers.push(res.customer)
}

for (i = 0; i < purchase_count; i++) {
  var sale_type = fake.rand_string(["rented", "bought"])

  if (sale_type === "rented") {
    var due_date = fake.date()
    var returned = fake.date()
  }

  var data = {
    customer_id: customers[Math.floor(Math.random() * customer_count)].id,
    product_id: products[Math.floor(Math.random() * product_count)].id,
    sale_type: sale_type,
    quantity: Math.floor(Math.random() * 10),
    due_date: due_date,
    returned: returned,
    created_at: "now",
    updated_at: "now"
  }

  var res = graphql(" \
  mutation { \
    purchase(insert: $data) { \
      id \
    } \
  }", { data: data })

  console.log(res)
}
@@ -1,52 +0,0 @@
package config

import (
    "os"
    "regexp"
    "strings"
    "unicode"
)

var (
    varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
    varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
)

func sanitize(s string) string {
    s0 := varRe1.ReplaceAllString(s, `{{$1}}`)

    s1 := strings.Map(func(r rune) rune {
        if unicode.IsSpace(r) {
            return ' '
        }
        return r
    }, s0)

    return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
        return strings.ToLower(m)
    })
}

func GetConfigName() string {
    if len(os.Getenv("GO_ENV")) == 0 {
        return "dev"
    }

    ge := strings.ToLower(os.Getenv("GO_ENV"))

    switch {
    case strings.HasPrefix(ge, "pro"):
        return "prod"

    case strings.HasPrefix(ge, "sta"):
        return "stage"

    case strings.HasPrefix(ge, "tes"):
        return "test"

    case strings.HasPrefix(ge, "dev"):
        return "dev"
    }

    return ge
}
50 core/api.go
@@ -9,7 +9,6 @@
    "database/sql"
    "fmt"
    "time"
    "github.com/dosco/super-graph/config"
    "github.com/dosco/super-graph/core"
    _ "github.com/jackc/pgx/v4/stdlib"
)
@@ -20,7 +19,7 @@
        log.Fatalf(err)
    }

    conf, err := config.NewConfig("./config")
    conf, err := core.ReadInConfig("./config/dev.yml")
    if err != nil {
        log.Fatalf(err)
    }
@@ -53,10 +52,9 @@ import (
    "crypto/sha256"
    "database/sql"
    "encoding/json"
    "fmt"
    "log"
    _log "log"
    "os"

    "github.com/dosco/super-graph/config"
    "github.com/dosco/super-graph/core/internal/allow"
    "github.com/dosco/super-graph/core/internal/crypto"
    "github.com/dosco/super-graph/core/internal/psql"
@@ -80,36 +78,32 @@ const (
// SuperGraph struct is an instance of the Super Graph engine it holds all the required information like
// database schemas, relationships, etc. that the GraphQL to SQL compiler would need to do its job.
type SuperGraph struct {
    conf      *config.Config
    db        *sql.DB
    schema    *psql.DBSchema
    allowList *allow.List
    encKey    [32]byte
    prepared  map[string]*preparedItem
    getRole   *sql.Stmt
    qc        *qcode.Compiler
    pc        *psql.Compiler
}

// NewConfig function initializes config using a config.Core struct
func NewConfig(core config.Core, configPath string, logger *log.Logger) (*config.Config, error) {
    c, err := config.NewConfigFrom(&config.Config{Core: core}, configPath, logger)
    if err != nil {
        return nil, err
    }
    return c, nil
    conf        *Config
    db          *sql.DB
    log         *_log.Logger
    schema      *psql.DBSchema
    allowList   *allow.List
    encKey      [32]byte
    prepared    map[string]*preparedItem
    roles       map[string]*Role
    getRole     *sql.Stmt
    abacEnabled bool
    anonExists  bool
    qc          *qcode.Compiler
    pc          *psql.Compiler
}

// NewSuperGraph creates the SuperGraph struct, this involves querying the database to learn its
// schemas and relationships
func NewSuperGraph(conf *config.Config, db *sql.DB) (*SuperGraph, error) {
    if !conf.IsValid() {
        return nil, fmt.Errorf("invalid config")
    }

func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
    sg := &SuperGraph{
        conf: conf,
        db:   db,
        log:  _log.New(os.Stdout, "", 0),
    }

    if err := sg.initConfig(); err != nil {
        return nil, err
    }

    if err := sg.initCompilers(); err != nil {

@@ -7,13 +7,12 @@ import (
    "fmt"
    "io"

    "github.com/dosco/super-graph/config"
    "github.com/dosco/super-graph/core/internal/psql"
    "github.com/dosco/super-graph/core/internal/qcode"
)

type stmt struct {
    role *config.Role
    role *Role
    qc      *qcode.QCode
    skipped uint32
    sql     string
@@ -29,7 +28,7 @@ func (sg *SuperGraph) buildStmt(qt qcode.QType, query, vars []byte, role string)
        return sg.buildRoleStmt(query, vars, "anon")
    }

    if sg.conf.IsABACEnabled() {
    if sg.abacEnabled {
        return sg.buildMultiStmt(query, vars)
    }

@@ -41,8 +40,8 @@ func (sg *SuperGraph) buildStmt(qt qcode.QType, query, vars []byte, role string)
}

func (sg *SuperGraph) buildRoleStmt(query, vars []byte, role string) ([]stmt, error) {
    ro := sg.conf.GetRole(role)
    if ro == nil {
    ro, ok := sg.roles[role]
    if !ok {
        return nil, fmt.Errorf(`roles '%s' not defined in c.sg.config`, role)
    }

@@ -168,7 +167,7 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
    return w.String(), nil
}

func (sg *SuperGraph) hasTablesWithConfig(qc *qcode.QCode, role *config.Role) bool {
func (sg *SuperGraph) hasTablesWithConfig(qc *qcode.QCode, role *Role) bool {
    for _, id := range qc.Roots {
        t, err := sg.schema.GetTable(qc.Selects[id].Name)
        if err != nil {
286 core/config.go
@@ -2,164 +2,162 @@ package core

import (
    "fmt"
    "path"
    "strings"

    "github.com/dosco/super-graph/config"
    "github.com/dosco/super-graph/core/internal/psql"
    "github.com/dosco/super-graph/core/internal/qcode"
    "github.com/spf13/viper"
)

func addTables(c *config.Config, di *psql.DBInfo) error {
    for _, t := range c.Tables {
        if len(t.Table) == 0 || len(t.Columns) == 0 {
            continue
// Config struct contains core specific config values
type Config struct {
    SecretKey     string            `mapstructure:"secret_key"`
    UseAllowList  bool              `mapstructure:"use_allow_list"`
    AllowListFile string            `mapstructure:"allow_list_file"`
    SetUserID     bool              `mapstructure:"set_user_id"`
    Vars          map[string]string `mapstructure:"variables"`
    Blocklist     []string
    Tables        []Table
    RolesQuery    string `mapstructure:"roles_query"`
    Roles         []Role
    Inflections   map[string]string
}

// Table struct defines a database table
type Table struct {
    Name      string
    Table     string
    Blocklist []string
    Remotes   []Remote
    Columns   []Column
}

// Column struct defines a database column
type Column struct {
    Name       string
    Type       string
    ForeignKey string `mapstructure:"related_to"`
}

// Remote struct defines a remote API endpoint
type Remote struct {
    Name        string
    ID          string
    Path        string
    URL         string
    Debug       bool
    PassHeaders []string `mapstructure:"pass_headers"`
    SetHeaders  []struct {
        Name  string
        Value string
    } `mapstructure:"set_headers"`
}

// Role struct contains role specific access control values for all database tables
type Role struct {
    Name   string
    Match  string
    Tables []RoleTable
    tm     map[string]*RoleTable
}

// RoleTable struct contains role specific access control values for a database table
type RoleTable struct {
    Name string

    Query  Query
    Insert Insert
    Update Update
    Delete Delete
}

// Query struct contains access control values for query operations
type Query struct {
    Limit            int
    Filters          []string
    Columns          []string
    DisableFunctions bool `mapstructure:"disable_functions"`
    Block            bool
}

// Insert struct contains access control values for insert operations
type Insert struct {
    Filters []string
    Columns []string
    Presets map[string]string
    Block   bool
}

// Update struct contains access control values for update operations
type Update struct {
    Filters []string
    Columns []string
    Presets map[string]string
    Block   bool
}

// Delete struct contains access control values for delete operations
type Delete struct {
    Filters []string
    Columns []string
    Block   bool
}

// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
// environment variable. This is the best way to create a new Super Graph config.
func ReadInConfig(configFile string) (*Config, error) {
    cpath := path.Dir(configFile)
    cfile := path.Base(configFile)
    vi := newViper(cpath, cfile)

    if err := vi.ReadInConfig(); err != nil {
        return nil, err
    }

    inherits := vi.GetString("inherits")

    if len(inherits) != 0 {
        vi = newViper(cpath, inherits)

        if err := vi.ReadInConfig(); err != nil {
            return nil, err
        }
        if err := addTable(di, t.Columns, t); err != nil {
            return err

        if vi.IsSet("inherits") {
            return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
                inherits,
                vi.GetString("inherits"))
        }
    }
    return nil
}

func addTable(di *psql.DBInfo, cols []config.Column, t config.Table) error {
    bc, ok := di.GetColumn(t.Table, t.Name)
    if !ok {
        return fmt.Errorf(
            "Column '%s' not found on table '%s'",
            t.Name, t.Table)
    }
        vi.SetConfigName(cfile)

    if bc.Type != "json" && bc.Type != "jsonb" {
        return fmt.Errorf(
            "Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
            t.Name, t.Table, bc.Type)
    }

    table := psql.DBTable{
        Name: t.Name,
        Key:  strings.ToLower(t.Name),
        Type: bc.Type,
    }

    columns := make([]psql.DBColumn, 0, len(cols))

    for i := range cols {
        c := cols[i]
        columns = append(columns, psql.DBColumn{
            Name: c.Name,
            Key:  strings.ToLower(c.Name),
            Type: c.Type,
        })
    }

    di.AddTable(table, columns)
    bc.FKeyTable = t.Name

    return nil
}

func addForeignKeys(c *config.Config, di *psql.DBInfo) error {
    for _, t := range c.Tables {
        for _, c := range t.Columns {
            if len(c.ForeignKey) == 0 {
                continue
            }
            if err := addForeignKey(di, c, t); err != nil {
                return err
            }
        }
    }
    return nil
}

func addForeignKey(di *psql.DBInfo, c config.Column, t config.Table) error {
    c1, ok := di.GetColumn(t.Name, c.Name)
    if !ok {
        return fmt.Errorf(
            "Invalid table '%s' or column '%s' in config.Config",
            t.Name, c.Name)
    }

    v := strings.SplitN(c.ForeignKey, ".", 2)
    if len(v) != 2 {
        return fmt.Errorf(
            "Invalid foreign_key in config.Config for table '%s' and column '%s",
            t.Name, c.Name)
    }

    fkt, fkc := v[0], v[1]
    c2, ok := di.GetColumn(fkt, fkc)
    if !ok {
        return fmt.Errorf(
            "Invalid foreign_key in config.Config for table '%s' and column '%s",
            t.Name, c.Name)
    }

    c1.FKeyTable = fkt
    c1.FKeyColID = []int16{c2.ID}

    return nil
}

func addRoles(c *config.Config, qc *qcode.Compiler) error {
    for _, r := range c.Roles {
        for _, t := range r.Tables {
            if err := addRole(qc, r, t); err != nil {
                return err
            }
        if err := vi.MergeInConfig(); err != nil {
            return nil, err
        }
    }

    return nil
    c := &Config{}

    if err := vi.Unmarshal(&c); err != nil {
        return nil, fmt.Errorf("failed to decode config, %v", err)
    }

    if len(c.AllowListFile) == 0 {
        c.AllowListFile = path.Join(cpath, "allow.list")
    }

    return c, nil
}

func addRole(qc *qcode.Compiler, r config.Role, t config.RoleTable) error {
    blockFilter := []string{"false"}
func newViper(configPath, configFile string) *viper.Viper {
    vi := viper.New()

    query := qcode.QueryConfig{
        Limit:            t.Query.Limit,
        Filters:          t.Query.Filters,
        Columns:          t.Query.Columns,
        DisableFunctions: t.Query.DisableFunctions,
    }
    vi.SetEnvPrefix("SG")
    vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
    vi.AutomaticEnv()

    if t.Query.Block {
        query.Filters = blockFilter
    }
    vi.SetConfigName(configFile)
    vi.AddConfigPath(configPath)
    vi.AddConfigPath("./config")

    insert := qcode.InsertConfig{
        Filters: t.Insert.Filters,
        Columns: t.Insert.Columns,
        Presets: t.Insert.Presets,
    }

    if t.Insert.Block {
        insert.Filters = blockFilter
    }

    update := qcode.UpdateConfig{
        Filters: t.Update.Filters,
        Columns: t.Update.Columns,
        Presets: t.Update.Presets,
    }

    if t.Update.Block {
        update.Filters = blockFilter
    }

    delete := qcode.DeleteConfig{
        Filters: t.Delete.Filters,
        Columns: t.Delete.Columns,
    }

    if t.Delete.Block {
        delete.Filters = blockFilter
    }

    return qc.AddRole(r.Name, t.Name, qcode.TRConfig{
        Query:  query,
        Insert: insert,
        Update: update,
        Delete: delete,
    })
    return vi
}
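Worth noting in the new `ReadInConfig` above: a config file may name another file via `inherits`, in which case the inherited file is read first and the named file is merged over it, and an inherited file may not itself inherit. A short sketch of loading the `config/prod.yml` added in this diff (which declares `inherits: dev`); everything here follows from the code above, but treat it as an illustration rather than official usage:

```go
package main

import (
    "fmt"
    "log"

    "github.com/dosco/super-graph/core"
)

func main() {
    // prod.yml declares `inherits: dev`, so dev.yml is read first and
    // prod.yml is merged over it; values set in prod.yml win, anything
    // prod.yml omits (like roles_query) falls through from dev.yml.
    conf, err := core.ReadInConfig("./config/prod.yml")
    if err != nil {
        log.Fatal(err)
    }

    // allow_list_file was set in neither file, so it defaults to an
    // allow.list sitting next to the config file.
    fmt.Println(conf.RolesQuery, conf.AllowListFile)
}
```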
10 core/core.go
@@ -63,7 +63,7 @@ func (sg *SuperGraph) initCompilers() error {
        return err
    }

    sg.schema, err = psql.NewDBSchema(di, sg.conf.GetDBTableAliases())
    sg.schema, err = psql.NewDBSchema(di, getDBTableAliases(sg.conf))
    if err != nil {
        return err
    }
@@ -92,7 +92,7 @@ func (c *scontext) execQuery() ([]byte, error) {
    // var st *stmt
    var err error

    if c.sg.conf.Production {
    if c.sg.conf.UseAllowList {
        data, _, err = c.resolvePreparedSQL()
        if err != nil {
            return nil, err
@@ -115,7 +115,7 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
    var err error

    mutation := (c.res.op == qcode.QTMutation)
    useRoleQuery := c.sg.conf.IsABACEnabled() && mutation
    useRoleQuery := c.sg.abacEnabled && mutation
    useTx := useRoleQuery || c.sg.conf.SetUserID

    if useTx {
@@ -148,7 +148,7 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {

    c.res.role = role

    ps, ok := prepared[stmtHash(c.res.name, role)]
    ps, ok := c.sg.prepared[stmtHash(c.res.name, role)]
    if !ok {
        return nil, nil, errNotFound
    }
@@ -198,7 +198,7 @@ func (c *scontext) resolveSQL() ([]byte, *stmt, error) {
    var err error

    mutation := (c.res.op == qcode.QTMutation)
    useRoleQuery := c.sg.conf.IsABACEnabled() && mutation
    useRoleQuery := c.sg.abacEnabled && mutation
    useTx := useRoleQuery || c.sg.conf.SetUserID

    if useTx {
284 core/init.go Normal file
@@ -0,0 +1,284 @@
package core

import (
    "fmt"
    "regexp"
    "strings"
    "unicode"

    "github.com/dosco/super-graph/core/internal/psql"
    "github.com/dosco/super-graph/core/internal/qcode"
    "github.com/gobuffalo/flect"
)

func (sg *SuperGraph) initConfig() error {
    c := sg.conf

    for k, v := range c.Inflections {
        flect.AddPlural(k, v)
    }

    // Variables: Validate and sanitize
    for k, v := range c.Vars {
        c.Vars[k] = sanitizeVars(v)
    }

    // Tables: Validate and sanitize
    tm := make(map[string]struct{})

    for i := 0; i < len(c.Tables); i++ {
        t := &c.Tables[i]
        t.Name = flect.Pluralize(strings.ToLower(t.Name))

        if _, ok := tm[t.Name]; ok {
            sg.conf.Tables = append(c.Tables[:i], c.Tables[i+1:]...)
            sg.log.Printf("WRN duplicate table found: %s", t.Name)
        }
        tm[t.Name] = struct{}{}

        t.Table = flect.Pluralize(strings.ToLower(t.Table))
    }

    sg.roles = make(map[string]*Role)

    for i := 0; i < len(c.Roles); i++ {
        role := &c.Roles[i]
        role.Name = sanitize(role.Name)

        if _, ok := sg.roles[role.Name]; ok {
            c.Roles = append(c.Roles[:i], c.Roles[i+1:]...)
            sg.log.Printf("WRN duplicate role found: %s", role.Name)
        }

        role.Match = sanitize(role.Match)
        role.tm = make(map[string]*RoleTable)

        for n, table := range role.Tables {
            role.tm[table.Name] = &role.Tables[n]
        }

        sg.roles[role.Name] = role
    }

    // If user role not defined then create it
    if _, ok := sg.roles["user"]; !ok {
        ur := Role{
            Name: "user",
            tm:   make(map[string]*RoleTable),
        }
        c.Roles = append(c.Roles, ur)
        sg.roles["user"] = &ur
    }

    // Roles: validate and sanitize
    c.RolesQuery = sanitize(c.RolesQuery)

    if len(c.RolesQuery) == 0 {
        sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
    }

    _, userExists := sg.roles["user"]
    _, sg.anonExists = sg.roles["anon"]

    sg.abacEnabled = userExists && len(c.RolesQuery) != 0

    return nil
}

func getDBTableAliases(c *Config) map[string][]string {
    m := make(map[string][]string, len(c.Tables))

    for i := range c.Tables {
        t := c.Tables[i]

        if len(t.Table) == 0 || len(t.Columns) != 0 {
            continue
        }

        m[t.Table] = append(m[t.Table], t.Name)
    }
    return m
}

func addTables(c *Config, di *psql.DBInfo) error {
    for _, t := range c.Tables {
        if len(t.Table) == 0 || len(t.Columns) == 0 {
            continue
        }
        if err := addTable(di, t.Columns, t); err != nil {
            return err
        }
    }
    return nil
}

func addTable(di *psql.DBInfo, cols []Column, t Table) error {
    bc, ok := di.GetColumn(t.Table, t.Name)
    if !ok {
        return fmt.Errorf(
            "Column '%s' not found on table '%s'",
            t.Name, t.Table)
    }

    if bc.Type != "json" && bc.Type != "jsonb" {
        return fmt.Errorf(
            "Column '%s' in table '%s' is of type '%s'. Only JSON or JSONB is valid",
            t.Name, t.Table, bc.Type)
    }

    table := psql.DBTable{
        Name: t.Name,
        Key:  strings.ToLower(t.Name),
        Type: bc.Type,
    }

    columns := make([]psql.DBColumn, 0, len(cols))

    for i := range cols {
        c := cols[i]
        columns = append(columns, psql.DBColumn{
            Name: c.Name,
            Key:  strings.ToLower(c.Name),
            Type: c.Type,
        })
    }

    di.AddTable(table, columns)
    bc.FKeyTable = t.Name

    return nil
}

func addForeignKeys(c *Config, di *psql.DBInfo) error {
    for _, t := range c.Tables {
        for _, c := range t.Columns {
            if len(c.ForeignKey) == 0 {
                continue
            }
            if err := addForeignKey(di, c, t); err != nil {
                return err
            }
        }
    }
    return nil
}

func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
    c1, ok := di.GetColumn(t.Name, c.Name)
    if !ok {
        return fmt.Errorf(
            "Invalid table '%s' or column '%s' in Config",
            t.Name, c.Name)
    }

    v := strings.SplitN(c.ForeignKey, ".", 2)
    if len(v) != 2 {
        return fmt.Errorf(
            "Invalid foreign_key in Config for table '%s' and column '%s",
            t.Name, c.Name)
    }

    fkt, fkc := v[0], v[1]
    c2, ok := di.GetColumn(fkt, fkc)
    if !ok {
        return fmt.Errorf(
            "Invalid foreign_key in Config for table '%s' and column '%s",
            t.Name, c.Name)
    }

    c1.FKeyTable = fkt
    c1.FKeyColID = []int16{c2.ID}

    return nil
}

func addRoles(c *Config, qc *qcode.Compiler) error {
    for _, r := range c.Roles {
        for _, t := range r.Tables {
            if err := addRole(qc, r, t); err != nil {
                return err
            }
        }
    }

    return nil
}

func addRole(qc *qcode.Compiler, r Role, t RoleTable) error {
    blockFilter := []string{"false"}

    query := qcode.QueryConfig{
        Limit:            t.Query.Limit,
        Filters:          t.Query.Filters,
        Columns:          t.Query.Columns,
        DisableFunctions: t.Query.DisableFunctions,
    }

    if t.Query.Block {
        query.Filters = blockFilter
    }

    insert := qcode.InsertConfig{
        Filters: t.Insert.Filters,
        Columns: t.Insert.Columns,
        Presets: t.Insert.Presets,
    }

    if t.Insert.Block {
        insert.Filters = blockFilter
    }

    update := qcode.UpdateConfig{
        Filters: t.Update.Filters,
        Columns: t.Update.Columns,
        Presets: t.Update.Presets,
    }

    if t.Update.Block {
        update.Filters = blockFilter
    }

    delete := qcode.DeleteConfig{
        Filters: t.Delete.Filters,
        Columns: t.Delete.Columns,
    }

    if t.Delete.Block {
        delete.Filters = blockFilter
    }

    return qc.AddRole(r.Name, t.Name, qcode.TRConfig{
        Query:  query,
        Insert: insert,
        Update: update,
        Delete: delete,
    })
}

func (r *Role) GetTable(name string) *RoleTable {
    return r.tm[name]
}

func sanitize(value string) string {
    return strings.ToLower(strings.TrimSpace(value))
}

var (
    varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
    varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
)

func sanitizeVars(s string) string {
    s0 := varRe1.ReplaceAllString(s, `{{$1}}`)

    s1 := strings.Map(func(r rune) rune {
        if unicode.IsSpace(r) {
            return ' '
        }
        return r
    }, s0)

    return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
        return strings.ToLower(m)
    })
}
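The `sanitizeVars` helper above rewrites `$var` references into the lower-cased `{{var}}` form and flattens every run of whitespace to single spaces. A rough test-style sketch of that behavior (the test itself is hypothetical, not part of this diff):

```go
package core

import "testing"

func TestSanitizeVarsSketch(t *testing.T) {
    // varRe1 turns $User_ID into {{User_ID}}, the strings.Map pass turns
    // the newline into a space, and varRe2 lower-cases the {{...}} token.
    in := "SELECT * FROM users\nWHERE id = $User_ID"
    want := "SELECT * FROM users WHERE id = {{user_id}}"

    if got := sanitizeVars(in); got != want {
        t.Fatalf("sanitizeVars() = %q, want %q", got, want)
    }
}
```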
@ -7,7 +7,6 @@ import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path"
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
@ -35,11 +34,11 @@ type Config struct {
|
||||
Persist bool
|
||||
}
|
||||
|
||||
func New(cpath string, conf Config) (*List, error) {
|
||||
func New(filename string, conf Config) (*List, error) {
|
||||
al := List{}
|
||||
|
||||
if len(cpath) != 0 {
|
||||
fp := path.Join(cpath, "allow.list")
|
||||
if len(filename) != 0 {
|
||||
fp := filename
|
||||
|
||||
if _, err := os.Stat(fp); err == nil {
|
||||
al.filepath = fp
|
||||
@ -73,10 +72,10 @@ func New(cpath string, conf Config) (*List, error) {
|
||||
return nil, errors.New("allow.list not found")
|
||||
}
|
||||
|
||||
if len(cpath) == 0 {
|
||||
if len(filename) == 0 {
|
||||
al.filepath = "./config/allow.list"
|
||||
} else {
|
||||
al.filepath = path.Join(cpath, "allow.list")
|
||||
al.filepath = filename
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -23,17 +23,13 @@ type preparedItem struct {
|
||||
roleArg bool
|
||||
}
|
||||
|
||||
var (
|
||||
prepared map[string]*preparedItem
|
||||
)
|
||||
|
||||
func (sg *SuperGraph) initPrepared() error {
|
||||
ct := context.Background()
|
||||
|
||||
if sg.allowList.IsPersist() {
|
||||
return nil
|
||||
}
|
||||
prepared = make(map[string]*preparedItem)
|
||||
sg.prepared = make(map[string]*preparedItem)
|
||||
|
||||
tx, err := sg.db.BeginTx(ct, nil)
|
||||
if err != nil {
|
||||
@ -100,7 +96,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
||||
var stmts1 []stmt
|
||||
var err error
|
||||
|
||||
if sg.conf.IsABACEnabled() {
|
||||
if sg.abacEnabled {
|
||||
stmts1, err = sg.buildMultiStmt(qb, vars)
|
||||
} else {
|
||||
stmts1, err = sg.buildRoleStmt(qb, vars, "user")
|
||||
@ -117,7 +113,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
||||
return err
|
||||
}
|
||||
|
||||
if sg.conf.IsAnonRoleDefined() {
|
||||
if sg.anonExists {
|
||||
// logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
|
||||
|
||||
stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
|
||||
@ -184,7 +180,7 @@ func (sg *SuperGraph) prepare(ct context.Context, tx *sql.Tx, st []stmt, key str
|
||||
func (sg *SuperGraph) prepareRoleStmt(tx *sql.Tx) error {
|
||||
var err error
|
||||
|
||||
if !sg.conf.IsABACEnabled() {
|
||||
if !sg.abacEnabled {
|
||||
return nil
|
||||
}
|
||||
|
||||
@ -255,11 +251,16 @@ func (sg *SuperGraph) initAllowList() error {
|
||||
var ac allow.Config
|
||||
var err error
|
||||
|
||||
if !sg.conf.Production {
|
||||
if len(sg.conf.AllowListFile) == 0 {
|
||||
sg.conf.UseAllowList = false
|
||||
sg.log.Printf("WRN allow list disabled no file specified")
|
||||
}
|
||||
|
||||
if sg.conf.UseAllowList {
|
||||
ac = allow.Config{CreateIfNotExists: true, Persist: true}
|
||||
}
|
||||
|
||||
sg.allowList, err = allow.New(sg.conf.ConfigPathUsed(), ac)
|
||||
sg.allowList, err = allow.New(sg.conf.AllowListFile, ac)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to initialize allow list: %w", err)
|
||||
}
|
||||
|
@ -6,7 +6,6 @@ import (
|
||||
"net/http"
|
||||
"strings"
|
||||
|
||||
"github.com/dosco/super-graph/config"
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
)
|
||||
|
||||
@ -36,7 +35,7 @@ type resolvFn struct {
|
||||
// }
|
||||
// }
|
||||
|
||||
// func initRemotes(t config.Table) error {
|
||||
// func initRemotes(t Table) error {
|
||||
// h := xxhash.New()
|
||||
|
||||
// for _, r := range t.Remotes {
|
||||
@ -92,7 +91,7 @@ type resolvFn struct {
|
||||
// return nil
|
||||
// }
|
||||
|
||||
func buildFn(r config.Remote) func(http.Header, []byte) ([]byte, error) {
|
||||
func buildFn(r Remote) func(http.Header, []byte) ([]byte, error) {
|
||||
reqURL := strings.Replace(r.URL, "$id", "%s", 1)
|
||||
client := &http.Client{}
|
||||
|
||||
|
@ -1,5 +1,5 @@
|
||||
<template>
|
||||
<div class="shadow bg-white p-4 flex items-start" :class="className">
|
||||
<div class="shadow p-4 flex items-start" :class="className">
|
||||
<slot name="image"></slot>
|
||||
<div class="pl-4">
|
||||
<h2 class="p-0">
|
||||
|
@ -2,33 +2,33 @@
|
||||
<div>
|
||||
<main aria-labelledby="main-title" >
|
||||
<Navbar />
|
||||
<div style="height: 3.6rem"></div>
|
||||
|
||||
<div class="container mx-auto mt-24">
|
||||
<div class="container mx-auto pt-4">
|
||||
<div class="text-center">
|
||||
<div class="text-center text-4xl text-gray-800 leading-tight">
|
||||
<div class="text-center text-3xl md:text-4xl text-black leading-tight font-semibold">
|
||||
Fetch data without code
|
||||
</div>
|
||||
|
||||
<NavLink
|
||||
class="inline-block px-4 py-3 my-8 bg-blue-600 text-blue-100 font-bold rounded"
|
||||
class="inline-block px-4 py-3 my-8 bg-blue-600 text-white font-bold rounded"
|
||||
:item="actionLink"
|
||||
/>
|
||||
|
||||
<a
|
||||
class="px-4 py-3 my-8 border-2 border-gray-500 text-gray-600 font-bold rounded"
|
||||
class="px-4 py-3 my-8 border-2 border-blue-600 text-blue-600 font-bold rounded"
|
||||
href="https://github.com/dosco/super-graph"
|
||||
target="_blank"
|
||||
>Github</a>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="container mx-auto mb-8">
|
||||
<div class="flex flex-wrap">
|
||||
<div class="w-100 md:w-1/2 bg-indigo-300 text-indigo-800 text-lg p-4">
|
||||
<div class="text-center text-2xl font-bold pb-2">Before, struggle with SQL</div>
|
||||
<pre>
|
||||
|
||||
<div class="container mx-auto mb-8 mt-0 md:mt-20 bg-green-100">
|
||||
<div class="flex flex-wrap">
|
||||
<div class="w-100 md:w-1/2 border border-green-500 text-gray-6 00 text-sm md:text-lg p-6">
|
||||
<div class="text-xl font-bold pb-4">Before, struggle with SQL</div>
|
||||
<pre>
|
||||
type User struct {
|
||||
gorm.Model
|
||||
Profile Profile
|
||||
@ -50,8 +50,9 @@ db.Model(&user).
|
||||
and more ...
|
||||
</pre>
|
||||
</div>
|
||||
<div class="w-100 md:w-1/2 bg-green-300 text-black text-lg p-4">
|
||||
<div class="text-center text-2xl font-bold pb-2">With Super Graph, just ask.</div>
|
||||
|
||||
<div class="w-100 md:w-1/2 border border-l md:border-l-0 border-green-500 text-blue-900 text-sm md:text-lg p-6">
|
||||
<div class="text-xl font-bold pb-4">With Super Graph, just ask.</div>
|
||||
<pre>
|
||||
query {
|
||||
user(id: 5) {
|
||||
@ -59,26 +60,24 @@ query {
|
||||
first_name
|
||||
last_name
|
||||
picture_url
|
||||
}
|
||||
posts(first: 20, order_by: { score: desc }) {
|
||||
slug
|
||||
title
|
||||
created_at
|
||||
cached_votes_total
|
||||
vote(where: { user_id: { eq: $user_id } }) {
|
||||
id
|
||||
posts(first: 20, order_by: { score: desc }) {
|
||||
slug
|
||||
title
|
||||
created_at
|
||||
votes_total
|
||||
votes { created_at }
|
||||
author { id name }
|
||||
tags { id name }
|
||||
}
|
||||
author { id name }
|
||||
tags { id name }
|
||||
posts_cursor
|
||||
}
|
||||
posts_cursor
|
||||
}
|
||||
</pre>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div>
|
||||
<div class="mt-0 md:mt-20">
|
||||
<div
|
||||
class="flex flex-wrap mx-2 md:mx-20"
|
||||
v-if="data.features && data.features.length"
|
||||
@ -89,24 +88,21 @@ query {
|
||||
:key="index"
|
||||
>
|
||||
<div class="p-8">
|
||||
<h2 class="md:text-xl text-blue-800 font-medium border-0 mb-1">{{ feature.title }}</h2>
|
||||
<p class="md:text-xl text-gray-700 leading-snug">{{ feature.details }}</p>
|
||||
<h2 class="text-lg uppercase border-0">{{ feature.title }}</h2>
|
||||
<div class="text-xl text-gray-900 leading-snug">{{ feature.details }}</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="bg-gray-100 mt-10">
|
||||
<div class="container mx-auto px-10 md:px-0 py-32">
|
||||
<div class="pt-0 md:pt-20">
|
||||
<div class="container mx-auto p-10">
|
||||
|
||||
<div class="pb-8 hidden md:flex justify-center">
|
||||
<div class="flex justify-center pb-20">
|
||||
<img src="arch-basic.svg">
|
||||
</div>
|
||||
|
||||
<h1 class="uppercase font-semibold text-xl text-blue-800 text-center mb-4">
|
||||
What is Super Graph?
|
||||
</h1>
|
||||
<div class="text-2xl md:text-3xl">
|
||||
Super Graph is a library and service that fetches data from any Postgres database using just GraphQL. No more struggling with ORMs and SQL to wrangle data out of the database. No more having to figure out the right joins or making inefficient queries. However complex the GraphQL, Super Graph will always generate a single efficient SQL query. The goal is to save you time and money so you can focus on your app's core value.
</div>
@ -114,17 +110,7 @@ query {
</div>


<div class="container mx-auto flex flex-wrap">
<div class="md:w-1/2">
<img src="/graphql.png">
</div>

<div class="md:w-1/2">
<img src="/json.png">
</div>
</div>

<div class="mt-10 py-10 md:py-20">
<div class="pt-20">
<div class="container mx-auto px-10 md:px-0">
<h1 class="uppercase font-semibold text-2xl text-blue-800 text-center">
Try Super Graph
@ -133,20 +119,18 @@ query {
<h1 class="uppercase font-semibold text-lg text-gray-800">
Deploy as a service using docker
</h1>
<div class="bg-gray-800 text-indigo-300 p-4 rounded">
<div class="p-4 rounded bg-black text-white">
<pre>$ git clone https://github.com/dosco/super-graph && cd super-graph && make install</pre>
<pre>$ super-graph new blog; cd blog</pre>
<pre>$ docker-compose run blog_api ./super-graph db:setup</pre>
<pre>$ docker-compose up</pre>
</div>

<div class="border-t mt-4 pb-4"></div>

<h1 class="uppercase font-semibold text-lg text-gray-800">
Or use it with your own code
</h1>
<div class="text-md">
<pre class="bg-gray-800 text-indigo-300 p-4 rounded">
<pre class="p-4 rounded bg-black text-white">
package main

import (
@ -194,7 +178,7 @@ func main() {
</div>
</div>

<div class="bg-gray-100 mt-10">
<div class="pt-0 md:pt-20">
<div class="container mx-auto px-10 md:px-0 py-32">
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
The story of {{ data.heroText }}
@ -245,7 +229,7 @@ func main() {
</div>
-->

<div class="border-t py-10">
<div class="pt-0 md:pt-20">
<div class="block md:hidden w-100">
<iframe src='https://www.youtube.com/embed/MfPL2A-DAJk' frameborder='0' allowfullscreen style="width: 100%; height: 250px;">
</iframe>
@ -267,7 +251,101 @@ func main() {
</div>
</div>

<div class="bg-gray-200 mt-10">
<div class="container mx-auto pt-0 md:pt-20">
<div class="flex flex-wrap bg-green-100">
<div class="w-100 md:w-1/2 border border-green-500 text-gray-600 text-sm md:text-lg p-6">
<div class="text-xl font-bold pb-4">No more joins joins, json, orms, just use GraphQL. Fetch all the data want in the structure you need.</div>
|
||||
<pre>
query {
thread {
slug
title
published
createdAt : created_at
totalVotes : cached_votes_total
totalPosts : cached_posts_total
vote : thread_vote(where: { user_id: { eq: $user_id } }) {
created_at
}
topics {
slug
name
}
author : me {
slug
}
posts(first: 1, order_by: { score: desc }) {
slug
body
published
createdAt : created_at
totalVotes : cached_votes_total
totalComments : cached_comments_total
vote {
created_at
}
author : user {
slug
firstName : first_name
lastName : last_name
}
}
posts_cursor
}
}
</pre>
</div>

<div class="w-100 md:w-1/2 border border-l md:border-l-0 border-green-500 text-blue-900 text-sm md:text-lg p-6">
<div class="text-xl font-bold pb-4">Instant results using a single highly optimized SQL. It's just that simple.</div>
|
||||
<pre>
{
"data": {
"thread": {
"slug": "eveniet-ex-24",
"vote": null,
"posts": [
{
"body": "Dolor laborum harum sed sit est ducimus temporibus velit non nobis repudiandae nobis suscipit commodi voluptatem debitis sed voluptas sequi officia.",
"slug": "illum-in-voluptas-1418",
"vote": null,
"author": {
"slug": "sigurd-kemmer",
"lastName": "Effertz",
"firstName": "Brandt"
},
"createdAt": "2020-04-07T04:22:42.115874+00:00",
"published": true,
"totalVotes": 0,
"totalComments": 2
}
],
"title": "In aut qui deleniti quia dolore quasi porro tenetur voluptatem ut adita alias fugit explicabo.",
"author": null,
"topics": [
{
"name": "CloudRun",
"slug": "cloud-run"
},
{
"name": "Postgres",
"slug": "postgres"
}
],
"createdAt": "2020-04-07T04:22:38.099482+00:00",
"published": true,
"totalPosts": 24,
"totalVotes": 0,
"posts_cursor": "mpeBl6L+QfJHc3cmLkLDj9pOdEZYTt5KQtLsazG3TLITB3hJhg=="
}
}
}
</pre>
</div>
</div>
</div>
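The query/result pair above is produced by a single library call. For context, here is a minimal sketch of running that kind of query through the Go `core` package; a sketch only, since the API names follow the repo's README and the nil-config fallback and exact signatures are assumptions:

```golang
package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib"
)

const query = `
query {
  thread {
    slug
    title
    posts(first: 1, order_by: { score: desc }) {
      slug
      body
    }
    posts_cursor
  }
}`

func main() {
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	// Assumption: a nil config falls back to defaults; in real use pass a
	// *core.Config (for example one loaded with core.ReadInConfig).
	sg, err := core.NewSuperGraph(nil, db)
	if err != nil {
		log.Fatal(err)
	}

	// However deeply nested the GraphQL, it compiles down to one SQL
	// statement, so this call is a single round trip to Postgres.
	res, err := sg.GraphQL(context.Background(), query, nil)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(string(res.Data))
}
```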

<div class="pt-0 md:pt-20">
<div class="container mx-auto px-10 md:px-0 py-32">
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
Build Secure Apps
@ -292,8 +370,8 @@ func main() {
</div>
</div>

<div class="">
<div class="container mx-auto px-10 md:px-0 py-32">
<div class="pt-0 md:py-20">
<div class="container mx-auto">
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
More Features
</h1>

@ -1,6 +1,9 @@
@tailwind base;

@css {
body, .navbar, .navbar .links {
@apply bg-white text-black border-0 !important;
}
h1 {
@apply font-semibold text-3xl border-0 py-4
}