Make go get install work.

Vikram Rangnekar
2020-04-16 00:26:32 -04:00
parent 40c99e9ef3
commit 09d6460a13
99 changed files with 240 additions and 73 deletions

internal/scripts/bench.sh Executable file

@@ -0,0 +1,7 @@
#!/bin/sh
if brew ls --versions wrk > /dev/null; then
wrk -t12 -c400 -d30s --timeout 10s --script=query.lua --latency http://localhost:8080/api/v1/graphql
else
brew install wrk
wrk -t12 -c400 -d30s --timeout 10s --script=query.lua --latency http://localhost:8080/api/v1/graphql
fi

internal/scripts/query.lua Normal file

@@ -0,0 +1,3 @@
wrk.method = "POST"
wrk.headers["Content-Type"] = "application/json"
wrk.body = [[{"query":" { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } "}]]
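
For reference, the benchmark's POST request can be reproduced outside wrk; a minimal stand-alone Go sketch (not part of this commit, assuming the service is up on localhost:8080):

package main

import (
    "fmt"
    "io/ioutil"
    "net/http"
    "strings"
)

func main() {
    body := `{"query":"{ products(limit: 3) { id name price } }"}`
    res, err := http.Post("http://localhost:8080/api/v1/graphql",
        "application/json", strings.NewReader(body))
    if err != nil {
        panic(err)
    }
    defer res.Body.Close()
    b, _ := ioutil.ReadAll(res.Body)
    fmt.Println(res.Status, string(b)) // raw GraphQL response
}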

internal/scripts/start.sh Executable file

@@ -0,0 +1,13 @@
#!/bin/sh
if [ "$1" = "secrets" ]
then
./sops --config ./config "${@:2}"
exit 0
fi
if test -f "./config/$SECRETS_FILE"
then
./sops --config ./config exec-env "./config/$SECRETS_FILE" "$*"
else
"$@"
fi

internal/serv/.gitignore vendored Normal file

@@ -0,0 +1 @@
static.go

internal/serv/actions.go Normal file

@@ -0,0 +1,40 @@
package serv
import (
"fmt"
"net/http"
)
type actionFn func(w http.ResponseWriter, r *http.Request) error
func newAction(a *Action) (http.Handler, error) {
var fn actionFn
var err error
if len(a.SQL) != 0 {
fn, err = newSQLAction(a)
} else {
return nil, fmt.Errorf("invalid config for action '%s'", a.Name)
}
if err != nil {
return nil, err
}
httpFn := func(w http.ResponseWriter, r *http.Request) {
if err := fn(w, r); err != nil {
renderErr(w, err, nil)
}
}
return http.HandlerFunc(httpFn), nil
}
func newSQLAction(a *Action) (actionFn, error) {
fn := func(w http.ResponseWriter, r *http.Request) error {
_, err := db.ExecContext(r.Context(), a.SQL)
return err
}
return fn, nil
}
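
A handler built by newAction still has to be mounted somewhere; a hypothetical sketch of that wiring (the route prefix is an assumption, not something this file defines):

func mountActions(mux *http.ServeMux, actions []Action) error {
    for i := range actions {
        h, err := newAction(&actions[i])
        if err != nil {
            return err
        }
        // e.g. POST /api/v1/actions/refresh_views
        mux.Handle("/api/v1/actions/"+actions[i].Name, h)
    }
    return nil
}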

internal/serv/api.go Normal file

@@ -0,0 +1,111 @@
package serv
import (
"time"
"github.com/dosco/super-graph/core"
"github.com/dosco/super-graph/internal/serv/internal/auth"
"github.com/spf13/viper"
)
const (
LogLevelNone int = iota
LogLevelInfo
LogLevelWarn
LogLevelError
LogLevelDebug
)
type Core = core.Config
// Config struct holds the Super Graph config values
type Config struct {
Core `mapstructure:",squash"`
Serv `mapstructure:",squash"`
cpath string
vi *viper.Viper
}
// Serv struct contains config values used by the Super Graph service
type Serv struct {
AppName string `mapstructure:"app_name"`
Production bool
LogLevel string `mapstructure:"log_level"`
HostPort string `mapstructure:"host_port"`
Host string
Port string
HTTPGZip bool `mapstructure:"http_compress"`
WebUI bool `mapstructure:"web_ui"`
EnableTracing bool `mapstructure:"enable_tracing"`
WatchAndReload bool `mapstructure:"reload_on_config_change"`
AuthFailBlock bool `mapstructure:"auth_fail_block"`
SeedFile string `mapstructure:"seed_file"`
MigrationsPath string `mapstructure:"migrations_path"`
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
DebugCORS bool `mapstructure:"cors_debug"`
Auth auth.Auth
Auths []auth.Auth
DB struct {
Type string
Host string
Port uint16
DBName string
User string
Password string
Schema string
PoolSize int32 `mapstructure:"pool_size"`
MaxRetries int `mapstructure:"max_retries"`
PingTimeout time.Duration `mapstructure:"ping_timeout"`
EnableTLS bool `mapstructure:"enable_tls"`
ServerName string `mapstructure:"server_name"`
ServerCert string `mapstructure:"server_cert"`
ClientCert string `mapstructure:"client_cert"`
ClientKey string `mapstructure:"client_key"`
} `mapstructure:"database"`
Actions []Action
}
// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
Name string
Type string
Cookie string
CredsInHeader bool `mapstructure:"creds_in_header"`
Rails struct {
Version string
SecretKeyBase string `mapstructure:"secret_key_base"`
URL string
Password string
MaxIdle int `mapstructure:"max_idle"`
MaxActive int `mapstructure:"max_active"`
Salt string
SignSalt string `mapstructure:"sign_salt"`
AuthSalt string `mapstructure:"auth_salt"`
}
JWT struct {
Provider string
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
}
Header struct {
Name string
Value string
Exists bool
}
}
// Action struct contains config values for a Super Graph service action
type Action struct {
Name string
SQL string
AuthName string `mapstructure:"auth_name"`
}

internal/serv/cmd.go Normal file

@@ -0,0 +1,190 @@
package serv
import (
"database/sql"
"fmt"
_log "log"
"os"
"runtime"
"github.com/spf13/cobra"
"go.uber.org/zap"
)
//go:generate rice embed-go
const (
serverName = "Super Graph"
)
var (
// These variables are set using -ldflags
version string
gitBranch string
lastCommitSHA string
lastCommitTime string
)
var (
log *_log.Logger // logger
zlog *zap.Logger // fast logger
logLevel int // log level
conf *Config // parsed config
confPath string // path to the config file
db *sql.DB // database connection pool
secretKey [32]byte // encryption key
)
func Cmd() {
log = _log.New(os.Stdout, "", 0)
zlog = zap.NewExample()
rootCmd := &cobra.Command{
Use: "super-graph",
Short: BuildDetails(),
}
rootCmd.AddCommand(&cobra.Command{
Use: "serv",
Short: "Run the super-graph service",
Run: cmdServ,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:create",
Short: "Create database",
Run: cmdDBCreate,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:drop",
Short: "Drop database",
Run: cmdDBDrop,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:seed",
Short: "Run the seed script to seed the database",
Run: cmdDBSeed,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:migrate",
Short: "Migrate the database",
Long: `Migrate the database to destination migration version.
Destination migration version can be one of the following value types:
Migrate to the most recent migration.
e.g. db:migrate up
Rollback the most recent migration.
e.g. db:migrate down
Migrate to a specific migration.
e.g. db:migrate 42
Migrate forward N steps.
e.g. db:migrate +3
Migrate backward N steps.
e.g. db:migrate -2
Redo previous N steps (migrate backward N steps then forward N steps).
e.g. db:migrate -+1
`,
Run: cmdDBMigrate,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:status",
Short: "Print current migration status",
Run: cmdDBStatus,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:new NAME",
Short: "Generate a new migration",
Long: "Generate a new migration with the next sequence number and provided name",
Run: cmdDBNew,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:setup",
Short: "Setup database",
Long: "This command will create, migrate and seed the database",
Run: cmdDBSetup,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:reset",
Short: "Reset database",
Long: "This command will drop, create, migrate and seed the database (won't run in production)",
Run: cmdDBReset,
})
rootCmd.AddCommand(&cobra.Command{
Use: "new APP-NAME",
Short: "Create a new application",
Long: "Generate all the required files to start on a new Super Graph app",
Run: cmdNew,
})
// rootCmd.AddCommand(&cobra.Command{
// Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
// Short: "Dump config to file",
// Long: "Dump current config to a file in the selected format",
// Run: cmdConfDump,
// })
rootCmd.AddCommand(&cobra.Command{
Use: "version",
Short: "Super Graph binary version information",
Run: cmdVersion,
})
rootCmd.PersistentFlags().StringVar(&confPath,
"path", "./config", "path to config files")
if err := rootCmd.Execute(); err != nil {
log.Fatalf("ERR %s", err)
}
}
func cmdVersion(cmd *cobra.Command, args []string) {
fmt.Printf("%s\n", BuildDetails())
}
func BuildDetails() string {
if len(version) == 0 {
return fmt.Sprintf(`
Super Graph (unknown version)
For documentation, visit https://supergraph.dev
To build with version information please use the Makefile
> git clone https://github.com/dosco/super-graph
> cd super-graph && make install
Licensed under the Apache Public License 2.0
Copyright 2020, Vikram Rangnekar
`)
}
return fmt.Sprintf(`
Super Graph %v
For documentation, visit https://supergraph.dev
Commit SHA-1 : %v
Commit timestamp : %v
Branch : %v
Go version : %v
Licensed under the Apache Public License 2.0
Copyright 2020, Vikram Rangnekar
`,
version,
lastCommitSHA,
lastCommitTime,
gitBranch,
runtime.Version())
}

internal/serv/cmd_conf.go Normal file

@@ -0,0 +1,21 @@
package serv
// func cmdConfDump(cmd *cobra.Command, args []string) {
// if len(args) != 1 {
// cmd.Help() //nolint: errcheck
// os.Exit(1)
// }
// fname := fmt.Sprintf("%s.%s", config.GetConfigName(), args[0])
// conf, err := initConf()
// if err != nil {
// log.Fatalf("ERR failed to read config: %s", err)
// }
// if err := conf.WriteConfigAs(fname); err != nil {
// log.Fatalf("ERR failed to write config: %s", err)
// }
// log.Printf("INF config dumped to ./%s", fname)
// }

internal/serv/cmd_migrate.go Normal file

@@ -0,0 +1,317 @@
package serv
import (
"fmt"
"os"
"path"
"path/filepath"
"strconv"
"strings"
"time"
"github.com/dosco/super-graph/internal/serv/internal/migrate"
"github.com/spf13/cobra"
)
var newMigrationText = `-- Write your migrate up statements here
---- create above / drop below ----
-- Write your migrate down statements here. If this migration is irreversible
-- Then delete the separator line above.
`
func cmdDBSetup(cmd *cobra.Command, args []string) {
initConfOnce()
cmdDBCreate(cmd, []string{})
cmdDBMigrate(cmd, []string{"up"})
sfile := path.Join(conf.cpath, conf.SeedFile)
_, err := os.Stat(sfile)
if err == nil {
cmdDBSeed(cmd, []string{})
return
}
if !os.IsNotExist(err) {
log.Fatalf("ERR unable to check if '%s' exists: %s", sfile, err)
}
log.Printf("WRN failed to read seed file '%s'", sfile)
}
func cmdDBReset(cmd *cobra.Command, args []string) {
initConfOnce()
if conf.Production {
log.Fatalln("ERR db:reset does not work in production")
}
cmdDBDrop(cmd, []string{})
cmdDBSetup(cmd, []string{})
}
func cmdDBCreate(cmd *cobra.Command, args []string) {
initConfOnce()
db, err := initDB(conf, false)
if err != nil {
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer db.Close()
sql := fmt.Sprintf(`CREATE DATABASE "%s"`, conf.DB.DBName)
_, err = db.Exec(sql)
if err != nil {
log.Fatalf("ERR failed to create database: %s", err)
}
log.Printf("INF created database '%s'", conf.DB.DBName)
}
func cmdDBDrop(cmd *cobra.Command, args []string) {
initConfOnce()
db, err := initDB(conf, false)
if err != nil {
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer db.Close()
sql := fmt.Sprintf(`DROP DATABASE IF EXISTS "%s"`, conf.DB.DBName)
_, err = db.Exec(sql)
if err != nil {
log.Fatalf("ERR failed to drop database: %s", err)
}
log.Printf("INF dropped database '%s'", conf.DB.DBName)
}
func cmdDBNew(cmd *cobra.Command, args []string) {
if len(args) != 1 {
cmd.Help() //nolint: errcheck
os.Exit(1)
}
initConfOnce()
name := args[0]
m, err := migrate.FindMigrations(conf.MigrationsPath)
if err != nil {
log.Fatalf("ERR error loading migrations: %s", err)
}
mname := fmt.Sprintf("%d_%s.sql", len(m), name)
// Write new migration
mpath := filepath.Join(conf.MigrationsPath, mname)
mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
if err != nil {
log.Fatalf("ERR %s", err)
}
defer mfile.Close()
_, err = mfile.WriteString(newMigrationText)
if err != nil {
log.Fatalf("ERR %s", err)
}
log.Printf("INR created migration '%s'", mpath)
}
func cmdDBMigrate(cmd *cobra.Command, args []string) {
if len(args) == 0 {
cmd.Help() //nolint: errcheck
os.Exit(1)
}
initConfOnce()
dest := args[0]
conn, err := initDB(conf, true)
if err != nil {
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close()
m, err := migrate.NewMigrator(conn, "schema_version")
if err != nil {
log.Fatalf("ERR failed to initializing migrator: %s", err)
}
m.Data = getMigrationVars()
err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
if err != nil {
log.Fatalf("ERR failed to load migrations: %s", err)
}
if len(m.Migrations) == 0 {
log.Fatalf("ERR no migrations found")
}
m.OnStart = func(sequence int32, name, direction, sql string) {
log.Printf("INF %s executing %s %s\n%s\n\n",
time.Now().Format("2006-01-02 15:04:05"), name, direction, sql)
}
var currentVersion int32
currentVersion, err = m.GetCurrentVersion()
if err != nil {
fmt.Fprintf(os.Stderr, "Unable to get current version:\n %v\n", err)
os.Exit(1)
}
mustParseDestination := func(d string) int32 {
var n int64
n, err = strconv.ParseInt(d, 10, 32)
if err != nil {
log.Fatalf("ERR invalid destination: %s", err)
}
return int32(n)
}
if dest == "up" {
err = m.Migrate()
} else if dest == "down" {
err = m.MigrateTo(currentVersion - 1)
} else if len(dest) >= 3 && dest[0:2] == "-+" {
err = m.MigrateTo(currentVersion - mustParseDestination(dest[2:]))
if err == nil {
err = m.MigrateTo(currentVersion)
}
} else if len(dest) >= 2 && dest[0] == '-' {
err = m.MigrateTo(currentVersion - mustParseDestination(dest[1:]))
} else if len(dest) >= 2 && dest[0] == '+' {
err = m.MigrateTo(currentVersion + mustParseDestination(dest[1:]))
} else if len(dest) > 0 && dest[0] >= '0' && dest[0] <= '9' {
// bare version number, e.g. db:migrate 42 (documented in the command help above)
err = m.MigrateTo(mustParseDestination(dest))
} else {
cmd.Help() //nolint: errcheck
os.Exit(1)
}
if err != nil {
log.Fatalf("ERR %s", err)
// if err, ok := err.(m.MigrationPgError); ok {
// if err.Detail != "" {
// log.Fatalf("ERR %s", err.Detail)
// }
// if err.Position != 0 {
// ele, err := ExtractErrorLine(err.Sql, int(err.Position))
// if err != nil {
// log.Fatalf("ERR %s", err)
// }
// log.Fatalf("INF line %d, %s%s", ele.LineNum, ele.Text)
// }
// }
}
log.Println("INF migration done")
}
func cmdDBStatus(cmd *cobra.Command, args []string) {
initConfOnce()
db, err := initDB(conf, true)
if err != nil {
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer db.Close()
m, err := migrate.NewMigrator(db, "schema_version")
if err != nil {
log.Fatalf("ERR failed to initialize migrator: %s", err)
}
m.Data = getMigrationVars()
err = m.LoadMigrations(conf.MigrationsPath)
if err != nil {
log.Fatalf("ERR failed to load migrations: %s", err)
}
if len(m.Migrations) == 0 {
log.Fatalf("ERR no migrations found")
}
mver, err := m.GetCurrentVersion()
if err != nil {
log.Fatalf("ERR failed to retrieve migration: %s", err)
}
var status string
behindCount := len(m.Migrations) - int(mver)
if behindCount == 0 {
status = "up to date"
} else {
status = "migration(s) pending"
}
log.Printf("INF status: %s, version: %d of %d, host: %s, database: %s",
status, mver, len(m.Migrations), conf.DB.Host, conf.DB.DBName)
}
type ErrorLineExtract struct {
LineNum int // Line number starting with 1
ColumnNum int // Column number starting with 1
Text string // Text of the line without a new line character.
}
// ExtractErrorLine takes source and character position extracts the line
// number, column number, and the line of text.
//
// The first character is position 1.
func ExtractErrorLine(source string, position int) (ErrorLineExtract, error) {
ele := ErrorLineExtract{LineNum: 1}
if position > len(source) {
return ele, fmt.Errorf("position (%d) is greater than source length (%d)", position, len(source))
}
lines := strings.SplitAfter(source, "\n")
for _, ele.Text = range lines {
if position-len(ele.Text) < 1 {
ele.ColumnNum = position
break
}
ele.LineNum += 1
position -= len(ele.Text)
}
ele.Text = strings.TrimSuffix(ele.Text, "\n")
return ele, nil
}
func getMigrationVars() map[string]interface{} {
return map[string]interface{}{
"app_name": strings.Title(conf.AppName),
"app_name_slug": strings.ToLower(strings.Replace(conf.AppName, " ", "_", -1)),
"env": strings.ToLower(os.Getenv("GO_ENV")),
}
}
func initConfOnce() {
var err error
if conf != nil {
return
}
conf, err = initConf()
if err != nil {
log.Fatalf("ERR failed to read config: %s", err)
}
}
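
Illustrative only: how the destination argument maps onto MigrateTo calls, assuming the current version is 5 and 8 migrations are loaded:

// db:migrate up    -> m.MigrateTo(8)                       // apply all pending
// db:migrate down  -> m.MigrateTo(4)                       // roll back one
// db:migrate 7     -> m.MigrateTo(7)                       // jump to version 7
// db:migrate +2    -> m.MigrateTo(7)                       // forward two steps
// db:migrate -2    -> m.MigrateTo(3)                       // back two steps
// db:migrate -+1   -> m.MigrateTo(4), then m.MigrateTo(5)  // redo the last one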

internal/serv/cmd_new.go Normal file

@@ -0,0 +1,150 @@
package serv
import (
"bytes"
"fmt"
"io"
"io/ioutil"
"os"
"path"
"strings"
rice "github.com/GeertJohan/go.rice"
"github.com/spf13/cobra"
"github.com/valyala/fasttemplate"
)
func cmdNew(cmd *cobra.Command, args []string) {
if len(args) != 1 {
cmd.Help() //nolint: errcheck
os.Exit(1)
}
tmpl := newTempl(map[string]string{
"app_name": strings.Title(strings.Join(args, " ")),
"app_name_slug": strings.ToLower(strings.Join(args, "_")),
})
// Create app folder and add relevant files
name := args[0]
appPath := path.Join("./", name)
ifNotExists(appPath, func(p string) error {
return os.Mkdir(p, os.ModePerm)
})
ifNotExists(path.Join(appPath, "Dockerfile"), func(p string) error {
if v, err := tmpl.get("Dockerfile"); err == nil {
return ioutil.WriteFile(p, v, 0644)
} else {
return err
}
})
ifNotExists(path.Join(appPath, "docker-compose.yml"), func(p string) error {
if v, err := tmpl.get("docker-compose.yml"); err == nil {
return ioutil.WriteFile(p, v, 0644)
} else {
return err
}
})
// Create app config folder and add relevant files
appConfigPath := path.Join(appPath, "config")
ifNotExists(appConfigPath, func(p string) error {
return os.Mkdir(p, os.ModePerm)
})
ifNotExists(path.Join(appConfigPath, "dev.yml"), func(p string) error {
if v, err := tmpl.get("dev.yml"); err == nil {
return ioutil.WriteFile(p, v, 0644)
} else {
return err
}
})
ifNotExists(path.Join(appConfigPath, "prod.yml"), func(p string) error {
if v, err := tmpl.get("prod.yml"); err == nil {
return ioutil.WriteFile(p, v, 0644)
} else {
return err
}
})
ifNotExists(path.Join(appConfigPath, "seed.js"), func(p string) error {
if v, err := tmpl.get("seed.js"); err == nil {
return ioutil.WriteFile(p, v, 0644)
} else {
return err
}
})
// Create app migrations folder and add relevant files
appMigrationsPath := path.Join(appConfigPath, "migrations")
ifNotExists(appMigrationsPath, func(p string) error {
return os.Mkdir(p, os.ModePerm)
})
ifNotExists(path.Join(appMigrationsPath, "0_init.sql"), func(p string) error {
if v, err := tmpl.get("0_init.sql"); err == nil {
return ioutil.WriteFile(p, v, 0644)
} else {
return err
}
})
log.Printf("INR app '%s' initialized", name)
}
type Templ struct {
*rice.Box
data map[string]string
}
func newTempl(data map[string]string) *Templ {
return &Templ{rice.MustFindBox("./tmpl"), data}
}
func (t *Templ) get(name string) ([]byte, error) {
v := t.MustString(name)
b := bytes.Buffer{}
tmpl := fasttemplate.New(v, "{%", "%}")
_, err := tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
if val, ok := t.data[strings.TrimSpace(tag)]; ok {
return w.Write([]byte(val))
}
return 0, fmt.Errorf("unknown template variable '%s'", tag)
})
if err != nil {
return nil, err
}
return b.Bytes(), nil
}
func ifNotExists(filePath string, doFn func(string) error) {
_, err := os.Stat(filePath)
if err == nil {
log.Printf("ERR create skipped '%s' exists", filePath)
return
}
if !os.IsNotExist(err) {
log.Fatalf("ERR unable to check if '%s' exists", filePath)
}
err = doFn(filePath)
if err != nil {
log.Fatalf("ERR unable to create '%s'", filePath)
}
log.Printf("INR created '%s'", filePath)
}
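
The template-to-file closures in cmdNew all follow the same shape; a possible consolidation (a sketch, not part of this commit):

func writeTmplFile(tmpl *Templ, name, dst string) {
    ifNotExists(dst, func(p string) error {
        v, err := tmpl.get(name)
        if err != nil {
            return err
        }
        return ioutil.WriteFile(p, v, 0644)
    })
}

Each ifNotExists call above then collapses to writeTmplFile(tmpl, "Dockerfile", path.Join(appPath, "Dockerfile")) and so on.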

internal/serv/cmd_seed.go Normal file

@@ -0,0 +1,413 @@
package serv
import (
"context"
"encoding/csv"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"math/rand"
"os"
"path"
"strconv"
"strings"
"github.com/brianvoe/gofakeit"
"github.com/dop251/goja"
"github.com/dosco/super-graph/core"
"github.com/spf13/cobra"
)
func cmdDBSeed(cmd *cobra.Command, args []string) {
var err error
if conf, err = initConf(); err != nil {
log.Fatalf("ERR failed to read config: %s", err)
}
conf.Production = false
db, err = initDB(conf, true)
if err != nil {
log.Fatalf("ERR failed to connect to database: %s", err)
}
sfile := path.Join(conf.cpath, conf.SeedFile)
b, err := ioutil.ReadFile(sfile)
if err != nil {
log.Fatalf("ERR failed to read seed file %s: %s", sfile, err)
}
sg, err = core.NewSuperGraph(&conf.Core, db)
if err != nil {
log.Fatalf("ERR failed to initialize Super Graph: %s", err)
}
graphQLFn := func(query string, data interface{}, opt map[string]string) map[string]interface{} {
return graphQLFunc(sg, query, data, opt)
}
vm := goja.New()
vm.Set("graphql", graphQLFn)
//vm.Set("import_csv", importCSV)
console := vm.NewObject()
console.Set("log", logFunc) //nolint: errcheck
vm.Set("console", console)
fake := vm.NewObject()
setFakeFuncs(fake)
vm.Set("fake", fake)
_, err = vm.RunScript("seed.js", string(b))
if err != nil {
log.Fatalf("ERR failed to execute script: %s", err)
}
log.Println("INF seed script done")
}
// func runFunc(call goja.FunctionCall) {
func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
ct := context.Background()
if v, ok := opt["user_id"]; ok && len(v) != 0 {
ct = context.WithValue(ct, core.UserIDKey, v)
}
// var role string
// if v, ok := opt["role"]; ok && len(v) != 0 {
// role = v
// } else {
// role = "user"
// }
var vars []byte
var err error
if vars, err = json.Marshal(data); err != nil {
log.Fatalf("ERR %s", err)
}
res, err := sg.GraphQL(ct, query, vars)
if err != nil {
log.Fatalf("ERR %s", err)
}
val := make(map[string]interface{})
if err = json.Unmarshal(res.Data, &val); err != nil {
log.Fatalf("ERR %s", err)
}
return val
}
type csvSource struct {
rows [][]string
i int
}
func NewCSVSource(filename string) (*csvSource, error) {
f, err := os.Open(filename)
if err != nil {
return nil, err
}
defer f.Close()
r := csv.NewReader(f)
rows, err := r.ReadAll()
if err != nil {
return nil, err
}
return &csvSource{rows: rows}, nil
}
func (c *csvSource) Next() bool {
return c.i < len(c.rows)
}
func (c *csvSource) Values() ([]interface{}, error) {
var vals []interface{}
var err error
for _, v := range c.rows[c.i] {
switch {
case len(v) == 0:
vals = append(vals, "")
case isDigit(v):
var n int
if n, err = strconv.Atoi(v); err == nil {
vals = append(vals, n)
}
case strings.EqualFold(v, "true") || strings.EqualFold(v, "false"):
var b bool
if b, err = strconv.ParseBool(v); err == nil {
vals = append(vals, b)
}
default:
vals = append(vals, v)
}
if err != nil {
return nil, fmt.Errorf("%w (line no %d)", err, c.i)
}
}
c.i++
return vals, nil
}
func isDigit(v string) bool {
for i := range v {
if v[i] < '0' || v[i] > '9' {
return false
}
}
return true
}
func (c *csvSource) Err() error {
return nil
}
// func importCSV(table, filename string) int64 {
// if filename[0] != '/' {
// filename = path.Join(conf.ConfigPathUsed(), filename)
// }
// s, err := NewCSVSource(filename)
// if err != nil {
// log.Fatalf("ERR %s", err)
// }
// var cols []string
// colval, _ := s.Values()
// for _, c := range colval {
// cols = append(cols, c.(string))
// }
// n, err := db.Exec(fmt.Sprintf("COPY %s FROM STDIN WITH "),
// cols,
// s)
// if err != nil {
// err = fmt.Errorf("%w (line no %d)", err, s.i)
// log.Fatalf("ERR %s", err)
// }
// return n
// }
//nolint: errcheck
func logFunc(args ...interface{}) {
for _, arg := range args {
if _, ok := arg.(map[string]interface{}); ok {
j, err := json.MarshalIndent(arg, "", " ")
if err != nil {
continue
}
os.Stdout.Write(j)
} else {
io.WriteString(os.Stdout, fmt.Sprintf("%v", arg))
}
io.WriteString(os.Stdout, "\n")
}
}
func avatarURL(size int) string {
if size == 0 {
size = 200
}
return fmt.Sprintf("https://i.pravatar.cc/%d?%d", size, rand.Intn(5000))
}
func imageURL(width int, height int) string {
return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
}
//nolint: errcheck
func setFakeFuncs(f *goja.Object) {
gofakeit.Seed(0)
// Person
f.Set("person", gofakeit.Person)
f.Set("name", gofakeit.Name)
f.Set("name_prefix", gofakeit.NamePrefix)
f.Set("name_suffix", gofakeit.NameSuffix)
f.Set("first_name", gofakeit.FirstName)
f.Set("last_name", gofakeit.LastName)
f.Set("gender", gofakeit.Gender)
f.Set("ssn", gofakeit.SSN)
f.Set("contact", gofakeit.Contact)
f.Set("email", gofakeit.Email)
f.Set("phone", gofakeit.Phone)
f.Set("phone_formatted", gofakeit.PhoneFormatted)
f.Set("username", gofakeit.Username)
f.Set("password", gofakeit.Password)
// Address
f.Set("address", gofakeit.Address)
f.Set("city", gofakeit.City)
f.Set("country", gofakeit.Country)
f.Set("country_abr", gofakeit.CountryAbr)
f.Set("state", gofakeit.State)
f.Set("state_abr", gofakeit.StateAbr)
f.Set("status_code", gofakeit.StatusCode)
f.Set("street", gofakeit.Street)
f.Set("street_name", gofakeit.StreetName)
f.Set("street_number", gofakeit.StreetNumber)
f.Set("street_prefix", gofakeit.StreetPrefix)
f.Set("street_suffix", gofakeit.StreetSuffix)
f.Set("zip", gofakeit.Zip)
f.Set("latitude", gofakeit.Latitude)
f.Set("latitude_in_range", gofakeit.LatitudeInRange)
f.Set("longitude", gofakeit.Longitude)
f.Set("longitude_in_range", gofakeit.LongitudeInRange)
// Beer
f.Set("beer_alcohol", gofakeit.BeerAlcohol)
f.Set("beer_hop", gofakeit.BeerHop)
f.Set("beer_ibu", gofakeit.BeerIbu)
f.Set("beer_blg", gofakeit.BeerBlg)
f.Set("beer_malt", gofakeit.BeerMalt)
f.Set("beer_name", gofakeit.BeerName)
f.Set("beer_style", gofakeit.BeerStyle)
f.Set("beer_yeast", gofakeit.BeerYeast)
// Cars
f.Set("vehicle", gofakeit.Vehicle)
f.Set("vehicle_type", gofakeit.VehicleType)
f.Set("car_maker", gofakeit.CarMaker)
f.Set("car_model", gofakeit.CarModel)
f.Set("fuel_type", gofakeit.FuelType)
f.Set("transmission_gear_type", gofakeit.TransmissionGearType)
// Text
f.Set("word", gofakeit.Word)
f.Set("sentence", gofakeit.Sentence)
f.Set("paragraph", gofakeit.Paragraph)
f.Set("question", gofakeit.Question)
f.Set("quote", gofakeit.Quote)
// Misc
f.Set("generate", gofakeit.Generate)
f.Set("boolean", gofakeit.Bool)
f.Set("uuid", gofakeit.UUID)
// Colors
f.Set("color", gofakeit.Color)
f.Set("hex_color", gofakeit.HexColor)
f.Set("rgb_color", gofakeit.RGBColor)
f.Set("safe_color", gofakeit.SafeColor)
// Internet
f.Set("url", gofakeit.URL)
f.Set("image_url", imageURL)
f.Set("avatar_url", avatarURL)
f.Set("domain_name", gofakeit.DomainName)
f.Set("domain_suffix", gofakeit.DomainSuffix)
f.Set("ipv4_address", gofakeit.IPv4Address)
f.Set("ipv6_address", gofakeit.IPv6Address)
f.Set("simple_status_code", gofakeit.SimpleStatusCode)
f.Set("http_method", gofakeit.HTTPMethod)
f.Set("user_agent", gofakeit.UserAgent)
f.Set("user_agent_firefox", gofakeit.FirefoxUserAgent)
f.Set("user_agent_chrome", gofakeit.ChromeUserAgent)
f.Set("user_agent_opera", gofakeit.OperaUserAgent)
f.Set("user_agent_safari", gofakeit.SafariUserAgent)
// Date / Time
f.Set("date", gofakeit.Date)
f.Set("date_range", gofakeit.DateRange)
f.Set("nano_second", gofakeit.NanoSecond)
f.Set("second", gofakeit.Second)
f.Set("minute", gofakeit.Minute)
f.Set("hour", gofakeit.Hour)
f.Set("month", gofakeit.Month)
f.Set("day", gofakeit.Day)
f.Set("weekday", gofakeit.WeekDay)
f.Set("year", gofakeit.Year)
f.Set("timezone", gofakeit.TimeZone)
f.Set("timezone_abv", gofakeit.TimeZoneAbv)
f.Set("timezone_full", gofakeit.TimeZoneFull)
f.Set("timezone_offset", gofakeit.TimeZoneOffset)
// Payment
f.Set("price", gofakeit.Price)
f.Set("credit_card", gofakeit.CreditCard)
f.Set("credit_card_cvv", gofakeit.CreditCardCvv)
f.Set("credit_card_number", gofakeit.CreditCardNumber)
f.Set("credit_card_number_luhn", gofakeit.CreditCardNumberLuhn)
f.Set("credit_card_type", gofakeit.CreditCardType)
f.Set("currency", gofakeit.Currency)
f.Set("currency_long", gofakeit.CurrencyLong)
f.Set("currency_short", gofakeit.CurrencyShort)
// Company
f.Set("bs", gofakeit.BS)
f.Set("buzzword", gofakeit.BuzzWord)
f.Set("company", gofakeit.Company)
f.Set("company_suffix", gofakeit.CompanySuffix)
f.Set("job", gofakeit.Job)
f.Set("job_description", gofakeit.JobDescriptor)
f.Set("job_level", gofakeit.JobLevel)
f.Set("job_title", gofakeit.JobTitle)
// Hacker
f.Set("hacker_abbreviation", gofakeit.HackerAbbreviation)
f.Set("hacker_adjective", gofakeit.HackerAdjective)
f.Set("hacker_ingverb", gofakeit.HackerIngverb)
f.Set("hacker_noun", gofakeit.HackerNoun)
f.Set("hacker_phrase", gofakeit.HackerPhrase)
f.Set("hacker_verb", gofakeit.HackerVerb)
//Hipster
f.Set("hipster_word", gofakeit.HipsterWord)
f.Set("hipster_paragraph", gofakeit.HipsterParagraph)
f.Set("hipster_sentence", gofakeit.HipsterSentence)
//Languages
//f.Set("language", gofakeit.Language)
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
// File
f.Set("extension", gofakeit.Extension)
f.Set("mine_type", gofakeit.MimeType)
// Numbers
f.Set("number", gofakeit.Number)
f.Set("numerify", gofakeit.Numerify)
f.Set("int8", gofakeit.Int8)
f.Set("int16", gofakeit.Int16)
f.Set("int32", gofakeit.Int32)
f.Set("int64", gofakeit.Int64)
f.Set("uint8", gofakeit.Uint8)
f.Set("uint16", gofakeit.Uint16)
f.Set("uint32", gofakeit.Uint32)
f.Set("uint64", gofakeit.Uint64)
f.Set("float32", gofakeit.Float32)
f.Set("float32_range", gofakeit.Float32Range)
f.Set("float64", gofakeit.Float64)
f.Set("float64_range", gofakeit.Float64Range)
f.Set("shuffle_ints", gofakeit.ShuffleInts)
f.Set("mac_address", gofakeit.MacAddress)
// String
f.Set("digit", gofakeit.Digit)
f.Set("letter", gofakeit.Letter)
f.Set("lexify", gofakeit.Lexify)
f.Set("rand_string", gofakeit.RandString)
f.Set("shuffle_strings", gofakeit.ShuffleStrings)
f.Set("numerify", gofakeit.Numerify)
//f.Set("programming_language", gofakeit.ProgrammingLanguage)
}
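
csvSource can also be driven by hand; a minimal sketch ("products.csv" is a hypothetical file):

src, err := NewCSVSource("products.csv")
if err != nil {
    log.Fatalf("ERR %s", err)
}
for src.Next() {
    vals, err := src.Values() // each cell typed as int, bool, or string
    if err != nil {
        log.Fatalf("ERR %s", err)
    }
    fmt.Println(vals)
}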

internal/serv/cmd_serv.go Normal file

@@ -0,0 +1,37 @@
package serv
import (
"github.com/dosco/super-graph/core"
"github.com/spf13/cobra"
)
var (
sg *core.SuperGraph
)
func cmdServ(cmd *cobra.Command, args []string) {
var err error
conf, err = initConf()
if err != nil {
fatalInProd(err, "failed to read config")
}
initWatcher()
db, err = initDB(conf, true)
if err != nil {
fatalInProd(err, "failed to connect to database")
}
// if conf != nil && db != nil {
// initResolvers()
// }
sg, err = core.NewSuperGraph(&conf.Core, db)
if err != nil {
fatalInProd(err, "failed to initialize Super Graph")
}
startHTTP()
}

internal/serv/config.go Normal file

@@ -0,0 +1,115 @@
package serv
import (
"fmt"
"os"
"path"
"strings"
"github.com/spf13/viper"
)
// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
// environment variable. This is the best way to create a new Super Graph config.
func ReadInConfig(configFile string) (*Config, error) {
cpath := path.Dir(configFile)
cfile := path.Base(configFile)
vi := newViper(cpath, cfile)
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
inherits := vi.GetString("inherits")
if len(inherits) != 0 {
vi = newViper(cpath, inherits)
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
if vi.IsSet("inherits") {
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
inherits,
vi.GetString("inherits"))
}
vi.SetConfigName(cfile)
if err := vi.MergeInConfig(); err != nil {
return nil, err
}
}
c := &Config{cpath: cpath, vi: vi}
if err := vi.Unmarshal(&c); err != nil {
return nil, fmt.Errorf("failed to decode config, %v", err)
}
if len(c.Core.AllowListFile) == 0 {
c.Core.AllowListFile = path.Join(cpath, "allow.list")
}
return c, nil
}
func newViper(configPath, configFile string) *viper.Viper {
vi := viper.New()
vi.SetEnvPrefix("SG")
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
vi.AutomaticEnv()
vi.AddConfigPath(configPath)
vi.SetConfigName(configFile)
vi.AddConfigPath("./config")
vi.SetDefault("host_port", "0.0.0.0:8080")
vi.SetDefault("web_ui", false)
vi.SetDefault("enable_tracing", false)
vi.SetDefault("auth_fail_block", "always")
vi.SetDefault("seed_file", "seed.js")
vi.SetDefault("database.type", "postgres")
vi.SetDefault("database.host", "localhost")
vi.SetDefault("database.port", 5432)
vi.SetDefault("database.user", "postgres")
vi.SetDefault("database.schema", "public")
vi.SetDefault("env", "development")
vi.BindEnv("env", "GO_ENV") //nolint: errcheck
vi.BindEnv("host", "HOST") //nolint: errcheck
vi.BindEnv("port", "PORT") //nolint: errcheck
vi.SetDefault("auth.rails.max_idle", 80)
vi.SetDefault("auth.rails.max_active", 12000)
return vi
}
func GetConfigName() string {
if len(os.Getenv("GO_ENV")) == 0 {
return "dev"
}
ge := strings.ToLower(os.Getenv("GO_ENV"))
switch {
case strings.HasPrefix(ge, "pro"):
return "prod"
case strings.HasPrefix(ge, "sta"):
return "stage"
case strings.HasPrefix(ge, "tes"):
return "test"
case strings.HasPrefix(ge, "dev"):
return "dev"
}
return ge
}
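
Because of the SG env prefix and the "." to "_" key replacer, any key can be overridden from the environment, e.g. SG_DATABASE_PASSWORD maps to database.password. A minimal loading sketch, assuming it lives in package serv:

func loadConfig(cpath string) (*Config, error) {
    // GO_ENV=production selects ./config/prod.yml; GO_ENV unset selects dev.yml
    return ReadInConfig(path.Join(cpath, GetConfigName()))
}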

internal/serv/core.go Normal file

@@ -0,0 +1,7 @@
package serv
// func (c *coreContext) handleReq(w io.Writer, req *http.Request) error {
// return nil
// }

@@ -0,0 +1,21 @@
query {
products(
# returns only 30 items
limit: 30,
# starts from item 10, commented out for now
# offset: 10,
# orders the response items by highest price
order_by: { price: desc },
# no duplicate prices returned
distinct: [ price ]
# only items with an id >= 20 and < 28 are returned
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
}
}

internal/serv/fuzz.go Normal file

@@ -0,0 +1,11 @@
// +build gofuzz
package serv
func Fuzz(data []byte) int {
gql := string(data)
QueryName(gql)
gqlHash(gql, nil, "")
return 1
}

internal/serv/fuzz_test.go Normal file

@@ -0,0 +1,15 @@
package serv
import "testing"
func TestFuzzCrashers(t *testing.T) {
var crashers = []string{
"query",
"q",
"que",
}
for _, f := range crashers {
gqlHash(f, nil, "")
}
}

internal/serv/health.go Normal file

@@ -0,0 +1,25 @@
package serv
import (
"context"
"net/http"
)
var healthyResponse = []byte("All's Well")
func health(w http.ResponseWriter, _ *http.Request) {
ct, cancel := context.WithTimeout(context.Background(), conf.DB.PingTimeout)
defer cancel()
if err := db.PingContext(ct); err != nil {
log.Printf("ERR error pinging database: %s", err)
w.WriteHeader(http.StatusInternalServerError)
return
}
if _, err := w.Write(healthyResponse); err != nil {
log.Printf("ERR error writing healthy response: %s", err)
w.WriteHeader(http.StatusInternalServerError)
return
}
}
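
A sketch of exercising the handler in a test, assuming the package-level conf and db have been initialized by test setup:

func TestHealth(t *testing.T) {
    rr := httptest.NewRecorder()
    health(rr, httptest.NewRequest("GET", "/health", nil))
    if rr.Code != http.StatusOK {
        t.Fatalf("expected 200, got %d", rr.Code)
    }
}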

internal/serv/http.go Normal file

@@ -0,0 +1,124 @@
package serv
import (
"encoding/json"
"errors"
"io"
"io/ioutil"
"net/http"
"strings"
"github.com/dosco/super-graph/core"
"github.com/dosco/super-graph/internal/serv/internal/auth"
"github.com/rs/cors"
"go.uber.org/zap"
)
const (
maxReadBytes = 100000 // 100 KB
introspectionQuery = "IntrospectionQuery"
)
var (
errUnauthorized = errors.New("not authorized")
)
type gqlReq struct {
OpName string `json:"operationName"`
Query string `json:"query"`
Vars json.RawMessage `json:"variables"`
}
type errorResp struct {
Error string `json:"error"`
}
func apiV1Handler() http.Handler {
h, err := auth.WithAuth(http.HandlerFunc(apiV1), &conf.Auth)
if err != nil {
log.Fatalf("ERR %s", err)
}
if len(conf.AllowedOrigins) != 0 {
c := cors.New(cors.Options{
AllowedOrigins: conf.AllowedOrigins,
AllowCredentials: true,
Debug: conf.DebugCORS,
})
h = c.Handler(h)
}
return h
}
func apiV1(w http.ResponseWriter, r *http.Request) {
ct := r.Context()
//nolint: errcheck
if conf.AuthFailBlock && !auth.IsAuth(ct) {
renderErr(w, errUnauthorized, nil)
return
}
b, err := ioutil.ReadAll(io.LimitReader(r.Body, maxReadBytes))
if err != nil {
renderErr(w, err, nil)
return
}
defer r.Body.Close()
req := gqlReq{}
err = json.Unmarshal(b, &req)
if err != nil {
renderErr(w, err, nil)
return
}
if strings.EqualFold(req.OpName, introspectionQuery) {
introspect(w)
return
}
res, err := sg.GraphQL(ct, req.Query, req.Vars)
if logLevel >= LogLevelDebug && res != nil {
log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
}
if err != nil {
renderErr(w, err, res)
return
}
json.NewEncoder(w).Encode(res)
if logLevel >= LogLevelInfo {
zlog.Info("success",
zap.String("op", res.Operation()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
)
}
}
//nolint: errcheck
func renderErr(w http.ResponseWriter, err error, res *core.Result) {
if err == errUnauthorized {
w.WriteHeader(http.StatusUnauthorized)
}
json.NewEncoder(w).Encode(errorResp{Error: err.Error()})
if logLevel >= LogLevelError {
if res != nil {
zlog.Error(err.Error(),
zap.String("op", res.Operation()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
)
} else {
zlog.Error(err.Error())
}
}
}

internal/serv/init.go Normal file

@@ -0,0 +1,199 @@
package serv
import (
"crypto/tls"
"crypto/x509"
"database/sql"
"errors"
"fmt"
"io/ioutil"
"path"
"strings"
"time"
"github.com/jackc/pgx/v4"
"github.com/jackc/pgx/v4/stdlib"
//_ "github.com/jackc/pgx/v4/stdlib"
)
const (
PEM_SIG = "--BEGIN "
)
func initConf() (*Config, error) {
c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
if err != nil {
return nil, err
}
switch c.LogLevel {
case "debug":
logLevel = LogLevelDebug
case "error":
logLevel = LogLevelError
case "warn":
logLevel = LogLevelWarn
case "info":
logLevel = LogLevelInfo
default:
logLevel = LogLevelNone
}
// Auths: validate and sanitize
am := make(map[string]struct{})
for i := 0; i < len(c.Auths); i++ {
a := &c.Auths[i]
a.Name = sanitize(a.Name)
if _, ok := am[a.Name]; ok {
log.Printf("WRN duplicate auth found: %s", a.Name)
c.Auths = append(c.Auths[:i], c.Auths[i+1:]...)
i--
continue
}
am[a.Name] = struct{}{}
}
// Actions: validate and sanitize
axm := make(map[string]struct{})
for i := 0; i < len(c.Actions); i++ {
a := &c.Actions[i]
a.Name = sanitize(a.Name)
a.AuthName = sanitize(a.AuthName)
if _, ok := axm[a.Name]; ok {
log.Printf("WRN duplicate action found: %s", a.Name)
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
i--
continue
}
if _, ok := am[a.AuthName]; !ok {
log.Printf("WRN invalid auth_name '%s' for action: %s", a.AuthName, a.Name)
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
i--
continue
}
axm[a.Name] = struct{}{}
}
var anonFound bool
for _, r := range c.Roles {
if sanitize(r.Name) == "anon" {
anonFound = true
}
}
if !anonFound {
log.Printf("WRN unauthenticated requests will be blocked. no role 'anon' defined")
c.AuthFailBlock = false
}
return c, nil
}
func initDB(c *Config, useDB bool) (*sql.DB, error) {
var db *sql.DB
var err error
config, err := pgx.ParseConfig("")
if err != nil {
return nil, err
}
config.Host = c.DB.Host
config.Port = c.DB.Port
config.User = c.DB.User
config.Password = c.DB.Password
config.RuntimeParams = map[string]string{
"application_name": c.AppName,
"search_path": c.DB.Schema,
}
if useDB {
config.Database = c.DB.DBName
}
if c.DB.EnableTLS {
if len(c.DB.ServerName) == 0 {
return nil, errors.New("server_name is required")
}
if len(c.DB.ServerCert) == 0 {
return nil, errors.New("server_cert is required")
}
if len(c.DB.ClientCert) == 0 {
return nil, errors.New("client_cert is required")
}
if len(c.DB.ClientKey) == 0 {
return nil, errors.New("client_key is required")
}
rootCertPool := x509.NewCertPool()
var pem []byte
var err error
if strings.Contains(c.DB.ServerCert, PEM_SIG) {
pem = []byte(c.DB.ServerCert)
} else {
pem, err = ioutil.ReadFile(c.DB.ServerCert)
}
if err != nil {
return nil, fmt.Errorf("db tls: %w", err)
}
if ok := rootCertPool.AppendCertsFromPEM(pem); !ok {
return nil, errors.New("db tls: failed to append pem")
}
clientCert := make([]tls.Certificate, 0, 1)
var certs tls.Certificate
if strings.Contains(c.DB.ClientCert, PEM_SIG) {
certs, err = tls.X509KeyPair([]byte(c.DB.ClientCert), []byte(c.DB.ClientKey))
} else {
certs, err = tls.LoadX509KeyPair(c.DB.ClientCert, c.DB.ClientKey)
}
if err != nil {
return nil, fmt.Errorf("db tls: %w", err)
}
clientCert = append(clientCert, certs)
config.TLSConfig = &tls.Config{
RootCAs: rootCertPool,
Certificates: clientCert,
ServerName: c.DB.ServerName,
}
}
// switch c.LogLevel {
// case "debug":
// config.LogLevel = pgx.LogLevelDebug
// case "info":
// config.LogLevel = pgx.LogLevelInfo
// case "warn":
// config.LogLevel = pgx.LogLevelWarn
// case "error":
// config.LogLevel = pgx.LogLevelError
// default:
// config.LogLevel = pgx.LogLevelNone
// }
//config.Logger = NewSQLLogger(logger)
// if c.DB.MaxRetries != 0 {
// opt.MaxRetries = c.DB.MaxRetries
// }
// if c.DB.PoolSize != 0 {
// config.MaxConns = conf.DB.PoolSize
// }
for i := 1; i < 10; i++ {
db = stdlib.OpenDB(*config)
if db != nil {
break
}
time.Sleep(time.Duration(i*100) * time.Millisecond)
}
if db == nil {
return nil, errors.New("failed to open database")
}
return db, nil
}
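
Note that server_cert, client_cert, and client_key accept either a file path or inline PEM; the PEM_SIG ("--BEGIN ") check picks the branch. Illustrative config values (hypothetical):

// database:
//   server_cert: ./config/server.crt    # read from disk, or:
//   server_cert: |                      # used verbatim as PEM
//     -----BEGIN CERTIFICATE-----
//     ...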

internal/serv/internal/auth/auth.go Normal file

@@ -0,0 +1,127 @@
package auth
import (
"context"
"fmt"
"net/http"
"github.com/dosco/super-graph/core"
)
// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
Name string
Type string
Cookie string
CredsInHeader bool `mapstructure:"creds_in_header"`
Rails struct {
Version string
SecretKeyBase string `mapstructure:"secret_key_base"`
URL string
Password string
MaxIdle int `mapstructure:"max_idle"`
MaxActive int `mapstructure:"max_active"`
Salt string
SignSalt string `mapstructure:"sign_salt"`
AuthSalt string `mapstructure:"auth_salt"`
}
JWT struct {
Provider string
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
}
Header struct {
Name string
Value string
Exists bool
}
}
func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
userIDProvider := r.Header.Get("X-User-ID-Provider")
if len(userIDProvider) != 0 {
ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
}
userID := r.Header.Get("X-User-ID")
if len(userID) != 0 {
ctx = context.WithValue(ctx, core.UserIDKey, userID)
}
userRole := r.Header.Get("X-User-Role")
if len(userRole) != 0 {
ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
}
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
hdr := ac.Header
if len(hdr.Name) == 0 {
return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
}
if !hdr.Exists && len(hdr.Value) == 0 {
return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
}
return func(w http.ResponseWriter, r *http.Request) {
var failed bool
value := r.Header.Get(hdr.Name)
switch {
case hdr.Exists:
failed = (len(value) == 0)
default:
failed = (value != hdr.Value)
}
if failed {
http.Error(w, "401 unauthorized", http.StatusUnauthorized)
return
}
next.ServeHTTP(w, r)
}, nil
}
func WithAuth(next http.Handler, ac *Auth) (http.Handler, error) {
var err error
if ac.CredsInHeader {
next, err = SimpleHandler(ac, next)
}
if err != nil {
return nil, err
}
switch ac.Type {
case "rails":
return RailsHandler(ac, next)
case "jwt":
return JwtHandler(ac, next)
case "header":
return HeaderHandler(ac, next)
}
return next, nil
}
func IsAuth(ct context.Context) bool {
return ct.Value(core.UserIDKey) != nil
}
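
A sketch of wrapping any handler with header auth (the header name and value are made up for illustration):

ac := &Auth{Name: "internal", Type: "header"}
ac.Header.Name = "X-API-Key"
ac.Header.Value = "dev-key"
h, err := WithAuth(next, ac) // next is any http.Handler
if err != nil {
    log.Fatal(err)
}
http.Handle("/api/v1/graphql", h)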

internal/serv/internal/auth/jwt.go Normal file

@@ -0,0 +1,105 @@
package auth
import (
"context"
"io/ioutil"
"net/http"
"strings"
jwt "github.com/dgrijalva/jwt-go"
"github.com/dosco/super-graph/core"
)
const (
authHeader = "Authorization"
jwtAuth0 int = iota + 1
)
func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
var key interface{}
var jwtProvider int
cookie := ac.Cookie
if ac.JWT.Provider == "auth0" {
jwtProvider = jwtAuth0
}
secret := ac.JWT.Secret
publicKeyFile := ac.JWT.PubKeyFile
switch {
case len(secret) != 0:
key = []byte(secret)
case len(publicKeyFile) != 0:
kd, err := ioutil.ReadFile(publicKeyFile)
if err != nil {
return nil, err
}
switch ac.JWT.PubKeyType {
case "ecdsa":
key, err = jwt.ParseECPublicKeyFromPEM(kd)
case "rsa":
key, err = jwt.ParseRSAPublicKeyFromPEM(kd)
default:
key, err = jwt.ParseECPublicKeyFromPEM(kd)
}
if err != nil {
return nil, err
}
}
return func(w http.ResponseWriter, r *http.Request) {
var tok string
if len(cookie) != 0 {
ck, err := r.Cookie(cookie)
if err != nil {
next.ServeHTTP(w, r)
return
}
tok = ck.Value
} else {
ah := r.Header.Get(authHeader)
if len(ah) < 10 {
next.ServeHTTP(w, r)
return
}
tok = ah[7:]
}
token, err := jwt.ParseWithClaims(tok, &jwt.StandardClaims{}, func(token *jwt.Token) (interface{}, error) {
return key, nil
})
if err != nil {
next.ServeHTTP(w, r)
return
}
if claims, ok := token.Claims.(*jwt.StandardClaims); ok {
ctx := r.Context()
if jwtProvider == jwtAuth0 {
sub := strings.Split(claims.Subject, "|")
if len(sub) == 2 {
ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
}
} else {
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
}
next.ServeHTTP(w, r.WithContext(ctx))
return
}
next.ServeHTTP(w, r)
}, nil
}
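
Illustrative: minting an HS256 token this handler will accept when auth.jwt.secret is set to "mysecret" (a made-up value); auth0-style subjects such as "auth0|user42" additionally populate the provider key:

claims := &jwt.StandardClaims{Subject: "user42"}
token := jwt.NewWithClaims(jwt.SigningMethodHS256, claims)
signed, err := token.SignedString([]byte("mysecret"))
// send as:  Authorization: Bearer <signed>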

internal/serv/internal/auth/rails.go Normal file

@@ -0,0 +1,190 @@
package auth
import (
"context"
"errors"
"fmt"
"net/http"
"net/url"
"strings"
"github.com/bradfitz/gomemcache/memcache"
"github.com/dosco/super-graph/core"
"github.com/dosco/super-graph/internal/serv/internal/rails"
"github.com/garyburd/redigo/redis"
)
func RailsHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
ru := ac.Rails.URL
if strings.HasPrefix(ru, "memcache:") {
return RailsMemcacheHandler(ac, next)
}
if strings.HasPrefix(ru, "redis:") {
return RailsRedisHandler(ac, next)
}
return RailsCookieHandler(ac, next)
}
func RailsRedisHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
return nil, fmt.Errorf("no auth.cookie defined")
}
if len(ac.Rails.URL) == 0 {
return nil, fmt.Errorf("no auth.rails.url defined")
}
rp := &redis.Pool{
MaxIdle: ac.Rails.MaxIdle,
MaxActive: ac.Rails.MaxActive,
Dial: func() (redis.Conn, error) {
c, err := redis.DialURL(ac.Rails.URL)
if err != nil {
return nil, err
}
pwd := ac.Rails.Password
if len(pwd) != 0 {
if _, err := c.Do("AUTH", pwd); err != nil {
return nil, err
}
}
return c, nil
},
}
return func(w http.ResponseWriter, r *http.Request) {
ck, err := r.Cookie(cookie)
if err != nil {
next.ServeHTTP(w, r)
return
}
key := fmt.Sprintf("session:%s", ck.Value)
conn := rp.Get()
defer conn.Close() //nolint: errcheck
sessionData, err := redis.Bytes(conn.Do("GET", key))
if err != nil {
next.ServeHTTP(w, r)
return
}
userID, err := rails.ParseCookie(string(sessionData))
if err != nil {
next.ServeHTTP(w, r)
return
}
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func RailsMemcacheHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
return nil, fmt.Errorf("no auth.cookie defined")
}
if len(ac.Rails.URL) == 0 {
return nil, fmt.Errorf("no auth.rails.url defined")
}
rURL, err := url.Parse(ac.Rails.URL)
if err != nil {
return nil, err
}
mc := memcache.New(rURL.Host)
return func(w http.ResponseWriter, r *http.Request) {
ck, err := r.Cookie(cookie)
if err != nil {
next.ServeHTTP(w, r)
return
}
key := fmt.Sprintf("session:%s", ck.Value)
item, err := mc.Get(key)
if err != nil {
next.ServeHTTP(w, r)
return
}
userID, err := rails.ParseCookie(string(item.Value))
if err != nil {
next.ServeHTTP(w, r)
return
}
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func RailsCookieHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
return nil, fmt.Errorf("no auth.cookie defined")
}
ra, err := railsAuth(ac)
if err != nil {
return nil, err
}
return func(w http.ResponseWriter, r *http.Request) {
ck, err := r.Cookie(cookie)
if err != nil || len(ck.Value) == 0 {
// logger.Warn().Err(err).Msg("rails cookie missing")
next.ServeHTTP(w, r)
return
}
userID, err := ra.ParseCookie(ck.Value)
if err != nil {
// logger.Warn().Err(err).Msg("failed to parse rails cookie")
next.ServeHTTP(w, r)
return
}
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func railsAuth(ac *Auth) (*rails.Auth, error) {
secret := ac.Rails.SecretKeyBase
if len(secret) == 0 {
return nil, errors.New("no auth.rails.secret_key_base defined")
}
version := ac.Rails.Version
if len(version) == 0 {
return nil, errors.New("no auth.rails.version defined")
}
ra, err := rails.NewAuth(version, secret)
if err != nil {
return nil, err
}
if len(ac.Rails.Salt) != 0 {
ra.Salt = ac.Rails.Salt
}
if len(ac.Rails.SignSalt) != 0 {
ra.SignSalt = ac.Rails.SignSalt
}
if len(ac.Rails.AuthSalt) != 0 {
ra.AuthSalt = ac.Rails.AuthSalt
}
return ra, nil
}

internal/serv/internal/migrate/migrate.go Normal file

@@ -0,0 +1,370 @@
package migrate
import (
"bytes"
"context"
"database/sql"
"fmt"
"io/ioutil"
"os"
"path/filepath"
"regexp"
"strconv"
"strings"
"text/template"
"github.com/pkg/errors"
)
var migrationPattern = regexp.MustCompile(`\A(\d+)_[^\.]+\.sql\z`)
var ErrNoFwMigration = errors.Errorf("no sql in forward migration step")
type BadVersionError string
func (e BadVersionError) Error() string {
return string(e)
}
type IrreversibleMigrationError struct {
m *Migration
}
func (e IrreversibleMigrationError) Error() string {
return fmt.Sprintf("Irreversible migration: %d - %s", e.m.Sequence, e.m.Name)
}
type NoMigrationsFoundError struct {
Path string
}
func (e NoMigrationsFoundError) Error() string {
return fmt.Sprintf("No migrations found at %s", e.Path)
}
type MigrationPgError struct {
Sql string
Error error
}
type Migration struct {
Sequence int32
Name string
UpSQL string
DownSQL string
}
type MigratorOptions struct {
// DisableTx causes the Migrator not to run migrations in a transaction.
DisableTx bool
// MigratorFS is the interface used for collecting the migrations.
MigratorFS MigratorFS
}
type Migrator struct {
db *sql.DB
versionTable string
options *MigratorOptions
Migrations []*Migration
OnStart func(int32, string, string, string) // OnStart is called when a migration is run with the sequence, name, direction, and SQL
Data map[string]interface{} // Data available to use in migrations
}
func NewMigrator(db *sql.DB, versionTable string) (m *Migrator, err error) {
return NewMigratorEx(db, versionTable, &MigratorOptions{MigratorFS: defaultMigratorFS{}})
}
func NewMigratorEx(db *sql.DB, versionTable string, opts *MigratorOptions) (m *Migrator, err error) {
m = &Migrator{db: db, versionTable: versionTable, options: opts}
err = m.ensureSchemaVersionTableExists()
m.Migrations = make([]*Migration, 0)
m.Data = make(map[string]interface{})
return
}
type MigratorFS interface {
ReadDir(dirname string) ([]os.FileInfo, error)
ReadFile(filename string) ([]byte, error)
Glob(pattern string) (matches []string, err error)
}
type defaultMigratorFS struct{}
func (defaultMigratorFS) ReadDir(dirname string) ([]os.FileInfo, error) {
return ioutil.ReadDir(dirname)
}
func (defaultMigratorFS) ReadFile(filename string) ([]byte, error) {
return ioutil.ReadFile(filename)
}
func (defaultMigratorFS) Glob(pattern string) ([]string, error) {
return filepath.Glob(pattern)
}
func FindMigrationsEx(path string, fs MigratorFS) ([]string, error) {
path = strings.TrimRight(path, string(filepath.Separator))
fileInfos, err := fs.ReadDir(path)
if err != nil {
return nil, err
}
paths := make([]string, 0, len(fileInfos))
for _, fi := range fileInfos {
if fi.IsDir() {
continue
}
matches := migrationPattern.FindStringSubmatch(fi.Name())
if len(matches) != 2 {
continue
}
n, err := strconv.ParseInt(matches[1], 10, 32)
if err != nil {
// The regexp already validated that the prefix is all digits so this *should* never fail
return nil, err
}
mcount := len(paths)
if n < int64(mcount) {
return nil, fmt.Errorf("Duplicate migration %d", n)
}
if int64(mcount) < n {
return nil, fmt.Errorf("Missing migration %d", mcount)
}
paths = append(paths, filepath.Join(path, fi.Name()))
}
return paths, nil
}
func FindMigrations(path string) ([]string, error) {
return FindMigrationsEx(path, defaultMigratorFS{})
}
func (m *Migrator) LoadMigrations(path string) error {
path = strings.TrimRight(path, string(filepath.Separator))
mainTmpl := template.New("main")
sharedPaths, err := m.options.MigratorFS.Glob(filepath.Join(path, "*", "*.sql"))
if err != nil {
return err
}
for _, p := range sharedPaths {
body, err := m.options.MigratorFS.ReadFile(p)
if err != nil {
return err
}
name := strings.Replace(p, path+string(filepath.Separator), "", 1)
_, err = mainTmpl.New(name).Parse(string(body))
if err != nil {
return err
}
}
paths, err := FindMigrationsEx(path, m.options.MigratorFS)
if err != nil {
return err
}
if len(paths) == 0 {
return NoMigrationsFoundError{Path: path}
}
for _, p := range paths {
body, err := m.options.MigratorFS.ReadFile(p)
if err != nil {
return err
}
pieces := strings.SplitN(string(body), "---- create above / drop below ----", 2)
var upSQL, downSQL string
upSQL = strings.TrimSpace(pieces[0])
upSQL, err = m.evalMigration(mainTmpl.New(filepath.Base(p)+" up"), upSQL)
if err != nil {
return err
}
// Make sure there is SQL in the forward migration step.
containsSQL := false
for _, v := range strings.Split(upSQL, "\n") {
// Only account for regular single line comment, empty line and space/comment combination
cleanString := strings.TrimSpace(v)
if len(cleanString) != 0 &&
!strings.HasPrefix(cleanString, "--") {
containsSQL = true
break
}
}
if !containsSQL {
return ErrNoFwMigration
}
if len(pieces) == 2 {
downSQL = strings.TrimSpace(pieces[1])
downSQL, err = m.evalMigration(mainTmpl.New(filepath.Base(p)+" down"), downSQL)
if err != nil {
return err
}
}
m.AppendMigration(filepath.Base(p), upSQL, downSQL)
}
return nil
}
func (m *Migrator) evalMigration(tmpl *template.Template, sql string) (string, error) {
tmpl, err := tmpl.Parse(sql)
if err != nil {
return "", err
}
var buf bytes.Buffer
err = tmpl.Execute(&buf, m.Data)
if err != nil {
return "", err
}
return buf.String(), nil
}
func (m *Migrator) AppendMigration(name, upSQL, downSQL string) {
m.Migrations = append(
m.Migrations,
&Migration{
Sequence: int32(len(m.Migrations)) + 1,
Name: name,
UpSQL: upSQL,
DownSQL: downSQL,
})
}
// Migrate runs pending migrations
// It calls m.OnStart when it begins a migration
func (m *Migrator) Migrate() error {
return m.MigrateTo(int32(len(m.Migrations)))
}
// MigrateTo migrates to targetVersion
func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
// Lock to ensure multiple migrations cannot occur simultaneously
lockNum := int64(9628173550095224) // arbitrary random number
var locked bool
if lockErr := m.db.QueryRow("select pg_try_advisory_lock($1)", lockNum).Scan(&locked); lockErr != nil {
return lockErr
}
if !locked {
return errors.Errorf("migration lock %d is held by another session", lockNum)
}
defer func() {
_, unlockErr := m.db.Exec("select pg_advisory_unlock($1)", lockNum)
if err == nil && unlockErr != nil {
err = unlockErr
}
}()
currentVersion, err := m.GetCurrentVersion()
if err != nil {
return err
}
if targetVersion < 0 || int32(len(m.Migrations)) < targetVersion {
errMsg := fmt.Sprintf("destination version %d is outside the valid versions of 0 to %d", targetVersion, len(m.Migrations))
return BadVersionError(errMsg)
}
if currentVersion < 0 || int32(len(m.Migrations)) < currentVersion {
errMsg := fmt.Sprintf("current version %d is outside the valid versions of 0 to %d", currentVersion, len(m.Migrations))
return BadVersionError(errMsg)
}
var direction int32
if currentVersion < targetVersion {
direction = 1
} else {
direction = -1
}
for currentVersion != targetVersion {
var current *Migration
var sql, directionName string
var sequence int32
if direction == 1 {
current = m.Migrations[currentVersion]
sequence = current.Sequence
sql = current.UpSQL
directionName = "up"
} else {
current = m.Migrations[currentVersion-1]
sequence = current.Sequence - 1
sql = current.DownSQL
directionName = "down"
if current.DownSQL == "" {
return IrreversibleMigrationError{m: current}
}
}
ctx := context.Background()
tx, err := m.db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer tx.Rollback() //nolint: errcheck
// Fire on start callback
if m.OnStart != nil {
m.OnStart(current.Sequence, current.Name, directionName, sql)
}
// Execute the migration
_, err = tx.Exec(sql)
if err != nil {
// if err, ok := err.(pgx.PgError); ok {
// return MigrationPgError{Sql: sql, PgError: err}
// }
return err
}
// Reset all database connection settings. Important to do before updating version as search_path may have been changed.
// if _, err := tx.Exec(ctx, "reset all"); err != nil {
// return err
// }
// Add one to the version
_, err = tx.Exec("update "+m.versionTable+" set version=$1", sequence)
if err != nil {
return err
}
err = tx.Commit()
if err != nil {
return err
}
currentVersion = currentVersion + direction
}
return nil
}
func (m *Migrator) GetCurrentVersion() (v int32, err error) {
err = m.db.QueryRow("select version from " + m.versionTable).Scan(&v)
return v, err
}
func (m *Migrator) ensureSchemaVersionTableExists() (err error) {
_, err = m.db.Exec(fmt.Sprintf(`
create table if not exists %s(version int4 not null);
insert into %s(version)
select 0
where 0=(select count(*) from %s);
`, m.versionTable, m.versionTable, m.versionTable))
return err
}
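
A minimal sketch of driving the migrator directly, assuming db is an open *sql.DB and ./config/migrations holds numbered .sql files:

m, err := migrate.NewMigrator(db, "schema_version")
if err != nil {
    log.Fatal(err)
}
m.Data = map[string]interface{}{"app_name_slug": "my_app"} // available inside migration templates
if err := m.LoadMigrations("./config/migrations"); err != nil {
    log.Fatal(err)
}
if err := m.Migrate(); err != nil { // apply everything pending
    log.Fatal(err)
}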

internal/serv/internal/migrate/migrate_test.go Normal file

@@ -0,0 +1,352 @@
package migrate_test
/*
import (
. "gopkg.in/check.v1"
)
type MigrateSuite struct {
conn *pgx.Conn
}
func Test(t *testing.T) { TestingT(t) }
var _ = Suite(&MigrateSuite{})
var versionTable string = "schema_version_non_default"
func (s *MigrateSuite) SetUpTest(c *C) {
var err error
s.conn, err = pgx.Connect(*defaultConnectionParameters)
c.Assert(err, IsNil)
s.cleanupSampleMigrator(c)
}
func (s *MigrateSuite) currentVersion(c *C) int32 {
var n int32
err := s.conn.QueryRow("select version from " + versionTable).Scan(&n)
c.Assert(err, IsNil)
return n
}
func (s *MigrateSuite) Exec(c *C, sql string, arguments ...interface{}) pgx.CommandTag {
commandTag, err := s.conn.Exec(sql, arguments...)
c.Assert(err, IsNil)
return commandTag
}
func (s *MigrateSuite) tableExists(c *C, tableName string) bool {
var exists bool
err := s.conn.QueryRow(
"select exists(select 1 from information_schema.tables where table_catalog=$1 and table_name=$2)",
defaultConnectionParameters.Database,
tableName,
).Scan(&exists)
c.Assert(err, IsNil)
return exists
}
func (s *MigrateSuite) createEmptyMigrator(c *C) *migrate.Migrator {
var err error
m, err := migrate.NewMigrator(s.conn, versionTable)
c.Assert(err, IsNil)
return m
}
func (s *MigrateSuite) createSampleMigrator(c *C) *migrate.Migrator {
m := s.createEmptyMigrator(c)
m.AppendMigration("Create t1", "create table t1(id serial);", "drop table t1;")
m.AppendMigration("Create t2", "create table t2(id serial);", "drop table t2;")
m.AppendMigration("Create t3", "create table t3(id serial);", "drop table t3;")
return m
}
func (s *MigrateSuite) cleanupSampleMigrator(c *C) {
tables := []string{versionTable, "t1", "t2", "t3"}
for _, table := range tables {
s.Exec(c, "drop table if exists "+table)
}
}
func (s *MigrateSuite) TestNewMigrator(c *C) {
var m *migrate.Migrator
var err error
// Initial run
m, err = migrate.NewMigrator(s.conn, versionTable)
c.Assert(err, IsNil)
// Creates version table
schemaVersionExists := s.tableExists(c, versionTable)
c.Assert(schemaVersionExists, Equals, true)
// Succeeds when version table is already created
m, err = migrate.NewMigrator(s.conn, versionTable)
c.Assert(err, IsNil)
initialVersion, err := m.GetCurrentVersion()
c.Assert(err, IsNil)
c.Assert(initialVersion, Equals, int32(0))
}
func (s *MigrateSuite) TestAppendMigration(c *C) {
m := s.createEmptyMigrator(c)
name := "Create t"
upSQL := "create t..."
downSQL := "drop t..."
m.AppendMigration(name, upSQL, downSQL)
c.Assert(len(m.Migrations), Equals, 1)
c.Assert(m.Migrations[0].Name, Equals, name)
c.Assert(m.Migrations[0].UpSQL, Equals, upSQL)
c.Assert(m.Migrations[0].DownSQL, Equals, downSQL)
}
func (s *MigrateSuite) TestLoadMigrationsMissingDirectory(c *C) {
m := s.createEmptyMigrator(c)
err := m.LoadMigrations("testdata/missing")
c.Assert(err, ErrorMatches, "open testdata/missing: no such file or directory")
}
func (s *MigrateSuite) TestLoadMigrationsEmptyDirectory(c *C) {
m := s.createEmptyMigrator(c)
err := m.LoadMigrations("testdata/empty")
c.Assert(err, ErrorMatches, "No migrations found at testdata/empty")
}
func (s *MigrateSuite) TestFindMigrationsWithGaps(c *C) {
_, err := migrate.FindMigrations("testdata/gap")
c.Assert(err, ErrorMatches, "Missing migration 2")
}
func (s *MigrateSuite) TestFindMigrationsWithDuplicate(c *C) {
_, err := migrate.FindMigrations("testdata/duplicate")
c.Assert(err, ErrorMatches, "Duplicate migration 2")
}
func (s *MigrateSuite) TestLoadMigrations(c *C) {
m := s.createEmptyMigrator(c)
m.Data = map[string]interface{}{"prefix": "foo"}
err := m.LoadMigrations("testdata/sample")
c.Assert(err, IsNil)
c.Assert(m.Migrations, HasLen, 5)
c.Check(m.Migrations[0].Name, Equals, "001_create_t1.sql")
c.Check(m.Migrations[0].UpSQL, Equals, `create table t1(
id serial primary key
);`)
c.Check(m.Migrations[0].DownSQL, Equals, "drop table t1;")
c.Check(m.Migrations[1].Name, Equals, "002_create_t2.sql")
c.Check(m.Migrations[1].UpSQL, Equals, `create table t2(
id serial primary key
);`)
c.Check(m.Migrations[1].DownSQL, Equals, "drop table t2;")
c.Check(m.Migrations[2].Name, Equals, "003_irreversible.sql")
c.Check(m.Migrations[2].UpSQL, Equals, "drop table t2;")
c.Check(m.Migrations[2].DownSQL, Equals, "")
c.Check(m.Migrations[3].Name, Equals, "004_data_interpolation.sql")
c.Check(m.Migrations[3].UpSQL, Equals, "create table foo_bar(id serial primary key);")
c.Check(m.Migrations[3].DownSQL, Equals, "drop table foo_bar;")
}
func (s *MigrateSuite) TestLoadMigrationsNoForward(c *C) {
var err error
m, err := migrate.NewMigrator(s.conn, versionTable)
c.Assert(err, IsNil)
m.Data = map[string]interface{}{"prefix": "foo"}
err = m.LoadMigrations("testdata/noforward")
c.Assert(err, Equals, migrate.ErrNoFwMigration)
}
func (s *MigrateSuite) TestMigrate(c *C) {
m := s.createSampleMigrator(c)
err := m.Migrate()
c.Assert(err, IsNil)
currentVersion := s.currentVersion(c)
c.Assert(currentVersion, Equals, int32(3))
}
func (s *MigrateSuite) TestMigrateToLifeCycle(c *C) {
m := s.createSampleMigrator(c)
var onStartCallUpCount int
var onStartCallDownCount int
m.OnStart = func(_ int32, _, direction, _ string) {
switch direction {
case "up":
onStartCallUpCount++
case "down":
onStartCallDownCount++
default:
c.Fatalf("Unexpected direction: %s", direction)
}
}
// Migrate from 0 up to 1
err := m.MigrateTo(1)
c.Assert(err, IsNil)
currentVersion := s.currentVersion(c)
c.Assert(currentVersion, Equals, int32(1))
c.Assert(s.tableExists(c, "t1"), Equals, true)
c.Assert(s.tableExists(c, "t2"), Equals, false)
c.Assert(s.tableExists(c, "t3"), Equals, false)
c.Assert(onStartCallUpCount, Equals, 1)
c.Assert(onStartCallDownCount, Equals, 0)
// Migrate from 1 up to 3
err = m.MigrateTo(3)
c.Assert(err, IsNil)
currentVersion = s.currentVersion(c)
c.Assert(currentVersion, Equals, int32(3))
c.Assert(s.tableExists(c, "t1"), Equals, true)
c.Assert(s.tableExists(c, "t2"), Equals, true)
c.Assert(s.tableExists(c, "t3"), Equals, true)
c.Assert(onStartCallUpCount, Equals, 3)
c.Assert(onStartCallDownCount, Equals, 0)
// Migrate from 3 to 3 is no-op
err = m.MigrateTo(3)
c.Assert(err, IsNil)
currentVersion = s.currentVersion(c)
c.Assert(s.tableExists(c, "t1"), Equals, true)
c.Assert(s.tableExists(c, "t2"), Equals, true)
c.Assert(s.tableExists(c, "t3"), Equals, true)
c.Assert(onStartCallUpCount, Equals, 3)
c.Assert(onStartCallDownCount, Equals, 0)
// Migrate from 3 down to 1
err = m.MigrateTo(1)
c.Assert(err, IsNil)
currentVersion = s.currentVersion(c)
c.Assert(currentVersion, Equals, int32(1))
c.Assert(s.tableExists(c, "t1"), Equals, true)
c.Assert(s.tableExists(c, "t2"), Equals, false)
c.Assert(s.tableExists(c, "t3"), Equals, false)
c.Assert(onStartCallUpCount, Equals, 3)
c.Assert(onStartCallDownCount, Equals, 2)
// Migrate from 1 down to 0
err = m.MigrateTo(0)
c.Assert(err, IsNil)
currentVersion = s.currentVersion(c)
c.Assert(currentVersion, Equals, int32(0))
c.Assert(s.tableExists(c, "t1"), Equals, false)
c.Assert(s.tableExists(c, "t2"), Equals, false)
c.Assert(s.tableExists(c, "t3"), Equals, false)
c.Assert(onStartCallUpCount, Equals, 3)
c.Assert(onStartCallDownCount, Equals, 3)
// Migrate back up to 3
err = m.MigrateTo(3)
c.Assert(err, IsNil)
currentVersion = s.currentVersion(c)
c.Assert(currentVersion, Equals, int32(3))
c.Assert(s.tableExists(c, "t1"), Equals, true)
c.Assert(s.tableExists(c, "t2"), Equals, true)
c.Assert(s.tableExists(c, "t3"), Equals, true)
c.Assert(onStartCallUpCount, Equals, 6)
c.Assert(onStartCallDownCount, Equals, 3)
}
func (s *MigrateSuite) TestMigrateToBoundaries(c *C) {
m := s.createSampleMigrator(c)
// Migrate to -1 is error
err := m.MigrateTo(-1)
c.Assert(err, ErrorMatches, "destination version -1 is outside the valid versions of 0 to 3")
// Migrate past end is error
err = m.MigrateTo(int32(len(m.Migrations)) + 1)
c.Assert(err, ErrorMatches, "destination version 4 is outside the valid versions of 0 to 3")
// When schema version says it is negative
s.Exec(c, "update "+versionTable+" set version=-1")
err = m.MigrateTo(int32(1))
c.Assert(err, ErrorMatches, "current version -1 is outside the valid versions of 0 to 3")
// When schema version says it is past the last migration
s.Exec(c, "update "+versionTable+" set version=4")
err = m.MigrateTo(int32(1))
c.Assert(err, ErrorMatches, "current version 4 is outside the valid versions of 0 to 3")
}
func (s *MigrateSuite) TestMigrateToIrreversible(c *C) {
m := s.createEmptyMigrator(c)
m.AppendMigration("Foo", "drop table if exists t3", "")
err := m.MigrateTo(1)
c.Assert(err, IsNil)
err = m.MigrateTo(0)
c.Assert(err, ErrorMatches, "Irreversible migration: 1 - Foo")
}
func (s *MigrateSuite) TestMigrateToDisableTx(c *C) {
m, err := migrate.NewMigratorEx(s.conn, versionTable, &migrate.MigratorOptions{DisableTx: true})
c.Assert(err, IsNil)
m.AppendMigration("Create t1", "create table t1(id serial);", "drop table t1;")
m.AppendMigration("Create t2", "create table t2(id serial);", "drop table t2;")
m.AppendMigration("Create t3", "create table t3(id serial);", "drop table t3;")
tx, err := s.conn.Begin()
c.Assert(err, IsNil)
err = m.MigrateTo(3)
c.Assert(err, IsNil)
currentVersion := s.currentVersion(c)
c.Assert(currentVersion, Equals, int32(3))
c.Assert(s.tableExists(c, "t1"), Equals, true)
c.Assert(s.tableExists(c, "t2"), Equals, true)
c.Assert(s.tableExists(c, "t3"), Equals, true)
err = tx.Rollback()
c.Assert(err, IsNil)
currentVersion = s.currentVersion(c)
c.Assert(currentVersion, Equals, int32(0))
c.Assert(s.tableExists(c, "t1"), Equals, false)
c.Assert(s.tableExists(c, "t2"), Equals, false)
c.Assert(s.tableExists(c, "t3"), Equals, false)
}
func Example_OnStartMigrationProgressLogging() {
conn, err := pgx.Connect(*defaultConnectionParameters)
if err != nil {
fmt.Printf("Unable to establish connection: %v", err)
return
}
// Clear any previous runs
if _, err = conn.Exec("drop table if exists schema_version"); err != nil {
fmt.Printf("Unable to drop schema_version table: %v", err)
return
}
var m *migrate.Migrator
m, err = migrate.NewMigrator(conn, "schema_version")
if err != nil {
fmt.Printf("Unable to create migrator: %v", err)
return
}
m.OnStart = func(_ int32, name, direction, _ string) {
fmt.Printf("Migrating %s: %s", direction, name)
}
m.AppendMigration("create a table", "create temporary table foo(id serial primary key)", "")
if err = m.Migrate(); err != nil {
fmt.Printf("Unexpected failure migrating: %v", err)
return
}
// Output:
// Migrating up: create a table
}
*/

View File

@ -0,0 +1,7 @@
create table t1(
id serial primary key
);
---- create above / drop below ----
drop table t1;

View File

@ -0,0 +1,7 @@
create table t2(
id serial primary key
);
---- create above / drop below ----
drop table t2;

View File

@ -0,0 +1,7 @@
create table duplicate(
id serial primary key
);
---- create above / drop below ----
drop table duplicate;

View File

@ -0,0 +1,7 @@
create table t1(
id serial primary key
);
---- create above / drop below ----
drop table t1;

View File

@ -0,0 +1 @@
drop table t2;

View File

@ -0,0 +1,7 @@
-- no SQL here
-- nor here, just all comments.
-- comment with space before
---- create above / drop below ----
drop table t1;

View File

@ -0,0 +1,7 @@
create table t1(
id serial primary key
);
---- create above / drop below ----
drop table t1;

View File

@ -0,0 +1,7 @@
create table t2(
id serial primary key
);
---- create above / drop below ----
drop table t2;

View File

@ -0,0 +1 @@
drop table t2;

View File

@ -0,0 +1,5 @@
create table {{.prefix}}_bar(id serial primary key);
---- create above / drop below ----
drop table {{.prefix}}_bar;

View File

@ -0,0 +1,5 @@
{{ template "shared/v1_001.sql" . }}
---- create above / drop below ----
drop view {{.prefix}}v1;

View File

@ -0,0 +1 @@
create view {{.prefix}}v1 as select * from t1;

View File

@ -0,0 +1 @@
-- This file should be ignored because it does not start with a number.

View File

@ -0,0 +1,175 @@
package rails
import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"github.com/adjust/gorails/marshal"
)
const (
salt = "encrypted cookie"
signSalt = "signed encrypted cookie"
authSalt = "authenticated encrypted cookie"
railsCipher = "aes-256-cbc"
railsCipher52 = "aes-256-gcm"
)
var (
errSessionData = errors.New("error decoding session data")
)
type Auth struct {
Cipher string
Secret string
Salt string
SignSalt string
AuthSalt string
}
func NewAuth(version, secret string) (*Auth, error) {
ra := &Auth{
Secret: secret,
Salt: salt,
SignSalt: signSalt,
AuthSalt: authSalt,
}
var v1, v2 int
var err error
sv := strings.Split(version, ".")
if len(sv) >= 2 {
if v1, err = strconv.Atoi(sv[0]); err != nil {
return nil, err
}
if v2, err = strconv.Atoi(sv[1]); err != nil {
return nil, err
}
}
// The AES-256-GCM cookie format arrived in Rails 5.2.
if v1 > 5 || (v1 == 5 && v2 >= 2) {
ra.Cipher = railsCipher52
} else {
ra.Cipher = railsCipher
}
return ra, nil
}
func (ra *Auth) ParseCookie(cookie string) (userID string, err error) {
var dcookie []byte
switch ra.Cipher {
case railsCipher:
dcookie, err = parseCookie(cookie, ra.Secret, ra.Salt, ra.SignSalt)
case railsCipher52:
dcookie, err = parseCookie52(cookie, ra.Secret, ra.AuthSalt)
default:
err = fmt.Errorf("unknown rails cookie cipher '%s'", ra.Cipher)
}
if err != nil {
return
}
if len(dcookie) == 0 {
err = errSessionData
return
}
if dcookie[0] != '{' {
userID, err = getUserId4(dcookie)
} else {
userID, err = getUserId(dcookie)
}
return
}
func ParseCookie(cookie string) (string, error) {
if len(cookie) == 0 {
return "", errSessionData
}
if cookie[0] != '{' {
return getUserId4([]byte(cookie))
}
return getUserId([]byte(cookie))
}
func getUserId(data []byte) (userID string, err error) {
var sessionData map[string]interface{}
err = json.Unmarshal(data, &sessionData)
if err != nil {
return
}
userKey, ok := sessionData["warden.user.user.key"]
if !ok {
err = errors.New("key 'warden.user.user.key' not found in session data")
return
}
items, ok := userKey.([]interface{})
if !ok {
err = errSessionData
return
}
if len(items) != 2 {
err = errSessionData
return
}
uids, ok := items[0].([]interface{})
if !ok {
err = errSessionData
return
}
uid, ok := uids[0].(float64)
if !ok {
err = errSessionData
return
}
userID = fmt.Sprintf("%d", int64(uid))
return
}
func getUserId4(data []byte) (userID string, err error) {
sessionData, err := marshal.CreateMarshalledObject(data).GetAsMap()
if err != nil {
return
}
wardenData, ok := sessionData["warden.user.user.key"]
if !ok {
err = errSessionData
return
}
wardenUserKey, err := wardenData.GetAsArray()
if err != nil {
return
}
if len(wardenUserKey) < 1 {
err = errSessionData
return
}
userData, err := wardenUserKey[0].GetAsArray()
if err != nil {
return
}
if len(userData) < 1 {
err = errSessionData
return
}
uid, err := userData[0].GetAsInteger()
if err != nil {
return
}
userID = fmt.Sprintf("%d", uid)
return
}
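// exampleParseSessionCookie is a minimal usage sketch (illustrative only;
// the secret below is a placeholder, not a working value).
func exampleParseSessionCookie(rawCookie string) (string, error) {
// Version "5.2" selects the AES-256-GCM cookie format; earlier
// versions fall back to AES-256-CBC.
ra, err := NewAuth("5.2", "your-rails-secret-key-base")
if err != nil {
return "", err
}
// Returns the warden user id, e.g. "1".
return ra.ParseCookie(rawCookie)
}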

View File

@ -0,0 +1,79 @@
package rails
import (
"testing"
)
func TestRailsEncryptedSession1(t *testing.T) {
cookie := "dDdjMW5jYUNYaFpBT1BSdFgwQkk4ZWNlT214L1FnM0pyZzZ1d21nSnVTTm9zS0ljN000S1JmT3cxcTNtRld2Ny0tQUFBQUFBQUFBQUFBQUFBQUFBQUFBQT09--75d8323b0f0e41cf4d5aabee1b229b1be76b83b6"
secret := "development_secret"
ra := Auth{
Cipher: railsCipher,
Secret: secret,
Salt: salt,
SignSalt: signSalt,
}
userID, err := ra.ParseCookie(cookie)
if err != nil {
t.Error(err)
return
}
if userID != "1" {
t.Errorf("Expecting userID 1 got %s", userID)
}
}
func TestRailsEncryptedSession52(t *testing.T) {
cookie :=
"fZy1lt%2FIuXh2cpQgy3wWjbvabh1AqJX%2Bt6qO4D95DOZIpDhMyK2HqPFeNoaBtrXCUa9%2BDQuvbs1GX6tuccEAp14QPLNhm0PPJS5U1pRHqPLWaqT%2BBPYP%2BY9bo677komm9CPuOCOqBKf7rv3%2F4ptLmVO7iefB%2FP2ZlkV1848Johv5q%2B5PGyMxII2BEQnBdS3Petw6lRu741Bquc8z9VofC3t4%2F%2BLxVz%2BvBbTg--VL0MorYITXB8Dj3W--0yr0sr6pRU%2FwlYMQ%2BpEifA%3D%3D"
secret := "0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566"
ra := Auth{
Cipher: railsCipher52,
Secret: secret,
AuthSalt: authSalt,
}
userID, err := ra.ParseCookie(cookie)
if err != nil {
t.Error(err)
return
}
if userID != "2" {
t.Errorf("Expecting userID 2 got %s", userID)
}
}
func TestRailsJsonSession(t *testing.T) {
sessionData := `{"warden.user.user.key":[[1],"secret"]}`
userID, err := getUserId([]byte(sessionData))
if err != nil {
t.Error(err)
return
}
if userID != "1" {
t.Errorf("Expecting userID 1 got %s", userID)
}
}
func TestRailsMarshaledSession(t *testing.T) {
sessionData := "\x04\b{\bI\"\x15member_return_to\x06:\x06ETI\"\x06/\x06;\x00TI\"\x19warden.user.user.key\x06;\x00T[\a[\x06i\aI\"\"$2a$11$6SgXdvO9hld82kQAvpEY3e\x06;\x00TI\"\x10_csrf_token\x06;\x00FI\"17lqwj1UsTTgbXBQKH4ipCNW32uLusvfSPds1txppMec=\x06;\x00F"
userID, err := getUserId4([]byte(sessionData))
if err != nil {
t.Error(err)
return
}
if userID != "2" {
t.Errorf("Expecting userID 2 got %s", userID)
}
}

View File

@ -0,0 +1,62 @@
package rails
import (
"crypto/aes"
"crypto/cipher"
"crypto/sha1"
"encoding/base64"
"net/url"
"strings"
"github.com/adjust/gorails/session"
"golang.org/x/crypto/pbkdf2"
)
func parseCookie(cookie, secretKeyBase, salt, signSalt string) ([]byte, error) {
return session.DecryptSignedCookie(
cookie,
secretKeyBase,
salt,
signSalt)
}
// {"session_id":"a71d6ffcd4ed5572ea2097f569eb95ef","warden.user.user.key":[[2],"$2a$11$q9Br7m4wJxQvF11hAHvTZO"],"_csrf_token":"HsYgrD2YBaWAabOYceN0hluNRnGuz49XiplmMPt43aY="}
func parseCookie52(cookie, secretKeyBase, authSalt string) ([]byte, error) {
ecookie, err := url.QueryUnescape(cookie)
if err != nil {
return nil, err
}
// Rails 5.2 encrypted cookies are three '--'-separated base64 sections:
// ciphertext--iv--authtag.
vectors := strings.Split(ecookie, "--")
if len(vectors) != 3 {
return nil, errSessionData
}
body, err := base64.RawStdEncoding.DecodeString(vectors[0])
if err != nil {
return nil, err
}
iv, err := base64.RawStdEncoding.DecodeString(vectors[1])
if err != nil {
return nil, err
}
tag, err := base64.StdEncoding.DecodeString(vectors[2])
if err != nil {
return nil, err
}
key := pbkdf2.Key([]byte(secretKeyBase), []byte(authSalt),
1000, 32, sha1.New)
c, err := aes.NewCipher(key)
if err != nil {
return nil, err
}
gcm, err := cipher.NewGCM(c)
if err != nil {
return nil, err
}
return gcm.Open(nil, iv, append(body, tag...), nil)
}
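// exampleParseCookie52RoundTrip is an illustrative round trip (not used by
// the server): it builds a cookie in the Rails 5.2 "body--iv--tag" layout
// with the same PBKDF2/AES-GCM parameters as above, then decodes it with
// parseCookie52. The secret is a placeholder.
func exampleParseCookie52RoundTrip() ([]byte, error) {
secret := "illustrative-secret-key-base"
plaintext := []byte(`{"session_id":"abc"}`)
key := pbkdf2.Key([]byte(secret), []byte(authSalt), 1000, 32, sha1.New)
c, err := aes.NewCipher(key)
if err != nil {
return nil, err
}
gcm, err := cipher.NewGCM(c)
if err != nil {
return nil, err
}
// Fixed zero nonce: fine for a one-off illustration, never for real use.
iv := make([]byte, gcm.NonceSize())
sealed := gcm.Seal(nil, iv, plaintext, nil)
body, tag := sealed[:len(sealed)-gcm.Overhead()], sealed[len(sealed)-gcm.Overhead():]
cookie := url.QueryEscape(
base64.RawStdEncoding.EncodeToString(body) + "--" +
base64.RawStdEncoding.EncodeToString(iv) + "--" +
base64.StdEncoding.EncodeToString(tag))
return parseCookie52(cookie, secret, authSalt)
}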

37
internal/serv/introsp.go Normal file
View File

@ -0,0 +1,37 @@
package serv
import "net/http"
// introspect writes a canned introspection response exposing just the Query
// root type, enough for GraphQL clients to connect to this endpoint.
//nolint: errcheck
func introspect(w http.ResponseWriter) {
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{
"data": {
"__schema": {
"queryType": {
"name": "Query"
},
"mutationType": null,
"subscriptionType": null
}
},
"extensions":{
"tracing":{
"version":1,
"startTime":"2019-06-04T19:53:31.093Z",
"endTime":"2019-06-04T19:53:31.108Z",
"duration":15219720,
"execution": {
"resolvers": [{
"path": ["__schema"],
"parentType": "Query",
"fieldName": "__schema",
"returnType": "__Schema!",
"startOffset": 50950,
"duration": 17187
}]
}
}
}
}`))
}

206
internal/serv/reload.go Normal file
View File

@ -0,0 +1,206 @@
// Package reload offers lightweight automatic reloading of running processes.
//
// After initialisation with reload.Do() any changes to the binary will
// restart the process.
//
// Example:
//
// go func() {
// err := reload.Do(log.Printf)
// if err != nil {
// panic(err)
// }
// }()
//
// A list of additional directories to watch can be added:
//
// go func() {
// err := reload.Do(log.Printf, reload.Dir("tpl", reloadTpl))
// if err != nil {
// panic(err)
// }
// }()
//
// Note that this package won't prevent race conditions (e.g. when assigning to
// a global templates variable). You'll need to use sync.RWMutex yourself.
package serv
import (
"fmt"
"os"
"path/filepath"
"runtime"
"strings"
"syscall"
"time"
"github.com/fsnotify/fsnotify"
"github.com/pkg/errors"
)
var binSelf string
type dir struct {
path string
cb func()
}
// Dir is an additional directory to watch for changes. Directories are watched
// non-recursively.
//
// The second argument is the callback to run when the directory changes.
// Use reload.ReExec() to restart the process.
func Dir(path string, cb func()) dir { return dir{path, cb} } // nolint: golint
// Do reloads the current process when its binary changes.
//
// The log function is used to display an informational startup message and
// errors. It works well with e.g. the standard log package or Logrus.
//
// The error return will only return initialisation errors. Once initialized it
// will use the log function to print errors, rather than return.
func Do(log func(string, ...interface{}), additional ...dir) error {
watcher, err := fsnotify.NewWatcher()
if err != nil {
return errors.Wrap(err, "cannot setup watcher")
}
defer watcher.Close() // nolint: errcheck
binSelf, err = self()
if err != nil {
return err
}
// Watch the directory, because a recompile renames the existing
// file (rather than rewriting it), so we won't get events for that.
dirs := make([]string, len(additional)+1)
dirs[0] = filepath.Dir(binSelf)
for i := range additional {
path, err := filepath.Abs(additional[i].path)
if err != nil {
return errors.Wrapf(err, "cannot get absolute path to %q: %v",
additional[i].path, err)
}
s, err := os.Stat(path)
if err != nil {
return errors.Wrap(err, "os.Stat")
}
if !s.IsDir() {
return errors.Errorf("not a directory: %q; can only watch directories",
additional[i].path)
}
additional[i].path = path
dirs[i+1] = path
}
done := make(chan bool)
go func() {
for {
select {
case err := <-watcher.Errors:
// Standard logger doesn't have anything other than Print,
// Panic, and Fatal :-/ Printf() is probably best.
log("reload error: %v", err)
case event := <-watcher.Events:
// Ensure that we use the correct events, as they are not uniform across
// platforms. See https://github.com/fsnotify/fsnotify/issues/74
if conf != nil && strings.HasSuffix(event.Name, "/allow.list") {
continue
}
if conf != nil && conf.Production {
continue
}
log("INF Reloading, file change detected: %s", event)
var trigger bool
switch runtime.GOOS {
case "darwin", "freebsd", "openbsd", "netbsd", "dragonfly":
trigger = event.Op&fsnotify.Create == fsnotify.Create
case "linux":
trigger = event.Op&fsnotify.Write == fsnotify.Write
default:
trigger = event.Op&fsnotify.Create == fsnotify.Create
log("reload: untested GOOS %q; this package may not work correctly", runtime.GOOS)
}
if !trigger {
continue
}
if event.Name == binSelf {
// Wait for writes to finish.
time.Sleep(100 * time.Millisecond)
ReExec()
}
for _, a := range additional {
if strings.HasPrefix(event.Name, a.path) {
time.Sleep(100 * time.Millisecond)
a.cb()
}
}
}
}
}()
for _, d := range dirs {
if err := watcher.Add(d); err != nil {
return errors.Wrapf(err, "cannot add %q to watcher", d)
}
}
add := ""
if len(additional) > 0 {
reldirs := make([]string, len(dirs)-1)
for i := range dirs[1:] {
reldirs[i] = relpath(dirs[i+1])
}
add = fmt.Sprintf(" (additional dirs: %s)", strings.Join(reldirs, ", "))
}
log("restarting %q when it changes%s", relpath(binSelf), add)
<-done
return nil
}
// ReExec replaces the current process with a new copy of itself.
func ReExec() {
err := syscall.Exec(binSelf, append([]string{binSelf}, os.Args[1:]...), os.Environ())
if err != nil {
log.Fatalf("ERR cannot restart: %s", err)
}
}
// Get the path to the executable.
func self() (string, error) {
bin := os.Args[0]
if !filepath.IsAbs(bin) {
var err error
bin, err = os.Executable()
if err != nil {
return "", errors.Wrapf(err,
"cannot get path to binary %q (launch with absolute path): %v",
os.Args[0], err)
}
}
return bin, nil
}
// Get path relative to cwd
func relpath(p string) string {
cwd, err := os.Getwd()
if err != nil {
return p
}
if strings.HasPrefix(p, cwd) {
return "./" + strings.TrimLeft(p[len(cwd):], "/")
}
return p
}

338
internal/serv/rice-box.go Normal file

File diff suppressed because one or more lines are too long

179
internal/serv/serv.go Normal file
View File

@ -0,0 +1,179 @@
package serv
import (
"context"
"fmt"
"net/http"
"os"
"os/signal"
"strings"
"time"
rice "github.com/GeertJohan/go.rice"
"github.com/NYTimes/gziphandler"
"github.com/dosco/super-graph/internal/serv/internal/auth"
)
func initWatcher() {
if conf == nil || !conf.WatchAndReload {
return
}
cpath := conf.cpath
var d dir
if len(cpath) == 0 || cpath == "./" {
d = Dir("./config", ReExec)
} else {
d = Dir(cpath, ReExec)
}
go func() {
err := Do(log.Printf, d)
if err != nil {
log.Fatalf("ERR %s", err)
}
}()
}
func startHTTP() {
var hostPort string
var appName string
defaultHP := "0.0.0.0:8080"
env := os.Getenv("GO_ENV")
if conf != nil {
appName = conf.AppName
hp := strings.SplitN(conf.HostPort, ":", 2)
if len(hp) == 2 {
if len(conf.Host) != 0 {
hp[0] = conf.Host
}
if len(conf.Port) != 0 {
hp[1] = conf.Port
}
hostPort = fmt.Sprintf("%s:%s", hp[0], hp[1])
}
}
if len(hostPort) == 0 {
hostPort = defaultHP
}
routes, err := routeHandler()
if err != nil {
log.Fatalf("ERR %s", err)
}
srv := &http.Server{
Addr: hostPort,
Handler: routes,
ReadTimeout: 5 * time.Second,
WriteTimeout: 10 * time.Second,
MaxHeaderBytes: 1 << 20,
}
idleConnsClosed := make(chan struct{})
go func() {
sigint := make(chan os.Signal, 1)
signal.Notify(sigint, os.Interrupt)
<-sigint
log.Println("INF shutdown signal received")
if err := srv.Shutdown(context.Background()); err != nil {
log.Fatalln("ERR graceful shutdown failed:", err)
}
close(idleConnsClosed)
}()
srv.RegisterOnShutdown(func() {
db.Close()
})
log.Printf("INF version: %s, git-branch: %s, host-port: %s, app-name: %s, env: %s\n",
version, gitBranch, hostPort, appName, env)
log.Printf("INF %s started\n", serverName)
if err := srv.ListenAndServe(); err != http.ErrServerClosed {
log.Fatalln("INF server closed")
}
<-idleConnsClosed
}
func routeHandler() (http.Handler, error) {
mux := http.NewServeMux()
if conf == nil {
return mux, nil
}
routes := map[string]http.Handler{
"/health": http.HandlerFunc(health),
"/api/v1/graphql": apiV1Handler(),
}
if err := setActionRoutes(routes); err != nil {
return nil, err
}
if conf.WebUI {
routes["/"] = http.FileServer(rice.MustFindBox("./web/build").HTTPBox())
}
if conf.HTTPGZip {
gz := gziphandler.MustNewGzipLevelHandler(6)
for k, v := range routes {
routes[k] = gz(v)
}
}
for k, v := range routes {
mux.Handle(k, v)
}
fn := func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Server", serverName)
mux.ServeHTTP(w, r)
}
return http.HandlerFunc(fn), nil
}
func setActionRoutes(routes map[string]http.Handler) error {
var err error
for _, a := range conf.Actions {
a := a // copy the loop variable; newAction stores this pointer in a closure
var fn http.Handler
fn, err = newAction(&a)
if err != nil {
break
}
p := fmt.Sprintf("/api/v1/actions/%s", strings.ToLower(a.Name))
if ac := findAuth(a.AuthName); ac != nil {
routes[p], err = auth.WithAuth(fn, ac)
} else {
routes[p] = fn
}
if err != nil {
return err
}
}
return nil
}
func findAuth(name string) *auth.Auth {
for _, a := range conf.Auths {
if strings.EqualFold(a.Name, name) {
return &a
}
}
return nil
}

43
internal/serv/sqllog.go Normal file
View File

@ -0,0 +1,43 @@
package serv
// import (
// "context"
// "github.com/jackc/pgx/v4"
// "github.com/rs/zerolog"
// )
// type Logger struct {
// logger zerolog.Logger
// }
// // NewLogger accepts a zerolog.Logger as input and returns a new custom pgx
// // logging fascade as output.
// func NewSQLLogger(logger zerolog.Logger) *Logger {
// return &Logger{
// logger: // logger.With().Logger(),
// }
// }
// func (pl *Logger) Log(ctx context.Context, level pgx.LogLevel, msg string, data map[string]interface{}) {
// var zlevel zerolog.Level
// switch level {
// case pgx.LogLevelNone:
// zlevel = zerolog.NoLevel
// case pgx.LogLevelError:
// zlevel = zerolog.ErrorLevel
// case pgx.LogLevelWarn:
// zlevel = zerolog.WarnLevel
// case pgx.LogLevelDebug, pgx.LogLevelInfo:
// zlevel = zerolog.DebugLevel
// default:
// zlevel = zerolog.DebugLevel
// }
// if sql, ok := data["sql"]; ok {
// delete(data, "sql")
// pl.// logger.WithLevel(zlevel).Fields(data).Msg(sql.(string))
// } else {
// pl.// logger.WithLevel(zlevel).Fields(data).Msg(msg)
// }
// }

View File

@ -0,0 +1,17 @@
-- Write your migrate up statements here
CREATE TABLE public.users (
id bigint GENERATED ALWAYS AS IDENTITY PRIMARY KEY,
full_name text,
email text UNIQUE NOT NULL CHECK (length(email) < 255),
created_at timestamptz NOT NULL DEFAULT NOW(),
updated_at timestamptz NOT NULL DEFAULT NOW()
);
---- create above / drop below ----
-- Write your down migrate statements here. If this migration is irreversible
-- then delete the separator line above.
DROP TABLE public.users;

View File

@ -0,0 +1,4 @@
FROM dosco/super-graph:latest
WORKDIR /
COPY config/* /config/

238
internal/serv/tmpl/dev.yml Normal file
View File

@ -0,0 +1,238 @@
app_name: "{% app_name %} Development"
host_port: 0.0.0.0:8080
web_ui: true
# debug, error, warn, info
log_level: "info"
# enable or disable http compression (uses gzip)
http_compress: true
# When production mode is 'true' only queries
# from the allow list are permitted.
# When it's 'false' all queries are saved to
# the allow list in ./config/allow.list
production: false
# Throw a 401 on auth failure for queries that need auth
auth_fail_block: false
# Latency tracing for database queries and remote joins
# the resulting latency information is returned with the
# response
enable_tracing: true
# Watch the config folder and reload Super Graph
# with the new configs when a change is detected
reload_on_config_change: true
# File that points to the database seeding script
# seed_file: seed.js
# Path pointing to where the migrations can be found
# this must be a relative path under the config path
migrations_path: ./migrations
# Secret key for general encryption operations like
# encrypting the cursor data
secret_key: supercalifajalistics
# CORS: A list of origins a cross-domain request can be executed from.
# If the special * value is present in the list, all origins will be allowed.
# An origin may contain a wildcard (*) to replace 0 or more
# characters (i.e.: http://*.domain.com).
cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
cors_debug: false
# Postgres related environment Variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
# SG_DATABASE_USER
# SG_DATABASE_PASSWORD
# Auth related environment Variables
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE
# inflections:
# person: people
# sheep: sheep
auth:
# Can be 'rails', 'jwt' or 'header'
type: rails
cookie: _{% app_name_slug %}_session
# Comment this out if you want to disable setting
# the user_id via a header for testing.
# Disable in production
creds_in_header: true
rails:
# Rails version; this is used for reading the
# various cookie formats.
version: 5.2
# Found in 'Rails.application.config.secret_key_base'
secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566
# Remote cookie store. (memcache or redis)
# url: redis://redis:6379
# password: ""
# max_idle: 80
# max_active: 12000
# In most cases you don't need these
# salt: "encrypted cookie"
# sign_salt: "signed encrypted cookie"
# auth_salt: "authenticated encrypted cookie"
# jwt:
# provider: auth0
# secret: abc335bfcfdb04e50db5bb0a4d67ab9
# public_key_file: /secrets/public_key.pem
# public_key_type: ecdsa #rsa
# header:
# name: dnt
# exists: true
# value: localhost:8080
# You can add additional named auths to use with actions
# In this example actions using this auth can only be
# called from the Google Appengine Cron service that
# sets a special header on all its requests
auths:
- name: from_taskqueue
type: header
header:
name: X-Appengine-Cron
exists: true
database:
type: postgres
host: db
port: 5432
dbname: {% app_name_slug %}_development
user: postgres
password: postgres
#schema: "public"
#pool_size: 10
#max_retries: 0
#log_level: "debug"
# Set session variable "user.id" to the user id
# Enable this if you need the user id in triggers, etc
set_user_id: false
# database ping timeout is used for db health checking
ping_timeout: 1m
# Set up a secure TLS encrypted db connection
enable_tls: false
# Required for tls. For example with Google Cloud SQL it's
# <gcp-project-id>:<cloud-sql-instance>
# server_name: blah
# Required for tls. Can be a file path or the contents of the pem file
# server_cert: ./server-ca.pem
# Required for tls. Can be a file path or the contents of the pem file
# client_cert: ./client-cert.pem
# Required for tls. Can be a file path or the contents of the pem file
# client_key: ./client-key.pem
# Define additional variables here to be used with filters
variables:
#admin_account_id: "5"
admin_account_id: "sql:select id from users where admin = true limit 1"
# Field and table names that you wish to block
blocklist:
- ar_internal_metadata
- schema_migrations
- secret
- password
- encrypted
- token
# Create custom actions with their own api endpoints
# For example, the action below will be available at /api/v1/actions/refresh_leaderboard_users
# A request to this URL will execute the configured SQL query,
# which in this case refreshes a materialized view in the database.
# A calling sketch in Go follows at the end of this file.
# The auth_name must match one of the configured auths
actions:
- name: refresh_leaderboard_users
sql: REFRESH MATERIALIZED VIEW CONCURRENTLY "leaderboard_users"
auth_name: from_taskqueue
tables:
- name: customers
remotes:
- name: payments
id: stripe_id
url: http://rails_app:3000/stripe/$id
path: data
# debug: true
pass_headers:
- cookie
set_headers:
- name: Host
value: 0.0.0.0
# - name: Authorization
# value: Bearer <stripe_api_key>
- # You can create new fields that have a
# real db table backing them
name: me
table: users
#roles_query: "SELECT * FROM users WHERE id = $user_id"
roles:
- name: anon
tables:
- name: users
query:
limit: 10
- name: user
tables:
- name: users
query:
filters: ["{ id: { _eq: $user_id } }"]
- name: products
query:
limit: 50
filters: ["{ user_id: { eq: $user_id } }"]
disable_functions: false
insert:
filters: ["{ user_id: { eq: $user_id } }"]
presets:
- user_id: "$user_id"
- created_at: "now"
update:
filters: ["{ user_id: { eq: $user_id } }"]
presets:
- updated_at: "now"
delete:
block: true
# - name: admin
# match: id = 1000
# tables:
# - name: users
# filters: []
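As referenced in the actions section above, a hedged sketch in Go of calling the custom action endpoint: the path follows the /api/v1/actions/<name> convention from serv.go, and the X-Appengine-Cron header satisfies the from_taskqueue header auth. The host and port are assumptions for a local run.

package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	req, err := http.NewRequest("GET",
		"http://localhost:8080/api/v1/actions/refresh_leaderboard_users", nil)
	if err != nil {
		log.Fatal(err)
	}
	// The from_taskqueue auth only checks that this header exists.
	req.Header.Set("X-Appengine-Cron", "true")
	res, err := http.DefaultClient.Do(req)
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()
	fmt.Println(res.Status)
}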

View File

@ -0,0 +1,59 @@
version: '3.4'
services:
# Postgres DB
db:
image: postgres:12
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
ports:
- "5432:5432"
# Yugabyte DB
# yb-master:
# image: yugabytedb/yugabyte:latest
# container_name: yb-master-n1
# command: [ "/home/yugabyte/bin/yb-master",
# "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
# "--master_addresses=yb-master-n1:7100",
# "--replication_factor=1",
# "--enable_ysql=true"]
# ports:
# - "7000:7000"
# environment:
# SERVICE_7000_NAME: yb-master
# db:
# image: yugabytedb/yugabyte:latest
# container_name: yb-tserver-n1
# command: [ "/home/yugabyte/bin/yb-tserver",
# "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
# "--start_pgsql_proxy",
# "--tserver_master_addrs=yb-master-n1:7100"]
# ports:
# - "9042:9042"
# - "6379:6379"
# - "5433:5433"
# - "9000:9000"
# environment:
# SERVICE_5433_NAME: ysql
# SERVICE_9042_NAME: ycql
# SERVICE_6379_NAME: yedis
# SERVICE_9000_NAME: yb-tserver
# depends_on:
# - yb-master
{% app_name_slug %}_api:
image: dosco/super-graph:latest
environment:
GO_ENV: "development"
# Uncomment below for Yugabyte DB
# SG_DATABASE_PORT: 5433
# SG_DATABASE_USER: yugabyte
# SG_DATABASE_PASSWORD: yugabyte
volumes:
- ./config:/config
ports:
- "8080:8080"
depends_on:
- db

View File

@ -0,0 +1,96 @@
# Inherit config from this other config file
# so I only need to overwrite some values
inherits: dev
app_name: "{% app_name %} Production"
host_port: 0.0.0.0:8080
web_ui: false
# debug, error, warn, info
log_level: "warn"
# enable or disable http compression (uses gzip)
http_compress: true
# When production mode is 'true' only queries
# from the allow list are permitted.
# When it's 'false' all queries are saved to
# the allow list in ./config/allow.list
production: true
# Throw a 401 on auth failure for queries that need auth
auth_fail_block: true
# Latency tracing for database queries and remote joins
# the resulting latency information is returned with the
# response
enable_tracing: false
# Watch the config folder and reload Super Graph
# with the new configs when a change is detected
reload_on_config_change: false
# File that points to the database seeding script
# seed_file: seed.js
# Path pointing to where the migrations can be found
# migrations_path: migrations
# Secret key for general encryption operations like
# encrypting the cursor data
# secret_key: supercalifajalistics
# CORS: A list of origins a cross-domain request can be executed from.
# If the special * value is present in the list, all origins will be allowed.
# An origin may contain a wildcard (*) to replace 0 or more
# characters (i.e.: http://*.domain.com).
# cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
# cors_debug: false
# Postgres related environment Variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
# SG_DATABASE_USER
# SG_DATABASE_PASSWORD
# Auth related environment Variables
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE
database:
type: postgres
host: db
port: 5432
dbname: {% app_name_slug %}_production
user: postgres
password: postgres
#pool_size: 10
#max_retries: 0
#log_level: "debug"
# Set session variable "user.id" to the user id
# Enable this if you need the user id in triggers, etc
set_user_id: false
# database ping timeout is used for db health checking
ping_timeout: 5m
# Set up a secure TLS encrypted db connection
enable_tls: false
# Required for tls. For example with Google Cloud SQL it's
# <gcp-project-id>:<cloud-sql-instance>
# server_name: blah
# Required for tls. Can be a file path or the contents of the pem file
# server_cert: ./server-ca.pem
# Required for tls. Can be a file path or the contents of the pem file
# client_cert: ./client-cert.pem
# Required for tls. Can be a file path or the contents of the pem file
# client_key: ./client-key.pem

View File

@ -0,0 +1,19 @@
// Example script to seed database
var users = [];
for (var i = 0; i < 10; i++) {
var data = {
full_name: fake.name(),
email: fake.email()
}
var res = graphql(" \
mutation { \
user(insert: $data) { \
id \
} \
}", { data: data })
users.push(res.user)
}

121
internal/serv/utils.go Normal file
View File

@ -0,0 +1,121 @@
package serv
import (
"bytes"
"crypto/sha1"
"encoding/hex"
"io"
"os"
"sort"
"strings"
"sync"
"github.com/dosco/super-graph/jsn"
)
// gqlHash returns a SHA1 hex digest of the query with whitespace and case
// normalized, so equivalent but differently formatted queries hash the same.
// The role and the sorted keys of the vars JSON are mixed into the digest.
// nolint: errcheck
func gqlHash(b string, vars []byte, role string) string {
b = strings.TrimSpace(b)
h := sha1.New()
query := "query"
s, e := 0, 0
space := []byte{' '}
starting := true
var b0, b1 byte
if len(b) == 0 {
return ""
}
for {
if starting && b[e] == 'q' {
n := 0
se := e
for e < len(b) && n < len(query) && b[e] == query[n] {
n++
e++
}
if n != len(query) {
io.WriteString(h, strings.ToLower(b[se:e]))
}
}
if e >= len(b) {
break
}
if ws(b[e]) {
for e < len(b) && ws(b[e]) {
e++
}
if e < len(b) {
b1 = b[e]
}
if al(b0) && al(b1) {
h.Write(space)
}
} else {
starting = false
s = e
for e < len(b) && !ws(b[e]) {
e++
}
if e != 0 {
b0 = b[(e - 1)]
}
io.WriteString(h, strings.ToLower(b[s:e]))
}
if e >= len(b) {
break
}
}
if len(role) != 0 {
io.WriteString(h, role)
}
if len(vars) == 0 {
return hex.EncodeToString(h.Sum(nil))
}
fields := jsn.Keys([]byte(vars))
sort.Slice(fields, func(i, j int) bool {
return bytes.Compare(fields[i], fields[j]) == -1
})
for i := range fields {
h.Write(fields[i])
}
return hex.EncodeToString(h.Sum(nil))
}
func ws(b byte) bool {
return b == ' ' || b == '\n' || b == '\t' || b == ','
}
func al(b byte) bool {
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
}
// fatalInProd exits with the error in production; in development it logs the
// error and blocks forever so the reload watcher can restart the process.
func fatalInProd(err error, msg string) {
var wg sync.WaitGroup
if isDev() {
log.Printf("ERR %s: %s", msg, err)
} else {
log.Fatalf("ERR %s: %s", msg, err)
}
// Block indefinitely (only reached in dev; Fatalf exits in prod).
wg.Add(1)
wg.Wait()
}
func isDev() bool {
return strings.HasPrefix(os.Getenv("GO_ENV"), "dev")
}
func sanitize(value string) string {
return strings.ToLower(strings.TrimSpace(value))
}
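// exampleGqlHash is a small equivalence sketch (illustrative only; the tests
// below cover this in depth): formatting differences don't change the hash.
func exampleGqlHash() bool {
h1 := gqlHash("{ products( limit: 30 ) { id name } }", nil, "user")
h2 := gqlHash("{\n  products(limit: 30) {\n    id\n    name\n  }\n}", nil, "user")
return h1 == h2 // true: whitespace is normalized before hashing
}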

231
internal/serv/utils_test.go Normal file
View File

@ -0,0 +1,231 @@
package serv
import (
"strings"
"testing"
)
func TestGQLHash1(t *testing.T) {
var v1 = `
products(
limit: 30,
where: { id: { AND: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
}`
var v2 = `
products
(limit: 30, where: { id: { AND: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
} `
h1 := gqlHash(v1, nil, "")
h2 := gqlHash(v2, nil, "")
if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should")
}
}
func TestGQLHash2(t *testing.T) {
var v1 = `
{
products(
limit: 30
order_by: { price: desc }
distinct: [price]
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }
) {
id
name
price
user {
id
email
}
}
}`
var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } `
h1 := gqlHash(v1, nil, "")
h2 := gqlHash(v2, nil, "")
if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should")
}
}
func TestGQLHash3(t *testing.T) {
var v1 = `users {
id
email
picture: avatar
products(limit: 2, where: {price: {gt: 10}}) {
id
name
description
}
}`
var v2 = `
users {
id
email
picture: avatar
products(limit: 2, where: {price: {gt: 10}}) {
id
name
description
}
}
`
h1 := gqlHash(v1, nil, "")
h2 := gqlHash(v2, nil, "")
if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should")
}
}
func TestGQLHash4(t *testing.T) {
var v1 = `
query {
products(
limit: 30
order_by: { price: desc }
distinct: [price]
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }
) {
id
name
price
user {
id
email
}
}
}`
var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } `
h1 := gqlHash(v1, nil, "")
h2 := gqlHash(v2, nil, "")
if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should")
}
}
func TestGQLHashWithVars1(t *testing.T) {
var q1 = `
products(
limit: 30,
where: { id: { AND: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
}`
var v1 = `
{
"insert": {
"name": "Hello",
"description": "World",
"created_at": "now",
"updated_at": "now",
"test": { "type2": "b", "type1": "a" }
},
"user": 123
}`
var q2 = `
products
(limit: 30, where: { id: { AND: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
} `
var v2 = `{
"insert": {
"created_at": "now",
"test": { "type1": "a", "type2": "b" },
"name": "Hello",
"updated_at": "now",
"description": "World"
},
"user": 123
}`
h1 := gqlHash(q1, []byte(v1), "user")
h2 := gqlHash(q2, []byte(v2), "user")
if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should")
}
}
func TestGQLHashWithVars2(t *testing.T) {
var q1 = `
products(
limit: 30,
where: { id: { AND: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
}`
var v1 = `
{
"insert": [{
"name": "Hello",
"description": "World",
"created_at": "now",
"updated_at": "now",
"test": { "type2": "b", "type1": "a" }
},
{
"name": "Hello",
"description": "World",
"created_at": "now",
"updated_at": "now",
"test": { "type2": "b", "type1": "a" }
}],
"user": 123
}`
var q2 = `
products
(limit: 30, where: { id: { AND: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
} `
var v2 = `{
"insert": {
"created_at": "now",
"test": { "type1": "a", "type2": "b" },
"name": "Hello",
"updated_at": "now",
"description": "World"
},
"user": 123
}`
h1 := gqlHash(q1, []byte(v1), "user")
h2 := gqlHash(q2, []byte(v2), "user")
if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should")
}
}

26
internal/serv/web/.gitignore vendored Executable file
View File

@ -0,0 +1,26 @@
# See https://help.github.com/articles/ignoring-files/ for more about ignoring files.
# dependencies
/node_modules
# testing
/coverage
# production
# /build
# development
/src/components/dataviz/core/*.js.map
# misc
.DS_Store
.env.local
.env.development.local
.env.test.local
.env.production.local
npm-debug.log*
yarn-debug.log*
yarn-error.log*
.vscode

View File

@ -0,0 +1,30 @@
{
"files": {
"main.css": "/static/css/main.c6b5c55c.chunk.css",
"main.js": "/static/js/main.04d74040.chunk.js",
"main.js.map": "/static/js/main.04d74040.chunk.js.map",
"runtime-main.js": "/static/js/runtime-main.4aea9da3.js",
"runtime-main.js.map": "/static/js/runtime-main.4aea9da3.js.map",
"static/js/2.03370bd3.chunk.js": "/static/js/2.03370bd3.chunk.js",
"static/js/2.03370bd3.chunk.js.map": "/static/js/2.03370bd3.chunk.js.map",
"index.html": "/index.html",
"precache-manifest.e33bc3c7c6774d7032c490820c96901d.js": "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js",
"service-worker.js": "/service-worker.js",
"static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
"static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
"static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
"static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
"static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
"static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
"static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
"static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
"static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
"static/media/logo.png": "/static/media/logo.57ee3b60.png"
},
"entrypoints": [
"static/js/runtime-main.4aea9da3.js",
"static/js/2.03370bd3.chunk.js",
"static/css/main.c6b5c55c.chunk.css",
"static/js/main.04d74040.chunk.js"
]
}

Binary file not shown.


View File

@ -0,0 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="manifest" href="/manifest.json"/><link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet"><title>Super Graph - GraphQL API for Rails</title><link href="/static/css/main.c6b5c55c.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(i){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],l=0,f=[];l<n.length;l++)t=n[l],Object.prototype.hasOwnProperty.call(p,t)&&p[t]&&f.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(i[r]=o[r]);for(s&&s(e);f.length;)f.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=l(l.s=t[0]))}return e}var t={},p={1:0},c=[];function l(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return i[e].call(r.exports,r,r.exports,l),r.l=!0,r.exports}l.m=i,l.c=t,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(r,e){if(1&e&&(r=l(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)l.d(t,n,function(e){return r[e]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var r=this.webpackJsonpweb=this.webpackJsonpweb||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/2.03370bd3.chunk.js"></script><script src="/static/js/main.04d74040.chunk.js"></script></body></html>

View File

@ -0,0 +1,15 @@
{
"short_name": "Super Graph",
"name": "Super Graph - GraphQL API for Rails",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View File

@ -0,0 +1,58 @@
self.__precacheManifest = (self.__precacheManifest || []).concat([
{
"revision": "ecdae64182d05c64e7f7f200ed03a4ed",
"url": "/index.html"
},
{
"revision": "6e9467dc213a3e2b84ea",
"url": "/static/css/main.c6b5c55c.chunk.css"
},
{
"revision": "c156a125990ddf5dcc51",
"url": "/static/js/2.03370bd3.chunk.js"
},
{
"revision": "6e9467dc213a3e2b84ea",
"url": "/static/js/main.04d74040.chunk.js"
},
{
"revision": "427262b6771d3f49a7c5",
"url": "/static/js/runtime-main.4aea9da3.js"
},
{
"revision": "5ab204b9b95c06640dbefae9a65b1db2",
"url": "/static/media/GraphQLLanguageService.js.5ab204b9.flow"
},
{
"revision": "4ce7ba191f7ebee4426768f246b2f0e0",
"url": "/static/media/autocompleteUtils.js.4ce7ba19.flow"
},
{
"revision": "7f98f032085704c8943ec2d1925c7c84",
"url": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow"
},
{
"revision": "4dbec62f1d8e8417afb9cbd19f1268c3",
"url": "/static/media/getDefinition.js.4dbec62f.flow"
},
{
"revision": "65b0979ac23feca49e4411883fd8eaab",
"url": "/static/media/getDiagnostics.js.65b0979a.flow"
},
{
"revision": "d94118379d362fc161aa1246bcc14d43",
"url": "/static/media/getHoverInformation.js.d9411837.flow"
},
{
"revision": "c04e3998712b37a96f0bfd283fa06b52",
"url": "/static/media/getOutline.js.c04e3998.flow"
},
{
"revision": "02c24280c5e4a7eb3c6cfcb079a8f1e3",
"url": "/static/media/index.js.02c24280.flow"
},
{
"revision": "57ee3b6084cb9d3c754cc12d25a98035",
"url": "/static/media/logo.57ee3b60.png"
}
]);

View File

@ -0,0 +1,39 @@
/**
* Welcome to your Workbox-powered service worker!
*
* You'll need to register this file in your web app and you should
* disable HTTP caching for this file too.
* See https://goo.gl/nhQhGp
*
* The rest of the code is auto-generated. Please don't update this file
* directly; instead, make changes to your Workbox build configuration
* and re-run your build process.
* See https://goo.gl/2aRDsh
*/
importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");
importScripts(
"/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js"
);
self.addEventListener('message', (event) => {
if (event.data && event.data.type === 'SKIP_WAITING') {
self.skipWaiting();
}
});
workbox.core.clientsClaim();
/**
* The workboxSW.precacheAndRoute() method efficiently caches and responds to
* requests for URLs in the manifest.
* See https://goo.gl/S9QRab
*/
self.__precacheManifest = [].concat(self.__precacheManifest || []);
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
workbox.routing.registerNavigationRoute(workbox.precaching.getCacheKeyForURL("/index.html"), {
blacklist: [/^\/_/,/\/[^/?]+\.[^/]+$/],
});

View File

@ -0,0 +1,2 @@
body{margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica Neue,sans-serif;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;background-color:#0f202d}code{font-family:source-code-pro,Menlo,Monaco,Consolas,Courier New,monospace}.playground>div:nth-child(2){height:calc(100vh - 131px)}
/*# sourceMappingURL=main.c6b5c55c.chunk.css.map */

View File

@ -0,0 +1 @@
{"version":3,"sources":["index.css"],"names":[],"mappings":"AAAA,KACE,QAAS,CACT,SAAU,CACV,mIAEY,CACZ,kCAAmC,CACnC,iCAAkC,CAClC,wBACF,CAEA,KACE,uEAEF,CAEA,6BACE,0BACF","file":"main.c6b5c55c.chunk.css","sourcesContent":["body {\n margin: 0;\n padding: 0;\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", \"Roboto\", \"Oxygen\",\n \"Ubuntu\", \"Cantarell\", \"Fira Sans\", \"Droid Sans\", \"Helvetica Neue\",\n sans-serif;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n background-color: #0f202d;\n}\n\ncode {\n font-family: source-code-pro, Menlo, Monaco, Consolas, \"Courier New\",\n monospace;\n}\n\n.playground > div:nth-child(2) {\n height: calc(100vh - 131px);\n}\n"]}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
(this.webpackJsonpweb=this.webpackJsonpweb||[]).push([[0],{163:function(e,t,n){var r={".":61,"./":61,"./GraphQLLanguageService":117,"./GraphQLLanguageService.js":117,"./GraphQLLanguageService.js.flow":315,"./autocompleteUtils":91,"./autocompleteUtils.js":91,"./autocompleteUtils.js.flow":316,"./getAutocompleteSuggestions":77,"./getAutocompleteSuggestions.js":77,"./getAutocompleteSuggestions.js.flow":317,"./getDefinition":92,"./getDefinition.js":92,"./getDefinition.js.flow":318,"./getDiagnostics":94,"./getDiagnostics.js":94,"./getDiagnostics.js.flow":319,"./getHoverInformation":95,"./getHoverInformation.js":95,"./getHoverInformation.js.flow":320,"./getOutline":116,"./getOutline.js":116,"./getOutline.js.flow":321,"./index":61,"./index.js":61,"./index.js.flow":322};function o(e){var t=a(e);return n(t)}function a(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}o.keys=function(){return Object.keys(r)},o.resolve=a,e.exports=o,o.id=163},190:function(e,t,n){"use strict";(function(e){var r=n(100),o=n(101),a=n(201),i=n(191),s=n(202),l=n(5),c=n.n(l),u=n(20),g=n(130),f=(n(441),window.fetch);window.fetch=function(){return arguments[1].credentials="include",Promise.resolve(f.apply(e,arguments))};var p=function(e){function t(){return Object(r.a)(this,t),Object(a.a)(this,Object(i.a)(t).apply(this,arguments))}return Object(s.a)(t,e),Object(o.a)(t,[{key:"render",value:function(){return c.a.createElement("div",null,c.a.createElement("header",{style:{background:"#09141b",color:"#03a9f4",letterSpacing:"0.15rem",height:"65px",display:"flex",alignItems:"center"}},c.a.createElement("h3",{style:{textDecoration:"none",margin:"0px",fontSize:"18px"}},c.a.createElement("span",{style:{textTransform:"uppercase",marginLeft:"20px",paddingRight:"10px",borderRight:"1px solid #fff"}},"Super Graph"),c.a.createElement("span",{style:{fontSize:"16px",marginLeft:"10px",color:"#fff"}},"Instant GraphQL"))),c.a.createElement(u.Provider,{store:g.store},c.a.createElement(g.Playground,{endpoint:"/api/v1/graphql",settings:"{ 'schema.polling.enable': false, 'request.credentials': 'include', 'general.betaUpdates': true, 'editor.reuseHeaders': true, 'editor.theme': 'dark' }"})))}}]),t}(l.Component);t.a=p}).call(this,n(32))},205:function(e,t,n){e.exports=n(206)},206:function(e,t,n){"use strict";n.r(t);var r=n(5),o=n.n(r),a=n(52),i=n.n(a),s=n(190);i.a.render(o.a.createElement(s.a,null),document.getElementById("root"))},441:function(e,t,n){}},[[205,1,2]]]);
//# sourceMappingURL=main.04d74040.chunk.js.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
!function(e){function r(r){for(var n,l,f=r[0],i=r[1],a=r[2],c=0,s=[];c<f.length;c++)l=f[c],Object.prototype.hasOwnProperty.call(o,l)&&o[l]&&s.push(o[l][0]),o[l]=0;for(n in i)Object.prototype.hasOwnProperty.call(i,n)&&(e[n]=i[n]);for(p&&p(r);s.length;)s.shift()();return u.push.apply(u,a||[]),t()}function t(){for(var e,r=0;r<u.length;r++){for(var t=u[r],n=!0,f=1;f<t.length;f++){var i=t[f];0!==o[i]&&(n=!1)}n&&(u.splice(r--,1),e=l(l.s=t[0]))}return e}var n={},o={1:0},u=[];function l(r){if(n[r])return n[r].exports;var t=n[r]={i:r,l:!1,exports:{}};return e[r].call(t.exports,t,t.exports,l),t.l=!0,t.exports}l.m=e,l.c=n,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!==typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(e,r){if(1&r&&(e=l(e)),8&r)return e;if(4&r&&"object"===typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var n in e)l.d(t,n,function(r){return e[r]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var f=this.webpackJsonpweb=this.webpackJsonpweb||[],i=f.push.bind(f);f.push=r,f=f.slice();for(var a=0;a<f.length;a++)r(f[a]);var p=i;t()}([]);
//# sourceMappingURL=runtime-main.4aea9da3.js.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,328 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
DocumentNode,
FragmentSpreadNode,
FragmentDefinitionNode,
OperationDefinitionNode,
TypeDefinitionNode,
NamedTypeNode,
} from 'graphql';
import type {
CompletionItem,
DefinitionQueryResult,
Diagnostic,
GraphQLCache,
GraphQLConfig,
GraphQLProjectConfig,
Uri,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import type {Hover} from 'vscode-languageserver-types';
import {Kind, parse, print} from 'graphql';
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
import {getHoverInformation} from './getHoverInformation';
import {validateQuery, getRange, SEVERITY} from './getDiagnostics';
import {
getDefinitionQueryResultForFragmentSpread,
getDefinitionQueryResultForDefinitionNode,
getDefinitionQueryResultForNamedType,
} from './getDefinition';
import {getASTNodeAtPosition} from 'graphql-language-service-utils';
const {
FRAGMENT_DEFINITION,
OBJECT_TYPE_DEFINITION,
INTERFACE_TYPE_DEFINITION,
ENUM_TYPE_DEFINITION,
UNION_TYPE_DEFINITION,
SCALAR_TYPE_DEFINITION,
INPUT_OBJECT_TYPE_DEFINITION,
SCALAR_TYPE_EXTENSION,
OBJECT_TYPE_EXTENSION,
INTERFACE_TYPE_EXTENSION,
UNION_TYPE_EXTENSION,
ENUM_TYPE_EXTENSION,
INPUT_OBJECT_TYPE_EXTENSION,
DIRECTIVE_DEFINITION,
FRAGMENT_SPREAD,
OPERATION_DEFINITION,
NAMED_TYPE,
} = Kind;
export class GraphQLLanguageService {
_graphQLCache: GraphQLCache;
_graphQLConfig: GraphQLConfig;
constructor(cache: GraphQLCache) {
this._graphQLCache = cache;
this._graphQLConfig = cache.getGraphQLConfig();
}
async getDiagnostics(
query: string,
uri: Uri,
isRelayCompatMode?: boolean,
): Promise<Array<Diagnostic>> {
    // Perform syntax diagnostics first, as this doesn't require the
    // schema/fragment definitions, or even the project configuration.
let queryHasExtensions = false;
const projectConfig = this._graphQLConfig.getConfigForFile(uri);
const schemaPath = projectConfig.schemaPath;
try {
const queryAST = parse(query);
if (!schemaPath || uri !== schemaPath) {
queryHasExtensions = queryAST.definitions.some(definition => {
switch (definition.kind) {
case OBJECT_TYPE_DEFINITION:
case INTERFACE_TYPE_DEFINITION:
case ENUM_TYPE_DEFINITION:
case UNION_TYPE_DEFINITION:
case SCALAR_TYPE_DEFINITION:
case INPUT_OBJECT_TYPE_DEFINITION:
case SCALAR_TYPE_EXTENSION:
case OBJECT_TYPE_EXTENSION:
case INTERFACE_TYPE_EXTENSION:
case UNION_TYPE_EXTENSION:
case ENUM_TYPE_EXTENSION:
case INPUT_OBJECT_TYPE_EXTENSION:
case DIRECTIVE_DEFINITION:
return true;
}
return false;
});
}
} catch (error) {
const range = getRange(error.locations[0], query);
return [
{
severity: SEVERITY.ERROR,
message: error.message,
source: 'GraphQL: Syntax',
range,
},
];
}
// If there's a matching config, proceed to prepare to run validation
let source = query;
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
projectConfig,
);
const fragmentDependencies = await this._graphQLCache.getFragmentDependencies(
query,
fragmentDefinitions,
);
const dependenciesSource = fragmentDependencies.reduce(
(prev, cur) => `${prev} ${print(cur.definition)}`,
'',
);
source = `${source} ${dependenciesSource}`;
let validationAst = null;
try {
validationAst = parse(source);
} catch (error) {
      // The query string was already checked to parse properly, so errors
      // from this parse must come from corrupted fragment dependencies.
      // For IDEs we don't care about errors outside of the currently edited
      // query, so we return an empty array here.
return [];
}
// Check if there are custom validation rules to be used
let customRules;
const customRulesModulePath =
projectConfig.extensions.customValidationRules;
if (customRulesModulePath) {
/* eslint-disable no-implicit-coercion */
const rulesPath = require.resolve(`${customRulesModulePath}`);
if (rulesPath) {
customRules = require(`${rulesPath}`)(this._graphQLConfig);
}
/* eslint-enable no-implicit-coercion */
}
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName, queryHasExtensions)
.catch(() => null);
if (!schema) {
return [];
}
return validateQuery(validationAst, schema, customRules, isRelayCompatMode);
}
async getAutocompleteSuggestions(
query: string,
position: Position,
filePath: Uri,
): Promise<Array<CompletionItem>> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName)
.catch(() => null);
if (schema) {
return getAutocompleteSuggestions(schema, query, position);
}
return [];
}
async getHoverInformation(
query: string,
position: Position,
filePath: Uri,
): Promise<Hover.contents> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName)
.catch(() => null);
if (schema) {
return getHoverInformation(schema, query, position);
}
return '';
}
async getDefinition(
query: string,
position: Position,
filePath: Uri,
): Promise<?DefinitionQueryResult> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
let ast;
try {
ast = parse(query);
} catch (error) {
return null;
}
const node = getASTNodeAtPosition(query, ast, position);
if (node) {
switch (node.kind) {
case FRAGMENT_SPREAD:
return this._getDefinitionForFragmentSpread(
query,
ast,
node,
filePath,
projectConfig,
);
case FRAGMENT_DEFINITION:
case OPERATION_DEFINITION:
return getDefinitionQueryResultForDefinitionNode(
filePath,
query,
(node: FragmentDefinitionNode | OperationDefinitionNode),
);
case NAMED_TYPE:
return this._getDefinitionForNamedType(
query,
ast,
node,
filePath,
projectConfig,
);
}
}
return null;
}
async _getDefinitionForNamedType(
query: string,
ast: DocumentNode,
node: NamedTypeNode,
filePath: Uri,
projectConfig: GraphQLProjectConfig,
): Promise<?DefinitionQueryResult> {
const objectTypeDefinitions = await this._graphQLCache.getObjectTypeDefinitions(
projectConfig,
);
const dependencies = await this._graphQLCache.getObjectTypeDependenciesForAST(
ast,
objectTypeDefinitions,
);
const localObjectTypeDefinitions = ast.definitions.filter(
definition =>
definition.kind === OBJECT_TYPE_DEFINITION ||
definition.kind === INPUT_OBJECT_TYPE_DEFINITION ||
definition.kind === ENUM_TYPE_DEFINITION,
);
const typeCastedDefs = ((localObjectTypeDefinitions: any): Array<
TypeDefinitionNode,
>);
const localOperationDefinationInfos = typeCastedDefs.map(
(definition: TypeDefinitionNode) => ({
filePath,
content: query,
definition,
}),
);
const result = await getDefinitionQueryResultForNamedType(
query,
node,
dependencies.concat(localOperationDefinationInfos),
);
return result;
}
async _getDefinitionForFragmentSpread(
query: string,
ast: DocumentNode,
node: FragmentSpreadNode,
filePath: Uri,
projectConfig: GraphQLProjectConfig,
): Promise<?DefinitionQueryResult> {
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
projectConfig,
);
const dependencies = await this._graphQLCache.getFragmentDependenciesForAST(
ast,
fragmentDefinitions,
);
const localFragDefinitions = ast.definitions.filter(
definition => definition.kind === FRAGMENT_DEFINITION,
);
const typeCastedDefs = ((localFragDefinitions: any): Array<
FragmentDefinitionNode,
>);
const localFragInfos = typeCastedDefs.map(
(definition: FragmentDefinitionNode) => ({
filePath,
content: query,
definition,
}),
);
const result = await getDefinitionQueryResultForFragmentSpread(
query,
node,
dependencies.concat(localFragInfos),
);
return result;
}
}
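
A minimal sketch of driving this service, assuming a concrete GraphQLCache (the hypothetical `cache` below) has already been built for the project:

// Hypothetical wiring; `cache` is assumed to implement GraphQLCache.
const service = new GraphQLLanguageService(cache);

async function lint(query: string, uri: Uri) {
  const diagnostics = await service.getDiagnostics(query, uri);
  for (const d of diagnostics) {
    console.log(`${d.source} [severity ${d.severity}] ${d.message}`);
  }
}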

View File

@ -0,0 +1,204 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {GraphQLField, GraphQLSchema, GraphQLType} from 'graphql';
import {isCompositeType} from 'graphql';
import {
SchemaMetaFieldDef,
TypeMetaFieldDef,
TypeNameMetaFieldDef,
} from 'graphql/type/introspection';
import type {
CompletionItem,
ContextToken,
State,
TypeInfo,
} from 'graphql-language-service-types';
// Utility for returning the state representing the Definition this token state
// is within, if any.
export function getDefinitionState(tokenState: State): ?State {
let definitionState;
forEachState(tokenState, state => {
switch (state.kind) {
case 'Query':
case 'ShortQuery':
case 'Mutation':
case 'Subscription':
case 'FragmentDefinition':
definitionState = state;
break;
}
});
return definitionState;
}
// Gets the field definition given a type and field name
export function getFieldDef(
schema: GraphQLSchema,
type: GraphQLType,
fieldName: string,
): ?GraphQLField<*, *> {
if (fieldName === SchemaMetaFieldDef.name && schema.getQueryType() === type) {
return SchemaMetaFieldDef;
}
if (fieldName === TypeMetaFieldDef.name && schema.getQueryType() === type) {
return TypeMetaFieldDef;
}
if (fieldName === TypeNameMetaFieldDef.name && isCompositeType(type)) {
return TypeNameMetaFieldDef;
}
if (type.getFields && typeof type.getFields === 'function') {
return (type.getFields()[fieldName]: any);
}
return null;
}
// Utility for iterating through a CodeMirror parse state stack bottom-up.
export function forEachState(
stack: State,
fn: (state: State) => ?TypeInfo,
): void {
const reverseStateStack = [];
let state = stack;
while (state && state.kind) {
reverseStateStack.push(state);
state = state.prevState;
}
for (let i = reverseStateStack.length - 1; i >= 0; i--) {
fn(reverseStateStack[i]);
}
}
export function objectValues(object: Object): Array<any> {
const keys = Object.keys(object);
const len = keys.length;
const values = new Array(len);
for (let i = 0; i < len; ++i) {
values[i] = object[keys[i]];
}
return values;
}
// Create the expected hint response given a possible list and a token
export function hintList(
token: ContextToken,
list: Array<CompletionItem>,
): Array<CompletionItem> {
return filterAndSortList(list, normalizeText(token.string));
}
// Given a list of hint entries and currently typed text, sort and filter to
// provide a concise list.
function filterAndSortList(
list: Array<CompletionItem>,
text: string,
): Array<CompletionItem> {
if (!text) {
return filterNonEmpty(list, entry => !entry.isDeprecated);
}
const byProximity = list.map(entry => ({
proximity: getProximity(normalizeText(entry.label), text),
entry,
}));
const conciseMatches = filterNonEmpty(
filterNonEmpty(byProximity, pair => pair.proximity <= 2),
pair => !pair.entry.isDeprecated,
);
const sortedMatches = conciseMatches.sort(
(a, b) =>
(a.entry.isDeprecated ? 1 : 0) - (b.entry.isDeprecated ? 1 : 0) ||
a.proximity - b.proximity ||
a.entry.label.length - b.entry.label.length,
);
return sortedMatches.map(pair => pair.entry);
}
// Filters the array by the predicate, unless it results in an empty array,
// in which case return the original array.
function filterNonEmpty(
array: Array<Object>,
predicate: (entry: Object) => boolean,
): Array<Object> {
const filtered = array.filter(predicate);
return filtered.length === 0 ? array : filtered;
}
function normalizeText(text: string): string {
return text.toLowerCase().replace(/\W/g, '');
}
// Determine a numeric proximity for a suggestion based on current text.
function getProximity(suggestion: string, text: string): number {
// start with lexical distance
let proximity = lexicalDistance(text, suggestion);
if (suggestion.length > text.length) {
// do not penalize long suggestions.
proximity -= suggestion.length - text.length - 1;
// penalize suggestions not starting with this phrase
proximity += suggestion.indexOf(text) === 0 ? 0 : 0.5;
}
return proximity;
}
/**
* Computes the lexical distance between strings A and B.
*
* The "distance" between two strings is given by counting the minimum number
* of edits needed to transform string A into string B. An edit can be an
* insertion, deletion, or substitution of a single character, or a swap of two
* adjacent characters.
*
 * This distance can be useful for detecting typos in input or for sorting
 * results by similarity.
*
* @param {string} a
* @param {string} b
* @return {int} distance in number of edits
*/
function lexicalDistance(a: string, b: string): number {
let i;
let j;
const d = [];
const aLength = a.length;
const bLength = b.length;
for (i = 0; i <= aLength; i++) {
d[i] = [i];
}
for (j = 1; j <= bLength; j++) {
d[0][j] = j;
}
for (i = 1; i <= aLength; i++) {
for (j = 1; j <= bLength; j++) {
const cost = a[i - 1] === b[j - 1] ? 0 : 1;
d[i][j] = Math.min(
d[i - 1][j] + 1,
d[i][j - 1] + 1,
d[i - 1][j - 1] + cost,
);
if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
}
}
}
return d[aLength][bLength];
}
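
A small worked example of the matching pipeline above, assuming it runs in this module's scope: 'usr' is one edit away from 'user' (insert 'e') and two away from 'users', so both stay under the proximity cutoff of 2, while 'email' (five edits away) is dropped; ties sort shorter label first.

const token = {string: 'usr'};
const items = [{label: 'user'}, {label: 'email'}, {label: 'users'}];
hintList(token, items).map(item => item.label);
// -> ['user', 'users']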

View File

@ -0,0 +1,665 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
FragmentDefinitionNode,
GraphQLDirective,
GraphQLSchema,
} from 'graphql';
import type {
CompletionItem,
ContextToken,
State,
TypeInfo,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import {
GraphQLBoolean,
GraphQLEnumType,
GraphQLInputObjectType,
GraphQLList,
SchemaMetaFieldDef,
TypeMetaFieldDef,
TypeNameMetaFieldDef,
assertAbstractType,
doTypesOverlap,
getNamedType,
getNullableType,
isAbstractType,
isCompositeType,
isInputType,
} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
forEachState,
getDefinitionState,
getFieldDef,
hintList,
objectValues,
} from './autocompleteUtils';
/**
* Given GraphQLSchema, queryText, and context of the current position within
* the source text, provide a list of typeahead entries.
*/
export function getAutocompleteSuggestions(
schema: GraphQLSchema,
queryText: string,
cursor: Position,
contextToken?: ContextToken,
): Array<CompletionItem> {
const token = contextToken || getTokenAtPosition(queryText, cursor);
const state =
token.state.kind === 'Invalid' ? token.state.prevState : token.state;
  // Relieve Flow errors by checking that `state` exists.
if (!state) {
return [];
}
const kind = state.kind;
const step = state.step;
const typeInfo = getTypeInfo(schema, token.state);
// Definition kinds
if (kind === 'Document') {
return hintList(token, [
{label: 'query'},
{label: 'mutation'},
{label: 'subscription'},
{label: 'fragment'},
{label: '{'},
]);
}
// Field names
if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
return getSuggestionsForFieldNames(token, typeInfo, schema);
}
// Argument names
if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
const argDefs = typeInfo.argDefs;
if (argDefs) {
return hintList(
token,
argDefs.map(argDef => ({
label: argDef.name,
detail: String(argDef.type),
documentation: argDef.description,
})),
);
}
}
// Input Object fields
if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
if (typeInfo.objectFieldDefs) {
const objectFields = objectValues(typeInfo.objectFieldDefs);
return hintList(
token,
objectFields.map(field => ({
label: field.name,
detail: String(field.type),
documentation: field.description,
})),
);
}
}
// Input values: Enum and Boolean
if (
kind === 'EnumValue' ||
(kind === 'ListValue' && step === 1) ||
(kind === 'ObjectField' && step === 2) ||
(kind === 'Argument' && step === 2)
) {
return getSuggestionsForInputValues(token, typeInfo);
}
// Fragment type conditions
if (
(kind === 'TypeCondition' && step === 1) ||
(kind === 'NamedType' &&
state.prevState != null &&
state.prevState.kind === 'TypeCondition')
) {
return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
}
// Fragment spread names
if (kind === 'FragmentSpread' && step === 1) {
return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
}
// Variable definition types
if (
(kind === 'VariableDefinition' && step === 2) ||
(kind === 'ListType' && step === 1) ||
(kind === 'NamedType' &&
state.prevState &&
(state.prevState.kind === 'VariableDefinition' ||
state.prevState.kind === 'ListType'))
) {
return getSuggestionsForVariableDefinition(token, schema);
}
// Directive names
if (kind === 'Directive') {
return getSuggestionsForDirective(token, state, schema);
}
return [];
}
// Helper functions to get suggestions for each kind of token
function getSuggestionsForFieldNames(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
): Array<CompletionItem> {
if (typeInfo.parentType) {
const parentType = typeInfo.parentType;
const fields =
parentType.getFields instanceof Function
? objectValues(parentType.getFields())
: [];
if (isAbstractType(parentType)) {
fields.push(TypeNameMetaFieldDef);
}
if (parentType === schema.getQueryType()) {
fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
}
return hintList(
token,
fields.map(field => ({
label: field.name,
detail: String(field.type),
documentation: field.description,
isDeprecated: field.isDeprecated,
deprecationReason: field.deprecationReason,
})),
);
}
return [];
}
function getSuggestionsForInputValues(
token: ContextToken,
typeInfo: TypeInfo,
): Array<CompletionItem> {
const namedInputType = getNamedType(typeInfo.inputType);
if (namedInputType instanceof GraphQLEnumType) {
const values = namedInputType.getValues();
return hintList(
token,
values.map(value => ({
label: value.name,
detail: String(namedInputType),
documentation: value.description,
isDeprecated: value.isDeprecated,
deprecationReason: value.deprecationReason,
})),
);
} else if (namedInputType === GraphQLBoolean) {
return hintList(token, [
{
label: 'true',
detail: String(GraphQLBoolean),
documentation: 'Not false.',
},
{
label: 'false',
detail: String(GraphQLBoolean),
documentation: 'Not true.',
},
]);
}
return [];
}
function getSuggestionsForFragmentTypeConditions(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
): Array<CompletionItem> {
let possibleTypes;
if (typeInfo.parentType) {
if (isAbstractType(typeInfo.parentType)) {
const abstractType = assertAbstractType(typeInfo.parentType);
// Collect both the possible Object types as well as the interfaces
// they implement.
const possibleObjTypes = schema.getPossibleTypes(abstractType);
const possibleIfaceMap = Object.create(null);
possibleObjTypes.forEach(type => {
type.getInterfaces().forEach(iface => {
possibleIfaceMap[iface.name] = iface;
});
});
possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
} else {
// The parent type is a non-abstract Object type, so the only possible
// type that can be used is that same type.
possibleTypes = [typeInfo.parentType];
}
} else {
const typeMap = schema.getTypeMap();
possibleTypes = objectValues(typeMap).filter(isCompositeType);
}
return hintList(
token,
possibleTypes.map(type => {
const namedType = getNamedType(type);
return {
label: String(type),
documentation: (namedType && namedType.description) || '',
};
}),
);
}
function getSuggestionsForFragmentSpread(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
queryText: string,
): Array<CompletionItem> {
const typeMap = schema.getTypeMap();
const defState = getDefinitionState(token.state);
const fragments = getFragmentDefinitions(queryText);
// Filter down to only the fragments which may exist here.
const relevantFrags = fragments.filter(
frag =>
// Only include fragments with known types.
typeMap[frag.typeCondition.name.value] &&
// Only include fragments which are not cyclic.
!(
defState &&
defState.kind === 'FragmentDefinition' &&
defState.name === frag.name.value
) &&
// Only include fragments which could possibly be spread here.
isCompositeType(typeInfo.parentType) &&
isCompositeType(typeMap[frag.typeCondition.name.value]) &&
doTypesOverlap(
schema,
typeInfo.parentType,
typeMap[frag.typeCondition.name.value],
),
);
return hintList(
token,
relevantFrags.map(frag => ({
label: frag.name.value,
detail: String(typeMap[frag.typeCondition.name.value]),
documentation: `fragment ${frag.name.value} on ${
frag.typeCondition.name.value
}`,
})),
);
}
function getFragmentDefinitions(
queryText: string,
): Array<FragmentDefinitionNode> {
const fragmentDefs = [];
runOnlineParser(queryText, (_, state) => {
if (state.kind === 'FragmentDefinition' && state.name && state.type) {
fragmentDefs.push({
kind: 'FragmentDefinition',
name: {
kind: 'Name',
value: state.name,
},
selectionSet: {
kind: 'SelectionSet',
selections: [],
},
typeCondition: {
kind: 'NamedType',
name: {
kind: 'Name',
value: state.type,
},
},
});
}
});
return fragmentDefs;
}
function getSuggestionsForVariableDefinition(
token: ContextToken,
schema: GraphQLSchema,
): Array<CompletionItem> {
const inputTypeMap = schema.getTypeMap();
const inputTypes = objectValues(inputTypeMap).filter(isInputType);
return hintList(
token,
inputTypes.map(type => ({
label: type.name,
documentation: type.description,
})),
);
}
function getSuggestionsForDirective(
token: ContextToken,
state: State,
schema: GraphQLSchema,
): Array<CompletionItem> {
if (state.prevState && state.prevState.kind) {
const directives = schema
.getDirectives()
.filter(directive => canUseDirective(state.prevState, directive));
return hintList(
token,
directives.map(directive => ({
label: directive.name,
documentation: directive.description || '',
})),
);
}
return [];
}
export function getTokenAtPosition(
queryText: string,
cursor: Position,
): ContextToken {
let styleAtCursor = null;
let stateAtCursor = null;
let stringAtCursor = null;
const token = runOnlineParser(queryText, (stream, state, style, index) => {
if (index === cursor.line) {
if (stream.getCurrentPosition() >= cursor.character) {
styleAtCursor = style;
stateAtCursor = {...state};
stringAtCursor = stream.current();
return 'BREAK';
}
}
});
  // Fall back to the state/style of the last parsed token when the ones
  // at the cursor aren't available.
return {
start: token.start,
end: token.end,
string: stringAtCursor || token.string,
state: stateAtCursor || token.state,
style: styleAtCursor || token.style,
};
}
/**
 * Provides a utility function to parse a given query text and construct a
* `token` context object.
* A token context provides useful information about the token/style that
* CharacterStream currently possesses, as well as the end state and style
* of the token.
*/
type callbackFnType = (
stream: CharacterStream,
state: State,
style: string,
index: number,
) => void | 'BREAK';
function runOnlineParser(
queryText: string,
callback: callbackFnType,
): ContextToken {
const lines = queryText.split('\n');
const parser = onlineParser();
let state = parser.startState();
let style = '';
let stream: CharacterStream = new CharacterStream('');
for (let i = 0; i < lines.length; i++) {
stream = new CharacterStream(lines[i]);
while (!stream.eol()) {
style = parser.token(stream, state);
const code = callback(stream, state, style, i);
if (code === 'BREAK') {
break;
}
}
    // The while loop above won't run on an empty line, so run the callback
    // one more time to catch that case.
callback(stream, state, style, i);
if (!state.kind) {
state = parser.startState();
}
}
return {
start: stream.getStartOfToken(),
end: stream.getCurrentPosition(),
string: stream.current(),
state,
style,
};
}
function canUseDirective(
state: $PropertyType<State, 'prevState'>,
directive: GraphQLDirective,
): boolean {
if (!state || !state.kind) {
return false;
}
const kind = state.kind;
const locations = directive.locations;
switch (kind) {
case 'Query':
return locations.indexOf('QUERY') !== -1;
case 'Mutation':
return locations.indexOf('MUTATION') !== -1;
case 'Subscription':
return locations.indexOf('SUBSCRIPTION') !== -1;
case 'Field':
case 'AliasedField':
return locations.indexOf('FIELD') !== -1;
case 'FragmentDefinition':
return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
case 'FragmentSpread':
return locations.indexOf('FRAGMENT_SPREAD') !== -1;
case 'InlineFragment':
return locations.indexOf('INLINE_FRAGMENT') !== -1;
// Schema Definitions
case 'SchemaDef':
return locations.indexOf('SCHEMA') !== -1;
case 'ScalarDef':
return locations.indexOf('SCALAR') !== -1;
case 'ObjectTypeDef':
return locations.indexOf('OBJECT') !== -1;
case 'FieldDef':
return locations.indexOf('FIELD_DEFINITION') !== -1;
case 'InterfaceDef':
return locations.indexOf('INTERFACE') !== -1;
case 'UnionDef':
return locations.indexOf('UNION') !== -1;
case 'EnumDef':
return locations.indexOf('ENUM') !== -1;
case 'EnumValue':
return locations.indexOf('ENUM_VALUE') !== -1;
case 'InputDef':
return locations.indexOf('INPUT_OBJECT') !== -1;
case 'InputValueDef':
const prevStateKind = state.prevState && state.prevState.kind;
switch (prevStateKind) {
case 'ArgumentsDef':
return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
case 'InputDef':
return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
}
}
return false;
}
// Utility for collecting rich type information given any token's state
// from the graphql-mode parser.
export function getTypeInfo(
schema: GraphQLSchema,
tokenState: State,
): TypeInfo {
let argDef;
let argDefs;
let directiveDef;
let enumValue;
let fieldDef;
let inputType;
let objectFieldDefs;
let parentType;
let type;
forEachState(tokenState, state => {
switch (state.kind) {
case 'Query':
case 'ShortQuery':
type = schema.getQueryType();
break;
case 'Mutation':
type = schema.getMutationType();
break;
case 'Subscription':
type = schema.getSubscriptionType();
break;
case 'InlineFragment':
case 'FragmentDefinition':
if (state.type) {
type = schema.getType(state.type);
}
break;
case 'Field':
case 'AliasedField':
if (!type || !state.name) {
fieldDef = null;
} else {
fieldDef = parentType
? getFieldDef(schema, parentType, state.name)
: null;
type = fieldDef ? fieldDef.type : null;
}
break;
case 'SelectionSet':
parentType = getNamedType(type);
break;
case 'Directive':
directiveDef = state.name ? schema.getDirective(state.name) : null;
break;
case 'Arguments':
if (!state.prevState) {
argDefs = null;
} else {
switch (state.prevState.kind) {
case 'Field':
argDefs = fieldDef && fieldDef.args;
break;
case 'Directive':
argDefs = directiveDef && directiveDef.args;
break;
case 'AliasedField':
const name = state.prevState && state.prevState.name;
if (!name) {
argDefs = null;
break;
}
const field = parentType
? getFieldDef(schema, parentType, name)
: null;
if (!field) {
argDefs = null;
break;
}
argDefs = field.args;
break;
default:
argDefs = null;
break;
}
}
break;
case 'Argument':
if (argDefs) {
for (let i = 0; i < argDefs.length; i++) {
if (argDefs[i].name === state.name) {
argDef = argDefs[i];
break;
}
}
}
inputType = argDef && argDef.type;
break;
case 'EnumValue':
const enumType = getNamedType(inputType);
enumValue =
enumType instanceof GraphQLEnumType
? find(enumType.getValues(), val => val.value === state.name)
: null;
break;
case 'ListValue':
const nullableType = getNullableType(inputType);
inputType =
nullableType instanceof GraphQLList ? nullableType.ofType : null;
break;
case 'ObjectValue':
const objectType = getNamedType(inputType);
objectFieldDefs =
objectType instanceof GraphQLInputObjectType
? objectType.getFields()
: null;
break;
case 'ObjectField':
const objectField =
state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
inputType = objectField && objectField.type;
break;
case 'NamedType':
if (state.name) {
type = schema.getType(state.name);
}
break;
}
});
return {
argDef,
argDefs,
directiveDef,
enumValue,
fieldDef,
inputType,
objectFieldDefs,
parentType,
type,
};
}
// Returns the first item in the array which causes predicate to return truthy.
function find(array, predicate) {
for (let i = 0; i < array.length; i++) {
if (predicate(array[i])) {
return array[i];
}
}
return null;
}
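
A usage sketch, assuming a schema built with graphql-js and a plain {line, character} object for the cursor position:

import {buildSchema} from 'graphql';

const schema = buildSchema('type Query { user: String users: [String] }');

// Cursor sits right after the partial field name 'us'.
const suggestions = getAutocompleteSuggestions(schema, '{ us', {
  line: 0,
  character: 4,
});
suggestions.map(s => s.label);
// -> ['user', 'users'] (other fields are filtered out by proximity)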

View File

@ -0,0 +1,136 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
ASTNode,
FragmentSpreadNode,
FragmentDefinitionNode,
OperationDefinitionNode,
NamedTypeNode,
TypeDefinitionNode,
} from 'graphql';
import type {
Definition,
DefinitionQueryResult,
FragmentInfo,
Position,
Range,
Uri,
ObjectTypeInfo,
} from 'graphql-language-service-types';
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
import invariant from 'assert';
export const LANGUAGE = 'GraphQL';
function getRange(text: string, node: ASTNode): Range {
const location = node.loc;
invariant(location, 'Expected ASTNode to have a location.');
return locToRange(text, location);
}
function getPosition(text: string, node: ASTNode): Position {
const location = node.loc;
invariant(location, 'Expected ASTNode to have a location.');
return offsetToPosition(text, location.start);
}
export async function getDefinitionQueryResultForNamedType(
text: string,
node: NamedTypeNode,
dependencies: Array<ObjectTypeInfo>,
): Promise<DefinitionQueryResult> {
const name = node.name.value;
const defNodes = dependencies.filter(
({definition}) => definition.name && definition.name.value === name,
);
if (defNodes.length === 0) {
process.stderr.write(`Definition not found for GraphQL type ${name}`);
return {queryRange: [], definitions: []};
}
const definitions: Array<Definition> = defNodes.map(
({filePath, content, definition}) =>
getDefinitionForNodeDefinition(filePath || '', content, definition),
);
return {
definitions,
queryRange: definitions.map(_ => getRange(text, node)),
};
}
export async function getDefinitionQueryResultForFragmentSpread(
text: string,
fragment: FragmentSpreadNode,
dependencies: Array<FragmentInfo>,
): Promise<DefinitionQueryResult> {
const name = fragment.name.value;
const defNodes = dependencies.filter(
({definition}) => definition.name.value === name,
);
if (defNodes.length === 0) {
process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
return {queryRange: [], definitions: []};
}
const definitions: Array<Definition> = defNodes.map(
({filePath, content, definition}) =>
getDefinitionForFragmentDefinition(filePath || '', content, definition),
);
return {
definitions,
queryRange: definitions.map(_ => getRange(text, fragment)),
};
}
export function getDefinitionQueryResultForDefinitionNode(
path: Uri,
text: string,
definition: FragmentDefinitionNode | OperationDefinitionNode,
): DefinitionQueryResult {
return {
definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
queryRange: definition.name ? [getRange(text, definition.name)] : [],
};
}
function getDefinitionForFragmentDefinition(
path: Uri,
text: string,
definition: FragmentDefinitionNode | OperationDefinitionNode,
): Definition {
const name = definition.name;
invariant(name, 'Expected ASTNode to have a Name.');
return {
path,
position: getPosition(text, definition),
range: getRange(text, definition),
name: name.value || '',
language: LANGUAGE,
// This is a file inside the project root, good enough for now
projectRoot: path,
};
}
function getDefinitionForNodeDefinition(
path: Uri,
text: string,
definition: TypeDefinitionNode,
): Definition {
const name = definition.name;
invariant(name, 'Expected ASTNode to have a Name.');
return {
path,
position: getPosition(text, definition),
range: getRange(text, definition),
name: name.value || '',
language: LANGUAGE,
// This is a file inside the project root, good enough for now
projectRoot: path,
};
}
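
A sketch of resolving a fragment spread to its definition; the file path is hypothetical and the AST nodes come straight from graphql-js `parse`:

import {parse} from 'graphql';

const fragmentFile = 'fragment userFields on User { id }';
const fragmentDef = parse(fragmentFile).definitions[0];

const queryFile = '{ user { ...userFields } }';
const spreadNode = parse(queryFile).definitions[0].selectionSet.selections[0]
  .selectionSet.selections[0];

getDefinitionQueryResultForFragmentSpread(queryFile, spreadNode, [
  {filePath: '/app/fragments.graphql', content: fragmentFile, definition: fragmentDef},
]).then(result => console.log(result.definitions[0].name)); // 'userFields'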

View File

@ -0,0 +1,172 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
ASTNode,
DocumentNode,
GraphQLError,
GraphQLSchema,
Location,
SourceLocation,
} from 'graphql';
import type {
Diagnostic,
CustomValidationRule,
} from 'graphql-language-service-types';
import invariant from 'assert';
import {findDeprecatedUsages, parse} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
Position,
Range,
validateWithCustomRules,
} from 'graphql-language-service-utils';
export const SEVERITY = {
ERROR: 1,
WARNING: 2,
INFORMATION: 3,
HINT: 4,
};
export function getDiagnostics(
query: string,
schema: ?GraphQLSchema = null,
customRules?: Array<CustomValidationRule>,
isRelayCompatMode?: boolean,
): Array<Diagnostic> {
let ast = null;
try {
ast = parse(query);
} catch (error) {
const range = getRange(error.locations[0], query);
return [
{
severity: SEVERITY.ERROR,
message: error.message,
source: 'GraphQL: Syntax',
range,
},
];
}
return validateQuery(ast, schema, customRules, isRelayCompatMode);
}
export function validateQuery(
ast: DocumentNode,
schema: ?GraphQLSchema = null,
customRules?: Array<CustomValidationRule>,
isRelayCompatMode?: boolean,
): Array<Diagnostic> {
// We cannot validate the query unless a schema is provided.
if (!schema) {
return [];
}
const validationErrorAnnotations = mapCat(
validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
error => annotations(error, SEVERITY.ERROR, 'Validation'),
);
// Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
// support older versions of graphql-js.
const deprecationWarningAnnotations = !findDeprecatedUsages
? []
: mapCat(findDeprecatedUsages(schema, ast), error =>
annotations(error, SEVERITY.WARNING, 'Deprecation'),
);
return validationErrorAnnotations.concat(deprecationWarningAnnotations);
}
// General utility for map-cating (aka flat-mapping).
function mapCat<T>(
array: Array<T>,
mapper: (item: T) => Array<any>,
): Array<any> {
return Array.prototype.concat.apply([], array.map(mapper));
}
function annotations(
error: GraphQLError,
severity: number,
type: string,
): Array<Diagnostic> {
if (!error.nodes) {
return [];
}
return error.nodes.map(node => {
const highlightNode =
node.kind !== 'Variable' && node.name
? node.name
: node.variable
? node.variable
: node;
invariant(error.locations, 'GraphQL validation error requires locations.');
const loc = error.locations[0];
const highlightLoc = getLocation(highlightNode);
const end = loc.column + (highlightLoc.end - highlightLoc.start);
return {
source: `GraphQL: ${type}`,
message: error.message,
severity,
range: new Range(
new Position(loc.line - 1, loc.column - 1),
new Position(loc.line - 1, end),
),
};
});
}
export function getRange(location: SourceLocation, queryText: string) {
const parser = onlineParser();
const state = parser.startState();
const lines = queryText.split('\n');
invariant(
lines.length >= location.line,
    'Query text must have at least as many lines as the error location',
);
let stream = null;
for (let i = 0; i < location.line; i++) {
stream = new CharacterStream(lines[i]);
while (!stream.eol()) {
const style = parser.token(stream, state);
if (style === 'invalidchar') {
break;
}
}
}
invariant(stream, 'Expected Parser stream to be available.');
const line = location.line - 1;
const start = stream.getStartOfToken();
const end = stream.getCurrentPosition();
return new Range(new Position(line, start), new Position(line, end));
}
/**
* Get location info from a node in a type-safe way.
*
* The only way a node could not have a location is if we initialized the parser
* (and therefore the lexer) with the `noLocation` option, but we always
* call `parse` without options above.
*/
function getLocation(node: any): Location {
const typeCastedNode = (node: ASTNode);
const location = typeCastedNode.loc;
invariant(location, 'Expected ASTNode to have a location.');
return location;
}
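
A quick usage sketch with a graphql-js schema:

import {buildSchema} from 'graphql';

const schema = buildSchema('type Query { hello: String }');

// Unbalanced braces are caught before validation: 'GraphQL: Syntax'.
getDiagnostics('{ hello ', schema);

// Parses fine but fails validation ('GraphQL: Validation'):
// Query has no field named 'world'.
getDiagnostics('{ world }', schema);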

View File

@ -0,0 +1,186 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
/**
* Ported from codemirror-graphql
* https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
*/
import type {GraphQLSchema} from 'graphql';
import type {ContextToken} from 'graphql-language-service-types';
import type {Hover} from 'vscode-languageserver-types';
import type {Position} from 'graphql-language-service-utils';
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
import {GraphQLNonNull, GraphQLList} from 'graphql';
export function getHoverInformation(
schema: GraphQLSchema,
queryText: string,
cursor: Position,
contextToken?: ContextToken,
): Hover.contents {
const token = contextToken || getTokenAtPosition(queryText, cursor);
if (!schema || !token || !token.state) {
return [];
}
const state = token.state;
const kind = state.kind;
const step = state.step;
const typeInfo = getTypeInfo(schema, token.state);
const options = {schema};
// Given a Schema and a Token, produce the contents of an info tooltip.
  // To do this, build up an array of strings (`into`) and pass it through
  // the various rendering functions below.
if (
(kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
(kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
) {
const into = [];
renderField(into, typeInfo, options);
renderDescription(into, options, typeInfo.fieldDef);
return into.join('').trim();
} else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
const into = [];
renderDirective(into, typeInfo, options);
renderDescription(into, options, typeInfo.directiveDef);
return into.join('').trim();
} else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
const into = [];
renderArg(into, typeInfo, options);
renderDescription(into, options, typeInfo.argDef);
return into.join('').trim();
} else if (
kind === 'EnumValue' &&
typeInfo.enumValue &&
typeInfo.enumValue.description
) {
const into = [];
renderEnumValue(into, typeInfo, options);
renderDescription(into, options, typeInfo.enumValue);
return into.join('').trim();
} else if (
kind === 'NamedType' &&
typeInfo.type &&
typeInfo.type.description
) {
const into = [];
renderType(into, typeInfo, options, typeInfo.type);
renderDescription(into, options, typeInfo.type);
return into.join('').trim();
}
}
function renderField(into, typeInfo, options) {
renderQualifiedField(into, typeInfo, options);
renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
}
function renderQualifiedField(into, typeInfo, options) {
if (!typeInfo.fieldDef) {
return;
}
const fieldName = (typeInfo.fieldDef.name: string);
if (fieldName.slice(0, 2) !== '__') {
renderType(into, typeInfo, options, typeInfo.parentType);
text(into, '.');
}
text(into, fieldName);
}
function renderDirective(into, typeInfo, options) {
if (!typeInfo.directiveDef) {
return;
}
const name = '@' + typeInfo.directiveDef.name;
text(into, name);
}
function renderArg(into, typeInfo, options) {
if (typeInfo.directiveDef) {
renderDirective(into, typeInfo, options);
} else if (typeInfo.fieldDef) {
renderQualifiedField(into, typeInfo, options);
}
if (!typeInfo.argDef) {
return;
}
const name = typeInfo.argDef.name;
text(into, '(');
text(into, name);
renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
text(into, ')');
}
function renderTypeAnnotation(into, typeInfo, options, t) {
text(into, ': ');
renderType(into, typeInfo, options, t);
}
function renderEnumValue(into, typeInfo, options) {
if (!typeInfo.enumValue) {
return;
}
const name = typeInfo.enumValue.name;
renderType(into, typeInfo, options, typeInfo.inputType);
text(into, '.');
text(into, name);
}
function renderType(into, typeInfo, options, t) {
if (!t) {
return;
}
if (t instanceof GraphQLNonNull) {
renderType(into, typeInfo, options, t.ofType);
text(into, '!');
} else if (t instanceof GraphQLList) {
text(into, '[');
renderType(into, typeInfo, options, t.ofType);
text(into, ']');
} else {
text(into, t.name);
}
}
function renderDescription(into, options, def) {
if (!def) {
return;
}
const description =
typeof def.description === 'string' ? def.description : null;
if (description) {
text(into, '\n\n');
text(into, description);
}
renderDeprecation(into, options, def);
}
function renderDeprecation(into, options, def) {
if (!def) {
return;
}
const reason =
typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
if (!reason) {
return;
}
text(into, '\n\n');
text(into, 'Deprecated: ');
text(into, reason);
}
function text(into: string[], content: string) {
into.push(content);
}
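
A usage sketch, again assuming a graphql-js schema and a plain position object:

import {buildSchema} from 'graphql';

const schema = buildSchema('type Query { user: String }');

// Hovering over 'user' yields its qualified name and type.
getHoverInformation(schema, '{ user }', {line: 0, character: 3});
// -> 'Query.user: String'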

View File

@ -0,0 +1,121 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
Outline,
TextToken,
TokenKind,
} from 'graphql-language-service-types';
import {Kind, parse, visit} from 'graphql';
import {offsetToPosition} from 'graphql-language-service-utils';
const {INLINE_FRAGMENT} = Kind;
const OUTLINEABLE_KINDS = {
Field: true,
OperationDefinition: true,
Document: true,
SelectionSet: true,
Name: true,
FragmentDefinition: true,
FragmentSpread: true,
InlineFragment: true,
};
type OutlineTreeConverterType = {[name: string]: Function};
export function getOutline(queryText: string): ?Outline {
let ast;
try {
ast = parse(queryText);
} catch (error) {
return null;
}
const visitorFns = outlineTreeConverter(queryText);
const outlineTrees = visit(ast, {
leave(node) {
if (
OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
visitorFns[node.kind]
) {
return visitorFns[node.kind](node);
}
return null;
},
});
return {outlineTrees};
}
function outlineTreeConverter(docText: string): OutlineTreeConverterType {
const meta = node => ({
representativeName: node.name,
startPosition: offsetToPosition(docText, node.loc.start),
endPosition: offsetToPosition(docText, node.loc.end),
children: node.selectionSet || [],
});
return {
Field: node => {
const tokenizedText = node.alias
? [buildToken('plain', node.alias), buildToken('plain', ': ')]
: [];
tokenizedText.push(buildToken('plain', node.name));
return {tokenizedText, ...meta(node)};
},
OperationDefinition: node => ({
tokenizedText: [
buildToken('keyword', node.operation),
buildToken('whitespace', ' '),
buildToken('class-name', node.name),
],
...meta(node),
}),
Document: node => node.definitions,
SelectionSet: node =>
concatMap(node.selections, child => {
return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
}),
Name: node => node.value,
FragmentDefinition: node => ({
tokenizedText: [
buildToken('keyword', 'fragment'),
buildToken('whitespace', ' '),
buildToken('class-name', node.name),
],
...meta(node),
}),
FragmentSpread: node => ({
tokenizedText: [
buildToken('plain', '...'),
buildToken('class-name', node.name),
],
...meta(node),
}),
InlineFragment: node => node.selectionSet,
};
}
function buildToken(kind: TokenKind, value: string): TextToken {
return {kind, value};
}
function concatMap(arr: Array<any>, fn: Function): Array<any> {
const res = [];
for (let i = 0; i < arr.length; i++) {
const x = fn(arr[i], i);
if (Array.isArray(x)) {
res.push(...x);
} else {
res.push(x);
}
}
return res;
}
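
For example, a single operation yields one outline tree whose tokenized text mirrors the definition header (a sketch run in this module's scope):

const outline = getOutline('query Users { users { id } }');
outline.outlineTrees[0].tokenizedText;
// -> [{kind: 'keyword', value: 'query'},
//     {kind: 'whitespace', value: ' '},
//     {kind: 'class-name', value: 'Users'}]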

View File

@ -0,0 +1,31 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
export {
getDefinitionState,
getFieldDef,
forEachState,
objectValues,
hintList,
} from './autocompleteUtils';
export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
export {
LANGUAGE,
getDefinitionQueryResultForFragmentSpread,
getDefinitionQueryResultForDefinitionNode,
} from './getDefinition';
export {getDiagnostics, validateQuery} from './getDiagnostics';
export {getOutline} from './getOutline';
export {getHoverInformation} from './getHoverInformation';
export {GraphQLLanguageService} from './GraphQLLanguageService';

Binary file not shown.


View File

@ -0,0 +1,35 @@
{
"name": "web",
"version": "0.1.0",
"private": true,
"dependencies": {
"@apollographql/graphql-playground-react": "^1.7.31",
"apollo-link-ws": "^1.0.8",
"graphql": "^14.1.1",
"react": "^16.11.0",
"react-dom": "^16.11.0",
"react-scripts": "3.2.0",
"subscriptions-transport-ws": "^0.9.14"
},
"scripts": {
"start": "PORT=3001 react-scripts start",
"build": "react-scripts build",
"test": "react-scripts test",
"eject": "react-scripts eject"
},
"proxy": "http://localhost:8080",
"eslintConfig": {
"extends": "react-app"
},
"browserslist": [
">0.2%",
"not dead",
"not ie <= 11",
"not op_mini all"
],
"resolutions": {
"apollo-link-ws": "^1.0.8",
"graphql": "^14.1.1",
"subscriptions-transport-ws": "^0.9.14"
}
}

Binary file not shown.


View File

@ -0,0 +1,43 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8" />
<link rel="shortcut icon" href="%PUBLIC_URL%/favicon.ico" />
<meta
name="viewport"
content="width=device-width, initial-scale=1, shrink-to-fit=no"
/>
<meta name="theme-color" content="#000000" />
<!--
manifest.json provides metadata used when your web app is installed on a
user's mobile device or desktop. See https://developers.google.com/web/fundamentals/web-app-manifest/
-->
<link rel="manifest" href="%PUBLIC_URL%/manifest.json" />
<link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet">
<!--
Notice the use of %PUBLIC_URL% in the tags above.
It will be replaced with the URL of the `public` folder during the build.
Only files inside the `public` folder can be referenced from the HTML.
Unlike "/favicon.ico" or "favicon.ico", "%PUBLIC_URL%/favicon.ico" will
work correctly both with client-side routing and a non-root public URL.
Learn how to configure a non-root public URL by running `npm run build`.
-->
<title>Super Graph - GraphQL API for Rails</title>
</head>
<body>
<noscript>You need to enable JavaScript to run this app.</noscript>
<div id="root"></div>
<!--
This HTML file is a template.
If you open it directly in the browser, you will see an empty page.
You can add webfonts, meta tags, or analytics to this file.
The build step will place the bundled scripts into the <body> tag.
To begin the development, run `npm start` or `yarn start`.
To create a production bundle, use `npm run build` or `yarn build`.
-->
</body>
</html>

View File

@ -0,0 +1,15 @@
{
"short_name": "Super Graph",
"name": "Super Graph - GraphQL API for Rails",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

67
internal/serv/web/src/App.js Executable file
View File

@ -0,0 +1,67 @@
import React, { Component } from 'react'
import { Provider } from 'react-redux'
import { Playground, store } from '@apollographql/graphql-playground-react'
import './index.css'
// Wrap window.fetch so every Playground request is sent with cookies
// (credentials: 'include'), delegating to the original fetch.
const fetch = window.fetch
window.fetch = function() {
  arguments[1].credentials = 'include'
  return Promise.resolve(fetch.apply(global, arguments))
}
class App extends Component {
render() {
return (
<div>
<header style={{
background: '#09141b',
color: '#03a9f4',
letterSpacing: '0.15rem',
height: '65px',
display: 'flex',
alignItems: 'center'
}}
>
<h3 style={{
textDecoration: 'none',
margin: '0px',
fontSize: '18px',
}}
>
<span style={{
textTransform: 'uppercase',
marginLeft: '20px',
paddingRight: '10px',
borderRight: '1px solid #fff'
}}>
Super Graph
</span>
<span style={{
fontSize: '16px',
marginLeft: '10px',
color: '#fff'
}}>
Instant GraphQL</span>
</h3>
</header>
<Provider store={store}>
<Playground
endpoint="/api/v1/graphql"
settings="{
'schema.polling.enable': false,
'request.credentials': 'include',
'general.betaUpdates': true,
'editor.reuseHeaders': true,
'editor.theme': 'dark'
}"
/>
</Provider>
</div>
);
}
}
export default App;
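
The fetch override above assumes Playground always passes an init object as the second argument; a slightly more defensive version of the same trick might look like this (a sketch, not what ships):

const originalFetch = window.fetch
window.fetch = (input, init = {}) => {
  init.credentials = 'include' // always send cookies to the API
  return originalFetch(input, init)
}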

View File

@ -0,0 +1,9 @@
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
it('renders without crashing', () => {
const div = document.createElement('div');
ReactDOM.render(<App />, div);
ReactDOM.unmountComponentAtNode(div);
});

19
internal/serv/web/src/index.css Executable file
View File

@ -0,0 +1,19 @@
body {
margin: 0;
padding: 0;
font-family: -apple-system, BlinkMacSystemFont, "Segoe UI", "Roboto", "Oxygen",
"Ubuntu", "Cantarell", "Fira Sans", "Droid Sans", "Helvetica Neue",
sans-serif;
-webkit-font-smoothing: antialiased;
-moz-osx-font-smoothing: grayscale;
background-color: #0f202d;
}
code {
font-family: source-code-pro, Menlo, Monaco, Consolas, "Courier New",
monospace;
}
.playground > div:nth-child(2) {
height: calc(100vh - 131px);
}

11
internal/serv/web/src/index.js Executable file
View File

@ -0,0 +1,11 @@
import React from 'react';
import ReactDOM from 'react-dom';
import App from './App';
//import * as serviceWorker from './serviceWorker';
ReactDOM.render(<App />, document.getElementById('root'));
// If you want your app to work offline and load faster, you can change
// unregister() to register() below. Note this comes with some pitfalls.
// Learn more about service workers: http://bit.ly/CRA-PWA
//serviceWorker.unregister();

7
internal/serv/web/src/logo.svg Executable file
View File

@ -0,0 +1,7 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 841.9 595.3">
<g fill="#61DAFB">
<path d="M666.3 296.5c0-32.5-40.7-63.3-103.1-82.4 14.4-63.6 8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6 0 8.3.9 11.4 2.6 13.6 7.8 19.5 37.5 14.9 75.7-1.1 9.4-2.9 19.3-5.1 29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50 32.6-30.3 63.2-46.9 84-46.9V78c-27.5 0-63.5 19.6-99.9 53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7 0 51.4 16.5 84 46.6-14 14.7-28 31.4-41.3 49.9-22.6 2.4-44 6.1-63.6 11-2.3-10-4-19.7-5.2-29-4.7-38.2 1.1-67.9 14.6-75.8 3-1.8 6.9-2.6 11.5-2.6V78.5c-8.4 0-16 1.8-22.6 5.6-28.1 16.2-34.4 66.7-19.9 130.1-62.2 19.2-102.7 49.9-102.7 82.3 0 32.5 40.7 63.3 103.1 82.4-14.4 63.6-8 114.2 20.2 130.4 6.5 3.8 14.1 5.6 22.5 5.6 27.5 0 63.5-19.6 99.9-53.6 36.4 33.8 72.4 53.2 99.9 53.2 8.4 0 16-1.8 22.6-5.6 28.1-16.2 34.4-66.7 19.9-130.1 62-19.1 102.5-49.9 102.5-82.3zm-130.2-66.7c-3.7 12.9-8.3 26.2-13.5 39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4 14.2 2.1 27.9 4.7 41 7.9zm-45.8 106.5c-7.8 13.5-15.8 26.3-24.1 38.2-14.9 1.3-30 2-45.2 2-15.1 0-30.2-.7-45-1.9-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8 6.2-13.4 13.2-26.8 20.7-39.9 7.8-13.5 15.8-26.3 24.1-38.2 14.9-1.3 30-2 45.2-2 15.1 0 30.2.7 45 1.9 8.3 11.9 16.4 24.6 24.2 38 7.6 13.1 14.5 26.4 20.8 39.8-6.3 13.4-13.2 26.8-20.7 39.9zm32.3-13c5.4 13.4 10 26.8 13.8 39.8-13.1 3.2-26.9 5.9-41.2 8 4.9-7.7 9.8-15.6 14.4-23.7 4.6-8 8.9-16.1 13-24.1zM421.2 430c-9.3-9.6-18.6-20.3-27.8-32 9 .4 18.2.7 27.5.7 9.4 0 18.7-.2 27.8-.7-9 11.7-18.3 22.4-27.5 32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9 3.7-12.9 8.3-26.2 13.5-39.5 4.1 8 8.4 16 13.1 24 4.7 8 9.5 15.8 14.4 23.4zM420.7 163c9.3 9.6 18.6 20.3 27.8 32-9-.4-18.2-.7-27.5-.7-9.4 0-18.7.2-27.8.7 9-11.7 18.3-22.4 27.5-32zm-74 58.9c-4.9 7.7-9.8 15.6-14.4 23.7-4.6 8-8.9 16-13 24-5.4-13.4-10-26.8-13.8-39.8 13.1-3.1 26.9-5.8 41.2-7.9zm-90.5 125.2c-35.4-15.1-58.3-34.9-58.3-50.6 0-15.7 22.9-35.6 58.3-50.6 8.6-3.7 18-7 27.7-10.1 5.7 19.6 13.2 40 22.5 60.9-9.2 20.8-16.6 41.1-22.2 60.6-9.9-3.1-19.3-6.5-28-10.2zM310 490c-13.6-7.8-19.5-37.5-14.9-75.7 1.1-9.4 2.9-19.3 5.1-29.4 19.6 4.8 41 8.5 63.5 10.9 13.5 18.5 27.5 35.3 41.6 50-32.6 30.3-63.2 46.9-84 46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7 38.2-1.1 67.9-14.6 75.8-3 1.8-6.9 2.6-11.5 2.6-20.7 0-51.4-16.5-84-46.6 14-14.7 28-31.4 41.3-49.9 22.6-2.4 44-6.1 63.6-11 2.3 10.1 4.1 19.8 5.2 29.1zm38.5-66.7c-8.6 3.7-18 7-27.7 10.1-5.7-19.6-13.2-40-22.5-60.9 9.2-20.8 16.6-41.1 22.2-60.6 9.9 3.1 19.3 6.5 28.1 10.2 35.4 15.1 58.3 34.9 58.3 50.6-.1 15.7-23 35.6-58.4 50.6zM320.8 78.4z"/>
<circle cx="420.9" cy="296.5" r="45.7"/>
<path d="M520.5 78.1z"/>
</g>
</svg>


View File

@ -0,0 +1,135 @@
// This optional code is used to register a service worker.
// register() is not called by default.
// This lets the app load faster on subsequent visits in production, and gives
// it offline capabilities. However, it also means that developers (and users)
// will only see deployed updates on subsequent visits to a page, after all the
// existing tabs open on the page have been closed, since previously cached
// resources are updated in the background.
// To learn more about the benefits of this model and instructions on how to
// opt-in, read http://bit.ly/CRA-PWA
const isLocalhost = Boolean(
window.location.hostname === 'localhost' ||
// [::1] is the IPv6 localhost address.
window.location.hostname === '[::1]' ||
// 127.0.0.1/8 is considered localhost for IPv4.
window.location.hostname.match(
/^127(?:\.(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)){3}$/
)
);
export function register(config) {
if (process.env.NODE_ENV === 'production' && 'serviceWorker' in navigator) {
// The URL constructor is available in all browsers that support SW.
const publicUrl = new URL(process.env.PUBLIC_URL, window.location.href);
if (publicUrl.origin !== window.location.origin) {
// Our service worker won't work if PUBLIC_URL is on a different origin
// from what our page is served on. This might happen if a CDN is used to
// serve assets; see https://github.com/facebook/create-react-app/issues/2374
return;
}
window.addEventListener('load', () => {
const swUrl = `${process.env.PUBLIC_URL}/service-worker.js`;
if (isLocalhost) {
// This is running on localhost. Let's check if a service worker still exists or not.
checkValidServiceWorker(swUrl, config);
// Add some additional logging to localhost, pointing developers to the
// service worker/PWA documentation.
navigator.serviceWorker.ready.then(() => {
console.log(
'This web app is being served cache-first by a service ' +
'worker. To learn more, visit http://bit.ly/CRA-PWA'
);
});
} else {
// Is not localhost. Just register service worker
registerValidSW(swUrl, config);
}
});
}
}
function registerValidSW(swUrl, config) {
navigator.serviceWorker
.register(swUrl)
.then(registration => {
registration.onupdatefound = () => {
const installingWorker = registration.installing;
if (installingWorker == null) {
return;
}
installingWorker.onstatechange = () => {
if (installingWorker.state === 'installed') {
if (navigator.serviceWorker.controller) {
// At this point, the updated precached content has been fetched,
// but the previous service worker will still serve the older
// content until all client tabs are closed.
console.log(
'New content is available and will be used when all ' +
'tabs for this page are closed. See http://bit.ly/CRA-PWA.'
);
// Execute callback
if (config && config.onUpdate) {
config.onUpdate(registration);
}
} else {
// At this point, everything has been precached.
// It's the perfect time to display a
// "Content is cached for offline use." message.
console.log('Content is cached for offline use.');
// Execute callback
if (config && config.onSuccess) {
config.onSuccess(registration);
}
}
}
};
};
})
.catch(error => {
console.error('Error during service worker registration:', error);
});
}
function checkValidServiceWorker(swUrl, config) {
  // Check if the service worker can be found. If it can't, reload the page.
fetch(swUrl)
.then(response => {
// Ensure service worker exists, and that we really are getting a JS file.
const contentType = response.headers.get('content-type');
if (
response.status === 404 ||
(contentType != null && contentType.indexOf('javascript') === -1)
) {
// No service worker found. Probably a different app. Reload the page.
navigator.serviceWorker.ready.then(registration => {
registration.unregister().then(() => {
window.location.reload();
});
});
} else {
// Service worker found. Proceed as normal.
registerValidSW(swUrl, config);
}
})
.catch(() => {
console.log(
'No internet connection found. App is running in offline mode.'
);
});
}
export function unregister() {
if ('serviceWorker' in navigator) {
navigator.serviceWorker.ready.then(registration => {
registration.unregister();
});
}
}
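
Opting in would mean calling register() from the app entry point, for example (the callbacks here are hypothetical):

import * as serviceWorker from './serviceWorker';

serviceWorker.register({
  onUpdate: () => console.log('New version cached; close all tabs to apply it.'),
  onSuccess: () => console.log('Content is cached for offline use.'),
});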

11416
internal/serv/web/yarn.lock Normal file

File diff suppressed because it is too large Load Diff

33
internal/serv/ws.go Normal file
View File

@ -0,0 +1,33 @@
package serv
/*
func apiv1Ws(w http.ResponseWriter, r *http.Request) {
c, err := upgrader.Upgrade(w, r, nil)
if err != nil {
fmt.Println(err)
return
}
defer c.Close()
for {
mt, message, err := c.ReadMessage()
if err != nil {
fmt.Println("read:", err)
break
}
fmt.Printf("recv: %s", message)
err = c.WriteMessage(mt, message)
if err != nil {
fmt.Println("write:", err)
break
}
}
}
func serve(w http.ResponseWriter, r *http.Request) {
// if websocket.IsWebSocketUpgrade(r) {
// apiv1Ws(w, r)
// return
// }
apiv1Http(w, r)
}
*/