Compare commits

..

9 Commits

228 changed files with 5995 additions and 4689 deletions

.gitignore vendored (1 change)

@ -24,7 +24,6 @@
/demo/tmp
.vscode
main
.DS_Store
.swp
.release


@ -7,7 +7,7 @@ rules:
- name: run
match: \.go$
ignore: web|examples|docs|_test\.go$
command: go run main.go serv
command: go run cmd/main.go serv
- name: test
match: _test\.go$
command: go test -cover {PKG}
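
The run rule above re-executes "go run cmd/main.go serv" whenever a changed file matches \.go$ and is not excluded by the ignore pattern. A minimal Go sketch of how those two regular expressions interact (the file paths are invented for illustration):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	match := regexp.MustCompile(`\.go$`)
	ignore := regexp.MustCompile(`web|examples|docs|_test\.go$`)

	// Only core/api.go should trigger the run rule.
	for _, f := range []string{"core/api.go", "serv/api_test.go", "docs/gen.go"} {
		fires := match.MatchString(f) && !ignore.MatchString(f)
		fmt.Printf("%s -> %v\n", f, fires)
	}
}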


@ -1,7 +1,7 @@
# stage: 1
FROM node:10 as react-build
WORKDIR /web
COPY web/ ./
COPY /cmd/internal/serv/web/ ./
RUN yarn
RUN yarn build
@ -22,8 +22,8 @@ RUN chmod 755 /usr/local/bin/sops
WORKDIR /app
COPY . /app
RUN mkdir -p /app/web/build
COPY --from=react-build /web/build/ ./web/build/
RUN mkdir -p /app/cmd/internal/serv/web/build
COPY --from=react-build /web/build/ ./cmd/internal/serv/web/build
RUN go mod vendor
RUN make build
@ -41,7 +41,7 @@ RUN mkdir -p /config
COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=go-build /app/config/* /config/
COPY --from=go-build /app/super-graph .
COPY --from=go-build /app/scripts/start.sh .
COPY --from=go-build /app/cmd/scripts/start.sh .
COPY --from=go-build /usr/local/bin/sops .
RUN chmod +x /super-graph


@ -28,14 +28,14 @@ BIN_DIR := $(GOPATH)/bin
GORICE := $(BIN_DIR)/rice
GOLANGCILINT := $(BIN_DIR)/golangci-lint
GITCHGLOG := $(BIN_DIR)/git-chglog
WEB_BUILD_DIR := ./web/build/manifest.json
WEB_BUILD_DIR := ./cmd/internal/serv/web/build/manifest.json
$(GORICE):
@GO111MODULE=off go get -u github.com/GeertJohan/go.rice/rice
$(WEB_BUILD_DIR):
@echo "First install Yarn and create a build of the web UI found under ./web"
@echo "Command: cd web && yarn && yarn build"
@echo "First install Yarn and create a build of the web UI then re-run make install"
@echo "Run this command: yarn --cwd cmd/internal/serv/web/ build"
@exit 1
$(GITCHGLOG):
@ -57,7 +57,7 @@ os = $(word 1, $@)
$(PLATFORMS): lint test
@mkdir -p release
@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64
@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64 cmd/main.go
release: windows linux darwin
@ -69,7 +69,7 @@ gen: $(GORICE) $(WEB_BUILD_DIR)
@go generate ./...
$(BINARY): clean
@go build $(BUILD_FLAGS) -o $(BINARY)
@go build $(BUILD_FLAGS) -o $(BINARY) cmd/main.go
clean:
@rm -f $(BINARY)
@ -77,11 +77,11 @@ clean:
run: clean
@go run $(BUILD_FLAGS) main.go $(ARGS)
install: gen
install:
@echo $(GOPATH)
@echo "Commit Hash: `git rev-parse HEAD`"
@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
@go install $(BUILD_FLAGS)
@go install $(BUILD_FLAGS) cmd
@echo "New Hash:" `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`
uninstall: clean


@ -7,7 +7,7 @@ import (
type actionFn func(w http.ResponseWriter, r *http.Request) error
func newAction(a configAction) (http.Handler, error) {
func newAction(a *Action) (http.Handler, error) {
var fn actionFn
var err error
@ -23,17 +23,16 @@ func newAction(a configAction) (http.Handler, error) {
httpFn := func(w http.ResponseWriter, r *http.Request) {
if err := fn(w, r); err != nil {
errlog.Error().Err(err).Send()
errorResp(w, err)
renderErr(w, err, nil)
}
}
return http.HandlerFunc(httpFn), nil
}
func newSQLAction(a configAction) (actionFn, error) {
func newSQLAction(a *Action) (actionFn, error) {
fn := func(w http.ResponseWriter, r *http.Request) error {
_, err := db.Exec(r.Context(), a.SQL)
_, err := db.ExecContext(r.Context(), a.SQL)
return err
}

cmd/internal/serv/api.go (new file, 106 lines)

@ -0,0 +1,106 @@
package serv
import (
"time"
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
"github.com/dosco/super-graph/core"
"github.com/spf13/viper"
)
const (
LogLevelNone int = iota
LogLevelInfo
LogLevelWarn
LogLevelError
LogLevelDebug
)
type Core = core.Config
// Config struct holds the Super Graph config values
type Config struct {
Core `mapstructure:",squash"`
Serv `mapstructure:",squash"`
cpath string
vi *viper.Viper
}
// Serv struct contains config values used by the Super Graph service
type Serv struct {
AppName string `mapstructure:"app_name"`
Production bool
LogLevel string `mapstructure:"log_level"`
HostPort string `mapstructure:"host_port"`
Host string
Port string
HTTPGZip bool `mapstructure:"http_compress"`
WebUI bool `mapstructure:"web_ui"`
EnableTracing bool `mapstructure:"enable_tracing"`
WatchAndReload bool `mapstructure:"reload_on_config_change"`
AuthFailBlock bool `mapstructure:"auth_fail_block"`
SeedFile string `mapstructure:"seed_file"`
MigrationsPath string `mapstructure:"migrations_path"`
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
DebugCORS bool `mapstructure:"cors_debug"`
Auth auth.Auth
Auths []auth.Auth
DB struct {
Type string
Host string
Port uint16
DBName string
User string
Password string
Schema string
PoolSize int32 `mapstructure:"pool_size"`
MaxRetries int `mapstructure:"max_retries"`
PingTimeout time.Duration `mapstructure:"ping_timeout"`
} `mapstructure:"database"`
Actions []Action
}
// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
Name string
Type string
Cookie string
CredsInHeader bool `mapstructure:"creds_in_header"`
Rails struct {
Version string
SecretKeyBase string `mapstructure:"secret_key_base"`
URL string
Password string
MaxIdle int `mapstructure:"max_idle"`
MaxActive int `mapstructure:"max_active"`
Salt string
SignSalt string `mapstructure:"sign_salt"`
AuthSalt string `mapstructure:"auth_salt"`
}
JWT struct {
Provider string
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
}
Header struct {
Name string
Value string
Exists bool
}
}
// Action struct contains config values for a Super Graph service action
type Action struct {
Name string
SQL string
AuthName string `mapstructure:"auth_name"`
}
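
The mapstructure tags on these structs are what let viper map config keys like auth_name onto Go fields. A minimal sketch of that decoding step using the mitchellh/mapstructure package that viper builds on (the action name, SQL, and auth values are invented for illustration):

package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// Action mirrors the struct defined in api.go above.
type Action struct {
	Name     string
	SQL      string
	AuthName string `mapstructure:"auth_name"`
}

func main() {
	// Roughly what viper hands over after parsing config YAML such as:
	//   actions:
	//     - name: refresh_stats
	//       sql: SELECT 1
	//       auth_name: my_auth
	raw := map[string]interface{}{
		"name":      "refresh_stats", // invented example values
		"sql":       "SELECT 1",
		"auth_name": "my_auth",
	}

	var a Action
	if err := mapstructure.Decode(raw, &a); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", a) // {Name:refresh_stats SQL:SELECT 1 AuthName:my_auth}
}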


@ -1,17 +1,14 @@
package serv
import (
"database/sql"
"fmt"
_log "log"
"os"
"runtime"
"strings"
"github.com/dosco/super-graph/allow"
"github.com/dosco/super-graph/psql"
"github.com/dosco/super-graph/qcode"
"github.com/jackc/pgx/v4/pgxpool"
"github.com/rs/zerolog"
"github.com/spf13/cobra"
"github.com/spf13/viper"
"go.uber.org/zap"
)
//go:generate rice embed-go
@ -29,21 +26,18 @@ var (
)
var (
logger zerolog.Logger // logger for everything but errors
errlog zerolog.Logger // logger for errors includes line numbers
conf *config // parsed config
confPath string // path to the config file
db *pgxpool.Pool // database connection pool
schema *psql.DBSchema // database tables, columns and relationships
allowList *allow.List // allow.list contains queries allowed in production
qcompile *qcode.Compiler // qcode compiler
pcompile *psql.Compiler // postgres sql compiler
secretKey [32]byte // encryption key
internalKey [32]byte // encryption key used for internal needs
log *_log.Logger // logger
zlog *zap.Logger // fast logger
logLevel int // log level
conf *Config // parsed config
confPath string // path to the config file
db *sql.DB // database connection pool
secretKey [32]byte // encryption key
)
func Cmd() {
initLog()
log = _log.New(os.Stdout, "", 0)
zlog = zap.NewExample()
rootCmd := &cobra.Command{
Use: "super-graph",
@ -136,12 +130,12 @@ e.g. db:migrate -+1
Run: cmdNew,
})
rootCmd.AddCommand(&cobra.Command{
Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
Short: "Dump config to file",
Long: "Dump current config to a file in the selected format",
Run: cmdConfDump,
})
// rootCmd.AddCommand(&cobra.Command{
// Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
// Short: "Dump config to file",
// Long: "Dump current config to a file in the selected format",
// Run: cmdConfDump,
// })
rootCmd.AddCommand(&cobra.Command{
Use: "version",
@ -149,11 +143,11 @@ e.g. db:migrate -+1
Run: cmdVersion,
})
rootCmd.Flags().StringVar(&confPath,
rootCmd.PersistentFlags().StringVar(&confPath,
"path", "./config", "path to config files")
if err := rootCmd.Execute(); err != nil {
errlog.Fatal().Err(err).Send()
log.Fatalf("ERR %s", err)
}
}


@ -0,0 +1,21 @@
package serv
// func cmdConfDump(cmd *cobra.Command, args []string) {
// if len(args) != 1 {
// cmd.Help() //nolint: errcheck
// os.Exit(1)
// }
// fname := fmt.Sprintf("%s.%s", config.GetConfigName(), args[0])
// conf, err := initConf()
// if err != nil {
// log.Fatalf("ERR failed to read config: %s", err)
// }
// if err := conf.WriteConfigAs(fname); err != nil {
// log.Fatalf("ERR failed to write config: %s", err)
// }
// log.Printf("INF config dumped to ./%s", fname)
// }


@ -1,7 +1,6 @@
package serv
import (
"context"
"fmt"
"os"
"path"
@ -10,7 +9,7 @@ import (
"strings"
"time"
"github.com/dosco/super-graph/migrate"
"github.com/dosco/super-graph/cmd/internal/serv/internal/migrate"
"github.com/spf13/cobra"
)
@ -27,7 +26,7 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
cmdDBCreate(cmd, []string{})
cmdDBMigrate(cmd, []string{"up"})
sfile := path.Join(confPath, conf.SeedFile)
sfile := path.Join(conf.cpath, conf.SeedFile)
_, err := os.Stat(sfile)
if err == nil {
@ -36,61 +35,59 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
}
if !os.IsNotExist(err) {
errlog.Fatal().Err(err).Msgf("unable to check if '%s' exists", sfile)
log.Fatalf("ERR unable to check if '%s' exists: %s", sfile, err)
}
logger.Warn().Msgf("failed to read seed file '%s'", sfile)
log.Printf("WRN failed to read seed file '%s'", sfile)
}
func cmdDBReset(cmd *cobra.Command, args []string) {
initConfOnce()
if conf.Production {
errlog.Fatal().Msg("db:reset does not work in production")
return
log.Fatalln("ERR db:reset does not work in production")
}
cmdDBDrop(cmd, []string{})
cmdDBSetup(cmd, []string{})
}
func cmdDBCreate(cmd *cobra.Command, args []string) {
initConfOnce()
ctx := context.Background()
conn, err := initDB(conf, false)
db, err := initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close(ctx)
defer db.Close()
sql := fmt.Sprintf(`CREATE DATABASE "%s"`, conf.DB.DBName)
_, err = conn.Exec(ctx, sql)
_, err = db.Exec(sql)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to create database")
log.Fatalf("ERR failed to create database: %s", err)
}
logger.Info().Msgf("created database '%s'", conf.DB.DBName)
log.Printf("INF created database '%s'", conf.DB.DBName)
}
func cmdDBDrop(cmd *cobra.Command, args []string) {
initConfOnce()
ctx := context.Background()
conn, err := initDB(conf, false)
db, err := initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close(ctx)
defer db.Close()
sql := fmt.Sprintf(`DROP DATABASE IF EXISTS "%s"`, conf.DB.DBName)
_, err = conn.Exec(ctx, sql)
_, err = db.Exec(sql)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to create database")
log.Fatalf("ERR failed to drop database: %s", err)
}
logger.Info().Msgf("dropped database '%s'", conf.DB.DBName)
log.Printf("INF dropped database '%s'", conf.DB.DBName)
}
func cmdDBNew(cmd *cobra.Command, args []string) {
@ -104,8 +101,7 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
m, err := migrate.FindMigrations(conf.MigrationsPath)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading migrations:\n %v\n", err)
os.Exit(1)
log.Fatalf("ERR error loading migrations: %s", err)
}
mname := fmt.Sprintf("%d_%s.sql", len(m), name)
@ -114,17 +110,16 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
mpath := filepath.Join(conf.MigrationsPath, mname)
mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
log.Fatalf("ERR %s", err)
}
defer mfile.Close()
_, err = mfile.WriteString(newMigrationText)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
log.Fatalf("ERR %s", err)
}
logger.Info().Msgf("created migration '%s'", mpath)
log.Printf("INF created migration '%s'", mpath)
}
func cmdDBMigrate(cmd *cobra.Command, args []string) {
@ -136,30 +131,30 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
initConfOnce()
dest := args[0]
conn, err := initDB(conf, true)
conn, err := initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close(context.Background())
defer conn.Close()
m, err := migrate.NewMigrator(conn, "schema_version")
if err != nil {
errlog.Fatal().Err(err).Msg("failed to initialize migrator")
log.Fatalf("ERR failed to initialize migrator: %s", err)
}
m.Data = getMigrationVars()
err = m.LoadMigrations(conf.MigrationsPath)
err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
if err != nil {
errlog.Fatal().Err(err).Msg("failed to load migrations")
log.Fatalf("ERR failed to load migrations: %s", err)
}
if len(m.Migrations) == 0 {
errlog.Fatal().Msg("No migrations found")
log.Fatalf("ERR no migrations found")
}
m.OnStart = func(sequence int32, name, direction, sql string) {
logger.Info().Msgf("%s executing %s %s\n%s\n\n",
log.Printf("INF %s executing %s %s\n%s\n\n",
time.Now().Format("2006-01-02 15:04:05"), name, direction, sql)
}
@ -174,7 +169,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
var n int64
n, err = strconv.ParseInt(d, 10, 32)
if err != nil {
errlog.Fatal().Err(err).Msg("invalid destination")
log.Fatalf("ERR invalid destination: %s", err)
}
return int32(n)
}
@ -203,58 +198,56 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
}
if err != nil {
logger.Fatal().Err(err).Send()
log.Fatalf("ERR %s", err)
// if err, ok := err.(m.MigrationPgError); ok {
// if err.Detail != "" {
// info.Err(err).Msg(err.Detail)
// log.Fatalf("ERR %s", err.Detail)
// }
// if err.Position != 0 {
// ele, err := ExtractErrorLine(err.Sql, int(err.Position))
// if err != nil {
// errlog.Fatal().Err(err).Send()
// log.Fatalf("ERR %s", err)
// }
// prefix := fmt.Sprintf()
// logger.Info().Msgf("line %d, %s%s", ele.LineNum, prefix, ele.Text)
// log.Fatalf("INF line %d, %s%s", ele.LineNum, ele.Text)
// }
// }
// os.Exit(1)
}
logger.Info().Msg("migration done")
log.Println("INF migration done")
}
func cmdDBStatus(cmd *cobra.Command, args []string) {
initConfOnce()
conn, err := initDB(conf, true)
db, err := initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close(context.Background())
defer db.Close()
m, err := migrate.NewMigrator(conn, "schema_version")
m, err := migrate.NewMigrator(db, "schema_version")
if err != nil {
errlog.Fatal().Err(err).Msg("failed to initialize migrator")
log.Fatalf("ERR failed to initialize migrator: %s", err)
}
m.Data = getMigrationVars()
err = m.LoadMigrations(conf.MigrationsPath)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to load migrations")
log.Fatalf("ERR failed to load migrations: %s", err)
}
if len(m.Migrations) == 0 {
errlog.Fatal().Msg("no migrations found")
log.Fatalf("ERR no migrations found")
}
mver, err := m.GetCurrentVersion()
if err != nil {
errlog.Fatal().Err(err).Msg("failed to retrieve migration")
log.Fatalf("ERR failed to retrieve migration: %s", err)
}
var status string
@ -265,10 +258,8 @@ func cmdDBStatus(cmd *cobra.Command, args []string) {
status = "migration(s) pending"
}
fmt.Println("status: ", status)
fmt.Printf("version: %d of %d\n", mver, len(m.Migrations))
fmt.Println("host: ", conf.DB.Host)
fmt.Println("database:", conf.DB.DBName)
log.Printf("INF status: %s, version: %d of %d, host: %s, database: %s",
status, mver, len(m.Migrations), conf.DB.Host, conf.DB.DBName)
}
type ErrorLineExtract struct {
@ -315,9 +306,12 @@ func getMigrationVars() map[string]interface{} {
func initConfOnce() {
var err error
if conf == nil {
if conf, err = initConf(); err != nil {
errlog.Fatal().Err(err).Msg("failed to read config")
}
if conf != nil {
return
}
conf, err = initConf()
if err != nil {
log.Fatalf("ERR failed to read config: %s", err)
}
}


@ -98,7 +98,7 @@ func cmdNew(cmd *cobra.Command, args []string) {
}
})
logger.Info().Msgf("app '%s' initialized", name)
log.Printf("INF app '%s' initialized", name)
}
type Templ struct {
@ -107,7 +107,7 @@ type Templ struct {
}
func newTempl(data map[string]string) *Templ {
return &Templ{rice.MustFindBox("../tmpl"), data}
return &Templ{rice.MustFindBox("./tmpl"), data}
}
func (t *Templ) get(name string) ([]byte, error) {
@ -133,18 +133,18 @@ func ifNotExists(filePath string, doFn func(string) error) {
_, err := os.Stat(filePath)
if err == nil {
logger.Info().Err(err).Msgf("create skipped '%s' exists", filePath)
log.Printf("ERR create skipped '%s' exists", filePath)
return
}
if !os.IsNotExist(err) {
errlog.Fatal().Err(err).Msgf("unable to check if '%s' exists", filePath)
log.Fatalf("ERR unable to check if '%s' exists", filePath)
}
err = doFn(filePath)
if err != nil {
errlog.Fatal().Err(err).Msgf("unable to create '%s'", filePath)
log.Fatalf("ERR unable to create '%s'", filePath)
}
logger.Info().Msgf("created '%s'", filePath)
log.Printf("INF created '%s'", filePath)
}


@ -1,7 +1,6 @@
package serv
import (
"bytes"
"context"
"encoding/csv"
"encoding/json"
@ -16,37 +15,43 @@ import (
"github.com/brianvoe/gofakeit"
"github.com/dop251/goja"
"github.com/jackc/pgx/v4"
"github.com/dosco/super-graph/core"
"github.com/spf13/cobra"
"github.com/valyala/fasttemplate"
)
func cmdDBSeed(cmd *cobra.Command, args []string) {
var err error
if conf, err = initConf(); err != nil {
errlog.Fatal().Err(err).Msg("failed to read config")
log.Fatalf("ERR failed to read config: %s", err)
}
conf.Production = false
db, err = initDBPool(conf)
db, err = initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
initCompiler()
sfile := path.Join(conf.cpath, conf.SeedFile)
sfile := path.Join(confPath, conf.SeedFile)
b, err := ioutil.ReadFile(path.Join(confPath, conf.SeedFile))
b, err := ioutil.ReadFile(sfile)
if err != nil {
errlog.Fatal().Err(err).Msgf("failed to read seed file '%s'", sfile)
log.Fatalf("ERR failed to read seed file %s: %s", sfile, err)
}
sg, err = core.NewSuperGraph(&conf.Core, db)
if err != nil {
log.Fatalf("ERR failed to initialize Super Graph: %s", err)
}
graphQLFn := func(query string, data interface{}, opt map[string]string) map[string]interface{} {
return graphQLFunc(sg, query, data, opt)
}
vm := goja.New()
vm.Set("graphql", graphQLFunc)
vm.Set("import_csv", importCSV)
vm.Set("graphql", graphQLFn)
//vm.Set("import_csv", importCSV)
console := vm.NewObject()
console.Set("log", logFunc) //nolint: errcheck
@ -58,77 +63,44 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
_, err = vm.RunScript("seed.js", string(b))
if err != nil {
errlog.Fatal().Err(err).Msg("failed to execute script")
log.Fatalf("ERR failed to execute script: %s", err)
}
logger.Info().Msg("seed script done")
log.Println("INF seed script done")
}
//func runFunc(call goja.FunctionCall) {
func graphQLFunc(query string, data interface{}, opt map[string]string) map[string]interface{} {
vars, err := json.Marshal(data)
if err != nil {
errlog.Fatal().Err(err).Send()
}
c := context.Background()
// func runFunc(call goja.FunctionCall) {
func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
ct := context.Background()
if v, ok := opt["user_id"]; ok && len(v) != 0 {
c = context.WithValue(c, userIDKey, v)
ct = context.WithValue(ct, core.UserIDKey, v)
}
var role string
// var role string
if v, ok := opt["role"]; ok && len(v) != 0 {
role = v
} else {
role = "user"
// if v, ok := opt["role"]; ok && len(v) != 0 {
// role = v
// } else {
// role = "user"
// }
var vars []byte
var err error
if vars, err = json.Marshal(data); err != nil {
log.Fatalf("ERR %s", err)
}
stmts, err := buildRoleStmt([]byte(query), vars, role)
res, err := sg.GraphQL(ct, query, vars)
if err != nil {
errlog.Fatal().Err(err).Msg("graphql query failed")
}
st := stmts[0]
buf := &bytes.Buffer{}
t := fasttemplate.New(st.sql, openVar, closeVar)
_, err = t.ExecuteFunc(buf, argMap(c, vars))
if err != nil {
errlog.Fatal().Err(err).Send()
}
finalSQL := buf.String()
tx, err := db.Begin(c)
if err != nil {
errlog.Fatal().Err(err).Send()
}
defer tx.Rollback(c) //nolint: errcheck
if conf.DB.SetUserID {
if err := setLocalUserID(c, tx); err != nil {
errlog.Fatal().Err(err).Send()
}
}
var root []byte
if err = tx.QueryRow(context.Background(), finalSQL).Scan(&root); err != nil {
errlog.Fatal().Err(err).Msg("sql query failed")
}
if err := tx.Commit(c); err != nil {
errlog.Fatal().Err(err).Send()
log.Fatalf("ERR %s", err)
}
val := make(map[string]interface{})
err = json.Unmarshal(root, &val)
if err != nil {
errlog.Fatal().Err(err).Send()
if err = json.Unmarshal(res.Data, &val); err != nil {
log.Fatalf("ERR %s", err)
}
return val
@ -203,36 +175,34 @@ func (c *csvSource) Err() error {
return nil
}
func importCSV(table, filename string) int64 {
if filename[0] != '/' {
filename = path.Join(confPath, filename)
}
// func importCSV(table, filename string) int64 {
// if filename[0] != '/' {
// filename = path.Join(conf.ConfigPathUsed(), filename)
// }
s, err := NewCSVSource(filename)
if err != nil {
errlog.Fatal().Err(err).Send()
}
// s, err := NewCSVSource(filename)
// if err != nil {
// log.Fatalf("ERR %s", err)
// }
var cols []string
colval, _ := s.Values()
// var cols []string
// colval, _ := s.Values()
for _, c := range colval {
cols = append(cols, c.(string))
}
// for _, c := range colval {
// cols = append(cols, c.(string))
// }
n, err := db.CopyFrom(
context.Background(),
pgx.Identifier{table},
cols,
s)
// n, err := db.Exec(fmt.Sprintf("COPY %s FROM STDIN WITH "),
// cols,
// s)
if err != nil {
err = fmt.Errorf("%w (line no %d)", err, s.i)
errlog.Fatal().Err(err).Send()
}
// if err != nil {
// err = fmt.Errorf("%w (line no %d)", err, s.i)
// log.Fatalf("ERR %s", err)
// }
return n
}
// return n
// }
//nolint: errcheck
func logFunc(args ...interface{}) {


@ -0,0 +1,37 @@
package serv
import (
"github.com/dosco/super-graph/core"
"github.com/spf13/cobra"
)
var (
sg *core.SuperGraph
)
func cmdServ(cmd *cobra.Command, args []string) {
var err error
conf, err = initConf()
if err != nil {
fatalInProd(err, "failed to read config")
}
initWatcher()
db, err = initDB(conf)
if err != nil {
fatalInProd(err, "failed to connect to database")
}
// if conf != nil && db != nil {
// initResolvers()
// }
sg, err = core.NewSuperGraph(&conf.Core, db)
if err != nil {
fatalInProd(err, "failed to initialize Super Graph")
}
startHTTP()
}
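
cmdServ shows the new shape of the service: read config, open a plain *sql.DB, hand both to the embeddable core. A minimal standalone sketch of the same core calls used in this diff (the DSN and the query are invented, and a real core.Config would carry roles, allow-list settings and so on):

package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	// Invented DSN; initDB in this changeset builds one from config values.
	db, err := sql.Open("pgx", "postgres://postgres@localhost:5432/app")
	if err != nil {
		log.Fatal(err)
	}

	sg, err := core.NewSuperGraph(&core.Config{}, db)
	if err != nil {
		log.Fatal(err)
	}

	// core.UserIDKey is the same context key the auth handlers set.
	ctx := context.WithValue(context.Background(), core.UserIDKey, "1")

	res, err := sg.GraphQL(ctx, `query { users { id } }`, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(res.Data))
}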

cmd/internal/serv/config.go (new file, 115 lines)

@ -0,0 +1,115 @@
package serv
import (
"fmt"
"os"
"path"
"strings"
"github.com/spf13/viper"
)
// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
// environment variable. This is the best way to create a new Super Graph config.
func ReadInConfig(configFile string) (*Config, error) {
cpath := path.Dir(configFile)
cfile := path.Base(configFile)
vi := newViper(cpath, cfile)
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
inherits := vi.GetString("inherits")
if len(inherits) != 0 {
vi = newViper(cpath, inherits)
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
if vi.IsSet("inherits") {
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
inherits,
vi.GetString("inherits"))
}
vi.SetConfigName(cfile)
if err := vi.MergeInConfig(); err != nil {
return nil, err
}
}
c := &Config{cpath: cpath, vi: vi}
if err := vi.Unmarshal(&c); err != nil {
return nil, fmt.Errorf("failed to decode config, %v", err)
}
if len(c.Core.AllowListFile) == 0 {
c.Core.AllowListFile = path.Join(cpath, "allow.list")
}
return c, nil
}
func newViper(configPath, configFile string) *viper.Viper {
vi := viper.New()
vi.SetEnvPrefix("SG")
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
vi.AutomaticEnv()
vi.AddConfigPath(configPath)
vi.SetConfigName(configFile)
vi.AddConfigPath("./config")
vi.SetDefault("host_port", "0.0.0.0:8080")
vi.SetDefault("web_ui", false)
vi.SetDefault("enable_tracing", false)
vi.SetDefault("auth_fail_block", "always")
vi.SetDefault("seed_file", "seed.js")
vi.SetDefault("database.type", "postgres")
vi.SetDefault("database.host", "localhost")
vi.SetDefault("database.port", 5432)
vi.SetDefault("database.user", "postgres")
vi.SetDefault("database.schema", "public")
vi.SetDefault("env", "development")
vi.BindEnv("env", "GO_ENV") //nolint: errcheck
vi.BindEnv("host", "HOST") //nolint: errcheck
vi.BindEnv("port", "PORT") //nolint: errcheck
vi.SetDefault("auth.rails.max_idle", 80)
vi.SetDefault("auth.rails.max_active", 12000)
return vi
}
func GetConfigName() string {
if len(os.Getenv("GO_ENV")) == 0 {
return "dev"
}
ge := strings.ToLower(os.Getenv("GO_ENV"))
switch {
case strings.HasPrefix(ge, "pro"):
return "prod"
case strings.HasPrefix(ge, "sta"):
return "stage"
case strings.HasPrefix(ge, "tes"):
return "test"
case strings.HasPrefix(ge, "dev"):
return "dev"
}
return ge
}
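
newViper wires environment variables to config keys through the SG_ prefix and the dot-to-underscore key replacer, so SG_DATABASE_HOST overrides database.host. A minimal sketch of just that behaviour (the hostname is invented):

package main

import (
	"fmt"
	"os"
	"strings"

	"github.com/spf13/viper"
)

func main() {
	os.Setenv("SG_DATABASE_HOST", "db.internal") // invented value

	vi := viper.New()
	vi.SetEnvPrefix("SG")
	vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	vi.AutomaticEnv()
	vi.SetDefault("database.host", "localhost")

	// The environment variable wins over the default.
	fmt.Println(vi.GetString("database.host")) // prints: db.internal
}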


@ -0,0 +1,7 @@
package serv
// func (c *coreContext) handleReq(w io.Writer, req *http.Request) error {
// return nil
// }


@ -0,0 +1,25 @@
package serv
import (
"context"
"net/http"
)
var healthyResponse = []byte("All's Well")
func health(w http.ResponseWriter, _ *http.Request) {
ct, cancel := context.WithTimeout(context.Background(), conf.DB.PingTimeout)
defer cancel()
if err := db.PingContext(ct); err != nil {
log.Printf("ERR error pinging database: %s", err)
w.WriteHeader(http.StatusInternalServerError)
return
}
if _, err := w.Write(healthyResponse); err != nil {
log.Printf("ERR error writing healthy response: %s", err)
w.WriteHeader(http.StatusInternalServerError)
return
}
}

cmd/internal/serv/http.go (new file, 124 lines)

@ -0,0 +1,124 @@
package serv
import (
"encoding/json"
"errors"
"io"
"io/ioutil"
"net/http"
"strings"
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
"github.com/dosco/super-graph/core"
"github.com/rs/cors"
"go.uber.org/zap"
)
const (
maxReadBytes = 100000 // 100Kb
introspectionQuery = "IntrospectionQuery"
)
var (
errUnauthorized = errors.New("not authorized")
)
type gqlReq struct {
OpName string `json:"operationName"`
Query string `json:"query"`
Vars json.RawMessage `json:"variables"`
}
type errorResp struct {
Error error `json:"error"`
}
func apiV1Handler() http.Handler {
h, err := auth.WithAuth(http.HandlerFunc(apiV1), &conf.Auth)
if err != nil {
log.Fatalf("ERR %s", err)
}
if len(conf.AllowedOrigins) != 0 {
c := cors.New(cors.Options{
AllowedOrigins: conf.AllowedOrigins,
AllowCredentials: true,
Debug: conf.DebugCORS,
})
h = c.Handler(h)
}
return h
}
func apiV1(w http.ResponseWriter, r *http.Request) {
ct := r.Context()
//nolint: errcheck
if conf.AuthFailBlock && !auth.IsAuth(ct) {
renderErr(w, errUnauthorized, nil)
return
}
b, err := ioutil.ReadAll(io.LimitReader(r.Body, maxReadBytes))
if err != nil {
renderErr(w, err, nil)
return
}
defer r.Body.Close()
req := gqlReq{}
err = json.Unmarshal(b, &req)
if err != nil {
renderErr(w, err, nil)
return
}
if strings.EqualFold(req.OpName, introspectionQuery) {
introspect(w)
return
}
res, err := sg.GraphQL(ct, req.Query, req.Vars)
if logLevel >= LogLevelDebug {
log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
}
if err != nil {
renderErr(w, err, res)
return
}
json.NewEncoder(w).Encode(res)
if logLevel >= LogLevelInfo {
zlog.Info("success",
zap.String("op", res.Operation()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
)
}
}
//nolint: errcheck
func renderErr(w http.ResponseWriter, err error, res *core.Result) {
if err == errUnauthorized {
w.WriteHeader(http.StatusUnauthorized)
}
json.NewEncoder(w).Encode(&errorResp{err})
if logLevel >= LogLevelError {
if res != nil {
zlog.Error(err.Error(),
zap.String("op", res.Operation()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
)
} else {
zlog.Error(err.Error())
}
}
}
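
apiV1 decodes the request body into the gqlReq struct above, so a client posts JSON with operationName, query and variables to /api/v1/graphql (the same endpoint path the web UI bundle below uses). A minimal client sketch (host and query are invented):

package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"log"
	"net/http"
)

func main() {
	// Field names match the json tags on gqlReq.
	body := []byte(`{"operationName":"","query":"query { users { id } }","variables":{}}`)

	res, err := http.Post("http://localhost:8080/api/v1/graphql",
		"application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()

	b, _ := ioutil.ReadAll(res.Body)
	fmt.Println(string(b))
}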

cmd/internal/serv/init.go (new file, 153 lines)

@ -0,0 +1,153 @@
package serv
import (
"database/sql"
"fmt"
"path"
"time"
_ "github.com/jackc/pgx/v4/stdlib"
)
func initConf() (*Config, error) {
c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
if err != nil {
return nil, err
}
switch c.LogLevel {
case "debug":
logLevel = LogLevelDebug
case "error":
logLevel = LogLevelError
case "warn":
logLevel = LogLevelWarn
case "info":
logLevel = LogLevelInfo
default:
logLevel = LogLevelNone
}
// Auths: validate and sanitize
am := make(map[string]struct{})
for i := 0; i < len(c.Auths); i++ {
a := &c.Auths[i]
a.Name = sanitize(a.Name)
if _, ok := am[a.Name]; ok {
c.Auths = append(c.Auths[:i], c.Auths[i+1:]...)
log.Printf("WRN duplicate auth found: %s", a.Name)
}
am[a.Name] = struct{}{}
}
// Actions: validate and sanitize
axm := make(map[string]struct{})
for i := 0; i < len(c.Actions); i++ {
a := &c.Actions[i]
a.Name = sanitize(a.Name)
a.AuthName = sanitize(a.AuthName)
if _, ok := axm[a.Name]; ok {
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
log.Printf("WRN duplicate action found: %s", a.Name)
}
if _, ok := am[a.AuthName]; !ok {
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
log.Printf("WRN invalid auth_name '%s' for auth: %s", a.AuthName, a.Name)
}
axm[a.Name] = struct{}{}
}
var anonFound bool
for _, r := range c.Roles {
if sanitize(r.Name) == "anon" {
anonFound = true
}
}
if !anonFound {
log.Printf("WRN unauthenticated requests will be blocked. no role 'anon' defined")
c.AuthFailBlock = false
}
return c, nil
}
func initDB(c *Config) (*sql.DB, error) {
var db *sql.DB
var err error
cs := fmt.Sprintf("postgres://%s:%s@%s:%d/%s",
c.DB.User, c.DB.Password,
c.DB.Host, c.DB.Port, c.DB.DBName)
for i := 1; i < 10; i++ {
db, err = sql.Open("pgx", cs)
if err == nil {
break
}
time.Sleep(time.Duration(i*100) * time.Millisecond)
}
if err != nil {
return nil, err
}
return db, nil
// config, _ := pgxpool.ParseConfig("")
// config.ConnConfig.Host = c.DB.Host
// config.ConnConfig.Port = c.DB.Port
// config.ConnConfig.Database = c.DB.DBName
// config.ConnConfig.User = c.DB.User
// config.ConnConfig.Password = c.DB.Password
// config.ConnConfig.RuntimeParams = map[string]string{
// "application_name": c.AppName,
// "search_path": c.DB.Schema,
// }
// switch c.LogLevel {
// case "debug":
// config.ConnConfig.LogLevel = pgx.LogLevelDebug
// case "info":
// config.ConnConfig.LogLevel = pgx.LogLevelInfo
// case "warn":
// config.ConnConfig.LogLevel = pgx.LogLevelWarn
// case "error":
// config.ConnConfig.LogLevel = pgx.LogLevelError
// default:
// config.ConnConfig.LogLevel = pgx.LogLevelNone
// }
// config.ConnConfig.Logger = NewSQLLogger(logger)
// // if c.DB.MaxRetries != 0 {
// // opt.MaxRetries = c.DB.MaxRetries
// // }
// if c.DB.PoolSize != 0 {
// config.MaxConns = conf.DB.PoolSize
// }
// var db *pgxpool.Pool
// var err error
// for i := 1; i < 10; i++ {
// db, err = pgxpool.ConnectConfig(context.Background(), config)
// if err == nil {
// break
// }
// time.Sleep(time.Duration(i*100) * time.Millisecond)
// }
// if err != nil {
// return nil, err
// }
// return db, nil
}


@ -0,0 +1,127 @@
package auth
import (
"context"
"fmt"
"net/http"
"github.com/dosco/super-graph/core"
)
// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
Name string
Type string
Cookie string
CredsInHeader bool `mapstructure:"creds_in_header"`
Rails struct {
Version string
SecretKeyBase string `mapstructure:"secret_key_base"`
URL string
Password string
MaxIdle int `mapstructure:"max_idle"`
MaxActive int `mapstructure:"max_active"`
Salt string
SignSalt string `mapstructure:"sign_salt"`
AuthSalt string `mapstructure:"auth_salt"`
}
JWT struct {
Provider string
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
}
Header struct {
Name string
Value string
Exists bool
}
}
func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
userIDProvider := r.Header.Get("X-User-ID-Provider")
if len(userIDProvider) != 0 {
ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
}
userID := r.Header.Get("X-User-ID")
if len(userID) != 0 {
ctx = context.WithValue(ctx, core.UserIDKey, userID)
}
userRole := r.Header.Get("X-User-Role")
if len(userRole) != 0 {
ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
}
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
hdr := ac.Header
if len(hdr.Name) == 0 {
return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
}
if !hdr.Exists && len(hdr.Value) == 0 {
return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
}
return func(w http.ResponseWriter, r *http.Request) {
var fo1 bool
value := r.Header.Get(hdr.Name)
switch {
case hdr.Exists:
fo1 = (len(value) == 0)
default:
fo1 = (value != hdr.Value)
}
if fo1 {
http.Error(w, "401 unauthorized", http.StatusUnauthorized)
return
}
next.ServeHTTP(w, r)
}, nil
}
func WithAuth(next http.Handler, ac *Auth) (http.Handler, error) {
var err error
if ac.CredsInHeader {
next, err = SimpleHandler(ac, next)
}
if err != nil {
return nil, err
}
switch ac.Type {
case "rails":
return RailsHandler(ac, next)
case "jwt":
return JwtHandler(ac, next)
case "header":
return HeaderHandler(ac, next)
}
return next, nil
}
func IsAuth(ct context.Context) bool {
return ct.Value(core.UserIDKey) != nil
}
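
WithAuth composes whichever strategy the Auth value names. A minimal sketch using the header strategy (the header name and value are invented, and in a real build this internal package is only importable from inside the super-graph module):

package main

import (
	"log"
	"net/http"

	"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
)

func main() {
	ac := &auth.Auth{Name: "api_key", Type: "header"}
	ac.Header.Name = "X-API-Key" // invented header name
	ac.Header.Value = "s3cret"   // invented shared secret

	next := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte("ok")) //nolint: errcheck
	})

	// Type "header" routes through HeaderHandler above.
	h, err := auth.WithAuth(next, ac)
	if err != nil {
		log.Fatal(err)
	}
	log.Fatal(http.ListenAndServe(":8080", h))
}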


@ -1,4 +1,4 @@
package serv
package auth
import (
"context"
@ -7,6 +7,7 @@ import (
"strings"
jwt "github.com/dgrijalva/jwt-go"
"github.com/dosco/super-graph/core"
)
const (
@ -14,18 +15,18 @@ const (
jwtAuth0 int = iota + 1
)
func jwtHandler(authc configAuth, next http.Handler) http.HandlerFunc {
func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
var key interface{}
var jwtProvider int
cookie := authc.Cookie
cookie := ac.Cookie
if authc.JWT.Provider == "auth0" {
if ac.JWT.Provider == "auth0" {
jwtProvider = jwtAuth0
}
secret := authc.JWT.Secret
publicKeyFile := authc.JWT.PubKeyFile
secret := ac.JWT.Secret
publicKeyFile := ac.JWT.PubKeyFile
switch {
case len(secret) != 0:
@ -34,10 +35,10 @@ func jwtHandler(authc configAuth, next http.Handler) http.HandlerFunc {
case len(publicKeyFile) != 0:
kd, err := ioutil.ReadFile(publicKeyFile)
if err != nil {
errlog.Fatal().Err(err).Send()
return nil, err
}
switch authc.JWT.PubKeyType {
switch ac.JWT.PubKeyType {
case "ecdsa":
key, err = jwt.ParseECPublicKeyFromPEM(kd)
@ -50,7 +51,7 @@ func jwtHandler(authc configAuth, next http.Handler) http.HandlerFunc {
}
if err != nil {
errlog.Fatal().Err(err).Send()
return nil, err
}
}
@ -88,11 +89,11 @@ func jwtHandler(authc configAuth, next http.Handler) http.HandlerFunc {
if jwtProvider == jwtAuth0 {
sub := strings.Split(claims.Subject, "|")
if len(sub) != 2 {
ctx = context.WithValue(ctx, userIDProviderKey, sub[0])
ctx = context.WithValue(ctx, userIDKey, sub[1])
ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
}
} else {
ctx = context.WithValue(ctx, userIDKey, claims.Subject)
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
}
next.ServeHTTP(w, r.WithContext(ctx))
@ -100,5 +101,5 @@ func jwtHandler(authc configAuth, next http.Handler) http.HandlerFunc {
}
next.ServeHTTP(w, r)
}
}, nil
}


@ -1,4 +1,4 @@
package serv
package auth
import (
"context"
@ -9,50 +9,53 @@ import (
"strings"
"github.com/bradfitz/gomemcache/memcache"
"github.com/dosco/super-graph/rails"
"github.com/dosco/super-graph/cmd/internal/serv/internal/rails"
"github.com/dosco/super-graph/core"
"github.com/garyburd/redigo/redis"
)
func railsHandler(authc configAuth, next http.Handler) http.HandlerFunc {
ru := authc.Rails.URL
func RailsHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
ru := ac.Rails.URL
if strings.HasPrefix(ru, "memcache:") {
return railsMemcacheHandler(authc, next)
return RailsMemcacheHandler(ac, next)
}
if strings.HasPrefix(ru, "redis:") {
return railsRedisHandler(authc, next)
return RailsRedisHandler(ac, next)
}
return railsCookieHandler(authc, next)
return RailsCookieHandler(ac, next)
}
func railsRedisHandler(authc configAuth, next http.Handler) http.HandlerFunc {
cookie := authc.Cookie
func RailsRedisHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
errlog.Fatal().Msg("no auth.cookie defined")
return nil, fmt.Errorf("no auth.cookie defined")
}
if len(authc.Rails.URL) == 0 {
errlog.Fatal().Msg("no auth.rails.url defined")
if len(ac.Rails.URL) == 0 {
return nil, fmt.Errorf("no auth.rails.url defined")
}
rp := &redis.Pool{
MaxIdle: authc.Rails.MaxIdle,
MaxActive: authc.Rails.MaxActive,
MaxIdle: ac.Rails.MaxIdle,
MaxActive: ac.Rails.MaxActive,
Dial: func() (redis.Conn, error) {
c, err := redis.DialURL(authc.Rails.URL)
c, err := redis.DialURL(ac.Rails.URL)
if err != nil {
errlog.Fatal().Err(err).Send()
return nil, err
}
pwd := authc.Rails.Password
pwd := ac.Rails.Password
if len(pwd) != 0 {
if _, err := c.Do("AUTH", pwd); err != nil {
errlog.Fatal().Err(err).Send()
return nil, err
}
}
return c, err
return c, nil
},
}
@ -76,24 +79,25 @@ func railsRedisHandler(authc configAuth, next http.Handler) http.HandlerFunc {
return
}
ctx := context.WithValue(r.Context(), userIDKey, userID)
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}
}, nil
}
func railsMemcacheHandler(authc configAuth, next http.Handler) http.HandlerFunc {
cookie := authc.Cookie
func RailsMemcacheHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
errlog.Fatal().Msg("no auth.cookie defined")
return nil, fmt.Errorf("no auth.cookie defined")
}
if len(authc.Rails.URL) == 0 {
errlog.Fatal().Msg("no auth.rails.url defined")
if len(ac.Rails.URL) == 0 {
return nil, fmt.Errorf("no auth.rails.url defined")
}
rURL, err := url.Parse(authc.Rails.URL)
rURL, err := url.Parse(ac.Rails.URL)
if err != nil {
errlog.Fatal().Err(err).Send()
return nil, err
}
mc := memcache.New(rURL.Host)
@ -118,49 +122,49 @@ func railsMemcacheHandler(authc configAuth, next http.Handler) http.HandlerFunc
return
}
ctx := context.WithValue(r.Context(), userIDKey, userID)
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}
}, nil
}
func railsCookieHandler(authc configAuth, next http.Handler) http.HandlerFunc {
cookie := authc.Cookie
func RailsCookieHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
errlog.Fatal().Msg("no auth.cookie defined")
return nil, fmt.Errorf("no auth.cookie defined")
}
ra, err := railsAuth(authc)
ra, err := railsAuth(ac)
if err != nil {
errlog.Fatal().Err(err).Send()
return nil, err
}
return func(w http.ResponseWriter, r *http.Request) {
ck, err := r.Cookie(cookie)
if err != nil || len(ck.Value) == 0 {
logger.Warn().Err(err).Msg("rails cookie missing")
// logger.Warn().Err(err).Msg("rails cookie missing")
next.ServeHTTP(w, r)
return
}
userID, err := ra.ParseCookie(ck.Value)
if err != nil {
logger.Warn().Err(err).Msg("failed to parse rails cookie")
// logger.Warn().Err(err).Msg("failed to parse rails cookie")
next.ServeHTTP(w, r)
return
}
ctx := context.WithValue(r.Context(), userIDKey, userID)
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}
}, nil
}
func railsAuth(authc configAuth) (*rails.Auth, error) {
secret := authc.Rails.SecretKeyBase
func railsAuth(ac *Auth) (*rails.Auth, error) {
secret := ac.Rails.SecretKeyBase
if len(secret) == 0 {
return nil, errors.New("no auth.rails.secret_key_base defined")
}
version := authc.Rails.Version
version := ac.Rails.Version
if len(version) == 0 {
return nil, errors.New("no auth.rails.version defined")
}
@ -170,16 +174,16 @@ func railsAuth(authc configAuth) (*rails.Auth, error) {
return nil, err
}
if len(authc.Rails.Salt) != 0 {
ra.Salt = authc.Rails.Salt
if len(ac.Rails.Salt) != 0 {
ra.Salt = ac.Rails.Salt
}
if len(authc.Rails.SignSalt) != 0 {
ra.SignSalt = authc.Rails.SignSalt
if len(ac.Rails.SignSalt) != 0 {
ra.SignSalt = ac.Rails.SignSalt
}
if len(authc.Rails.AuthSalt) != 0 {
ra.AuthSalt = authc.Rails.AuthSalt
if len(ac.Rails.AuthSalt) != 0 {
ra.AuthSalt = ac.Rails.AuthSalt
}
return ra, nil


@ -3,6 +3,7 @@ package migrate
import (
"bytes"
"context"
"database/sql"
"fmt"
"io/ioutil"
"os"
@ -12,7 +13,6 @@ import (
"strings"
"text/template"
"github.com/jackc/pgx/v4"
"github.com/pkg/errors"
)
@ -62,7 +62,7 @@ type MigratorOptions struct {
}
type Migrator struct {
conn *pgx.Conn
db *sql.DB
versionTable string
options *MigratorOptions
Migrations []*Migration
@ -70,12 +70,12 @@ type Migrator struct {
Data map[string]interface{} // Data available to use in migrations
}
func NewMigrator(conn *pgx.Conn, versionTable string) (m *Migrator, err error) {
return NewMigratorEx(conn, versionTable, &MigratorOptions{MigratorFS: defaultMigratorFS{}})
func NewMigrator(db *sql.DB, versionTable string) (m *Migrator, err error) {
return NewMigratorEx(db, versionTable, &MigratorOptions{MigratorFS: defaultMigratorFS{}})
}
func NewMigratorEx(conn *pgx.Conn, versionTable string, opts *MigratorOptions) (m *Migrator, err error) {
m = &Migrator{conn: conn, versionTable: versionTable, options: opts}
func NewMigratorEx(db *sql.DB, versionTable string, opts *MigratorOptions) (m *Migrator, err error) {
m = &Migrator{db: db, versionTable: versionTable, options: opts}
err = m.ensureSchemaVersionTableExists()
m.Migrations = make([]*Migration, 0)
m.Data = make(map[string]interface{})
@ -254,14 +254,13 @@ func (m *Migrator) Migrate() error {
// MigrateTo migrates to targetVersion
func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
ctx := context.Background()
// Lock to ensure multiple migrations cannot occur simultaneously
lockNum := int64(9628173550095224) // arbitrary random number
if _, lockErr := m.conn.Exec(ctx, "select pg_try_advisory_lock($1)", lockNum); lockErr != nil {
if _, lockErr := m.db.Exec("select pg_try_advisory_lock($1)", lockNum); lockErr != nil {
return lockErr
}
defer func() {
_, unlockErr := m.conn.Exec(ctx, "select pg_advisory_unlock($1)", lockNum)
_, unlockErr := m.db.Exec("select pg_advisory_unlock($1)", lockNum)
if err == nil && unlockErr != nil {
err = unlockErr
}
@ -310,11 +309,11 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
ctx := context.Background()
tx, err := m.conn.Begin(ctx)
tx, err := m.db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer tx.Rollback(ctx) //nolint: errcheck
defer tx.Rollback() //nolint: errcheck
// Fire on start callback
if m.OnStart != nil {
@ -322,7 +321,7 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
}
// Execute the migration
_, err = tx.Exec(ctx, sql)
_, err = tx.Exec(sql)
if err != nil {
// if err, ok := err.(pgx.PgError); ok {
// return MigrationPgError{Sql: sql, PgError: err}
@ -336,12 +335,12 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
// }
// Add one to the version
_, err = tx.Exec(ctx, "update "+m.versionTable+" set version=$1", sequence)
_, err = tx.Exec("update "+m.versionTable+" set version=$1", sequence)
if err != nil {
return err
}
err = tx.Commit(ctx)
err = tx.Commit()
if err != nil {
return err
}
@ -353,14 +352,13 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
}
func (m *Migrator) GetCurrentVersion() (v int32, err error) {
err = m.conn.QueryRow(context.Background(),
"select version from "+m.versionTable).Scan(&v)
err = m.db.QueryRow("select version from " + m.versionTable).Scan(&v)
return v, err
}
func (m *Migrator) ensureSchemaVersionTableExists() (err error) {
_, err = m.conn.Exec(context.Background(), fmt.Sprintf(`
_, err = m.db.Exec(fmt.Sprintf(`
create table if not exists %s(version int4 not null);
insert into %s(version)
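
MigrateTo above serializes concurrent migrators with a Postgres advisory lock before running anything. A minimal sketch of that locking pattern on database/sql (the DSN is invented; the lock number is the one from the diff). Advisory locks are session-scoped, so a stricter version would pin a single connection rather than go through the pool:

package main

import (
	"database/sql"
	"log"

	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	db, err := sql.Open("pgx", "postgres://postgres@localhost:5432/app") // invented DSN
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	lockNum := int64(9628173550095224) // same arbitrary number the migrator uses

	// Take the advisory lock so a second migrator backs off.
	if _, err := db.Exec("select pg_try_advisory_lock($1)", lockNum); err != nil {
		log.Fatal(err)
	}
	defer db.Exec("select pg_advisory_unlock($1)", lockNum) //nolint: errcheck

	// ... load and apply migrations while holding the lock ...
}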


@ -116,7 +116,7 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
continue
}
logger.Info().Msgf("Reloading, file change detected '%s'", event)
log("INF Reloading, file change detected: %s", event)
var trigger bool
switch runtime.GOOS {
@ -172,7 +172,7 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
func ReExec() {
err := syscall.Exec(binSelf, append([]string{binSelf}, os.Args[1:]...), os.Environ())
if err != nil {
errlog.Fatal().Err(err).Msg("cannot restart")
log.Fatalf("ERR cannot restart: %s", err)
}
}

File diff suppressed because one or more lines are too long


@ -11,49 +11,11 @@ import (
rice "github.com/GeertJohan/go.rice"
"github.com/NYTimes/gziphandler"
"github.com/dosco/super-graph/psql"
"github.com/dosco/super-graph/qcode"
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
)
func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
di, err := psql.GetDBInfo(db)
if err != nil {
return nil, nil, err
}
if err = addTables(c, di); err != nil {
return nil, nil, err
}
if err = addForeignKeys(c, di); err != nil {
return nil, nil, err
}
schema, err = psql.NewDBSchema(di, c.getAliasMap())
if err != nil {
return nil, nil, err
}
qc, err := qcode.NewCompiler(qcode.Config{
Blocklist: c.DB.Blocklist,
})
if err != nil {
return nil, nil, err
}
if err := addRoles(c, qc); err != nil {
return nil, nil, err
}
pc := psql.NewCompiler(psql.Config{
Schema: schema,
Vars: c.DB.Vars,
})
return qc, pc, nil
}
func initWatcher(cpath string) {
func initWatcher() {
cpath := conf.cpath
if conf != nil && !conf.WatchAndReload {
return
}
@ -66,9 +28,9 @@ func initWatcher(cpath string) {
}
go func() {
err := Do(logger.Printf, d)
err := Do(log.Printf, d)
if err != nil {
errlog.Fatal().Err(err).Send()
log.Fatalf("ERR %s", err)
}
}()
}
@ -103,7 +65,7 @@ func startHTTP() {
routes, err := routeHandler()
if err != nil {
errlog.Fatal().Err(err).Send()
log.Fatalf("ERR %s", err)
}
srv := &http.Server{
@ -121,7 +83,7 @@ func startHTTP() {
<-sigint
if err := srv.Shutdown(context.Background()); err != nil {
errlog.Error().Err(err).Msg("shutdown signal received")
log.Fatalln("INF shutdown signal received")
}
close(idleConnsClosed)
}()
@ -130,16 +92,13 @@ func startHTTP() {
db.Close()
})
logger.Info().
Str("version", version).
Str("git_branch", gitBranch).
Str("host_post", hostPort).
Str("app_name", appName).
Str("env", env).
Msgf("%s listening", serverName)
log.Printf("INF version: %s, git-branch: %s, host-port: %s, app-name: %s, env: %s\n",
version, gitBranch, hostPort, appName, env)
log.Printf("INF %s started\n", serverName)
if err := srv.ListenAndServe(); err != http.ErrServerClosed {
errlog.Error().Err(err).Msg("server closed")
log.Fatalln("INF server closed")
}
<-idleConnsClosed
@ -162,7 +121,7 @@ func routeHandler() (http.Handler, error) {
}
if conf.WebUI {
routes["/"] = http.FileServer(rice.MustFindBox("../web/build").HTTPBox())
routes["/"] = http.FileServer(rice.MustFindBox("./web/build").HTTPBox())
}
if conf.HTTPGZip {
@ -190,29 +149,31 @@ func setActionRoutes(routes map[string]http.Handler) error {
for _, a := range conf.Actions {
var fn http.Handler
fn, err = newAction(a)
fn, err = newAction(&a)
if err != nil {
break
}
p := fmt.Sprintf("/api/v1/actions/%s", strings.ToLower(a.Name))
if authc, ok := findAuth(a.AuthName); ok {
routes[p] = withAuth(fn, authc)
if ac := findAuth(a.AuthName); ac != nil {
routes[p], err = auth.WithAuth(fn, ac)
} else {
routes[p] = fn
}
if err != nil {
return err
}
}
return nil
}
func findAuth(name string) (configAuth, bool) {
var authc configAuth
func findAuth(name string) *auth.Auth {
for _, a := range conf.Auths {
if strings.EqualFold(a.Name, name) {
return a, true
return &a
}
}
return authc, false
return nil
}


@ -0,0 +1,43 @@
package serv
// import (
// "context"
// "github.com/jackc/pgx/v4"
// "github.com/rs/zerolog"
// )
// type Logger struct {
// logger zerolog.Logger
// }
// // NewLogger accepts a zerolog.Logger as input and returns a new custom pgx
// // logging fascade as output.
// func NewSQLLogger(logger zerolog.Logger) *Logger {
// return &Logger{
// logger: // logger.With().Logger(),
// }
// }
// func (pl *Logger) Log(ctx context.Context, level pgx.LogLevel, msg string, data map[string]interface{}) {
// var zlevel zerolog.Level
// switch level {
// case pgx.LogLevelNone:
// zlevel = zerolog.NoLevel
// case pgx.LogLevelError:
// zlevel = zerolog.ErrorLevel
// case pgx.LogLevelWarn:
// zlevel = zerolog.WarnLevel
// case pgx.LogLevelDebug, pgx.LogLevelInfo:
// zlevel = zerolog.DebugLevel
// default:
// zlevel = zerolog.DebugLevel
// }
// if sql, ok := data["sql"]; ok {
// delete(data, "sql")
// pl.// logger.WithLevel(zlevel).Fields(data).Msg(sql.(string))
// } else {
// pl.// logger.WithLevel(zlevel).Fields(data).Msg(msg)
// }
// }


@ -2,7 +2,7 @@ app_name: "{% app_name %} Development"
host_port: 0.0.0.0:8080
web_ui: true
# debug, info, warn, error, fatal, panic
# debug, error, warn, info
log_level: "info"
# enable or disable http compression (uses gzip)
@ -30,7 +30,8 @@ reload_on_config_change: true
# seed_file: seed.js
# Path pointing to where the migrations can be found
migrations_path: ./config/migrations
# this must be a relative path under the config path
migrations_path: ./migrations
# Secret key for general encryption operations like
# encrypting the cursor data


@ -6,7 +6,7 @@ app_name: "{% app_name %} Production"
host_port: 0.0.0.0:8080
web_ui: false
# debug, info, warn, error, fatal, panic, disable
# debug, error, warn, info
log_level: "warn"
# enable or disable http compression (uses gzip)


@ -5,6 +5,7 @@ import (
"crypto/sha1"
"encoding/hex"
"io"
"os"
"sort"
"strings"
"sync"
@ -23,14 +24,6 @@ func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
return v
}
// nolint: errcheck
func stmtHash(name string, role string) string {
h := sha1.New()
io.WriteString(h, strings.ToLower(name))
io.WriteString(h, role)
return hex.EncodeToString(h.Sum(nil))
}
// nolint: errcheck
func gqlHash(b string, vars []byte, role string) string {
b = strings.TrimSpace(b)
@ -117,25 +110,23 @@ func al(b byte) bool {
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
}
func findStmt(role string, stmts []stmt) *stmt {
for i := range stmts {
if stmts[i].role.Name != role {
continue
}
return &stmts[i]
}
return nil
}
func fatalInProd(err error, msg string) {
var wg sync.WaitGroup
if !isDev() {
errlog.Fatal().Err(err).Msg(msg)
if isDev() {
log.Printf("ERR %s: %s", msg, err)
} else {
log.Fatalf("ERR %s: %s", msg, err)
}
errlog.Error().Err(err).Msg(msg)
wg.Add(1)
wg.Wait()
}
func isDev() bool {
return strings.HasPrefix(os.Getenv("GO_ENV"), "dev")
}
func sanitize(value string) string {
return strings.ToLower(strings.TrimSpace(value))
}


@ -7,7 +7,7 @@
/coverage
# production
/build
# /build
# development
/src/components/dataviz/core/*.js.map


@ -0,0 +1,30 @@
{
"files": {
"main.css": "/static/css/main.c6b5c55c.chunk.css",
"main.js": "/static/js/main.04d74040.chunk.js",
"main.js.map": "/static/js/main.04d74040.chunk.js.map",
"runtime-main.js": "/static/js/runtime-main.4aea9da3.js",
"runtime-main.js.map": "/static/js/runtime-main.4aea9da3.js.map",
"static/js/2.03370bd3.chunk.js": "/static/js/2.03370bd3.chunk.js",
"static/js/2.03370bd3.chunk.js.map": "/static/js/2.03370bd3.chunk.js.map",
"index.html": "/index.html",
"precache-manifest.e33bc3c7c6774d7032c490820c96901d.js": "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js",
"service-worker.js": "/service-worker.js",
"static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
"static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
"static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
"static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
"static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
"static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
"static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
"static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
"static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
"static/media/logo.png": "/static/media/logo.57ee3b60.png"
},
"entrypoints": [
"static/js/runtime-main.4aea9da3.js",
"static/js/2.03370bd3.chunk.js",
"static/css/main.c6b5c55c.chunk.css",
"static/js/main.04d74040.chunk.js"
]
}


Binary image file changed (before: 15 KiB, after: 15 KiB)


@ -0,0 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="manifest" href="/manifest.json"/><link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet"><title>Super Graph - GraphQL API for Rails</title><link href="/static/css/main.c6b5c55c.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(i){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],l=0,f=[];l<n.length;l++)t=n[l],Object.prototype.hasOwnProperty.call(p,t)&&p[t]&&f.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(i[r]=o[r]);for(s&&s(e);f.length;)f.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=l(l.s=t[0]))}return e}var t={},p={1:0},c=[];function l(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return i[e].call(r.exports,r,r.exports,l),r.l=!0,r.exports}l.m=i,l.c=t,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(r,e){if(1&e&&(r=l(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)l.d(t,n,function(e){return r[e]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var r=this.webpackJsonpweb=this.webpackJsonpweb||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/2.03370bd3.chunk.js"></script><script src="/static/js/main.04d74040.chunk.js"></script></body></html>


@ -0,0 +1,58 @@
self.__precacheManifest = (self.__precacheManifest || []).concat([
{
"revision": "ecdae64182d05c64e7f7f200ed03a4ed",
"url": "/index.html"
},
{
"revision": "6e9467dc213a3e2b84ea",
"url": "/static/css/main.c6b5c55c.chunk.css"
},
{
"revision": "c156a125990ddf5dcc51",
"url": "/static/js/2.03370bd3.chunk.js"
},
{
"revision": "6e9467dc213a3e2b84ea",
"url": "/static/js/main.04d74040.chunk.js"
},
{
"revision": "427262b6771d3f49a7c5",
"url": "/static/js/runtime-main.4aea9da3.js"
},
{
"revision": "5ab204b9b95c06640dbefae9a65b1db2",
"url": "/static/media/GraphQLLanguageService.js.5ab204b9.flow"
},
{
"revision": "4ce7ba191f7ebee4426768f246b2f0e0",
"url": "/static/media/autocompleteUtils.js.4ce7ba19.flow"
},
{
"revision": "7f98f032085704c8943ec2d1925c7c84",
"url": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow"
},
{
"revision": "4dbec62f1d8e8417afb9cbd19f1268c3",
"url": "/static/media/getDefinition.js.4dbec62f.flow"
},
{
"revision": "65b0979ac23feca49e4411883fd8eaab",
"url": "/static/media/getDiagnostics.js.65b0979a.flow"
},
{
"revision": "d94118379d362fc161aa1246bcc14d43",
"url": "/static/media/getHoverInformation.js.d9411837.flow"
},
{
"revision": "c04e3998712b37a96f0bfd283fa06b52",
"url": "/static/media/getOutline.js.c04e3998.flow"
},
{
"revision": "02c24280c5e4a7eb3c6cfcb079a8f1e3",
"url": "/static/media/index.js.02c24280.flow"
},
{
"revision": "57ee3b6084cb9d3c754cc12d25a98035",
"url": "/static/media/logo.57ee3b60.png"
}
]);


@ -0,0 +1,39 @@
/**
* Welcome to your Workbox-powered service worker!
*
* You'll need to register this file in your web app and you should
* disable HTTP caching for this file too.
* See https://goo.gl/nhQhGp
*
* The rest of the code is auto-generated. Please don't update this file
* directly; instead, make changes to your Workbox build configuration
* and re-run your build process.
* See https://goo.gl/2aRDsh
*/
importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");
importScripts(
"/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js"
);
self.addEventListener('message', (event) => {
if (event.data && event.data.type === 'SKIP_WAITING') {
self.skipWaiting();
}
});
workbox.core.clientsClaim();
/**
* The workboxSW.precacheAndRoute() method efficiently caches and responds to
* requests for URLs in the manifest.
* See https://goo.gl/S9QRab
*/
self.__precacheManifest = [].concat(self.__precacheManifest || []);
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
workbox.routing.registerNavigationRoute(workbox.precaching.getCacheKeyForURL("/index.html"), {
blacklist: [/^\/_/,/\/[^/?]+\.[^/]+$/],
});
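The header comment notes that this file must be registered by the web app. A minimal registration sketch, assuming only standard browser APIs and the conventional CRA output path /service-worker.js:
if ('serviceWorker' in navigator) {
  window.addEventListener('load', () => {
    navigator.serviceWorker
      .register('/service-worker.js')
      .then((reg) => console.log('service worker scope:', reg.scope))
      .catch((err) => console.error('service worker registration failed:', err));
  });
}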

View File

@ -0,0 +1,2 @@
body{margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica Neue,sans-serif;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;background-color:#0f202d}code{font-family:source-code-pro,Menlo,Monaco,Consolas,Courier New,monospace}.playground>div:nth-child(2){height:calc(100vh - 131px)}
/*# sourceMappingURL=main.c6b5c55c.chunk.css.map */

View File

@ -0,0 +1 @@
{"version":3,"sources":["index.css"],"names":[],"mappings":"AAAA,KACE,QAAS,CACT,SAAU,CACV,mIAEY,CACZ,kCAAmC,CACnC,iCAAkC,CAClC,wBACF,CAEA,KACE,uEAEF,CAEA,6BACE,0BACF","file":"main.c6b5c55c.chunk.css","sourcesContent":["body {\n margin: 0;\n padding: 0;\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", \"Roboto\", \"Oxygen\",\n \"Ubuntu\", \"Cantarell\", \"Fira Sans\", \"Droid Sans\", \"Helvetica Neue\",\n sans-serif;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n background-color: #0f202d;\n}\n\ncode {\n font-family: source-code-pro, Menlo, Monaco, Consolas, \"Courier New\",\n monospace;\n}\n\n.playground > div:nth-child(2) {\n height: calc(100vh - 131px);\n}\n"]}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
(this.webpackJsonpweb=this.webpackJsonpweb||[]).push([[0],{163:function(e,t,n){var r={".":61,"./":61,"./GraphQLLanguageService":117,"./GraphQLLanguageService.js":117,"./GraphQLLanguageService.js.flow":315,"./autocompleteUtils":91,"./autocompleteUtils.js":91,"./autocompleteUtils.js.flow":316,"./getAutocompleteSuggestions":77,"./getAutocompleteSuggestions.js":77,"./getAutocompleteSuggestions.js.flow":317,"./getDefinition":92,"./getDefinition.js":92,"./getDefinition.js.flow":318,"./getDiagnostics":94,"./getDiagnostics.js":94,"./getDiagnostics.js.flow":319,"./getHoverInformation":95,"./getHoverInformation.js":95,"./getHoverInformation.js.flow":320,"./getOutline":116,"./getOutline.js":116,"./getOutline.js.flow":321,"./index":61,"./index.js":61,"./index.js.flow":322};function o(e){var t=a(e);return n(t)}function a(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}o.keys=function(){return Object.keys(r)},o.resolve=a,e.exports=o,o.id=163},190:function(e,t,n){"use strict";(function(e){var r=n(100),o=n(101),a=n(201),i=n(191),s=n(202),l=n(5),c=n.n(l),u=n(20),g=n(130),f=(n(441),window.fetch);window.fetch=function(){return arguments[1].credentials="include",Promise.resolve(f.apply(e,arguments))};var p=function(e){function t(){return Object(r.a)(this,t),Object(a.a)(this,Object(i.a)(t).apply(this,arguments))}return Object(s.a)(t,e),Object(o.a)(t,[{key:"render",value:function(){return c.a.createElement("div",null,c.a.createElement("header",{style:{background:"#09141b",color:"#03a9f4",letterSpacing:"0.15rem",height:"65px",display:"flex",alignItems:"center"}},c.a.createElement("h3",{style:{textDecoration:"none",margin:"0px",fontSize:"18px"}},c.a.createElement("span",{style:{textTransform:"uppercase",marginLeft:"20px",paddingRight:"10px",borderRight:"1px solid #fff"}},"Super Graph"),c.a.createElement("span",{style:{fontSize:"16px",marginLeft:"10px",color:"#fff"}},"Instant GraphQL"))),c.a.createElement(u.Provider,{store:g.store},c.a.createElement(g.Playground,{endpoint:"/api/v1/graphql",settings:"{ 'schema.polling.enable': false, 'request.credentials': 'include', 'general.betaUpdates': true, 'editor.reuseHeaders': true, 'editor.theme': 'dark' }"})))}}]),t}(l.Component);t.a=p}).call(this,n(32))},205:function(e,t,n){e.exports=n(206)},206:function(e,t,n){"use strict";n.r(t);var r=n(5),o=n.n(r),a=n(52),i=n.n(a),s=n(190);i.a.render(o.a.createElement(s.a,null),document.getElementById("root"))},441:function(e,t,n){}},[[205,1,2]]]);
//# sourceMappingURL=main.04d74040.chunk.js.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
!function(e){function r(r){for(var n,l,f=r[0],i=r[1],a=r[2],c=0,s=[];c<f.length;c++)l=f[c],Object.prototype.hasOwnProperty.call(o,l)&&o[l]&&s.push(o[l][0]),o[l]=0;for(n in i)Object.prototype.hasOwnProperty.call(i,n)&&(e[n]=i[n]);for(p&&p(r);s.length;)s.shift()();return u.push.apply(u,a||[]),t()}function t(){for(var e,r=0;r<u.length;r++){for(var t=u[r],n=!0,f=1;f<t.length;f++){var i=t[f];0!==o[i]&&(n=!1)}n&&(u.splice(r--,1),e=l(l.s=t[0]))}return e}var n={},o={1:0},u=[];function l(r){if(n[r])return n[r].exports;var t=n[r]={i:r,l:!1,exports:{}};return e[r].call(t.exports,t,t.exports,l),t.l=!0,t.exports}l.m=e,l.c=n,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!==typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(e,r){if(1&r&&(e=l(e)),8&r)return e;if(4&r&&"object"===typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var n in e)l.d(t,n,function(r){return e[r]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var f=this.webpackJsonpweb=this.webpackJsonpweb||[],i=f.push.bind(f);f.push=r,f=f.slice();for(var a=0;a<f.length;a++)r(f[a]);var p=i;t()}([]);
//# sourceMappingURL=runtime-main.4aea9da3.js.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,328 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
DocumentNode,
FragmentSpreadNode,
FragmentDefinitionNode,
OperationDefinitionNode,
TypeDefinitionNode,
NamedTypeNode,
} from 'graphql';
import type {
CompletionItem,
DefinitionQueryResult,
Diagnostic,
GraphQLCache,
GraphQLConfig,
GraphQLProjectConfig,
Uri,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import type {Hover} from 'vscode-languageserver-types';
import {Kind, parse, print} from 'graphql';
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
import {getHoverInformation} from './getHoverInformation';
import {validateQuery, getRange, SEVERITY} from './getDiagnostics';
import {
getDefinitionQueryResultForFragmentSpread,
getDefinitionQueryResultForDefinitionNode,
getDefinitionQueryResultForNamedType,
} from './getDefinition';
import {getASTNodeAtPosition} from 'graphql-language-service-utils';
const {
FRAGMENT_DEFINITION,
OBJECT_TYPE_DEFINITION,
INTERFACE_TYPE_DEFINITION,
ENUM_TYPE_DEFINITION,
UNION_TYPE_DEFINITION,
SCALAR_TYPE_DEFINITION,
INPUT_OBJECT_TYPE_DEFINITION,
SCALAR_TYPE_EXTENSION,
OBJECT_TYPE_EXTENSION,
INTERFACE_TYPE_EXTENSION,
UNION_TYPE_EXTENSION,
ENUM_TYPE_EXTENSION,
INPUT_OBJECT_TYPE_EXTENSION,
DIRECTIVE_DEFINITION,
FRAGMENT_SPREAD,
OPERATION_DEFINITION,
NAMED_TYPE,
} = Kind;
export class GraphQLLanguageService {
_graphQLCache: GraphQLCache;
_graphQLConfig: GraphQLConfig;
constructor(cache: GraphQLCache) {
this._graphQLCache = cache;
this._graphQLConfig = cache.getGraphQLConfig();
}
async getDiagnostics(
query: string,
uri: Uri,
isRelayCompatMode?: boolean,
): Promise<Array<Diagnostic>> {
// Perform syntax diagnostics first, as this doesn't require
// schema/fragment definitions, or even the project configuration.
let queryHasExtensions = false;
const projectConfig = this._graphQLConfig.getConfigForFile(uri);
const schemaPath = projectConfig.schemaPath;
try {
const queryAST = parse(query);
if (!schemaPath || uri !== schemaPath) {
queryHasExtensions = queryAST.definitions.some(definition => {
switch (definition.kind) {
case OBJECT_TYPE_DEFINITION:
case INTERFACE_TYPE_DEFINITION:
case ENUM_TYPE_DEFINITION:
case UNION_TYPE_DEFINITION:
case SCALAR_TYPE_DEFINITION:
case INPUT_OBJECT_TYPE_DEFINITION:
case SCALAR_TYPE_EXTENSION:
case OBJECT_TYPE_EXTENSION:
case INTERFACE_TYPE_EXTENSION:
case UNION_TYPE_EXTENSION:
case ENUM_TYPE_EXTENSION:
case INPUT_OBJECT_TYPE_EXTENSION:
case DIRECTIVE_DEFINITION:
return true;
}
return false;
});
}
} catch (error) {
const range = getRange(error.locations[0], query);
return [
{
severity: SEVERITY.ERROR,
message: error.message,
source: 'GraphQL: Syntax',
range,
},
];
}
// If there's a matching config, proceed to prepare to run validation
let source = query;
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
projectConfig,
);
const fragmentDependencies = await this._graphQLCache.getFragmentDependencies(
query,
fragmentDefinitions,
);
const dependenciesSource = fragmentDependencies.reduce(
(prev, cur) => `${prev} ${print(cur.definition)}`,
'',
);
source = `${source} ${dependenciesSource}`;
let validationAst = null;
try {
validationAst = parse(source);
} catch (error) {
// the query string is already checked to be parsed properly - errors
// from this parse must be from corrupted fragment dependencies.
// For IDEs we don't care about errors outside of the currently edited
// query, so we return an empty array here.
return [];
}
// Check if there are custom validation rules to be used
let customRules;
const customRulesModulePath =
projectConfig.extensions.customValidationRules;
if (customRulesModulePath) {
/* eslint-disable no-implicit-coercion */
const rulesPath = require.resolve(`${customRulesModulePath}`);
if (rulesPath) {
customRules = require(`${rulesPath}`)(this._graphQLConfig);
}
/* eslint-enable no-implicit-coercion */
}
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName, queryHasExtensions)
.catch(() => null);
if (!schema) {
return [];
}
return validateQuery(validationAst, schema, customRules, isRelayCompatMode);
}
async getAutocompleteSuggestions(
query: string,
position: Position,
filePath: Uri,
): Promise<Array<CompletionItem>> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName)
.catch(() => null);
if (schema) {
return getAutocompleteSuggestions(schema, query, position);
}
return [];
}
async getHoverInformation(
query: string,
position: Position,
filePath: Uri,
): Promise<Hover.contents> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName)
.catch(() => null);
if (schema) {
return getHoverInformation(schema, query, position);
}
return '';
}
async getDefinition(
query: string,
position: Position,
filePath: Uri,
): Promise<?DefinitionQueryResult> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
let ast;
try {
ast = parse(query);
} catch (error) {
return null;
}
const node = getASTNodeAtPosition(query, ast, position);
if (node) {
switch (node.kind) {
case FRAGMENT_SPREAD:
return this._getDefinitionForFragmentSpread(
query,
ast,
node,
filePath,
projectConfig,
);
case FRAGMENT_DEFINITION:
case OPERATION_DEFINITION:
return getDefinitionQueryResultForDefinitionNode(
filePath,
query,
(node: FragmentDefinitionNode | OperationDefinitionNode),
);
case NAMED_TYPE:
return this._getDefinitionForNamedType(
query,
ast,
node,
filePath,
projectConfig,
);
}
}
return null;
}
async _getDefinitionForNamedType(
query: string,
ast: DocumentNode,
node: NamedTypeNode,
filePath: Uri,
projectConfig: GraphQLProjectConfig,
): Promise<?DefinitionQueryResult> {
const objectTypeDefinitions = await this._graphQLCache.getObjectTypeDefinitions(
projectConfig,
);
const dependencies = await this._graphQLCache.getObjectTypeDependenciesForAST(
ast,
objectTypeDefinitions,
);
const localObjectTypeDefinitions = ast.definitions.filter(
definition =>
definition.kind === OBJECT_TYPE_DEFINITION ||
definition.kind === INPUT_OBJECT_TYPE_DEFINITION ||
definition.kind === ENUM_TYPE_DEFINITION,
);
const typeCastedDefs = ((localObjectTypeDefinitions: any): Array<
TypeDefinitionNode,
>);
const localOperationDefinitionInfos = typeCastedDefs.map(
(definition: TypeDefinitionNode) => ({
filePath,
content: query,
definition,
}),
);
const result = await getDefinitionQueryResultForNamedType(
query,
node,
dependencies.concat(localOperationDefinitionInfos),
);
return result;
}
async _getDefinitionForFragmentSpread(
query: string,
ast: DocumentNode,
node: FragmentSpreadNode,
filePath: Uri,
projectConfig: GraphQLProjectConfig,
): Promise<?DefinitionQueryResult> {
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
projectConfig,
);
const dependencies = await this._graphQLCache.getFragmentDependenciesForAST(
ast,
fragmentDefinitions,
);
const localFragDefinitions = ast.definitions.filter(
definition => definition.kind === FRAGMENT_DEFINITION,
);
const typeCastedDefs = ((localFragDefinitions: any): Array<
FragmentDefinitionNode,
>);
const localFragInfos = typeCastedDefs.map(
(definition: FragmentDefinitionNode) => ({
filePath,
content: query,
definition,
}),
);
const result = await getDefinitionQueryResultForFragmentSpread(
query,
node,
dependencies.concat(localFragInfos),
);
return result;
}
}
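A hedged usage sketch of the class above; `cache` stands in for a GraphQLCache instance built elsewhere (for example by graphql-language-service-server), and the file path is purely illustrative:
// Inside an async context, with `cache` already constructed:
const service = new GraphQLLanguageService(cache);
const diagnostics = await service.getDiagnostics(
  '{ user { id } }',
  '/project/src/query.graphql',
);
// An empty array means the query parsed and validated cleanly.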

View File

@ -0,0 +1,204 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {GraphQLField, GraphQLSchema, GraphQLType} from 'graphql';
import {isCompositeType} from 'graphql';
import {
SchemaMetaFieldDef,
TypeMetaFieldDef,
TypeNameMetaFieldDef,
} from 'graphql/type/introspection';
import type {
CompletionItem,
ContextToken,
State,
TypeInfo,
} from 'graphql-language-service-types';
// Utility for returning the state representing the Definition this token state
// is within, if any.
export function getDefinitionState(tokenState: State): ?State {
let definitionState;
forEachState(tokenState, state => {
switch (state.kind) {
case 'Query':
case 'ShortQuery':
case 'Mutation':
case 'Subscription':
case 'FragmentDefinition':
definitionState = state;
break;
}
});
return definitionState;
}
// Gets the field definition given a type and field name
export function getFieldDef(
schema: GraphQLSchema,
type: GraphQLType,
fieldName: string,
): ?GraphQLField<*, *> {
if (fieldName === SchemaMetaFieldDef.name && schema.getQueryType() === type) {
return SchemaMetaFieldDef;
}
if (fieldName === TypeMetaFieldDef.name && schema.getQueryType() === type) {
return TypeMetaFieldDef;
}
if (fieldName === TypeNameMetaFieldDef.name && isCompositeType(type)) {
return TypeNameMetaFieldDef;
}
if (type.getFields && typeof type.getFields === 'function') {
return (type.getFields()[fieldName]: any);
}
return null;
}
// Utility for iterating through a CodeMirror parse state stack bottom-up.
export function forEachState(
stack: State,
fn: (state: State) => ?TypeInfo,
): void {
const reverseStateStack = [];
let state = stack;
while (state && state.kind) {
reverseStateStack.push(state);
state = state.prevState;
}
for (let i = reverseStateStack.length - 1; i >= 0; i--) {
fn(reverseStateStack[i]);
}
}
export function objectValues(object: Object): Array<any> {
const keys = Object.keys(object);
const len = keys.length;
const values = new Array(len);
for (let i = 0; i < len; ++i) {
values[i] = object[keys[i]];
}
return values;
}
// Create the expected hint response given a list of possible hints and a token
export function hintList(
token: ContextToken,
list: Array<CompletionItem>,
): Array<CompletionItem> {
return filterAndSortList(list, normalizeText(token.string));
}
// Given a list of hint entries and currently typed text, sort and filter to
// provide a concise list.
function filterAndSortList(
list: Array<CompletionItem>,
text: string,
): Array<CompletionItem> {
if (!text) {
return filterNonEmpty(list, entry => !entry.isDeprecated);
}
const byProximity = list.map(entry => ({
proximity: getProximity(normalizeText(entry.label), text),
entry,
}));
const conciseMatches = filterNonEmpty(
filterNonEmpty(byProximity, pair => pair.proximity <= 2),
pair => !pair.entry.isDeprecated,
);
const sortedMatches = conciseMatches.sort(
(a, b) =>
(a.entry.isDeprecated ? 1 : 0) - (b.entry.isDeprecated ? 1 : 0) ||
a.proximity - b.proximity ||
a.entry.label.length - b.entry.label.length,
);
return sortedMatches.map(pair => pair.entry);
}
// Filters the array by the predicate, unless it results in an empty array,
// in which case the original array is returned.
function filterNonEmpty(
array: Array<Object>,
predicate: (entry: Object) => boolean,
): Array<Object> {
const filtered = array.filter(predicate);
return filtered.length === 0 ? array : filtered;
}
function normalizeText(text: string): string {
return text.toLowerCase().replace(/\W/g, '');
}
// Determine a numeric proximity for a suggestion based on current text.
function getProximity(suggestion: string, text: string): number {
// start with lexical distance
let proximity = lexicalDistance(text, suggestion);
if (suggestion.length > text.length) {
// do not penalize long suggestions.
proximity -= suggestion.length - text.length - 1;
// penalize suggestions not starting with this phrase
proximity += suggestion.indexOf(text) === 0 ? 0 : 0.5;
}
return proximity;
}
/**
* Computes the lexical distance between strings A and B.
*
* The "distance" between two strings is given by counting the minimum number
* of edits needed to transform string A into string B. An edit can be an
* insertion, deletion, or substitution of a single character, or a swap of two
* adjacent characters.
*
* This distance can be useful for detecting typos in input or for sorting
* results by similarity.
*
* @param {string} a
* @param {string} b
* @return {int} distance in number of edits
*/
function lexicalDistance(a: string, b: string): number {
let i;
let j;
const d = [];
const aLength = a.length;
const bLength = b.length;
for (i = 0; i <= aLength; i++) {
d[i] = [i];
}
for (j = 1; j <= bLength; j++) {
d[0][j] = j;
}
for (i = 1; i <= aLength; i++) {
for (j = 1; j <= bLength; j++) {
const cost = a[i - 1] === b[j - 1] ? 0 : 1;
d[i][j] = Math.min(
d[i - 1][j] + 1,
d[i][j - 1] + 1,
d[i - 1][j - 1] + cost,
);
if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
}
}
}
return d[aLength][bLength];
}
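A few values worked out by hand from the algorithm above, where an edit is an insertion, deletion, substitution, or adjacent swap:
console.assert(lexicalDistance('qeury', 'query') === 1); // one adjacent swap: 'eu' -> 'ue'
console.assert(lexicalDistance('usr', 'user') === 1); // one insertion
console.assert(lexicalDistance('cat', 'dog') === 3); // three substitutions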

View File

@ -0,0 +1,665 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
FragmentDefinitionNode,
GraphQLDirective,
GraphQLSchema,
} from 'graphql';
import type {
CompletionItem,
ContextToken,
State,
TypeInfo,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import {
GraphQLBoolean,
GraphQLEnumType,
GraphQLInputObjectType,
GraphQLList,
SchemaMetaFieldDef,
TypeMetaFieldDef,
TypeNameMetaFieldDef,
assertAbstractType,
doTypesOverlap,
getNamedType,
getNullableType,
isAbstractType,
isCompositeType,
isInputType,
} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
forEachState,
getDefinitionState,
getFieldDef,
hintList,
objectValues,
} from './autocompleteUtils';
/**
* Given GraphQLSchema, queryText, and context of the current position within
* the source text, provide a list of typeahead entries.
*/
export function getAutocompleteSuggestions(
schema: GraphQLSchema,
queryText: string,
cursor: Position,
contextToken?: ContextToken,
): Array<CompletionItem> {
const token = contextToken || getTokenAtPosition(queryText, cursor);
const state =
token.state.kind === 'Invalid' ? token.state.prevState : token.state;
// relieve flow errors by checking if `state` exists
if (!state) {
return [];
}
const kind = state.kind;
const step = state.step;
const typeInfo = getTypeInfo(schema, token.state);
// Definition kinds
if (kind === 'Document') {
return hintList(token, [
{label: 'query'},
{label: 'mutation'},
{label: 'subscription'},
{label: 'fragment'},
{label: '{'},
]);
}
// Field names
if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
return getSuggestionsForFieldNames(token, typeInfo, schema);
}
// Argument names
if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
const argDefs = typeInfo.argDefs;
if (argDefs) {
return hintList(
token,
argDefs.map(argDef => ({
label: argDef.name,
detail: String(argDef.type),
documentation: argDef.description,
})),
);
}
}
// Input Object fields
if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
if (typeInfo.objectFieldDefs) {
const objectFields = objectValues(typeInfo.objectFieldDefs);
return hintList(
token,
objectFields.map(field => ({
label: field.name,
detail: String(field.type),
documentation: field.description,
})),
);
}
}
// Input values: Enum and Boolean
if (
kind === 'EnumValue' ||
(kind === 'ListValue' && step === 1) ||
(kind === 'ObjectField' && step === 2) ||
(kind === 'Argument' && step === 2)
) {
return getSuggestionsForInputValues(token, typeInfo);
}
// Fragment type conditions
if (
(kind === 'TypeCondition' && step === 1) ||
(kind === 'NamedType' &&
state.prevState != null &&
state.prevState.kind === 'TypeCondition')
) {
return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
}
// Fragment spread names
if (kind === 'FragmentSpread' && step === 1) {
return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
}
// Variable definition types
if (
(kind === 'VariableDefinition' && step === 2) ||
(kind === 'ListType' && step === 1) ||
(kind === 'NamedType' &&
state.prevState &&
(state.prevState.kind === 'VariableDefinition' ||
state.prevState.kind === 'ListType'))
) {
return getSuggestionsForVariableDefinition(token, schema);
}
// Directive names
if (kind === 'Directive') {
return getSuggestionsForDirective(token, state, schema);
}
return [];
}
// Helper functions to get suggestions for each kind
function getSuggestionsForFieldNames(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
): Array<CompletionItem> {
if (typeInfo.parentType) {
const parentType = typeInfo.parentType;
const fields =
parentType.getFields instanceof Function
? objectValues(parentType.getFields())
: [];
if (isAbstractType(parentType)) {
fields.push(TypeNameMetaFieldDef);
}
if (parentType === schema.getQueryType()) {
fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
}
return hintList(
token,
fields.map(field => ({
label: field.name,
detail: String(field.type),
documentation: field.description,
isDeprecated: field.isDeprecated,
deprecationReason: field.deprecationReason,
})),
);
}
return [];
}
function getSuggestionsForInputValues(
token: ContextToken,
typeInfo: TypeInfo,
): Array<CompletionItem> {
const namedInputType = getNamedType(typeInfo.inputType);
if (namedInputType instanceof GraphQLEnumType) {
const values = namedInputType.getValues();
return hintList(
token,
values.map(value => ({
label: value.name,
detail: String(namedInputType),
documentation: value.description,
isDeprecated: value.isDeprecated,
deprecationReason: value.deprecationReason,
})),
);
} else if (namedInputType === GraphQLBoolean) {
return hintList(token, [
{
label: 'true',
detail: String(GraphQLBoolean),
documentation: 'Not false.',
},
{
label: 'false',
detail: String(GraphQLBoolean),
documentation: 'Not true.',
},
]);
}
return [];
}
function getSuggestionsForFragmentTypeConditions(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
): Array<CompletionItem> {
let possibleTypes;
if (typeInfo.parentType) {
if (isAbstractType(typeInfo.parentType)) {
const abstractType = assertAbstractType(typeInfo.parentType);
// Collect both the possible Object types and the interfaces
// they implement.
const possibleObjTypes = schema.getPossibleTypes(abstractType);
const possibleIfaceMap = Object.create(null);
possibleObjTypes.forEach(type => {
type.getInterfaces().forEach(iface => {
possibleIfaceMap[iface.name] = iface;
});
});
possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
} else {
// The parent type is a non-abstract Object type, so the only possible
// type that can be used is that same type.
possibleTypes = [typeInfo.parentType];
}
} else {
const typeMap = schema.getTypeMap();
possibleTypes = objectValues(typeMap).filter(isCompositeType);
}
return hintList(
token,
possibleTypes.map(type => {
const namedType = getNamedType(type);
return {
label: String(type),
documentation: (namedType && namedType.description) || '',
};
}),
);
}
function getSuggestionsForFragmentSpread(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
queryText: string,
): Array<CompletionItem> {
const typeMap = schema.getTypeMap();
const defState = getDefinitionState(token.state);
const fragments = getFragmentDefinitions(queryText);
// Filter down to only the fragments which may exist here.
const relevantFrags = fragments.filter(
frag =>
// Only include fragments with known types.
typeMap[frag.typeCondition.name.value] &&
// Only include fragments which are not cyclic.
!(
defState &&
defState.kind === 'FragmentDefinition' &&
defState.name === frag.name.value
) &&
// Only include fragments which could possibly be spread here.
isCompositeType(typeInfo.parentType) &&
isCompositeType(typeMap[frag.typeCondition.name.value]) &&
doTypesOverlap(
schema,
typeInfo.parentType,
typeMap[frag.typeCondition.name.value],
),
);
return hintList(
token,
relevantFrags.map(frag => ({
label: frag.name.value,
detail: String(typeMap[frag.typeCondition.name.value]),
documentation: `fragment ${frag.name.value} on ${
frag.typeCondition.name.value
}`,
})),
);
}
function getFragmentDefinitions(
queryText: string,
): Array<FragmentDefinitionNode> {
const fragmentDefs = [];
runOnlineParser(queryText, (_, state) => {
if (state.kind === 'FragmentDefinition' && state.name && state.type) {
fragmentDefs.push({
kind: 'FragmentDefinition',
name: {
kind: 'Name',
value: state.name,
},
selectionSet: {
kind: 'SelectionSet',
selections: [],
},
typeCondition: {
kind: 'NamedType',
name: {
kind: 'Name',
value: state.type,
},
},
});
}
});
return fragmentDefs;
}
function getSuggestionsForVariableDefinition(
token: ContextToken,
schema: GraphQLSchema,
): Array<CompletionItem> {
const inputTypeMap = schema.getTypeMap();
const inputTypes = objectValues(inputTypeMap).filter(isInputType);
return hintList(
token,
inputTypes.map(type => ({
label: type.name,
documentation: type.description,
})),
);
}
function getSuggestionsForDirective(
token: ContextToken,
state: State,
schema: GraphQLSchema,
): Array<CompletionItem> {
if (state.prevState && state.prevState.kind) {
const directives = schema
.getDirectives()
.filter(directive => canUseDirective(state.prevState, directive));
return hintList(
token,
directives.map(directive => ({
label: directive.name,
documentation: directive.description || '',
})),
);
}
return [];
}
export function getTokenAtPosition(
queryText: string,
cursor: Position,
): ContextToken {
let styleAtCursor = null;
let stateAtCursor = null;
let stringAtCursor = null;
const token = runOnlineParser(queryText, (stream, state, style, index) => {
if (index === cursor.line) {
if (stream.getCurrentPosition() >= cursor.character) {
styleAtCursor = style;
stateAtCursor = {...state};
stringAtCursor = stream.current();
return 'BREAK';
}
}
});
// Return the state/style of the parsed token in case those at the cursor
// aren't available.
return {
start: token.start,
end: token.end,
string: stringAtCursor || token.string,
state: stateAtCursor || token.state,
style: styleAtCursor || token.style,
};
}
/**
* Provides a utility function to parse a given query text and construct a
* `token` context object.
* A token context provides useful information about the token/style that
* CharacterStream currently possesses, as well as the end state and style
* of the token.
*/
type callbackFnType = (
stream: CharacterStream,
state: State,
style: string,
index: number,
) => void | 'BREAK';
function runOnlineParser(
queryText: string,
callback: callbackFnType,
): ContextToken {
const lines = queryText.split('\n');
const parser = onlineParser();
let state = parser.startState();
let style = '';
let stream: CharacterStream = new CharacterStream('');
for (let i = 0; i < lines.length; i++) {
stream = new CharacterStream(lines[i]);
while (!stream.eol()) {
style = parser.token(stream, state);
const code = callback(stream, state, style, i);
if (code === 'BREAK') {
break;
}
}
// The while loop above won't run if the line is empty.
// Run the callback one more time to catch this.
callback(stream, state, style, i);
if (!state.kind) {
state = parser.startState();
}
}
return {
start: stream.getStartOfToken(),
end: stream.getCurrentPosition(),
string: stream.current(),
state,
style,
};
}
function canUseDirective(
state: $PropertyType<State, 'prevState'>,
directive: GraphQLDirective,
): boolean {
if (!state || !state.kind) {
return false;
}
const kind = state.kind;
const locations = directive.locations;
switch (kind) {
case 'Query':
return locations.indexOf('QUERY') !== -1;
case 'Mutation':
return locations.indexOf('MUTATION') !== -1;
case 'Subscription':
return locations.indexOf('SUBSCRIPTION') !== -1;
case 'Field':
case 'AliasedField':
return locations.indexOf('FIELD') !== -1;
case 'FragmentDefinition':
return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
case 'FragmentSpread':
return locations.indexOf('FRAGMENT_SPREAD') !== -1;
case 'InlineFragment':
return locations.indexOf('INLINE_FRAGMENT') !== -1;
// Schema Definitions
case 'SchemaDef':
return locations.indexOf('SCHEMA') !== -1;
case 'ScalarDef':
return locations.indexOf('SCALAR') !== -1;
case 'ObjectTypeDef':
return locations.indexOf('OBJECT') !== -1;
case 'FieldDef':
return locations.indexOf('FIELD_DEFINITION') !== -1;
case 'InterfaceDef':
return locations.indexOf('INTERFACE') !== -1;
case 'UnionDef':
return locations.indexOf('UNION') !== -1;
case 'EnumDef':
return locations.indexOf('ENUM') !== -1;
case 'EnumValue':
return locations.indexOf('ENUM_VALUE') !== -1;
case 'InputDef':
return locations.indexOf('INPUT_OBJECT') !== -1;
case 'InputValueDef':
const prevStateKind = state.prevState && state.prevState.kind;
switch (prevStateKind) {
case 'ArgumentsDef':
return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
case 'InputDef':
return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
}
}
return false;
}
// Utility for collecting rich type information given any token's state
// from the graphql-mode parser.
export function getTypeInfo(
schema: GraphQLSchema,
tokenState: State,
): TypeInfo {
let argDef;
let argDefs;
let directiveDef;
let enumValue;
let fieldDef;
let inputType;
let objectFieldDefs;
let parentType;
let type;
forEachState(tokenState, state => {
switch (state.kind) {
case 'Query':
case 'ShortQuery':
type = schema.getQueryType();
break;
case 'Mutation':
type = schema.getMutationType();
break;
case 'Subscription':
type = schema.getSubscriptionType();
break;
case 'InlineFragment':
case 'FragmentDefinition':
if (state.type) {
type = schema.getType(state.type);
}
break;
case 'Field':
case 'AliasedField':
if (!type || !state.name) {
fieldDef = null;
} else {
fieldDef = parentType
? getFieldDef(schema, parentType, state.name)
: null;
type = fieldDef ? fieldDef.type : null;
}
break;
case 'SelectionSet':
parentType = getNamedType(type);
break;
case 'Directive':
directiveDef = state.name ? schema.getDirective(state.name) : null;
break;
case 'Arguments':
if (!state.prevState) {
argDefs = null;
} else {
switch (state.prevState.kind) {
case 'Field':
argDefs = fieldDef && fieldDef.args;
break;
case 'Directive':
argDefs = directiveDef && directiveDef.args;
break;
case 'AliasedField':
const name = state.prevState && state.prevState.name;
if (!name) {
argDefs = null;
break;
}
const field = parentType
? getFieldDef(schema, parentType, name)
: null;
if (!field) {
argDefs = null;
break;
}
argDefs = field.args;
break;
default:
argDefs = null;
break;
}
}
break;
case 'Argument':
if (argDefs) {
for (let i = 0; i < argDefs.length; i++) {
if (argDefs[i].name === state.name) {
argDef = argDefs[i];
break;
}
}
}
inputType = argDef && argDef.type;
break;
case 'EnumValue':
const enumType = getNamedType(inputType);
enumValue =
enumType instanceof GraphQLEnumType
? find(enumType.getValues(), val => val.value === state.name)
: null;
break;
case 'ListValue':
const nullableType = getNullableType(inputType);
inputType =
nullableType instanceof GraphQLList ? nullableType.ofType : null;
break;
case 'ObjectValue':
const objectType = getNamedType(inputType);
objectFieldDefs =
objectType instanceof GraphQLInputObjectType
? objectType.getFields()
: null;
break;
case 'ObjectField':
const objectField =
state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
inputType = objectField && objectField.type;
break;
case 'NamedType':
if (state.name) {
type = schema.getType(state.name);
}
break;
}
});
return {
argDef,
argDefs,
directiveDef,
enumValue,
fieldDef,
inputType,
objectFieldDefs,
parentType,
type,
};
}
// Returns the first item in the array which causes the predicate to return truthy.
function find(array, predicate) {
for (let i = 0; i < array.length; i++) {
if (predicate(array[i])) {
return array[i];
}
}
return null;
}
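A hedged usage sketch; the schema is a toy one built with graphql's buildSchema, and the cursor is a plain {line, character} object, which is all this module reads from a Position:
import {buildSchema} from 'graphql';
const schema = buildSchema('type Query { user: String users: [String] }');
// The cursor sits just after the partial field name "us" on line 0.
const items = getAutocompleteSuggestions(schema, '{ us', {line: 0, character: 4});
// items is roughly [{label: 'user', detail: 'String'}, {label: 'users', ...}].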

View File

@ -0,0 +1,136 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
ASTNode,
FragmentSpreadNode,
FragmentDefinitionNode,
OperationDefinitionNode,
NamedTypeNode,
TypeDefinitionNode,
} from 'graphql';
import type {
Definition,
DefinitionQueryResult,
FragmentInfo,
Position,
Range,
Uri,
ObjectTypeInfo,
} from 'graphql-language-service-types';
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
import invariant from 'assert';
export const LANGUAGE = 'GraphQL';
function getRange(text: string, node: ASTNode): Range {
const location = node.loc;
invariant(location, 'Expected ASTNode to have a location.');
return locToRange(text, location);
}
function getPosition(text: string, node: ASTNode): Position {
const location = node.loc;
invariant(location, 'Expected ASTNode to have a location.');
return offsetToPosition(text, location.start);
}
export async function getDefinitionQueryResultForNamedType(
text: string,
node: NamedTypeNode,
dependencies: Array<ObjectTypeInfo>,
): Promise<DefinitionQueryResult> {
const name = node.name.value;
const defNodes = dependencies.filter(
({definition}) => definition.name && definition.name.value === name,
);
if (defNodes.length === 0) {
process.stderr.write(`Definition not found for GraphQL type ${name}`);
return {queryRange: [], definitions: []};
}
const definitions: Array<Definition> = defNodes.map(
({filePath, content, definition}) =>
getDefinitionForNodeDefinition(filePath || '', content, definition),
);
return {
definitions,
queryRange: definitions.map(_ => getRange(text, node)),
};
}
export async function getDefinitionQueryResultForFragmentSpread(
text: string,
fragment: FragmentSpreadNode,
dependencies: Array<FragmentInfo>,
): Promise<DefinitionQueryResult> {
const name = fragment.name.value;
const defNodes = dependencies.filter(
({definition}) => definition.name.value === name,
);
if (defNodes.length === 0) {
process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
return {queryRange: [], definitions: []};
}
const definitions: Array<Definition> = defNodes.map(
({filePath, content, definition}) =>
getDefinitionForFragmentDefinition(filePath || '', content, definition),
);
return {
definitions,
queryRange: definitions.map(_ => getRange(text, fragment)),
};
}
export function getDefinitionQueryResultForDefinitionNode(
path: Uri,
text: string,
definition: FragmentDefinitionNode | OperationDefinitionNode,
): DefinitionQueryResult {
return {
definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
queryRange: definition.name ? [getRange(text, definition.name)] : [],
};
}
function getDefinitionForFragmentDefinition(
path: Uri,
text: string,
definition: FragmentDefinitionNode | OperationDefinitionNode,
): Definition {
const name = definition.name;
invariant(name, 'Expected ASTNode to have a Name.');
return {
path,
position: getPosition(text, definition),
range: getRange(text, definition),
name: name.value || '',
language: LANGUAGE,
// This is a file inside the project root, good enough for now
projectRoot: path,
};
}
function getDefinitionForNodeDefinition(
path: Uri,
text: string,
definition: TypeDefinitionNode,
): Definition {
const name = definition.name;
invariant(name, 'Expected ASTNode to have a Name.');
return {
path,
position: getPosition(text, definition),
range: getRange(text, definition),
name: name.value || '',
language: LANGUAGE,
// This is a file inside the project root, good enough for now
projectRoot: path,
};
}

View File

@ -0,0 +1,172 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
ASTNode,
DocumentNode,
GraphQLError,
GraphQLSchema,
Location,
SourceLocation,
} from 'graphql';
import type {
Diagnostic,
CustomValidationRule,
} from 'graphql-language-service-types';
import invariant from 'assert';
import {findDeprecatedUsages, parse} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
Position,
Range,
validateWithCustomRules,
} from 'graphql-language-service-utils';
export const SEVERITY = {
ERROR: 1,
WARNING: 2,
INFORMATION: 3,
HINT: 4,
};
export function getDiagnostics(
query: string,
schema: ?GraphQLSchema = null,
customRules?: Array<CustomValidationRule>,
isRelayCompatMode?: boolean,
): Array<Diagnostic> {
let ast = null;
try {
ast = parse(query);
} catch (error) {
const range = getRange(error.locations[0], query);
return [
{
severity: SEVERITY.ERROR,
message: error.message,
source: 'GraphQL: Syntax',
range,
},
];
}
return validateQuery(ast, schema, customRules, isRelayCompatMode);
}
export function validateQuery(
ast: DocumentNode,
schema: ?GraphQLSchema = null,
customRules?: Array<CustomValidationRule>,
isRelayCompatMode?: boolean,
): Array<Diagnostic> {
// We cannot validate the query unless a schema is provided.
if (!schema) {
return [];
}
const validationErrorAnnotations = mapCat(
validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
error => annotations(error, SEVERITY.ERROR, 'Validation'),
);
// Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
// support older versions of graphql-js.
const deprecationWarningAnnotations = !findDeprecatedUsages
? []
: mapCat(findDeprecatedUsages(schema, ast), error =>
annotations(error, SEVERITY.WARNING, 'Deprecation'),
);
return validationErrorAnnotations.concat(deprecationWarningAnnotations);
}
// General utility for map-cating (aka flat-mapping).
function mapCat<T>(
array: Array<T>,
mapper: (item: T) => Array<any>,
): Array<any> {
return Array.prototype.concat.apply([], array.map(mapper));
}
function annotations(
error: GraphQLError,
severity: number,
type: string,
): Array<Diagnostic> {
if (!error.nodes) {
return [];
}
return error.nodes.map(node => {
const highlightNode =
node.kind !== 'Variable' && node.name
? node.name
: node.variable
? node.variable
: node;
invariant(error.locations, 'GraphQL validation error requires locations.');
const loc = error.locations[0];
const highlightLoc = getLocation(highlightNode);
const end = loc.column + (highlightLoc.end - highlightLoc.start);
return {
source: `GraphQL: ${type}`,
message: error.message,
severity,
range: new Range(
new Position(loc.line - 1, loc.column - 1),
new Position(loc.line - 1, end),
),
};
});
}
export function getRange(location: SourceLocation, queryText: string) {
const parser = onlineParser();
const state = parser.startState();
const lines = queryText.split('\n');
invariant(
lines.length >= location.line,
'Query text must have more lines than where the error happened',
);
let stream = null;
for (let i = 0; i < location.line; i++) {
stream = new CharacterStream(lines[i]);
while (!stream.eol()) {
const style = parser.token(stream, state);
if (style === 'invalidchar') {
break;
}
}
}
invariant(stream, 'Expected Parser stream to be available.');
const line = location.line - 1;
const start = stream.getStartOfToken();
const end = stream.getCurrentPosition();
return new Range(new Position(line, start), new Position(line, end));
}
/**
* Get location info from a node in a type-safe way.
*
* The only way a node could not have a location is if we initialized the parser
* (and therefore the lexer) with the `noLocation` option, but we always
* call `parse` without options above.
*/
function getLocation(node: any): Location {
const typeCastedNode = (node: ASTNode);
const location = typeCastedNode.loc;
invariant(location, 'Expected ASTNode to have a location.');
return location;
}
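A hedged sketch: syntax diagnostics need no schema, so a malformed query alone yields an ERROR entry:
const diags = getDiagnostics('{ user(id: }');
// diags[0] is roughly {severity: SEVERITY.ERROR, source: 'GraphQL: Syntax',
// message: 'Syntax Error: ...', range: Range {...}}.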

View File

@ -0,0 +1,186 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
/**
* Ported from codemirror-graphql
* https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
*/
import type {GraphQLSchema} from 'graphql';
import type {ContextToken} from 'graphql-language-service-types';
import type {Hover} from 'vscode-languageserver-types';
import type {Position} from 'graphql-language-service-utils';
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
import {GraphQLNonNull, GraphQLList} from 'graphql';
export function getHoverInformation(
schema: GraphQLSchema,
queryText: string,
cursor: Position,
contextToken?: ContextToken,
): Hover.contents {
const token = contextToken || getTokenAtPosition(queryText, cursor);
if (!schema || !token || !token.state) {
return [];
}
const state = token.state;
const kind = state.kind;
const step = state.step;
const typeInfo = getTypeInfo(schema, token.state);
const options = {schema};
// Given a Schema and a Token, produce the contents of an info tooltip.
// To do this, collect text fragments into an array ('into') and then pass
// it through the various rendering functions below.
if (
(kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
(kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
) {
const into = [];
renderField(into, typeInfo, options);
renderDescription(into, options, typeInfo.fieldDef);
return into.join('').trim();
} else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
const into = [];
renderDirective(into, typeInfo, options);
renderDescription(into, options, typeInfo.directiveDef);
return into.join('').trim();
} else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
const into = [];
renderArg(into, typeInfo, options);
renderDescription(into, options, typeInfo.argDef);
return into.join('').trim();
} else if (
kind === 'EnumValue' &&
typeInfo.enumValue &&
typeInfo.enumValue.description
) {
const into = [];
renderEnumValue(into, typeInfo, options);
renderDescription(into, options, typeInfo.enumValue);
return into.join('').trim();
} else if (
kind === 'NamedType' &&
typeInfo.type &&
typeInfo.type.description
) {
const into = [];
renderType(into, typeInfo, options, typeInfo.type);
renderDescription(into, options, typeInfo.type);
return into.join('').trim();
}
}
function renderField(into, typeInfo, options) {
renderQualifiedField(into, typeInfo, options);
renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
}
function renderQualifiedField(into, typeInfo, options) {
if (!typeInfo.fieldDef) {
return;
}
const fieldName = (typeInfo.fieldDef.name: string);
if (fieldName.slice(0, 2) !== '__') {
renderType(into, typeInfo, options, typeInfo.parentType);
text(into, '.');
}
text(into, fieldName);
}
function renderDirective(into, typeInfo, options) {
if (!typeInfo.directiveDef) {
return;
}
const name = '@' + typeInfo.directiveDef.name;
text(into, name);
}
function renderArg(into, typeInfo, options) {
if (typeInfo.directiveDef) {
renderDirective(into, typeInfo, options);
} else if (typeInfo.fieldDef) {
renderQualifiedField(into, typeInfo, options);
}
if (!typeInfo.argDef) {
return;
}
const name = typeInfo.argDef.name;
text(into, '(');
text(into, name);
renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
text(into, ')');
}
function renderTypeAnnotation(into, typeInfo, options, t) {
text(into, ': ');
renderType(into, typeInfo, options, t);
}
function renderEnumValue(into, typeInfo, options) {
if (!typeInfo.enumValue) {
return;
}
const name = typeInfo.enumValue.name;
renderType(into, typeInfo, options, typeInfo.inputType);
text(into, '.');
text(into, name);
}
function renderType(into, typeInfo, options, t) {
if (!t) {
return;
}
if (t instanceof GraphQLNonNull) {
renderType(into, typeInfo, options, t.ofType);
text(into, '!');
} else if (t instanceof GraphQLList) {
text(into, '[');
renderType(into, typeInfo, options, t.ofType);
text(into, ']');
} else {
text(into, t.name);
}
}
function renderDescription(into, options, def) {
if (!def) {
return;
}
const description =
typeof def.description === 'string' ? def.description : null;
if (description) {
text(into, '\n\n');
text(into, description);
}
renderDeprecation(into, options, def);
}
function renderDeprecation(into, options, def) {
if (!def) {
return;
}
const reason =
typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
if (!reason) {
return;
}
text(into, '\n\n');
text(into, 'Deprecated: ');
text(into, reason);
}
function text(into: string[], content: string) {
into.push(content);
}
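A hedged sketch; `schema` is assumed to be a GraphQLSchema with a `user: String` field on Query, and the cursor points into the field name:
const hover = getHoverInformation(schema, '{ user }', {line: 0, character: 3});
// hover is roughly 'Query.user: String', plus a description block if the
// field has one.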

View File

@ -0,0 +1,121 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
Outline,
TextToken,
TokenKind,
} from 'graphql-language-service-types';
import {Kind, parse, visit} from 'graphql';
import {offsetToPosition} from 'graphql-language-service-utils';
const {INLINE_FRAGMENT} = Kind;
const OUTLINEABLE_KINDS = {
Field: true,
OperationDefinition: true,
Document: true,
SelectionSet: true,
Name: true,
FragmentDefinition: true,
FragmentSpread: true,
InlineFragment: true,
};
type OutlineTreeConverterType = {[name: string]: Function};
export function getOutline(queryText: string): ?Outline {
let ast;
try {
ast = parse(queryText);
} catch (error) {
return null;
}
const visitorFns = outlineTreeConverter(queryText);
const outlineTrees = visit(ast, {
leave(node) {
if (
OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
visitorFns[node.kind]
) {
return visitorFns[node.kind](node);
}
return null;
},
});
return {outlineTrees};
}
function outlineTreeConverter(docText: string): OutlineTreeConverterType {
const meta = node => ({
representativeName: node.name,
startPosition: offsetToPosition(docText, node.loc.start),
endPosition: offsetToPosition(docText, node.loc.end),
children: node.selectionSet || [],
});
return {
Field: node => {
const tokenizedText = node.alias
? [buildToken('plain', node.alias), buildToken('plain', ': ')]
: [];
tokenizedText.push(buildToken('plain', node.name));
return {tokenizedText, ...meta(node)};
},
OperationDefinition: node => ({
tokenizedText: [
buildToken('keyword', node.operation),
buildToken('whitespace', ' '),
buildToken('class-name', node.name),
],
...meta(node),
}),
Document: node => node.definitions,
SelectionSet: node =>
concatMap(node.selections, child => {
return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
}),
Name: node => node.value,
FragmentDefinition: node => ({
tokenizedText: [
buildToken('keyword', 'fragment'),
buildToken('whitespace', ' '),
buildToken('class-name', node.name),
],
...meta(node),
}),
FragmentSpread: node => ({
tokenizedText: [
buildToken('plain', '...'),
buildToken('class-name', node.name),
],
...meta(node),
}),
InlineFragment: node => node.selectionSet,
};
}
function buildToken(kind: TokenKind, value: string): TextToken {
return {kind, value};
}
function concatMap(arr: Array<any>, fn: Function): Array<any> {
const res = [];
for (let i = 0; i < arr.length; i++) {
const x = fn(arr[i], i);
if (Array.isArray(x)) {
res.push(...x);
} else {
res.push(x);
}
}
return res;
}
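A hedged sketch of the shape produced for a tiny named query:
const outline = getOutline('query Hello { user }');
// outline.outlineTrees[0].tokenizedText is roughly:
// [{kind: 'keyword', value: 'query'},
//  {kind: 'whitespace', value: ' '},
//  {kind: 'class-name', value: 'Hello'}]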

View File

@ -0,0 +1,31 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
export {
getDefinitionState,
getFieldDef,
forEachState,
objectValues,
hintList,
} from './autocompleteUtils';
export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
export {
LANGUAGE,
getDefinitionQueryResultForFragmentSpread,
getDefinitionQueryResultForDefinitionNode,
} from './getDefinition';
export {getDiagnostics, validateQuery} from './getDiagnostics';
export {getOutline} from './getOutline';
export {getHoverInformation} from './getHoverInformation';
export {GraphQLLanguageService} from './GraphQLLanguageService';

Binary file not shown. (after: 31 KiB)

Binary file not shown. (after: 15 KiB)

View File

@ -0,0 +1,15 @@
{
"short_name": "Super Graph",
"name": "Super Graph - GraphQL API for Rails",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

View File

Binary file not shown. (before: 2.6 KiB, after: 2.6 KiB)

45
cmd/main.go Normal file
View File

@ -0,0 +1,45 @@
// Main package for the Super Graph service and command line tooling
/*
Super Graph
For documentation, visit https://supergraph.dev
Commit SHA-1 :
Commit timestamp :
Branch :
Go version : go1.14
Licensed under the Apache Public License 2.0
Copyright 2020, Vikram Rangnekar.
Usage:
super-graph [command]
Available Commands:
conf:dump Dump config to file
db:create Create database
db:drop Drop database
db:migrate Migrate the database
db:new Generate a new migration
db:reset Reset database
db:seed Run the seed script to seed the database
db:setup Setup database
db:status Print current migration status
help Help about any command
new Create a new application
serv Run the super-graph service
version Super Graph binary version information
Flags:
-h, --help help for super-graph
--path string path to config files (default "./config")
Use "super-graph [command] --help" for more information about a command.
*/
package main
import "github.com/dosco/super-graph/cmd/internal/serv"
func main() {
serv.Cmd()
}

View File

@ -1,755 +0,0 @@
# http://localhost:8080/
variables {
"data": [
{
"name": "Protect Ya Neck",
"created_at": "now",
"updated_at": "now"
},
{
"name": "Enter the Wu-Tang",
"created_at": "now",
"updated_at": "now"
}
]
}
mutation {
products(insert: $data) {
id
name
}
}
variables {
"update": {
"name": "Wu-Tang",
"description": "No description needed"
},
"product_id": 1
}
mutation {
products(id: $product_id, update: $update) {
id
name
description
}
}
query {
users {
id
email
picture: avatar
products(limit: 2, where: {price: {gt: 10}}) {
id
name
description
}
}
}
variables {
"data": [
{
"name": "Gumbo1",
"created_at": "now",
"updated_at": "now"
},
{
"name": "Gumbo2",
"created_at": "now",
"updated_at": "now"
}
]
}
mutation {
products(id: 199, delete: true) {
id
name
}
}
query {
products {
id
name
user {
email
}
}
}
variables {
"data": {
"product_id": 5
}
}
mutation {
products(id: $product_id, delete: true) {
id
name
}
}
query {
products {
id
name
price
users {
email
}
}
}
variables {
"data": {
"email": "gfk@myspace.com",
"full_name": "Ghostface Killah",
"created_at": "now",
"updated_at": "now"
}
}
mutation {
user(insert: $data) {
id
}
}
variables {
"update": {
"name": "Helloo",
"description": "World \u003c\u003e"
},
"user": 123
}
mutation {
products(id: 5, update: $update) {
id
name
description
}
}
variables {
"data": {
"name": "WOOO",
"price": 50.5
}
}
mutation {
products(insert: $data) {
id
name
}
}
query getProducts {
products {
id
name
price
description
}
}
query {
deals {
id
name
price
}
}
variables {
"beer": "smoke"
}
query beerSearch {
products(search: $beer) {
id
name
search_rank
search_headline_description
}
}
query {
user {
id
full_name
}
}
variables {
"data": {
"email": "goo1@rug.com",
"full_name": "The Dude",
"created_at": "now",
"updated_at": "now",
"product": {
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now"
}
}
}
mutation {
user(insert: $data) {
id
full_name
email
product {
id
name
price
}
}
}
variables {
"data": {
"email": "goo12@rug.com",
"full_name": "The Dude",
"created_at": "now",
"updated_at": "now",
"product": [
{
"name": "Banana 1",
"price": 1.1,
"created_at": "now",
"updated_at": "now"
},
{
"name": "Banana 2",
"price": 2.2,
"created_at": "now",
"updated_at": "now"
}
]
}
}
mutation {
user(insert: $data) {
id
full_name
email
products {
id
name
price
}
}
}
variables {
"data": {
"name": "Banana 3",
"price": 1.1,
"created_at": "now",
"updated_at": "now",
"user": {
"email": "a2@a.com",
"full_name": "The Dude",
"created_at": "now",
"updated_at": "now"
}
}
}
mutation {
products(insert: $data) {
id
name
price
user {
id
full_name
email
}
}
}
variables {
"update": {
"name": "my_name",
"description": "my_desc"
}
}
mutation {
product(id: 15, update: $update, where: {id: {eq: 1}}) {
id
name
}
}
variables {
"update": {
"name": "my_name",
"description": "my_desc"
}
}
mutation {
product(update: $update, where: {id: {eq: 1}}) {
id
name
}
}
variables {
"update": {
"name": "my_name 2",
"description": "my_desc 2"
}
}
mutation {
product(update: $update, where: {id: {eq: 1}}) {
id
name
description
}
}
variables {
"data": {
"sale_type": "tuutuu",
"quantity": 5,
"due_date": "now",
"customer": {
"email": "thedude1@rug.com",
"full_name": "The Dude"
},
"product": {
"name": "Apple",
"price": 1.25
}
}
}
mutation {
purchase(update: $data, id: 5) {
sale_type
quantity
due_date
customer {
id
full_name
email
}
product {
id
name
price
}
}
}
variables {
"data": {
"email": "thedude@rug.com",
"full_name": "The Dude",
"created_at": "now",
"updated_at": "now",
"product": {
"where": {
"id": 2
},
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now"
}
}
}
mutation {
user(update: $data, where: {id: {eq: 8}}) {
id
full_name
email
product {
id
name
price
}
}
}
variables {
"data": {
"email": "thedude@rug.com",
"full_name": "The Dude",
"created_at": "now",
"updated_at": "now",
"product": {
"where": {
"id": 2
},
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now"
}
}
}
query {
user(where: {id: {eq: 8}}) {
id
product {
id
name
price
}
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now",
"user": {
"email": "thedude@rug.com"
}
}
}
query {
user {
email
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now",
"user": {
"email": "booboo@demo.com"
}
}
}
mutation {
product(update: $data, id: 6) {
id
name
user {
id
full_name
email
}
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now",
"user": {
"email": "booboo@demo.com"
}
}
}
query {
product(id: 6) {
id
name
user {
id
full_name
email
}
}
}
variables {
"data": {
"email": "thedude123@rug.com",
"full_name": "The Dude",
"created_at": "now",
"updated_at": "now",
"product": {
"connect": {
"id": 7
},
"disconnect": {
"id": 8
}
}
}
}
mutation {
user(update: $data, id: 6) {
id
full_name
email
product {
id
name
price
}
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now",
"user": {
"connect": {
"id": 5,
"email": "test@test.com"
}
}
}
}
mutation {
product(update: $data, id: 9) {
id
name
user {
id
full_name
email
}
}
}
variables {
"data": {
"email": "thed44ude@rug.com",
"full_name": "The Dude",
"created_at": "now",
"updated_at": "now",
"product": {
"connect": {
"id": 5
}
}
}
}
mutation {
user(insert: $data) {
id
full_name
email
product {
id
name
price
}
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now",
"user": {
"connect": {
"id": 5
}
}
}
}
mutation {
product(insert: $data) {
id
name
user {
id
full_name
email
}
}
}
variables {
"data": [
{
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now",
"user": {
"connect": {
"id": 6
}
}
},
{
"name": "Coconut",
"price": 2.25,
"created_at": "now",
"updated_at": "now",
"user": {
"connect": {
"id": 3
}
}
}
]
}
mutation {
products(insert: $data) {
id
name
user {
id
full_name
email
}
}
}
variables {
"data": [
{
"name": "Apple",
"price": 1.25,
"created_at": "now",
"updated_at": "now"
},
{
"name": "Coconut",
"price": 2.25,
"created_at": "now",
"updated_at": "now"
}
]
}
mutation {
products(insert: $data) {
id
name
user {
id
full_name
email
}
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"user": {
"connect": {
"id": 5,
"email": "test@test.com"
}
}
}
}
mutation {
product(update: $data, id: 9) {
id
name
user {
id
full_name
email
}
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"user": {
"connect": {
"id": 5
}
}
}
}
mutation {
product(update: $data, id: 9) {
id
name
user {
id
full_name
email
}
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"user": {
"disconnect": {
"id": 5
}
}
}
}
mutation {
product(update: $data, id: 9) {
id
name
user_id
}
}
variables {
"data": {
"name": "Apple",
"price": 1.25,
"user": {
"disconnect": {
"id": 5
}
}
}
}
mutation {
product(update: $data, id: 2) {
id
name
user_id
}
}
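The connect and disconnect keys in the last several entries link or unlink an existing row by primary key instead of inserting a new one; the final two mutations select user_id to show that a disconnect leaves the product's foreign key set to null. A small Go sketch of building such a payload with typed structs instead of hand-written JSON (the struct names are illustrative only; what matters is that the marshalled shape mirrors the variables blocks above):

// Sketch: constructing a connect/disconnect variables payload.
// Struct names are illustrative; only the JSON shape matters.
package main

import (
	"encoding/json"
	"fmt"
)

type Ref struct {
	ID int `json:"id"`
}

type UserLink struct {
	Connect    *Ref `json:"connect,omitempty"`
	Disconnect *Ref `json:"disconnect,omitempty"`
}

type ProductUpdate struct {
	Name  string    `json:"name"`
	Price float64   `json:"price"`
	User  *UserLink `json:"user,omitempty"`
}

func main() {
	vars := map[string]interface{}{
		"data": ProductUpdate{
			Name:  "Apple",
			Price: 1.25,
			// Unlink user 5; the product's user_id becomes null.
			User: &UserLink{Disconnect: &Ref{ID: 5}},
		},
	}
	b, _ := json.MarshalIndent(vars, "", "  ")
	fmt.Println(string(b))
}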

View File

@ -1,226 +0,0 @@
app_name: "Super Graph Development"
host_port: 0.0.0.0:8080
web_ui: true
# debug, info, warn, error, fatal, panic
log_level: "debug"
# enable or disable http compression (uses gzip)
http_compress: true
# When production mode is 'true', only queries
# from the allow list are permitted.
# When it's 'false', all queries are saved to
# the allow list in ./config/allow.list
production: false
# Throw a 401 on auth failure for queries that need auth
auth_fail_block: false
# Latency tracing for database queries and remote joins;
# the resulting latency information is returned with
# the response
enable_tracing: true
# Watch the config folder and reload Super Graph
# with the new configs when a change is detected
reload_on_config_change: true
# File that points to the database seeding script
# seed_file: seed.js
# Path pointing to where the migrations can be found
migrations_path: ./config/migrations
# Secret key for general encryption operations like
# encrypting the cursor data
secret_key: supercalifajalistics
# CORS: A list of origins a cross-domain request can be executed from.
# If the special * value is present in the list, all origins will be allowed.
# An origin may contain a wildcard (*) to replace 0 or more
# characters (e.g. http://*.domain.com).
cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
cors_debug: true
# Postgres-related environment variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
# SG_DATABASE_USER
# SG_DATABASE_PASSWORD
# Auth-related environment variables
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE
# inflections:
# person: people
# sheep: sheep
auth:
# Can be 'rails' or 'jwt'
type: rails
cookie: _app_session
# Comment this out if you want to disable setting
# the user_id via a header for testing.
# Disable in production
creds_in_header: true
rails:
# Rails version; used for reading the
# various cookie formats.
version: 5.2
# Found in 'Rails.application.config.secret_key_base'
secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566
# Remote cookie store. (memcache or redis)
# url: redis://redis:6379
# password: ""
# max_idle: 80
# max_active: 12000
# In most cases you don't need these
# salt: "encrypted cookie"
# sign_salt: "signed encrypted cookie"
# auth_salt: "authenticated encrypted cookie"
# jwt:
# provider: auth0
# secret: abc335bfcfdb04e50db5bb0a4d67ab9
# public_key_file: /secrets/public_key.pem
# public_key_type: ecdsa #rsa
database:
type: postgres
host: db
port: 5432
dbname: app_development
user: postgres
password: postgres
#schema: "public"
#pool_size: 10
#max_retries: 0
#log_level: "debug"
# Set session variable "user.id" to the user id
# Enable this if you need the user id in triggers, etc
set_user_id: false
# Database ping timeout, used for DB health checks
ping_timeout: 1m
# Define additional variables here to be used with filters
variables:
admin_account_id: "5"
# Field and table names that you wish to block
blocklist:
- ar_internal_metadata
- schema_migrations
- secret
- password
- encrypted
- token
tables:
- name: customers
remotes:
- name: payments
id: stripe_id
url: http://rails_app:3000/stripe/$id
path: data
# debug: true
pass_headers:
- cookie
set_headers:
- name: Host
value: 0.0.0.0
# - name: Authorization
# value: Bearer <stripe_api_key>
# You can create new fields that have a
# real db table backing them
- name: me
table: users
- name: deals
table: products
- name: users
columns:
- name: email
related_to: products.name
roles_query: "SELECT * FROM users WHERE id = $user_id"
roles:
- name: anon
tables:
- name: products
query:
limit: 10
columns: ["id", "name", "description" ]
aggregation: false
insert:
block: false
update:
block: false
delete:
block: false
- name: deals
query:
limit: 3
aggregation: false
- name: purchases
query:
limit: 3
aggregation: false
- name: user
tables:
- name: users
query:
filters: ["{ id: { _eq: $user_id } }"]
- name: products
query:
limit: 50
filters: ["{ user_id: { eq: $user_id } }"]
disable_functions: false
insert:
filters: ["{ user_id: { eq: $user_id } }"]
presets:
- user_id: "$user_id"
- created_at: "now"
- updated_at: "now"
update:
filters: ["{ user_id: { eq: $user_id } }"]
columns:
- id
- name
presets:
- updated_at: "now"
delete:
block: true
- name: admin
match: id = 1000
tables:
- name: users
filters: []
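With creds_in_header: true, the dev config allows a user id to be supplied in a request header for testing, which is the easiest way to exercise the role rules above (for example the user role's { user_id: { eq: $user_id } } filters). The X-User-ID header name below is my assumption about Super Graph's convention rather than something stated in this config; verify it against your version before relying on it.

// Sketch: running a query as a given user in development.
// Requires creds_in_header: true (dev only); the X-User-ID
// header name is an assumption, not taken from the config above.
package main

import (
	"bytes"
	"fmt"
	"io/ioutil"
	"net/http"
)

func main() {
	q := []byte(`{"query": "query { products { id name } }"}`)
	req, _ := http.NewRequest("POST",
		"http://localhost:8080/api/v1/graphql", bytes.NewReader(q))
	req.Header.Set("Content-Type", "application/json")
	// Impersonate user 5 so the 'user' role's filters apply.
	req.Header.Set("X-User-ID", "5")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	out, _ := ioutil.ReadAll(resp.Body)
	fmt.Println(string(out))
}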

View File

@ -1,67 +0,0 @@
# Inherit config from this other config file
# so I only need to overwrite some values
inherits: dev
app_name: "Super Graph Production"
host_port: 0.0.0.0:8080
web_ui: false
# debug, info, warn, error, fatal, panic, disable
log_level: "info"
# enable or disable http compression (uses gzip)
http_compress: true
# When production mode is 'true', only queries
# from the allow list are permitted.
# When it's 'false', all queries are saved to
# the allow list in ./config/allow.list
production: true
# Throw a 401 on auth failure for queries that need auth
auth_fail_block: true
# Latency tracing for database queries and remote joins;
# the resulting latency information is returned with
# the response
enable_tracing: true
# File that points to the database seeding script
# seed_file: seed.js
# Path pointing to where the migrations can be found
# migrations_path: migrations
# Secret key for general encryption operations like
# encrypting the cursor data
# secret_key: supercalifajalistics
# Postgres-related environment variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
# SG_DATABASE_USER
# SG_DATABASE_PASSWORD
# Auth-related environment variables
# SG_AUTH_RAILS_COOKIE_SECRET_KEY_BASE
# SG_AUTH_RAILS_REDIS_URL
# SG_AUTH_RAILS_REDIS_PASSWORD
# SG_AUTH_JWT_PUBLIC_KEY_FILE
database:
type: postgres
host: db
port: 5432
dbname: app_production
user: postgres
password: postgres
#pool_size: 10
#max_retries: 0
#log_level: "debug"
# Set session variable "user.id" to the user id
# Enable this if you need the user id in triggers, etc
set_user_id: false
# Database ping timeout, used for DB health checks
ping_timeout: 5m
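In production, the database credentials above are usually injected through the SG_DATABASE_* environment variables listed in the comments rather than committed in the YAML. A trivial preflight sketch that reports which of them are set (variable names taken from the comments above; the check itself is illustrative, not part of Super Graph):

// Sketch: preflight check for the environment variables that
// override the production database settings.
package main

import (
	"fmt"
	"os"
)

func main() {
	for _, k := range []string{
		"SG_DATABASE_HOST", "SG_DATABASE_PORT",
		"SG_DATABASE_USER", "SG_DATABASE_PASSWORD",
	} {
		if _, ok := os.LookupEnv(k); ok {
			fmt.Printf("%s is set\n", k)
		} else {
			fmt.Printf("%s is not set\n", k)
		}
	}
}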

Some files were not shown because too many files have changed in this diff.