Compare commits

...

60 Commits

SHA1 Message Date
1370d24985 Fix issue with make install 2020-04-12 20:35:31 -04:00
ef50c1957b Fix CloudRun connection issue 2020-04-12 10:09:37 -04:00
41ea6ef6f5 Fix readme add library usage 2020-04-11 16:41:10 -04:00
a266517d17 Remove config package 2020-04-11 02:45:06 -04:00
7831d27345 Refactor Super Graph into a library #26 2020-04-10 02:27:43 -04:00
e102da839e Fix issue with Postgres FUNC_MAX_ARGS by moving to row_to_json 2020-04-01 21:25:50 -04:00
68a378c00f Fix issue with prepared statements skipped on error 2020-03-31 01:28:39 -04:00
d96eaf14f4 Fix bugs with escape char handling 2020-03-30 10:03:47 -04:00
01e488b69d Fix for bug blocking anon queries 2020-03-21 20:11:04 -04:00
7a450b16ba Fix issue with detecting many to many relationships 2020-03-18 20:19:56 -04:00
1ad8cbf15b Fix minor parser bug 2020-03-17 23:03:41 -04:00
f69f1c67d5 Fix to remove left over debug log 2020-03-16 01:43:26 -04:00
a172193955 Fix to ensure cursor fields can be defined in the query 2020-03-16 01:40:47 -04:00
81338b6123 Fix issues blocking Apollo client 2020-03-14 01:35:42 -04:00
265b93b203 Fix for encrypted cursor in production mode bug 2020-03-06 21:38:01 +05:30
6c240e21b4 Fix bug related to 'anon' role prepared statements 2020-03-06 15:39:15 +05:30
7930719eaa Add ability to set CORS headers 2020-03-06 09:47:51 +05:30
cc687b1b2b Fix issue with Dockerfile CMD 2020-03-05 09:13:52 +05:30
3033dcf1a9 Fix issue with setting PORT env var 2020-03-04 15:39:53 +05:30
0381982d19 Fix upx version issue in Dockerfile 2020-03-04 12:27:07 +05:30
2b0a798faa Add 'secrets' command to startup script 2020-03-03 19:44:14 +05:30
8b6c562ac1 Add CSV import command to seed javascript 2020-03-03 13:45:47 +05:30
a1fb89b762 Add support for SQL in variables 2020-02-29 10:35:48 +05:30
c82a7bff0d Misprint (#43) 2020-02-24 10:48:50 +05:30
7acf28bb3c Fix issue with upgrading to postgres 12 docker image #36 2020-02-24 02:37:21 +05:30
be5d4e976a Misprint (#41) 2020-02-24 02:04:23 +05:30
d1b884aec6 Misprint (#40) 2020-02-24 02:03:57 +05:30
4be4ce860b Misprint (#39) 2020-02-24 02:03:40 +05:30
dfa4caf540 Misprint (#37) 2020-02-24 02:03:27 +05:30
7763251fb7 fix "Try the demo app" in docs (#38)
* fix "Try the demo app" in docs

* fix "Get Started" setup in docs
2020-02-24 02:02:22 +05:30
51e105699e Fix corrupt json bug in jsn package 2020-02-24 02:00:11 +05:30
90694f8803 Fix spelling in docs (#34) 2020-02-23 15:41:04 +05:30
ad82f5b267 Fix spelling in docs (#35) 2020-02-23 15:40:42 +05:30
99b37a9c50 Fix bug related to new Postgres docker image 2020-02-23 10:28:32 +05:30
7ec1f59224 Fix bug with cursors and multiple order by 2020-02-23 02:28:37 +05:30
d3ecb1d6cc Fix bug with multi root queries 2020-02-21 10:29:37 +05:30
aed4170e8e Fix bug with cursor filters 2020-02-20 22:53:29 +05:30
c33e93ab37 Add support for cursors with multiple order by clauses 2020-02-19 10:22:44 +05:30
3d3e5d9c2b Add Yugabyte to docs 2020-02-12 08:42:53 +05:30
67b4a4d945 Fix issue with cursor as a variable 2020-02-11 11:41:35 +05:30
7413813138 Add pagination using opaque cursors 2020-02-10 12:15:37 +05:30
12007db76e Add support for Yugabyte DB 2020-02-07 11:42:14 +05:30
c85d379fe2 Add ability to add comments to the allow list 2020-02-04 00:20:25 -05:00
62fd1eac55 Add named auth and the all new action endpoints 2020-02-03 01:21:07 -05:00
1a3d74e1ce Fix issues surfaced by the fuzzer 2020-02-02 01:43:09 -05:00
3a4d885987 Fix to ensure only named queries are saved to the allow list 2020-02-01 10:54:19 -05:00
3bd9b199dd Fix bug with connect / disconnect on array relationships 2020-01-31 00:19:38 -05:00
4ffa1483a4 Add ability to treat JSON/JSONB columns as tables 2020-01-28 00:26:53 -05:00
52f3b1c7a2 Add mutation support for connect / disconnect with array relationships 2020-01-26 01:10:54 -05:00
2d466bfb12 Add skip query selectors that require auth in anon role 2020-01-20 23:38:17 -05:00
a0b8907c3c Fix various json parsing and sql generation bugs 2020-01-19 03:12:51 -05:00
8097ca3b8f Fixes example steps (#33) 2020-01-18 16:44:16 -05:00
0e498b0e94 Fix order by with aliases bug 2020-01-17 09:35:14 -05:00
3eb5b83070 Fix invalid update sql bug 2020-01-17 00:48:17 -05:00
e3c94d17d1 Add corrupt query validation 2020-01-16 01:44:19 -05:00
7240b27214 Fix for table alias relationship bug 2020-01-15 23:26:06 -05:00
f37d867e32 Fix remnant debug messages 2020-01-14 23:28:48 -05:00
5e75cc7b83 Merge branch 'master' of github.com:dosco/super-graph 2020-01-14 23:19:11 -05:00
d4dca86267 Fix new app creation bug #32 2020-01-14 23:16:55 -05:00
76340ab008 Remove *pgxpool.Pool arg from NewDBSchema (#31) 2020-01-14 01:08:04 -05:00
236 changed files with 10264 additions and 5654 deletions

7
.gitignore vendored
View File

@ -24,14 +24,15 @@
/demo/tmp
.vscode
main
.DS_Store
.swp
.release
main
super-graph
supergraph
*-fuzz.zip
crashers
suppressions
release
release
.gofuzz
*-fuzz.zip

View File

@ -7,7 +7,7 @@ rules:
- name: run
match: \.go$
ignore: web|examples|docs|_test\.go$
command: go run main.go serv
command: go run cmd/main.go serv
- name: test
match: _test\.go$
command: go test -cover {PKG}

View File

@ -1,24 +1,31 @@
# stage: 1
FROM node:10 as react-build
WORKDIR /web
COPY web/ ./
COPY /cmd/internal/serv/web/ ./
RUN yarn
RUN yarn build
# stage: 2
FROM golang:1.13.4-alpine as go-build
FROM golang:1.14-alpine as go-build
RUN apk update && \
apk add --no-cache make && \
apk add --no-cache git && \
apk add --no-cache jq && \
apk add --no-cache upx=3.95-r2
RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc
ARG SOPS_VERSION=3.5.0
ADD https://github.com/mozilla/sops/releases/download/v${SOPS_VERSION}/sops-v${SOPS_VERSION}.linux /usr/local/bin/sops
RUN chmod 755 /usr/local/bin/sops
WORKDIR /app
COPY . /app
RUN mkdir -p /app/web/build
COPY --from=react-build /web/build/ ./web/build/
RUN mkdir -p /app/cmd/internal/serv/web/build
COPY --from=react-build /web/build/ ./cmd/internal/serv/web/build
RUN go mod vendor
RUN make build
@ -26,6 +33,8 @@ RUN echo "Compressing binary, will take a bit of time..." && \
upx --ultra-brute -qq super-graph && \
upx -t super-graph
# stage: 3
FROM alpine:latest
WORKDIR /
@ -36,10 +45,15 @@ RUN mkdir -p /config
COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=go-build /app/config/* /config/
COPY --from=go-build /app/super-graph .
COPY --from=go-build /app/cmd/scripts/start.sh .
COPY --from=go-build /usr/local/bin/sops .
RUN chmod +x /super-graph
RUN chmod +x /start.sh
USER nobody
EXPOSE 8080
ENV GO_ENV production
CMD ./super-graph serv
ENTRYPOINT ["./start.sh"]
CMD ["./super-graph", "serv"]

View File

@ -28,14 +28,14 @@ BIN_DIR := $(GOPATH)/bin
GORICE := $(BIN_DIR)/rice
GOLANGCILINT := $(BIN_DIR)/golangci-lint
GITCHGLOG := $(BIN_DIR)/git-chglog
WEB_BUILD_DIR := ./web/build/manifest.json
WEB_BUILD_DIR := ./cmd/internal/serv/web/build/manifest.json
$(GORICE):
@GO111MODULE=off go get -u github.com/GeertJohan/go.rice/rice
$(WEB_BUILD_DIR):
@echo "First install Yarn and create a build of the web UI found under ./web"
@echo "Command: cd web && yarn build"
@echo "First install Yarn and create a build of the web UI then re-run make install"
@echo "Run this command: yarn --cwd cmd/internal/serv/web/ build"
@exit 1
$(GITCHGLOG):
@ -57,7 +57,7 @@ os = $(word 1, $@)
$(PLATFORMS): lint test
@mkdir -p release
@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64
@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64 cmd/main.go
release: windows linux darwin
@ -69,7 +69,7 @@ gen: $(GORICE) $(WEB_BUILD_DIR)
@go generate ./...
$(BINARY): clean
@go build $(BUILD_FLAGS) -o $(BINARY)
@go build $(BUILD_FLAGS) -o $(BINARY) cmd/main.go
clean:
@rm -f $(BINARY)
@ -77,11 +77,10 @@ clean:
run: clean
@go run $(BUILD_FLAGS) main.go $(ARGS)
install:
@echo $(GOPATH)
install: build
@mv $(BINARY) $(GOPATH)/bin/$(BINARY)
@echo "Commit Hash: `git rev-parse HEAD`"
@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
@go install $(BUILD_FLAGS)
@echo "New Hash:" `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`
uninstall: clean

View File

@ -1,26 +1,74 @@
<!-- <a href="https://supergraph.dev"><img src="https://supergraph.dev/hologram.svg" width="100" height="100" align="right" /></a> -->
<img src="docs/.vuepress/public/super-graph.png" width="250" />
<img src="docs/guide/.vuepress/public/super-graph.png" width="250" />
### Build web products faster. Secure high performance GraphQL
![Apache Public License 2.0](https://img.shields.io/github/license/dosco/super-graph.svg)
![Docker build](https://img.shields.io/docker/cloud/build/dosco/super-graph.svg)
![Cloud native](https://img.shields.io/badge/cloud--native-enabled-blue.svg)
[![GoDoc](https://img.shields.io/badge/godoc-reference-5272B4.svg)](https://pkg.go.dev/github.com/dosco/super-graph/core?tab=doc)
![Apache 2.0](https://img.shields.io/github/license/dosco/super-graph.svg?style=flat-square)
![Docker build](https://img.shields.io/docker/cloud/build/dosco/super-graph.svg?style=flat-square)
![Cloud native](https://img.shields.io/badge/cloud--native-enabled-blue.svg?style=flat-square)
[![Discord Chat](https://img.shields.io/discord/628796009539043348.svg)](https://discord.gg/6pSWCTZ)
## What's Super Graph?
## What is Super Graph
Designed to 100x your developer productivity. Super Graph will instantly and without you writing code provide you a high performance GraphQL API for Postgres DB. GraphQL queries are compiled into a single fast SQL query. Super Graph is a GO library and a service, use it in your own code or run it as a seperate service.
Super Graph is designed to 100x your developer productivity. It will instantly, and without you writing any code, provide a high-performance and secure GraphQL API for your Postgres DB. GraphQL queries are translated into a single fast SQL query. No more writing API code: as you develop
your web frontend, just make the query you need and Super Graph will do the rest.
## Using it as a service
Super Graph has a rich feature set like integrating with your existing Ruby on Rails apps, joining your DB with data from remote APIs, role and attribute based access control, support for JWT tokens, built-in DB mutations and seeding, and a lot more.
```console
git clone https://github.com/dosco/super-graph
cd ./super-graph
make install
super-graph new <app_name>
```

![GraphQL](docs/.vuepress/public/graphql.png?raw=true "")
## Using it in your own code
```golang
package main

import (
    "context"
    "database/sql"
    "fmt"
    "log"

    "github.com/dosco/super-graph/core"
    _ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
    db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
    if err != nil {
        log.Fatal(err)
    }

    conf, err := core.ReadInConfig("./config/dev.yml")
    if err != nil {
        log.Fatal(err)
    }

    sg, err := core.NewSuperGraph(conf, db)
    if err != nil {
        log.Fatal(err)
    }

    query := `
    query {
        posts {
            id
            title
        }
    }`

    res, err := sg.GraphQL(context.Background(), query, nil)
    if err != nil {
        log.Fatal(err)
    }

    fmt.Println(string(res.Data))
}
```
## About Super Graph
After working on several products throughout my career, I've found that we spend way too much time building API backends. Most APIs also require constant updating, which costs real time and money.
@ -37,6 +85,7 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun
- Complex nested queries and mutations
- Auto learns database tables and relationships
- Role and Attribute based access control
- Opaque cursor based efficient pagination
- Full text search and aggregations
- JWT tokens supported (Auth0, etc)
- Join database queries with remote REST APIs
@ -48,16 +97,8 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun
- Fuzz tested for security
- Database migrations tool
- Database seeding tool
- Works with Postgres and YugabyteDB
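For example, a single query of roughly this shape, combining nesting, filtering and ordering, compiles into one SQL statement (a hypothetical schema; the argument names are illustrative, not exact Super Graph syntax):

```graphql
query {
  customers(where: { name: { search: "john" } }, limit: 5) {
    id
    name
    orders(order_by: { created_at: desc }, limit: 3) {
      id
      total
    }
  }
}
```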
## Get started
```
git clone https://github.com/dosco/super-graph
cd ./super-graph
make install
super-graph new <app_name>
```
## Documentation

View File

@ -0,0 +1,40 @@
package serv
import (
"fmt"
"net/http"
)
type actionFn func(w http.ResponseWriter, r *http.Request) error
func newAction(a *Action) (http.Handler, error) {
var fn actionFn
var err error
if len(a.SQL) != 0 {
fn, err = newSQLAction(a)
} else {
return nil, fmt.Errorf("invalid config for action '%s'", a.Name)
}
if err != nil {
return nil, err
}
httpFn := func(w http.ResponseWriter, r *http.Request) {
if err := fn(w, r); err != nil {
renderErr(w, err, nil)
}
}
return http.HandlerFunc(httpFn), nil
}
func newSQLAction(a *Action) (actionFn, error) {
fn := func(w http.ResponseWriter, r *http.Request) error {
_, err := db.ExecContext(r.Context(), a.SQL)
return err
}
return fn, nil
}
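Taken together, newAction and newSQLAction turn a config-defined SQL statement into an HTTP endpoint. A self-contained sketch of the same pattern, using a hypothetical SQL action and an httptest server (this mirrors the code above rather than reusing the package's unexported wiring):

```golang
package main

import (
    "database/sql"
    "fmt"
    "log"
    "net/http"
    "net/http/httptest"

    _ "github.com/jackc/pgx/v4/stdlib"
)

// sqlAction mirrors newSQLAction above: execute a fixed SQL statement
// whenever the endpoint is hit, and report any execution error.
func sqlAction(db *sql.DB, query string) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        if _, err := db.ExecContext(r.Context(), query); err != nil {
            http.Error(w, err.Error(), http.StatusInternalServerError)
            return
        }
        fmt.Fprintln(w, "ok")
    })
}

func main() {
    // Hypothetical DSN and action SQL, for illustration only.
    db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
    if err != nil {
        log.Fatal(err)
    }

    srv := httptest.NewServer(sqlAction(db, "REFRESH MATERIALIZED VIEW sales_summary"))
    defer srv.Close()

    res, err := http.Get(srv.URL)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(res.Status)
}
```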

106
cmd/internal/serv/api.go Normal file
View File

@ -0,0 +1,106 @@
package serv
import (
"time"
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
"github.com/dosco/super-graph/core"
"github.com/spf13/viper"
)
const (
LogLevelNone int = iota
LogLevelInfo
LogLevelWarn
LogLevelError
LogLevelDebug
)
type Core = core.Config
// Config struct holds the Super Graph config values
type Config struct {
Core `mapstructure:",squash"`
Serv `mapstructure:",squash"`
cpath string
vi *viper.Viper
}
// Serv struct contains config values used by the Super Graph service
type Serv struct {
AppName string `mapstructure:"app_name"`
Production bool
LogLevel string `mapstructure:"log_level"`
HostPort string `mapstructure:"host_port"`
Host string
Port string
HTTPGZip bool `mapstructure:"http_compress"`
WebUI bool `mapstructure:"web_ui"`
EnableTracing bool `mapstructure:"enable_tracing"`
WatchAndReload bool `mapstructure:"reload_on_config_change"`
AuthFailBlock bool `mapstructure:"auth_fail_block"`
SeedFile string `mapstructure:"seed_file"`
MigrationsPath string `mapstructure:"migrations_path"`
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
DebugCORS bool `mapstructure:"cors_debug"`
Auth auth.Auth
Auths []auth.Auth
DB struct {
Type string
Host string
Port uint16
DBName string
User string
Password string
Schema string
PoolSize int32 `mapstructure:"pool_size"`
MaxRetries int `mapstructure:"max_retries"`
PingTimeout time.Duration `mapstructure:"ping_timeout"`
} `mapstructure:"database"`
Actions []Action
}
// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
Name string
Type string
Cookie string
CredsInHeader bool `mapstructure:"creds_in_header"`
Rails struct {
Version string
SecretKeyBase string `mapstructure:"secret_key_base"`
URL string
Password string
MaxIdle int `mapstructure:"max_idle"`
MaxActive int `mapstructure:"max_active"`
Salt string
SignSalt string `mapstructure:"sign_salt"`
AuthSalt string `mapstructure:"auth_salt"`
}
JWT struct {
Provider string
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
}
Header struct {
Name string
Value string
Exists bool
}
}
// Action struct contains config values for a Super Graph service action
type Action struct {
Name string
SQL string
AuthName string `mapstructure:"auth_name"`
}
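The `mapstructure:",squash"` tags are what let core and service settings share one flat config file: both embedded structs are decoded from the top level of the same document. A minimal, standalone sketch of that behavior with viper (the field names here are illustrative):

```golang
package main

import (
    "bytes"
    "fmt"

    "github.com/spf13/viper"
)

type Core struct {
    AllowListFile string `mapstructure:"allow_list_file"`
}

type Serv struct {
    AppName string `mapstructure:"app_name"`
    WebUI   bool   `mapstructure:"web_ui"`
}

// Both embedded structs are squashed, so their keys are read from
// the top level of a single YAML document.
type Config struct {
    Core `mapstructure:",squash"`
    Serv `mapstructure:",squash"`
}

func main() {
    yml := []byte("app_name: demo\nweb_ui: true\nallow_list_file: ./allow.list\n")

    vi := viper.New()
    vi.SetConfigType("yaml")
    if err := vi.ReadConfig(bytes.NewReader(yml)); err != nil {
        panic(err)
    }

    var c Config
    if err := vi.Unmarshal(&c); err != nil {
        panic(err)
    }
    fmt.Println(c.AppName, c.WebUI, c.AllowListFile) // demo true ./allow.list
}
```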

176
cmd/internal/serv/cmd.go Normal file
View File

@ -0,0 +1,176 @@
package serv
import (
"database/sql"
"fmt"
_log "log"
"os"
"runtime"
"github.com/spf13/cobra"
"go.uber.org/zap"
)
//go:generate rice embed-go
const (
serverName = "Super Graph"
)
var (
// These variables are set using -ldflags
version string
gitBranch string
lastCommitSHA string
lastCommitTime string
)
var (
log *_log.Logger // logger
zlog *zap.Logger // fast logger
logLevel int // log level
conf *Config // parsed config
confPath string // path to the config file
db *sql.DB // database connection pool
secretKey [32]byte // encryption key
)
func Cmd() {
log = _log.New(os.Stdout, "", 0)
zlog = zap.NewExample()
rootCmd := &cobra.Command{
Use: "super-graph",
Short: BuildDetails(),
}
rootCmd.AddCommand(&cobra.Command{
Use: "serv",
Short: "Run the super-graph service",
Run: cmdServ,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:create",
Short: "Create database",
Run: cmdDBCreate,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:drop",
Short: "Drop database",
Run: cmdDBDrop,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:seed",
Short: "Run the seed script to seed the database",
Run: cmdDBSeed,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:migrate",
Short: "Migrate the database",
Long: `Migrate the database to destination migration version.
Destination migration version can be one of the following value types:
Migrate to the most recent migration.
e.g. db:migrate up
Rollback the most recent migration.
e.g. db:migrate down
Migrate to a specific migration.
e.g. db:migrate 42
Migrate forward N steps.
e.g. db:migrate +3
Migrate backward N steps.
e.g. db:migrate -2
Redo previous N steps (migrate backward N steps then forward N steps).
e.g. db:migrate -+1
`,
Run: cmdDBMigrate,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:status",
Short: "Print current migration status",
Run: cmdDBStatus,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:new NAME",
Short: "Generate a new migration",
Long: "Generate a new migration with the next sequence number and provided name",
Run: cmdDBNew,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:setup",
Short: "Setup database",
Long: "This command will create, migrate and seed the database",
Run: cmdDBSetup,
})
rootCmd.AddCommand(&cobra.Command{
Use: "db:reset",
Short: "Reset database",
Long: "This command will drop, create, migrate and seed the database (won't run in production)",
Run: cmdDBReset,
})
rootCmd.AddCommand(&cobra.Command{
Use: "new APP-NAME",
Short: "Create a new application",
Long: "Generate all the required files to start on a new Super Graph app",
Run: cmdNew,
})
// rootCmd.AddCommand(&cobra.Command{
// Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
// Short: "Dump config to file",
// Long: "Dump current config to a file in the selected format",
// Run: cmdConfDump,
// })
rootCmd.AddCommand(&cobra.Command{
Use: "version",
Short: "Super Graph binary version information",
Run: cmdVersion,
})
rootCmd.PersistentFlags().StringVar(&confPath,
"path", "./config", "path to config files")
if err := rootCmd.Execute(); err != nil {
log.Fatalf("ERR %s", err)
}
}
func cmdVersion(cmd *cobra.Command, args []string) {
fmt.Printf("%s\n", BuildDetails())
}
func BuildDetails() string {
return fmt.Sprintf(`
Super Graph %v
For documentation, visit https://supergraph.dev
Commit SHA-1 : %v
Commit timestamp : %v
Branch : %v
Go version : %v
Licensed under the Apache Public License 2.0
Copyright 2020, Vikram Rangnekar.
`,
version,
lastCommitSHA,
lastCommitTime,
gitBranch,
runtime.Version())
}
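The version, gitBranch, lastCommitSHA and lastCommitTime variables above are injected at link time. The exact flags live in the Makefile's BUILD_FLAGS (not shown in this diff), but a build command of roughly this shape would populate them; the import path is an assumption based on the file's location:

```console
go build -ldflags "-X github.com/dosco/super-graph/cmd/internal/serv.version=0.13.0 \
  -X github.com/dosco/super-graph/cmd/internal/serv.gitBranch=master" \
  -o super-graph cmd/main.go
```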

View File

@ -0,0 +1,21 @@
package serv
// func cmdConfDump(cmd *cobra.Command, args []string) {
// if len(args) != 1 {
// cmd.Help() //nolint: errcheck
// os.Exit(1)
// }
// fname := fmt.Sprintf("%s.%s", config.GetConfigName(), args[0])
// conf, err := initConf()
// if err != nil {
// log.Fatalf("ERR failed to read config: %s", err)
// }
// if err := conf.WriteConfigAs(fname); err != nil {
// log.Fatalf("ERR failed to write config: %s", err)
// }
// log.Printf("INF config dumped to ./%s", fname)
// }

View File

@ -1,7 +1,6 @@
package serv
import (
"context"
"fmt"
"os"
"path"
@ -10,7 +9,7 @@ import (
"strings"
"time"
"github.com/dosco/super-graph/migrate"
"github.com/dosco/super-graph/cmd/internal/serv/internal/migrate"
"github.com/spf13/cobra"
)
@ -27,7 +26,7 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
cmdDBCreate(cmd, []string{})
cmdDBMigrate(cmd, []string{"up"})
sfile := path.Join(confPath, conf.SeedFile)
sfile := path.Join(conf.cpath, conf.SeedFile)
_, err := os.Stat(sfile)
if err == nil {
@ -36,61 +35,59 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
}
if !os.IsNotExist(err) {
errlog.Fatal().Err(err).Msgf("unable to check if '%s' exists", sfile)
log.Fatalf("ERR unable to check if '%s' exists: %s", sfile, err)
}
logger.Warn().Msgf("failed to read seed file '%s'", sfile)
log.Printf("WRN failed to read seed file '%s'", sfile)
}
func cmdDBReset(cmd *cobra.Command, args []string) {
initConfOnce()
if conf.Production {
errlog.Fatal().Msg("db:reset does not work in production")
return
log.Fatalln("ERR db:reset does not work in production")
}
cmdDBDrop(cmd, []string{})
cmdDBSetup(cmd, []string{})
}
func cmdDBCreate(cmd *cobra.Command, args []string) {
initConfOnce()
ctx := context.Background()
conn, err := initDB(conf, false)
db, err := initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close(ctx)
defer db.Close()
sql := fmt.Sprintf("CREATE DATABASE %s", conf.DB.DBName)
sql := fmt.Sprintf(`CREATE DATABASE "%s"`, conf.DB.DBName)
_, err = conn.Exec(ctx, sql)
_, err = db.Exec(sql)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to create database")
log.Fatalf("ERR failed to create database: %s", err)
}
logger.Info().Msgf("created database '%s'", conf.DB.DBName)
log.Printf("INF created database '%s'", conf.DB.DBName)
}
func cmdDBDrop(cmd *cobra.Command, args []string) {
initConfOnce()
ctx := context.Background()
conn, err := initDB(conf, false)
db, err := initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close(ctx)
defer db.Close()
sql := fmt.Sprintf(`DROP DATABASE IF EXISTS %s`, conf.DB.DBName)
sql := fmt.Sprintf(`DROP DATABASE IF EXISTS "%s"`, conf.DB.DBName)
_, err = conn.Exec(ctx, sql)
_, err = db.Exec(sql)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to create database")
log.Fatalf("ERR failed to drop database: %s", err)
}
logger.Info().Msgf("dropped database '%s'", conf.DB.DBName)
log.Printf("INF dropped database '%s'", conf.DB.DBName)
}
func cmdDBNew(cmd *cobra.Command, args []string) {
@ -104,8 +101,7 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
m, err := migrate.FindMigrations(conf.MigrationsPath)
if err != nil {
fmt.Fprintf(os.Stderr, "Error loading migrations:\n %v\n", err)
os.Exit(1)
log.Fatalf("ERR error loading migrations: %s", err)
}
mname := fmt.Sprintf("%d_%s.sql", len(m), name)
@ -114,17 +110,16 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
mpath := filepath.Join(conf.MigrationsPath, mname)
mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
log.Fatalf("ERR %s", err)
}
defer mfile.Close()
_, err = mfile.WriteString(newMigrationText)
if err != nil {
fmt.Fprintln(os.Stderr, err)
os.Exit(1)
log.Fatalf("ERR %s", err)
}
logger.Info().Msgf("created migration '%s'", mpath)
log.Printf("INF created migration '%s'", mpath)
}
func cmdDBMigrate(cmd *cobra.Command, args []string) {
@ -136,30 +131,30 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
initConfOnce()
dest := args[0]
conn, err := initDB(conf, true)
conn, err := initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close(context.Background())
defer conn.Close()
m, err := migrate.NewMigrator(conn, "schema_version")
if err != nil {
errlog.Fatal().Err(err).Msg("failed to initializing migrator")
log.Fatalf("ERR failed to initialize migrator: %s", err)
}
m.Data = getMigrationVars()
err = m.LoadMigrations(conf.MigrationsPath)
err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
if err != nil {
errlog.Fatal().Err(err).Msg("failed to load migrations")
log.Fatalf("ERR failed to load migrations: %s", err)
}
if len(m.Migrations) == 0 {
errlog.Fatal().Msg("No migrations found")
log.Fatalf("ERR no migrations found")
}
m.OnStart = func(sequence int32, name, direction, sql string) {
logger.Info().Msgf("%s executing %s %s\n%s\n\n",
log.Printf("INF %s executing %s %s\n%s\n\n",
time.Now().Format("2006-01-02 15:04:05"), name, direction, sql)
}
@ -174,7 +169,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
var n int64
n, err = strconv.ParseInt(d, 10, 32)
if err != nil {
errlog.Fatal().Err(err).Msg("invalid destination")
log.Fatalf("ERR invalid destination: %s", err)
}
return int32(n)
}
@ -203,58 +198,56 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
}
if err != nil {
logger.Fatal().Err(err).Send()
log.Fatalf("ERR %s", err)
// if err, ok := err.(m.MigrationPgError); ok {
// if err.Detail != "" {
// info.Err(err).Msg(err.Detail)
// log.Fatalf("ERR %s", err.Detail)
// }
// if err.Position != 0 {
// ele, err := ExtractErrorLine(err.Sql, int(err.Position))
// if err != nil {
// errlog.Fatal().Err(err).Send()
// log.Fatalf("ERR %s", err)
// }
// prefix := fmt.Sprintf()
// logger.Info().Msgf("line %d, %s%s", ele.LineNum, prefix, ele.Text)
// log.Fatalf("INF line %d, %s%s", ele.LineNum, ele.Text)
// }
// }
// os.Exit(1)
}
logger.Info().Msg("migration done")
log.Println("INF migration done")
}
func cmdDBStatus(cmd *cobra.Command, args []string) {
initConfOnce()
conn, err := initDB(conf, true)
db, err := initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
defer conn.Close(context.Background())
defer db.Close()
m, err := migrate.NewMigrator(conn, "schema_version")
m, err := migrate.NewMigrator(db, "schema_version")
if err != nil {
errlog.Fatal().Err(err).Msg("failed to initialize migrator")
log.Fatalf("ERR failed to initialize migrator: %s", err)
}
m.Data = getMigrationVars()
err = m.LoadMigrations(conf.MigrationsPath)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to load migrations")
log.Fatalf("ERR failed to load migrations: %s", err)
}
if len(m.Migrations) == 0 {
errlog.Fatal().Msg("no migrations found")
log.Fatalf("ERR no migrations found")
}
mver, err := m.GetCurrentVersion()
if err != nil {
errlog.Fatal().Err(err).Msg("failed to retrieve migration")
log.Fatalf("ERR failed to retrieve migration: %s", err)
}
var status string
@ -265,10 +258,8 @@ func cmdDBStatus(cmd *cobra.Command, args []string) {
status = "migration(s) pending"
}
fmt.Println("status: ", status)
fmt.Printf("version: %d of %d\n", mver, len(m.Migrations))
fmt.Println("host: ", conf.DB.Host)
fmt.Println("database:", conf.DB.DBName)
log.Printf("INF status: %s, version: %d of %d, host: %s, database: %s",
status, mver, len(m.Migrations), conf.DB.Host, conf.DB.DBName)
}
type ErrorLineExtract struct {
@ -311,3 +302,16 @@ func getMigrationVars() map[string]interface{} {
"env": strings.ToLower(os.Getenv("GO_ENV")),
}
}
func initConfOnce() {
var err error
if conf != nil {
return
}
conf, err = initConf()
if err != nil {
log.Fatalf("ERR failed to read config: %s", err)
}
}

View File

@ -90,15 +90,15 @@ func cmdNew(cmd *cobra.Command, args []string) {
return os.Mkdir(p, os.ModePerm)
})
ifNotExists(path.Join(appMigrationsPath, "100_init.sql"), func(p string) error {
if v, err := tmpl.get("100_init.sql"); err == nil {
ifNotExists(path.Join(appMigrationsPath, "0_init.sql"), func(p string) error {
if v, err := tmpl.get("0_init.sql"); err == nil {
return ioutil.WriteFile(p, v, 0644)
} else {
return err
}
})
logger.Info().Msgf("app '%s' initialized", name)
log.Printf("INF app '%s' initialized", name)
}
type Templ struct {
@ -107,7 +107,7 @@ type Templ struct {
}
func newTempl(data map[string]string) *Templ {
return &Templ{rice.MustFindBox("../tmpl"), data}
return &Templ{rice.MustFindBox("./tmpl"), data}
}
func (t *Templ) get(name string) ([]byte, error) {
@ -133,18 +133,18 @@ func ifNotExists(filePath string, doFn func(string) error) {
_, err := os.Stat(filePath)
if err == nil {
logger.Info().Err(err).Msgf("create skipped '%s' exists", filePath)
log.Printf("ERR create skipped '%s' exists", filePath)
return
}
if !os.IsNotExist(err) {
errlog.Fatal().Err(err).Msgf("unable to check if '%s' exists", filePath)
log.Fatalf("ERR unable to check if '%s' exists", filePath)
}
err = doFn(filePath)
if err != nil {
errlog.Fatal().Err(err).Msgf("unable to create '%s'", filePath)
log.Fatalf("ERR unable to create '%s'", filePath)
}
logger.Info().Msgf("created '%s'", filePath)
log.Printf("INF created '%s'", filePath)
}

View File

@ -1,46 +1,57 @@
package serv
import (
"bytes"
"context"
"encoding/csv"
"encoding/json"
"fmt"
"io"
"io/ioutil"
"math/rand"
"os"
"path"
"strconv"
"strings"
"github.com/brianvoe/gofakeit"
"github.com/dop251/goja"
"github.com/dosco/super-graph/core"
"github.com/spf13/cobra"
"github.com/valyala/fasttemplate"
)
func cmdDBSeed(cmd *cobra.Command, args []string) {
var err error
if conf, err = initConf(); err != nil {
errlog.Fatal().Err(err).Msg("failed to read config")
log.Fatalf("ERR failed to read config: %s", err)
}
conf.Production = false
db, err = initDBPool(conf)
db, err = initDB(conf)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to connect to database")
log.Fatalf("ERR failed to connect to database: %s", err)
}
initCompiler()
sfile := path.Join(conf.cpath, conf.SeedFile)
sfile := path.Join(confPath, conf.SeedFile)
b, err := ioutil.ReadFile(path.Join(confPath, conf.SeedFile))
b, err := ioutil.ReadFile(sfile)
if err != nil {
errlog.Fatal().Err(err).Msgf("failed to read seed file '%s'", sfile)
log.Fatalf("ERR failed to read seed file %s: %s", sfile, err)
}
sg, err = core.NewSuperGraph(&conf.Core, db)
if err != nil {
log.Fatalf("ERR failed to initialize Super Graph: %s", err)
}
graphQLFn := func(query string, data interface{}, opt map[string]string) map[string]interface{} {
return graphQLFunc(sg, query, data, opt)
}
vm := goja.New()
vm.Set("graphql", graphQLFunc)
vm.Set("graphql", graphQLFn)
//vm.Set("import_csv", importCSV)
console := vm.NewObject()
console.Set("log", logFunc) //nolint: errcheck
@ -52,82 +63,147 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
_, err = vm.RunScript("seed.js", string(b))
if err != nil {
errlog.Fatal().Err(err).Msg("failed to execute script")
log.Fatalf("ERR failed to execute script: %s", err)
}
logger.Info().Msg("seed script done")
log.Println("INF seed script done")
}
//func runFunc(call goja.FunctionCall) {
func graphQLFunc(query string, data interface{}, opt map[string]string) map[string]interface{} {
vars, err := json.Marshal(data)
if err != nil {
errlog.Fatal().Err(err).Send()
}
c := context.Background()
// func runFunc(call goja.FunctionCall) {
func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
ct := context.Background()
if v, ok := opt["user_id"]; ok && len(v) != 0 {
c = context.WithValue(c, userIDKey, v)
ct = context.WithValue(ct, core.UserIDKey, v)
}
var role string
// var role string
if v, ok := opt["role"]; ok && len(v) != 0 {
role = v
} else {
role = "user"
// if v, ok := opt["role"]; ok && len(v) != 0 {
// role = v
// } else {
// role = "user"
// }
var vars []byte
var err error
if vars, err = json.Marshal(data); err != nil {
log.Fatalf("ERR %s", err)
}
stmts, err := buildRoleStmt([]byte(query), vars, role)
res, err := sg.GraphQL(ct, query, vars)
if err != nil {
errlog.Fatal().Err(err).Msg("graphql query failed")
}
st := stmts[0]
buf := &bytes.Buffer{}
t := fasttemplate.New(st.sql, openVar, closeVar)
_, err = t.ExecuteFunc(buf, argMap(c, vars))
if err != nil {
errlog.Fatal().Err(err).Send()
}
finalSQL := buf.String()
tx, err := db.Begin(c)
if err != nil {
errlog.Fatal().Err(err).Send()
}
defer tx.Rollback(c) //nolint: errcheck
if conf.DB.SetUserID {
if err := setLocalUserID(c, tx); err != nil {
errlog.Fatal().Err(err).Send()
}
}
var root []byte
if err = tx.QueryRow(context.Background(), finalSQL).Scan(&root); err != nil {
errlog.Fatal().Err(err).Msg("sql query failed")
}
if err := tx.Commit(c); err != nil {
errlog.Fatal().Err(err).Send()
log.Fatalf("ERR %s", err)
}
val := make(map[string]interface{})
err = json.Unmarshal(root, &val)
if err != nil {
errlog.Fatal().Err(err).Send()
if err = json.Unmarshal(res.Data, &val); err != nil {
log.Fatalf("ERR %s", err)
}
return val
}
type csvSource struct {
rows [][]string
i int
}
func NewCSVSource(filename string) (*csvSource, error) {
f, err := os.Open(filename)
if err != nil {
return nil, err
}
defer f.Close()
r := csv.NewReader(f)
rows, err := r.ReadAll()
if err != nil {
return nil, err
}
return &csvSource{rows: rows}, nil
}
func (c *csvSource) Next() bool {
return c.i < len(c.rows)
}
func (c *csvSource) Values() ([]interface{}, error) {
var vals []interface{}
var err error
for _, v := range c.rows[c.i] {
switch {
case len(v) == 0:
vals = append(vals, "")
case isDigit(v):
var n int
if n, err = strconv.Atoi(v); err == nil {
vals = append(vals, n)
}
case strings.EqualFold(v, "true") || strings.EqualFold(v, "false"):
var b bool
if b, err = strconv.ParseBool(v); err == nil {
vals = append(vals, b)
}
default:
vals = append(vals, v)
}
if err != nil {
return nil, fmt.Errorf("%w (line no %d)", err, c.i)
}
}
c.i++
return vals, nil
}
func isDigit(v string) bool {
for i := range v {
if v[i] < '0' || v[i] > '9' {
return false
}
}
return true
}
func (c *csvSource) Err() error {
return nil
}
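A short usage sketch for the csvSource iterator above (hypothetical file name; note the first row of the file is typically a header, which the commented-out importCSV below consumes via Values() to collect column names):

```golang
package serv

import "fmt"

// printCSV is a minimal sketch: iterate a CSV file with csvSource,
// letting Values() coerce each cell to string, int, or bool.
func printCSV(filename string) error {
    src, err := NewCSVSource(filename)
    if err != nil {
        return err
    }
    for src.Next() {
        vals, err := src.Values()
        if err != nil {
            return err
        }
        fmt.Println(vals...)
    }
    return src.Err()
}
```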
// func importCSV(table, filename string) int64 {
// if filename[0] != '/' {
// filename = path.Join(conf.ConfigPathUsed(), filename)
// }
// s, err := NewCSVSource(filename)
// if err != nil {
// log.Fatalf("ERR %s", err)
// }
// var cols []string
// colval, _ := s.Values()
// for _, c := range colval {
// cols = append(cols, c.(string))
// }
// n, err := db.Exec(fmt.Sprintf("COPY %s FROM STDIN WITH "),
// cols,
// s)
// if err != nil {
// err = fmt.Errorf("%w (line no %d)", err, s.i)
// log.Fatalf("ERR %s", err)
// }
// return n
// }
//nolint: errcheck
func logFunc(args ...interface{}) {
for _, arg := range args {
@ -145,6 +221,17 @@ func logFunc(args ...interface{}) {
}
}
func avatarURL(size int) string {
if size == 0 {
size = 200
}
return fmt.Sprintf("https://i.pravatar.cc/%d?%d", size, rand.Intn(5000))
}
func imageURL(width int, height int) string {
return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
}
//nolint: errcheck
func setFakeFuncs(f *goja.Object) {
gofakeit.Seed(0)
@ -222,7 +309,8 @@ func setFakeFuncs(f *goja.Object) {
// Internet
f.Set("url", gofakeit.URL)
f.Set("image_url", gofakeit.ImageURL)
f.Set("image_url", imageURL)
f.Set("avatar_url", avatarURL)
f.Set("domain_name", gofakeit.DomainName)
f.Set("domain_suffix", gofakeit.DomainSuffix)
f.Set("ipv4_address", gofakeit.IPv4Address)

View File

@ -0,0 +1,37 @@
package serv
import (
"github.com/dosco/super-graph/core"
"github.com/spf13/cobra"
)
var (
sg *core.SuperGraph
)
func cmdServ(cmd *cobra.Command, args []string) {
var err error
conf, err = initConf()
if err != nil {
fatalInProd(err, "failed to read config")
}
initWatcher()
db, err = initDB(conf)
if err != nil {
fatalInProd(err, "failed to connect to database")
}
// if conf != nil && db != nil {
// initResolvers()
// }
sg, err = core.NewSuperGraph(&conf.Core, db)
if err != nil {
fatalInProd(err, "failed to initialize Super Graph")
}
startHTTP()
}

115
cmd/internal/serv/config.go Normal file
View File

@ -0,0 +1,115 @@
package serv
import (
"fmt"
"os"
"path"
"strings"
"github.com/spf13/viper"
)
// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
// environment variable. This is the best way to create a new Super Graph config.
func ReadInConfig(configFile string) (*Config, error) {
cpath := path.Dir(configFile)
cfile := path.Base(configFile)
vi := newViper(cpath, cfile)
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
inherits := vi.GetString("inherits")
if len(inherits) != 0 {
vi = newViper(cpath, inherits)
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
if vi.IsSet("inherits") {
return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
inherits,
vi.GetString("inherits"))
}
vi.SetConfigName(cfile)
if err := vi.MergeInConfig(); err != nil {
return nil, err
}
}
c := &Config{cpath: cpath, vi: vi}
if err := vi.Unmarshal(&c); err != nil {
return nil, fmt.Errorf("failed to decode config, %v", err)
}
if len(c.Core.AllowListFile) == 0 {
c.Core.AllowListFile = path.Join(cpath, "allow.list")
}
return c, nil
}
func newViper(configPath, configFile string) *viper.Viper {
vi := viper.New()
vi.SetEnvPrefix("SG")
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
vi.AutomaticEnv()
vi.AddConfigPath(configPath)
vi.SetConfigName(configFile)
vi.AddConfigPath("./config")
vi.SetDefault("host_port", "0.0.0.0:8080")
vi.SetDefault("web_ui", false)
vi.SetDefault("enable_tracing", false)
vi.SetDefault("auth_fail_block", "always")
vi.SetDefault("seed_file", "seed.js")
vi.SetDefault("database.type", "postgres")
vi.SetDefault("database.host", "localhost")
vi.SetDefault("database.port", 5432)
vi.SetDefault("database.user", "postgres")
vi.SetDefault("database.schema", "public")
vi.SetDefault("env", "development")
vi.BindEnv("env", "GO_ENV") //nolint: errcheck
vi.BindEnv("host", "HOST") //nolint: errcheck
vi.BindEnv("port", "PORT") //nolint: errcheck
vi.SetDefault("auth.rails.max_idle", 80)
vi.SetDefault("auth.rails.max_active", 12000)
return vi
}
func GetConfigName() string {
if len(os.Getenv("GO_ENV")) == 0 {
return "dev"
}
ge := strings.ToLower(os.Getenv("GO_ENV"))
switch {
case strings.HasPrefix(ge, "pro"):
return "prod"
case strings.HasPrefix(ge, "sta"):
return "stage"
case strings.HasPrefix(ge, "tes"):
return "test"
case strings.HasPrefix(ge, "dev"):
return "dev"
}
return ge
}
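The SetEnvPrefix/SetEnvKeyReplacer/AutomaticEnv trio above means any nested key can be overridden from the environment: dots become underscores and the SG prefix is prepended. A standalone sketch of that mechanism:

```golang
package main

import (
    "fmt"
    "os"
    "strings"

    "github.com/spf13/viper"
)

func main() {
    // SG_DATABASE_HOST overrides the database.host key.
    os.Setenv("SG_DATABASE_HOST", "db.internal")

    vi := viper.New()
    vi.SetEnvPrefix("SG")
    vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
    vi.AutomaticEnv()
    vi.SetDefault("database.host", "localhost")

    fmt.Println(vi.GetString("database.host")) // db.internal
}
```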

View File

@ -0,0 +1,7 @@
package serv
// func (c *coreContext) handleReq(w io.Writer, req *http.Request) error {
// return nil
// }

View File

@ -4,7 +4,7 @@ package serv
func Fuzz(data []byte) int {
gql := string(data)
gqlName(gql)
QueryName(gql)
gqlHash(gql, nil, "")
return 1

View File

@ -10,7 +10,6 @@ func TestFuzzCrashers(t *testing.T) {
}
for _, f := range crashers {
_ = gqlName(f)
gqlHash(f, nil, "")
}
}

View File

@ -0,0 +1,25 @@
package serv
import (
"context"
"net/http"
)
var healthyResponse = []byte("All's Well")
func health(w http.ResponseWriter, _ *http.Request) {
ct, cancel := context.WithTimeout(context.Background(), conf.DB.PingTimeout)
defer cancel()
if err := db.PingContext(ct); err != nil {
log.Printf("ERR error pinging database: %s", err)
w.WriteHeader(http.StatusInternalServerError)
return
}
if _, err := w.Write(healthyResponse); err != nil {
log.Printf("ERR error writing healthy response: %s", err)
w.WriteHeader(http.StatusInternalServerError)
return
}
}

124
cmd/internal/serv/http.go Normal file
View File

@ -0,0 +1,124 @@
package serv
import (
"encoding/json"
"errors"
"io"
"io/ioutil"
"net/http"
"strings"
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
"github.com/dosco/super-graph/core"
"github.com/rs/cors"
"go.uber.org/zap"
)
const (
maxReadBytes = 100000 // 100 KB
introspectionQuery = "IntrospectionQuery"
)
var (
errUnauthorized = errors.New("not authorized")
)
type gqlReq struct {
OpName string `json:"operationName"`
Query string `json:"query"`
Vars json.RawMessage `json:"variables"`
}
type errorResp struct {
Error error `json:"error"`
}
func apiV1Handler() http.Handler {
h, err := auth.WithAuth(http.HandlerFunc(apiV1), &conf.Auth)
if err != nil {
log.Fatalf("ERR %s", err)
}
if len(conf.AllowedOrigins) != 0 {
c := cors.New(cors.Options{
AllowedOrigins: conf.AllowedOrigins,
AllowCredentials: true,
Debug: conf.DebugCORS,
})
h = c.Handler(h)
}
return h
}
func apiV1(w http.ResponseWriter, r *http.Request) {
ct := r.Context()
//nolint: errcheck
if conf.AuthFailBlock && !auth.IsAuth(ct) {
renderErr(w, errUnauthorized, nil)
return
}
b, err := ioutil.ReadAll(io.LimitReader(r.Body, maxReadBytes))
if err != nil {
renderErr(w, err, nil)
return
}
defer r.Body.Close()
req := gqlReq{}
err = json.Unmarshal(b, &req)
if err != nil {
renderErr(w, err, nil)
return
}
if strings.EqualFold(req.OpName, introspectionQuery) {
introspect(w)
return
}
res, err := sg.GraphQL(ct, req.Query, req.Vars)
if res != nil && logLevel >= LogLevelDebug {
log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
}
if err != nil {
renderErr(w, err, res)
return
}
json.NewEncoder(w).Encode(res)
if logLevel >= LogLevelInfo {
zlog.Info("success",
zap.String("op", res.Operation()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
)
}
}
//nolint: errcheck
func renderErr(w http.ResponseWriter, err error, res *core.Result) {
if err == errUnauthorized {
w.WriteHeader(http.StatusUnauthorized)
}
json.NewEncoder(w).Encode(&errorResp{err})
if logLevel >= LogLevelError {
if res != nil {
zlog.Error(err.Error(),
zap.String("op", res.Operation()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
)
} else {
zlog.Error(err.Error())
}
}
}
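On the wire, apiV1 expects a JSON body shaped like gqlReq (operationName, query, variables). A minimal client sketch against a hypothetical local instance:

```golang
package main

import (
    "bytes"
    "encoding/json"
    "fmt"
    "io/ioutil"
    "log"
    "net/http"
)

func main() {
    body, err := json.Marshal(map[string]interface{}{
        "operationName": "getPosts",
        "query":         "query getPosts { posts { id title } }",
        "variables":     map[string]interface{}{},
    })
    if err != nil {
        log.Fatal(err)
    }

    // The route is mounted at /api/v1/graphql (see serv.go below).
    res, err := http.Post("http://localhost:8080/api/v1/graphql",
        "application/json", bytes.NewReader(body))
    if err != nil {
        log.Fatal(err)
    }
    defer res.Body.Close()

    b, err := ioutil.ReadAll(res.Body)
    if err != nil {
        log.Fatal(err)
    }
    fmt.Println(string(b))
}
```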

154
cmd/internal/serv/init.go Normal file
View File

@ -0,0 +1,154 @@
package serv
import (
"database/sql"
"path"
"time"
"github.com/jackc/pgx/v4"
"github.com/jackc/pgx/v4/stdlib"
//_ "github.com/jackc/pgx/v4/stdlib"
)
func initConf() (*Config, error) {
c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
if err != nil {
return nil, err
}
switch c.LogLevel {
case "debug":
logLevel = LogLevelDebug
case "error":
logLevel = LogLevelError
case "warn":
logLevel = LogLevelWarn
case "info":
logLevel = LogLevelInfo
default:
logLevel = LogLevelNone
}
// Auths: validate and sanitize
am := make(map[string]struct{})
for i := 0; i < len(c.Auths); i++ {
a := &c.Auths[i]
a.Name = sanitize(a.Name)
if _, ok := am[a.Name]; ok {
c.Auths = append(c.Auths[:i], c.Auths[i+1:]...)
log.Printf("WRN duplicate auth found: %s", a.Name)
}
am[a.Name] = struct{}{}
}
// Actions: validate and sanitize
axm := make(map[string]struct{})
for i := 0; i < len(c.Actions); i++ {
a := &c.Actions[i]
a.Name = sanitize(a.Name)
a.AuthName = sanitize(a.AuthName)
if _, ok := axm[a.Name]; ok {
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
log.Printf("WRN duplicate action found: %s", a.Name)
}
if _, ok := am[a.AuthName]; !ok {
c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
log.Printf("WRN invalid auth_name '%s' for auth: %s", a.AuthName, a.Name)
}
axm[a.Name] = struct{}{}
}
var anonFound bool
for _, r := range c.Roles {
if sanitize(r.Name) == "anon" {
anonFound = true
}
}
if !anonFound {
log.Printf("WRN unauthenticated requests will be blocked. no role 'anon' defined")
c.AuthFailBlock = false
}
return c, nil
}
func initDB(c *Config) (*sql.DB, error) {
var db *sql.DB
var err error
// cs := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s",
// c.DB.Host, c.DB.Port,
// c.DB.User, c.DB.Password,
// c.DB.DBName)
// fmt.Println(">>", cs)
// for i := 1; i < 10; i++ {
// db, err = sql.Open("pgx", cs)
// if err == nil {
// break
// }
// time.Sleep(time.Duration(i*100) * time.Millisecond)
// }
// if err != nil {
// return nil, err
// }
// return db, nil
config, _ := pgx.ParseConfig("")
config.Host = c.DB.Host
config.Port = c.DB.Port
config.Database = c.DB.DBName
config.User = c.DB.User
config.Password = c.DB.Password
config.RuntimeParams = map[string]string{
"application_name": c.AppName,
"search_path": c.DB.Schema,
}
// switch c.LogLevel {
// case "debug":
// config.LogLevel = pgx.LogLevelDebug
// case "info":
// config.LogLevel = pgx.LogLevelInfo
// case "warn":
// config.LogLevel = pgx.LogLevelWarn
// case "error":
// config.LogLevel = pgx.LogLevelError
// default:
// config.LogLevel = pgx.LogLevelNone
// }
//config.Logger = NewSQLLogger(logger)
// if c.DB.MaxRetries != 0 {
// opt.MaxRetries = c.DB.MaxRetries
// }
// if c.DB.PoolSize != 0 {
// config.MaxConns = conf.DB.PoolSize
// }
for i := 1; i < 10; i++ {
db = stdlib.OpenDB(*config)
if db != nil {
break
}
time.Sleep(time.Duration(i*100) * time.Millisecond)
}
if err != nil {
return nil, err
}
return db, nil
}
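Note that stdlib.OpenDB only constructs a lazy *sql.DB handle and never returns nil, so the loop above cannot actually detect a down database. A hedged sketch of a connectivity check that would, keeping the same linear backoff:

```golang
package serv

import (
    "database/sql"
    "time"
)

// waitForDB pings with linear backoff; Ping is what actually dials,
// since opening a *sql.DB by itself performs no I/O.
func waitForDB(db *sql.DB) error {
    var err error
    for i := 1; i < 10; i++ {
        if err = db.Ping(); err == nil {
            return nil
        }
        time.Sleep(time.Duration(i*100) * time.Millisecond)
    }
    return err
}
```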

View File

@ -0,0 +1,127 @@
package auth
import (
"context"
"fmt"
"net/http"
"github.com/dosco/super-graph/core"
)
// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
Name string
Type string
Cookie string
CredsInHeader bool `mapstructure:"creds_in_header"`
Rails struct {
Version string
SecretKeyBase string `mapstructure:"secret_key_base"`
URL string
Password string
MaxIdle int `mapstructure:"max_idle"`
MaxActive int `mapstructure:"max_active"`
Salt string
SignSalt string `mapstructure:"sign_salt"`
AuthSalt string `mapstructure:"auth_salt"`
}
JWT struct {
Provider string
Secret string
PubKeyFile string `mapstructure:"public_key_file"`
PubKeyType string `mapstructure:"public_key_type"`
}
Header struct {
Name string
Value string
Exists bool
}
}
func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
return func(w http.ResponseWriter, r *http.Request) {
ctx := r.Context()
userIDProvider := r.Header.Get("X-User-ID-Provider")
if len(userIDProvider) != 0 {
ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
}
userID := r.Header.Get("X-User-ID")
if len(userID) != 0 {
ctx = context.WithValue(ctx, core.UserIDKey, userID)
}
userRole := r.Header.Get("X-User-Role")
if len(userRole) != 0 {
ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
}
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
hdr := ac.Header
if len(hdr.Name) == 0 {
return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
}
if !hdr.Exists && len(hdr.Value) == 0 {
return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
}
return func(w http.ResponseWriter, r *http.Request) {
var fo1 bool
value := r.Header.Get(hdr.Name)
switch {
case hdr.Exists:
fo1 = (len(value) == 0)
default:
fo1 = (value != hdr.Value)
}
if fo1 {
http.Error(w, "401 unauthorized", http.StatusUnauthorized)
return
}
next.ServeHTTP(w, r)
}, nil
}
func WithAuth(next http.Handler, ac *Auth) (http.Handler, error) {
var err error
if ac.CredsInHeader {
next, err = SimpleHandler(ac, next)
}
if err != nil {
return nil, err
}
switch ac.Type {
case "rails":
return RailsHandler(ac, next)
case "jwt":
return JwtHandler(ac, next)
case "header":
return HeaderHandler(ac, next)
}
return next, nil
}
func IsAuth(ct context.Context) bool {
return ct.Value(core.UserIDKey) != nil
}
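HeaderHandler above rejects a request unless the configured header matches the expected value (or merely exists, when Exists is set). A standalone httptest sketch of the value-match case, with a hypothetical header name:

```golang
package main

import (
    "fmt"
    "net/http"
    "net/http/httptest"
)

// requireHeader mirrors HeaderHandler's value-match branch: reject the
// request unless the named header carries the expected value.
func requireHeader(name, value string, next http.Handler) http.Handler {
    return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        if r.Header.Get(name) != value {
            http.Error(w, "401 unauthorized", http.StatusUnauthorized)
            return
        }
        next.ServeHTTP(w, r)
    })
}

func main() {
    ok := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
        fmt.Fprintln(w, "authorized")
    })

    srv := httptest.NewServer(requireHeader("X-API-Key", "s3cret", ok))
    defer srv.Close()

    req, _ := http.NewRequest("GET", srv.URL, nil)
    req.Header.Set("X-API-Key", "s3cret")

    res, err := http.DefaultClient.Do(req)
    if err != nil {
        panic(err)
    }
    fmt.Println(res.StatusCode) // 200
}
```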

View File

@ -1,4 +1,4 @@
package serv
package auth
import (
"context"
@ -7,6 +7,7 @@ import (
"strings"
jwt "github.com/dgrijalva/jwt-go"
"github.com/dosco/super-graph/core"
)
const (
@ -14,18 +15,18 @@ const (
jwtAuth0 int = iota + 1
)
func jwtHandler(next http.Handler) http.HandlerFunc {
func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
var key interface{}
var jwtProvider int
cookie := conf.Auth.Cookie
cookie := ac.Cookie
if conf.Auth.JWT.Provider == "auth0" {
if ac.JWT.Provider == "auth0" {
jwtProvider = jwtAuth0
}
secret := conf.Auth.JWT.Secret
publicKeyFile := conf.Auth.JWT.PubKeyFile
secret := ac.JWT.Secret
publicKeyFile := ac.JWT.PubKeyFile
switch {
case len(secret) != 0:
@ -34,10 +35,10 @@ func jwtHandler(next http.Handler) http.HandlerFunc {
case len(publicKeyFile) != 0:
kd, err := ioutil.ReadFile(publicKeyFile)
if err != nil {
errlog.Fatal().Err(err).Send()
return nil, err
}
switch conf.Auth.JWT.PubKeyType {
switch ac.JWT.PubKeyType {
case "ecdsa":
key, err = jwt.ParseECPublicKeyFromPEM(kd)
@ -50,7 +51,7 @@ func jwtHandler(next http.Handler) http.HandlerFunc {
}
if err != nil {
errlog.Fatal().Err(err).Send()
return nil, err
}
}
@ -88,11 +89,11 @@ func jwtHandler(next http.Handler) http.HandlerFunc {
if jwtProvider == jwtAuth0 {
sub := strings.Split(claims.Subject, "|")
if len(sub) == 2 {
ctx = context.WithValue(ctx, userIDProviderKey, sub[0])
ctx = context.WithValue(ctx, userIDKey, sub[1])
ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
}
} else {
ctx = context.WithValue(ctx, userIDKey, claims.Subject)
ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
}
next.ServeHTTP(w, r.WithContext(ctx))
@ -100,5 +101,5 @@ func jwtHandler(next http.Handler) http.HandlerFunc {
}
next.ServeHTTP(w, r)
}
}, nil
}

View File

@ -0,0 +1,190 @@
package auth
import (
"context"
"errors"
"fmt"
"net/http"
"net/url"
"strings"
"github.com/bradfitz/gomemcache/memcache"
"github.com/dosco/super-graph/cmd/internal/serv/internal/rails"
"github.com/dosco/super-graph/core"
"github.com/garyburd/redigo/redis"
)
func RailsHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
ru := ac.Rails.URL
if strings.HasPrefix(ru, "memcache:") {
return RailsMemcacheHandler(ac, next)
}
if strings.HasPrefix(ru, "redis:") {
return RailsRedisHandler(ac, next)
}
return RailsCookieHandler(ac, next)
}
func RailsRedisHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
return nil, fmt.Errorf("no auth.cookie defined")
}
if len(ac.Rails.URL) == 0 {
return nil, fmt.Errorf("no auth.rails.url defined")
}
rp := &redis.Pool{
MaxIdle: ac.Rails.MaxIdle,
MaxActive: ac.Rails.MaxActive,
Dial: func() (redis.Conn, error) {
c, err := redis.DialURL(ac.Rails.URL)
if err != nil {
return nil, err
}
pwd := ac.Rails.Password
if len(pwd) != 0 {
if _, err := c.Do("AUTH", pwd); err != nil {
return nil, err
}
}
return c, nil
},
}
return func(w http.ResponseWriter, r *http.Request) {
ck, err := r.Cookie(cookie)
if err != nil {
next.ServeHTTP(w, r)
return
}
key := fmt.Sprintf("session:%s", ck.Value)
sessionData, err := redis.Bytes(rp.Get().Do("GET", key))
if err != nil {
next.ServeHTTP(w, r)
return
}
userID, err := rails.ParseCookie(string(sessionData))
if err != nil {
next.ServeHTTP(w, r)
return
}
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func RailsMemcacheHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
return nil, fmt.Errorf("no auth.cookie defined")
}
if len(ac.Rails.URL) == 0 {
return nil, fmt.Errorf("no auth.rails.url defined")
}
rURL, err := url.Parse(ac.Rails.URL)
if err != nil {
return nil, err
}
mc := memcache.New(rURL.Host)
return func(w http.ResponseWriter, r *http.Request) {
ck, err := r.Cookie(cookie)
if err != nil {
next.ServeHTTP(w, r)
return
}
key := fmt.Sprintf("session:%s", ck.Value)
item, err := mc.Get(key)
if err != nil {
next.ServeHTTP(w, r)
return
}
userID, err := rails.ParseCookie(string(item.Value))
if err != nil {
next.ServeHTTP(w, r)
return
}
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func RailsCookieHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
cookie := ac.Cookie
if len(cookie) == 0 {
return nil, fmt.Errorf("no auth.cookie defined")
}
ra, err := railsAuth(ac)
if err != nil {
return nil, err
}
return func(w http.ResponseWriter, r *http.Request) {
ck, err := r.Cookie(cookie)
if err != nil || len(ck.Value) == 0 {
// logger.Warn().Err(err).Msg("rails cookie missing")
next.ServeHTTP(w, r)
return
}
userID, err := ra.ParseCookie(ck.Value)
if err != nil {
// logger.Warn().Err(err).Msg("failed to parse rails cookie")
next.ServeHTTP(w, r)
return
}
ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
next.ServeHTTP(w, r.WithContext(ctx))
}, nil
}
func railsAuth(ac *Auth) (*rails.Auth, error) {
secret := ac.Rails.SecretKeyBase
if len(secret) == 0 {
return nil, errors.New("no auth.rails.secret_key_base defined")
}
version := ac.Rails.Version
if len(version) == 0 {
return nil, errors.New("no auth.rails.version defined")
}
ra, err := rails.NewAuth(version, secret)
if err != nil {
return nil, err
}
if len(ac.Rails.Salt) != 0 {
ra.Salt = ac.Rails.Salt
}
if len(ac.Rails.SignSalt) != 0 {
ra.SignSalt = ac.Rails.SignSalt
}
if len(ac.Rails.AuthSalt) != 0 {
ra.AuthSalt = ac.Rails.AuthSalt
}
return ra, nil
}

View File

@ -3,6 +3,7 @@ package migrate
import (
"bytes"
"context"
"database/sql"
"fmt"
"io/ioutil"
"os"
@ -12,7 +13,6 @@ import (
"strings"
"text/template"
"github.com/jackc/pgx/v4"
"github.com/pkg/errors"
)
@ -62,7 +62,7 @@ type MigratorOptions struct {
}
type Migrator struct {
conn *pgx.Conn
db *sql.DB
versionTable string
options *MigratorOptions
Migrations []*Migration
@ -70,12 +70,12 @@ type Migrator struct {
Data map[string]interface{} // Data available to use in migrations
}
func NewMigrator(conn *pgx.Conn, versionTable string) (m *Migrator, err error) {
return NewMigratorEx(conn, versionTable, &MigratorOptions{MigratorFS: defaultMigratorFS{}})
func NewMigrator(db *sql.DB, versionTable string) (m *Migrator, err error) {
return NewMigratorEx(db, versionTable, &MigratorOptions{MigratorFS: defaultMigratorFS{}})
}
func NewMigratorEx(conn *pgx.Conn, versionTable string, opts *MigratorOptions) (m *Migrator, err error) {
m = &Migrator{conn: conn, versionTable: versionTable, options: opts}
func NewMigratorEx(db *sql.DB, versionTable string, opts *MigratorOptions) (m *Migrator, err error) {
m = &Migrator{db: db, versionTable: versionTable, options: opts}
err = m.ensureSchemaVersionTableExists()
m.Migrations = make([]*Migration, 0)
m.Data = make(map[string]interface{})
@ -254,14 +254,13 @@ func (m *Migrator) Migrate() error {
// MigrateTo migrates to targetVersion
func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
ctx := context.Background()
// Lock to ensure multiple migrations cannot occur simultaneously
lockNum := int64(9628173550095224) // arbitrary random number
if _, lockErr := m.conn.Exec(ctx, "select pg_advisory_lock($1)", lockNum); lockErr != nil {
if _, lockErr := m.db.Exec("select pg_try_advisory_lock($1)", lockNum); lockErr != nil {
return lockErr
}
defer func() {
_, unlockErr := m.conn.Exec(ctx, "select pg_advisory_unlock($1)", lockNum)
_, unlockErr := m.db.Exec("select pg_advisory_unlock($1)", lockNum)
if err == nil && unlockErr != nil {
err = unlockErr
}
@ -310,11 +309,11 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
ctx := context.Background()
tx, err := m.conn.Begin(ctx)
tx, err := m.db.BeginTx(ctx, nil)
if err != nil {
return err
}
defer tx.Rollback(ctx) //nolint: errcheck
defer tx.Rollback() //nolint: errcheck
// Fire on start callback
if m.OnStart != nil {
@ -322,7 +321,7 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
}
// Execute the migration
_, err = tx.Exec(ctx, sql)
_, err = tx.Exec(sql)
if err != nil {
// if err, ok := err.(pgx.PgError); ok {
// return MigrationPgError{Sql: sql, PgError: err}
@ -331,17 +330,17 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
}
// Reset all database connection settings. Important to do before updating version as search_path may have been changed.
if _, err := tx.Exec(ctx, "reset all"); err != nil {
return err
}
// if _, err := tx.Exec(ctx, "reset all"); err != nil {
// return err
// }
// Add one to the version
_, err = tx.Exec(ctx, "update "+m.versionTable+" set version=$1", sequence)
_, err = tx.Exec("update "+m.versionTable+" set version=$1", sequence)
if err != nil {
return err
}
err = tx.Commit(ctx)
err = tx.Commit()
if err != nil {
return err
}
@ -353,14 +352,13 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
}
func (m *Migrator) GetCurrentVersion() (v int32, err error) {
err = m.conn.QueryRow(context.Background(),
"select version from "+m.versionTable).Scan(&v)
err = m.db.QueryRow("select version from " + m.versionTable).Scan(&v)
return v, err
}
func (m *Migrator) ensureSchemaVersionTableExists() (err error) {
_, err = m.conn.Exec(context.Background(), fmt.Sprintf(`
_, err = m.db.Exec(fmt.Sprintf(`
create table if not exists %s(version int4 not null);
insert into %s(version)

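One caveat in the hunk above: pg_try_advisory_lock (unlike pg_advisory_lock) returns immediately with a boolean, and that boolean is discarded, so a second concurrent migrator would proceed without the lock. A hedged sketch of checking it:

```golang
package migrate

import (
    "database/sql"
    "errors"
)

// acquireMigrationLock is a sketch: pg_try_advisory_lock's boolean
// result must be inspected, or the mutual exclusion is silently lost.
func acquireMigrationLock(db *sql.DB, lockNum int64) error {
    var locked bool
    if err := db.QueryRow("select pg_try_advisory_lock($1)", lockNum).Scan(&locked); err != nil {
        return err
    }
    if !locked {
        return errors.New("advisory lock not acquired: another migration may be running")
    }
    return nil
}
```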
View File

@ -4,8 +4,9 @@ import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"github.com/Masterminds/semver"
"github.com/adjust/gorails/marshal"
)
@ -37,17 +38,20 @@ func NewAuth(version, secret string) (*Auth, error) {
AuthSalt: authSalt,
}
ver, err := semver.NewVersion(version)
if err != nil {
return nil, fmt.Errorf("rails auth: %s", err)
var v1, v2 int
var err error
sv := strings.Split(version, ".")
if len(sv) >= 2 {
if v1, err = strconv.Atoi(sv[0]); err != nil {
return nil, err
}
if v2, err = strconv.Atoi(sv[1]); err != nil {
return nil, err
}
}
gt52, err := semver.NewConstraint(">= 5.2")
if err != nil {
return nil, fmt.Errorf("rails auth: %s", err)
}
if gt52.Check(ver) {
if v1 > 5 || (v1 == 5 && v2 >= 2) {
ra.Cipher = railsCipher52
} else {
ra.Cipher = railsCipher

View File

@ -108,11 +108,15 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
// Ensure that we use the correct events, as they are not uniform across
// platforms. See https://github.com/fsnotify/fsnotify/issues/74
if !conf.Production && strings.HasSuffix(event.Name, "/allow.list") {
if conf != nil && strings.HasSuffix(event.Name, "/allow.list") {
continue
}
logger.Info().Msgf("Reloading, file changed detected '%s'", event)
if conf.Production {
continue
}
log("INF Reloading, file change detected: %s", event)
var trigger bool
switch runtime.GOOS {
@ -168,7 +172,7 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
func ReExec() {
err := syscall.Exec(binSelf, append([]string{binSelf}, os.Args[1:]...), os.Environ())
if err != nil {
errlog.Fatal().Err(err).Msg("cannot restart")
log.Fatalf("ERR cannot restart: %s", err)
}
}

File diff suppressed because one or more lines are too long

179
cmd/internal/serv/serv.go Normal file
View File

@ -0,0 +1,179 @@
package serv
import (
"context"
"fmt"
"net/http"
"os"
"os/signal"
"strings"
"time"
rice "github.com/GeertJohan/go.rice"
"github.com/NYTimes/gziphandler"
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
)
func initWatcher() {
if conf != nil && !conf.WatchAndReload {
return
}
var cpath string
if conf != nil {
cpath = conf.cpath
}
var d dir
if len(cpath) == 0 || cpath == "./" {
d = Dir("./config", ReExec)
} else {
d = Dir(cpath, ReExec)
}
go func() {
err := Do(log.Printf, d)
if err != nil {
log.Fatalf("ERR %s", err)
}
}()
}
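// startHTTP resolves the listen address from the config, overlaying any
// host/port overrides onto host_port and defaulting to 0.0.0.0:8080, then
// serves until an interrupt signal triggers a graceful shutdown.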
func startHTTP() {
var hostPort string
var appName string
defaultHP := "0.0.0.0:8080"
env := os.Getenv("GO_ENV")
if conf != nil {
appName = conf.AppName
hp := strings.SplitN(conf.HostPort, ":", 2)
if len(hp) == 2 {
if len(conf.Host) != 0 {
hp[0] = conf.Host
}
if len(conf.Port) != 0 {
hp[1] = conf.Port
}
hostPort = fmt.Sprintf("%s:%s", hp[0], hp[1])
}
}
if len(hostPort) == 0 {
hostPort = defaultHP
}
routes, err := routeHandler()
if err != nil {
log.Fatalf("ERR %s", err)
}
srv := &http.Server{
Addr: hostPort,
Handler: routes,
ReadTimeout: 5 * time.Second,
WriteTimeout: 10 * time.Second,
MaxHeaderBytes: 1 << 20,
}
idleConnsClosed := make(chan struct{})
go func() {
sigint := make(chan os.Signal, 1)
signal.Notify(sigint, os.Interrupt)
<-sigint
log.Println("INF shutdown signal received")
if err := srv.Shutdown(context.Background()); err != nil {
log.Fatalf("ERR graceful shutdown failed: %s", err)
}
close(idleConnsClosed)
}()
srv.RegisterOnShutdown(func() {
db.Close()
})
log.Printf("INF version: %s, git-branch: %s, host-port: %s, app-name: %s, env: %s\n",
version, gitBranch, hostPort, appName, env)
log.Printf("INF %s started\n", serverName)
if err := srv.ListenAndServe(); err != http.ErrServerClosed {
log.Fatalf("ERR server: %s", err)
}
<-idleConnsClosed
}
func routeHandler() (http.Handler, error) {
mux := http.NewServeMux()
if conf == nil {
return mux, nil
}
routes := map[string]http.Handler{
"/health": http.HandlerFunc(health),
"/api/v1/graphql": apiV1Handler(),
}
if err := setActionRoutes(routes); err != nil {
return nil, err
}
if conf.WebUI {
routes["/"] = http.FileServer(rice.MustFindBox("./web/build").HTTPBox())
}
if conf.HTTPGZip {
gz := gziphandler.MustNewGzipLevelHandler(6)
for k, v := range routes {
routes[k] = gz(v)
}
}
for k, v := range routes {
mux.Handle(k, v)
}
fn := func(w http.ResponseWriter, r *http.Request) {
w.Header().Set("Server", serverName)
mux.ServeHTTP(w, r)
}
return http.HandlerFunc(fn), nil
}
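// setActionRoutes mounts each configured action at /api/v1/actions/<name>
// (lower-cased) and wraps its handler with the named auth when one is set.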
func setActionRoutes(routes map[string]http.Handler) error {
var err error
for _, a := range conf.Actions {
a := a // copy: newAction may retain the pointer beyond this iteration
var fn http.Handler
fn, err = newAction(&a)
if err != nil {
return err
}
p := fmt.Sprintf("/api/v1/actions/%s", strings.ToLower(a.Name))
if ac := findAuth(a.AuthName); ac != nil {
routes[p], err = auth.WithAuth(fn, ac)
} else {
routes[p] = fn
}
if err != nil {
return err
}
}
return nil
}
func findAuth(name string) *auth.Auth {
for _, a := range conf.Auths {
if strings.EqualFold(a.Name, name) {
return &a
}
}
return nil
}

View File

@ -0,0 +1,43 @@
package serv
// import (
// "context"
// "github.com/jackc/pgx/v4"
// "github.com/rs/zerolog"
// )
// type Logger struct {
// logger zerolog.Logger
// }
// // NewSQLLogger accepts a zerolog.Logger as input and returns a new custom pgx
// // logging facade as output.
// func NewSQLLogger(logger zerolog.Logger) *Logger {
// return &Logger{
// logger: logger.With().Logger(),
// }
// }
// func (pl *Logger) Log(ctx context.Context, level pgx.LogLevel, msg string, data map[string]interface{}) {
// var zlevel zerolog.Level
// switch level {
// case pgx.LogLevelNone:
// zlevel = zerolog.NoLevel
// case pgx.LogLevelError:
// zlevel = zerolog.ErrorLevel
// case pgx.LogLevelWarn:
// zlevel = zerolog.WarnLevel
// case pgx.LogLevelDebug, pgx.LogLevelInfo:
// zlevel = zerolog.DebugLevel
// default:
// zlevel = zerolog.DebugLevel
// }
// if sql, ok := data["sql"]; ok {
// delete(data, "sql")
// pl.logger.WithLevel(zlevel).Fields(data).Msg(sql.(string))
// } else {
// pl.logger.WithLevel(zlevel).Fields(data).Msg(msg)
// }
// }

View File

@ -2,7 +2,7 @@ app_name: "{% app_name %} Development"
host_port: 0.0.0.0:8080
web_ui: true
# debug, info, warn, error, fatal, panic
# debug, error, warn, info
log_level: "info"
# enable or disable http compression (uses gzip)
@ -30,7 +30,21 @@ reload_on_config_change: true
# seed_file: seed.js
# Path pointing to where the migrations can be found
migrations_path: ./config/migrations
# this must be a relative path under the config path
migrations_path: ./migrations
# Secret key for general encryption operations like
# encrypting the cursor data
secret_key: supercalifajalistics
# CORS: A list of origins a cross-domain request can be executed from.
# If the special * value is present in the list, all origins will be allowed.
# An origin may contain a wildcard (*) to replace 0 or more
# characters (e.g. http://*.domain.com).
cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
cors_debug: false
# Postgres related environment Variables
# SG_DATABASE_HOST
@ -49,7 +63,7 @@ migrations_path: ./config/migrations
# sheep: sheep
auth:
# Can be 'rails' or 'jwt'
# Can be 'rails', 'jwt' or 'header'
type: rails
cookie: _{% app_name_slug %}_session
@ -83,13 +97,29 @@ auth:
# public_key_file: /secrets/public_key.pem
# public_key_type: ecdsa #rsa
# header:
# name: dnt
# exists: true
# value: localhost:8080
# You can add additional named auths to use with actions
# In this example, actions using this auth can only be
# called from the Google Appengine Cron service, which
# sets a special header on all its requests
auths:
- name: from_taskqueue
type: header
header:
name: X-Appengine-Cron
exists: true
database:
type: postgres
host: db
port: 5432
dbname: {% app_name_slug %}_development
user: postgres
password: ''
password: postgres
#schema: "public"
#pool_size: 10
@ -105,7 +135,9 @@ database:
# Define additional variables here to be used with filters
variables:
admin_account_id: "5"
#admin_account_id: "5"
admin_account_id: "sql:select id from users where admin = true limit 1"
# Field and table names that you wish to block
blocklist:
@ -116,6 +148,16 @@ database:
- encrypted
- token
# Create custom actions with their own API endpoints
# For example, the action below will be available at /api/v1/actions/refresh_leaderboard_users
# A request to this URL executes the configured SQL query,
# which in this case refreshes a materialized view in the database.
# The auth_name must match one of the configured auths
actions:
- name: refresh_leaderboard_users
sql: REFRESH MATERIALIZED VIEW CONCURRENTLY "leaderboard_users"
auth_name: from_taskqueue
tables:
- name: customers
remotes:
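A minimal client sketch for the action configured above, assuming the default host_port; GET is an assumption since the config doesn't pin a method, and any header value satisfies an exists: true auth:
package main
import (
"log"
"net/http"
)
func main() {
req, err := http.NewRequest("GET", "http://localhost:8080/api/v1/actions/refresh_leaderboard_users", nil)
if err != nil {
log.Fatal(err)
}
// Any value works: the from_taskqueue auth only checks the header exists.
req.Header.Set("X-Appengine-Cron", "true")
res, err := http.DefaultClient.Do(req)
if err != nil {
log.Fatal(err)
}
defer res.Body.Close()
log.Println("refresh status:", res.Status)
}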
@ -137,26 +179,15 @@ tables:
name: me
table: users
roles_query: "SELECT * FROM users WHERE id = $user_id"
#roles_query: "SELECT * FROM users WHERE id = $user_id"
roles:
- name: anon
tables:
- name: products
limit: 10
- name: users
query:
columns: ["id", "name", "description" ]
aggregation: false
insert:
block: false
update:
block: false
delete:
block: false
limit: 10
- name: user
tables:
@ -168,28 +199,24 @@ roles:
query:
limit: 50
filters: ["{ user_id: { eq: $user_id } }"]
columns: ["id", "name", "description" ]
disable_functions: false
insert:
filters: ["{ user_id: { eq: $user_id } }"]
columns: ["id", "name", "description" ]
presets:
- user_id: "$user_id"
- created_at: "now"
update:
filters: ["{ user_id: { eq: $user_id } }"]
columns:
- id
- name
presets:
- updated_at: "now"
delete:
block: true
- name: admin
match: id = 1000
tables:
- name: users
filters: []
# - name: admin
# match: id = 1000
# tables:
# - name: users
# filters: []

View File

@ -0,0 +1,59 @@
version: '3.4'
services:
# Postgres DB
db:
image: postgres:12
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
ports:
- "5432:5432"
# Yugabyte DB
# yb-master:
# image: yugabytedb/yugabyte:latest
# container_name: yb-master-n1
# command: [ "/home/yugabyte/bin/yb-master",
# "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
# "--master_addresses=yb-master-n1:7100",
# "--replication_factor=1",
# "--enable_ysql=true"]
# ports:
# - "7000:7000"
# environment:
# SERVICE_7000_NAME: yb-master
# db:
# image: yugabytedb/yugabyte:latest
# container_name: yb-tserver-n1
# command: [ "/home/yugabyte/bin/yb-tserver",
# "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
# "--start_pgsql_proxy",
# "--tserver_master_addrs=yb-master-n1:7100"]
# ports:
# - "9042:9042"
# - "6379:6379"
# - "5433:5433"
# - "9000:9000"
# environment:
# SERVICE_5433_NAME: ysql
# SERVICE_9042_NAME: ycql
# SERVICE_6379_NAME: yedis
# SERVICE_9000_NAME: yb-tserver
# depends_on:
# - yb-master
{% app_name_slug %}_api:
image: dosco/super-graph:latest
environment:
GO_ENV: "development"
# Uncomment below for Yugabyte DB
# SG_DATABASE_PORT: 5433
# SG_DATABASE_USER: yugabyte
# SG_DATABASE_PASSWORD: yugabyte
volumes:
- ./config:/config
ports:
- "8080:8080"
depends_on:
- db
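# The api service reaches Postgres through the compose service name,
# which matches "host: db" in the development config above.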

View File

@ -6,7 +6,7 @@ app_name: "{% app_name %} Production"
host_port: 0.0.0.0:8080
web_ui: false
# debug, info, warn, error, fatal, panic, disable
# debug, error, warn, info
log_level: "warn"
# enable or disable http compression (uses gzip)
@ -24,7 +24,11 @@ auth_fail_block: true
# Latency tracing for database queries and remote joins
# the resulting latency information is returned with the
# response
enable_tracing: true
enable_tracing: false
# Watch the config folder and reload Super Graph
# with the new configs when a change is detected
reload_on_config_change: false
# File that points to the database seeding script
# seed_file: seed.js
@ -32,6 +36,19 @@ enable_tracing: true
# Path pointing to where the migrations can be found
# migrations_path: migrations
# Secret key for general encryption operations like
# encrypting the cursor data
# secret_key: supercalifajalistics
# CORS: A list of origins a cross-domain request can be executed from.
# If the special * value is present in the list, all origins will be allowed.
# An origin may contain a wildcard (*) to replace 0 or more
# characters (e.g. http://*.domain.com).
# cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
# cors_debug: false
# Postgres related environment Variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
@ -48,9 +65,9 @@ database:
type: postgres
host: db
port: 5432
dbname: {% app_name_slug %}_development
dbname: {% app_name_slug %}_production
user: postgres
password: ''
password: postgres
#pool_size: 10
#max_retries: 0
#log_level: "debug"

View File

@ -5,23 +5,14 @@ import (
"crypto/sha1"
"encoding/hex"
"io"
"os"
"sort"
"strings"
"sync"
"github.com/cespare/xxhash/v2"
"github.com/dosco/super-graph/jsn"
)
// nolint: errcheck
func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
h.WriteString(k1)
h.WriteString(k2)
v := h.Sum64()
h.Reset()
return v
}
// nolint: errcheck
func gqlHash(b string, vars []byte, role string) string {
b = strings.TrimSpace(b)
@ -108,36 +99,23 @@ func al(b byte) bool {
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
}
func gqlName(b string) string {
state, s := 0, 0
func fatalInProd(err error, msg string) {
var wg sync.WaitGroup
for i := 0; i < len(b); i++ {
switch {
case state == 2 && b[i] == '{':
return b[s:i]
case state == 2 && b[i] == ' ':
return b[s:i]
case state == 1 && b[i] == '{':
return ""
case state == 1 && b[i] != ' ':
s = i
state = 2
case state == 1 && b[i] == ' ':
continue
case i != 0 && b[i] == ' ' && (b[i-1] == 'n' || b[i-1] == 'y'):
state = 1
}
if isDev() {
log.Printf("ERR %s: %s", msg, err)
} else {
log.Fatalf("ERR %s: %s", msg, err)
}
return ""
wg.Add(1)
wg.Wait()
}
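// The wg.Add(1)/wg.Wait() pair with no matching Done blocks forever,
// apparently so that in development the process stays alive after the
// error and the config watcher can trigger a restart; in production
// log.Fatalf exits immediately.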
func findStmt(role string, stmts []stmt) *stmt {
for i := range stmts {
if stmts[i].role.Name != role {
continue
}
return &stmts[i]
}
return nil
func isDev() bool {
return strings.HasPrefix(os.Getenv("GO_ENV"), "dev")
}
func sanitize(value string) string {
return strings.ToLower(strings.TrimSpace(value))
}

View File

@ -229,80 +229,3 @@ func TestGQLHashWithVars2(t *testing.T) {
t.Fatal("Hashes don't match they should")
}
}
func TestGQLName1(t *testing.T) {
var q = `
query {
products(
distinct: [price]
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }
) { id name } }`
name := gqlName(q)
if len(name) != 0 {
t.Fatal("Name should be empty, not ", name)
}
}
func TestGQLName2(t *testing.T) {
var q = `
query hakuna_matata {
products(
distinct: [price]
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }
) {
id
name
}
}`
name := gqlName(q)
if name != "hakuna_matata" {
t.Fatal("Name should be 'hakuna_matata', not ", name)
}
}
func TestGQLName3(t *testing.T) {
var q = `
mutation means{ users { id } }`
// var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } `
name := gqlName(q)
if name != "means" {
t.Fatal("Name should be 'means', not ", name)
}
}
func TestGQLName4(t *testing.T) {
var q = `
query no_worries
users {
id
}
}`
name := gqlName(q)
if name != "no_worries" {
t.Fatal("Name should be 'no_worries', not ", name)
}
}
func TestGQLName5(t *testing.T) {
var q = `
{
users {
id
}
}`
name := gqlName(q)
if len(name) != 0 {
t.Fatal("Name should be empty, not ", name)
}
}

View File

@ -7,7 +7,7 @@
/coverage
# production
/build
# /build
# development
/src/components/dataviz/core/*.js.map

View File

@ -0,0 +1,30 @@
{
"files": {
"main.css": "/static/css/main.c6b5c55c.chunk.css",
"main.js": "/static/js/main.04d74040.chunk.js",
"main.js.map": "/static/js/main.04d74040.chunk.js.map",
"runtime-main.js": "/static/js/runtime-main.4aea9da3.js",
"runtime-main.js.map": "/static/js/runtime-main.4aea9da3.js.map",
"static/js/2.03370bd3.chunk.js": "/static/js/2.03370bd3.chunk.js",
"static/js/2.03370bd3.chunk.js.map": "/static/js/2.03370bd3.chunk.js.map",
"index.html": "/index.html",
"precache-manifest.e33bc3c7c6774d7032c490820c96901d.js": "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js",
"service-worker.js": "/service-worker.js",
"static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
"static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
"static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
"static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
"static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
"static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
"static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
"static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
"static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
"static/media/logo.png": "/static/media/logo.57ee3b60.png"
},
"entrypoints": [
"static/js/runtime-main.4aea9da3.js",
"static/js/2.03370bd3.chunk.js",
"static/css/main.c6b5c55c.chunk.css",
"static/js/main.04d74040.chunk.js"
]
}

View File

(binary image diff: 15 KiB before and after)

View File

@ -0,0 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="manifest" href="/manifest.json"/><link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet"><title>Super Graph - GraphQL API for Rails</title><link href="/static/css/main.c6b5c55c.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(i){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],l=0,f=[];l<n.length;l++)t=n[l],Object.prototype.hasOwnProperty.call(p,t)&&p[t]&&f.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(i[r]=o[r]);for(s&&s(e);f.length;)f.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=l(l.s=t[0]))}return e}var t={},p={1:0},c=[];function l(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return i[e].call(r.exports,r,r.exports,l),r.l=!0,r.exports}l.m=i,l.c=t,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(r,e){if(1&e&&(r=l(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)l.d(t,n,function(e){return r[e]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var r=this.webpackJsonpweb=this.webpackJsonpweb||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/2.03370bd3.chunk.js"></script><script src="/static/js/main.04d74040.chunk.js"></script></body></html>

View File

@ -0,0 +1,58 @@
self.__precacheManifest = (self.__precacheManifest || []).concat([
{
"revision": "ecdae64182d05c64e7f7f200ed03a4ed",
"url": "/index.html"
},
{
"revision": "6e9467dc213a3e2b84ea",
"url": "/static/css/main.c6b5c55c.chunk.css"
},
{
"revision": "c156a125990ddf5dcc51",
"url": "/static/js/2.03370bd3.chunk.js"
},
{
"revision": "6e9467dc213a3e2b84ea",
"url": "/static/js/main.04d74040.chunk.js"
},
{
"revision": "427262b6771d3f49a7c5",
"url": "/static/js/runtime-main.4aea9da3.js"
},
{
"revision": "5ab204b9b95c06640dbefae9a65b1db2",
"url": "/static/media/GraphQLLanguageService.js.5ab204b9.flow"
},
{
"revision": "4ce7ba191f7ebee4426768f246b2f0e0",
"url": "/static/media/autocompleteUtils.js.4ce7ba19.flow"
},
{
"revision": "7f98f032085704c8943ec2d1925c7c84",
"url": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow"
},
{
"revision": "4dbec62f1d8e8417afb9cbd19f1268c3",
"url": "/static/media/getDefinition.js.4dbec62f.flow"
},
{
"revision": "65b0979ac23feca49e4411883fd8eaab",
"url": "/static/media/getDiagnostics.js.65b0979a.flow"
},
{
"revision": "d94118379d362fc161aa1246bcc14d43",
"url": "/static/media/getHoverInformation.js.d9411837.flow"
},
{
"revision": "c04e3998712b37a96f0bfd283fa06b52",
"url": "/static/media/getOutline.js.c04e3998.flow"
},
{
"revision": "02c24280c5e4a7eb3c6cfcb079a8f1e3",
"url": "/static/media/index.js.02c24280.flow"
},
{
"revision": "57ee3b6084cb9d3c754cc12d25a98035",
"url": "/static/media/logo.57ee3b60.png"
}
]);

View File

@ -0,0 +1,39 @@
/**
* Welcome to your Workbox-powered service worker!
*
* You'll need to register this file in your web app and you should
* disable HTTP caching for this file too.
* See https://goo.gl/nhQhGp
*
* The rest of the code is auto-generated. Please don't update this file
* directly; instead, make changes to your Workbox build configuration
* and re-run your build process.
* See https://goo.gl/2aRDsh
*/
importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");
importScripts(
"/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js"
);
self.addEventListener('message', (event) => {
if (event.data && event.data.type === 'SKIP_WAITING') {
self.skipWaiting();
}
});
workbox.core.clientsClaim();
/**
* The workboxSW.precacheAndRoute() method efficiently caches and responds to
* requests for URLs in the manifest.
* See https://goo.gl/S9QRab
*/
self.__precacheManifest = [].concat(self.__precacheManifest || []);
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});
workbox.routing.registerNavigationRoute(workbox.precaching.getCacheKeyForURL("/index.html"), {
blacklist: [/^\/_/,/\/[^/?]+\.[^/]+$/],
});

View File

@ -0,0 +1,2 @@
body{margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica Neue,sans-serif;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;background-color:#0f202d}code{font-family:source-code-pro,Menlo,Monaco,Consolas,Courier New,monospace}.playground>div:nth-child(2){height:calc(100vh - 131px)}
/*# sourceMappingURL=main.c6b5c55c.chunk.css.map */

View File

@ -0,0 +1 @@
{"version":3,"sources":["index.css"],"names":[],"mappings":"AAAA,KACE,QAAS,CACT,SAAU,CACV,mIAEY,CACZ,kCAAmC,CACnC,iCAAkC,CAClC,wBACF,CAEA,KACE,uEAEF,CAEA,6BACE,0BACF","file":"main.c6b5c55c.chunk.css","sourcesContent":["body {\n margin: 0;\n padding: 0;\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", \"Roboto\", \"Oxygen\",\n \"Ubuntu\", \"Cantarell\", \"Fira Sans\", \"Droid Sans\", \"Helvetica Neue\",\n sans-serif;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n background-color: #0f202d;\n}\n\ncode {\n font-family: source-code-pro, Menlo, Monaco, Consolas, \"Courier New\",\n monospace;\n}\n\n.playground > div:nth-child(2) {\n height: calc(100vh - 131px);\n}\n"]}

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
(this.webpackJsonpweb=this.webpackJsonpweb||[]).push([[0],{163:function(e,t,n){var r={".":61,"./":61,"./GraphQLLanguageService":117,"./GraphQLLanguageService.js":117,"./GraphQLLanguageService.js.flow":315,"./autocompleteUtils":91,"./autocompleteUtils.js":91,"./autocompleteUtils.js.flow":316,"./getAutocompleteSuggestions":77,"./getAutocompleteSuggestions.js":77,"./getAutocompleteSuggestions.js.flow":317,"./getDefinition":92,"./getDefinition.js":92,"./getDefinition.js.flow":318,"./getDiagnostics":94,"./getDiagnostics.js":94,"./getDiagnostics.js.flow":319,"./getHoverInformation":95,"./getHoverInformation.js":95,"./getHoverInformation.js.flow":320,"./getOutline":116,"./getOutline.js":116,"./getOutline.js.flow":321,"./index":61,"./index.js":61,"./index.js.flow":322};function o(e){var t=a(e);return n(t)}function a(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}o.keys=function(){return Object.keys(r)},o.resolve=a,e.exports=o,o.id=163},190:function(e,t,n){"use strict";(function(e){var r=n(100),o=n(101),a=n(201),i=n(191),s=n(202),l=n(5),c=n.n(l),u=n(20),g=n(130),f=(n(441),window.fetch);window.fetch=function(){return arguments[1].credentials="include",Promise.resolve(f.apply(e,arguments))};var p=function(e){function t(){return Object(r.a)(this,t),Object(a.a)(this,Object(i.a)(t).apply(this,arguments))}return Object(s.a)(t,e),Object(o.a)(t,[{key:"render",value:function(){return c.a.createElement("div",null,c.a.createElement("header",{style:{background:"#09141b",color:"#03a9f4",letterSpacing:"0.15rem",height:"65px",display:"flex",alignItems:"center"}},c.a.createElement("h3",{style:{textDecoration:"none",margin:"0px",fontSize:"18px"}},c.a.createElement("span",{style:{textTransform:"uppercase",marginLeft:"20px",paddingRight:"10px",borderRight:"1px solid #fff"}},"Super Graph"),c.a.createElement("span",{style:{fontSize:"16px",marginLeft:"10px",color:"#fff"}},"Instant GraphQL"))),c.a.createElement(u.Provider,{store:g.store},c.a.createElement(g.Playground,{endpoint:"/api/v1/graphql",settings:"{ 'schema.polling.enable': false, 'request.credentials': 'include', 'general.betaUpdates': true, 'editor.reuseHeaders': true, 'editor.theme': 'dark' }"})))}}]),t}(l.Component);t.a=p}).call(this,n(32))},205:function(e,t,n){e.exports=n(206)},206:function(e,t,n){"use strict";n.r(t);var r=n(5),o=n.n(r),a=n(52),i=n.n(a),s=n(190);i.a.render(o.a.createElement(s.a,null),document.getElementById("root"))},441:function(e,t,n){}},[[205,1,2]]]);
//# sourceMappingURL=main.04d74040.chunk.js.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,2 @@
!function(e){function r(r){for(var n,l,f=r[0],i=r[1],a=r[2],c=0,s=[];c<f.length;c++)l=f[c],Object.prototype.hasOwnProperty.call(o,l)&&o[l]&&s.push(o[l][0]),o[l]=0;for(n in i)Object.prototype.hasOwnProperty.call(i,n)&&(e[n]=i[n]);for(p&&p(r);s.length;)s.shift()();return u.push.apply(u,a||[]),t()}function t(){for(var e,r=0;r<u.length;r++){for(var t=u[r],n=!0,f=1;f<t.length;f++){var i=t[f];0!==o[i]&&(n=!1)}n&&(u.splice(r--,1),e=l(l.s=t[0]))}return e}var n={},o={1:0},u=[];function l(r){if(n[r])return n[r].exports;var t=n[r]={i:r,l:!1,exports:{}};return e[r].call(t.exports,t,t.exports,l),t.l=!0,t.exports}l.m=e,l.c=n,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!==typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(e,r){if(1&r&&(e=l(e)),8&r)return e;if(4&r&&"object"===typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var n in e)l.d(t,n,function(r){return e[r]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var f=this.webpackJsonpweb=this.webpackJsonpweb||[],i=f.push.bind(f);f.push=r,f=f.slice();for(var a=0;a<f.length;a++)r(f[a]);var p=i;t()}([]);
//# sourceMappingURL=runtime-main.4aea9da3.js.map

File diff suppressed because one or more lines are too long

View File

@ -0,0 +1,328 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
DocumentNode,
FragmentSpreadNode,
FragmentDefinitionNode,
OperationDefinitionNode,
TypeDefinitionNode,
NamedTypeNode,
} from 'graphql';
import type {
CompletionItem,
DefinitionQueryResult,
Diagnostic,
GraphQLCache,
GraphQLConfig,
GraphQLProjectConfig,
Uri,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import type {Hover} from 'vscode-languageserver-types';
import {Kind, parse, print} from 'graphql';
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
import {getHoverInformation} from './getHoverInformation';
import {validateQuery, getRange, SEVERITY} from './getDiagnostics';
import {
getDefinitionQueryResultForFragmentSpread,
getDefinitionQueryResultForDefinitionNode,
getDefinitionQueryResultForNamedType,
} from './getDefinition';
import {getASTNodeAtPosition} from 'graphql-language-service-utils';
const {
FRAGMENT_DEFINITION,
OBJECT_TYPE_DEFINITION,
INTERFACE_TYPE_DEFINITION,
ENUM_TYPE_DEFINITION,
UNION_TYPE_DEFINITION,
SCALAR_TYPE_DEFINITION,
INPUT_OBJECT_TYPE_DEFINITION,
SCALAR_TYPE_EXTENSION,
OBJECT_TYPE_EXTENSION,
INTERFACE_TYPE_EXTENSION,
UNION_TYPE_EXTENSION,
ENUM_TYPE_EXTENSION,
INPUT_OBJECT_TYPE_EXTENSION,
DIRECTIVE_DEFINITION,
FRAGMENT_SPREAD,
OPERATION_DEFINITION,
NAMED_TYPE,
} = Kind;
export class GraphQLLanguageService {
_graphQLCache: GraphQLCache;
_graphQLConfig: GraphQLConfig;
constructor(cache: GraphQLCache) {
this._graphQLCache = cache;
this._graphQLConfig = cache.getGraphQLConfig();
}
async getDiagnostics(
query: string,
uri: Uri,
isRelayCompatMode?: boolean,
): Promise<Array<Diagnostic>> {
// Perform syntax diagnostics first, as this doesn't require
// schema/fragment definitions, even the project configuration.
let queryHasExtensions = false;
const projectConfig = this._graphQLConfig.getConfigForFile(uri);
const schemaPath = projectConfig.schemaPath;
try {
const queryAST = parse(query);
if (!schemaPath || uri !== schemaPath) {
queryHasExtensions = queryAST.definitions.some(definition => {
switch (definition.kind) {
case OBJECT_TYPE_DEFINITION:
case INTERFACE_TYPE_DEFINITION:
case ENUM_TYPE_DEFINITION:
case UNION_TYPE_DEFINITION:
case SCALAR_TYPE_DEFINITION:
case INPUT_OBJECT_TYPE_DEFINITION:
case SCALAR_TYPE_EXTENSION:
case OBJECT_TYPE_EXTENSION:
case INTERFACE_TYPE_EXTENSION:
case UNION_TYPE_EXTENSION:
case ENUM_TYPE_EXTENSION:
case INPUT_OBJECT_TYPE_EXTENSION:
case DIRECTIVE_DEFINITION:
return true;
}
return false;
});
}
} catch (error) {
const range = getRange(error.locations[0], query);
return [
{
severity: SEVERITY.ERROR,
message: error.message,
source: 'GraphQL: Syntax',
range,
},
];
}
// If there's a matching config, proceed to prepare to run validation
let source = query;
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
projectConfig,
);
const fragmentDependencies = await this._graphQLCache.getFragmentDependencies(
query,
fragmentDefinitions,
);
const dependenciesSource = fragmentDependencies.reduce(
(prev, cur) => `${prev} ${print(cur.definition)}`,
'',
);
source = `${source} ${dependenciesSource}`;
let validationAst = null;
try {
validationAst = parse(source);
} catch (error) {
// the query string is already checked to be parsed properly - errors
// from this parse must be from corrupted fragment dependencies.
// For IDEs we don't care for errors outside of the currently edited
// query, so we return an empty array here.
return [];
}
// Check if there are custom validation rules to be used
let customRules;
const customRulesModulePath =
projectConfig.extensions.customValidationRules;
if (customRulesModulePath) {
/* eslint-disable no-implicit-coercion */
const rulesPath = require.resolve(`${customRulesModulePath}`);
if (rulesPath) {
customRules = require(`${rulesPath}`)(this._graphQLConfig);
}
/* eslint-enable no-implicit-coercion */
}
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName, queryHasExtensions)
.catch(() => null);
if (!schema) {
return [];
}
return validateQuery(validationAst, schema, customRules, isRelayCompatMode);
}
async getAutocompleteSuggestions(
query: string,
position: Position,
filePath: Uri,
): Promise<Array<CompletionItem>> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName)
.catch(() => null);
if (schema) {
return getAutocompleteSuggestions(schema, query, position);
}
return [];
}
async getHoverInformation(
query: string,
position: Position,
filePath: Uri,
): Promise<Hover.contents> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
const schema = await this._graphQLCache
.getSchema(projectConfig.projectName)
.catch(() => null);
if (schema) {
return getHoverInformation(schema, query, position);
}
return '';
}
async getDefinition(
query: string,
position: Position,
filePath: Uri,
): Promise<?DefinitionQueryResult> {
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
let ast;
try {
ast = parse(query);
} catch (error) {
return null;
}
const node = getASTNodeAtPosition(query, ast, position);
if (node) {
switch (node.kind) {
case FRAGMENT_SPREAD:
return this._getDefinitionForFragmentSpread(
query,
ast,
node,
filePath,
projectConfig,
);
case FRAGMENT_DEFINITION:
case OPERATION_DEFINITION:
return getDefinitionQueryResultForDefinitionNode(
filePath,
query,
(node: FragmentDefinitionNode | OperationDefinitionNode),
);
case NAMED_TYPE:
return this._getDefinitionForNamedType(
query,
ast,
node,
filePath,
projectConfig,
);
}
}
return null;
}
async _getDefinitionForNamedType(
query: string,
ast: DocumentNode,
node: NamedTypeNode,
filePath: Uri,
projectConfig: GraphQLProjectConfig,
): Promise<?DefinitionQueryResult> {
const objectTypeDefinitions = await this._graphQLCache.getObjectTypeDefinitions(
projectConfig,
);
const dependencies = await this._graphQLCache.getObjectTypeDependenciesForAST(
ast,
objectTypeDefinitions,
);
const localObjectTypeDefinitions = ast.definitions.filter(
definition =>
definition.kind === OBJECT_TYPE_DEFINITION ||
definition.kind === INPUT_OBJECT_TYPE_DEFINITION ||
definition.kind === ENUM_TYPE_DEFINITION,
);
const typeCastedDefs = ((localObjectTypeDefinitions: any): Array<
TypeDefinitionNode,
>);
const localOperationDefinationInfos = typeCastedDefs.map(
(definition: TypeDefinitionNode) => ({
filePath,
content: query,
definition,
}),
);
const result = await getDefinitionQueryResultForNamedType(
query,
node,
dependencies.concat(localOperationDefinationInfos),
);
return result;
}
async _getDefinitionForFragmentSpread(
query: string,
ast: DocumentNode,
node: FragmentSpreadNode,
filePath: Uri,
projectConfig: GraphQLProjectConfig,
): Promise<?DefinitionQueryResult> {
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
projectConfig,
);
const dependencies = await this._graphQLCache.getFragmentDependenciesForAST(
ast,
fragmentDefinitions,
);
const localFragDefinitions = ast.definitions.filter(
definition => definition.kind === FRAGMENT_DEFINITION,
);
const typeCastedDefs = ((localFragDefinitions: any): Array<
FragmentDefinitionNode,
>);
const localFragInfos = typeCastedDefs.map(
(definition: FragmentDefinitionNode) => ({
filePath,
content: query,
definition,
}),
);
const result = await getDefinitionQueryResultForFragmentSpread(
query,
node,
dependencies.concat(localFragInfos),
);
return result;
}
}

View File

@ -0,0 +1,204 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {GraphQLField, GraphQLSchema, GraphQLType} from 'graphql';
import {isCompositeType} from 'graphql';
import {
SchemaMetaFieldDef,
TypeMetaFieldDef,
TypeNameMetaFieldDef,
} from 'graphql/type/introspection';
import type {
CompletionItem,
ContextToken,
State,
TypeInfo,
} from 'graphql-language-service-types';
// Utility for returning the state representing the Definition this token state
// is within, if any.
export function getDefinitionState(tokenState: State): ?State {
let definitionState;
forEachState(tokenState, state => {
switch (state.kind) {
case 'Query':
case 'ShortQuery':
case 'Mutation':
case 'Subscription':
case 'FragmentDefinition':
definitionState = state;
break;
}
});
return definitionState;
}
// Gets the field definition given a type and field name
export function getFieldDef(
schema: GraphQLSchema,
type: GraphQLType,
fieldName: string,
): ?GraphQLField<*, *> {
if (fieldName === SchemaMetaFieldDef.name && schema.getQueryType() === type) {
return SchemaMetaFieldDef;
}
if (fieldName === TypeMetaFieldDef.name && schema.getQueryType() === type) {
return TypeMetaFieldDef;
}
if (fieldName === TypeNameMetaFieldDef.name && isCompositeType(type)) {
return TypeNameMetaFieldDef;
}
if (type.getFields && typeof type.getFields === 'function') {
return (type.getFields()[fieldName]: any);
}
return null;
}
// Utility for iterating through a CodeMirror parse state stack bottom-up.
export function forEachState(
stack: State,
fn: (state: State) => ?TypeInfo,
): void {
const reverseStateStack = [];
let state = stack;
while (state && state.kind) {
reverseStateStack.push(state);
state = state.prevState;
}
for (let i = reverseStateStack.length - 1; i >= 0; i--) {
fn(reverseStateStack[i]);
}
}
export function objectValues(object: Object): Array<any> {
const keys = Object.keys(object);
const len = keys.length;
const values = new Array(len);
for (let i = 0; i < len; ++i) {
values[i] = object[keys[i]];
}
return values;
}
// Create the expected hint response given a possible list and a token
export function hintList(
token: ContextToken,
list: Array<CompletionItem>,
): Array<CompletionItem> {
return filterAndSortList(list, normalizeText(token.string));
}
// Given a list of hint entries and currently typed text, sort and filter to
// provide a concise list.
function filterAndSortList(
list: Array<CompletionItem>,
text: string,
): Array<CompletionItem> {
if (!text) {
return filterNonEmpty(list, entry => !entry.isDeprecated);
}
const byProximity = list.map(entry => ({
proximity: getProximity(normalizeText(entry.label), text),
entry,
}));
const conciseMatches = filterNonEmpty(
filterNonEmpty(byProximity, pair => pair.proximity <= 2),
pair => !pair.entry.isDeprecated,
);
const sortedMatches = conciseMatches.sort(
(a, b) =>
(a.entry.isDeprecated ? 1 : 0) - (b.entry.isDeprecated ? 1 : 0) ||
a.proximity - b.proximity ||
a.entry.label.length - b.entry.label.length,
);
return sortedMatches.map(pair => pair.entry);
}
// Filters the array by the predicate, unless it results in an empty array,
// in which case return the original array.
function filterNonEmpty(
array: Array<Object>,
predicate: (entry: Object) => boolean,
): Array<Object> {
const filtered = array.filter(predicate);
return filtered.length === 0 ? array : filtered;
}
function normalizeText(text: string): string {
return text.toLowerCase().replace(/\W/g, '');
}
// Determine a numeric proximity for a suggestion based on current text.
function getProximity(suggestion: string, text: string): number {
// start with lexical distance
let proximity = lexicalDistance(text, suggestion);
if (suggestion.length > text.length) {
// do not penalize long suggestions.
proximity -= suggestion.length - text.length - 1;
// penalize suggestions not starting with this phrase
proximity += suggestion.indexOf(text) === 0 ? 0 : 0.5;
}
return proximity;
}
/**
* Computes the lexical distance between strings A and B.
*
* The "distance" between two strings is given by counting the minimum number
* of edits needed to transform string A into string B. An edit can be an
* insertion, deletion, or substitution of a single character, or a swap of two
* adjacent characters.
*
* This distance can be useful for detecting typos in input or sorting
*
* @param {string} a
* @param {string} b
* @return {int} distance in number of edits
*/
function lexicalDistance(a: string, b: string): number {
let i;
let j;
const d = [];
const aLength = a.length;
const bLength = b.length;
for (i = 0; i <= aLength; i++) {
d[i] = [i];
}
for (j = 1; j <= bLength; j++) {
d[0][j] = j;
}
for (i = 1; i <= aLength; i++) {
for (j = 1; j <= bLength; j++) {
const cost = a[i - 1] === b[j - 1] ? 0 : 1;
d[i][j] = Math.min(
d[i - 1][j] + 1,
d[i][j - 1] + 1,
d[i - 1][j - 1] + cost,
);
if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
}
}
}
return d[aLength][bLength];
}

View File

@ -0,0 +1,665 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
FragmentDefinitionNode,
GraphQLDirective,
GraphQLSchema,
} from 'graphql';
import type {
CompletionItem,
ContextToken,
State,
TypeInfo,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import {
GraphQLBoolean,
GraphQLEnumType,
GraphQLInputObjectType,
GraphQLList,
SchemaMetaFieldDef,
TypeMetaFieldDef,
TypeNameMetaFieldDef,
assertAbstractType,
doTypesOverlap,
getNamedType,
getNullableType,
isAbstractType,
isCompositeType,
isInputType,
} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
forEachState,
getDefinitionState,
getFieldDef,
hintList,
objectValues,
} from './autocompleteUtils';
/**
* Given GraphQLSchema, queryText, and context of the current position within
* the source text, provide a list of typeahead entries.
*/
export function getAutocompleteSuggestions(
schema: GraphQLSchema,
queryText: string,
cursor: Position,
contextToken?: ContextToken,
): Array<CompletionItem> {
const token = contextToken || getTokenAtPosition(queryText, cursor);
const state =
token.state.kind === 'Invalid' ? token.state.prevState : token.state;
// relieve flow errors by checking if `state` exists
if (!state) {
return [];
}
const kind = state.kind;
const step = state.step;
const typeInfo = getTypeInfo(schema, token.state);
// Definition kinds
if (kind === 'Document') {
return hintList(token, [
{label: 'query'},
{label: 'mutation'},
{label: 'subscription'},
{label: 'fragment'},
{label: '{'},
]);
}
// Field names
if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
return getSuggestionsForFieldNames(token, typeInfo, schema);
}
// Argument names
if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
const argDefs = typeInfo.argDefs;
if (argDefs) {
return hintList(
token,
argDefs.map(argDef => ({
label: argDef.name,
detail: String(argDef.type),
documentation: argDef.description,
})),
);
}
}
// Input Object fields
if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
if (typeInfo.objectFieldDefs) {
const objectFields = objectValues(typeInfo.objectFieldDefs);
return hintList(
token,
objectFields.map(field => ({
label: field.name,
detail: String(field.type),
documentation: field.description,
})),
);
}
}
// Input values: Enum and Boolean
if (
kind === 'EnumValue' ||
(kind === 'ListValue' && step === 1) ||
(kind === 'ObjectField' && step === 2) ||
(kind === 'Argument' && step === 2)
) {
return getSuggestionsForInputValues(token, typeInfo);
}
// Fragment type conditions
if (
(kind === 'TypeCondition' && step === 1) ||
(kind === 'NamedType' &&
state.prevState != null &&
state.prevState.kind === 'TypeCondition')
) {
return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
}
// Fragment spread names
if (kind === 'FragmentSpread' && step === 1) {
return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
}
// Variable definition types
if (
(kind === 'VariableDefinition' && step === 2) ||
(kind === 'ListType' && step === 1) ||
(kind === 'NamedType' &&
state.prevState &&
(state.prevState.kind === 'VariableDefinition' ||
state.prevState.kind === 'ListType'))
) {
return getSuggestionsForVariableDefinition(token, schema);
}
// Directive names
if (kind === 'Directive') {
return getSuggestionsForDirective(token, state, schema);
}
return [];
}
// Helper functions to get suggestions for each kinds
function getSuggestionsForFieldNames(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
): Array<CompletionItem> {
if (typeInfo.parentType) {
const parentType = typeInfo.parentType;
const fields =
parentType.getFields instanceof Function
? objectValues(parentType.getFields())
: [];
if (isAbstractType(parentType)) {
fields.push(TypeNameMetaFieldDef);
}
if (parentType === schema.getQueryType()) {
fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
}
return hintList(
token,
fields.map(field => ({
label: field.name,
detail: String(field.type),
documentation: field.description,
isDeprecated: field.isDeprecated,
deprecationReason: field.deprecationReason,
})),
);
}
return [];
}
function getSuggestionsForInputValues(
token: ContextToken,
typeInfo: TypeInfo,
): Array<CompletionItem> {
const namedInputType = getNamedType(typeInfo.inputType);
if (namedInputType instanceof GraphQLEnumType) {
const values = namedInputType.getValues();
return hintList(
token,
values.map(value => ({
label: value.name,
detail: String(namedInputType),
documentation: value.description,
isDeprecated: value.isDeprecated,
deprecationReason: value.deprecationReason,
})),
);
} else if (namedInputType === GraphQLBoolean) {
return hintList(token, [
{
label: 'true',
detail: String(GraphQLBoolean),
documentation: 'Not false.',
},
{
label: 'false',
detail: String(GraphQLBoolean),
documentation: 'Not true.',
},
]);
}
return [];
}
function getSuggestionsForFragmentTypeConditions(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
): Array<CompletionItem> {
let possibleTypes;
if (typeInfo.parentType) {
if (isAbstractType(typeInfo.parentType)) {
const abstractType = assertAbstractType(typeInfo.parentType);
// Collect both the possible Object types as well as the interfaces
// they implement.
const possibleObjTypes = schema.getPossibleTypes(abstractType);
const possibleIfaceMap = Object.create(null);
possibleObjTypes.forEach(type => {
type.getInterfaces().forEach(iface => {
possibleIfaceMap[iface.name] = iface;
});
});
possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
} else {
// The parent type is a non-abstract Object type, so the only possible
// type that can be used is that same type.
possibleTypes = [typeInfo.parentType];
}
} else {
const typeMap = schema.getTypeMap();
possibleTypes = objectValues(typeMap).filter(isCompositeType);
}
return hintList(
token,
possibleTypes.map(type => {
const namedType = getNamedType(type);
return {
label: String(type),
documentation: (namedType && namedType.description) || '',
};
}),
);
}
function getSuggestionsForFragmentSpread(
token: ContextToken,
typeInfo: TypeInfo,
schema: GraphQLSchema,
queryText: string,
): Array<CompletionItem> {
const typeMap = schema.getTypeMap();
const defState = getDefinitionState(token.state);
const fragments = getFragmentDefinitions(queryText);
// Filter down to only the fragments which may exist here.
const relevantFrags = fragments.filter(
frag =>
// Only include fragments with known types.
typeMap[frag.typeCondition.name.value] &&
// Only include fragments which are not cyclic.
!(
defState &&
defState.kind === 'FragmentDefinition' &&
defState.name === frag.name.value
) &&
// Only include fragments which could possibly be spread here.
isCompositeType(typeInfo.parentType) &&
isCompositeType(typeMap[frag.typeCondition.name.value]) &&
doTypesOverlap(
schema,
typeInfo.parentType,
typeMap[frag.typeCondition.name.value],
),
);
return hintList(
token,
relevantFrags.map(frag => ({
label: frag.name.value,
detail: String(typeMap[frag.typeCondition.name.value]),
documentation: `fragment ${frag.name.value} on ${
frag.typeCondition.name.value
}`,
})),
);
}
function getFragmentDefinitions(
queryText: string,
): Array<FragmentDefinitionNode> {
const fragmentDefs = [];
runOnlineParser(queryText, (_, state) => {
if (state.kind === 'FragmentDefinition' && state.name && state.type) {
fragmentDefs.push({
kind: 'FragmentDefinition',
name: {
kind: 'Name',
value: state.name,
},
selectionSet: {
kind: 'SelectionSet',
selections: [],
},
typeCondition: {
kind: 'NamedType',
name: {
kind: 'Name',
value: state.type,
},
},
});
}
});
return fragmentDefs;
}
function getSuggestionsForVariableDefinition(
token: ContextToken,
schema: GraphQLSchema,
): Array<CompletionItem> {
const inputTypeMap = schema.getTypeMap();
const inputTypes = objectValues(inputTypeMap).filter(isInputType);
return hintList(
token,
inputTypes.map(type => ({
label: type.name,
documentation: type.description,
})),
);
}
function getSuggestionsForDirective(
token: ContextToken,
state: State,
schema: GraphQLSchema,
): Array<CompletionItem> {
if (state.prevState && state.prevState.kind) {
const directives = schema
.getDirectives()
.filter(directive => canUseDirective(state.prevState, directive));
return hintList(
token,
directives.map(directive => ({
label: directive.name,
documentation: directive.description || '',
})),
);
}
return [];
}
export function getTokenAtPosition(
queryText: string,
cursor: Position,
): ContextToken {
let styleAtCursor = null;
let stateAtCursor = null;
let stringAtCursor = null;
const token = runOnlineParser(queryText, (stream, state, style, index) => {
if (index === cursor.line) {
if (stream.getCurrentPosition() >= cursor.character) {
styleAtCursor = style;
stateAtCursor = {...state};
stringAtCursor = stream.current();
return 'BREAK';
}
}
});
// Return the state/style of parsed token in case those at cursor aren't
// available.
return {
start: token.start,
end: token.end,
string: stringAtCursor || token.string,
state: stateAtCursor || token.state,
style: styleAtCursor || token.style,
};
}
/**
* Provides a utility function to parse a given query text and construct a
* `token` context object.
* A token context provides useful information about the token/style that
* CharacterStream currently possesses, as well as the end state and style
* of the token.
*/
type callbackFnType = (
stream: CharacterStream,
state: State,
style: string,
index: number,
) => void | 'BREAK';
function runOnlineParser(
queryText: string,
callback: callbackFnType,
): ContextToken {
const lines = queryText.split('\n');
const parser = onlineParser();
let state = parser.startState();
let style = '';
let stream: CharacterStream = new CharacterStream('');
for (let i = 0; i < lines.length; i++) {
stream = new CharacterStream(lines[i]);
while (!stream.eol()) {
style = parser.token(stream, state);
const code = callback(stream, state, style, i);
if (code === 'BREAK') {
break;
}
}
// Above while loop won't run if there is an empty line.
// Run the callback one more time to catch this.
callback(stream, state, style, i);
if (!state.kind) {
state = parser.startState();
}
}
return {
start: stream.getStartOfToken(),
end: stream.getCurrentPosition(),
string: stream.current(),
state,
style,
};
}
function canUseDirective(
state: $PropertyType<State, 'prevState'>,
directive: GraphQLDirective,
): boolean {
if (!state || !state.kind) {
return false;
}
const kind = state.kind;
const locations = directive.locations;
switch (kind) {
case 'Query':
return locations.indexOf('QUERY') !== -1;
case 'Mutation':
return locations.indexOf('MUTATION') !== -1;
case 'Subscription':
return locations.indexOf('SUBSCRIPTION') !== -1;
case 'Field':
case 'AliasedField':
return locations.indexOf('FIELD') !== -1;
case 'FragmentDefinition':
return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
case 'FragmentSpread':
return locations.indexOf('FRAGMENT_SPREAD') !== -1;
case 'InlineFragment':
return locations.indexOf('INLINE_FRAGMENT') !== -1;
// Schema Definitions
case 'SchemaDef':
return locations.indexOf('SCHEMA') !== -1;
case 'ScalarDef':
return locations.indexOf('SCALAR') !== -1;
case 'ObjectTypeDef':
return locations.indexOf('OBJECT') !== -1;
case 'FieldDef':
return locations.indexOf('FIELD_DEFINITION') !== -1;
case 'InterfaceDef':
return locations.indexOf('INTERFACE') !== -1;
case 'UnionDef':
return locations.indexOf('UNION') !== -1;
case 'EnumDef':
return locations.indexOf('ENUM') !== -1;
case 'EnumValue':
return locations.indexOf('ENUM_VALUE') !== -1;
case 'InputDef':
return locations.indexOf('INPUT_OBJECT') !== -1;
case 'InputValueDef':
const prevStateKind = state.prevState && state.prevState.kind;
switch (prevStateKind) {
case 'ArgumentsDef':
return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
case 'InputDef':
return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
}
}
return false;
}
// Utility for collecting rich type information given any token's state
// from the graphql-mode parser.
export function getTypeInfo(
schema: GraphQLSchema,
tokenState: State,
): TypeInfo {
let argDef;
let argDefs;
let directiveDef;
let enumValue;
let fieldDef;
let inputType;
let objectFieldDefs;
let parentType;
let type;
forEachState(tokenState, state => {
switch (state.kind) {
case 'Query':
case 'ShortQuery':
type = schema.getQueryType();
break;
case 'Mutation':
type = schema.getMutationType();
break;
case 'Subscription':
type = schema.getSubscriptionType();
break;
case 'InlineFragment':
case 'FragmentDefinition':
if (state.type) {
type = schema.getType(state.type);
}
break;
case 'Field':
case 'AliasedField':
if (!type || !state.name) {
fieldDef = null;
} else {
fieldDef = parentType
? getFieldDef(schema, parentType, state.name)
: null;
type = fieldDef ? fieldDef.type : null;
}
break;
case 'SelectionSet':
parentType = getNamedType(type);
break;
case 'Directive':
directiveDef = state.name ? schema.getDirective(state.name) : null;
break;
case 'Arguments':
if (!state.prevState) {
argDefs = null;
} else {
switch (state.prevState.kind) {
case 'Field':
argDefs = fieldDef && fieldDef.args;
break;
case 'Directive':
argDefs = directiveDef && directiveDef.args;
break;
case 'AliasedField':
const name = state.prevState && state.prevState.name;
if (!name) {
argDefs = null;
break;
}
const field = parentType
? getFieldDef(schema, parentType, name)
: null;
if (!field) {
argDefs = null;
break;
}
argDefs = field.args;
break;
default:
argDefs = null;
break;
}
}
break;
case 'Argument':
if (argDefs) {
for (let i = 0; i < argDefs.length; i++) {
if (argDefs[i].name === state.name) {
argDef = argDefs[i];
break;
}
}
}
inputType = argDef && argDef.type;
break;
case 'EnumValue':
const enumType = getNamedType(inputType);
enumValue =
enumType instanceof GraphQLEnumType
? find(enumType.getValues(), val => val.value === state.name)
: null;
break;
case 'ListValue':
const nullableType = getNullableType(inputType);
inputType =
nullableType instanceof GraphQLList ? nullableType.ofType : null;
break;
case 'ObjectValue':
const objectType = getNamedType(inputType);
objectFieldDefs =
objectType instanceof GraphQLInputObjectType
? objectType.getFields()
: null;
break;
case 'ObjectField':
const objectField =
state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
inputType = objectField && objectField.type;
break;
case 'NamedType':
if (state.name) {
type = schema.getType(state.name);
}
break;
}
});
return {
argDef,
argDefs,
directiveDef,
enumValue,
fieldDef,
inputType,
objectFieldDefs,
parentType,
type,
};
}
// Returns the first item in the array which causes predicate to return truthy.
function find(array, predicate) {
for (let i = 0; i < array.length; i++) {
if (predicate(array[i])) {
return array[i];
}
}
return null;
}

View File

@ -0,0 +1,136 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
ASTNode,
FragmentSpreadNode,
FragmentDefinitionNode,
OperationDefinitionNode,
NamedTypeNode,
TypeDefinitionNode,
} from 'graphql';
import type {
Definition,
DefinitionQueryResult,
FragmentInfo,
Position,
Range,
Uri,
ObjectTypeInfo,
} from 'graphql-language-service-types';
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
import invariant from 'assert';
export const LANGUAGE = 'GraphQL';
function getRange(text: string, node: ASTNode): Range {
const location = node.loc;
invariant(location, 'Expected ASTNode to have a location.');
return locToRange(text, location);
}
function getPosition(text: string, node: ASTNode): Position {
const location = node.loc;
invariant(location, 'Expected ASTNode to have a location.');
return offsetToPosition(text, location.start);
}
export async function getDefinitionQueryResultForNamedType(
text: string,
node: NamedTypeNode,
dependencies: Array<ObjectTypeInfo>,
): Promise<DefinitionQueryResult> {
const name = node.name.value;
const defNodes = dependencies.filter(
({definition}) => definition.name && definition.name.value === name,
);
if (defNodes.length === 0) {
process.stderr.write(`Definition not found for GraphQL type ${name}`);
return {queryRange: [], definitions: []};
}
const definitions: Array<Definition> = defNodes.map(
({filePath, content, definition}) =>
getDefinitionForNodeDefinition(filePath || '', content, definition),
);
return {
definitions,
queryRange: definitions.map(_ => getRange(text, node)),
};
}
export async function getDefinitionQueryResultForFragmentSpread(
text: string,
fragment: FragmentSpreadNode,
dependencies: Array<FragmentInfo>,
): Promise<DefinitionQueryResult> {
const name = fragment.name.value;
const defNodes = dependencies.filter(
({definition}) => definition.name.value === name,
);
if (defNodes.length === 0) {
process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
return {queryRange: [], definitions: []};
}
const definitions: Array<Definition> = defNodes.map(
({filePath, content, definition}) =>
getDefinitionForFragmentDefinition(filePath || '', content, definition),
);
return {
definitions,
queryRange: definitions.map(_ => getRange(text, fragment)),
};
}
export function getDefinitionQueryResultForDefinitionNode(
path: Uri,
text: string,
definition: FragmentDefinitionNode | OperationDefinitionNode,
): DefinitionQueryResult {
return {
definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
queryRange: definition.name ? [getRange(text, definition.name)] : [],
};
}
function getDefinitionForFragmentDefinition(
path: Uri,
text: string,
definition: FragmentDefinitionNode | OperationDefinitionNode,
): Definition {
const name = definition.name;
invariant(name, 'Expected ASTNode to have a Name.');
return {
path,
position: getPosition(text, definition),
range: getRange(text, definition),
name: name.value || '',
language: LANGUAGE,
// This is a file inside the project root, good enough for now
projectRoot: path,
};
}
function getDefinitionForNodeDefinition(
path: Uri,
text: string,
definition: TypeDefinitionNode,
): Definition {
const name = definition.name;
invariant(name, 'Expected ASTNode to have a Name.');
return {
path,
position: getPosition(text, definition),
range: getRange(text, definition),
name: name.value || '',
language: LANGUAGE,
// This is a file inside the project root, good enough for now
projectRoot: path,
};
}
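
A minimal usage sketch for the definition helpers above (not part of the diff; the query text and the FragmentInfo entry are invented for illustration — real entries would be collected from project files):

import {parse, visit} from 'graphql';
import {getDefinitionQueryResultForFragmentSpread} from './getDefinition';

const queryText = 'query { ...userFields }';
const fragmentText = 'fragment userFields on User { id name }';
const fragmentInfo = [
  {
    filePath: '/project/fragments.graphql', // hypothetical path
    content: fragmentText,
    definition: parse(fragmentText).definitions[0],
  },
];

visit(parse(queryText), {
  FragmentSpread(node) {
    getDefinitionQueryResultForFragmentSpread(queryText, node, fragmentInfo)
      .then(result => console.log(result.definitions[0].path));
  },
});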


@ -0,0 +1,172 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
ASTNode,
DocumentNode,
GraphQLError,
GraphQLSchema,
Location,
SourceLocation,
} from 'graphql';
import type {
Diagnostic,
CustomValidationRule,
} from 'graphql-language-service-types';
import invariant from 'assert';
import {findDeprecatedUsages, parse} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
Position,
Range,
validateWithCustomRules,
} from 'graphql-language-service-utils';
export const SEVERITY = {
ERROR: 1,
WARNING: 2,
INFORMATION: 3,
HINT: 4,
};
export function getDiagnostics(
query: string,
schema: ?GraphQLSchema = null,
customRules?: Array<CustomValidationRule>,
isRelayCompatMode?: boolean,
): Array<Diagnostic> {
let ast = null;
try {
ast = parse(query);
} catch (error) {
const range = getRange(error.locations[0], query);
return [
{
severity: SEVERITY.ERROR,
message: error.message,
source: 'GraphQL: Syntax',
range,
},
];
}
return validateQuery(ast, schema, customRules, isRelayCompatMode);
}
export function validateQuery(
ast: DocumentNode,
schema: ?GraphQLSchema = null,
customRules?: Array<CustomValidationRule>,
isRelayCompatMode?: boolean,
): Array<Diagnostic> {
// We cannot validate the query unless a schema is provided.
if (!schema) {
return [];
}
const validationErrorAnnotations = mapCat(
validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
error => annotations(error, SEVERITY.ERROR, 'Validation'),
);
// Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
// support older versions of graphql-js.
const deprecationWarningAnnotations = !findDeprecatedUsages
? []
: mapCat(findDeprecatedUsages(schema, ast), error =>
annotations(error, SEVERITY.WARNING, 'Deprecation'),
);
return validationErrorAnnotations.concat(deprecationWarningAnnotations);
}
// General utility for map-cating (aka flat-mapping).
function mapCat<T>(
array: Array<T>,
mapper: (item: T) => Array<any>,
): Array<any> {
return Array.prototype.concat.apply([], array.map(mapper));
}
function annotations(
error: GraphQLError,
severity: number,
type: string,
): Array<Diagnostic> {
if (!error.nodes) {
return [];
}
return error.nodes.map(node => {
const highlightNode =
node.kind !== 'Variable' && node.name
? node.name
: node.variable
? node.variable
: node;
invariant(error.locations, 'GraphQL validation error requires locations.');
const loc = error.locations[0];
const highlightLoc = getLocation(highlightNode);
const end = loc.column + (highlightLoc.end - highlightLoc.start);
return {
source: `GraphQL: ${type}`,
message: error.message,
severity,
range: new Range(
new Position(loc.line - 1, loc.column - 1),
new Position(loc.line - 1, end),
),
};
});
}
export function getRange(location: SourceLocation, queryText: string) {
const parser = onlineParser();
const state = parser.startState();
const lines = queryText.split('\n');
invariant(
lines.length >= location.line,
'Query text must have more lines than where the error happened',
);
let stream = null;
for (let i = 0; i < location.line; i++) {
stream = new CharacterStream(lines[i]);
while (!stream.eol()) {
const style = parser.token(stream, state);
if (style === 'invalidchar') {
break;
}
}
}
invariant(stream, 'Expected Parser stream to be available.');
const line = location.line - 1;
const start = stream.getStartOfToken();
const end = stream.getCurrentPosition();
return new Range(new Position(line, start), new Position(line, end));
}
/**
* Get location info from a node in a type-safe way.
*
* The only way a node could not have a location is if we initialized the parser
* (and therefore the lexer) with the `noLocation` option, but we always
* call `parse` without options above.
*/
function getLocation(node: any): Location {
const typeCastedNode = (node: ASTNode);
const location = typeCastedNode.loc;
invariant(location, 'Expected ASTNode to have a location.');
return location;
}
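
A quick sketch of how getDiagnostics behaves (not part of the diff; the schema is a stand-in):

import {buildSchema} from 'graphql';
import {getDiagnostics} from './getDiagnostics';

const schema = buildSchema('type Query { hello: String }');

// Unterminated selection set: yields one SEVERITY.ERROR diagnostic with
// source 'GraphQL: Syntax'; a schema is not needed to catch this.
console.log(getDiagnostics('query { hello ', schema));

// Parses fine but fails validation: yields a 'GraphQL: Validation'
// diagnostic because `goodbye` is not defined on Query.
console.log(getDiagnostics('query { goodbye }', schema));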


@ -0,0 +1,186 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
/**
* Ported from codemirror-graphql
* https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
*/
import type {GraphQLSchema} from 'graphql';
import type {ContextToken} from 'graphql-language-service-types';
import type {Hover} from 'vscode-languageserver-types';
import type {Position} from 'graphql-language-service-utils';
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
import {GraphQLNonNull, GraphQLList} from 'graphql';
export function getHoverInformation(
schema: GraphQLSchema,
queryText: string,
cursor: Position,
contextToken?: ContextToken,
): Hover.contents {
const token = contextToken || getTokenAtPosition(queryText, cursor);
if (!schema || !token || !token.state) {
return [];
}
const state = token.state;
const kind = state.kind;
const step = state.step;
const typeInfo = getTypeInfo(schema, token.state);
const options = {schema};
// Given a Schema and a Token, produce the contents of an info tooltip.
// To do this, accumulate text fragments into an array and join them once
// the relevant render functions have run.
if (
(kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
(kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
) {
const into = [];
renderField(into, typeInfo, options);
renderDescription(into, options, typeInfo.fieldDef);
return into.join('').trim();
} else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
const into = [];
renderDirective(into, typeInfo, options);
renderDescription(into, options, typeInfo.directiveDef);
return into.join('').trim();
} else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
const into = [];
renderArg(into, typeInfo, options);
renderDescription(into, options, typeInfo.argDef);
return into.join('').trim();
} else if (
kind === 'EnumValue' &&
typeInfo.enumValue &&
typeInfo.enumValue.description
) {
const into = [];
renderEnumValue(into, typeInfo, options);
renderDescription(into, options, typeInfo.enumValue);
return into.join('').trim();
} else if (
kind === 'NamedType' &&
typeInfo.type &&
typeInfo.type.description
) {
const into = [];
renderType(into, typeInfo, options, typeInfo.type);
renderDescription(into, options, typeInfo.type);
return into.join('').trim();
}
}
function renderField(into, typeInfo, options) {
renderQualifiedField(into, typeInfo, options);
renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
}
function renderQualifiedField(into, typeInfo, options) {
if (!typeInfo.fieldDef) {
return;
}
const fieldName = (typeInfo.fieldDef.name: string);
if (fieldName.slice(0, 2) !== '__') {
renderType(into, typeInfo, options, typeInfo.parentType);
text(into, '.');
}
text(into, fieldName);
}
function renderDirective(into, typeInfo, options) {
if (!typeInfo.directiveDef) {
return;
}
const name = '@' + typeInfo.directiveDef.name;
text(into, name);
}
function renderArg(into, typeInfo, options) {
if (typeInfo.directiveDef) {
renderDirective(into, typeInfo, options);
} else if (typeInfo.fieldDef) {
renderQualifiedField(into, typeInfo, options);
}
if (!typeInfo.argDef) {
return;
}
const name = typeInfo.argDef.name;
text(into, '(');
text(into, name);
renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
text(into, ')');
}
function renderTypeAnnotation(into, typeInfo, options, t) {
text(into, ': ');
renderType(into, typeInfo, options, t);
}
function renderEnumValue(into, typeInfo, options) {
if (!typeInfo.enumValue) {
return;
}
const name = typeInfo.enumValue.name;
renderType(into, typeInfo, options, typeInfo.inputType);
text(into, '.');
text(into, name);
}
function renderType(into, typeInfo, options, t) {
if (!t) {
return;
}
if (t instanceof GraphQLNonNull) {
renderType(into, typeInfo, options, t.ofType);
text(into, '!');
} else if (t instanceof GraphQLList) {
text(into, '[');
renderType(into, typeInfo, options, t.ofType);
text(into, ']');
} else {
text(into, t.name);
}
}
function renderDescription(into, options, def) {
if (!def) {
return;
}
const description =
typeof def.description === 'string' ? def.description : null;
if (description) {
text(into, '\n\n');
text(into, description);
}
renderDeprecation(into, options, def);
}
function renderDeprecation(into, options, def) {
if (!def) {
return;
}
const reason =
typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
if (!reason) {
return;
}
text(into, '\n\n');
text(into, 'Deprecated: ');
text(into, reason);
}
function text(into: string[], content: string) {
into.push(content);
}
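
A minimal sketch of the hover helper (not part of the diff; the schema and cursor position are invented):

import {buildSchema} from 'graphql';
import {Position} from 'graphql-language-service-utils';
import {getHoverInformation} from './getHoverInformation';

const schema = buildSchema('type Query { hello: String }');
// Cursor resting on the `hello` field (line 0, character 9).
const hover = getHoverInformation(schema, 'query { hello }', new Position(0, 9));
console.log(hover); // should read roughly "Query.hello: String"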


@ -0,0 +1,121 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
import type {
Outline,
TextToken,
TokenKind,
} from 'graphql-language-service-types';
import {Kind, parse, visit} from 'graphql';
import {offsetToPosition} from 'graphql-language-service-utils';
const {INLINE_FRAGMENT} = Kind;
const OUTLINEABLE_KINDS = {
Field: true,
OperationDefinition: true,
Document: true,
SelectionSet: true,
Name: true,
FragmentDefinition: true,
FragmentSpread: true,
InlineFragment: true,
};
type OutlineTreeConverterType = {[name: string]: Function};
export function getOutline(queryText: string): ?Outline {
let ast;
try {
ast = parse(queryText);
} catch (error) {
return null;
}
const visitorFns = outlineTreeConverter(queryText);
const outlineTrees = visit(ast, {
leave(node) {
if (
OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
visitorFns[node.kind]
) {
return visitorFns[node.kind](node);
}
return null;
},
});
return {outlineTrees};
}
function outlineTreeConverter(docText: string): OutlineTreeConverterType {
const meta = node => ({
representativeName: node.name,
startPosition: offsetToPosition(docText, node.loc.start),
endPosition: offsetToPosition(docText, node.loc.end),
children: node.selectionSet || [],
});
return {
Field: node => {
const tokenizedText = node.alias
? [buildToken('plain', node.alias), buildToken('plain', ': ')]
: [];
tokenizedText.push(buildToken('plain', node.name));
return {tokenizedText, ...meta(node)};
},
OperationDefinition: node => ({
tokenizedText: [
buildToken('keyword', node.operation),
buildToken('whitespace', ' '),
buildToken('class-name', node.name),
],
...meta(node),
}),
Document: node => node.definitions,
SelectionSet: node =>
concatMap(node.selections, child => {
return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
}),
Name: node => node.value,
FragmentDefinition: node => ({
tokenizedText: [
buildToken('keyword', 'fragment'),
buildToken('whitespace', ' '),
buildToken('class-name', node.name),
],
...meta(node),
}),
FragmentSpread: node => ({
tokenizedText: [
buildToken('plain', '...'),
buildToken('class-name', node.name),
],
...meta(node),
}),
InlineFragment: node => node.selectionSet,
};
}
function buildToken(kind: TokenKind, value: string): TextToken {
return {kind, value};
}
function concatMap(arr: Array<any>, fn: Function): Array<any> {
const res = [];
for (let i = 0; i < arr.length; i++) {
const x = fn(arr[i], i);
if (Array.isArray(x)) {
res.push(...x);
} else {
res.push(x);
}
}
return res;
}
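
For reference, a sketch of what getOutline returns (not part of the diff):

import {getOutline} from './getOutline';

const outline = getOutline('query Hero { hero { name } }');
// outline.outlineTrees holds OperationDefinition and Field entries, each
// with tokenizedText plus startPosition/endPosition derived from node.loc.
console.log(JSON.stringify(outline, null, 2));

// A syntactically invalid document simply yields null:
console.log(getOutline('query {')); // null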


@ -0,0 +1,31 @@
/**
* Copyright (c) Facebook, Inc.
* All rights reserved.
*
* This source code is licensed under the license found in the
* LICENSE file in the root directory of this source tree.
*
* @flow
*/
export {
getDefinitionState,
getFieldDef,
forEachState,
objectValues,
hintList,
} from './autocompleteUtils';
export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
export {
LANGUAGE,
getDefinitionQueryResultForFragmentSpread,
getDefinitionQueryResultForDefinitionNode,
} from './getDefinition';
export {getDiagnostics, validateQuery} from './getDiagnostics';
export {getOutline} from './getOutline';
export {getHoverInformation} from './getHoverInformation';
export {GraphQLLanguageService} from './GraphQLLanguageService';
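
Assuming this index is consumed under the upstream package name graphql-language-service-interface (an assumption — the vendored import path may differ), a consumer needs only one import:

import {
  getAutocompleteSuggestions,
  getDiagnostics,
  getHoverInformation,
  getOutline,
} from 'graphql-language-service-interface';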

(Binary file not shown: new image, 31 KiB)

(Binary file not shown: new image, 15 KiB)


@ -0,0 +1,15 @@
{
"short_name": "Super Graph",
"name": "Super Graph - GraphQL API for Rails",
"icons": [
{
"src": "favicon.ico",
"sizes": "64x64 32x32 24x24 16x16",
"type": "image/x-icon"
}
],
"start_url": ".",
"display": "standalone",
"theme_color": "#000000",
"background_color": "#ffffff"
}

(Binary image file changed: 2.6 KiB before and after)

cmd/main.go (new file, 45 lines)

@ -0,0 +1,45 @@
// Main package for the Super Graph service and command line tooling
/*
Super Graph
For documentation, visit https://supergraph.dev
Commit SHA-1 :
Commit timestamp :
Branch :
Go version : go1.14
Licensed under the Apache Public License 2.0
Copyright 2020, Vikram Rangnekar.
Usage:
super-graph [command]
Available Commands:
conf:dump Dump config to file
db:create Create database
db:drop Drop database
db:migrate Migrate the database
db:new Generate a new migration
db:reset Reset database
db:seed Run the seed script to seed the database
db:setup Setup database
db:status Print current migration status
help Help about any command
new Create a new application
serv Run the super-graph service
version Super Graph binary version information
Flags:
-h, --help help for super-graph
--path string path to config files (default "./config")
Use "super-graph [command] --help" for more information about a command.
*/
package main
import "github.com/dosco/super-graph/cmd/internal/serv"
func main() {
serv.Cmd()
}

cmd/scripts/start.sh (new executable file, 13 lines)

@ -0,0 +1,13 @@
#!/bin/sh
# Invoked as `start.sh secrets ...`: hand the remaining arguments to sops.
# Otherwise run the given command, wrapped in `sops exec-env` when an
# encrypted secrets file is present in ./config.
if [ "$1" = "secrets" ]
then
  shift
  ./sops --config ./config "$@"
  exit 0
fi
if test -f "./config/$SECRETS_FILE"
then
  ./sops --config ./config exec-env "./config/$SECRETS_FILE" "$*"
else
  "$@"
fi


@ -2,7 +2,7 @@ app_name: "Super Graph Development"
host_port: 0.0.0.0:8080
web_ui: true
# debug, info, warn, error, fatal, panic
# debug, error, warn, info, none
log_level: "debug"
# enable or disable http compression (uses gzip)
@ -32,6 +32,19 @@ reload_on_config_change: true
# Path pointing to where the migrations can be found
migrations_path: ./config/migrations
# Secret key for general encryption operations like
# encrypting the cursor data
secret_key: supercalifajalistics
# CORS: A list of origins a cross-domain request can be executed from.
# If the special * value is present in the list, all origins will be allowed.
# An origin may contain a wildcard (*) to replace 0 or more
# characters (i.e.: http://*.domain.com).
cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
cors_debug: true
# Postgres related environment Variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
@ -89,7 +102,7 @@ database:
port: 5432
dbname: app_development
user: postgres
password: ''
password: postgres
#schema: "public"
#pool_size: 10
@ -167,10 +180,13 @@ roles:
block: false
- name: deals
query:
limit: 3
columns: ["name", "description" ]
aggregation: false
- name: purchases
query:
limit: 3
aggregation: false
- name: user
@ -183,12 +199,10 @@ roles:
query:
limit: 50
filters: ["{ user_id: { eq: $user_id } }"]
columns: ["id", "name", "description", "search_rank", "search_headline_description" ]
disable_functions: false
insert:
filters: ["{ user_id: { eq: $user_id } }"]
columns: ["id", "name", "description" ]
presets:
- user_id: "$user_id"
- created_at: "now"


@ -6,7 +6,7 @@ app_name: "Super Graph Production"
host_port: 0.0.0.0:8080
web_ui: false
# debug, info, warn, error, fatal, panic, disable
# debug, error, warn, info, none
log_level: "info"
# enable or disable http compression (uses gzip)
@ -32,6 +32,10 @@ enable_tracing: true
# Path pointing to where the migrations can be found
# migrations_path: migrations
# Secret key for general encryption operations like
# encrypting the cursor data
# secret_key: supercalifajalistics
# Postgres related environment Variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
@ -50,7 +54,7 @@ database:
port: 5432
dbname: app_production
user: postgres
password: ''
password: postgres
#pool_size: 10
#max_retries: 0
#log_level: "debug"

(Some files were not shown because too many files have changed in this diff.)