Move license from MIT to Apache 2.0. Add Makefile

Vikram Rangnekar
2019-11-28 01:25:46 -05:00
parent aff2a13ba4
commit 9ebd03fa8c
42 changed files with 885 additions and 210 deletions

View File

@ -220,6 +220,8 @@ func (al *allowList) load() {
}
func (al *allowList) save(item *allowItem) {
var err error
item.hash = gqlHash(item.gql, item.vars, "")
item.name = gqlName(item.gql)
@ -271,22 +273,35 @@ func (al *allowList) save(item *allowItem) {
k := keys[i]
v := urlMap[k]
f.WriteString(fmt.Sprintf("# %s\n\n", k))
if _, err := f.WriteString(fmt.Sprintf("# %s\n\n", k)); err != nil {
logger.Error().Err(err).Send()
return
}
for i := range v {
if len(v[i].vars) != 0 && bytes.Equal(v[i].vars, []byte("{}")) == false {
if len(v[i].vars) != 0 && !bytes.Equal(v[i].vars, []byte("{}")) {
vj, err := json.MarshalIndent(v[i].vars, "", "\t")
if err != nil {
logger.Warn().Err(err).Msg("Failed to write allow list 'vars' to file")
continue
}
f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
if err != nil {
logger.Error().Err(err).Send()
return
}
}
if v[i].gql[0] == '{' {
f.WriteString(fmt.Sprintf("query %s\n\n", v[i].gql))
_, err = f.WriteString(fmt.Sprintf("query %s\n\n", v[i].gql))
} else {
f.WriteString(fmt.Sprintf("%s\n\n", v[i].gql))
_, err = f.WriteString(fmt.Sprintf("%s\n\n", v[i].gql))
}
if err != nil {
logger.Error().Err(err).Send()
return
}
}
}
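
Each f.WriteString call in the save path above now has its error checked, returning on the first failure. For comparison, a common Go idiom for collapsing such repeated checks is an error-capturing writer; the snippet below is an illustrative sketch only (errWriter and writef are hypothetical names, not part of this commit):

package main

import (
	"fmt"
	"io"
	"os"
)

// errWriter remembers the first write error and turns later writes into
// no-ops, so a single check at the end covers the whole sequence.
type errWriter struct {
	w   io.Writer
	err error
}

func (ew *errWriter) writef(format string, args ...interface{}) {
	if ew.err != nil {
		return
	}
	_, ew.err = fmt.Fprintf(ew.w, format, args...)
}

func main() {
	ew := &errWriter{w: os.Stdout}
	ew.writef("# %s\n\n", "example-key")
	ew.writef("query %s\n\n", "{ users { id } }")
	if ew.err != nil {
		fmt.Fprintln(os.Stderr, "write failed:", ew.err)
	}
}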

View File

@ -11,8 +11,7 @@ import (
const (
authHeader = "Authorization"
jwtBase int = iota
jwtAuth0
jwtAuth0 int = iota + 1
)
func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
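
Within a const block, iota is the index of the constant spec, so with authHeader occupying index 0 as shown above, the rewritten jwtAuth0 int = iota + 1 still evaluates to 2, the same value the removed jwtBase/jwtAuth0 pair produced. A minimal check, assuming the block contains only the entries visible in this hunk:

package main

import "fmt"

const (
	authHeader = "Authorization"
	jwtAuth0   int = iota + 1 // iota is 1 on this line, so jwtAuth0 == 2
)

func main() {
	fmt.Println(authHeader, jwtAuth0) // Authorization 2
}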

View File

@ -4,6 +4,7 @@ import (
"context"
"fmt"
"os"
"runtime"
"strings"
"github.com/dosco/super-graph/psql"
@ -21,6 +22,14 @@ const (
serverName = "Super Graph"
)
var (
// These variables are set using -ldflags
version string
gitBranch string
lastCommitSHA string
lastCommitTime string
)
var (
logger zerolog.Logger
errlog zerolog.Logger
@ -133,6 +142,12 @@ e.g. db:migrate -+1
Run: cmdConfDump,
})
rootCmd.AddCommand(&cobra.Command{
Use: "version",
Short: "Super Graph binary version information",
Run: cmdVersion,
})
rootCmd.Flags().StringVar(&confPath,
"path", "./config", "path to config files")
@ -169,7 +184,10 @@ func initConf() (*config, error) {
}
vi.SetConfigName(getConfigName())
vi.MergeInConfig()
if err := vi.MergeInConfig(); err != nil {
return nil, err
}
}
c := &config{}
@ -290,3 +308,28 @@ func initConfOnce() {
}
}
}
func cmdVersion(cmd *cobra.Command, args []string) {
fmt.Printf("\n%s\n", BuildDetails())
}
func BuildDetails() string {
return fmt.Sprintf(`
Super Graph %v
Commit SHA-1 : %v
Commit timestamp : %v
Branch : %v
Go version : %v
For documentation, visit https://supergraph.dev
Licensed under the Apache Public License 2.0
Copyright 2015-2019 Vikram Rangnekar.
`,
version,
lastCommitSHA,
lastCommitTime,
gitBranch,
runtime.Version())
}
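
As the comment notes, version, gitBranch, lastCommitSHA and lastCommitTime are meant to be injected at build time with -ldflags. A typical invocation, assuming these variables live in the repository's serv package (the package most of this diff touches), would pass flags of the form -X github.com/dosco/super-graph/serv.version=v0.12.0 -X github.com/dosco/super-graph/serv.gitBranch=$(git rev-parse --abbrev-ref HEAD) to go build. The Makefile added by this commit presumably wires this up, but its contents are not shown in this diff.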

View File

@ -9,7 +9,7 @@ import (
func cmdConfDump(cmd *cobra.Command, args []string) {
if len(args) != 1 {
cmd.Help()
cmd.Help() //nolint: errcheck
os.Exit(1)
}

View File

@ -14,19 +14,6 @@ import (
"github.com/spf13/cobra"
)
var sampleMigration = `-- This is a sample migration.
create table users(
id serial primary key,
fullname varchar not null,
email varchar not null
);
---- create above / drop below ----
drop table users;
`
var newMigrationText = `-- Write your migrate up statements here
---- create above / drop below ----
@ -48,7 +35,7 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
return
}
if os.IsNotExist(err) == false {
if !os.IsNotExist(err) {
errlog.Fatal().Err(err).Msgf("unable to check if '%s' exists", sfile)
}
@ -108,7 +95,7 @@ func cmdDBDrop(cmd *cobra.Command, args []string) {
func cmdDBNew(cmd *cobra.Command, args []string) {
if len(args) != 1 {
cmd.Help()
cmd.Help() //nolint: errcheck
os.Exit(1)
}
@ -142,7 +129,7 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
func cmdDBMigrate(cmd *cobra.Command, args []string) {
if len(args) == 0 {
cmd.Help()
cmd.Help() //nolint: errcheck
os.Exit(1)
}
@ -211,7 +198,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
err = m.MigrateTo(currentVersion + mustParseDestination(dest[1:]))
} else {
cmd.Help()
cmd.Help() //nolint: errcheck
os.Exit(1)
}

View File

@ -16,7 +16,7 @@ import (
func cmdNew(cmd *cobra.Command, args []string) {
if len(args) != 1 {
cmd.Help()
cmd.Help() //nolint: errcheck
os.Exit(1)
}
@ -115,13 +115,17 @@ func (t *Templ) get(name string) ([]byte, error) {
b := bytes.Buffer{}
tmpl := fasttemplate.New(v, "{%", "%}")
tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
_, err := tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
if val, ok := t.data[strings.TrimSpace(tag)]; ok {
return w.Write([]byte(val))
}
return 0, fmt.Errorf("unknown template variable '%s'", tag)
})
if err != nil {
return nil, err
}
return b.Bytes(), nil
}
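
fasttemplate's ExecuteFunc returns (int64, error), and the error comes from the tag callback, so the change above surfaces the "unknown template variable" failure instead of discarding it. A self-contained sketch of the same pattern, using made-up example data:

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"

	"github.com/valyala/fasttemplate"
)

func main() {
	data := map[string]string{"app_name": "Super Graph"}

	var b bytes.Buffer
	tmpl := fasttemplate.New("app_name: {% app_name %}\n", "{%", "%}")

	// ExecuteFunc propagates the first error returned by the tag callback,
	// which is what the updated Templ.get now checks.
	_, err := tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
		if val, ok := data[strings.TrimSpace(tag)]; ok {
			return w.Write([]byte(val))
		}
		return 0, fmt.Errorf("unknown template variable '%s'", tag)
	})
	if err != nil {
		fmt.Println("template error:", err)
		return
	}
	fmt.Print(b.String())
}
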
@ -133,7 +137,7 @@ func ifNotExists(filePath string, doFn func(string) error) {
return
}
if os.IsNotExist(err) == false {
if !os.IsNotExist(err) {
errlog.Fatal().Err(err).Msgf("unable to check if '%s' exists", filePath)
}

View File

@ -43,7 +43,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
vm.Set("graphql", graphQLFunc)
console := vm.NewObject()
console.Set("log", logFunc)
console.Set("log", logFunc) //nolint: errcheck
vm.Set("console", console)
fake := vm.NewObject()
@ -100,7 +100,7 @@ func graphQLFunc(query string, data interface{}, opt map[string]string) map[stri
if err != nil {
errlog.Fatal().Err(err).Send()
}
defer tx.Rollback(c)
defer tx.Rollback(c) //nolint: errcheck
if conf.DB.SetUserID {
if err := setLocalUserID(c, tx); err != nil {
@ -128,6 +128,7 @@ func graphQLFunc(query string, data interface{}, opt map[string]string) map[stri
return val
}
//nolint: errcheck
func logFunc(args ...interface{}) {
for _, arg := range args {
if _, ok := arg.(map[string]interface{}); ok {
@ -144,6 +145,7 @@ func logFunc(args ...interface{}) {
}
}
//nolint: errcheck
func setFakeFuncs(f *goja.Object) {
gofakeit.Seed(0)

View File

@ -173,9 +173,10 @@ func newConfig(name string) *viper.Viper {
vi.SetDefault("database.schema", "public")
vi.SetDefault("env", "development")
vi.BindEnv("env", "GO_ENV")
vi.BindEnv("HOST", "HOST")
vi.BindEnv("PORT", "PORT")
vi.BindEnv("env", "GO_ENV") //nolint: errcheck
vi.BindEnv("HOST", "HOST") //nolint: errcheck
vi.BindEnv("PORT", "PORT") //nolint: errcheck
vi.SetDefault("auth.rails.max_idle", 80)
vi.SetDefault("auth.rails.max_active", 12000)

View File

@ -16,10 +16,6 @@ import (
"github.com/valyala/fasttemplate"
)
const (
empty = ""
)
type coreContext struct {
req gqlReq
res gqlResp
@ -88,7 +84,7 @@ func (c *coreContext) resolvePreparedSQL() ([]byte, *stmt, error) {
if tx, err = db.Begin(c); err != nil {
return nil, nil, err
}
defer tx.Rollback(c)
defer tx.Rollback(c) //nolint: errcheck
}
if conf.DB.SetUserID {
@ -173,7 +169,7 @@ func (c *coreContext) resolveSQL() ([]byte, *stmt, error) {
if tx, err = db.Begin(c); err != nil {
return nil, nil, err
}
defer tx.Rollback(c)
defer tx.Rollback(c) //nolint: errcheck
}
if conf.DB.SetUserID {
@ -246,7 +242,7 @@ func (c *coreContext) resolveSQL() ([]byte, *stmt, error) {
}
}
if conf.Production == false {
if !conf.Production {
_allowList.add(&c.req)
}
@ -348,7 +344,7 @@ func parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
for i := range sel {
s := &sel[i]
if isSkipped(skipped, uint32(s.ID)) == false {
if !isSkipped(skipped, uint32(s.ID)) {
continue
}
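
Several hunks in this file (and in the prepared-statement code further down) add //nolint: errcheck to defer tx.Rollback(c). With pgx v4, rolling back a transaction that has already been committed returns pgx.ErrTxClosed, so on the happy path the deferred call is a deliberate no-op and its error can be ignored. A hypothetical helper showing the same begin / defer-rollback / commit shape (a sketch, not code from this commit):

package dbutil

import (
	"context"

	"github.com/jackc/pgx/v4"
)

// withTx runs fn inside a transaction. After a successful Commit the deferred
// Rollback returns pgx.ErrTxClosed, which is why its error is ignored.
func withTx(ctx context.Context, conn *pgx.Conn, fn func(pgx.Tx) error) error {
	tx, err := conn.Begin(ctx)
	if err != nil {
		return err
	}
	defer tx.Rollback(ctx) //nolint: errcheck

	if err := fn(tx); err != nil {
		return err
	}
	return tx.Commit(ctx)
}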

View File

@ -59,9 +59,7 @@ func buildRoleStmt(gql, vars []byte, role string) ([]stmt, error) {
// For the 'anon' role in production only compile
// queries for tables defined in the config file.
if conf.Production &&
ro.Name == "anon" &&
hasTablesWithConfig(qc, ro) == false {
if conf.Production && ro.Name == "anon" && !hasTablesWithConfig(qc, ro) {
return nil, errors.New("query contains tables with no 'anon' role config")
}
@ -126,6 +124,7 @@ func buildMultiStmt(gql, vars []byte) ([]stmt, error) {
return stmts, nil
}
//nolint: errcheck
func renderUserQuery(
stmts []stmt, vars map[string]json.RawMessage) (string, error) {

View File

@ -10,7 +10,7 @@ func TestFuzzCrashers(t *testing.T) {
}
for _, f := range crashers {
gqlName(f)
_ = gqlName(f)
gqlHash(f, nil, "")
}
}

View File

@ -8,8 +8,6 @@ import (
"net/http"
"strings"
"time"
"github.com/gorilla/websocket"
)
const (
@ -20,7 +18,6 @@ const (
)
var (
upgrader = websocket.Upgrader{}
errUnauthorized = errors.New("not authorized")
)
@ -33,8 +30,6 @@ type gqlReq struct {
hdr http.Header
}
type variables map[string]json.RawMessage
type gqlResp struct {
Error string `json:"message,omitempty"`
Data json.RawMessage `json:"data,omitempty"`
@ -69,7 +64,8 @@ type resolver struct {
func apiv1Http(w http.ResponseWriter, r *http.Request) {
ctx := &coreContext{Context: r.Context()}
if conf.AuthFailBlock && authCheck(ctx) == false {
//nolint: errcheck
if conf.AuthFailBlock && !authCheck(ctx) {
w.WriteHeader(http.StatusUnauthorized)
json.NewEncoder(w).Encode(gqlResp{Error: errUnauthorized.Error()})
return
@ -97,6 +93,7 @@ func apiv1Http(w http.ResponseWriter, r *http.Request) {
err = ctx.handleReq(w, r)
//nolint: errcheck
if err == errUnauthorized {
w.WriteHeader(http.StatusUnauthorized)
json.NewEncoder(w).Encode(gqlResp{Error: err.Error()})
@ -110,6 +107,7 @@ func apiv1Http(w http.ResponseWriter, r *http.Request) {
}
}
//nolint: errcheck
func errorResp(w http.ResponseWriter, err error) {
json.NewEncoder(w).Encode(gqlResp{Error: err.Error()})
}

View File

@ -2,6 +2,7 @@ package serv
import "net/http"
//nolint: errcheck
func introspect(w http.ResponseWriter) {
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{

View File

@ -30,7 +30,7 @@ func initPreparedList() {
if err != nil {
errlog.Fatal().Err(err).Send()
}
defer tx.Rollback(c)
defer tx.Rollback(c) //nolint: errcheck
err = prepareRoleStmt(c, tx)
if err != nil {
@ -74,7 +74,7 @@ func prepareStmt(c context.Context, gql string, vars []byte) error {
if err != nil {
return err
}
defer tx.Rollback(c)
defer tx.Rollback(c) //nolint: errcheck
switch qt {
case qcode.QTQuery:
@ -141,6 +141,7 @@ func prepare(c context.Context, tx pgx.Tx, st *stmt, key string) error {
return nil
}
// nolint: errcheck
func prepareRoleStmt(c context.Context, tx pgx.Tx) error {
if len(conf.RolesQuery) == 0 {
return nil

View File

@ -108,7 +108,7 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
// Ensure that we use the correct events, as they are not uniform across
// platforms. See https://github.com/fsnotify/fsnotify/issues/74
if conf.Production == false && strings.HasSuffix(event.Name, "/allow.list") {
if !conf.Production && strings.HasSuffix(event.Name, "/allow.list") {
continue
}

serv/rice-box.go (new file, 345 additions)

File diff suppressed because one or more lines are too long

View File

@ -76,12 +76,16 @@ func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
delete.Filters = blockFilter
}
qc.AddRole(r.Name, t.Name, qcode.TRConfig{
err := qc.AddRole(r.Name, t.Name, qcode.TRConfig{
Query: query,
Insert: insert,
Update: update,
Delete: delete,
})
if err != nil {
return nil, nil, err
}
}
}
@ -94,7 +98,7 @@ func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
}
func initWatcher(cpath string) {
if conf.WatchAndReload == false {
if !conf.WatchAndReload {
return
}
@ -151,6 +155,8 @@ func startHTTP() {
})
logger.Info().
Str("version", version).
Str("git_branch", gitBranch).
Str("host_post", hostPort).
Str("app_name", conf.AppName).
Str("env", conf.Env).

View File

@ -12,6 +12,7 @@ import (
"github.com/dosco/super-graph/jsn"
)
// nolint: errcheck
func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
h.WriteString(k1)
h.WriteString(k2)
@ -21,6 +22,7 @@ func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
return v
}
// nolint: errcheck
func gqlHash(b string, vars []byte, role string) string {
b = strings.TrimSpace(b)
h := sha1.New()
@ -64,7 +66,7 @@ func gqlHash(b string, vars []byte, role string) string {
} else {
starting = false
s = e
for e < len(b) && ws(b[e]) == false {
for e < len(b) && !ws(b[e]) {
e++
}
if e != 0 {
@ -81,7 +83,7 @@ func gqlHash(b string, vars []byte, role string) string {
io.WriteString(h, role)
}
if vars == nil || len(vars) == 0 {
if len(vars) == 0 {
return hex.EncodeToString(h.Sum(nil))
}
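
The last hunk drops the redundant nil test: in Go, len of a nil slice is 0, so len(vars) == 0 covers both the nil and the empty case. The // nolint: errcheck markers on mkkey and gqlHash reflect that hash.Hash's Write is documented to never return an error. A quick illustration of the slice simplification:

package main

import "fmt"

func main() {
	var vars []byte // nil slice

	// len of a nil slice is 0, so the old "vars == nil || len(vars) == 0"
	// is equivalent to the single check "len(vars) == 0".
	fmt.Println(vars == nil)    // true
	fmt.Println(len(vars) == 0) // true
}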