Compare commits

...

4 Commits

21 changed files with 194 additions and 130 deletions

View File

@@ -91,6 +91,7 @@ This compiler is what sits at the heart of Super Graph, with layers of useful fu
 - Database migrations tool
 - Database seeding tool
 - Works with Postgres and YugabyteDB
+- OpenCensus Support: Zipkin, Prometheus, X-Ray, Stackdriver

 ## Documentation

View File

@@ -77,6 +77,8 @@ cors_debug: true
   # exporter: "zipkin"
   # endpoint: "http://zipkin:9411/api/v2/spans"
   # sample: 0.2
+  # include_query: false
+  # include_params: false

 auth:
   # Can be 'rails' or 'jwt'

View File

@@ -19,7 +19,7 @@ func BenchmarkGraphQL(b *testing.B) {
     defer db.Close()

     // mock.ExpectQuery(`^SELECT jsonb_build_object`).WithArgs()
-    c := &Config{DefaultBlock: true}
+    c := &Config{}
     sg, err := newSuperGraph(c, db, psql.GetTestDBInfo())
     if err != nil {
         b.Fatal(err)

View File

@@ -30,11 +30,12 @@ type Config struct {
     // or other database functions
     SetUserID bool `mapstructure:"set_user_id"`

-    // DefaultBlock ensures only tables configured under the `anon` role
-    // config can be queries if the `anon` role. For example if the table
-    // `users` is not listed under the anon role then it will be filtered
-    // out of any unauthenticated queries that mention it.
-    DefaultBlock bool `mapstructure:"default_block"`
+    // DefaultAllow reverses the blocked-by-default behaviour for queries in
+    // anonymous mode (anon role).
+    // For example, if the table `users` is not listed under the anon role,
+    // unauthenticated queries that mention it are blocked by default; this
+    // setting reverses that behavior. (!!! Use with caution !!!)
+    DefaultAllow bool `mapstructure:"default_allow"`

     // Vars is a map of hardcoded variables that can be leveraged in your
     // queries (eg variable admin_id will be $admin_id in the query)
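With `DefaultBlock` gone, blocking is now the implicit default for the anon role and `DefaultAllow` is the opt-out. A minimal sketch of how a library user might enable it, assuming the core package's `NewSuperGraph(conf, db)` constructor and an illustrative Postgres DSN:

```go
package main

import (
	"database/sql"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	// Assumed DSN; any database/sql Postgres driver works here.
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/app_dev")
	if err != nil {
		log.Fatal(err)
	}

	conf := &core.Config{
		// Anon queries may now reach tables not explicitly listed under the
		// anon role. Use with caution.
		DefaultAllow: true,
	}

	sg, err := core.NewSuperGraph(conf, db)
	if err != nil {
		log.Fatal(err)
	}
	_ = sg
}
```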

View File

@@ -93,7 +93,6 @@ func (sg *SuperGraph) initCompilers() error {
     }

     sg.qc, err = qcode.NewCompiler(qcode.Config{
-        DefaultBlock: sg.conf.DefaultBlock,
         Blocklist: sg.conf.Blocklist,
     })
     if err != nil {

View File

@@ -70,8 +70,8 @@ func (sg *SuperGraph) initConfig() error {
         sg.roles["user"] = &ur
     }

-    // If anon role is not defined and DefaultBlock is not then then create it
-    if _, ok := sg.roles["anon"]; !ok && !c.DefaultBlock {
+    // If anon role is not defined then create it
+    if _, ok := sg.roles["anon"]; !ok {
         ur := Role{
             Name: "anon",
             tm:   make(map[string]*RoleTable),
@@ -206,7 +206,7 @@ func addForeignKey(di *psql.DBInfo, c Column, t Table) error {
 func addRoles(c *Config, qc *qcode.Compiler) error {
     for _, r := range c.Roles {
         for _, t := range r.Tables {
-            if err := addRole(qc, r, t); err != nil {
+            if err := addRole(qc, r, t, c.DefaultAllow); err != nil {
                 return err
             }
         }
@@ -215,9 +215,13 @@ func addRoles(c *Config, qc *qcode.Compiler) error {
     return nil
 }

-func addRole(qc *qcode.Compiler, r Role, t RoleTable) error {
+func addRole(qc *qcode.Compiler, r Role, t RoleTable, defaultAllow bool) error {
     ro := true // read-only

+    if defaultAllow {
+        ro = false
+    }
+
     if r.Name != "anon" {
         ro = false
     }

View File

@@ -50,7 +50,7 @@ func DropSchema(t *testing.T, db *sql.DB) {
 }

 func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
-    config := core.Config{DefaultBlock: true}
+    config := core.Config{}
     config.UseAllowList = false
     config.AllowListFile = "./allow.list"
     config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`

View File

@@ -17,10 +17,6 @@ const (
     closeBlock = 500
 )

-var (
-    ErrAllTablesSkipped = errors.New("all tables skipped. cannot render query")
-)
-
 type Variables map[string]json.RawMessage

 type Config struct {
@@ -92,30 +88,35 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
     io.WriteString(c.w, `SELECT jsonb_build_object(`)
     for _, id := range qc.Roots {
-        root := &qc.Selects[id]
-        if root.SkipRender || len(root.Cols) == 0 {
-            continue
-        }
-
-        st.Push(root.ID + closeBlock)
-        st.Push(root.ID)
-
         if i != 0 {
             io.WriteString(c.w, `, `)
         }

-        c.renderRootSelect(root)
+        root := &qc.Selects[id]
+
+        if root.SkipRender || len(root.Cols) == 0 {
+            squoted(c.w, root.FieldName)
+            io.WriteString(c.w, `, `)
+            io.WriteString(c.w, `NULL`)
+        } else {
+            st.Push(root.ID + closeBlock)
+            st.Push(root.ID)
+            c.renderRootSelect(root)
+        }
+
         i++
     }

-    io.WriteString(c.w, `) as "__root" FROM `)
-
-    if i == 0 {
-        return 0, ErrAllTablesSkipped
-    }
-
     var ignored uint32

+    if st.Len() != 0 {
+        io.WriteString(c.w, `) as "__root" FROM `)
+    } else {
+        io.WriteString(c.w, `) as "__root"`)
+        return ignored, nil
+    }
+
     for {
         if st.Len() == 0 {
             break
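The compiler now emits a `NULL` entry for roots the role cannot render instead of returning `ErrAllTablesSkipped`. A standalone sketch of the same pattern (names and the sub-select alias are illustrative, not the internal API), showing the SQL shape this produces:

```go
package main

import (
	"fmt"
	"strings"
)

// rootSel is a stand-in for the compiler's root Select: a field name plus a
// flag saying whether the current role may render it.
type rootSel struct {
	FieldName  string
	SkipRender bool
}

func renderRoots(roots []rootSel) string {
	var b strings.Builder
	b.WriteString(`SELECT jsonb_build_object(`)

	for i, r := range roots {
		if i != 0 {
			b.WriteString(`, `)
		}
		if r.SkipRender {
			// Blocked tables still appear in the result, but as NULL.
			fmt.Fprintf(&b, `'%s', NULL`, r.FieldName)
		} else {
			// The real compiler renders a correlated sub-select here.
			fmt.Fprintf(&b, `'%s', "__sj_%d"."json"`, r.FieldName, i)
		}
	}

	b.WriteString(`) as "__root"`)
	return b.String()
}

func main() {
	// e.g. `products` is visible to anon, `users` is not.
	fmt.Println(renderRoots([]rootSel{
		{FieldName: "products"},
		{FieldName: "users", SkipRender: true},
	}))
	// SELECT jsonb_build_object('products', "__sj_0"."json", 'users', NULL) as "__root"
}
```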

View File

@@ -8,7 +8,6 @@ import (
 type Config struct {
     Blocklist []string
-    DefaultBlock bool
 }

 type QueryConfig struct {

View File

@@ -180,7 +180,7 @@ var expPool = sync.Pool{
 }

 func NewCompiler(c Config) (*Compiler, error) {
-    co := &Compiler{db: c.DefaultBlock}
+    co := &Compiler{}
     co.tr = make(map[string]map[string]*trval)
     co.bl = make(map[string]struct{}, len(c.Blocklist))
@@ -333,11 +333,13 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
         }

         trv := com.getRole(role, field.Name)
+        skipRender := false
+
+        if trv != nil {
             switch action {
             case QTQuery:
                 if trv.query.block {
-                    continue
+                    skipRender = true
                 }

             case QTInsert:
@@ -356,16 +358,42 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
                 }
             }
+        } else if role == "anon" {
+            skipRender = true
+        }

         selects = append(selects, Select{
             ID:         id,
             ParentID:   parentID,
             Name:       field.Name,
-            Children:   make([]int32, 0, 5),
-            Allowed:    trv.allowedColumns(action),
-            Functions:  true,
+            SkipRender: skipRender,
         })
         s := &selects[(len(selects) - 1)]

+        if len(field.Alias) != 0 {
+            s.FieldName = field.Alias
+        } else {
+            s.FieldName = s.Name
+        }
+
+        if s.ParentID == -1 {
+            qc.Roots = append(qc.Roots, s.ID)
+        } else {
+            p := &selects[s.ParentID]
+            p.Children = append(p.Children, s.ID)
+        }
+
+        if skipRender {
+            id++
+            continue
+        }
+
+        s.Children = make([]int32, 0, 5)
+        s.Functions = true
+
+        if trv != nil {
+            s.Allowed = trv.allowedColumns(action)
+
             switch action {
             case QTQuery:
                 s.Functions = !trv.query.disable.funcs
@@ -379,11 +407,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
                 s.PresetMap = trv.update.psmap
                 s.PresetList = trv.update.pslist
             }
-
-        if len(field.Alias) != 0 {
-            s.FieldName = field.Alias
-        } else {
-            s.FieldName = s.Name
         }

         err := com.compileArgs(qc, s, field.Args, role)
@@ -394,13 +417,6 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
         // Order is important AddFilters must come after compileArgs
         com.AddFilters(qc, s, role)

-        if s.ParentID == -1 {
-            qc.Roots = append(qc.Roots, s.ID)
-        } else {
-            p := &selects[s.ParentID]
-            p.Children = append(p.Children, s.ID)
-        }
-
         s.Cols = make([]Column, 0, len(field.Children))

         action = QTQuery
@@ -440,14 +456,10 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
 func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
     var fil *Exp
-    var nu bool // user required (or not) in this filter
+    var nu bool // need user_id (or not) in this filter

     if trv, ok := com.tr[role][sel.Name]; ok {
         fil, nu = trv.filter(qc.Type)
-
-    } else if com.db && role == "anon" {
-        // Tables not defined under the anon role will not be rendered
-        sel.SkipRender = true
     }

     if fil == nil {
@@ -838,14 +850,17 @@ func (com *Compiler) compileArgAfterBefore(sel *Select, arg *Arg, pt PagingType)
     return nil, false
 }

-var zeroTrv = &trval{}
+// var zeroTrv = &trval{}

 func (com *Compiler) getRole(role, field string) *trval {
     if trv, ok := com.tr[role][field]; ok {
         return trv
-    } else {
-        return zeroTrv
     }
+    return nil
+    // } else {
+    // return zeroTrv
+    // }
 }

 func AddFilter(sel *Select, fil *Exp) {
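The new `skipRender` flag is decided up front: a root is skipped when its role config blocks the query, or when the role is `anon` and the table has no config at all (the old `getRole` zero-value fallback is commented out, so a nil check takes its place). A hedged sketch of that query-path decision, with a simplified stand-in for the internal `trval` type:

```go
package main

import "fmt"

// roleConf is a simplified stand-in for qcode's internal *trval role config.
type roleConf struct {
	queryBlocked bool
}

// shouldSkipRender mirrors the decision added in compileQuery for queries: an
// explicit block in the role config skips the root, and for the anon role a
// missing config also skips it (which later becomes the NULL entry in SQL).
func shouldSkipRender(conf *roleConf, role string) bool {
	if conf != nil {
		return conf.queryBlocked
	}
	return role == "anon"
}

func main() {
	fmt.Println(shouldSkipRender(nil, "anon"))                           // true: anon + unconfigured table
	fmt.Println(shouldSkipRender(nil, "user"))                           // false: other roles fall through to defaults
	fmt.Println(shouldSkipRender(&roleConf{queryBlocked: true}, "user")) // true: explicitly blocked
}
```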

View File

@@ -11,7 +11,6 @@ import (
     "strings"

     "github.com/dosco/super-graph/core/internal/allow"
-    "github.com/dosco/super-graph/core/internal/psql"
     "github.com/dosco/super-graph/core/internal/qcode"
     "github.com/valyala/fasttemplate"
 )
@@ -103,9 +102,6 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
     // logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)

     stmts2, err := sg.buildRoleStmt(qb, vars, "anon")
-    if err == psql.ErrAllTablesSkipped {
-        return nil
-    }
     if err != nil {
         return err
     }
@@ -121,9 +117,6 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
         // logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)

         stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
-        if err == psql.ErrAllTablesSkipped {
-            continue
-        }
         if err != nil {
             return err
         }

View File

@@ -53,6 +53,7 @@ type Serv struct {
     // Telemetry struct contains OpenCensus metrics and tracing related config
     Telemetry struct {
         Debug    bool
+        Interval *time.Duration

         Metrics struct {
             Exporter string
             Endpoint string
@@ -64,6 +65,8 @@ type Serv struct {
             Exporter string
             Endpoint string
             Sample   string
+            IncludeQuery  bool `mapstructure:"include_query"`
+            IncludeParams bool `mapstructure:"include_params"`
         }
     }
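The two new tracing flags need explicit `mapstructure` tags because their YAML keys contain underscores. A small standalone sketch (the real config loading lives elsewhere in `internal/serv`; the struct and decode call here are only for illustration):

```go
package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// tracingConf mirrors just the tracing fields from the Serv telemetry
// struct; the real struct in internal/serv is larger.
type tracingConf struct {
	Exporter      string
	Endpoint      string
	Sample        string
	IncludeQuery  bool `mapstructure:"include_query"`
	IncludeParams bool `mapstructure:"include_params"`
}

func main() {
	// Roughly what the telemetry.tracing block of the YAML config decodes from.
	raw := map[string]interface{}{
		"exporter":       "zipkin",
		"endpoint":       "http://zipkin:9411/api/v2/spans",
		"sample":         "0.2",
		"include_query":  true,
		"include_params": false,
	}

	var tc tracingConf
	if err := mapstructure.Decode(raw, &tc); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", tc)
}
```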

View File

@@ -55,7 +55,7 @@ func cmdDBReset(cmd *cobra.Command, args []string) {
 func cmdDBCreate(cmd *cobra.Command, args []string) {
     initConfOnce()

-    db, err := initDB(conf, false)
+    db, err := initDB(conf, false, false)
     if err != nil {
         log.Fatalf("ERR failed to connect to database: %s", err)
     }
@@ -74,7 +74,7 @@ func cmdDBCreate(cmd *cobra.Command, args []string) {
 func cmdDBDrop(cmd *cobra.Command, args []string) {
     initConfOnce()

-    db, err := initDB(conf, false)
+    db, err := initDB(conf, false, false)
     if err != nil {
         log.Fatalf("ERR failed to connect to database: %s", err)
     }
@@ -132,7 +132,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
     initConfOnce()
     dest := args[0]

-    conn, err := initDB(conf, true)
+    conn, err := initDB(conf, true, false)
     if err != nil {
         log.Fatalf("ERR failed to connect to database: %s", err)
     }
@@ -224,7 +224,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
 func cmdDBStatus(cmd *cobra.Command, args []string) {
     initConfOnce()

-    db, err := initDB(conf, true)
+    db, err := initDB(conf, true, false)
     if err != nil {
         log.Fatalf("ERR failed to connect to database: %s", err)
     }

View File

@@ -26,10 +26,9 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
     if conf, err = initConf(); err != nil {
         log.Fatalf("ERR failed to read config: %s", err)
     }
-
     conf.Production = false

-    db, err = initDB(conf, true)
+    db, err = initDB(conf, true, false)
     if err != nil {
         log.Fatalf("ERR failed to connect to database: %s", err)
     }
@@ -80,6 +79,8 @@ func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[st
     if v, ok := opt["user_id"]; ok && len(v) != 0 {
         ct = context.WithValue(ct, core.UserIDKey, v)
+    } else {
+        ct = context.WithValue(ct, core.UserIDKey, "-1")
     }

     // var role string
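The seed helper now falls back to a user_id of "-1" when none is supplied, so seeded queries always carry a user value in the context. Outside the seeder, callers of the library pass a user id the same way. A hedged sketch (the query and id are made up; it assumes an already initialized `*core.SuperGraph`):

```go
package main

import (
	"context"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
)

// runAsUser shows the same context convention the seeder uses: the user id
// travels to SuperGraph via core.UserIDKey.
func runAsUser(sg *core.SuperGraph, userID string) {
	ctx := context.WithValue(context.Background(), core.UserIDKey, userID)

	res, err := sg.GraphQL(ctx, `query { products { id name } }`, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(res.Data))
}

func main() {} // wiring up the *core.SuperGraph is shown in the earlier sketch
```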

View File

@@ -19,7 +19,7 @@ func cmdServ(cmd *cobra.Command, args []string) {
     initWatcher()

-    db, err = initDB(conf, true)
+    db, err = initDB(conf, true, true)
     if err != nil {
         fatalInProd(err, "failed to connect to database")
     }

View File

@@ -112,7 +112,7 @@ func GetConfigName() string {
 }

 func (c *Config) telemetryEnabled() bool {
-    return c.Telemetry.Metrics.Exporter != "" || c.Telemetry.Tracing.Exporter != ""
+    return c.Telemetry.Debug || c.Telemetry.Metrics.Exporter != "" || c.Telemetry.Tracing.Exporter != ""
 }

 func (c *Config) relPath(p string) string {

View File

@@ -10,6 +10,8 @@ import (
     "github.com/dosco/super-graph/core"
     "github.com/dosco/super-graph/internal/serv/internal/auth"
     "github.com/rs/cors"
+    "go.opencensus.io/plugin/ochttp"
+    "go.opencensus.io/trace"
     "go.uber.org/zap"
 )
@@ -44,7 +46,7 @@ func apiV1Handler() http.Handler {
         AllowCredentials: true,
         Debug:            conf.DebugCORS,
     })
-    h = c.Handler(h)
+    return c.Handler(h)
 }

 return h
@@ -78,6 +80,22 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
     doLog := true
     res, err := sg.GraphQL(ct, req.Query, req.Vars)

+    if conf.telemetryEnabled() {
+        span := trace.FromContext(ct)
+
+        span.AddAttributes(
+            trace.StringAttribute("operation", res.OperationName()),
+            trace.StringAttribute("query_name", res.QueryName()),
+            trace.StringAttribute("role", res.Role()),
+        )
+
+        if err != nil {
+            span.AddAttributes(trace.StringAttribute("error", err.Error()))
+        }
+
+        ochttp.SetRoute(ct, apiRoute)
+    }
+
     if !conf.Production && res.QueryName() == introspectionQuery {
         doLog = false
     }

View File

@@ -111,13 +111,10 @@ func initConf() (*Config, error) {
     c.UseAllowList = true
 }

-    // In anon role block all tables that are not defined in the role
-    c.DefaultBlock = true
-
     return c, nil
 }

-func initDB(c *Config, useDB bool) (*sql.DB, error) {
+func initDB(c *Config, useDB, useTelemetry bool) (*sql.DB, error) {
     var db *sql.DB
     var err error
@@ -217,14 +214,35 @@ func initDB(c *Config, useDB bool) (*sql.DB, error) {
     // 	return errors.New("failed to open db")
     // }

-    if conf.telemetryEnabled() {
-        driverName, err = ocsql.Register(driverName, ocsql.WithAllTraceOptions(), ocsql.WithInstanceName(conf.AppName))
+    if useTelemetry && conf.telemetryEnabled() {
+        opts := ocsql.TraceOptions{
+            AllowRoot:    false,
+            Ping:         true,
+            RowsNext:     true,
+            RowsClose:    true,
+            RowsAffected: true,
+            LastInsertID: true,
+            Query:        conf.Telemetry.Tracing.IncludeQuery,
+            QueryParams:  conf.Telemetry.Tracing.IncludeParams,
+        }
+        opt := ocsql.WithOptions(opts)
+        name := ocsql.WithInstanceName(conf.AppName)
+
+        driverName, err = ocsql.Register(driverName, opt, name)
         if err != nil {
             return nil, fmt.Errorf("unable to register ocsql driver: %v", err)
         }
         ocsql.RegisterAllViews()
-        //defer ocsql.RecordStats(db, 2*time.Second)()
+
+        var interval time.Duration
+
+        if conf.Telemetry.Interval != nil {
+            interval = *conf.Telemetry.Interval
+        } else {
+            interval = 5 * time.Second
+        }
+
+        defer ocsql.RecordStats(db, interval)()

         log.Println("INF OpenCensus telemetry enabled")
     }
@@ -242,9 +260,5 @@ func initDB(c *Config, useDB bool) (*sql.DB, error) {
         return nil, fmt.Errorf("unable to open db connection: %v", err)
     }

-    if conf.telemetryEnabled() {
-        defer ocsql.RecordStats(db, 2*time.Second)()
-    }
-
     return db, nil
 }
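For reference, the same ocsql wiring in a standalone program (the option names and functions are the ones used above; the driver name, DSN, and instance name are assumptions):

```go
package main

import (
	"database/sql"
	"log"
	"time"

	"contrib.go.opencensus.io/integrations/ocsql"
	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	// Wrap the registered "pgx" driver with OpenCensus instrumentation.
	opts := ocsql.TraceOptions{
		Ping:         true,
		RowsNext:     true,
		RowsClose:    true,
		RowsAffected: true,
		LastInsertID: true,
		Query:        true,  // corresponds to telemetry.tracing.include_query
		QueryParams:  false, // corresponds to telemetry.tracing.include_params
	}

	driverName, err := ocsql.Register("pgx", ocsql.WithOptions(opts), ocsql.WithInstanceName("my-app"))
	if err != nil {
		log.Fatalf("unable to register ocsql driver: %v", err)
	}
	ocsql.RegisterAllViews()

	db, err := sql.Open(driverName, "postgres://postgres:@localhost:5432/app_dev")
	if err != nil {
		log.Fatal(err)
	}

	// RecordStats returns a stop function; keep recording until shutdown.
	stop := ocsql.RecordStats(db, 5*time.Second)
	defer stop()

	// ... use db ...
}
```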

File diff suppressed because one or more lines are too long

View File

@@ -13,6 +13,11 @@ import (
     rice "github.com/GeertJohan/go.rice"
     "github.com/NYTimes/gziphandler"
     "github.com/dosco/super-graph/internal/serv/internal/auth"
+    "go.opencensus.io/plugin/ochttp"
+)
+
+var (
+    apiRoute string = "/api/v1/graphql"
 )

 func initWatcher() {
@@ -76,6 +81,10 @@ func startHTTP() {
     MaxHeaderBytes: 1 << 20,
 }

+    if conf.telemetryEnabled() {
+        srv.Handler = &ochttp.Handler{Handler: routes}
+    }
+
     idleConnsClosed := make(chan struct{})
     go func() {
         sigint := make(chan os.Signal, 1)
@@ -114,8 +123,6 @@ func routeHandler() (http.Handler, error) {
         return mux, nil
     }

-    apiRoute := "/api/v1/graphql"
-
     if len(conf.APIPath) != 0 {
         apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
     }
@@ -178,6 +185,10 @@ func setActionRoutes(routes map[string]http.Handler) error {
         routes[p] = fn
     }

+    if conf.telemetryEnabled() {
+        routes[p] = ochttp.WithRouteTag(routes[p], p)
+    }
+
     if err != nil {
         return err
     }
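The server-side pieces above (wrapping the handler, tagging routes, annotating spans) combine roughly like this in a minimal standalone server; the handler body, attribute value, and port are assumptions:

```go
package main

import (
	"log"
	"net/http"

	"go.opencensus.io/plugin/ochttp"
	"go.opencensus.io/trace"
)

func main() {
	mux := http.NewServeMux()

	api := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		// Annotate the request span, as apiV1 now does with operation/role.
		span := trace.FromContext(r.Context())
		span.AddAttributes(trace.StringAttribute("query_name", "getProducts"))
		w.Write([]byte(`{"data":{}}`))
	})

	// Per-route tag, mirroring setActionRoutes / routeHandler above.
	mux.Handle("/api/v1/graphql", ochttp.WithRouteTag(api, "/api/v1/graphql"))

	srv := &http.Server{
		Addr: ":8080",
		// Wrap the whole mux so every request gets a server span and metrics.
		Handler: &ochttp.Handler{Handler: mux},
	}
	log.Fatal(srv.ListenAndServe())
}
```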

View File

@@ -57,7 +57,7 @@ func enableObservability(mux *http.ServeMux) (func(), error) {
     }

 case "":
-    log.Println("INF No OpenCensus metrics exporter initialized")
+    log.Println("WRN OpenCensus: no metrics exporter defined")

 default:
     err = fmt.Errorf("invalid metrics exporter")
@@ -96,14 +96,16 @@ func enableObservability(mux *http.ServeMux) (func(), error) {
     tex = zipkin.NewExporter(re, lep)

 case "":
-    log.Println("INF No OpenCensus tracing exporter initialized")
+    log.Println("WRN OpenCensus: no tracing exporter defined")

 default:
     err = fmt.Errorf("invalid tracing exporter")
 }

 if err != nil {
-    return nil, fmt.Errorf("ERR OpenCensus: %s: %v", conf.Telemetry.Tracing, err)
+    return nil, fmt.Errorf("ERR OpenCensus: %s: %v",
+        conf.Telemetry.Tracing.Exporter,
+        err)
 }

 if tex != nil {