Compare commits

...

5 Commits

25 changed files with 250 additions and 121 deletions

.gitignore (vendored, 1 addition)
View File

@ -35,4 +35,5 @@ suppressions
release
.gofuzz
*-fuzz.zip
*.test

View File

@ -1,6 +1,6 @@
<img src="docs/guide/.vuepress/public/super-graph.png" width="250" />
### Build web products faster. Secure high performance GraphQL
### Build web products faster. Secure high-performance GraphQL
[![GoDoc](https://img.shields.io/badge/godoc-reference-5272B4.svg)](https://pkg.go.dev/github.com/dosco/super-graph/core?tab=doc)
![Apache 2.0](https://img.shields.io/github/license/dosco/super-graph.svg?style=flat-square)
@ -10,12 +10,12 @@
## What's Super Graph?
Designed to 100x your developer productivity. Super Graph will instantly and without you writing code provide you a high performance GraphQL API for Postgres DB. GraphQL queries are compiled into a single fast SQL query. Super Graph is a GO library and a service, use it in your own code or run it as a seperate service.
Designed to 100x your developer productivity. Super Graph will instantly, and without you writing any code, provide a high-performance GraphQL API for your PostgreSQL DB. GraphQL queries are compiled into a single fast SQL query. Super Graph is a Go library and a service; use it in your own code or run it as a separate service.
## Using it as a service
```console
get get https://github.com/dosco/super-graph
go get github.com/dosco/super-graph
super-graph new <app_name>
```
@ -35,17 +35,12 @@ import (
func main() {
db, err := sql.Open("pgx", "postgres://postgrs:@localhost:5432/example_db")
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
conf, err := core.ReadInConfig("./config/dev.yml")
sg, err := core.NewSuperGraph(nil, db)
if err != nil {
log.Fatalf(err)
}
sg, err = core.NewSuperGraph(conf, db)
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
query := `
@ -58,7 +53,7 @@ func main() {
res, err := sg.GraphQL(context.Background(), query, nil)
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
fmt.Println(string(res.Data))
@ -67,7 +62,7 @@ func main() {
## About Super Graph
After working on several products through my career I find that we spend way too much time on building API backends. Most APIs also require constant updating, this costs real time and money.
After working on several products through my career I found that we spend way too much time on building API backends. Most APIs also need constant updating, and this costs time and money.
It's always the same thing, figure out what the UI needs then build an endpoint for it. Most API code involves struggling with an ORM to query a database and mangle the data into a shape that the UI expects to see.
@ -75,28 +70,27 @@ I didn't want to write this code anymore, I wanted the computer to do it. Enter
Having worked with compilers before I saw this as a compiler problem. Why not build a compiler that converts GraphQL to highly efficient SQL.
This compiler is what sits at the heart of Super Graph with layers of useful functionality around it like authentication, remote joins, rails integration, database migrations and everything else needed for you to build production ready apps with it.
This compiler is what sits at the heart of Super Graph, with layers of useful functionality around it like authentication, remote joins, rails integration, database migrations, and everything else needed for you to build production-ready apps with it.
## Features
- Complex nested queries and mutations
- Auto learns database tables and relationships
- Role and Attribute based access control
- Opaque cursor based efficient pagination
- Full text search and aggregations
- Role and Attribute-based access control
- Opaque cursor-based efficient pagination
- Full-text search and aggregations
- JWT tokens supported (Auth0, etc)
- Join database queries with remote REST APIs
- Also works with existing Ruby-On-Rails apps
- Rails authentication supported (Redis, Memcache, Cookie)
- A simple config file
- High performance GO codebase
- High performance Go codebase
- Tiny docker image and low memory requirements
- Fuzz tested for security
- Database migrations tool
- Database seeding tool
- Works with Postgres and YugabyteDB
## Documentation
[supergraph.dev](https://supergraph.dev)
@ -116,4 +110,3 @@ Twitter or Discord.
Copyright (c) 2019-present Vikram Rangnekar

View File

@ -16,17 +16,12 @@
func main() {
db, err := sql.Open("pgx", "postgres://postgrs:@localhost:5432/example_db")
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
conf, err := core.ReadInConfig("./config/dev.yml")
sg, err := core.NewSuperGraph(nil, db)
if err != nil {
log.Fatalf(err)
}
sg, err = core.NewSuperGraph(conf, db)
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
query := `
@ -39,7 +34,7 @@
res, err := sg.GraphQL(context.Background(), query, nil)
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
fmt.Println(string(res.Data))
@ -82,6 +77,7 @@ type SuperGraph struct {
conf *Config
db *sql.DB
log *_log.Logger
dbinfo *psql.DBInfo
schema *psql.DBSchema
allowList *allow.List
encKey [32]byte
@ -99,10 +95,20 @@ type SuperGraph struct {
// NewSuperGraph creates the SuperGraph struct; this involves querying the database to learn its
// schemas and relationships
func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
return newSuperGraph(conf, db, nil)
}
// newSuperGraph helps with writing tests and benchmarks
func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph, error) {
if conf == nil {
conf = &Config{}
}
sg := &SuperGraph{
conf: conf,
db: db,
log: _log.New(os.Stdout, "", 0),
conf: conf,
db: db,
dbinfo: dbinfo,
log: _log.New(os.Stdout, "", 0),
}
if err := sg.initConfig(); err != nil {
@ -199,6 +205,8 @@ func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMess
return &ct.res, nil
}
// GraphQLSchema returns the GraphQL schema for the underlying database connected
// to this instance of Super Graph
func (sg *SuperGraph) GraphQLSchema() (string, error) {
return sg.ge.Schema.String(), nil
}
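The new GraphQLSchema method makes it possible to export the schema Super Graph derived from the database. A minimal sketch of calling it, assuming sg comes from core.NewSuperGraph as in the README example:

```go
package example // illustrative package, not part of the repo

import (
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
)

// PrintSchema dumps the GraphQL schema that Super Graph derived from
// the connected database. sg is assumed to have been created with
// core.NewSuperGraph as shown in the README example above.
func PrintSchema(sg *core.SuperGraph) {
	schema, err := sg.GraphQLSchema()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(schema)
}
```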

core/api_test.go (new file, 62 additions)
View File

@ -0,0 +1,62 @@
package core
import (
"context"
"fmt"
"testing"
"github.com/DATA-DOG/go-sqlmock"
"github.com/dosco/super-graph/core/internal/psql"
)
func BenchmarkGraphQL(b *testing.B) {
ct := context.WithValue(context.Background(), UserIDKey, "1")
db, _, err := sqlmock.New()
if err != nil {
b.Fatal(err)
}
defer db.Close()
// mock.ExpectQuery(`^SELECT jsonb_build_object`).WithArgs()
c := &Config{DefaultBlock: true}
sg, err := newSuperGraph(c, db, psql.GetTestDBInfo())
if err != nil {
b.Fatal(err)
}
query := `
query {
products {
id
name
user {
full_name
phone
email
}
customers {
id
email
}
}
users {
id
name
}
}`
b.ResetTimer()
b.ReportAllocs()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
_, err = sg.GraphQL(ct, query, nil)
}
})
fmt.Println(err)
//fmt.Println(mock.ExpectationsWereMet())
}
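To run this new benchmark locally, something along these lines should work from the repo root (standard go test flags, no extra setup beyond the module dependencies):

```console
go test -bench=GraphQL -benchmem ./core
```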

View File

@ -10,16 +10,56 @@ import (
// Core struct contains core specific config value
type Config struct {
SecretKey string `mapstructure:"secret_key"`
UseAllowList bool `mapstructure:"use_allow_list"`
AllowListFile string `mapstructure:"allow_list_file"`
SetUserID bool `mapstructure:"set_user_id"`
Vars map[string]string `mapstructure:"variables"`
Blocklist []string
Tables []Table
RolesQuery string `mapstructure:"roles_query"`
Roles []Role
Inflections map[string]string
// SecretKey is used to encrypt opaque values such as
// the cursor. Auto-generated if not set
SecretKey string `mapstructure:"secret_key"`
// UseAllowList (aka production mode) when set to true ensures
// only queries listed in the allow.list file can be used. All
// queries are pre-prepared so no compiling happens and things are
// very fast.
UseAllowList bool `mapstructure:"use_allow_list"`
// AllowListFile is the path to the allow list file. If not set, the
// path is assumed to be the same as the config path (allow.list)
AllowListFile string `mapstructure:"allow_list_file"`
// SetUserID forces the database session variable `user.id` to
// be set to the user id. This variable can be used by triggers
// or other database functions
SetUserID bool `mapstructure:"set_user_id"`
// DefaultBlock ensures only tables configured under the `anon` role
// config can be queried by the `anon` role. For example if the table
// `users` is not listed under the anon role then it will be filtered
// out of any unauthenticated queries that mention it.
DefaultBlock bool `mapstructure:"default_block"`
// Vars is a map of hardcoded variables that can be leveraged in your
// queries (eg variable admin_id will be $admin_id in the query)
Vars map[string]string `mapstructure:"variables"`
// Blocklist is a list of tables and columns that should be filtered
// out from any and all queries
Blocklist []string
// Tables contains all table-specific configuration such as aliased tables,
// creating relationships between tables, etc.
Tables []Table
// RolesQuery, if set, enables attribute-based access control. This query
// is used to fetch the user attributes that then dynamically define the
// user's role.
RolesQuery string `mapstructure:"roles_query"`
// Roles contains the configuration for all the roles you want to support.
// `user` and `anon` are the two default roles: the user role is for when a
// user ID is available and the anon role for when it's not.
Roles []Role
// Inflections adds additional singular-to-plural mappings
// to the engine (eg. sheep: sheep)
Inflections map[string]string `mapstructure:"inflections"`
}
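To show how the documented options fit together, here is a minimal sketch of a Config a caller might pass to core.NewSuperGraph; the values are borrowed from examples elsewhere in this changeset, not recommended defaults:

```go
package example // illustrative

import "github.com/dosco/super-graph/core"

// exampleConfig builds an illustrative core.Config to pass to
// core.NewSuperGraph.
func exampleConfig() *core.Config {
	return &core.Config{
		// Hide tables not configured under the anon role from
		// unauthenticated queries.
		DefaultBlock: true,
		// Available inside queries as $admin_account_id.
		Vars: map[string]string{"admin_account_id": "5"},
		// Enables attribute-based access control.
		RolesQuery: `SELECT * FROM users WHERE id = $user_id`,
	}
}
```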
// Table struct defines a database table

View File

@ -50,26 +50,33 @@ type scontext struct {
}
func (sg *SuperGraph) initCompilers() error {
di, err := psql.GetDBInfo(sg.db)
if err != nil {
var err error
// If sg.dbinfo is not nil then it's probably set
// for tests
if sg.dbinfo == nil {
sg.dbinfo, err = psql.GetDBInfo(sg.db)
if err != nil {
return err
}
}
if err = addTables(sg.conf, sg.dbinfo); err != nil {
return err
}
if err = addTables(sg.conf, di); err != nil {
if err = addForeignKeys(sg.conf, sg.dbinfo); err != nil {
return err
}
if err = addForeignKeys(sg.conf, di); err != nil {
return err
}
sg.schema, err = psql.NewDBSchema(di, getDBTableAliases(sg.conf))
sg.schema, err = psql.NewDBSchema(sg.dbinfo, getDBTableAliases(sg.conf))
if err != nil {
return err
}
sg.qc, err = qcode.NewCompiler(qcode.Config{
Blocklist: sg.conf.Blocklist,
DefaultBlock: sg.conf.DefaultBlock,
Blocklist: sg.conf.Blocklist,
})
if err != nil {
return err

View File

@ -70,6 +70,16 @@ func (sg *SuperGraph) initConfig() error {
sg.roles["user"] = &ur
}
// If the anon role is not defined and DefaultBlock is not set then create it
if _, ok := sg.roles["anon"]; !ok && !c.DefaultBlock {
ur := Role{
Name: "anon",
tm: make(map[string]*RoleTable),
}
c.Roles = append(c.Roles, ur)
sg.roles["anon"] = &ur
}
// Roles: validate and sanitize
c.RolesQuery = sanitizeVars(c.RolesQuery)

View File

@ -50,7 +50,7 @@ func DropSchema(t *testing.T, db *sql.DB) {
}
func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
config := core.Config{}
config := core.Config{DefaultBlock: true}
config.UseAllowList = false
config.AllowListFile = "./allow.list"
config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`

View File

@ -10,7 +10,7 @@ import (
var (
qcompileTest, _ = qcode.NewCompiler(qcode.Config{})
schema = getTestSchema()
schema = GetTestSchema()
vars = NewVariables(map[string]string{
"admin_account_id": "5",

View File

@ -1,4 +1,4 @@
package psql
package psql_test
import (
"encoding/json"

View File

@ -1,4 +1,4 @@
package psql
package psql_test
import (
"encoding/json"

View File

@ -1,4 +1,4 @@
package psql
package psql_test
import (
"fmt"
@ -8,6 +8,7 @@ import (
"strings"
"testing"
"github.com/dosco/super-graph/core/internal/psql"
"github.com/dosco/super-graph/core/internal/qcode"
)
@ -19,7 +20,7 @@ const (
var (
qcompile *qcode.Compiler
pcompile *Compiler
pcompile *psql.Compiler
expected map[string][]string
)
@ -133,13 +134,16 @@ func TestMain(m *testing.M) {
log.Fatal(err)
}
schema := getTestSchema()
schema, err := psql.GetTestSchema()
if err != nil {
log.Fatal(err)
}
vars := NewVariables(map[string]string{
vars := psql.NewVariables(map[string]string{
"admin_account_id": "5",
})
pcompile = NewCompiler(Config{
pcompile = psql.NewCompiler(psql.Config{
Schema: schema,
Vars: vars,
})
@ -173,7 +177,7 @@ func TestMain(m *testing.M) {
os.Exit(m.Run())
}
func compileGQLToPSQL(t *testing.T, gql string, vars Variables, role string) {
func compileGQLToPSQL(t *testing.T, gql string, vars psql.Variables, role string) {
generateTestFile := false
if generateTestFile {

View File

@ -1,4 +1,4 @@
package psql
package psql_test
import (
"bytes"

View File

@ -57,9 +57,10 @@ type DBRel struct {
func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
schema := &DBSchema{
t: make(map[string]*DBTableInfo),
rm: make(map[string]map[string]*DBRel),
fm: make(map[string]*DBFunction, len(info.Functions)),
ver: info.Version,
t: make(map[string]*DBTableInfo),
rm: make(map[string]map[string]*DBRel),
fm: make(map[string]*DBFunction, len(info.Functions)),
}
for i, t := range info.Tables {

View File

@ -14,7 +14,7 @@ type DBInfo struct {
Tables []DBTable
Columns [][]DBColumn
Functions []DBFunction
colmap map[string]map[string]*DBColumn
colMap map[string]map[string]*DBColumn
}
func GetDBInfo(db *sql.DB) (*DBInfo, error) {
@ -36,22 +36,17 @@ func GetDBInfo(db *sql.DB) (*DBInfo, error) {
return nil, err
}
di.colmap = make(map[string]map[string]*DBColumn, len(di.Tables))
for i, t := range di.Tables {
for _, t := range di.Tables {
cols, err := GetColumns(db, "public", t.Name)
if err != nil {
return nil, err
}
di.Columns = append(di.Columns, cols)
di.colmap[t.Key] = make(map[string]*DBColumn, len(cols))
for n, c := range di.Columns[i] {
di.colmap[t.Key][c.Key] = &di.Columns[i][n]
}
}
di.colMap = newColMap(di.Tables, di.Columns)
di.Functions, err = GetFunctions(db)
if err != nil {
return nil, err
@ -60,22 +55,37 @@ func GetDBInfo(db *sql.DB) (*DBInfo, error) {
return di, nil
}
func newColMap(tables []DBTable, columns [][]DBColumn) map[string]map[string]*DBColumn {
cm := make(map[string]map[string]*DBColumn, len(tables))
for i, t := range tables {
cols := columns[i]
cm[t.Key] = make(map[string]*DBColumn, len(cols))
for n, c := range cols {
cm[t.Key][c.Key] = &columns[i][n]
}
}
return cm
}
func (di *DBInfo) AddTable(t DBTable, cols []DBColumn) {
t.ID = di.Tables[len(di.Tables)-1].ID
di.Tables = append(di.Tables, t)
di.colmap[t.Key] = make(map[string]*DBColumn, len(cols))
di.colMap[t.Key] = make(map[string]*DBColumn, len(cols))
for i := range cols {
cols[i].ID = int16(i)
c := &cols[i]
di.colmap[t.Key][c.Key] = c
di.colMap[t.Key][c.Key] = c
}
di.Columns = append(di.Columns, cols)
}
func (di *DBInfo) GetColumn(table, column string) (*DBColumn, bool) {
v, ok := di.colmap[strings.ToLower(table)][strings.ToLower(column)]
v, ok := di.colMap[strings.ToLower(table)][strings.ToLower(column)]
return v, ok
}

View File

@ -1,11 +1,10 @@
package psql
import (
"log"
"strings"
)
func getTestSchema() *DBSchema {
func GetTestDBInfo() *DBInfo {
tables := []DBTable{
DBTable{Name: "customers", Type: "table"},
DBTable{Name: "users", Type: "table"},
@ -74,36 +73,19 @@ func getTestSchema() *DBSchema {
}
}
schema := &DBSchema{
ver: 110000,
t: make(map[string]*DBTableInfo),
rm: make(map[string]map[string]*DBRel),
return &DBInfo{
Version: 110000,
Tables: tables,
Columns: columns,
Functions: []DBFunction{},
colMap: newColMap(tables, columns),
}
}
func GetTestSchema() (*DBSchema, error) {
aliases := map[string][]string{
"users": []string{"mes"},
}
for i, t := range tables {
err := schema.addTable(t, columns[i], aliases)
if err != nil {
log.Fatal(err)
}
}
for i, t := range tables {
err := schema.firstDegreeRels(t, columns[i])
if err != nil {
log.Fatal(err)
}
}
for i, t := range tables {
err := schema.secondDegreeRels(t, columns[i])
if err != nil {
log.Fatal(err)
}
}
return schema
return NewDBSchema(GetTestDBInfo(), aliases)
}

View File

@ -1,4 +1,4 @@
package psql
package psql_test
import (
"encoding/json"

View File

@ -7,7 +7,8 @@ import (
)
type Config struct {
Blocklist []string
Blocklist []string
DefaultBlock bool
}
type QueryConfig struct {

View File

@ -170,6 +170,7 @@ const (
)
type Compiler struct {
db bool // default block tables if not defined in anon role
tr map[string]map[string]*trval
bl map[string]struct{}
}
@ -179,7 +180,7 @@ var expPool = sync.Pool{
}
func NewCompiler(c Config) (*Compiler, error) {
co := &Compiler{}
co := &Compiler{db: c.DefaultBlock}
co.tr = make(map[string]map[string]*trval)
co.bl = make(map[string]struct{}, len(c.Blocklist))
@ -413,12 +414,12 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
var fil *Exp
var nu bool
var nu bool // user required (or not) in this filter
if trv, ok := com.tr[role][sel.Name]; ok {
fil, nu = trv.filter(qc.Type)
} else if role == "anon" {
} else if com.db && role == "anon" {
// Tables not defined under the anon role will not be rendered
sel.SkipRender = true
}
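To make the DefaultBlock change concrete: with DefaultBlock enabled and no anon entry for a table, an unauthenticated query that mentions it has that selection skipped rather than rendered into SQL. An illustrative query (table names assumed, based on the comments above):

```graphql
query {
  # products configured under the anon role: rendered into the SQL
  products {
    id
    name
  }
  # users not configured under the anon role: skipped (SkipRender)
  users {
    id
    email
  }
}
```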

View File

@ -145,17 +145,12 @@ import (
func main() {
db, err := sql.Open("pgx", "postgres://postgrs:@localhost:5432/example_db")
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
conf, err := config.NewConfig("./config")
sg, err := core.NewSuperGraph(nil, db)
if err != nil {
log.Fatalf(err)
}
sg, err = core.NewSuperGraph(conf, db)
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
graphqlQuery := `
@ -168,7 +163,7 @@ func main() {
res, err := sg.GraphQL(context.Background(), graphqlQuery, nil)
if err != nil {
log.Fatalf(err)
log.Fatal(err)
}
fmt.Println(string(res.Data))

go.mod (1 addition)
View File

@ -1,6 +1,7 @@
module github.com/dosco/super-graph
require (
github.com/DATA-DOG/go-sqlmock v1.4.1
github.com/GeertJohan/go.rice v1.0.0
github.com/NYTimes/gziphandler v1.1.1
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3

go.sum (2 additions)
View File

@ -1,6 +1,8 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/DATA-DOG/go-sqlmock v1.4.1 h1:ThlnYciV1iM/V0OSF/dtkqWb6xo5qITT1TJBG1MRDJM=
github.com/DATA-DOG/go-sqlmock v1.4.1/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/GeertJohan/go.incremental v1.0.0 h1:7AH+pY1XUgQE4Y1HcXYaMqAI0m9yrFqo/jt0CW30vsg=
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=

View File

@ -45,6 +45,7 @@ type Serv struct {
MigrationsPath string `mapstructure:"migrations_path"`
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
DebugCORS bool `mapstructure:"cors_debug"`
APIPath string `mapstructure:"api_path"`
Auth auth.Auth
Auths []auth.Auth
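The new APIPath setting lets the GraphQL endpoint be served under a custom prefix instead of the default /api/v1/graphql (see the routing change further down). A hypothetical fragment of the service config file, with the key name taken from the mapstructure tag above and an illustrative value:

```yaml
# With this set, the endpoint becomes /data/v1/graphql
# instead of the default /api/v1/graphql.
api_path: data
```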

View File

@ -100,6 +100,9 @@ func initConf() (*Config, error) {
c.UseAllowList = true
}
// In the anon role, block all tables that are not defined under that role
c.DefaultBlock = true
return c, nil
}

View File

@ -6,6 +6,7 @@ import (
"net/http"
"os"
"os/signal"
"path"
"strings"
"time"
@ -111,9 +112,15 @@ func routeHandler() (http.Handler, error) {
return mux, nil
}
apiRoute := "/api/v1/graphql"
if len(conf.APIPath) != 0 {
apiRoute = path.Join("/", conf.APIPath, "/v1/graphql")
}
routes := map[string]http.Handler{
"/health": http.HandlerFunc(health),
"/api/v1/graphql": apiV1Handler(),
"/health": http.HandlerFunc(health),
apiRoute: apiV1Handler(),
}
if err := setActionRoutes(routes); err != nil {