Compare commits
13 Commits
Author | SHA1 | Date | |
---|---|---|---|
bdffe7b14e | |||
ae7cde0433 | |||
6293d37e73 | |||
7a3fe5a1df | |||
2a32c179ba | |||
0a02bde219 | |||
966aa9ce8c | |||
6f18d56ca0 | |||
c400461835 | |||
a6691de1b7 | |||
e6934cda02 | |||
4cf7956ff5 | |||
5356455904 |
1
.gitignore
vendored
1
.gitignore
vendored
@ -35,4 +35,5 @@ suppressions
|
|||||||
release
|
release
|
||||||
.gofuzz
|
.gofuzz
|
||||||
*-fuzz.zip
|
*-fuzz.zip
|
||||||
|
*.test
|
||||||
|
|
||||||
|
@ -15,10 +15,7 @@ Designed to 100x your developer productivity. Super Graph will instantly and wit
|
|||||||
## Using it as a service
|
## Using it as a service
|
||||||
|
|
||||||
```console
|
```console
|
||||||
git clone https://github.com/dosco/super-graph
|
get get https://github.com/dosco/super-graph
|
||||||
cd ./super-graph
|
|
||||||
make install
|
|
||||||
|
|
||||||
super-graph new <app_name>
|
super-graph new <app_name>
|
||||||
```
|
```
|
||||||
|
|
||||||
@ -46,7 +43,7 @@ func main() {
|
|||||||
log.Fatalf(err)
|
log.Fatalf(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
sg, err = core.NewSuperGraph(conf, db)
|
sg, err := core.NewSuperGraph(conf, db)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf(err)
|
log.Fatalf(err)
|
||||||
}
|
}
|
||||||
|
@ -30,7 +30,7 @@ reload_on_config_change: true
|
|||||||
# seed_file: seed.js
|
# seed_file: seed.js
|
||||||
|
|
||||||
# Path pointing to where the migrations can be found
|
# Path pointing to where the migrations can be found
|
||||||
migrations_path: ./config/migrations
|
migrations_path: ./migrations
|
||||||
|
|
||||||
# Secret key for general encryption operations like
|
# Secret key for general encryption operations like
|
||||||
# encrypting the cursor data
|
# encrypting the cursor data
|
||||||
@ -116,18 +116,18 @@ database:
|
|||||||
# database ping timeout is used for db health checking
|
# database ping timeout is used for db health checking
|
||||||
ping_timeout: 1m
|
ping_timeout: 1m
|
||||||
|
|
||||||
# Define additional variables here to be used with filters
|
# Define additional variables here to be used with filters
|
||||||
variables:
|
variables:
|
||||||
admin_account_id: "5"
|
admin_account_id: "5"
|
||||||
|
|
||||||
# Field and table names that you wish to block
|
# Field and table names that you wish to block
|
||||||
blocklist:
|
blocklist:
|
||||||
- ar_internal_metadata
|
- ar_internal_metadata
|
||||||
- schema_migrations
|
- schema_migrations
|
||||||
- secret
|
- secret
|
||||||
- password
|
- password
|
||||||
- encrypted
|
- encrypted
|
||||||
- token
|
- token
|
||||||
|
|
||||||
tables:
|
tables:
|
||||||
- name: customers
|
- name: customers
|
||||||
|
@ -30,7 +30,7 @@ enable_tracing: true
|
|||||||
# seed_file: seed.js
|
# seed_file: seed.js
|
||||||
|
|
||||||
# Path pointing to where the migrations can be found
|
# Path pointing to where the migrations can be found
|
||||||
# migrations_path: migrations
|
# migrations_path: ./migrations
|
||||||
|
|
||||||
# Secret key for general encryption operations like
|
# Secret key for general encryption operations like
|
||||||
# encrypting the cursor data
|
# encrypting the cursor data
|
||||||
|
53
core/api.go
53
core/api.go
@ -24,7 +24,7 @@
|
|||||||
log.Fatalf(err)
|
log.Fatalf(err)
|
||||||
}
|
}
|
||||||
|
|
||||||
sg, err = core.NewSuperGraph(conf, db)
|
sg, err := core.NewSuperGraph(conf, db)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf(err)
|
log.Fatalf(err)
|
||||||
}
|
}
|
||||||
@ -55,6 +55,7 @@ import (
|
|||||||
_log "log"
|
_log "log"
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
|
"github.com/chirino/graphql"
|
||||||
"github.com/dosco/super-graph/core/internal/allow"
|
"github.com/dosco/super-graph/core/internal/allow"
|
||||||
"github.com/dosco/super-graph/core/internal/crypto"
|
"github.com/dosco/super-graph/core/internal/crypto"
|
||||||
"github.com/dosco/super-graph/core/internal/psql"
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
@ -81,6 +82,7 @@ type SuperGraph struct {
|
|||||||
conf *Config
|
conf *Config
|
||||||
db *sql.DB
|
db *sql.DB
|
||||||
log *_log.Logger
|
log *_log.Logger
|
||||||
|
dbinfo *psql.DBInfo
|
||||||
schema *psql.DBSchema
|
schema *psql.DBSchema
|
||||||
allowList *allow.List
|
allowList *allow.List
|
||||||
encKey [32]byte
|
encKey [32]byte
|
||||||
@ -92,15 +94,26 @@ type SuperGraph struct {
|
|||||||
anonExists bool
|
anonExists bool
|
||||||
qc *qcode.Compiler
|
qc *qcode.Compiler
|
||||||
pc *psql.Compiler
|
pc *psql.Compiler
|
||||||
|
ge *graphql.Engine
|
||||||
}
|
}
|
||||||
|
|
||||||
// NewSuperGraph creates the SuperGraph struct, this involves querying the database to learn its
|
// NewSuperGraph creates the SuperGraph struct, this involves querying the database to learn its
|
||||||
// schemas and relationships
|
// schemas and relationships
|
||||||
func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
|
func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
|
||||||
|
return newSuperGraph(conf, db, nil)
|
||||||
|
}
|
||||||
|
|
||||||
|
// newSuperGraph helps with writing tests and benchmarks
|
||||||
|
func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph, error) {
|
||||||
|
if conf == nil {
|
||||||
|
conf = &Config{}
|
||||||
|
}
|
||||||
|
|
||||||
sg := &SuperGraph{
|
sg := &SuperGraph{
|
||||||
conf: conf,
|
conf: conf,
|
||||||
db: db,
|
db: db,
|
||||||
log: _log.New(os.Stdout, "", 0),
|
dbinfo: dbinfo,
|
||||||
|
log: _log.New(os.Stdout, "", 0),
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := sg.initConfig(); err != nil {
|
if err := sg.initConfig(); err != nil {
|
||||||
@ -123,6 +136,10 @@ func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if err := sg.initGraphQLEgine(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
if len(conf.SecretKey) != 0 {
|
if len(conf.SecretKey) != 0 {
|
||||||
sk := sha256.Sum256([]byte(conf.SecretKey))
|
sk := sha256.Sum256([]byte(conf.SecretKey))
|
||||||
conf.SecretKey = ""
|
conf.SecretKey = ""
|
||||||
@ -154,7 +171,24 @@ type Result struct {
|
|||||||
// In developer mode all names queries are saved into a file `allow.list` and in production mode only
|
// In developer mode all names queries are saved into a file `allow.list` and in production mode only
|
||||||
// queries from this file can be run.
|
// queries from this file can be run.
|
||||||
func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMessage) (*Result, error) {
|
func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMessage) (*Result, error) {
|
||||||
ct := scontext{Context: c, sg: sg, query: query, vars: vars}
|
var res Result
|
||||||
|
|
||||||
|
res.op = qcode.GetQType(query)
|
||||||
|
res.name = allow.QueryName(query)
|
||||||
|
|
||||||
|
// use the chirino/graphql library for introspection queries
|
||||||
|
// disabled when allow list is enforced
|
||||||
|
if !sg.conf.UseAllowList && res.name == "IntrospectionQuery" {
|
||||||
|
r := sg.ge.ExecuteOne(&graphql.EngineRequest{Query: query})
|
||||||
|
res.Data = r.Data
|
||||||
|
|
||||||
|
if r.Error() != nil {
|
||||||
|
res.Error = r.Error().Error()
|
||||||
|
}
|
||||||
|
return &res, r.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
ct := scontext{Context: c, sg: sg, query: query, vars: vars, res: res}
|
||||||
|
|
||||||
if len(vars) <= 2 {
|
if len(vars) <= 2 {
|
||||||
ct.vars = nil
|
ct.vars = nil
|
||||||
@ -166,9 +200,6 @@ func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMess
|
|||||||
ct.role = "anon"
|
ct.role = "anon"
|
||||||
}
|
}
|
||||||
|
|
||||||
ct.res.op = qcode.GetQType(query)
|
|
||||||
ct.res.name = allow.QueryName(query)
|
|
||||||
|
|
||||||
data, err := ct.execQuery()
|
data, err := ct.execQuery()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return &ct.res, err
|
return &ct.res, err
|
||||||
@ -178,3 +209,9 @@ func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMess
|
|||||||
|
|
||||||
return &ct.res, nil
|
return &ct.res, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// GraphQLSchema function return the GraphQL schema for the underlying database connected
|
||||||
|
// to this instance of Super Graph
|
||||||
|
func (sg *SuperGraph) GraphQLSchema() (string, error) {
|
||||||
|
return sg.ge.Schema.String(), nil
|
||||||
|
}
|
||||||
|
62
core/api_test.go
Normal file
62
core/api_test.go
Normal file
@ -0,0 +1,62 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"fmt"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/DATA-DOG/go-sqlmock"
|
||||||
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
|
)
|
||||||
|
|
||||||
|
func BenchmarkGraphQL(b *testing.B) {
|
||||||
|
ct := context.WithValue(context.Background(), UserIDKey, "1")
|
||||||
|
|
||||||
|
db, _, err := sqlmock.New()
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
defer db.Close()
|
||||||
|
|
||||||
|
// mock.ExpectQuery(`^SELECT jsonb_build_object`).WithArgs()
|
||||||
|
|
||||||
|
sg, err := newSuperGraph(nil, db, psql.GetTestDBInfo())
|
||||||
|
if err != nil {
|
||||||
|
b.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
query := `
|
||||||
|
query {
|
||||||
|
products {
|
||||||
|
id
|
||||||
|
name
|
||||||
|
user {
|
||||||
|
full_name
|
||||||
|
phone
|
||||||
|
email
|
||||||
|
}
|
||||||
|
customers {
|
||||||
|
id
|
||||||
|
email
|
||||||
|
}
|
||||||
|
}
|
||||||
|
users {
|
||||||
|
id
|
||||||
|
name
|
||||||
|
}
|
||||||
|
}`
|
||||||
|
|
||||||
|
b.ResetTimer()
|
||||||
|
b.ReportAllocs()
|
||||||
|
|
||||||
|
b.RunParallel(func(pb *testing.PB) {
|
||||||
|
for pb.Next() {
|
||||||
|
_, err = sg.GraphQL(ct, query, nil)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
|
||||||
|
fmt.Println(err)
|
||||||
|
|
||||||
|
//fmt.Println(mock.ExpectationsWereMet())
|
||||||
|
|
||||||
|
}
|
33
core/core.go
33
core/core.go
@ -50,20 +50,26 @@ type scontext struct {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func (sg *SuperGraph) initCompilers() error {
|
func (sg *SuperGraph) initCompilers() error {
|
||||||
di, err := psql.GetDBInfo(sg.db)
|
var err error
|
||||||
if err != nil {
|
|
||||||
|
// If sg.di is not null then it's probably set
|
||||||
|
// for tests
|
||||||
|
if sg.dbinfo == nil {
|
||||||
|
sg.dbinfo, err = psql.GetDBInfo(sg.db)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if err = addTables(sg.conf, sg.dbinfo); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = addTables(sg.conf, di); err != nil {
|
if err = addForeignKeys(sg.conf, sg.dbinfo); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
if err = addForeignKeys(sg.conf, di); err != nil {
|
sg.schema, err = psql.NewDBSchema(sg.dbinfo, getDBTableAliases(sg.conf))
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
sg.schema, err = psql.NewDBSchema(di, getDBTableAliases(sg.conf))
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -94,15 +100,12 @@ func (c *scontext) execQuery() ([]byte, error) {
|
|||||||
|
|
||||||
if c.sg.conf.UseAllowList {
|
if c.sg.conf.UseAllowList {
|
||||||
data, st, err = c.resolvePreparedSQL()
|
data, st, err = c.resolvePreparedSQL()
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
data, st, err = c.resolveSQL()
|
data, st, err = c.resolveSQL()
|
||||||
if err != nil {
|
}
|
||||||
return nil, err
|
|
||||||
}
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(data) == 0 || st.skipped == 0 {
|
if len(data) == 0 || st.skipped == 0 {
|
||||||
|
@ -5,8 +5,8 @@ import (
|
|||||||
"encoding/base64"
|
"encoding/base64"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/core/internal/crypto"
|
"github.com/dosco/super-graph/core/internal/crypto"
|
||||||
"github.com/dosco/super-graph/jsn"
|
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
|
"github.com/dosco/super-graph/jsn"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (sg *SuperGraph) encryptCursor(qc *qcode.QCode, data []byte) ([]byte, error) {
|
func (sg *SuperGraph) encryptCursor(qc *qcode.QCode, data []byte) ([]byte, error) {
|
||||||
|
494
core/graph-schema.go
Normal file
494
core/graph-schema.go
Normal file
@ -0,0 +1,494 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
import (
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/chirino/graphql"
|
||||||
|
"github.com/chirino/graphql/resolvers"
|
||||||
|
"github.com/chirino/graphql/schema"
|
||||||
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
|
)
|
||||||
|
|
||||||
|
var typeMap map[string]string = map[string]string{
|
||||||
|
"smallint": "Int",
|
||||||
|
"integer": "Int",
|
||||||
|
"bigint": "Int",
|
||||||
|
"smallserial": "Int",
|
||||||
|
"serial": "Int",
|
||||||
|
"bigserial": "Int",
|
||||||
|
"decimal": "Float",
|
||||||
|
"numeric": "Float",
|
||||||
|
"real": "Float",
|
||||||
|
"double precision": "Float",
|
||||||
|
"money": "Float",
|
||||||
|
"boolean": "Boolean",
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sg *SuperGraph) initGraphQLEgine() error {
|
||||||
|
engine := graphql.New()
|
||||||
|
engineSchema := engine.Schema
|
||||||
|
dbSchema := sg.schema
|
||||||
|
|
||||||
|
engineSchema.Parse(`
|
||||||
|
enum OrderDirection {
|
||||||
|
asc
|
||||||
|
desc
|
||||||
|
}
|
||||||
|
`)
|
||||||
|
|
||||||
|
gqltype := func(col psql.DBColumn) schema.Type {
|
||||||
|
typeName := typeMap[strings.ToLower(col.Type)]
|
||||||
|
if typeName == "" {
|
||||||
|
typeName = "String"
|
||||||
|
}
|
||||||
|
var t schema.Type = &schema.TypeName{Ident: schema.Ident{Text: typeName}}
|
||||||
|
if col.NotNull {
|
||||||
|
t = &schema.NonNull{OfType: t}
|
||||||
|
}
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
query := &schema.Object{
|
||||||
|
Name: "Query",
|
||||||
|
Fields: schema.FieldList{},
|
||||||
|
}
|
||||||
|
mutation := &schema.Object{
|
||||||
|
Name: "Mutation",
|
||||||
|
Fields: schema.FieldList{},
|
||||||
|
}
|
||||||
|
engineSchema.Types[query.Name] = query
|
||||||
|
engineSchema.Types[mutation.Name] = mutation
|
||||||
|
engineSchema.EntryPoints[schema.Query] = query
|
||||||
|
engineSchema.EntryPoints[schema.Mutation] = mutation
|
||||||
|
|
||||||
|
//validGraphQLIdentifierRegex := regexp.MustCompile(`^[A-Za-z_][A-Za-z_0-9]*$`)
|
||||||
|
|
||||||
|
scalarExpressionTypesNeeded := map[string]bool{}
|
||||||
|
tableNames := dbSchema.GetTableNames()
|
||||||
|
funcs := dbSchema.GetFunctions()
|
||||||
|
|
||||||
|
for _, table := range tableNames {
|
||||||
|
ti, err := dbSchema.GetTable(table)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !ti.IsSingular {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
singularName := ti.Singular
|
||||||
|
// if !validGraphQLIdentifierRegex.MatchString(singularName) {
|
||||||
|
// return errors.New("table name is not a valid GraphQL identifier: " + singularName)
|
||||||
|
// }
|
||||||
|
pluralName := ti.Plural
|
||||||
|
// if !validGraphQLIdentifierRegex.MatchString(pluralName) {
|
||||||
|
// return errors.New("table name is not a valid GraphQL identifier: " + pluralName)
|
||||||
|
// }
|
||||||
|
|
||||||
|
outputType := &schema.Object{
|
||||||
|
Name: singularName + "Output",
|
||||||
|
Fields: schema.FieldList{},
|
||||||
|
}
|
||||||
|
engineSchema.Types[outputType.Name] = outputType
|
||||||
|
|
||||||
|
inputType := &schema.InputObject{
|
||||||
|
Name: singularName + "Input",
|
||||||
|
Fields: schema.InputValueList{},
|
||||||
|
}
|
||||||
|
engineSchema.Types[inputType.Name] = inputType
|
||||||
|
|
||||||
|
orderByType := &schema.InputObject{
|
||||||
|
Name: singularName + "OrderBy",
|
||||||
|
Fields: schema.InputValueList{},
|
||||||
|
}
|
||||||
|
engineSchema.Types[orderByType.Name] = orderByType
|
||||||
|
|
||||||
|
expressionTypeName := singularName + "Expression"
|
||||||
|
expressionType := &schema.InputObject{
|
||||||
|
Name: expressionTypeName,
|
||||||
|
Fields: schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "and"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "or"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
engineSchema.Types[expressionType.Name] = expressionType
|
||||||
|
|
||||||
|
for _, col := range ti.Columns {
|
||||||
|
colName := col.Name
|
||||||
|
// if !validGraphQLIdentifierRegex.MatchString(colName) {
|
||||||
|
// return errors.New("column name is not a valid GraphQL identifier: " + colName)
|
||||||
|
// }
|
||||||
|
|
||||||
|
colType := gqltype(col)
|
||||||
|
nullableColType := ""
|
||||||
|
if x, ok := colType.(*schema.NonNull); ok {
|
||||||
|
nullableColType = x.OfType.(*schema.TypeName).Ident.Text
|
||||||
|
} else {
|
||||||
|
nullableColType = colType.(*schema.TypeName).Ident.Text
|
||||||
|
}
|
||||||
|
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
|
||||||
|
for _, f := range funcs {
|
||||||
|
if col.Type != f.Params[0].Type {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: f.Name + "_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// If it's a numeric type...
|
||||||
|
if nullableColType == "Float" || nullableColType == "Int" {
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "avg_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "count_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "max_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "min_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "stddev_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "stddev_pop_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "stddev_samp_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "variance_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "var_pop_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "var_samp_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
inputType.Fields = append(inputType.Fields, &schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: colName},
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
orderByType.Fields = append(orderByType.Fields, &schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: colName},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "OrderDirection"}}},
|
||||||
|
})
|
||||||
|
|
||||||
|
scalarExpressionTypesNeeded[nullableColType] = true
|
||||||
|
|
||||||
|
expressionType.Fields = append(expressionType.Fields, &schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: colName},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: nullableColType + "Expression"}}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
outputTypeName := &schema.TypeName{Ident: schema.Ident{Text: outputType.Name}}
|
||||||
|
inputTypeName := &schema.TypeName{Ident: schema.Ident{Text: inputType.Name}}
|
||||||
|
pluralOutputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: outputType.Name}}}}}
|
||||||
|
pluralInputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: inputType.Name}}}}}
|
||||||
|
|
||||||
|
args := schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."},
|
||||||
|
Name: schema.Ident{Text: "order_by"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: orderByType.Name}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "where"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionType.Name}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "limit"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "offset"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "first"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "last"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "before"},
|
||||||
|
Type: &schema.TypeName{Ident: schema.Ident{Text: "String"}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "after"},
|
||||||
|
Type: &schema.TypeName{Ident: schema.Ident{Text: "String"}},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
if ti.PrimaryCol != nil {
|
||||||
|
t := gqltype(*ti.PrimaryCol)
|
||||||
|
if _, ok := t.(*schema.NonNull); !ok {
|
||||||
|
t = &schema.NonNull{OfType: t}
|
||||||
|
}
|
||||||
|
args = append(args, &schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: "Finds the record by the primary key"},
|
||||||
|
Name: schema.Ident{Text: "id"},
|
||||||
|
Type: t,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if ti.TSVCol != nil {
|
||||||
|
args = append(args, &schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: "Performs full text search using a TSV index"},
|
||||||
|
Name: schema.Ident{Text: "search"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
query.Fields = append(query.Fields, &schema.Field{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: singularName,
|
||||||
|
Type: outputTypeName,
|
||||||
|
Args: args,
|
||||||
|
})
|
||||||
|
query.Fields = append(query.Fields, &schema.Field{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: pluralName,
|
||||||
|
Type: pluralOutputTypeName,
|
||||||
|
Args: args,
|
||||||
|
})
|
||||||
|
|
||||||
|
mutationArgs := append(args, schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "insert"},
|
||||||
|
Type: inputTypeName,
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "update"},
|
||||||
|
Type: inputTypeName,
|
||||||
|
},
|
||||||
|
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "upsert"},
|
||||||
|
Type: inputTypeName,
|
||||||
|
},
|
||||||
|
}...)
|
||||||
|
|
||||||
|
mutation.Fields = append(mutation.Fields, &schema.Field{
|
||||||
|
Name: singularName,
|
||||||
|
Args: mutationArgs,
|
||||||
|
Type: outputType,
|
||||||
|
})
|
||||||
|
mutation.Fields = append(mutation.Fields, &schema.Field{
|
||||||
|
Name: pluralName,
|
||||||
|
Args: append(mutationArgs, schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "inserts"},
|
||||||
|
Type: pluralInputTypeName,
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "updates"},
|
||||||
|
Type: pluralInputTypeName,
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "upserts"},
|
||||||
|
Type: pluralInputTypeName,
|
||||||
|
},
|
||||||
|
}...),
|
||||||
|
Type: outputType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for typeName, _ := range scalarExpressionTypesNeeded {
|
||||||
|
expressionType := &schema.InputObject{
|
||||||
|
Name: typeName + "Expression",
|
||||||
|
Fields: schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "eq"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "equals"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "neq"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_equals"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "gt"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "greater_than"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "lt"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "lesser_than"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "gte"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "greater_or_equals"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "lte"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "lesser_or_equals"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "in"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "nin"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_in"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "like"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "nlike"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_like"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "ilike"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "nilike"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_ilike"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "similar"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "nsimilar"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_similar"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "has_key"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "has_key_any"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "has_key_all"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "contains"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "contained_in"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "is_null"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Boolean"}}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
engineSchema.Types[expressionType.Name] = expressionType
|
||||||
|
}
|
||||||
|
|
||||||
|
err := engineSchema.ResolveTypes()
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
|
engine.Resolver = resolvers.Func(func(request *resolvers.ResolveRequest, next resolvers.Resolution) resolvers.Resolution {
|
||||||
|
resolver := resolvers.MetadataResolver.Resolve(request, next)
|
||||||
|
if resolver != nil {
|
||||||
|
return resolver
|
||||||
|
}
|
||||||
|
resolver = resolvers.MethodResolver.Resolve(request, next) // needed by the MetadataResolver
|
||||||
|
if resolver != nil {
|
||||||
|
return resolver
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
|
||||||
|
sg.ge = engine
|
||||||
|
return nil
|
||||||
|
}
|
@ -71,7 +71,7 @@ func (sg *SuperGraph) initConfig() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Roles: validate and sanitize
|
// Roles: validate and sanitize
|
||||||
c.RolesQuery = sanitize(c.RolesQuery)
|
c.RolesQuery = sanitizeVars(c.RolesQuery)
|
||||||
|
|
||||||
if len(c.RolesQuery) == 0 {
|
if len(c.RolesQuery) == 0 {
|
||||||
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
||||||
@ -108,6 +108,7 @@ func addTables(c *Config, di *psql.DBInfo) error {
|
|||||||
if err := addTable(di, t.Columns, t); err != nil {
|
if err := addTable(di, t.Columns, t); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,84 @@
|
|||||||
|
package cockraochdb_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"regexp"
|
||||||
|
"sync/atomic"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
integration_tests "github.com/dosco/super-graph/core/internal/integration_tests"
|
||||||
|
_ "github.com/jackc/pgx/v4/stdlib"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCockroachDB(t *testing.T) {
|
||||||
|
|
||||||
|
dir, err := ioutil.TempDir("", "temp-cockraochdb-")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := exec.Command("cockroach", "start", "--insecure", "--listen-addr", ":0", "--http-addr", ":0", "--store=path="+dir)
|
||||||
|
finder := &urlFinder{
|
||||||
|
c: make(chan bool),
|
||||||
|
}
|
||||||
|
cmd.Stdout = finder
|
||||||
|
cmd.Stderr = ioutil.Discard
|
||||||
|
|
||||||
|
err = cmd.Start()
|
||||||
|
if err != nil {
|
||||||
|
t.Skip("is CockroachDB installed?: " + err.Error())
|
||||||
|
}
|
||||||
|
fmt.Println("started temporary cockroach db")
|
||||||
|
|
||||||
|
stopped := int32(0)
|
||||||
|
stopDatabase := func() {
|
||||||
|
fmt.Println("stopping temporary cockroach db")
|
||||||
|
if atomic.CompareAndSwapInt32(&stopped, 0, 1) {
|
||||||
|
cmd.Process.Kill()
|
||||||
|
cmd.Process.Wait()
|
||||||
|
os.RemoveAll(dir)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
defer stopDatabase()
|
||||||
|
|
||||||
|
// Wait till we figure out the URL we should connect to...
|
||||||
|
<-finder.c
|
||||||
|
db, err := sql.Open("pgx", finder.URL)
|
||||||
|
if err != nil {
|
||||||
|
stopDatabase()
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
integration_tests.SetupSchema(t, db)
|
||||||
|
|
||||||
|
integration_tests.TestSuperGraph(t, db, func(t *testing.T) {
|
||||||
|
if t.Name() == "TestCockroachDB/nested_insert" {
|
||||||
|
t.Skip("nested inserts currently not working yet on cockroach db")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
type urlFinder struct {
|
||||||
|
c chan bool
|
||||||
|
done bool
|
||||||
|
URL string
|
||||||
|
}
|
||||||
|
|
||||||
|
func (finder *urlFinder) Write(p []byte) (n int, err error) {
|
||||||
|
s := string(p)
|
||||||
|
urlRegex := regexp.MustCompile(`\nsql:\s+(postgresql:[^\s]+)\n`)
|
||||||
|
if !finder.done {
|
||||||
|
submatch := urlRegex.FindAllStringSubmatch(s, -1)
|
||||||
|
if submatch != nil {
|
||||||
|
finder.URL = submatch[0][1]
|
||||||
|
finder.done = true
|
||||||
|
close(finder.c)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return len(p), nil
|
||||||
|
}
|
260
core/internal/integration_tests/integration_tests.go
Normal file
260
core/internal/integration_tests/integration_tests.go
Normal file
@ -0,0 +1,260 @@
|
|||||||
|
package integration_tests
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"encoding/json"
|
||||||
|
"io/ioutil"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/dosco/super-graph/core"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func SetupSchema(t *testing.T, db *sql.DB) {
|
||||||
|
|
||||||
|
_, err := db.Exec(`
|
||||||
|
CREATE TABLE users (
|
||||||
|
id integer PRIMARY KEY,
|
||||||
|
full_name text
|
||||||
|
)`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = db.Exec(`CREATE TABLE product (
|
||||||
|
id integer PRIMARY KEY,
|
||||||
|
name text,
|
||||||
|
weight float
|
||||||
|
)`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = db.Exec(`CREATE TABLE line_item (
|
||||||
|
id integer PRIMARY KEY,
|
||||||
|
product integer REFERENCES product(id),
|
||||||
|
quantity integer,
|
||||||
|
price float
|
||||||
|
)`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func DropSchema(t *testing.T, db *sql.DB) {
|
||||||
|
|
||||||
|
_, err := db.Exec(`DROP TABLE IF EXISTS line_item`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = db.Exec(`DROP TABLE IF EXISTS product`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = db.Exec(`DROP TABLE IF EXISTS users`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
|
||||||
|
config := core.Config{}
|
||||||
|
config.UseAllowList = false
|
||||||
|
config.AllowListFile = "./allow.list"
|
||||||
|
config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`
|
||||||
|
|
||||||
|
config.Roles = []core.Role{
|
||||||
|
core.Role{
|
||||||
|
Name: "anon",
|
||||||
|
Tables: []core.RoleTable{
|
||||||
|
core.RoleTable{Name: "users", Query: core.Query{Limit: 100}},
|
||||||
|
core.RoleTable{Name: "product", Query: core.Query{Limit: 100}},
|
||||||
|
core.RoleTable{Name: "line_item", Query: core.Query{Limit: 100}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
sg, err := core.NewSuperGraph(&config, db)
|
||||||
|
require.NoError(t, err)
|
||||||
|
ctx := context.Background()
|
||||||
|
|
||||||
|
t.Run("seed fixtures", func(t *testing.T) {
|
||||||
|
before(t)
|
||||||
|
res, err := sg.GraphQL(ctx,
|
||||||
|
`mutation { products (insert: $products) { id } }`,
|
||||||
|
json.RawMessage(`{"products":[
|
||||||
|
{"id":1, "name":"Charmin Ultra Soft", "weight": 0.5},
|
||||||
|
{"id":2, "name":"Hand Sanitizer", "weight": 0.2},
|
||||||
|
{"id":3, "name":"Case of Corona", "weight": 1.2}
|
||||||
|
]}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"products": [{"id": 1}, {"id": 2}, {"id": 3}]}`, string(res.Data))
|
||||||
|
|
||||||
|
res, err = sg.GraphQL(ctx,
|
||||||
|
`mutation { line_items (insert: $line_items) { id } }`,
|
||||||
|
json.RawMessage(`{"line_items":[
|
||||||
|
{"id":5001, "product":1, "price":6.95, "quantity":10},
|
||||||
|
{"id":5002, "product":2, "price":10.99, "quantity":2}
|
||||||
|
]}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"line_items": [{"id": 5001}, {"id": 5002}]}`, string(res.Data))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("get line item", func(t *testing.T) {
|
||||||
|
before(t)
|
||||||
|
res, err := sg.GraphQL(ctx,
|
||||||
|
`query { line_item(id:$id) { id, price, quantity } }`,
|
||||||
|
json.RawMessage(`{"id":5001}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"line_item": {"id": 5001, "price": 6.95, "quantity": 10}}`, string(res.Data))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("get line items", func(t *testing.T) {
|
||||||
|
before(t)
|
||||||
|
res, err := sg.GraphQL(ctx,
|
||||||
|
`query { line_items { id, price, quantity } }`,
|
||||||
|
json.RawMessage(`{}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"line_items": [{"id": 5001, "price": 6.95, "quantity": 10}, {"id": 5002, "price": 10.99, "quantity": 2}]}`, string(res.Data))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("update line item", func(t *testing.T) {
|
||||||
|
before(t)
|
||||||
|
res, err := sg.GraphQL(ctx,
|
||||||
|
`mutation { line_item(update:$update, id:$id) { id } }`,
|
||||||
|
json.RawMessage(`{"id":5001, "update":{"quantity":20}}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"line_item": {"id": 5001}}`, string(res.Data))
|
||||||
|
|
||||||
|
res, err = sg.GraphQL(ctx,
|
||||||
|
`query { line_item(id:$id) { id, price, quantity } }`,
|
||||||
|
json.RawMessage(`{"id":5001}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"line_item": {"id": 5001, "price": 6.95, "quantity": 20}}`, string(res.Data))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("delete line item", func(t *testing.T) {
|
||||||
|
before(t)
|
||||||
|
res, err := sg.GraphQL(ctx,
|
||||||
|
`mutation { line_item(delete:true, id:$id) { id } }`,
|
||||||
|
json.RawMessage(`{"id":5002}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"line_item": {"id": 5002}}`, string(res.Data))
|
||||||
|
|
||||||
|
res, err = sg.GraphQL(ctx,
|
||||||
|
`query { line_items { id, price, quantity } }`,
|
||||||
|
json.RawMessage(`{}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"line_items": [{"id": 5001, "price": 6.95, "quantity": 20}]}`, string(res.Data))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("nested insert", func(t *testing.T) {
|
||||||
|
before(t)
|
||||||
|
res, err := sg.GraphQL(ctx,
|
||||||
|
`mutation { line_items (insert: $line_item) { id, product { name } } }`,
|
||||||
|
json.RawMessage(`{"line_item":
|
||||||
|
{"id":5003, "product": { "connect": { "id": 1} }, "price":10.95, "quantity":15}
|
||||||
|
}`))
|
||||||
|
require.NoError(t, err, res.SQL())
|
||||||
|
require.Equal(t, `{"line_items": [{"id": 5003, "product": {"name": "Charmin Ultra Soft"}}]}`, string(res.Data))
|
||||||
|
})
|
||||||
|
|
||||||
|
t.Run("schema introspection", func(t *testing.T) {
|
||||||
|
before(t)
|
||||||
|
schema, err := sg.GraphQLSchema()
|
||||||
|
require.NoError(t, err)
|
||||||
|
// Uncomment the following line if you need to regenerate the expected schema.
|
||||||
|
//ioutil.WriteFile("../introspection.graphql", []byte(schema), 0644)
|
||||||
|
expected, err := ioutil.ReadFile("../introspection.graphql")
|
||||||
|
require.NoError(t, err)
|
||||||
|
assert.Equal(t, string(expected), schema)
|
||||||
|
})
|
||||||
|
|
||||||
|
res, err := sg.GraphQL(ctx, introspectionQuery, json.RawMessage(``))
|
||||||
|
assert.NoError(t, err)
|
||||||
|
assert.Contains(t, string(res.Data),
|
||||||
|
`{"queryType":{"name":"Query"},"mutationType":{"name":"Mutation"},"subscriptionType":null,"types":`)
|
||||||
|
}
|
||||||
|
|
||||||
|
const introspectionQuery = `
|
||||||
|
query IntrospectionQuery {
|
||||||
|
__schema {
|
||||||
|
queryType { name }
|
||||||
|
mutationType { name }
|
||||||
|
subscriptionType { name }
|
||||||
|
types {
|
||||||
|
...FullType
|
||||||
|
}
|
||||||
|
directives {
|
||||||
|
name
|
||||||
|
description
|
||||||
|
locations
|
||||||
|
args {
|
||||||
|
...InputValue
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fragment FullType on __Type {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
description
|
||||||
|
fields(includeDeprecated: true) {
|
||||||
|
name
|
||||||
|
description
|
||||||
|
args {
|
||||||
|
...InputValue
|
||||||
|
}
|
||||||
|
type {
|
||||||
|
...TypeRef
|
||||||
|
}
|
||||||
|
isDeprecated
|
||||||
|
deprecationReason
|
||||||
|
}
|
||||||
|
inputFields {
|
||||||
|
...InputValue
|
||||||
|
}
|
||||||
|
interfaces {
|
||||||
|
...TypeRef
|
||||||
|
}
|
||||||
|
enumValues(includeDeprecated: true) {
|
||||||
|
name
|
||||||
|
description
|
||||||
|
isDeprecated
|
||||||
|
deprecationReason
|
||||||
|
}
|
||||||
|
possibleTypes {
|
||||||
|
...TypeRef
|
||||||
|
}
|
||||||
|
}
|
||||||
|
fragment InputValue on __InputValue {
|
||||||
|
name
|
||||||
|
description
|
||||||
|
type { ...TypeRef }
|
||||||
|
defaultValue
|
||||||
|
}
|
||||||
|
fragment TypeRef on __Type {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
ofType {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
ofType {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
ofType {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
ofType {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
ofType {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
ofType {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
ofType {
|
||||||
|
kind
|
||||||
|
name
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
`
|
319
core/internal/integration_tests/introspection.graphql
Normal file
319
core/internal/integration_tests/introspection.graphql
Normal file
@ -0,0 +1,319 @@
|
|||||||
|
input FloatExpression {
|
||||||
|
contained_in:String!
|
||||||
|
contains:[Float!]!
|
||||||
|
eq:Float!
|
||||||
|
equals:Float!
|
||||||
|
greater_or_equals:Float!
|
||||||
|
greater_than:Float!
|
||||||
|
gt:Float!
|
||||||
|
gte:Float!
|
||||||
|
has_key:Float!
|
||||||
|
has_key_all:[Float!]!
|
||||||
|
has_key_any:[Float!]!
|
||||||
|
ilike:String!
|
||||||
|
in:[Float!]!
|
||||||
|
is_null:Boolean!
|
||||||
|
lesser_or_equals:Float!
|
||||||
|
lesser_than:Float!
|
||||||
|
like:String!
|
||||||
|
lt:Float!
|
||||||
|
lte:Float!
|
||||||
|
neq:Float!
|
||||||
|
nilike:String!
|
||||||
|
nin:[Float!]!
|
||||||
|
nlike:String!
|
||||||
|
not_equals:Float!
|
||||||
|
not_ilike:String!
|
||||||
|
not_in:[Float!]!
|
||||||
|
not_like:String!
|
||||||
|
not_similar:String!
|
||||||
|
nsimilar:String!
|
||||||
|
similar:String!
|
||||||
|
}
|
||||||
|
input IntExpression {
|
||||||
|
contained_in:String!
|
||||||
|
contains:[Int!]!
|
||||||
|
eq:Int!
|
||||||
|
equals:Int!
|
||||||
|
greater_or_equals:Int!
|
||||||
|
greater_than:Int!
|
||||||
|
gt:Int!
|
||||||
|
gte:Int!
|
||||||
|
has_key:Int!
|
||||||
|
has_key_all:[Int!]!
|
||||||
|
has_key_any:[Int!]!
|
||||||
|
ilike:String!
|
||||||
|
in:[Int!]!
|
||||||
|
is_null:Boolean!
|
||||||
|
lesser_or_equals:Int!
|
||||||
|
lesser_than:Int!
|
||||||
|
like:String!
|
||||||
|
lt:Int!
|
||||||
|
lte:Int!
|
||||||
|
neq:Int!
|
||||||
|
nilike:String!
|
||||||
|
nin:[Int!]!
|
||||||
|
nlike:String!
|
||||||
|
not_equals:Int!
|
||||||
|
not_ilike:String!
|
||||||
|
not_in:[Int!]!
|
||||||
|
not_like:String!
|
||||||
|
not_similar:String!
|
||||||
|
nsimilar:String!
|
||||||
|
similar:String!
|
||||||
|
}
|
||||||
|
type Mutation {
|
||||||
|
line_item(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:line_itemInput, update:line_itemInput, upsert:line_itemInput
|
||||||
|
):line_itemOutput
|
||||||
|
line_items(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:line_itemInput, update:line_itemInput, upsert:line_itemInput, inserts:[line_itemInput!]!, updates:[line_itemInput!]!, upserts:[line_itemInput!]!
|
||||||
|
):line_itemOutput
|
||||||
|
product(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:productInput, update:productInput, upsert:productInput
|
||||||
|
):productOutput
|
||||||
|
products(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:productInput, update:productInput, upsert:productInput, inserts:[productInput!]!, updates:[productInput!]!, upserts:[productInput!]!
|
||||||
|
):productOutput
|
||||||
|
user(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:userInput, update:userInput, upsert:userInput
|
||||||
|
):userOutput
|
||||||
|
users(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:userInput, update:userInput, upsert:userInput, inserts:[userInput!]!, updates:[userInput!]!, upserts:[userInput!]!
|
||||||
|
):userOutput
|
||||||
|
}
|
||||||
|
enum OrderDirection {
|
||||||
|
asc
|
||||||
|
desc
|
||||||
|
}
|
||||||
|
type Query {
|
||||||
|
line_item(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):line_itemOutput
|
||||||
|
line_items(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):[line_itemOutput!]!
|
||||||
|
product(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):productOutput
|
||||||
|
products(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):[productOutput!]!
|
||||||
|
user(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):userOutput
|
||||||
|
users(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):[userOutput!]!
|
||||||
|
}
|
||||||
|
input StringExpression {
|
||||||
|
contained_in:String!
|
||||||
|
contains:[String!]!
|
||||||
|
eq:String!
|
||||||
|
equals:String!
|
||||||
|
greater_or_equals:String!
|
||||||
|
greater_than:String!
|
||||||
|
gt:String!
|
||||||
|
gte:String!
|
||||||
|
has_key:String!
|
||||||
|
has_key_all:[String!]!
|
||||||
|
has_key_any:[String!]!
|
||||||
|
ilike:String!
|
||||||
|
in:[String!]!
|
||||||
|
is_null:Boolean!
|
||||||
|
lesser_or_equals:String!
|
||||||
|
lesser_than:String!
like:String!
lt:String!
lte:String!
neq:String!
nilike:String!
nin:[String!]!
nlike:String!
not_equals:String!
not_ilike:String!
not_in:[String!]!
not_like:String!
not_similar:String!
nsimilar:String!
similar:String!
}

input line_itemExpression {
and:line_itemExpression!
id:IntExpression!
not:line_itemExpression!
or:line_itemExpression!
price:FloatExpression!
product:IntExpression!
quantity:IntExpression!
}

input line_itemInput {
id:Int!
price:Float
product:Int
quantity:Int
}

input line_itemOrderBy {
id:OrderDirection!
price:OrderDirection!
product:OrderDirection!
quantity:OrderDirection!
}

type line_itemOutput {
avg_id:Int!
avg_price:Float
avg_product:Int
avg_quantity:Int
count_id:Int!
count_price:Float
count_product:Int
count_quantity:Int
id:Int!
max_id:Int!
max_price:Float
max_product:Int
max_quantity:Int
min_id:Int!
min_price:Float
min_product:Int
min_quantity:Int
price:Float
product:Int
quantity:Int
stddev_id:Int!
stddev_pop_id:Int!
stddev_pop_price:Float
stddev_pop_product:Int
stddev_pop_quantity:Int
stddev_price:Float
stddev_product:Int
stddev_quantity:Int
stddev_samp_id:Int!
stddev_samp_price:Float
stddev_samp_product:Int
stddev_samp_quantity:Int
var_pop_id:Int!
var_pop_price:Float
var_pop_product:Int
var_pop_quantity:Int
var_samp_id:Int!
var_samp_price:Float
var_samp_product:Int
var_samp_quantity:Int
variance_id:Int!
variance_price:Float
variance_product:Int
variance_quantity:Int
}

input productExpression {
and:productExpression!
id:IntExpression!
name:StringExpression!
not:productExpression!
or:productExpression!
weight:FloatExpression!
}

input productInput {
id:Int!
name:String
weight:Float
}

input productOrderBy {
id:OrderDirection!
name:OrderDirection!
weight:OrderDirection!
}

type productOutput {
avg_id:Int!
avg_weight:Float
count_id:Int!
count_weight:Float
id:Int!
max_id:Int!
max_weight:Float
min_id:Int!
min_weight:Float
name:String
stddev_id:Int!
stddev_pop_id:Int!
stddev_pop_weight:Float
stddev_samp_id:Int!
stddev_samp_weight:Float
stddev_weight:Float
var_pop_id:Int!
var_pop_weight:Float
var_samp_id:Int!
var_samp_weight:Float
variance_id:Int!
variance_weight:Float
weight:Float
}

input userExpression {
and:userExpression!
full_name:StringExpression!
id:IntExpression!
not:userExpression!
or:userExpression!
}

input userInput {
full_name:String
id:Int!
}

input userOrderBy {
full_name:OrderDirection!
id:OrderDirection!
}

type userOutput {
avg_id:Int!
count_id:Int!
full_name:String
id:Int!
max_id:Int!
min_id:Int!
stddev_id:Int!
stddev_pop_id:Int!
stddev_samp_id:Int!
var_pop_id:Int!
var_samp_id:Int!
variance_id:Int!
}

schema {
mutation: Mutation
query: Query
}
@@ -0,0 +1,27 @@
package cockraochdb_test

import (
	"database/sql"
	"os"
	"testing"

	integration_tests "github.com/dosco/super-graph/core/internal/integration_tests"
	_ "github.com/jackc/pgx/v4/stdlib"
	"github.com/stretchr/testify/require"
)

func TestCockroachDB(t *testing.T) {

	url, found := os.LookupEnv("SG_POSTGRESQL_TEST_URL")
	if !found {
		t.Skip("set the SG_POSTGRESQL_TEST_URL env variable if you want to run integration tests against a PostgreSQL database")
	}

	db, err := sql.Open("pgx", url)
	require.NoError(t, err)

	integration_tests.DropSchema(t, db)
	integration_tests.SetupSchema(t, db)
	integration_tests.TestSuperGraph(t, db, func(t *testing.T) {
	})
}
@@ -167,7 +167,7 @@ func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInfo
 	}
 }
 
 func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInfo, col qcode.Column, columnsRendered int) error {
-	pl := funcPrefixLen(col.Name)
+	pl := funcPrefixLen(c.schema.fm, col.Name)
 	// if pl == 0 {
 	// 	//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
 	// 	io.WriteString(c.w, `'`)
@@ -10,7 +10,7 @@ import (
 var (
 	qcompileTest, _ = qcode.NewCompiler(qcode.Config{})
 
-	schema = getTestSchema()
+	schema = GetTestSchema()
 
 	vars = NewVariables(map[string]string{
 		"admin_account_id": "5",
@@ -21,9 +21,17 @@ func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
 		return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar)
 	}
 
-	io.WriteString(c.w, `WITH "_sg_input" AS (SELECT '{{`)
+	io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
+	if insert[0] == '[' {
+		io.WriteString(c.w, `json_array_elements(`)
+	}
+	io.WriteString(c.w, `'{{`)
 	io.WriteString(c.w, qc.ActionVar)
-	io.WriteString(c.w, `}}' :: json AS j)`)
+	io.WriteString(c.w, `}}' :: json`)
+	if insert[0] == '[' {
+		io.WriteString(c.w, `)`)
+	}
+	io.WriteString(c.w, ` AS j)`)
 
 	st := util.NewStack()
 	st.Push(kvitem{_type: itemInsert, key: ti.Name, val: insert, ti: ti})
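The hunk above only changes how the `_sg_input` CTE is built: when the mutation payload is a JSON array the variable is wrapped in `json_array_elements(...)`, so a bulk insert expands into one input row per element. Below is a minimal standalone sketch of that branching, assuming simplified names of my own (`renderInputCTE`, `actionVar`), not the Super Graph API itself:

```go
package main

import (
	"fmt"
	"strings"
)

// renderInputCTE mirrors the branch added above: wrap the template
// variable in json_array_elements() only when the payload is an array.
// Names here are illustrative, not part of the psql package.
func renderInputCTE(actionVar string, insert []byte) string {
	var b strings.Builder
	isArray := len(insert) > 0 && insert[0] == '['

	b.WriteString(`WITH "_sg_input" AS (SELECT `)
	if isArray {
		b.WriteString(`json_array_elements(`)
	}
	b.WriteString(`'{{` + actionVar + `}}' :: json`)
	if isArray {
		b.WriteString(`)`)
	}
	b.WriteString(` AS j)`)
	return b.String()
}

func main() {
	fmt.Println(renderInputCTE("data", []byte(`{"name":"a"}`)))
	fmt.Println(renderInputCTE("data", []byte(`[{"name":"a"},{"name":"b"}]`)))
}
```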
@@ -90,26 +98,9 @@ func (c *compilerContext) renderInsertStmt(qc *qcode.QCode, w io.Writer, item re
 	renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
 	renderNestedInsertRelColumns(w, item.kvitem, true)
 
-	io.WriteString(w, ` FROM "_sg_input" i, `)
+	io.WriteString(w, ` FROM "_sg_input" i`)
 	renderNestedInsertRelTables(w, item.kvitem)
-
-	if item.array {
-		io.WriteString(w, `json_populate_recordset`)
-	} else {
-		io.WriteString(w, `json_populate_record`)
-	}
-
-	io.WriteString(w, `(NULL::`)
-	io.WriteString(w, ti.Name)
-
-	if len(item.path) == 0 {
-		io.WriteString(w, `, i.j) t RETURNING *)`)
-	} else {
-		io.WriteString(w, `, i.j->`)
-		joinPath(w, item.path)
-		io.WriteString(w, `) t RETURNING *)`)
-	}
-
+	io.WriteString(w, ` RETURNING *)`)
 	return nil
 }
 
@@ -172,21 +163,21 @@ func renderNestedInsertRelColumns(w io.Writer, item kvitem, values bool) error {
 func renderNestedInsertRelTables(w io.Writer, item kvitem) error {
 	if len(item.items) == 0 {
 		if item.relPC != nil && item.relPC.Type == RelOneToMany {
-			quoted(w, item.relPC.Left.Table)
 			io.WriteString(w, `, `)
+			quoted(w, item.relPC.Left.Table)
 		}
 	} else {
 		// Render tables needed to set values if child-to-parent
 		// relationship is one-to-many
 		for _, v := range item.items {
 			if v.relCP.Type == RelOneToMany {
+				io.WriteString(w, `, `)
 				if v._ctype > 0 {
 					io.WriteString(w, `"_x_`)
 					io.WriteString(w, v.relCP.Left.Table)
-					io.WriteString(w, `", `)
+					io.WriteString(w, `"`)
 				} else {
 					quoted(w, v.relCP.Left.Table)
-					io.WriteString(w, `, `)
 				}
 			}
 		}
 	}
@@ -1,4 +1,4 @@
-package psql
+package psql_test
 
 import (
 	"encoding/json"
@@ -7,9 +7,9 @@ import (
 	"fmt"
 	"io"
 
-	"github.com/dosco/super-graph/jsn"
 	"github.com/dosco/super-graph/core/internal/qcode"
 	"github.com/dosco/super-graph/core/internal/util"
+	"github.com/dosco/super-graph/jsn"
 )
 
 type itemType int
@@ -396,7 +396,12 @@ func renderInsertUpdateColumns(w io.Writer,
 		}
 
 		if values {
-			colWithTable(w, "t", cn.Name)
+			io.WriteString(w, `CAST( i.j ->>`)
+			io.WriteString(w, `'`)
+			io.WriteString(w, cn.Name)
+			io.WriteString(w, `' AS `)
+			io.WriteString(w, cn.Type)
+			io.WriteString(w, `)`)
 		} else {
 			quoted(w, cn.Name)
 		}
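With this change each input value is pulled out of the `_sg_input` CTE with the `->>` operator and cast to the column's SQL type, instead of going through `json_populate_record`. A small sketch of the emitted fragment, using stand-in types of my own rather than the compiler's real metadata structs:

```go
package main

import (
	"fmt"
	"io"
	"os"
)

// column is a stand-in for the compiler's column metadata; the real
// type lives in the psql package.
type column struct {
	Name string
	Type string
}

// writeCastColumn reproduces the CAST( i.j ->>'name' AS type) fragment
// rendered by renderInsertUpdateColumns when values == true.
func writeCastColumn(w io.Writer, cn column) {
	io.WriteString(w, `CAST( i.j ->>`)
	io.WriteString(w, `'`)
	io.WriteString(w, cn.Name)
	io.WriteString(w, `' AS `)
	io.WriteString(w, cn.Type)
	io.WriteString(w, `)`)
}

func main() {
	writeCastColumn(os.Stdout, column{Name: "price", Type: "numeric(7,2)"})
	fmt.Println()
	// Prints: CAST( i.j ->>'price' AS numeric(7,2))
}
```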
@@ -1,4 +1,4 @@
-package psql
+package psql_test
 
 import (
 	"encoding/json"
@@ -1,4 +1,4 @@
-package psql
+package psql_test
 
 import (
 	"fmt"
@@ -8,6 +8,7 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/core/internal/qcode"
 )
 
@@ -19,7 +20,7 @@ const (
 
 var (
 	qcompile *qcode.Compiler
-	pcompile *Compiler
+	pcompile *psql.Compiler
 	expected map[string][]string
 )
 
@@ -133,13 +134,16 @@ func TestMain(m *testing.M) {
 		log.Fatal(err)
 	}
 
-	schema := getTestSchema()
+	schema, err := psql.GetTestSchema()
+	if err != nil {
+		log.Fatal(err)
+	}
 
-	vars := NewVariables(map[string]string{
+	vars := psql.NewVariables(map[string]string{
 		"admin_account_id": "5",
 	})
 
-	pcompile = NewCompiler(Config{
+	pcompile = psql.NewCompiler(psql.Config{
 		Schema: schema,
 		Vars:   vars,
 	})
@@ -173,7 +177,7 @@ func TestMain(m *testing.M) {
 	os.Exit(m.Run())
 }
 
-func compileGQLToPSQL(t *testing.T, gql string, vars Variables, role string) {
+func compileGQLToPSQL(t *testing.T, gql string, vars psql.Variables, role string) {
 	generateTestFile := false
 
 	if generateTestFile {
@@ -141,7 +141,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
 		c.renderLateralJoin(sel)
 	}
 
-	if !ti.Singular {
+	if !ti.IsSingular {
 		c.renderPluralSelect(sel, ti)
 	}
 
@@ -178,7 +178,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
 		io.WriteString(c.w, `)`)
 		aliasWithID(c.w, "__sj", sel.ID)
 
-		if !ti.Singular {
+		if !ti.IsSingular {
 			io.WriteString(c.w, `)`)
 			aliasWithID(c.w, "__sj", sel.ID)
 		}
@@ -438,7 +438,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
 
 	io.WriteString(c.w, `SELECT to_jsonb("__sr_`)
 	int2string(c.w, sel.ID)
-	io.WriteString(c.w, `") `)
+	io.WriteString(c.w, `".*) `)
 
 	if sel.Paging.Type != qcode.PtOffset {
 		for i := range sel.OrderBy {
@@ -543,7 +543,7 @@ func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skip
 	var cn string
 
 	for _, col := range sel.Cols {
-		if n := funcPrefixLen(col.Name); n != 0 {
+		if n := funcPrefixLen(c.schema.fm, col.Name); n != 0 {
 			if !sel.Functions {
 				continue
 			}
@@ -706,7 +706,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
 	}
 
 	switch {
-	case ti.Singular:
+	case ti.IsSingular:
 		io.WriteString(c.w, ` LIMIT ('1') :: integer`)
 
 	case len(sel.Paging.Limit) != 0:
@@ -1193,7 +1193,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
 		io.WriteString(c.w, col.Type)
 	}
 }
 
-func funcPrefixLen(fn string) int {
+func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
 	switch {
 	case strings.HasPrefix(fn, "avg_"):
 		return 4
@@ -1218,6 +1218,14 @@ func funcPrefixLen(fn string) int {
 	case strings.HasPrefix(fn, "var_samp_"):
 		return 9
 	}
+
+	fnLen := len(fn)
+
+	for k := range fm {
+		kLen := len(k)
+		if kLen < fnLen && k[0] == fn[0] && strings.HasPrefix(fn, k) && fn[kLen] == '_' {
+			return kLen + 1
+		}
+	}
 	return 0
 }
 
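Beyond the fixed aggregate prefixes, `funcPrefixLen` now also scans the schema's function map, so a selected column such as `lower_email` can resolve against a registered single-argument SQL function named `lower`. A standalone sketch of the same lookup, with a stand-in `dbFunction` type of my own:

```go
package main

import (
	"fmt"
	"strings"
)

// dbFunction stands in for psql.DBFunction; only the map key matters here.
type dbFunction struct{ Name string }

// prefixLen returns the length of the "<function>_" prefix of fn when a
// matching function is registered, mirroring the loop added above.
func prefixLen(fm map[string]*dbFunction, fn string) int {
	fnLen := len(fn)
	for k := range fm {
		kLen := len(k)
		if kLen < fnLen && k[0] == fn[0] && strings.HasPrefix(fn, k) && fn[kLen] == '_' {
			return kLen + 1
		}
	}
	return 0
}

func main() {
	fm := map[string]*dbFunction{"lower": {Name: "lower"}}
	fmt.Println(prefixLen(fm, "lower_email")) // 6: the remaining "email" is the argument column
	fmt.Println(prefixLen(fm, "email"))       // 0: not a function column
}
```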
@@ -1,4 +1,4 @@
-package psql
+package psql_test
 
 import (
 	"bytes"
@@ -11,17 +11,20 @@ type DBSchema struct {
 	ver int
 	t   map[string]*DBTableInfo
 	rm  map[string]map[string]*DBRel
+	fm  map[string]*DBFunction
 }
 
 type DBTableInfo struct {
 	Name       string
 	Type       string
-	Singular   bool
+	IsSingular bool
 	Columns    []DBColumn
 	PrimaryCol *DBColumn
 	TSVCol     *DBColumn
 	ColMap     map[string]*DBColumn
 	ColIDMap   map[int16]*DBColumn
+	Singular   string
+	Plural     string
 }
 
 type RelType int
@@ -54,8 +57,10 @@ type DBRel struct {
 
 func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
 	schema := &DBSchema{
-		t:  make(map[string]*DBTableInfo),
-		rm: make(map[string]map[string]*DBRel),
+		ver: info.Version,
+		t:   make(map[string]*DBTableInfo),
+		rm:  make(map[string]map[string]*DBRel),
+		fm:  make(map[string]*DBFunction, len(info.Functions)),
 	}
 
 	for i, t := range info.Tables {
@@ -79,6 +84,12 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
 		}
 	}
 
+	for k, f := range info.Functions {
+		if len(f.Params) == 1 {
+			schema.fm[strings.ToLower(f.Name)] = &info.Functions[k]
+		}
+	}
+
 	return schema, nil
 }
 
@@ -89,23 +100,28 @@ func (s *DBSchema) addTable(
 	colidmap := make(map[int16]*DBColumn, len(cols))
 
 	singular := flect.Singularize(t.Key)
+	plural := flect.Pluralize(t.Key)
+
 	s.t[singular] = &DBTableInfo{
 		Name:       t.Name,
 		Type:       t.Type,
-		Singular:   true,
+		IsSingular: true,
 		Columns:    cols,
 		ColMap:     colmap,
 		ColIDMap:   colidmap,
+		Singular:   singular,
+		Plural:     plural,
 	}
 
-	plural := flect.Pluralize(t.Key)
 	s.t[plural] = &DBTableInfo{
 		Name:       t.Name,
 		Type:       t.Type,
-		Singular:   false,
+		IsSingular: false,
 		Columns:    cols,
 		ColMap:     colmap,
 		ColIDMap:   colidmap,
+		Singular:   singular,
+		Plural:     plural,
 	}
 
 	if al, ok := aliases[t.Key]; ok {
@@ -364,6 +380,14 @@ func (s *DBSchema) updateSchemaOTMT(
 	return nil
 }
 
+func (s *DBSchema) GetTableNames() []string {
+	var names []string
+	for name, _ := range s.t {
+		names = append(names, name)
+	}
+	return names
+}
+
 func (s *DBSchema) GetTable(table string) (*DBTableInfo, error) {
 	t, ok := s.t[table]
 	if !ok {
@@ -424,3 +448,11 @@ func (s *DBSchema) GetRel(child, parent string) (*DBRel, error) {
 	}
 	return rel, nil
 }
+
+func (s *DBSchema) GetFunctions() []*DBFunction {
+	var funcs []*DBFunction
+	for _, f := range s.fm {
+		funcs = append(funcs, f)
+	}
+	return funcs
+}
@@ -10,10 +10,11 @@ import (
 )
 
 type DBInfo struct {
 	Version   int
 	Tables    []DBTable
 	Columns   [][]DBColumn
-	colmap    map[string]map[string]*DBColumn
+	Functions []DBFunction
+	colMap    map[string]map[string]*DBColumn
 }
 
 func GetDBInfo(db *sql.DB) (*DBInfo, error) {
@@ -35,41 +36,56 @@ func GetDBInfo(db *sql.DB) (*DBInfo, error) {
 		return nil, err
 	}
 
-	di.colmap = make(map[string]map[string]*DBColumn, len(di.Tables))
-
-	for i, t := range di.Tables {
+	for _, t := range di.Tables {
 		cols, err := GetColumns(db, "public", t.Name)
 		if err != nil {
 			return nil, err
 		}
 
 		di.Columns = append(di.Columns, cols)
-		di.colmap[t.Key] = make(map[string]*DBColumn, len(cols))
+	}
 
-		for n, c := range di.Columns[i] {
-			di.colmap[t.Key][c.Key] = &di.Columns[i][n]
-		}
+	di.colMap = newColMap(di.Tables, di.Columns)
+
+	di.Functions, err = GetFunctions(db)
+	if err != nil {
+		return nil, err
 	}
 
 	return di, nil
 }
 
+func newColMap(tables []DBTable, columns [][]DBColumn) map[string]map[string]*DBColumn {
+	cm := make(map[string]map[string]*DBColumn, len(tables))
+
+	for i, t := range tables {
+		cols := columns[i]
+		cm[t.Key] = make(map[string]*DBColumn, len(cols))
+
+		for n, c := range cols {
+			cm[t.Key][c.Key] = &columns[i][n]
+		}
+	}
+
+	return cm
+}
+
 func (di *DBInfo) AddTable(t DBTable, cols []DBColumn) {
 	t.ID = di.Tables[len(di.Tables)-1].ID
 
 	di.Tables = append(di.Tables, t)
-	di.colmap[t.Key] = make(map[string]*DBColumn, len(cols))
+	di.colMap[t.Key] = make(map[string]*DBColumn, len(cols))
 
 	for i := range cols {
 		cols[i].ID = int16(i)
 		c := &cols[i]
-		di.colmap[t.Key][c.Key] = c
+		di.colMap[t.Key][c.Key] = c
 	}
 	di.Columns = append(di.Columns, cols)
 }
 
 func (di *DBInfo) GetColumn(table, column string) (*DBColumn, bool) {
-	v, ok := di.colmap[strings.ToLower(table)][strings.ToLower(column)]
+	v, ok := di.colMap[strings.ToLower(table)][strings.ToLower(column)]
 	return v, ok
 }
 
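The column index is now built once by a helper keyed first by table key and then by column key, instead of being filled inline during discovery. A small sketch of that two-level index with simplified stand-in types (the real ones are `psql.DBTable` and `psql.DBColumn`):

```go
package main

import "fmt"

// dbTable and dbColumn are simplified stand-ins, just enough to show
// the two-level map that newColMap builds.
type dbTable struct{ Key string }
type dbColumn struct{ Key, Type string }

func newColMap(tables []dbTable, columns [][]dbColumn) map[string]map[string]*dbColumn {
	cm := make(map[string]map[string]*dbColumn, len(tables))
	for i, t := range tables {
		cols := columns[i]
		cm[t.Key] = make(map[string]*dbColumn, len(cols))
		for n := range cols {
			// Point into the backing slice so lookups share one copy.
			cm[t.Key][cols[n].Key] = &columns[i][n]
		}
	}
	return cm
}

func main() {
	tables := []dbTable{{Key: "users"}}
	columns := [][]dbColumn{{{Key: "id", Type: "bigint"}, {Key: "email", Type: "character varying"}}}
	cm := newColMap(tables, columns)
	fmt.Println(cm["users"]["email"].Type) // character varying
}
```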
@@ -237,6 +253,64 @@ ORDER BY id;`
 	return cols, nil
 }
 
+type DBFunction struct {
+	Name   string
+	Params []DBFuncParam
+}
+
+type DBFuncParam struct {
+	ID   int
+	Name string
+	Type string
+}
+
+func GetFunctions(db *sql.DB) ([]DBFunction, error) {
+	sqlStmt := `
+SELECT
+	routines.routine_name,
+	parameters.specific_name,
+	parameters.data_type,
+	parameters.parameter_name,
+	parameters.ordinal_position
+FROM
+	information_schema.routines
+RIGHT JOIN
+	information_schema.parameters
+	ON (routines.specific_name = parameters.specific_name and parameters.ordinal_position IS NOT NULL)
+WHERE
+	routines.specific_schema = 'public'
+ORDER BY
+	routines.routine_name, parameters.ordinal_position;`
+
+	rows, err := db.Query(sqlStmt)
+	if err != nil {
+		return nil, fmt.Errorf("Error fetching functions: %s", err)
+	}
+	defer rows.Close()
+
+	var funcs []DBFunction
+	fm := make(map[string]int)
+
+	for rows.Next() {
+		var fn, fid string
+		fp := DBFuncParam{}
+
+		err = rows.Scan(&fn, &fid, &fp.Type, &fp.Name, &fp.ID)
+		if err != nil {
+			return nil, err
+		}
+
+		if i, ok := fm[fid]; ok {
+			funcs[i].Params = append(funcs[i].Params, fp)
+		} else {
+			funcs = append(funcs, DBFunction{Name: fn, Params: []DBFuncParam{fp}})
+			fm[fid] = len(funcs) - 1
+		}
+	}
+
+	return funcs, nil
+}
+
 // func GetValType(type string) qcode.ValType {
 // 	switch {
 // 	case "bigint", "integer", "smallint", "numeric", "bigserial":
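Each row of the `information_schema` query above carries one parameter; rows that share a `specific_name` are folded into a single `DBFunction`. A sketch of just that grouping step with the database scan stubbed out; the `row` struct and the sample function name are my own illustrations, not values the repository defines:

```go
package main

import "fmt"

// row mirrors one record of the information_schema query; the real code
// scans these straight from *sql.Rows.
type row struct {
	FnName, SpecificName, ParamType, ParamName string
	Ordinal                                    int
}

type dbFuncParam struct {
	ID         int
	Name, Type string
}

type dbFunction struct {
	Name   string
	Params []dbFuncParam
}

// groupFunctions folds parameter rows into functions keyed by their
// specific_name, the same way GetFunctions builds its result slice.
func groupFunctions(rows []row) []dbFunction {
	var funcs []dbFunction
	fm := make(map[string]int)

	for _, r := range rows {
		fp := dbFuncParam{ID: r.Ordinal, Name: r.ParamName, Type: r.ParamType}
		if i, ok := fm[r.SpecificName]; ok {
			funcs[i].Params = append(funcs[i].Params, fp)
		} else {
			funcs = append(funcs, dbFunction{Name: r.FnName, Params: []dbFuncParam{fp}})
			fm[r.SpecificName] = len(funcs) - 1
		}
	}
	return funcs
}

func main() {
	rows := []row{
		{"search_products", "search_products_16384", "text", "query", 1},
		{"search_products", "search_products_16384", "integer", "lim", 2},
	}
	fmt.Println(groupFunctions(rows))
}
```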
@@ -1,11 +1,10 @@
 package psql
 
 import (
-	"log"
 	"strings"
 )
 
-func getTestSchema() *DBSchema {
+func GetTestDBInfo() *DBInfo {
 	tables := []DBTable{
 		DBTable{Name: "customers", Type: "table"},
 		DBTable{Name: "users", Type: "table"},
@@ -74,36 +73,19 @@ func getTestSchema() *DBSchema {
 		}
 	}
 
-	schema := &DBSchema{
-		ver: 110000,
-		t:   make(map[string]*DBTableInfo),
-		rm:  make(map[string]map[string]*DBRel),
+	return &DBInfo{
+		Version:   110000,
+		Tables:    tables,
+		Columns:   columns,
+		Functions: []DBFunction{},
+		colMap:    newColMap(tables, columns),
 	}
+}
 
+func GetTestSchema() (*DBSchema, error) {
 	aliases := map[string][]string{
 		"users": []string{"mes"},
 	}
 
-	for i, t := range tables {
-		err := schema.addTable(t, columns[i], aliases)
-		if err != nil {
-			log.Fatal(err)
-		}
-	}
-
-	for i, t := range tables {
-		err := schema.firstDegreeRels(t, columns[i])
-		if err != nil {
-			log.Fatal(err)
-		}
-	}
-
-	for i, t := range tables {
-		err := schema.secondDegreeRels(t, columns[i])
-		if err != nil {
-			log.Fatal(err)
-		}
-	}
-
-	return schema
+	return NewDBSchema(GetTestDBInfo(), aliases)
 }
|
@ -1,25 +1,25 @@
|
|||||||
=== RUN TestCompileInsert
|
=== RUN TestCompileInsert
|
||||||
=== RUN TestCompileInsert/simpleInsert
|
=== RUN TestCompileInsert/simpleInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/singleInsert
|
=== RUN TestCompileInsert/singleInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT "t"."name", "t"."description", "t"."price", "t"."user_id" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/bulkInsert
|
=== RUN TestCompileInsert/bulkInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/simpleInsertWithPresets
|
=== RUN TestCompileInsert/simpleInsertWithPresets
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertManyToMany
|
=== RUN TestCompileInsert/nestedInsertManyToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT "t"."sale_type", "t"."quantity", "t"."due_date", "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products", json_populate_record(NULL::purchases, i.j) t RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT "t"."sale_type", "t"."quantity", "t"."due_date", "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers", json_populate_record(NULL::purchases, i.j) t RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToMany
|
=== RUN TestCompileInsert/nestedInsertOneToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j->'product') t RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOne
|
=== RUN TestCompileInsert/nestedInsertOneToOne
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j->'user') t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
|
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
|
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
|
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileInsert (0.02s)
|
--- PASS: TestCompileInsert (0.02s)
|
||||||
--- PASS: TestCompileInsert/simpleInsert (0.00s)
|
--- PASS: TestCompileInsert/simpleInsert (0.00s)
|
||||||
--- PASS: TestCompileInsert/singleInsert (0.00s)
|
--- PASS: TestCompileInsert/singleInsert (0.00s)
|
||||||
@ -33,67 +33,67 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id"
|
|||||||
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
|
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
|
||||||
=== RUN TestCompileMutate
|
=== RUN TestCompileMutate
|
||||||
=== RUN TestCompileMutate/singleUpsert
|
=== RUN TestCompileMutate/singleUpsert
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/singleUpsertWhere
|
=== RUN TestCompileMutate/singleUpsertWhere
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/bulkUpsert
|
=== RUN TestCompileMutate/bulkUpsert
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_recordset(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/delete
|
=== RUN TestCompileMutate/delete
|
||||||
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileMutate (0.01s)
|
--- PASS: TestCompileMutate (0.00s)
|
||||||
--- PASS: TestCompileMutate/singleUpsert (0.00s)
|
--- PASS: TestCompileMutate/singleUpsert (0.00s)
|
||||||
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
|
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
|
||||||
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
|
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
|
||||||
--- PASS: TestCompileMutate/delete (0.00s)
|
--- PASS: TestCompileMutate/delete (0.00s)
|
||||||
=== RUN TestCompileQuery
|
=== RUN TestCompileQuery
|
||||||
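The query-compiler cases below differ in a single detail: the older output wraps each sub-result row as `to_jsonb("__sr_N")`, while the newer output writes `to_jsonb("__sr_N".*)`. In PostgreSQL both spellings serialize the whole row of the aliased subquery; the `.*` form simply makes the whole-row expansion explicit instead of relying on the bare alias being resolved as a composite value. A tiny sketch of the two spellings, using nothing beyond a throwaway subquery:

```sql
-- Bare alias: "r" is resolved as a composite (whole-row) value.
SELECT to_jsonb(r) AS json
FROM (SELECT 1 AS id, 'widget' AS name) AS r;

-- Explicit expansion: same result, with the whole-row reference spelled out.
SELECT to_jsonb(r.*) AS json
FROM (SELECT 1 AS id, 'widget' AS name) AS r;
```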
=== RUN TestCompileQuery/withComplexArgs
|
=== RUN TestCompileQuery/withComplexArgs
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereAndList
|
=== RUN TestCompileQuery/withWhereAndList
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereIsNull
|
=== RUN TestCompileQuery/withWhereIsNull
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereMultiOr
|
=== RUN TestCompileQuery/withWhereMultiOr
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/fetchByID
|
=== RUN TestCompileQuery/fetchByID
|
||||||
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/searchQuery
|
=== RUN TestCompileQuery/searchQuery
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/oneToMany
|
=== RUN TestCompileQuery/oneToMany
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/oneToManyReverse
|
=== RUN TestCompileQuery/oneToManyReverse
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "__sj_1"."json" AS "users" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."email" AS "email" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "__sj_1"."json" AS "users" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."email" AS "email" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/oneToManyArray
|
=== RUN TestCompileQuery/oneToManyArray
|
||||||
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3") AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/manyToMany
|
=== RUN TestCompileQuery/manyToMany
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/manyToManyReverse
|
=== RUN TestCompileQuery/manyToManyReverse
|
||||||
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunction
|
=== RUN TestCompileQuery/aggFunction
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunctionBlockedByCol
|
=== RUN TestCompileQuery/aggFunctionBlockedByCol
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunctionDisabled
|
=== RUN TestCompileQuery/aggFunctionDisabled
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunctionWithFilter
|
=== RUN TestCompileQuery/aggFunctionWithFilter
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/syntheticTables
|
=== RUN TestCompileQuery/syntheticTables
|
||||||
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/queryWithVariables
|
=== RUN TestCompileQuery/queryWithVariables
|
||||||
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereOnRelations
|
=== RUN TestCompileQuery/withWhereOnRelations
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/multiRoot
|
=== RUN TestCompileQuery/multiRoot
|
||||||
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4") AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3") AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/jsonColumnAsTable
|
=== RUN TestCompileQuery/jsonColumnAsTable
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withCursor
|
=== RUN TestCompileQuery/withCursor
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0") - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
|
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/blockedQuery
|
=== RUN TestCompileQuery/blockedQuery
|
||||||
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/blockedFunctions
|
=== RUN TestCompileQuery/blockedFunctions
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileQuery (0.02s)
|
--- PASS: TestCompileQuery (0.02s)
|
||||||
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
|
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
|
||||||
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
|
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
|
||||||
@@ -121,23 +121,23 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
|
|||||||
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
|
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
|
||||||
=== RUN TestCompileUpdate
|
=== RUN TestCompileUpdate
|
||||||
=== RUN TestCompileUpdate/singleUpdate
|
=== RUN TestCompileUpdate/singleUpdate
|
||||||
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/simpleUpdateWithPresets
|
=== RUN TestCompileUpdate/simpleUpdateWithPresets
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT "t"."name", "t"."price", 'now' :: timestamp without time zone FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateManyToMany
|
=== RUN TestCompileUpdate/nestedUpdateManyToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToMany
|
=== RUN TestCompileUpdate/nestedUpdateOneToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOne
|
=== RUN TestCompileUpdate/nestedUpdateOneToOne
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT "t"."email" FROM "_sg_input" i, json_populate_record(NULL::users, i.j->'user') t) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileUpdate (0.02s)
--- PASS: TestCompileUpdate/singleUpdate (0.00s)
--- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s)
@@ -148,4 +148,4 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FR
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
-ok github.com/dosco/super-graph/core/internal/psql 0.320s
+ok github.com/dosco/super-graph/core/internal/psql 0.306s
@@ -91,25 +91,9 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
 	renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
 	renderNestedUpdateRelColumns(w, item.kvitem, true)

-	io.WriteString(w, ` FROM "_sg_input" i, `)
+	io.WriteString(w, ` FROM "_sg_input" i`)
 	renderNestedUpdateRelTables(w, item.kvitem)
-
-	if item.array {
-		io.WriteString(w, `json_populate_recordset`)
-	} else {
-		io.WriteString(w, `json_populate_record`)
-	}
-
-	io.WriteString(w, `(NULL::`)
-	io.WriteString(w, ti.Name)
-
-	if len(item.path) == 0 {
-		io.WriteString(w, `, i.j) t)`)
-	} else {
-		io.WriteString(w, `, i.j->`)
-		joinPath(w, item.path)
-		io.WriteString(w, `) t) `)
-	}
+	io.WriteString(w, `) `)

 	if item.id != 0 {
 		// Render sql to set id values if child-to-parent
@@ -137,9 +121,11 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
 		io.WriteString(w, `)`)

 	} else {
-		io.WriteString(w, ` WHERE `)
-		if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
-			return err
+		if qc.Selects[0].Where != nil {
+			io.WriteString(w, ` WHERE `)
+			if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
+				return err
+			}
 		}
 	}

@@ -202,9 +188,9 @@ func renderNestedUpdateRelTables(w io.Writer, item kvitem) error {
 	// relationship is one-to-many
 	for _, v := range item.items {
 		if v._ctype > 0 && v.relCP.Type == RelOneToMany {
-			io.WriteString(w, `"_x_`)
+			io.WriteString(w, `, "_x_`)
 			io.WriteString(w, v.relCP.Left.Table)
-			io.WriteString(w, `", `)
+			io.WriteString(w, `"`)
 		}
 	}
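The effect of this change shows up in the test expectations above: update values are now pulled out of the JSON input with typed casts (`CAST( i.j ->>'col' AS type)`) instead of going through `json_populate_record`. The snippet below is not the project's compiler code; it is a minimal, hypothetical sketch of rendering such a cast list, with a made-up `Column` type standing in for the real column metadata.

```go
package main

import (
	"fmt"
	"strings"
)

// Column is a hypothetical, simplified stand-in for the compiler's
// column metadata: a name plus its Postgres type.
type Column struct {
	Name string
	Type string
}

// renderCastList builds the SELECT list used inside an
// UPDATE ... SET (...) = (SELECT ...) statement, casting each value
// pulled out of the JSON input row `i.j`.
func renderCastList(cols []Column) string {
	parts := make([]string, 0, len(cols))
	for _, c := range cols {
		parts = append(parts, fmt.Sprintf(`CAST( i.j ->>'%s' AS %s)`, c.Name, c.Type))
	}
	return strings.Join(parts, ", ")
}

func main() {
	cols := []Column{
		{Name: "name", Type: "character varying"},
		{Name: "price", Type: "numeric(7,2)"},
	}
	// Prints: CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2))
	fmt.Println(renderCastList(cols))
}
```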
|
@@ -1,4 +1,4 @@
-package psql
+package psql_test

 import (
 	"encoding/json"
|
@@ -58,21 +58,14 @@ func (sg *SuperGraph) initPrepared() error {
 		}

 		err := sg.prepareStmt(v)
-		if err == nil {
+		if err != nil {
+			sg.log.Printf("WRN %s: %v", v.Name, err)
+		} else {
 			success++
-			continue
 		}

-		// if len(v.Vars) == 0 {
-		// logger.Warn().Err(err).Msg(v.Query)
-		// } else {
-		// logger.Warn().Err(err).Msgf("%s %s", v.Vars, v.Query)
-		// }
 	}

-	// logger.Info().
-	// Msgf("Registered %d of %d queries from allow.list as prepared statements",
-	// success, len(list))
+	sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))

 	return nil
 }
@@ -84,13 +77,6 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {

 	qt := qcode.GetQType(query)
 	ct := context.Background()
-
-	tx, err := sg.db.BeginTx(ct, nil)
-	if err != nil {
-		return err
-	}
-	defer tx.Rollback() //nolint: errcheck

 	switch qt {
 	case qcode.QTQuery:
 		var stmts1 []stmt
@@ -108,7 +94,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {

 		//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)

-		err = sg.prepare(ct, tx, stmts1, stmtHash(item.Name, "user"))
+		err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
 		if err != nil {
 			return err
 		}
@@ -124,7 +110,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
 			return err
 		}

-		err = sg.prepare(ct, tx, stmts2, stmtHash(item.Name, "anon"))
+		err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
 		if err != nil {
 			return err
 		}
@@ -135,36 +121,29 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
 			// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)

 			stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
+			if err == psql.ErrAllTablesSkipped {
+				return nil
+			}
 			if err != nil {
-				// if len(item.Vars) == 0 {
-				// logger.Warn().Err(err).Msg(item.Query)
-				// } else {
-				// logger.Warn().Err(err).Msgf("%s %s", item.Vars, item.Query)
-				// }
-				continue
+				return err
 			}

-			err = sg.prepare(ct, tx, stmts, stmtHash(item.Name, role.Name))
+			err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
 			if err != nil {
 				return err
 			}
 		}
 	}

-	if err := tx.Commit(); err != nil {
-		return err
-	}
-
 	return nil
 }

-func (sg *SuperGraph) prepare(ct context.Context, tx *sql.Tx, st []stmt, key string) error {
+func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
 	finalSQL, am := processTemplate(st[0].sql)

-	sd, err := tx.Prepare(finalSQL)
+	sd, err := sg.db.Prepare(finalSQL)
 	if err != nil {
-		return err
+		return fmt.Errorf("prepare failed: %v: %s", err, finalSQL)
 	}

 	sg.prepared[key] = &preparedItem{
@@ -256,7 +235,9 @@ func (sg *SuperGraph) initAllowList() error {
 		sg.log.Printf("WRN allow list disabled no file specified")
 	}

-	if sg.conf.UseAllowList {
+	// When list is not eabled it is still created and
+	// and new queries are saved to it.
+	if !sg.conf.UseAllowList {
 		ac = allow.Config{CreateIfNotExists: true, Persist: true}
 	}
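The core of this change is that allow-list statements are now prepared directly on the shared `*sql.DB` pool instead of inside a dedicated transaction, and a failing query is logged and skipped rather than aborting startup. A minimal sketch of that pattern, using only the standard `database/sql` package; the cache type and names are illustrative, not the project's own:

```go
package cache

import (
	"database/sql"
	"fmt"
)

// stmtCache prepares SQL once per key on the shared connection pool.
type stmtCache struct {
	db    *sql.DB
	cache map[string]*sql.Stmt
}

func newStmtCache(db *sql.DB) *stmtCache {
	return &stmtCache{db: db, cache: make(map[string]*sql.Stmt)}
}

// prepare mirrors the idea above: prepare on the pool itself and wrap
// any error with the failing SQL so it is easy to debug from the logs.
func (c *stmtCache) prepare(key, query string) error {
	sd, err := c.db.Prepare(query)
	if err != nil {
		return fmt.Errorf("prepare failed: %v: %s", err, query)
	}
	c.cache[key] = sd
	return nil
}
```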
|
@@ -465,6 +465,14 @@ shuffle_strings
 numerify
 ```

+Other utility functions
+
+```
+shuffle_strings(string_array)
+make_slug(text)
+make_slug_lang(text, lang)
+```
+
 ### Migrations

 Easy database migrations are the most important thing when building products backed by a relational database. We make it super easy to manage and migrate your database.
||||||
@@ -722,6 +730,32 @@ query {
 }
 ```

+### Custom Functions
+
+Any function defined in the database, like the `add_five` function below that adds 5 to any number given to it, can be used
+within your query. The one limitation is that it must be a function that accepts only a single argument. The function is used within your GraphQL in a similar way to how aggregations are used above. Example below
+
+```graphql
+query {
+  thread(id: 5) {
+    id
+    total_votes
+    add_five_total_votes
+  }
+}
+```
+
+Postgres user-defined function `add_five`
+```
+CREATE OR REPLACE FUNCTION add_five(a integer) RETURNS integer AS $$
+BEGIN
+
+    RETURN a + 5;
+END;
+$$ LANGUAGE plpgsql;
+```
+
+
 In GraphQL, mutations are the operation type for when you need to modify data. Super Graph supports `insert`, `update`, `upsert` and `delete`. You can also do complex nested inserts and updates.

 When using mutations the data must be passed as variables since Super Graph compiles the query into a prepared statement in the database for maximum speed. Prepared statements are like functions in your code: when called they accept arguments, and your variables are passed in as those arguments.
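For illustration, here is a hedged sketch of what passing data as variables looks like when embedding the core package from Go. The `sg` value is assumed to be an already initialized `*core.SuperGraph`, the table and column names are made up, and the usual `context`, `encoding/json`, `fmt` and `github.com/dosco/super-graph/core` imports are assumed.

```go
// runMutation is a minimal sketch; sg is assumed to be an already
// initialized *core.SuperGraph.
func runMutation(sg *core.SuperGraph) error {
	query := `
	mutation {
		product(insert: $data) {
			id
			name
		}
	}`

	// The row to insert travels as a GraphQL variable, so the compiled
	// query can be prepared once and executed with different arguments.
	vars := json.RawMessage(`{"data": {"name": "Art of Computer Programming", "price": 30.50}}`)

	res, err := sg.GraphQL(context.Background(), query, vars)
	if err != nil {
		return err
	}

	fmt.Println(string(res.Data))
	return nil
}
```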
||||||
@@ -1701,7 +1735,7 @@ reload_on_config_change: true
 # seed_file: seed.js

 # Path pointing to where the migrations can be found
-migrations_path: ./config/migrations
+migrations_path: ./migrations

 # Postgres related environment Variables
 # SG_DATABASE_HOST
|
@@ -3,6 +3,11 @@ services:
   db:
     image: postgres
     tmpfs: /var/lib/postgresql/data
+    environment:
+      POSTGRES_USER: postgres
+      POSTGRES_PASSWORD: postgres
+    ports:
+      - "5432:5432"

   rails_app:
     image: dosco/super-graph-demo:latest
|
6
go.mod
6
go.mod
@@ -1,12 +1,14 @@
 module github.com/dosco/super-graph

 require (
+	github.com/DATA-DOG/go-sqlmock v1.4.1
 	github.com/GeertJohan/go.rice v1.0.0
 	github.com/NYTimes/gziphandler v1.1.1
 	github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
 	github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
 	github.com/brianvoe/gofakeit/v5 v5.2.0
 	github.com/cespare/xxhash/v2 v2.1.1
+	github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d
 	github.com/daaku/go.zipexe v1.0.1 // indirect
 	github.com/dgrijalva/jwt-go v3.2.0+incompatible
 	github.com/dlclark/regexp2 v1.2.0 // indirect
@@ -15,6 +17,7 @@ require (
 	github.com/garyburd/redigo v1.6.0
 	github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
 	github.com/gobuffalo/flect v0.2.1
+	github.com/gosimple/slug v1.9.0
 	github.com/jackc/pgtype v1.3.0
 	github.com/jackc/pgx/v4 v4.6.0
 	github.com/mitchellh/mapstructure v1.2.2 // indirect
@@ -27,11 +30,14 @@ require (
 	github.com/spf13/jwalterweatherman v1.1.0 // indirect
 	github.com/spf13/pflag v1.0.5 // indirect
 	github.com/spf13/viper v1.6.3
+	github.com/stretchr/testify v1.5.1
 	github.com/valyala/fasttemplate v1.1.0
 	go.uber.org/zap v1.14.1
 	golang.org/x/crypto v0.0.0-20200414173820-0848c9571904
+	golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e // indirect
 	golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
 	golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 // indirect
+	gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
 	gopkg.in/ini.v1 v1.55.0 // indirect
 )
|
||||||
|
34
go.sum
34
go.sum
@ -1,6 +1,8 @@
|
|||||||
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
|
||||||
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
|
||||||
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
|
||||||
|
github.com/DATA-DOG/go-sqlmock v1.4.1 h1:ThlnYciV1iM/V0OSF/dtkqWb6xo5qITT1TJBG1MRDJM=
|
||||||
|
github.com/DATA-DOG/go-sqlmock v1.4.1/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
|
||||||
github.com/GeertJohan/go.incremental v1.0.0 h1:7AH+pY1XUgQE4Y1HcXYaMqAI0m9yrFqo/jt0CW30vsg=
|
github.com/GeertJohan/go.incremental v1.0.0 h1:7AH+pY1XUgQE4Y1HcXYaMqAI0m9yrFqo/jt0CW30vsg=
|
||||||
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
|
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
|
||||||
github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=
|
github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=
|
||||||
@ -25,9 +27,13 @@ github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
|
|||||||
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
||||||
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
|
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
|
||||||
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||||
|
github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d h1:JnYHwwRhFmQ8DeyfqmIrzpkkxnZ+iT5V1CUd3Linin0=
|
||||||
|
github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d/go.mod h1:+34LPrbHFfKVDPsNfi445UArMEjbeTlCm7C+OpdC7IU=
|
||||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||||
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
|
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
|
||||||
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
||||||
|
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd h1:qMd81Ts1T2OTKmB4acZcyKaMtRnY5Y44NuXGX2GFJ1w=
|
||||||
|
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
|
||||||
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
|
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
|
||||||
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||||
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||||
@ -51,6 +57,7 @@ github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk
|
|||||||
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||||
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65 h1:Nud597JuGCF/MScrb6NNVDRgmuk8X7w3pFc5GvSsm5E=
|
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65 h1:Nud597JuGCF/MScrb6NNVDRgmuk8X7w3pFc5GvSsm5E=
|
||||||
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
|
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
|
||||||
|
github.com/friendsofgo/graphiql v0.2.2/go.mod h1:8Y2kZ36AoTGWs78+VRpvATyt3LJBx0SZXmay80ZTRWo=
|
||||||
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
|
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
|
||||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||||
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
|
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
|
||||||
@ -83,6 +90,9 @@ github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGa
|
|||||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||||
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
|
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
|
||||||
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
||||||
|
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||||
|
github.com/gosimple/slug v1.9.0 h1:r5vDcYrFz9BmfIAMC829un9hq7hKM4cHUrsv36LbEqs=
|
||||||
|
github.com/gosimple/slug v1.9.0/go.mod h1:AMZ+sOVe65uByN3kgEyf9WEBKBCSS+dJjMX9x4vDJbg=
|
||||||
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
|
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
|
||||||
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
|
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
|
||||||
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
|
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
|
||||||
@ -179,6 +189,8 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW
|
|||||||
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229 h1:E2B8qYyeSgv5MXpmzZXRNp8IAQ4vjxIjhpAf5hv/tAg=
|
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229 h1:E2B8qYyeSgv5MXpmzZXRNp8IAQ4vjxIjhpAf5hv/tAg=
|
||||||
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
|
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
|
||||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||||
|
github.com/opentracing/opentracing-go v1.0.2 h1:3jA2P6O1F9UOrWVpwrIo17pu01KWvNWg4X946/Y5Zwg=
|
||||||
|
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
|
||||||
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
|
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
|
||||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||||
github.com/pelletier/go-toml v1.7.0 h1:7utD74fnzVc/cpcyy8sjrlFr5vYpypUixARcHIMIGuI=
|
github.com/pelletier/go-toml v1.7.0 h1:7utD74fnzVc/cpcyy8sjrlFr5vYpypUixARcHIMIGuI=
|
||||||
@ -199,6 +211,8 @@ github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
|
|||||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||||
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||||
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
|
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
|
||||||
|
github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be h1:ta7tUOvsPHVHGom5hKW5VXNc2xZIkfCKP8iaqOyYtUQ=
|
||||||
|
github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be/go.mod h1:MIDFMn7db1kT65GmV94GzpX9Qdi7N/pQlwb+AN8wh+Q=
|
||||||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
||||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||||
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
|
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
|
||||||
@ -209,9 +223,13 @@ github.com/rs/zerolog v1.15.0 h1:uPRuwkWF4J6fGsJ2R0Gn2jB1EQiav9k3S6CSdygQJXY=
|
|||||||
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
|
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
|
||||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||||
|
github.com/segmentio/ksuid v1.0.2 h1:9yBfKyw4ECGTdALaF09Snw3sLJmYIX6AbPJrAy6MrDc=
|
||||||
|
github.com/segmentio/ksuid v1.0.2/go.mod h1:BXuJDr2byAiHuQaQtSKoXh1J0YmUDurywOXgB2w+OSU=
|
||||||
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE=
|
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE=
|
||||||
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
|
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
|
||||||
|
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
||||||
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
|
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
|
||||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||||
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
||||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||||
@ -256,6 +274,12 @@ github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5
|
|||||||
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
|
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
|
||||||
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||||
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
|
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
|
||||||
|
github.com/uber-go/atomic v1.3.2 h1:Azu9lPBWRNKzYXSIwRfgRuDuS0YKsK4NFhiQv98gkxo=
|
||||||
|
github.com/uber-go/atomic v1.3.2/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex1PFV8g=
|
||||||
|
github.com/uber/jaeger-client-go v2.14.1-0.20180928181052-40fb3b2c4120+incompatible h1:Dw0AFQs6RGO8RxMPGP2LknN/VtHolVH82P9PP0Ni+9w=
|
||||||
|
github.com/uber/jaeger-client-go v2.14.1-0.20180928181052-40fb3b2c4120+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
|
||||||
|
github.com/uber/jaeger-lib v1.5.0 h1:OHbgr8l656Ub3Fw5k9SWnBfIEwvoHQ+W2y+Aa9D1Uyo=
|
||||||
|
github.com/uber/jaeger-lib v1.5.0/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
|
||||||
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
||||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||||
@ -286,6 +310,7 @@ golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACk
|
|||||||
golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
|
golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
|
||||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
|
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 h1:bXoxMPcSLOq08zI3/c5dEBT6lE4eh+jOh886GHrn6V8=
|
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 h1:bXoxMPcSLOq08zI3/c5dEBT6lE4eh+jOh886GHrn6V8=
|
||||||
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
@ -294,6 +319,7 @@ golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHl
|
|||||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs=
|
golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs=
|
||||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||||
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
||||||
|
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
@ -304,6 +330,8 @@ golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR
|
|||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7 h1:fHDIZ2oxGnUZRN6WgWFCbYBjH9uqVPRCUVUDhs0wnbA=
|
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7 h1:fHDIZ2oxGnUZRN6WgWFCbYBjH9uqVPRCUVUDhs0wnbA=
|
||||||
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e h1:3G+cUijn7XD+S4eJFddp53Pv7+slrESplyjG25HgL+k=
|
||||||
|
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
@ -324,6 +352,7 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w
|
|||||||
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg=
|
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg=
|
||||||
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
|
||||||
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||||
@ -342,10 +371,13 @@ golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtn
|
|||||||
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5 h1:hKsoRgsbwY1NafxrwTs+k64bikrLBkAgPir1TNCj3Zs=
|
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5 h1:hKsoRgsbwY1NafxrwTs+k64bikrLBkAgPir1TNCj3Zs=
|
||||||
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.0.0-20200128220307-520188d60f50 h1:0qnG0gwzB6QPiLDow10WJDdB38c+hQ7ArxO26Qc1boM=
|
||||||
|
golang.org/x/tools v0.0.0-20200128220307-520188d60f50/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||||
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
|
||||||
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||||
@ -357,6 +389,8 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+
|
|||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
|
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
|
||||||
gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
|
gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
|
||||||
|
@@ -23,7 +23,7 @@ func newAction(a *Action) (http.Handler, error) {

 	httpFn := func(w http.ResponseWriter, r *http.Request) {
 		if err := fn(w, r); err != nil {
-			renderErr(w, err, nil)
+			renderErr(w, err)
 		}
 	}
|
@@ -98,8 +98,9 @@ func cmdDBNew(cmd *cobra.Command, args []string) {

 	initConfOnce()
 	name := args[0]
+	migrationsPath := conf.relPath(conf.MigrationsPath)

-	m, err := migrate.FindMigrations(conf.MigrationsPath)
+	m, err := migrate.FindMigrations(migrationsPath)
 	if err != nil {
 		log.Fatalf("ERR error loading migrations: %s", err)
 	}
@@ -107,7 +108,7 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
 	mname := fmt.Sprintf("%d_%s.sql", len(m), name)

 	// Write new migration
-	mpath := filepath.Join(conf.MigrationsPath, mname)
+	mpath := filepath.Join(migrationsPath, mname)
 	mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
 	if err != nil {
 		log.Fatalf("ERR %s", err)
@@ -144,7 +145,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {

 	m.Data = getMigrationVars()

-	err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
+	err = m.LoadMigrations(conf.relPath(conf.MigrationsPath))
 	if err != nil {
 		log.Fatalf("ERR failed to load migrations: %s", err)
 	}
@@ -236,7 +237,7 @@ func cmdDBStatus(cmd *cobra.Command, args []string) {

 	m.Data = getMigrationVars()

-	err = m.LoadMigrations(conf.MigrationsPath)
+	err = m.LoadMigrations(conf.relPath(conf.MigrationsPath))
 	if err != nil {
 		log.Fatalf("ERR failed to load migrations: %s", err)
 	}
|
@@ -16,6 +16,7 @@ import (
 	"github.com/brianvoe/gofakeit/v5"
 	"github.com/dop251/goja"
 	"github.com/dosco/super-graph/core"
+	"github.com/gosimple/slug"
 	"github.com/spf13/cobra"
 )

@@ -61,6 +62,10 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
 	setFakeFuncs(fake)
 	vm.Set("fake", fake)

+	util := vm.NewObject()
+	setUtilFuncs(util)
+	vm.Set("util", util)
+
 	_, err = vm.RunScript("seed.js", string(b))
 	if err != nil {
 		log.Fatalf("ERR failed to execute script: %s", err)
@@ -405,9 +410,14 @@ func setFakeFuncs(f *goja.Object) {
 	f.Set("letter", gofakeit.Letter)
 	f.Set("lexify", gofakeit.Lexify)
 	f.Set("rand_string", getRandValue)
-	f.Set("shuffle_strings", gofakeit.ShuffleStrings)
 	f.Set("numerify", gofakeit.Numerify)

 	//f.Set("programming_language", gofakeit.ProgrammingLanguage)
+}
+
+func setUtilFuncs(f *goja.Object) {
+	// Slugs
+	f.Set("make_slug", slug.Make)
+	f.Set("make_slug_lang", slug.MakeLang)
+	f.Set("shuffle_strings", gofakeit.ShuffleStrings)
 }
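A self-contained sketch of the pattern the seed command uses here: Go helpers are registered on a `goja` object so the seed script can call them as `util.*`. The script body below is inlined purely for illustration; in the real command it comes from `seed.js`.

```go
package main

import (
	"fmt"
	"log"

	"github.com/dop251/goja"
	"github.com/gosimple/slug"
)

func main() {
	vm := goja.New()

	// Expose Go helpers to the script under the "util" namespace,
	// mirroring how setUtilFuncs is wired up above.
	util := vm.NewObject()
	if err := util.Set("make_slug", slug.Make); err != nil {
		log.Fatal(err)
	}
	if err := vm.Set("util", util); err != nil {
		log.Fatal(err)
	}

	// Run a tiny script that calls back into the Go helper.
	v, err := vm.RunString(`util.make_slug("Hello Super Graph!")`)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(v.String()) // hello-super-graph
}
```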
|
@@ -24,10 +24,6 @@ func cmdServ(cmd *cobra.Command, args []string) {
 		fatalInProd(err, "failed to connect to database")
 	}

-	// if conf != nil && db != nil {
-	// initResolvers()
-	// }
-
 	sg, err = core.NewSuperGraph(&conf.Core, db)
 	if err != nil {
 		fatalInProd(err, "failed to initialize Super Graph")
|
@@ -4,6 +4,7 @@ import (
 	"fmt"
 	"os"
 	"path"
+	"path/filepath"
 	"strings"

 	"github.com/spf13/viper"
@@ -48,10 +49,6 @@ func ReadInConfig(configFile string) (*Config, error) {
 		return nil, fmt.Errorf("failed to decode config, %v", err)
 	}

-	if len(c.Core.AllowListFile) == 0 {
-		c.Core.AllowListFile = path.Join(cpath, "allow.list")
-	}
-
 	return c, nil
 }

@@ -113,3 +110,11 @@ func GetConfigName() string {

 	return ge
 }
+
+func (c *Config) relPath(p string) string {
+	if filepath.IsAbs(p) {
+		return p
+	}
+
+	return path.Join(c.cpath, p)
+}
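A short sketch of the behaviour this helper adds, under the assumption that `cpath` holds the directory the config file was loaded from: relative paths in the config (such as `migrations_path`) resolve against that directory, while absolute paths pass through unchanged. The standalone function below mirrors the method added above without depending on the `Config` type.

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
)

// relPath mirrors the Config helper: config-relative unless absolute.
func relPath(cpath, p string) string {
	if filepath.IsAbs(p) {
		return p
	}
	return path.Join(cpath, p)
}

func main() {
	cpath := "/app/config"
	fmt.Println(relPath(cpath, "./migrations"))    // /app/config/migrations
	fmt.Println(relPath(cpath, "/var/migrations")) // /var/migrations
}
```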
|
@@ -6,9 +6,7 @@ import (
 	"io"
 	"io/ioutil"
 	"net/http"
-	"strings"

-	"github.com/dosco/super-graph/core"
 	"github.com/dosco/super-graph/internal/serv/internal/auth"
 	"github.com/rs/cors"
 	"go.uber.org/zap"
@@ -30,7 +28,7 @@ type gqlReq struct {
 }

 type errorResp struct {
-	Error error `json:"error"`
+	Error string `json:"error"`
 }

 func apiV1Handler() http.Handler {
@@ -56,13 +54,13 @@ func apiV1(w http.ResponseWriter, r *http.Request) {

 	//nolint: errcheck
 	if conf.AuthFailBlock && !auth.IsAuth(ct) {
-		renderErr(w, errUnauthorized, nil)
+		renderErr(w, errUnauthorized)
 		return
 	}

 	b, err := ioutil.ReadAll(io.LimitReader(r.Body, maxReadBytes))
 	if err != nil {
-		renderErr(w, err, nil)
+		renderErr(w, err)
 		return
 	}
 	defer r.Body.Close()
@@ -71,29 +69,28 @@ func apiV1(w http.ResponseWriter, r *http.Request) {

 	err = json.Unmarshal(b, &req)
 	if err != nil {
-		renderErr(w, err, nil)
+		renderErr(w, err)
 		return
 	}

-	if strings.EqualFold(req.OpName, introspectionQuery) {
-		introspect(w)
-		return
-	}
-
+	doLog := true
 	res, err := sg.GraphQL(ct, req.Query, req.Vars)

-	if logLevel >= LogLevelDebug {
-		log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
+	if !conf.Production && res.QueryName() == "IntrospectionQuery" {
+		doLog = false
+	}
+
+	if doLog && logLevel >= LogLevelDebug {
+		log.Printf("DBG query %s: %s", res.QueryName(), res.SQL())
 	}

 	if err != nil {
-		renderErr(w, err, res)
-		return
+		renderErr(w, err)
+	} else {
+		json.NewEncoder(w).Encode(res)
 	}

-	json.NewEncoder(w).Encode(res)
-
-	if logLevel >= LogLevelInfo {
+	if doLog && logLevel >= LogLevelInfo {
 		zlog.Info("success",
 			zap.String("op", res.Operation()),
 			zap.String("name", res.QueryName()),
@@ -103,22 +100,10 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
 	}

 //nolint: errcheck
-func renderErr(w http.ResponseWriter, err error, res *core.Result) {
+func renderErr(w http.ResponseWriter, err error) {
 	if err == errUnauthorized {
 		w.WriteHeader(http.StatusUnauthorized)
 	}

-	json.NewEncoder(w).Encode(&errorResp{err})
-
-	if logLevel >= LogLevelError {
-		if res != nil {
-			zlog.Error(err.Error(),
-				zap.String("op", res.Operation()),
-				zap.String("name", res.QueryName()),
-				zap.String("role", res.Role()),
-			)
-		} else {
-			zlog.Error(err.Error())
-		}
-	}
+	json.NewEncoder(w).Encode(errorResp{err.Error()})
 }
|
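To make the errorResp change concrete, a small self-contained sketch: flattening the error to a string means encoding/json emits the actual message, whereas an error-typed field would have marshalled to an empty object.

```go
package main

import (
	"encoding/json"
	"errors"
	"os"
)

// Mirrors the errorResp change above: the error is stored as a plain
// string so the encoded body becomes {"error":"..."} instead of {}.
type errorResp struct {
	Error string `json:"error"`
}

func main() {
	err := errors.New("query failed")
	json.NewEncoder(os.Stdout).Encode(errorResp{err.Error()})
	// Output: {"error":"query failed"}
}
```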
@ -8,6 +8,7 @@ import (
 	"fmt"
 	"io/ioutil"
 	"path"
+	"path/filepath"
 	"strings"
 	"time"

@ -21,7 +22,12 @@ const (
 )

 func initConf() (*Config, error) {
-	c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
+	cp, err := filepath.Abs(confPath)
+	if err != nil {
+		return nil, err
+	}
+
+	c, err := ReadInConfig(path.Join(cp, GetConfigName()))
 	if err != nil {
 		return nil, err
 	}
@ -86,6 +92,14 @@ func initConf() (*Config, error) {
 		c.AuthFailBlock = false
 	}

+	if len(c.AllowListFile) == 0 {
+		c.AllowListFile = c.relPath("./allow.list")
+	}
+
+	if c.Production {
+		c.UseAllowList = true
+	}
+
 	return c, nil
 }

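A trimmed-down illustration of the defaulting added to initConf above; the type and function names here are stand-ins, not the real serv package. An unset allow-list path falls back to allow.list next to the config, and production mode forces the allow list on.

```go
package main

import (
	"fmt"
	"path"
)

// Stand-in for the relevant Config fields.
type conf struct {
	Production    bool
	UseAllowList  bool
	AllowListFile string
}

func applyDefaults(c *conf, configDir string) {
	// Default the allow list to a config-relative path.
	if len(c.AllowListFile) == 0 {
		c.AllowListFile = path.Join(configDir, "allow.list")
	}
	// Production always enforces the allow list.
	if c.Production {
		c.UseAllowList = true
	}
}

func main() {
	c := &conf{Production: true}
	applyDefaults(c, "/etc/super-graph")
	fmt.Println(c.UseAllowList, c.AllowListFile) // true /etc/super-graph/allow.list
}
```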
@ -128,7 +142,7 @@ func initDB(c *Config, useDB bool) (*sql.DB, error) {
 		if strings.Contains(c.DB.ServerCert, PEM_SIG) {
 			pem = []byte(c.DB.ServerCert)
 		} else {
-			pem, err = ioutil.ReadFile(c.DB.ServerCert)
+			pem, err = ioutil.ReadFile(c.relPath(c.DB.ServerCert))
 		}

 		if err != nil {
@ -145,7 +159,7 @@ func initDB(c *Config, useDB bool) (*sql.DB, error) {
 		if strings.Contains(c.DB.ClientCert, PEM_SIG) {
 			certs, err = tls.X509KeyPair([]byte(c.DB.ClientCert), []byte(c.DB.ClientKey))
 		} else {
-			certs, err = tls.LoadX509KeyPair(c.DB.ClientCert, c.DB.ClientKey)
+			certs, err = tls.LoadX509KeyPair(c.relPath(c.DB.ClientCert), c.relPath(c.DB.ClientKey))
 		}

 		if err != nil {
@ -190,17 +190,3 @@ func self() (string, error) {
 	}
 	return bin, nil
 }
-
-// Get path relative to cwd
-func relpath(p string) string {
-	cwd, err := os.Getwd()
-	if err != nil {
-		return p
-	}
-
-	if strings.HasPrefix(p, cwd) {
-		return "./" + strings.TrimLeft(p[len(cwd):], "/")
-	}
-
-	return p
-}
File diff suppressed because one or more lines are too long
@ -34,7 +34,7 @@ reload_on_config_change: false
 # seed_file: seed.js

 # Path pointing to where the migrations can be found
-# migrations_path: migrations
+# migrations_path: ./migrations

 # Secret key for general encryption operations like
 # encrypting the cursor data
@ -119,3 +119,17 @@ func isDev() bool {
 func sanitize(value string) string {
 	return strings.ToLower(strings.TrimSpace(value))
 }
+
+// Get path relative to cwd
+func relpath(p string) string {
+	cwd, err := os.Getwd()
+	if err != nil {
+		return p
+	}
+
+	if strings.HasPrefix(p, cwd) {
+		return "./" + strings.TrimLeft(p[len(cwd):], "/")
+	}
+
+	return p
+}
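As a quick standalone illustration of the relpath helper added above (sample paths are made up): absolute paths under the current working directory are shortened to a ./-relative form, anything else is returned unchanged.

```go
package main

import (
	"fmt"
	"os"
	"strings"
)

// Same logic as the relpath helper added above, shown with sample inputs.
func relpath(p string) string {
	cwd, err := os.Getwd()
	if err != nil {
		return p
	}
	if strings.HasPrefix(p, cwd) {
		return "./" + strings.TrimLeft(p[len(cwd):], "/")
	}
	return p
}

func main() {
	cwd, _ := os.Getwd()
	fmt.Println(relpath(cwd + "/config/dev.yml")) // ./config/dev.yml
	fmt.Println(relpath("/etc/hosts"))            // unchanged (unless cwd is a prefix)
}
```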
@ -1,31 +1,23 @@
 {
   "files": {
-    "main.css": "/static/css/main.c6b5c55c.chunk.css",
-    "main.js": "/static/js/main.0805cb98.chunk.js",
-    "main.js.map": "/static/js/main.0805cb98.chunk.js.map",
+    "main.css": "/static/css/main.d4fa22d6.chunk.css",
+    "main.js": "/static/js/main.55a8068a.chunk.js",
+    "main.js.map": "/static/js/main.55a8068a.chunk.js.map",
     "runtime-main.js": "/static/js/runtime-main.3ce8a40d.js",
     "runtime-main.js.map": "/static/js/runtime-main.3ce8a40d.js.map",
-    "static/js/2.8ee1f069.chunk.js": "/static/js/2.8ee1f069.chunk.js",
-    "static/js/2.8ee1f069.chunk.js.map": "/static/js/2.8ee1f069.chunk.js.map",
+    "static/js/2.660c567d.chunk.js": "/static/js/2.660c567d.chunk.js",
+    "static/js/2.660c567d.chunk.js.map": "/static/js/2.660c567d.chunk.js.map",
     "index.html": "/index.html",
-    "precache-manifest.ac47d060293cf05e2bbe249faba24591.js": "/precache-manifest.ac47d060293cf05e2bbe249faba24591.js",
+    "precache-manifest.cf56ad15e4cdbe76d4cebd621a25f8a9.js": "/precache-manifest.cf56ad15e4cdbe76d4cebd621a25f8a9.js",
     "service-worker.js": "/service-worker.js",
-    "static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
-    "static/js/2.8ee1f069.chunk.js.LICENSE.txt": "/static/js/2.8ee1f069.chunk.js.LICENSE.txt",
-    "static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
-    "static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
-    "static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
-    "static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
-    "static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
-    "static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
-    "static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
-    "static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
+    "static/css/main.d4fa22d6.chunk.css.map": "/static/css/main.d4fa22d6.chunk.css.map",
+    "static/js/2.660c567d.chunk.js.LICENSE.txt": "/static/js/2.660c567d.chunk.js.LICENSE.txt",
     "static/media/logo.png": "/static/media/logo.57ee3b60.png"
   },
   "entrypoints": [
     "static/js/runtime-main.3ce8a40d.js",
-    "static/js/2.8ee1f069.chunk.js",
-    "static/css/main.c6b5c55c.chunk.css",
-    "static/js/main.0805cb98.chunk.js"
+    "static/js/2.660c567d.chunk.js",
+    "static/css/main.d4fa22d6.chunk.css",
+    "static/js/main.55a8068a.chunk.js"
   ]
 }
@ -1 +1 @@
(minified index.html: unchanged apart from the hashed asset references — main.c6b5c55c.chunk.css → main.d4fa22d6.chunk.css, 2.8ee1f069.chunk.js → 2.660c567d.chunk.js, main.0805cb98.chunk.js → main.55a8068a.chunk.js)
@ -1,62 +0,0 @@
(deleted file, 62 lines: the previous build's precache manifest listing the old hashed assets — index.html, main.c6b5c55c.chunk.css, 2.8ee1f069.chunk.js and its LICENSE.txt, main.0805cb98.chunk.js, runtime-main.3ce8a40d.js, the eight *.js.flow media files, and logo.57ee3b60.png)
@ -0,0 +1,30 @@
+self.__precacheManifest = (self.__precacheManifest || []).concat([
+  {
+    "revision": "dd24b6ab0d5823ad93efd681af20c3af",
+    "url": "/index.html"
+  },
+  {
+    "revision": "ce055c1f214eb88c71e5",
+    "url": "/static/css/main.d4fa22d6.chunk.css"
+  },
+  {
+    "revision": "f79d286b3c55466899f3",
+    "url": "/static/js/2.660c567d.chunk.js"
+  },
+  {
+    "revision": "4044397a22b006229bd81c3fc79e2c09",
+    "url": "/static/js/2.660c567d.chunk.js.LICENSE.txt"
+  },
+  {
+    "revision": "ce055c1f214eb88c71e5",
+    "url": "/static/js/main.55a8068a.chunk.js"
+  },
+  {
+    "revision": "28c836c6390ca2244059",
+    "url": "/static/js/runtime-main.3ce8a40d.js"
+  },
+  {
+    "revision": "57ee3b6084cb9d3c754cc12d25a98035",
+    "url": "/static/media/logo.57ee3b60.png"
+  }
+]);
@ -14,7 +14,7 @@
 importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");

 importScripts(
-  "/precache-manifest.ac47d060293cf05e2bbe249faba24591.js"
+  "/precache-manifest.cf56ad15e4cdbe76d4cebd621a25f8a9.js"
 );

 self.addEventListener('message', (event) => {
@ -1,2 +1,2 @@
(minified main CSS: background-color changed from #0f202d to #09141b; sourceMappingURL now points at main.d4fa22d6.chunk.css.map instead of main.c6b5c55c.chunk.css.map)
@ -1 +1 @@
(CSS source map regenerated for main.d4fa22d6.chunk.css; apart from the file name the only content change is the background-color value)
3
internal/serv/web/build/static/js/2.660c567d.chunk.js
Normal file
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -1,2 +0,0 @@
(deleted file: the old minified main.0805cb98.chunk.js playground bundle and its sourceMappingURL comment)
File diff suppressed because one or more lines are too long
2
internal/serv/web/build/static/js/main.55a8068a.chunk.js
Normal file
@ -0,0 +1,2 @@
(new file: the rebuilt minified main.55a8068a.chunk.js playground bundle and its sourceMappingURL comment)
File diff suppressed because one or more lines are too long
@ -1,328 +0,0 @@
(deleted file, 328 lines: the vendored GraphQLLanguageService Flow source shipped with the web build — diagnostics, autocomplete, hover and definition lookups built on the GraphQL cache and project config)
@ -1,204 +0,0 @@
(deleted file, 204 lines: the vendored autocompleteUtils Flow source — getDefinitionState, getFieldDef, forEachState, objectValues, hintList and the lexical-distance helpers)
@ -1,665 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type {
|
|
||||||
FragmentDefinitionNode,
|
|
||||||
GraphQLDirective,
|
|
||||||
GraphQLSchema,
|
|
||||||
} from 'graphql';
|
|
||||||
import type {
|
|
||||||
CompletionItem,
|
|
||||||
ContextToken,
|
|
||||||
State,
|
|
||||||
TypeInfo,
|
|
||||||
} from 'graphql-language-service-types';
|
|
||||||
import type {Position} from 'graphql-language-service-utils';
|
|
||||||
|
|
||||||
import {
|
|
||||||
GraphQLBoolean,
|
|
||||||
GraphQLEnumType,
|
|
||||||
GraphQLInputObjectType,
|
|
||||||
GraphQLList,
|
|
||||||
SchemaMetaFieldDef,
|
|
||||||
TypeMetaFieldDef,
|
|
||||||
TypeNameMetaFieldDef,
|
|
||||||
assertAbstractType,
|
|
||||||
doTypesOverlap,
|
|
||||||
getNamedType,
|
|
||||||
getNullableType,
|
|
||||||
isAbstractType,
|
|
||||||
isCompositeType,
|
|
||||||
isInputType,
|
|
||||||
} from 'graphql';
|
|
||||||
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
|
|
||||||
import {
|
|
||||||
forEachState,
|
|
||||||
getDefinitionState,
|
|
||||||
getFieldDef,
|
|
||||||
hintList,
|
|
||||||
objectValues,
|
|
||||||
} from './autocompleteUtils';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Given GraphQLSchema, queryText, and context of the current position within
|
|
||||||
* the source text, provide a list of typeahead entries.
|
|
||||||
*/
|
|
||||||
export function getAutocompleteSuggestions(
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
queryText: string,
|
|
||||||
cursor: Position,
|
|
||||||
contextToken?: ContextToken,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
const token = contextToken || getTokenAtPosition(queryText, cursor);
|
|
||||||
|
|
||||||
const state =
|
|
||||||
token.state.kind === 'Invalid' ? token.state.prevState : token.state;
|
|
||||||
|
|
||||||
// relieve flow errors by checking if `state` exists
|
|
||||||
if (!state) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
const kind = state.kind;
|
|
||||||
const step = state.step;
|
|
||||||
const typeInfo = getTypeInfo(schema, token.state);
|
|
||||||
|
|
||||||
// Definition kinds
|
|
||||||
if (kind === 'Document') {
|
|
||||||
return hintList(token, [
|
|
||||||
{label: 'query'},
|
|
||||||
{label: 'mutation'},
|
|
||||||
{label: 'subscription'},
|
|
||||||
{label: 'fragment'},
|
|
||||||
{label: '{'},
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Field names
|
|
||||||
if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
|
|
||||||
return getSuggestionsForFieldNames(token, typeInfo, schema);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Argument names
|
|
||||||
if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
|
|
||||||
const argDefs = typeInfo.argDefs;
|
|
||||||
if (argDefs) {
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
argDefs.map(argDef => ({
|
|
||||||
label: argDef.name,
|
|
||||||
detail: String(argDef.type),
|
|
||||||
documentation: argDef.description,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Input Object fields
|
|
||||||
if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
|
|
||||||
if (typeInfo.objectFieldDefs) {
|
|
||||||
const objectFields = objectValues(typeInfo.objectFieldDefs);
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
objectFields.map(field => ({
|
|
||||||
label: field.name,
|
|
||||||
detail: String(field.type),
|
|
||||||
documentation: field.description,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Input values: Enum and Boolean
|
|
||||||
if (
|
|
||||||
kind === 'EnumValue' ||
|
|
||||||
(kind === 'ListValue' && step === 1) ||
|
|
||||||
(kind === 'ObjectField' && step === 2) ||
|
|
||||||
(kind === 'Argument' && step === 2)
|
|
||||||
) {
|
|
||||||
return getSuggestionsForInputValues(token, typeInfo);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fragment type conditions
|
|
||||||
if (
|
|
||||||
(kind === 'TypeCondition' && step === 1) ||
|
|
||||||
(kind === 'NamedType' &&
|
|
||||||
state.prevState != null &&
|
|
||||||
state.prevState.kind === 'TypeCondition')
|
|
||||||
) {
|
|
||||||
return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fragment spread names
|
|
||||||
if (kind === 'FragmentSpread' && step === 1) {
|
|
||||||
return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Variable definition types
|
|
||||||
if (
|
|
||||||
(kind === 'VariableDefinition' && step === 2) ||
|
|
||||||
(kind === 'ListType' && step === 1) ||
|
|
||||||
(kind === 'NamedType' &&
|
|
||||||
state.prevState &&
|
|
||||||
(state.prevState.kind === 'VariableDefinition' ||
|
|
||||||
state.prevState.kind === 'ListType'))
|
|
||||||
) {
|
|
||||||
return getSuggestionsForVariableDefinition(token, schema);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Directive names
|
|
||||||
if (kind === 'Directive') {
|
|
||||||
return getSuggestionsForDirective(token, state, schema);
|
|
||||||
}
|
|
||||||
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helper functions to get suggestions for each kinds
|
|
||||||
function getSuggestionsForFieldNames(
|
|
||||||
token: ContextToken,
|
|
||||||
typeInfo: TypeInfo,
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
if (typeInfo.parentType) {
|
|
||||||
const parentType = typeInfo.parentType;
|
|
||||||
const fields =
|
|
||||||
parentType.getFields instanceof Function
|
|
||||||
? objectValues(parentType.getFields())
|
|
||||||
: [];
|
|
||||||
if (isAbstractType(parentType)) {
|
|
||||||
fields.push(TypeNameMetaFieldDef);
|
|
||||||
}
|
|
||||||
if (parentType === schema.getQueryType()) {
|
|
||||||
fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
|
|
||||||
}
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
fields.map(field => ({
|
|
||||||
label: field.name,
|
|
||||||
detail: String(field.type),
|
|
||||||
documentation: field.description,
|
|
||||||
isDeprecated: field.isDeprecated,
|
|
||||||
deprecationReason: field.deprecationReason,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSuggestionsForInputValues(
|
|
||||||
token: ContextToken,
|
|
||||||
typeInfo: TypeInfo,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
const namedInputType = getNamedType(typeInfo.inputType);
|
|
||||||
if (namedInputType instanceof GraphQLEnumType) {
|
|
||||||
const values = namedInputType.getValues();
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
values.map(value => ({
|
|
||||||
label: value.name,
|
|
||||||
detail: String(namedInputType),
|
|
||||||
documentation: value.description,
|
|
||||||
isDeprecated: value.isDeprecated,
|
|
||||||
deprecationReason: value.deprecationReason,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
} else if (namedInputType === GraphQLBoolean) {
|
|
||||||
return hintList(token, [
|
|
||||||
{
|
|
||||||
label: 'true',
|
|
||||||
detail: String(GraphQLBoolean),
|
|
||||||
documentation: 'Not false.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: 'false',
|
|
||||||
detail: String(GraphQLBoolean),
|
|
||||||
documentation: 'Not true.',
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSuggestionsForFragmentTypeConditions(
|
|
||||||
token: ContextToken,
|
|
||||||
typeInfo: TypeInfo,
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
let possibleTypes;
|
|
||||||
if (typeInfo.parentType) {
|
|
||||||
if (isAbstractType(typeInfo.parentType)) {
|
|
||||||
const abstractType = assertAbstractType(typeInfo.parentType);
|
|
||||||
// Collect both the possible Object types as well as the interfaces
|
|
||||||
// they implement.
|
|
||||||
const possibleObjTypes = schema.getPossibleTypes(abstractType);
|
|
||||||
const possibleIfaceMap = Object.create(null);
|
|
||||||
possibleObjTypes.forEach(type => {
|
|
||||||
type.getInterfaces().forEach(iface => {
|
|
||||||
possibleIfaceMap[iface.name] = iface;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
|
|
||||||
} else {
|
|
||||||
      // The parent type is a non-abstract Object type, so the only possible
      // type that can be used is that same type.
      possibleTypes = [typeInfo.parentType];
    }
  } else {
    const typeMap = schema.getTypeMap();
    possibleTypes = objectValues(typeMap).filter(isCompositeType);
  }
  return hintList(
    token,
    possibleTypes.map(type => {
      const namedType = getNamedType(type);
      return {
        label: String(type),
        documentation: (namedType && namedType.description) || '',
      };
    }),
  );
}

function getSuggestionsForFragmentSpread(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
  queryText: string,
): Array<CompletionItem> {
  const typeMap = schema.getTypeMap();
  const defState = getDefinitionState(token.state);
  const fragments = getFragmentDefinitions(queryText);

  // Filter down to only the fragments which may exist here.
  const relevantFrags = fragments.filter(
    frag =>
      // Only include fragments with known types.
      typeMap[frag.typeCondition.name.value] &&
      // Only include fragments which are not cyclic.
      !(
        defState &&
        defState.kind === 'FragmentDefinition' &&
        defState.name === frag.name.value
      ) &&
      // Only include fragments which could possibly be spread here.
      isCompositeType(typeInfo.parentType) &&
      isCompositeType(typeMap[frag.typeCondition.name.value]) &&
      doTypesOverlap(
        schema,
        typeInfo.parentType,
        typeMap[frag.typeCondition.name.value],
      ),
  );

  return hintList(
    token,
    relevantFrags.map(frag => ({
      label: frag.name.value,
      detail: String(typeMap[frag.typeCondition.name.value]),
      documentation: `fragment ${frag.name.value} on ${
        frag.typeCondition.name.value
      }`,
    })),
  );
}

function getFragmentDefinitions(
  queryText: string,
): Array<FragmentDefinitionNode> {
  const fragmentDefs = [];
  runOnlineParser(queryText, (_, state) => {
    if (state.kind === 'FragmentDefinition' && state.name && state.type) {
      fragmentDefs.push({
        kind: 'FragmentDefinition',
        name: {
          kind: 'Name',
          value: state.name,
        },
        selectionSet: {
          kind: 'SelectionSet',
          selections: [],
        },
        typeCondition: {
          kind: 'NamedType',
          name: {
            kind: 'Name',
            value: state.type,
          },
        },
      });
    }
  });

  return fragmentDefs;
}

function getSuggestionsForVariableDefinition(
  token: ContextToken,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  const inputTypeMap = schema.getTypeMap();
  const inputTypes = objectValues(inputTypeMap).filter(isInputType);
  return hintList(
    token,
    inputTypes.map(type => ({
      label: type.name,
      documentation: type.description,
    })),
  );
}

function getSuggestionsForDirective(
  token: ContextToken,
  state: State,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  if (state.prevState && state.prevState.kind) {
    const directives = schema
      .getDirectives()
      .filter(directive => canUseDirective(state.prevState, directive));
    return hintList(
      token,
      directives.map(directive => ({
        label: directive.name,
        documentation: directive.description || '',
      })),
    );
  }
  return [];
}

export function getTokenAtPosition(
  queryText: string,
  cursor: Position,
): ContextToken {
  let styleAtCursor = null;
  let stateAtCursor = null;
  let stringAtCursor = null;
  const token = runOnlineParser(queryText, (stream, state, style, index) => {
    if (index === cursor.line) {
      if (stream.getCurrentPosition() >= cursor.character) {
        styleAtCursor = style;
        stateAtCursor = {...state};
        stringAtCursor = stream.current();
        return 'BREAK';
      }
    }
  });

  // Return the state/style of parsed token in case those at cursor aren't
  // available.
  return {
    start: token.start,
    end: token.end,
    string: stringAtCursor || token.string,
    state: stateAtCursor || token.state,
    style: styleAtCursor || token.style,
  };
}

/**
 * Provides an utility function to parse a given query text and construct a
 * `token` context object.
 * A token context provides useful information about the token/style that
 * CharacterStream currently possesses, as well as the end state and style
 * of the token.
 */
type callbackFnType = (
  stream: CharacterStream,
  state: State,
  style: string,
  index: number,
) => void | 'BREAK';

function runOnlineParser(
  queryText: string,
  callback: callbackFnType,
): ContextToken {
  const lines = queryText.split('\n');
  const parser = onlineParser();
  let state = parser.startState();
  let style = '';

  let stream: CharacterStream = new CharacterStream('');

  for (let i = 0; i < lines.length; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      style = parser.token(stream, state);
      const code = callback(stream, state, style, i);
      if (code === 'BREAK') {
        break;
      }
    }

    // Above while loop won't run if there is an empty line.
    // Run the callback one more time to catch this.
    callback(stream, state, style, i);

    if (!state.kind) {
      state = parser.startState();
    }
  }

  return {
    start: stream.getStartOfToken(),
    end: stream.getCurrentPosition(),
    string: stream.current(),
    state,
    style,
  };
}

function canUseDirective(
  state: $PropertyType<State, 'prevState'>,
  directive: GraphQLDirective,
): boolean {
  if (!state || !state.kind) {
    return false;
  }
  const kind = state.kind;
  const locations = directive.locations;
  switch (kind) {
    case 'Query':
      return locations.indexOf('QUERY') !== -1;
    case 'Mutation':
      return locations.indexOf('MUTATION') !== -1;
    case 'Subscription':
      return locations.indexOf('SUBSCRIPTION') !== -1;
    case 'Field':
    case 'AliasedField':
      return locations.indexOf('FIELD') !== -1;
    case 'FragmentDefinition':
      return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
    case 'FragmentSpread':
      return locations.indexOf('FRAGMENT_SPREAD') !== -1;
    case 'InlineFragment':
      return locations.indexOf('INLINE_FRAGMENT') !== -1;

    // Schema Definitions
    case 'SchemaDef':
      return locations.indexOf('SCHEMA') !== -1;
    case 'ScalarDef':
      return locations.indexOf('SCALAR') !== -1;
    case 'ObjectTypeDef':
      return locations.indexOf('OBJECT') !== -1;
    case 'FieldDef':
      return locations.indexOf('FIELD_DEFINITION') !== -1;
    case 'InterfaceDef':
      return locations.indexOf('INTERFACE') !== -1;
    case 'UnionDef':
      return locations.indexOf('UNION') !== -1;
    case 'EnumDef':
      return locations.indexOf('ENUM') !== -1;
    case 'EnumValue':
      return locations.indexOf('ENUM_VALUE') !== -1;
    case 'InputDef':
      return locations.indexOf('INPUT_OBJECT') !== -1;
    case 'InputValueDef':
      const prevStateKind = state.prevState && state.prevState.kind;
      switch (prevStateKind) {
        case 'ArgumentsDef':
          return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
        case 'InputDef':
          return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
      }
  }
  return false;
}

// Utility for collecting rich type information given any token's state
// from the graphql-mode parser.
export function getTypeInfo(
  schema: GraphQLSchema,
  tokenState: State,
): TypeInfo {
  let argDef;
  let argDefs;
  let directiveDef;
  let enumValue;
  let fieldDef;
  let inputType;
  let objectFieldDefs;
  let parentType;
  let type;

  forEachState(tokenState, state => {
    switch (state.kind) {
      case 'Query':
      case 'ShortQuery':
        type = schema.getQueryType();
        break;
      case 'Mutation':
        type = schema.getMutationType();
        break;
      case 'Subscription':
        type = schema.getSubscriptionType();
        break;
      case 'InlineFragment':
      case 'FragmentDefinition':
        if (state.type) {
          type = schema.getType(state.type);
        }
        break;
      case 'Field':
      case 'AliasedField':
        if (!type || !state.name) {
          fieldDef = null;
        } else {
          fieldDef = parentType
            ? getFieldDef(schema, parentType, state.name)
            : null;
          type = fieldDef ? fieldDef.type : null;
        }
        break;
      case 'SelectionSet':
        parentType = getNamedType(type);
        break;
      case 'Directive':
        directiveDef = state.name ? schema.getDirective(state.name) : null;
        break;
      case 'Arguments':
        if (!state.prevState) {
          argDefs = null;
        } else {
          switch (state.prevState.kind) {
            case 'Field':
              argDefs = fieldDef && fieldDef.args;
              break;
            case 'Directive':
              argDefs = directiveDef && directiveDef.args;
              break;
            case 'AliasedField':
              const name = state.prevState && state.prevState.name;
              if (!name) {
                argDefs = null;
                break;
              }
              const field = parentType
                ? getFieldDef(schema, parentType, name)
                : null;
              if (!field) {
                argDefs = null;
                break;
              }
              argDefs = field.args;
              break;
            default:
              argDefs = null;
              break;
          }
        }
        break;
      case 'Argument':
        if (argDefs) {
          for (let i = 0; i < argDefs.length; i++) {
            if (argDefs[i].name === state.name) {
              argDef = argDefs[i];
              break;
            }
          }
        }
        inputType = argDef && argDef.type;
        break;
      case 'EnumValue':
        const enumType = getNamedType(inputType);
        enumValue =
          enumType instanceof GraphQLEnumType
            ? find(enumType.getValues(), val => val.value === state.name)
            : null;
        break;
      case 'ListValue':
        const nullableType = getNullableType(inputType);
        inputType =
          nullableType instanceof GraphQLList ? nullableType.ofType : null;
        break;
      case 'ObjectValue':
        const objectType = getNamedType(inputType);
        objectFieldDefs =
          objectType instanceof GraphQLInputObjectType
            ? objectType.getFields()
            : null;
        break;
      case 'ObjectField':
        const objectField =
          state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
        inputType = objectField && objectField.type;
        break;
      case 'NamedType':
        if (state.name) {
          type = schema.getType(state.name);
        }
        break;
    }
  });

  return {
    argDef,
    argDefs,
    directiveDef,
    enumValue,
    fieldDef,
    inputType,
    objectFieldDefs,
    parentType,
    type,
  };
}

// Returns the first item in the array which causes predicate to return truthy.
function find(array, predicate) {
  for (let i = 0; i < array.length; i++) {
    if (predicate(array[i])) {
      return array[i];
    }
  }
  return null;
}
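For context, a minimal sketch (not part of the diff) of how the two exports above, `getTokenAtPosition` and `getTypeInfo` from the removed getAutocompleteSuggestions.js, fit together. `buildSchema` comes from graphql-js; the sample schema and query are assumptions, and the cursor is a plain `{line, character}` object standing in for the `Position` type, since only those two properties are read.

```js
import {buildSchema} from 'graphql';
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';

const schema = buildSchema('type Query { hello: String }');
const query = '{ hello }';

// Token under the cursor on line 0, column 4 (inside "hello").
const token = getTokenAtPosition(query, {line: 0, character: 4});

// Rich type information derived from that token's parser state.
const info = getTypeInfo(schema, token.state);
console.log(token.string, info.fieldDef && info.fieldDef.name);
```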
getDefinition.js (deleted):

@@ -1,136 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  ASTNode,
  FragmentSpreadNode,
  FragmentDefinitionNode,
  OperationDefinitionNode,
  NamedTypeNode,
  TypeDefinitionNode,
} from 'graphql';
import type {
  Definition,
  DefinitionQueryResult,
  FragmentInfo,
  Position,
  Range,
  Uri,
  ObjectTypeInfo,
} from 'graphql-language-service-types';
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
import invariant from 'assert';

export const LANGUAGE = 'GraphQL';

function getRange(text: string, node: ASTNode): Range {
  const location = node.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return locToRange(text, location);
}

function getPosition(text: string, node: ASTNode): Position {
  const location = node.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return offsetToPosition(text, location.start);
}

export async function getDefinitionQueryResultForNamedType(
  text: string,
  node: NamedTypeNode,
  dependencies: Array<ObjectTypeInfo>,
): Promise<DefinitionQueryResult> {
  const name = node.name.value;
  const defNodes = dependencies.filter(
    ({definition}) => definition.name && definition.name.value === name,
  );
  if (defNodes.length === 0) {
    process.stderr.write(`Definition not found for GraphQL type ${name}`);
    return {queryRange: [], definitions: []};
  }
  const definitions: Array<Definition> = defNodes.map(
    ({filePath, content, definition}) =>
      getDefinitionForNodeDefinition(filePath || '', content, definition),
  );
  return {
    definitions,
    queryRange: definitions.map(_ => getRange(text, node)),
  };
}

export async function getDefinitionQueryResultForFragmentSpread(
  text: string,
  fragment: FragmentSpreadNode,
  dependencies: Array<FragmentInfo>,
): Promise<DefinitionQueryResult> {
  const name = fragment.name.value;
  const defNodes = dependencies.filter(
    ({definition}) => definition.name.value === name,
  );
  if (defNodes.length === 0) {
    process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
    return {queryRange: [], definitions: []};
  }
  const definitions: Array<Definition> = defNodes.map(
    ({filePath, content, definition}) =>
      getDefinitionForFragmentDefinition(filePath || '', content, definition),
  );
  return {
    definitions,
    queryRange: definitions.map(_ => getRange(text, fragment)),
  };
}

export function getDefinitionQueryResultForDefinitionNode(
  path: Uri,
  text: string,
  definition: FragmentDefinitionNode | OperationDefinitionNode,
): DefinitionQueryResult {
  return {
    definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
    queryRange: definition.name ? [getRange(text, definition.name)] : [],
  };
}

function getDefinitionForFragmentDefinition(
  path: Uri,
  text: string,
  definition: FragmentDefinitionNode | OperationDefinitionNode,
): Definition {
  const name = definition.name;
  invariant(name, 'Expected ASTNode to have a Name.');
  return {
    path,
    position: getPosition(text, definition),
    range: getRange(text, definition),
    name: name.value || '',
    language: LANGUAGE,
    // This is a file inside the project root, good enough for now
    projectRoot: path,
  };
}

function getDefinitionForNodeDefinition(
  path: Uri,
  text: string,
  definition: TypeDefinitionNode,
): Definition {
  const name = definition.name;
  invariant(name, 'Expected ASTNode to have a Name.');
  return {
    path,
    position: getPosition(text, definition),
    range: getRange(text, definition),
    name: name.value || '',
    language: LANGUAGE,
    // This is a file inside the project root, good enough for now
    projectRoot: path,
  };
}
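A minimal sketch (not part of the diff) of the simplest entry point in the removed getDefinition.js: resolving a go-to-definition result for a fragment definition parsed out of a string. `parse` is graphql-js; the `'example.graphql'` Uri and the sample fragment are illustrative assumptions.

```js
import {parse} from 'graphql';
import {getDefinitionQueryResultForDefinitionNode} from './getDefinition';

const text = 'fragment UserFields on User { id name }';
// parse() keeps location info by default, which getRange/getPosition rely on.
const fragmentDef = parse(text).definitions[0];

const result = getDefinitionQueryResultForDefinitionNode(
  'example.graphql', // hypothetical file Uri
  text,
  fragmentDef,
);
console.log(result.definitions[0].name, result.queryRange);
```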
getDiagnostics.js (deleted):

@@ -1,172 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  ASTNode,
  DocumentNode,
  GraphQLError,
  GraphQLSchema,
  Location,
  SourceLocation,
} from 'graphql';
import type {
  Diagnostic,
  CustomValidationRule,
} from 'graphql-language-service-types';

import invariant from 'assert';
import {findDeprecatedUsages, parse} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
  Position,
  Range,
  validateWithCustomRules,
} from 'graphql-language-service-utils';

export const SEVERITY = {
  ERROR: 1,
  WARNING: 2,
  INFORMATION: 3,
  HINT: 4,
};

export function getDiagnostics(
  query: string,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  let ast = null;
  try {
    ast = parse(query);
  } catch (error) {
    const range = getRange(error.locations[0], query);
    return [
      {
        severity: SEVERITY.ERROR,
        message: error.message,
        source: 'GraphQL: Syntax',
        range,
      },
    ];
  }

  return validateQuery(ast, schema, customRules, isRelayCompatMode);
}

export function validateQuery(
  ast: DocumentNode,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  // We cannot validate the query unless a schema is provided.
  if (!schema) {
    return [];
  }

  const validationErrorAnnotations = mapCat(
    validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
    error => annotations(error, SEVERITY.ERROR, 'Validation'),
  );
  // Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
  // support older versions of graphql-js.
  const deprecationWarningAnnotations = !findDeprecatedUsages
    ? []
    : mapCat(findDeprecatedUsages(schema, ast), error =>
        annotations(error, SEVERITY.WARNING, 'Deprecation'),
      );
  return validationErrorAnnotations.concat(deprecationWarningAnnotations);
}

// General utility for map-cating (aka flat-mapping).
function mapCat<T>(
  array: Array<T>,
  mapper: (item: T) => Array<any>,
): Array<any> {
  return Array.prototype.concat.apply([], array.map(mapper));
}

function annotations(
  error: GraphQLError,
  severity: number,
  type: string,
): Array<Diagnostic> {
  if (!error.nodes) {
    return [];
  }
  return error.nodes.map(node => {
    const highlightNode =
      node.kind !== 'Variable' && node.name
        ? node.name
        : node.variable
        ? node.variable
        : node;

    invariant(error.locations, 'GraphQL validation error requires locations.');
    const loc = error.locations[0];
    const highlightLoc = getLocation(highlightNode);
    const end = loc.column + (highlightLoc.end - highlightLoc.start);
    return {
      source: `GraphQL: ${type}`,
      message: error.message,
      severity,
      range: new Range(
        new Position(loc.line - 1, loc.column - 1),
        new Position(loc.line - 1, end),
      ),
    };
  });
}

export function getRange(location: SourceLocation, queryText: string) {
  const parser = onlineParser();
  const state = parser.startState();
  const lines = queryText.split('\n');

  invariant(
    lines.length >= location.line,
    'Query text must have more lines than where the error happened',
  );

  let stream = null;

  for (let i = 0; i < location.line; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      const style = parser.token(stream, state);
      if (style === 'invalidchar') {
        break;
      }
    }
  }

  invariant(stream, 'Expected Parser stream to be available.');

  const line = location.line - 1;
  const start = stream.getStartOfToken();
  const end = stream.getCurrentPosition();

  return new Range(new Position(line, start), new Position(line, end));
}

/**
 * Get location info from a node in a type-safe way.
 *
 * The only way a node could not have a location is if we initialized the parser
 * (and therefore the lexer) with the `noLocation` option, but we always
 * call `parse` without options above.
 */
function getLocation(node: any): Location {
  const typeCastedNode = (node: ASTNode);
  const location = typeCastedNode.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return location;
}
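A minimal sketch (not part of the diff) of linting a query with the removed `getDiagnostics` helper. `buildSchema` comes from graphql-js; the schema and query strings are made-up examples.

```js
import {buildSchema} from 'graphql';
import {getDiagnostics, SEVERITY} from './getDiagnostics';

const schema = buildSchema('type Query { hello: String }');

// Querying a field that does not exist should yield a "GraphQL: Validation"
// diagnostic with a Range pointing at the offending token.
const diagnostics = getDiagnostics('{ goodbye }', schema);
diagnostics.forEach(d => {
  console.log(d.severity === SEVERITY.ERROR ? 'error' : 'warning', d.message);
});
```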
getHoverInformation.js (deleted):

@@ -1,186 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

/**
 * Ported from codemirror-graphql
 * https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
 */

import type {GraphQLSchema} from 'graphql';
import type {ContextToken} from 'graphql-language-service-types';
import type {Hover} from 'vscode-languageserver-types';
import type {Position} from 'graphql-language-service-utils';
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
import {GraphQLNonNull, GraphQLList} from 'graphql';

export function getHoverInformation(
  schema: GraphQLSchema,
  queryText: string,
  cursor: Position,
  contextToken?: ContextToken,
): Hover.contents {
  const token = contextToken || getTokenAtPosition(queryText, cursor);

  if (!schema || !token || !token.state) {
    return [];
  }

  const state = token.state;
  const kind = state.kind;
  const step = state.step;
  const typeInfo = getTypeInfo(schema, token.state);
  const options = {schema};

  // Given a Schema and a Token, produce the contents of an info tooltip.
  // To do this, create a div element that we will render "into" and then pass
  // it to various rendering functions.
  if (
    (kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
    (kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
  ) {
    const into = [];
    renderField(into, typeInfo, options);
    renderDescription(into, options, typeInfo.fieldDef);
    return into.join('').trim();
  } else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
    const into = [];
    renderDirective(into, typeInfo, options);
    renderDescription(into, options, typeInfo.directiveDef);
    return into.join('').trim();
  } else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
    const into = [];
    renderArg(into, typeInfo, options);
    renderDescription(into, options, typeInfo.argDef);
    return into.join('').trim();
  } else if (
    kind === 'EnumValue' &&
    typeInfo.enumValue &&
    typeInfo.enumValue.description
  ) {
    const into = [];
    renderEnumValue(into, typeInfo, options);
    renderDescription(into, options, typeInfo.enumValue);
    return into.join('').trim();
  } else if (
    kind === 'NamedType' &&
    typeInfo.type &&
    typeInfo.type.description
  ) {
    const into = [];
    renderType(into, typeInfo, options, typeInfo.type);
    renderDescription(into, options, typeInfo.type);
    return into.join('').trim();
  }
}

function renderField(into, typeInfo, options) {
  renderQualifiedField(into, typeInfo, options);
  renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
}

function renderQualifiedField(into, typeInfo, options) {
  if (!typeInfo.fieldDef) {
    return;
  }
  const fieldName = (typeInfo.fieldDef.name: string);
  if (fieldName.slice(0, 2) !== '__') {
    renderType(into, typeInfo, options, typeInfo.parentType);
    text(into, '.');
  }
  text(into, fieldName);
}

function renderDirective(into, typeInfo, options) {
  if (!typeInfo.directiveDef) {
    return;
  }
  const name = '@' + typeInfo.directiveDef.name;
  text(into, name);
}

function renderArg(into, typeInfo, options) {
  if (typeInfo.directiveDef) {
    renderDirective(into, typeInfo, options);
  } else if (typeInfo.fieldDef) {
    renderQualifiedField(into, typeInfo, options);
  }

  if (!typeInfo.argDef) {
    return;
  }

  const name = typeInfo.argDef.name;
  text(into, '(');
  text(into, name);
  renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
  text(into, ')');
}

function renderTypeAnnotation(into, typeInfo, options, t) {
  text(into, ': ');
  renderType(into, typeInfo, options, t);
}

function renderEnumValue(into, typeInfo, options) {
  if (!typeInfo.enumValue) {
    return;
  }
  const name = typeInfo.enumValue.name;
  renderType(into, typeInfo, options, typeInfo.inputType);
  text(into, '.');
  text(into, name);
}

function renderType(into, typeInfo, options, t) {
  if (!t) {
    return;
  }
  if (t instanceof GraphQLNonNull) {
    renderType(into, typeInfo, options, t.ofType);
    text(into, '!');
  } else if (t instanceof GraphQLList) {
    text(into, '[');
    renderType(into, typeInfo, options, t.ofType);
    text(into, ']');
  } else {
    text(into, t.name);
  }
}

function renderDescription(into, options, def) {
  if (!def) {
    return;
  }
  const description =
    typeof def.description === 'string' ? def.description : null;
  if (description) {
    text(into, '\n\n');
    text(into, description);
  }
  renderDeprecation(into, options, def);
}

function renderDeprecation(into, options, def) {
  if (!def) {
    return;
  }
  const reason =
    typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
  if (!reason) {
    return;
  }
  text(into, '\n\n');
  text(into, 'Deprecated: ');
  text(into, reason);
}

function text(into: string[], content: string) {
  into.push(content);
}
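A minimal sketch (not part of the diff) of producing hover text with the removed `getHoverInformation`. `buildSchema` is graphql-js, the SDL and cursor position are assumptions, and the cursor is again a plain `{line, character}` object.

```js
import {buildSchema} from 'graphql';
import {getHoverInformation} from './getHoverInformation';

const schema = buildSchema(`
  type Query {
    "A friendly greeting"
    hello: String
  }
`);

// Hovering over "hello" should render something like "Query.hello: String"
// followed by the field description, per the render helpers above.
console.log(getHoverInformation(schema, '{ hello }', {line: 0, character: 4}));
```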
getOutline.js (deleted):

@@ -1,121 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  Outline,
  TextToken,
  TokenKind,
} from 'graphql-language-service-types';

import {Kind, parse, visit} from 'graphql';
import {offsetToPosition} from 'graphql-language-service-utils';

const {INLINE_FRAGMENT} = Kind;

const OUTLINEABLE_KINDS = {
  Field: true,
  OperationDefinition: true,
  Document: true,
  SelectionSet: true,
  Name: true,
  FragmentDefinition: true,
  FragmentSpread: true,
  InlineFragment: true,
};

type OutlineTreeConverterType = {[name: string]: Function};

export function getOutline(queryText: string): ?Outline {
  let ast;
  try {
    ast = parse(queryText);
  } catch (error) {
    return null;
  }

  const visitorFns = outlineTreeConverter(queryText);
  const outlineTrees = visit(ast, {
    leave(node) {
      if (
        OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
        visitorFns[node.kind]
      ) {
        return visitorFns[node.kind](node);
      }
      return null;
    },
  });
  return {outlineTrees};
}

function outlineTreeConverter(docText: string): OutlineTreeConverterType {
  const meta = node => ({
    representativeName: node.name,
    startPosition: offsetToPosition(docText, node.loc.start),
    endPosition: offsetToPosition(docText, node.loc.end),
    children: node.selectionSet || [],
  });
  return {
    Field: node => {
      const tokenizedText = node.alias
        ? [buildToken('plain', node.alias), buildToken('plain', ': ')]
        : [];
      tokenizedText.push(buildToken('plain', node.name));
      return {tokenizedText, ...meta(node)};
    },
    OperationDefinition: node => ({
      tokenizedText: [
        buildToken('keyword', node.operation),
        buildToken('whitespace', ' '),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    Document: node => node.definitions,
    SelectionSet: node =>
      concatMap(node.selections, child => {
        return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
      }),
    Name: node => node.value,
    FragmentDefinition: node => ({
      tokenizedText: [
        buildToken('keyword', 'fragment'),
        buildToken('whitespace', ' '),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    FragmentSpread: node => ({
      tokenizedText: [
        buildToken('plain', '...'),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    InlineFragment: node => node.selectionSet,
  };
}

function buildToken(kind: TokenKind, value: string): TextToken {
  return {kind, value};
}

function concatMap(arr: Array<any>, fn: Function): Array<any> {
  const res = [];
  for (let i = 0; i < arr.length; i++) {
    const x = fn(arr[i], i);
    if (Array.isArray(x)) {
      res.push(...x);
    } else {
      res.push(x);
    }
  }
  return res;
}
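A minimal sketch (not part of the diff) of building an editor outline with the removed `getOutline`; the sample query is an assumption.

```js
import {getOutline} from './getOutline';

const outline = getOutline(`
  query Profile {
    user { id name }
  }
`);

// Each outline tree carries tokenizedText plus start/end positions; the top
// level here is the "query Profile" operation definition.
if (outline) {
  outline.outlineTrees.forEach(tree =>
    console.log(tree.tokenizedText.map(t => t.value).join('')),
  );
}
```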
index.js (deleted):

@@ -1,31 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

export {
  getDefinitionState,
  getFieldDef,
  forEachState,
  objectValues,
  hintList,
} from './autocompleteUtils';

export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';

export {
  LANGUAGE,
  getDefinitionQueryResultForFragmentSpread,
  getDefinitionQueryResultForDefinitionNode,
} from './getDefinition';

export {getDiagnostics, validateQuery} from './getDiagnostics';
export {getOutline} from './getOutline';
export {getHoverInformation} from './getHoverInformation';

export {GraphQLLanguageService} from './GraphQLLanguageService';
package.json:

@@ -3,9 +3,9 @@
   "version": "0.1.0",
   "private": true,
   "dependencies": {
-    "@apollographql/graphql-playground-react": "^1.7.32",
     "apollo-link-ws": "^1.0.8",
     "graphql": "^14.1.1",
+    "graphql-playground-react": "^1.7.22",
     "react": "^16.13.0",
     "react-dom": "^16.11.0",
     "react-scripts": "3.4.1",
@@ -1,6 +1,6 @@
 import React, { Component } from 'react'
 import { Provider } from 'react-redux'
-import { Playground, store } from '@apollographql/graphql-playground-react'
+import { Playground, store } from 'graphql-playground-react'

 import './index.css'

@@ -15,36 +15,21 @@ class App extends Component {
     return (
       <div>
         <header style={{
-          background: '#09141b',
-          color: '#03a9f4',
+          color: 'lightblue',
           letterSpacing: '0.15rem',
-          height: '65px',
-          display: 'flex',
-          alignItems: 'center'
+          paddingTop: '10px',
+          paddingBottom: '0px'
           }}
         >
-          <h3 style={{
+          <div style={{
             textDecoration: 'none',
             margin: '0px',
-            fontSize: '18px',
-          }}
-          >
-            <span style={{
+            fontSize: '14px',
+            fontWeight: '500',
             textTransform: 'uppercase',
-            marginLeft: '20px',
-            paddingRight: '10px',
-            borderRight: '1px solid #fff'
-          }}>
-            Super Graph
-          </span>
-          <span style={{
-            fontSize: '16px',
             marginLeft: '10px',
-            color: '#fff'
-          }}>
-            Instant GraphQL</span>
-          </h3>
+          }}
+          >Super Graph</div>
         </header>

         <Provider store={store}>

@@ -55,7 +40,6 @@ class App extends Component {
             'request.credentials': 'include',
             'general.betaUpdates': true,
             'editor.reuseHeaders': true,
-            'editor.theme': 'dark'
           }"
         />
       </Provider>
@@ -6,7 +6,7 @@ body {
     sans-serif;
   -webkit-font-smoothing: antialiased;
   -moz-osx-font-smoothing: grayscale;
-  background-color: #0f202d;
+  background-color: #09141b;
 }

 code {
@@ -1,7 +0,0 @@
(deleted image asset: a 7-line SVG of the stock React logo — an `<svg viewBox="0 0 841.9 595.3">` wrapping a `<g fill="#61DAFB">` group of paths and a circle; the image diff reports it as 2.6 KiB)
yarn.lock:

@@ -2,64 +2,6 @@
 # yarn lockfile v1


-"@apollographql/graphql-playground-react@^1.7.32":
-  version "1.7.32"
-  resolved "https://registry.yarnpkg.com/@apollographql/graphql-playground-react/-/graphql-playground-react-1.7.32.tgz#3cc11ee897981cb5d162beac7e733d153e2d572e"
-  integrity sha512-IRp92j+XOyiONQQDA45yJxEeylmQIRhewVwB0i+DnH2HNUp37BSk/ZzF6OPncHfiZ9qnGe6jZloQftYHQKk5Vg==
-  dependencies:
-    "@types/lru-cache" "^4.1.1"
-    apollo-link "^1.0.7"
-    apollo-link-http "^1.3.2"
-    apollo-link-ws "1.0.8"
-    calculate-size "^1.1.1"
-    codemirror "^5.38.0"
-    codemirror-graphql timsuchanek/codemirror-graphql#details-fix
-    copy-to-clipboard "^3.0.8"
-    cryptiles "4.1.2"
-    cuid "^1.3.8"
-    graphiql "^0.11.2"
-    graphql "^0.11.7"
-    immutable "^4.0.0-rc.9"
-    isomorphic-fetch "^2.2.1"
-    js-yaml "^3.10.0"
-    json-stable-stringify "^1.0.1"
-    keycode "^2.1.9"
-    lodash "^4.17.11"
-    lodash.debounce "^4.0.8"
-    markdown-it "^8.4.1"
-    marked "^0.3.19"
-    prettier "^1.13.0"
-    prop-types "^15.6.0"
-    query-string "5"
-    react "^16.3.1"
-    react-addons-shallow-compare "^15.6.2"
-    react-codemirror "^1.0.0"
-    react-copy-to-clipboard "^5.0.1"
-    react-display-name "^0.2.3"
-    react-dom "^16.3.1"
-    react-helmet "^5.2.0"
-    react-input-autosize "^2.2.1"
-    react-modal "^3.1.11"
-    react-redux "^5.0.6"
-    react-router-dom "^4.2.2"
-    react-sortable-hoc "^0.8.3"
-    react-transition-group "^2.2.1"
-    react-virtualized "^9.12.0"
-    redux "^3.7.2"
-    redux-actions "^2.2.1"
-    redux-immutable "^4.0.0"
-    redux-localstorage rc
-    redux-localstorage-debounce "^0.1.0"
-    redux-localstorage-filter "^0.1.1"
-    redux-saga "^0.16.0"
-    reselect "^3.0.1"
-    seamless-immutable "^7.0.1"
-    styled-components "^4.0.0"
-    subscriptions-transport-ws "^0.9.5"
-    utility-types "^1.0.0"
-    webpack-bundle-analyzer "^3.3.2"
-    zen-observable "^0.7.1"
-
 "@babel/code-frame@7.8.3", "@babel/code-frame@^7.0.0", "@babel/code-frame@^7.8.3":
   version "7.8.3"
   resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.8.3.tgz#33e25903d7481181534e12ec0a25f16b6fcf419e"

@@ -1024,7 +966,7 @@
   dependencies:
     regenerator-runtime "^0.13.4"

-"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.3.4", "@babel/runtime@^7.4.5", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7":
+"@babel/runtime@^7.0.0", "@babel/runtime@^7.1.2", "@babel/runtime@^7.3.4", "@babel/runtime@^7.4.5", "@babel/runtime@^7.5.5", "@babel/runtime@^7.6.3", "@babel/runtime@^7.7.2", "@babel/runtime@^7.8.4", "@babel/runtime@^7.8.7", "@babel/runtime@^7.9.2":
   version "7.9.2"
   resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.9.2.tgz#d90df0583a3a252f09aaa619665367bae518db06"
   integrity sha512-NE2DtOdufG7R5vnfQUTehdTfNycfUANEtCa9PssN9O/xmTzP4E08UI797ixaei6hBEVL9BI/PsdJS5x7mWoB9Q==

@@ -1292,6 +1234,50 @@
   resolved "https://registry.yarnpkg.com/@nodelib/fs.stat/-/fs.stat-1.1.3.tgz#2b5a3ab3f918cca48a8c754c08168e3f03eba61b"
   integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==

+"@redux-saga/core@^1.1.3":
+  version "1.1.3"
+  resolved "https://registry.yarnpkg.com/@redux-saga/core/-/core-1.1.3.tgz#3085097b57a4ea8db5528d58673f20ce0950f6a4"
+  integrity sha512-8tInBftak8TPzE6X13ABmEtRJGjtK17w7VUs7qV17S8hCO5S3+aUTWZ/DBsBJPdE8Z5jOPwYALyvofgq1Ws+kg==
+  dependencies:
+    "@babel/runtime" "^7.6.3"
+    "@redux-saga/deferred" "^1.1.2"
+    "@redux-saga/delay-p" "^1.1.2"
+    "@redux-saga/is" "^1.1.2"
+    "@redux-saga/symbols" "^1.1.2"
+    "@redux-saga/types" "^1.1.0"
+    redux "^4.0.4"
+    typescript-tuple "^2.2.1"
+
+"@redux-saga/deferred@^1.1.2":
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/@redux-saga/deferred/-/deferred-1.1.2.tgz#59937a0eba71fff289f1310233bc518117a71888"
+  integrity sha512-908rDLHFN2UUzt2jb4uOzj6afpjgJe3MjICaUNO3bvkV/kN/cNeI9PMr8BsFXB/MR8WTAZQq/PlTq8Kww3TBSQ==
+
+"@redux-saga/delay-p@^1.1.2":
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/@redux-saga/delay-p/-/delay-p-1.1.2.tgz#8f515f4b009b05b02a37a7c3d0ca9ddc157bb355"
+  integrity sha512-ojc+1IoC6OP65Ts5+ZHbEYdrohmIw1j9P7HS9MOJezqMYtCDgpkoqB5enAAZrNtnbSL6gVCWPHaoaTY5KeO0/g==
+  dependencies:
+    "@redux-saga/symbols" "^1.1.2"
+
+"@redux-saga/is@^1.1.2":
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/@redux-saga/is/-/is-1.1.2.tgz#ae6c8421f58fcba80faf7cadb7d65b303b97e58e"
+  integrity sha512-OLbunKVsCVNTKEf2cH4TYyNbbPgvmZ52iaxBD4I1fTif4+MTXMa4/Z07L83zW/hTCXwpSZvXogqMqLfex2Tg6w==
+  dependencies:
+    "@redux-saga/symbols" "^1.1.2"
+    "@redux-saga/types" "^1.1.0"
+
+"@redux-saga/symbols@^1.1.2":
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/@redux-saga/symbols/-/symbols-1.1.2.tgz#216a672a487fc256872b8034835afc22a2d0595d"
+  integrity sha512-EfdGnF423glv3uMwLsGAtE6bg+R9MdqlHEzExnfagXPrIiuxwr3bdiAwz3gi+PsrQ3yBlaBpfGLtDG8rf3LgQQ==
+
+"@redux-saga/types@^1.1.0":
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/@redux-saga/types/-/types-1.1.0.tgz#0e81ce56b4883b4b2a3001ebe1ab298b84237204"
+  integrity sha512-afmTuJrylUU/0OtqzaRkbyYFFNgCF73Bvel/sw90pvGrWIZ+vyoIJqA6eMSoA6+nb443kTmulmBtC9NerXboNg==
+
 "@svgr/babel-plugin-add-jsx-attribute@^4.2.0":
   version "4.2.0"
   resolved "https://registry.yarnpkg.com/@svgr/babel-plugin-add-jsx-attribute/-/babel-plugin-add-jsx-attribute-4.2.0.tgz#dadcb6218503532d6884b210e7f3c502caaa44b1"

@@ -1488,9 +1474,9 @@
   integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==

 "@types/node@*":
-  version "13.11.1"
-  resolved "https://registry.yarnpkg.com/@types/node/-/node-13.11.1.tgz#49a2a83df9d26daacead30d0ccc8762b128d53c7"
-  integrity sha512-eWQGP3qtxwL8FGneRrC5DwrJLGN4/dH1clNTuLfN81HCrxVtxRjygDTUoZJ5ASlDEeo0ppYFQjQIlXhtXpOn6g==
+  version "13.13.1"
+  resolved "https://registry.yarnpkg.com/@types/node/-/node-13.13.1.tgz#1ba94c5a177a1692518bfc7b41aec0aa1a14354e"
+  integrity sha512-uysqysLJ+As9jqI5yqjwP3QJrhOcUwBjHUlUxPxjbplwKoILvXVsmYWEhfmAQlrPfbRZmhJB007o4L9sKqtHqQ==

 "@types/parse-json@^4.0.0":
   version "4.0.0"

@@ -1520,39 +1506,39 @@
     "@types/yargs-parser" "*"

 "@typescript-eslint/eslint-plugin@^2.10.0":
-  version "2.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.28.0.tgz#4431bc6d3af41903e5255770703d4e55a0ccbdec"
-  integrity sha512-w0Ugcq2iatloEabQP56BRWJowliXUP5Wv6f9fKzjJmDW81hOTBxRoJ4LoEOxRpz9gcY51Libytd2ba3yLmSOfg==
+  version "2.29.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.29.0.tgz#c9efab7624e3dd6d144a0e4577a541d1bd42c2ac"
+  integrity sha512-X/YAY7azKirENm4QRpT7OVmzok02cSkqeIcLmdz6gXUQG4Hk0Fi9oBAynSAyNXeGdMRuZvjBa0c1Lu0dn/u6VA==
   dependencies:
-    "@typescript-eslint/experimental-utils" "2.28.0"
+    "@typescript-eslint/experimental-utils" "2.29.0"
     functional-red-black-tree "^1.0.1"
     regexpp "^3.0.0"
     tsutils "^3.17.1"

-"@typescript-eslint/experimental-utils@2.28.0":
-  version "2.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-2.28.0.tgz#1fd0961cd8ef6522687b4c562647da6e71f8833d"
-  integrity sha512-4SL9OWjvFbHumM/Zh/ZeEjUFxrYKtdCi7At4GyKTbQlrj1HcphIDXlje4Uu4cY+qzszR5NdVin4CCm6AXCjd6w==
+"@typescript-eslint/experimental-utils@2.29.0":
+  version "2.29.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/experimental-utils/-/experimental-utils-2.29.0.tgz#3cb8060de9265ba131625a96bbfec31ba6d4a0fe"
+  integrity sha512-H/6VJr6eWYstyqjWXBP2Nn1hQJyvJoFdDtsHxGiD+lEP7piGnGpb/ZQd+z1ZSB1F7dN+WsxUDh8+S4LwI+f3jw==
   dependencies:
     "@types/json-schema" "^7.0.3"
-    "@typescript-eslint/typescript-estree" "2.28.0"
+    "@typescript-eslint/typescript-estree" "2.29.0"
     eslint-scope "^5.0.0"
     eslint-utils "^2.0.0"

 "@typescript-eslint/parser@^2.10.0":
-  version "2.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-2.28.0.tgz#bb761286efd2b0714761cab9d0ee5847cf080385"
-  integrity sha512-RqPybRDquui9d+K86lL7iPqH6Dfp9461oyqvlXMNtap+PyqYbkY5dB7LawQjDzot99fqzvS0ZLZdfe+1Bt3Jgw==
+  version "2.29.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/parser/-/parser-2.29.0.tgz#6e3c4e21ed6393dc05b9d8b47f0b7e731ef21c9c"
+  integrity sha512-H78M+jcu5Tf6m/5N8iiFblUUv+HJDguMSdFfzwa6vSg9lKR8Mk9BsgeSjO8l2EshKnJKcbv0e8IDDOvSNjl0EA==
   dependencies:
     "@types/eslint-visitor-keys" "^1.0.0"
-    "@typescript-eslint/experimental-utils" "2.28.0"
-    "@typescript-eslint/typescript-estree" "2.28.0"
+    "@typescript-eslint/experimental-utils" "2.29.0"
+    "@typescript-eslint/typescript-estree" "2.29.0"
     eslint-visitor-keys "^1.1.0"

-"@typescript-eslint/typescript-estree@2.28.0":
-  version "2.28.0"
-  resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-2.28.0.tgz#d34949099ff81092c36dc275b6a1ea580729ba00"
-  integrity sha512-HDr8MP9wfwkiuqzRVkuM3BeDrOC4cKbO5a6BymZBHUt5y/2pL0BXD6I/C/ceq2IZoHWhcASk+5/zo+dwgu9V8Q==
+"@typescript-eslint/typescript-estree@2.29.0":
+  version "2.29.0"
+  resolved "https://registry.yarnpkg.com/@typescript-eslint/typescript-estree/-/typescript-estree-2.29.0.tgz#1be6612bb02fc37ac9f466521c1459a4744e8d3a"
+  integrity sha512-3YGbtnWy4az16Egy5Fj5CckkVlpIh0MADtAQza+jiMADRSKkjdpzZp/5WuvwK/Qib3Z0HtzrDFeWanS99dNhnA==
   dependencies:
     debug "^4.1.1"
     eslint-visitor-keys "^1.1.0"

@@ -1730,11 +1716,6 @@ abab@^2.0.0:
   resolved "https://registry.yarnpkg.com/abab/-/abab-2.0.3.tgz#623e2075e02eb2d3f2475e49f99c91846467907a"
   integrity sha512-tsFzPpcttalNjFBCFMqsKYQcWxxen1pgJR56by//QwvJc4/OUS3kPOOttx2tSIfjsylB0pYu7f5D3K1RCxUnUg==

-abbrev@1:
-  version "1.1.1"
-  resolved "https://registry.yarnpkg.com/abbrev/-/abbrev-1.1.1.tgz#f8f2c887ad10bf67f634f005b6987fed3179aac8"
-  integrity sha512-nne9/IiQ/hzIhY6pdDnbBtz7DjPTKrY00P/zvPSm5pOFkl6xuGrGnXn/VtTNNfNtAfZ9/1RtehkszU9qcTii0Q==
-
 accepts@~1.3.4, accepts@~1.3.5, accepts@~1.3.7:
   version "1.3.7"
   resolved "https://registry.yarnpkg.com/accepts/-/accepts-1.3.7.tgz#531bc726517a3b2b41f850021c6cc15eaab507cd"

@@ -1816,9 +1797,9 @@ ajv-keywords@^3.1.0, ajv-keywords@^3.4.1:
   integrity sha512-RO1ibKvd27e6FEShVFfPALuHI3WjSVNeK5FIsmme/LYRNxjKuNj+Dt7bucLa6NdSv3JcVTyMlm9kGR84z1XpaQ==

 ajv@^6.1.0, ajv@^6.10.0, ajv@^6.10.2, ajv@^6.12.0, ajv@^6.5.5:
-  version "6.12.0"
-  resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.0.tgz#06d60b96d87b8454a5adaba86e7854da629db4b7"
-  integrity sha512-D6gFiFA0RRLyUbvijN74DWAjXSFxWKaWP7mldxkVhyhAV3+SWA9HEJPHQ2c9soIeTFJqcSdFDGFgdqs1iUU2Hw==
+  version "6.12.2"
+  resolved "https://registry.yarnpkg.com/ajv/-/ajv-6.12.2.tgz#c629c5eced17baf314437918d2da88c99d5958cd"
+  integrity sha512-k+V+hzjm5q/Mr8ef/1Y9goCmlsK4I6Sm74teeyGvFk1XrOsbsKLjEdrvny42CZ+a8sXbk8KWpY/bDwS+FLL2UQ==
   dependencies:
     fast-deep-equal "^3.1.1"
     fast-json-stable-stringify "^2.0.0"

@@ -1954,19 +1935,11 @@ apollo-utilities@^1.3.0:
     ts-invariant "^0.4.0"
     tslib "^1.10.0"

-aproba@^1.0.3, aproba@^1.1.1:
+aproba@^1.1.1:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/aproba/-/aproba-1.2.0.tgz#6802e6264efd18c790a1b0d517f0f2627bf2c94a"
   integrity sha512-Y9J6ZjXtoYh8RnXVCMOU/ttDmk1aBjunq9vO0ta5x85WDQiQfUF9sIPBITdbiiIVcBo03Hi3jMxigBtsddlXRw==

-are-we-there-yet@~1.1.2:
-  version "1.1.5"
-  resolved "https://registry.yarnpkg.com/are-we-there-yet/-/are-we-there-yet-1.1.5.tgz#4b35c2944f062a8bfcda66410760350fe9ddfc21"
-  integrity sha512-5hYdAkZlcG8tOLujVDTgCT+uPX0VnpAH28gWsLfzpXYm7wP6mp5Q/gYyR7YQ0cKVJcXJnl3j2kpBan13PtQf6w==
-  dependencies:
-    delegates "^1.0.0"
-    readable-stream "^2.0.6"
-
 argparse@^1.0.7:
   version "1.0.10"
   resolved "https://registry.yarnpkg.com/argparse/-/argparse-1.0.10.tgz#bcd6791ea5ae09725e17e5ad988134cd40b3d911"

@@ -2220,9 +2193,9 @@ babel-loader@8.1.0:
     schema-utils "^2.6.5"

 babel-plugin-dynamic-import-node@^2.3.0:
-  version "2.3.0"
-  resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.0.tgz#f00f507bdaa3c3e3ff6e7e5e98d90a7acab96f7f"
-  integrity sha512-o6qFkpeQEBxcqt0XYlWzAVxNCSCZdUgcR8IRlhD/8DylxjjO4foPcvTW0GGKa/cVt3rvxZ7o5ippJ+/0nvLhlQ==
+  version "2.3.3"
+  resolved "https://registry.yarnpkg.com/babel-plugin-dynamic-import-node/-/babel-plugin-dynamic-import-node-2.3.3.tgz#84fda19c976ec5c6defef57f9427b3def66e17a3"
+  integrity sha512-jZVI+s9Zg3IqA/kdi0i6UDCybUI3aSBLnglhYbSSjKlV7yF1F/5LWv8MakQmvYpnbJDS6fcBL2KzHSxNCMtWSQ==
   dependencies:
     object.assign "^4.1.0"

@@ -2577,12 +2550,12 @@ browserslist@4.10.0:
     pkg-up "^3.1.0"

 browserslist@^4.0.0, browserslist@^4.11.1, browserslist@^4.6.2, browserslist@^4.6.4, browserslist@^4.8.5, browserslist@^4.9.1:
-  version "4.11.1"
-  resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.11.1.tgz#92f855ee88d6e050e7e7311d987992014f1a1f1b"
-  integrity sha512-DCTr3kDrKEYNw6Jb9HFxVLQNaue8z+0ZfRBRjmCunKDEXEBajKDj2Y+Uelg+Pi29OnvaSGwjOsnRyNEkXzHg5g==
+  version "4.12.0"
+  resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.12.0.tgz#06c6d5715a1ede6c51fc39ff67fd647f740b656d"
+  integrity sha512-UH2GkcEDSI0k/lRkuDSzFl9ZZ87skSy9w2XAn1MsZnL+4c4rqbBd3e82UWHbYDpztABrPBhZsTEeuxVfHppqDg==
   dependencies:
-    caniuse-lite "^1.0.30001038"
-    electron-to-chromium "^1.3.390"
+    caniuse-lite "^1.0.30001043"
+    electron-to-chromium "^1.3.413"
     node-releases "^1.1.53"
     pkg-up "^2.0.0"

@@ -2759,10 +2732,10 @@ caniuse-api@^3.0.0:
     lodash.memoize "^4.1.2"
     lodash.uniq "^4.5.0"

-caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000981, caniuse-lite@^1.0.30001035, caniuse-lite@^1.0.30001038, caniuse-lite@^1.0.30001039:
-  version "1.0.30001042"
-  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001042.tgz#c91ec21ec2d270bd76dbc2ce261260c292b8c93c"
-  integrity sha512-igMQ4dlqnf4tWv0xjaaE02op9AJ2oQzXKjWf4EuAHFN694Uo9/EfPVIPJcmn2WkU9RqozCxx5e2KPcVClHDbDw==
+caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000981, caniuse-lite@^1.0.30001035, caniuse-lite@^1.0.30001039, caniuse-lite@^1.0.30001043:
+  version "1.0.30001045"
+  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001045.tgz#a770df9de36ad6ca0c34f90eaa797a2dbbb1b619"
+  integrity sha512-Y8o2Iz1KPcD6FjySbk1sPpvJqchgxk/iow0DABpGyzA1UeQAuxh63Xh0Enj5/BrsYbXtCN32JmR4ZxQTCQ6E6A==

 capture-exit@^2.0.0:
   version "2.0.0"

@@ -2979,25 +2952,18 @@ code-point-at@^1.0.0:
   resolved "https://registry.yarnpkg.com/code-point-at/-/code-point-at-1.1.0.tgz#0d070b4d043a5bea33a2f1a40e2edb3d9a4ccf77"
   integrity sha1-DQcLTQQ6W+ozovGkDi7bPZpMz3c=

-codemirror-graphql@^0.6.11:
-  version "0.6.12"
-  resolved "https://registry.yarnpkg.com/codemirror-graphql/-/codemirror-graphql-0.6.12.tgz#91a273fe5188857524a30221d06e645b4ca41f00"
-  integrity sha512-7YP956JubbWkmk9QqKy62CZgdGbEulHNJkz2/aUDTpsE1KrQtRrT9WzStJaxAOEX2k4wUOpojUX2ItPxa69kFA==
+codemirror-graphql@^0.11.6:
+  version "0.11.6"
+  resolved "https://registry.yarnpkg.com/codemirror-graphql/-/codemirror-graphql-0.11.6.tgz#885e34afb5b7aacf0e328d4d5949e73ad21d5a4e"
+  integrity sha512-/zVKgOVS2/hfjAY0yoBkLz9ESHnWKBWpBNXQSoFF4Hl5q5AS2DmM22coonWKJcCvNry6TLak2F+QkzPeKVv3Eg==
   dependencies:
-    graphql-language-service-interface "^1.0.16"
-    graphql-language-service-parser "^0.1.14"
+    graphql-language-service-interface "^2.3.3"
+    graphql-language-service-parser "^1.5.2"

-codemirror-graphql@timsuchanek/codemirror-graphql#details-fix:
-  version "0.6.12"
-  resolved "https://codeload.github.com/timsuchanek/codemirror-graphql/tar.gz/801ec32683c38d6dc0f8f7bc19014a111edc9ebd"
-  dependencies:
-    graphql-language-service-interface "^1.0.18"
-    graphql-language-service-parser "^1.0.18"
-
-codemirror@^5.18.2, codemirror@^5.26.0, codemirror@^5.38.0:
-  version "5.52.2"
-  resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.52.2.tgz#c29e1f7179f85eb0dd17c0586fa810e4838ff584"
-  integrity sha512-WCGCixNUck2HGvY8/ZNI1jYfxPG5cRHv0VjmWuNzbtCLz8qYA5d+je4QhSSCtCaagyeOwMi/HmmPTjBgiTm2lQ==
+codemirror@^5.18.2, codemirror@^5.47.0, codemirror@^5.52.2:
+  version "5.53.2"
+  resolved "https://registry.yarnpkg.com/codemirror/-/codemirror-5.53.2.tgz#9799121cf8c50809cca487304e9de3a74d33f428"
+  integrity sha512-wvSQKS4E+P8Fxn/AQ+tQtJnF1qH5UOlxtugFLpubEZ5jcdH2iXTVinb+Xc/4QjshuOxRm4fUsU2QPF1JJKiyXA==

 collection-visit@^1.0.0:
   version "1.0.0"

@@ -3136,11 +3102,6 @@ console-browserify@^1.1.0:
|
|||||||
resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336"
|
resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336"
|
||||||
integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==
|
integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==
|
||||||
|
|
||||||
console-control-strings@^1.0.0, console-control-strings@~1.1.0:
|
|
||||||
version "1.1.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/console-control-strings/-/console-control-strings-1.1.0.tgz#3d7cf4464db6446ea644bf4b39507f9851008e8e"
|
|
||||||
integrity sha1-PXz0Rk22RG6mRL9LOVB/mFEAjo4=
|
|
||||||
|
|
||||||
constants-browserify@^1.0.0:
|
constants-browserify@^1.0.0:
|
||||||
version "1.0.0"
|
version "1.0.0"
|
||||||
resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75"
|
resolved "https://registry.yarnpkg.com/constants-browserify/-/constants-browserify-1.0.0.tgz#c20b96d8c617748aaf1c16021760cd27fcb8cb75"
|
||||||
@ -3202,7 +3163,7 @@ copy-descriptor@^0.1.0:
|
|||||||
resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d"
|
resolved "https://registry.yarnpkg.com/copy-descriptor/-/copy-descriptor-0.1.1.tgz#676f6eb3c39997c2ee1ac3a924fd6124748f578d"
|
||||||
integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=
|
integrity sha1-Z29us8OZl8LuGsOpJP1hJHSPV40=
|
||||||
|
|
||||||
copy-to-clipboard@^3, copy-to-clipboard@^3.0.8:
|
copy-to-clipboard@^3, copy-to-clipboard@^3.0.8, copy-to-clipboard@^3.2.0:
|
||||||
version "3.3.1"
|
version "3.3.1"
|
||||||
resolved "https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz#115aa1a9998ffab6196f93076ad6da3b913662ae"
|
resolved "https://registry.yarnpkg.com/copy-to-clipboard/-/copy-to-clipboard-3.3.1.tgz#115aa1a9998ffab6196f93076ad6da3b913662ae"
|
||||||
integrity sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==
|
integrity sha512-i13qo6kIHTTpCm8/Wup+0b1mVWETvu2kIMzKoK8FpkLkFxlt0znUAHcMzox+T8sPlqtZXq3CulEjQHsYiGFJUw==
|
||||||
@ -3640,7 +3601,7 @@ debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.0, debug@^2.6.9:
|
|||||||
dependencies:
|
dependencies:
|
||||||
ms "2.0.0"
|
ms "2.0.0"
|
||||||
|
|
||||||
debug@^3.0.0, debug@^3.1.1, debug@^3.2.5, debug@^3.2.6:
|
debug@^3.0.0, debug@^3.1.1, debug@^3.2.5:
|
||||||
version "3.2.6"
|
version "3.2.6"
|
||||||
resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
|
resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
|
||||||
integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
|
integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
|
||||||
@ -3676,11 +3637,6 @@ deep-equal@^1.0.1:
|
|||||||
object-keys "^1.1.1"
|
object-keys "^1.1.1"
|
||||||
regexp.prototype.flags "^1.2.0"
|
regexp.prototype.flags "^1.2.0"
|
||||||
|
|
||||||
deep-extend@^0.6.0:
|
|
||||||
version "0.6.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/deep-extend/-/deep-extend-0.6.0.tgz#c4fa7c95404a17a9c3e8ca7e1537312b736330ac"
|
|
||||||
integrity sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==
|
|
||||||
|
|
||||||
deep-is@~0.1.3:
|
deep-is@~0.1.3:
|
||||||
version "0.1.3"
|
version "0.1.3"
|
||||||
resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
|
resolved "https://registry.yarnpkg.com/deep-is/-/deep-is-0.1.3.tgz#b369d6fb5dbc13eecf524f91b070feedc357cf34"
|
||||||
@ -3741,11 +3697,6 @@ delayed-stream@~1.0.0:
|
|||||||
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
|
resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
|
||||||
integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk=
|
integrity sha1-3zrhmayt+31ECqrgsp4icrJOxhk=
|
||||||
|
|
||||||
delegates@^1.0.0:
|
|
||||||
version "1.0.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/delegates/-/delegates-1.0.0.tgz#84c6e159b81904fdca59a0ef44cd870d31250f9a"
|
|
||||||
integrity sha1-hMbhWbgZBP3KWaDvRM2HDTElD5o=
|
|
||||||
|
|
||||||
depd@~1.1.2:
|
depd@~1.1.2:
|
||||||
version "1.1.2"
|
version "1.1.2"
|
||||||
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
|
resolved "https://registry.yarnpkg.com/depd/-/depd-1.1.2.tgz#9bcd52e14c097763e749b274c4346ed2e560b5a9"
|
||||||
@ -3764,11 +3715,6 @@ destroy@~1.0.4:
|
|||||||
resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80"
|
resolved "https://registry.yarnpkg.com/destroy/-/destroy-1.0.4.tgz#978857442c44749e4206613e37946205826abd80"
|
||||||
integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=
|
integrity sha1-l4hXRCxEdJ5CBmE+N5RiBYJqvYA=
|
||||||
|
|
||||||
detect-libc@^1.0.2:
|
|
||||||
version "1.0.3"
|
|
||||||
resolved "https://registry.yarnpkg.com/detect-libc/-/detect-libc-1.0.3.tgz#fa137c4bd698edf55cd5cd02ac559f91a4c4ba9b"
|
|
||||||
integrity sha1-+hN8S9aY7fVc1c0CrFWfkaTEups=
|
|
||||||
|
|
||||||
detect-newline@^2.1.0:
|
detect-newline@^2.1.0:
|
||||||
version "2.1.0"
|
version "2.1.0"
|
||||||
resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-2.1.0.tgz#f41f1c10be4b00e87b5f13da680759f2c5bfd3e2"
|
resolved "https://registry.yarnpkg.com/detect-newline/-/detect-newline-2.1.0.tgz#f41f1c10be4b00e87b5f13da680759f2c5bfd3e2"
|
||||||
@ -3984,10 +3930,10 @@ ejs@^2.6.1:
|
|||||||
resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba"
|
resolved "https://registry.yarnpkg.com/ejs/-/ejs-2.7.4.tgz#48661287573dcc53e366c7a1ae52c3a120eec9ba"
|
||||||
integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==
|
integrity sha512-7vmuyh5+kuUyJKePhQfRQBhXV5Ce+RnaeeQArKu1EAMpL3WbgMt5WG6uQZpEVvYSSsxMXRKOewtDk9RaTKXRlA==
|
||||||
|
|
||||||
electron-to-chromium@^1.3.378, electron-to-chromium@^1.3.390:
|
electron-to-chromium@^1.3.378, electron-to-chromium@^1.3.413:
|
||||||
version "1.3.410"
|
version "1.3.414"
|
||||||
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.410.tgz#00e0ec61c22933daa8b4de172c03932678783adc"
|
resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.414.tgz#9d0a92defefda7cc1cf8895058b892795ddd6b41"
|
||||||
integrity sha512-DbCBdwtARI0l3e3m6ZIxVaTNahb6dSsmGjuag/twiVcWuM4MSpL5IfsJsJSyqLqxosE/m0CXlZaBmxegQW/dAg==
|
integrity sha512-UfxhIvED++qLwWrAq9uYVcqF8FdeV9sU2S7qhiHYFODxzXRrd1GZRl/PjITHsTEejgibcWDraD8TQqoHb1aCBQ==
|
||||||
|
|
||||||
elliptic@^6.0.0:
|
elliptic@^6.0.0:
|
||||||
version "6.5.2"
|
version "6.5.2"
|
||||||
@ -4055,7 +4001,7 @@ entities@^1.1.1, entities@~1.1.1:
|
|||||||
resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56"
|
resolved "https://registry.yarnpkg.com/entities/-/entities-1.1.2.tgz#bdfa735299664dfafd34529ed4f8522a275fea56"
|
||||||
integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==
|
integrity sha512-f2LZMYl1Fzu7YSBKg+RoROelpOaNrcGmE9AZubeDfrCEia483oW4MI4VyFd5VNHIgQ/7qm1I0wUHK1eJnn2y2w==
|
||||||
|
|
||||||
entities@^2.0.0:
|
entities@^2.0.0, entities@~2.0.0:
|
||||||
version "2.0.0"
|
version "2.0.0"
|
||||||
resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4"
|
resolved "https://registry.yarnpkg.com/entities/-/entities-2.0.0.tgz#68d6084cab1b079767540d80e56a39b423e4abf4"
|
||||||
integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw==
|
integrity sha512-D9f7V0JSRwIxlRI2mjMqufDrRDnx8p+eEOz7aUM9SuvF8gsBzra0/6tbjl1m8eQHrZlYj6PxqE00hZ1SAIKPLw==
|
||||||
@ -4343,11 +4289,11 @@ esprima@^4.0.0, esprima@^4.0.1:
|
|||||||
integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
|
integrity sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==
|
||||||
|
|
||||||
esquery@^1.0.1:
|
esquery@^1.0.1:
|
||||||
version "1.3.0"
|
version "1.3.1"
|
||||||
resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.3.0.tgz#e5e29a6f66a837840d34f68cb9ce355260d1128b"
|
resolved "https://registry.yarnpkg.com/esquery/-/esquery-1.3.1.tgz#b78b5828aa8e214e29fb74c4d5b752e1c033da57"
|
||||||
integrity sha512-/5qB+Mb0m2bh86tjGbA8pB0qBfdmCIK6ZNPjcw4/TtEH0+tTf0wLA5HK4KMTweSMwLGHwBDWCBV+6+2+EuHmgg==
|
integrity sha512-olpvt9QG0vniUBZspVRN6lwB7hOZoTRtT+jzR+tS4ffYx2mzbw+z0XCOk44aaLYKApNX5nMm+E+P6o25ip/DHQ==
|
||||||
dependencies:
|
dependencies:
|
||||||
estraverse "^5.0.0"
|
estraverse "^5.1.0"
|
||||||
|
|
||||||
esrecurse@^4.1.0:
|
esrecurse@^4.1.0:
|
||||||
version "4.2.1"
|
version "4.2.1"
|
||||||
@ -4361,7 +4307,7 @@ estraverse@^4.1.0, estraverse@^4.1.1, estraverse@^4.2.0:
|
|||||||
resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
|
resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-4.3.0.tgz#398ad3f3c5a24948be7725e83d11a7de28cdbd1d"
|
||||||
integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
|
integrity sha512-39nnKffWz8xN1BU/2c79n9nB9HDzo0niYUqx6xyqUnyoAnQyyWpOTdZEeiCch8BBu515t4wp9ZmgVfVhn9EBpw==
|
||||||
|
|
||||||
estraverse@^5.0.0:
|
estraverse@^5.1.0:
|
||||||
version "5.1.0"
|
version "5.1.0"
|
||||||
resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.1.0.tgz#374309d39fd935ae500e7b92e8a6b4c720e59642"
|
resolved "https://registry.yarnpkg.com/estraverse/-/estraverse-5.1.0.tgz#374309d39fd935ae500e7b92e8a6b4c720e59642"
|
||||||
integrity sha512-FyohXK+R0vE+y1nHLoBM7ZTyqRpqAlhdZHCWIWEviFLiGB8b04H6bQs8G+XTthacvT8VuwvteiP7RJSxMs8UEw==
|
integrity sha512-FyohXK+R0vE+y1nHLoBM7ZTyqRpqAlhdZHCWIWEviFLiGB8b04H6bQs8G+XTthacvT8VuwvteiP7RJSxMs8UEw==
|
||||||
@ -4876,13 +4822,6 @@ fs-extra@^8.1.0:
|
|||||||
jsonfile "^4.0.0"
|
jsonfile "^4.0.0"
|
||||||
universalify "^0.1.0"
|
universalify "^0.1.0"
|
||||||
|
|
||||||
fs-minipass@^1.2.5:
|
|
||||||
version "1.2.7"
|
|
||||||
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-1.2.7.tgz#ccff8570841e7fe4265693da88936c55aed7f7c7"
|
|
||||||
integrity sha512-GWSSJGFy4e9GUeCcbIkED+bgAoFyj7XF1mV8rma3QW4NIqX9Kyx79N/PF61H5udOV3aY1IaMLs6pGbH71nlCTA==
|
|
||||||
dependencies:
|
|
||||||
minipass "^2.6.0"
|
|
||||||
|
|
||||||
fs-minipass@^2.0.0:
|
fs-minipass@^2.0.0:
|
||||||
version "2.1.0"
|
version "2.1.0"
|
||||||
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
|
resolved "https://registry.yarnpkg.com/fs-minipass/-/fs-minipass-2.1.0.tgz#7f5036fdbf12c63c169190cbe4199c852271f9fb"
|
||||||
@ -4928,20 +4867,6 @@ functional-red-black-tree@^1.0.1:
|
|||||||
resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
|
resolved "https://registry.yarnpkg.com/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz#1b0ab3bd553b2a0d6399d29c0e3ea0b252078327"
|
||||||
integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=
|
integrity sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=
|
||||||
|
|
||||||
gauge@~2.7.3:
|
|
||||||
version "2.7.4"
|
|
||||||
resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
|
|
||||||
integrity sha1-LANAXHU4w51+s3sxcCLjJfsBi/c=
|
|
||||||
dependencies:
|
|
||||||
aproba "^1.0.3"
|
|
||||||
console-control-strings "^1.0.0"
|
|
||||||
has-unicode "^2.0.0"
|
|
||||||
object-assign "^4.1.0"
|
|
||||||
signal-exit "^3.0.0"
|
|
||||||
string-width "^1.0.1"
|
|
||||||
strip-ansi "^3.0.1"
|
|
||||||
wide-align "^1.1.0"
|
|
||||||
|
|
||||||
gensync@^1.0.0-beta.1:
|
gensync@^1.0.0-beta.1:
|
||||||
version "1.0.0-beta.1"
|
version "1.0.0-beta.1"
|
||||||
resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269"
|
resolved "https://registry.yarnpkg.com/gensync/-/gensync-1.0.0-beta.1.tgz#58f4361ff987e5ff6e1e7a210827aa371eaac269"
|
||||||
@ -5070,25 +4995,17 @@ graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6
|
|||||||
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423"
|
resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423"
|
||||||
integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==
|
integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==
|
||||||
|
|
||||||
graphiql@^0.11.2:
|
graphiql@^0.17.5:
|
||||||
version "0.11.11"
|
version "0.17.5"
|
||||||
resolved "https://registry.yarnpkg.com/graphiql/-/graphiql-0.11.11.tgz#eeaf9a38927dbe8c6ecbf81e700735e16ec50e71"
|
resolved "https://registry.yarnpkg.com/graphiql/-/graphiql-0.17.5.tgz#76c553fc0d8936f77e33114ac3374f1807a718ff"
|
||||||
integrity sha512-+r8qY2JRRs+uaZcrZOxpNhdlCZoS8yS5KQ6X53Twc8WecZ6VtAn+MVHroLOd4u9HVPxTXZ9RUd9+556EpTc0xA==
|
integrity sha512-ogNsrg9qM1py9PzcIUn+C29JukOADbjIfB6zwtfui4BrpOEpDb5UZ6TjAmSL/F/8tCt4TbgwKtkSrBeLNNUrqA==
|
||||||
dependencies:
|
dependencies:
|
||||||
codemirror "^5.26.0"
|
codemirror "^5.47.0"
|
||||||
codemirror-graphql "^0.6.11"
|
codemirror-graphql "^0.11.6"
|
||||||
markdown-it "^8.4.0"
|
copy-to-clipboard "^3.2.0"
|
||||||
|
entities "^2.0.0"
|
||||||
graphql-config@2.0.1:
|
markdown-it "^10.0.0"
|
||||||
version "2.0.1"
|
regenerator-runtime "^0.13.3"
|
||||||
resolved "https://registry.yarnpkg.com/graphql-config/-/graphql-config-2.0.1.tgz#d34a9bdf1d7360af7b01db9b20260a342ddc7390"
|
|
||||||
integrity sha512-eb4FzlODifHE/Q+91QptAmkGw39wL5ToinJ2556UUsGt2drPc4tzifL+HSnHSaxiIbH8EUhc/Fa6+neinF04qA==
|
|
||||||
dependencies:
|
|
||||||
graphql-import "^0.4.4"
|
|
||||||
graphql-request "^1.5.0"
|
|
||||||
js-yaml "^3.10.0"
|
|
||||||
lodash "^4.17.4"
|
|
||||||
minimatch "^3.0.4"
|
|
||||||
|
|
||||||
graphql-config@2.2.1:
|
graphql-config@2.2.1:
|
||||||
version "2.2.1"
|
version "2.2.1"
|
||||||
@ -5101,13 +5018,6 @@ graphql-config@2.2.1:
|
|||||||
lodash "^4.17.4"
|
lodash "^4.17.4"
|
||||||
minimatch "^3.0.4"
|
minimatch "^3.0.4"
|
||||||
|
|
||||||
graphql-import@^0.4.4:
|
|
||||||
version "0.4.5"
|
|
||||||
resolved "https://registry.yarnpkg.com/graphql-import/-/graphql-import-0.4.5.tgz#e2f18c28d335733f46df8e0733d8deb1c6e2a645"
|
|
||||||
integrity sha512-G/+I08Qp6/QGTb9qapknCm3yPHV0ZL7wbaalWFpxsfR8ZhZoTBe//LsbsCKlbALQpcMegchpJhpTSKiJjhaVqQ==
|
|
||||||
dependencies:
|
|
||||||
lodash "^4.17.4"
|
|
||||||
|
|
||||||
graphql-import@^0.7.1:
|
graphql-import@^0.7.1:
|
||||||
version "0.7.1"
|
version "0.7.1"
|
||||||
resolved "https://registry.yarnpkg.com/graphql-import/-/graphql-import-0.7.1.tgz#4add8d91a5f752d764b0a4a7a461fcd93136f223"
|
resolved "https://registry.yarnpkg.com/graphql-import/-/graphql-import-0.7.1.tgz#4add8d91a5f752d764b0a4a7a461fcd93136f223"
|
||||||
@ -5116,24 +5026,17 @@ graphql-import@^0.7.1:
|
|||||||
lodash "^4.17.4"
|
lodash "^4.17.4"
|
||||||
resolve-from "^4.0.0"
|
resolve-from "^4.0.0"
|
||||||
|
|
||||||
graphql-language-service-interface@^1.0.16, graphql-language-service-interface@^1.0.18:
|
graphql-language-service-interface@^2.3.3:
|
||||||
version "1.3.2"
|
version "2.3.3"
|
||||||
resolved "https://registry.yarnpkg.com/graphql-language-service-interface/-/graphql-language-service-interface-1.3.2.tgz#4bd5d49e23766c3d2ab65d110f26f10e321cc000"
|
resolved "https://registry.yarnpkg.com/graphql-language-service-interface/-/graphql-language-service-interface-2.3.3.tgz#33d2263e797dcfcac2426e00a33349d2a489edfa"
|
||||||
integrity sha512-sOxFV5sBSnYtKIFHtlmAHHVdhok7CRbvCPLcuHvL4Q1RSgKRsPpeHUDKU+yCbmlonOKn/RWEKaYWrUY0Sgv70A==
|
integrity sha512-SMUbbiHbD19ffyDrucR+vwyaKYhDcTgbBFDJu9Z4TBa5XaksmyiurB3f+pWlIkuFvogBvW3JDiiJJlUW7awivg==
|
||||||
dependencies:
|
dependencies:
|
||||||
graphql-config "2.0.1"
|
graphql-config "2.2.1"
|
||||||
graphql-language-service-parser "^1.2.2"
|
graphql-language-service-parser "^1.5.2"
|
||||||
graphql-language-service-types "^1.2.2"
|
graphql-language-service-types "^1.5.2"
|
||||||
graphql-language-service-utils "^1.2.2"
|
graphql-language-service-utils "^2.3.3"
|
||||||
|
|
||||||
graphql-language-service-parser@^0.1.14:
|
graphql-language-service-parser@^1.5.2:
|
||||||
version "0.1.14"
|
|
||||||
resolved "https://registry.yarnpkg.com/graphql-language-service-parser/-/graphql-language-service-parser-0.1.14.tgz#dd25abda5dcff4f2268c9a19e026004271491661"
|
|
||||||
integrity sha512-72M4OksONeqT5slfdfODmlPBFlUQQkcnRhjgmPt9H2n8/DUcf4XzDkGXudBWpzNfjVU35+IADYW6x13wKw/fOg==
|
|
||||||
dependencies:
|
|
||||||
graphql-language-service-types "^0.1.14"
|
|
||||||
|
|
||||||
graphql-language-service-parser@^1.0.18, graphql-language-service-parser@^1.2.2:
|
|
||||||
version "1.5.2"
|
version "1.5.2"
|
||||||
resolved "https://registry.yarnpkg.com/graphql-language-service-parser/-/graphql-language-service-parser-1.5.2.tgz#37deb56c16155cbd324fedef42ef9a3f0b38d723"
|
resolved "https://registry.yarnpkg.com/graphql-language-service-parser/-/graphql-language-service-parser-1.5.2.tgz#37deb56c16155cbd324fedef42ef9a3f0b38d723"
|
||||||
integrity sha512-kModfvwX5XiT+tYRhh8d6X+rb5Zq9zFQVdcoVlQJvoIW7U6SkxUAeO5Ei9OI3KOMH5r8wyfmXflBZ+xUbJySJw==
|
integrity sha512-kModfvwX5XiT+tYRhh8d6X+rb5Zq9zFQVdcoVlQJvoIW7U6SkxUAeO5Ei9OI3KOMH5r8wyfmXflBZ+xUbJySJw==
|
||||||
@ -5141,25 +5044,78 @@ graphql-language-service-parser@^1.0.18, graphql-language-service-parser@^1.2.2:
|
|||||||
graphql-config "2.2.1"
|
graphql-config "2.2.1"
|
||||||
graphql-language-service-types "^1.5.2"
|
graphql-language-service-types "^1.5.2"
|
||||||
|
|
||||||
graphql-language-service-types@^0.1.14:
|
graphql-language-service-types@^1.5.2:
|
||||||
version "0.1.14"
|
|
||||||
resolved "https://registry.yarnpkg.com/graphql-language-service-types/-/graphql-language-service-types-0.1.14.tgz#e6112785fc23ea8222f59a7f00e61b359f263c88"
|
|
||||||
integrity sha512-77KP83gvK0gWswvGY/+jx/GpsOfKuwWQ1AYnyPT2GDGG3//1QxQTObaZMAEevNTxJtO8T26YXxoUlqkvET7iEg==
|
|
||||||
|
|
||||||
graphql-language-service-types@^1.2.2, graphql-language-service-types@^1.5.2:
|
|
||||||
version "1.5.2"
|
version "1.5.2"
|
||||||
resolved "https://registry.yarnpkg.com/graphql-language-service-types/-/graphql-language-service-types-1.5.2.tgz#bfd3b27a45dbc2457233c73cc1f8ff5da26795f8"
|
resolved "https://registry.yarnpkg.com/graphql-language-service-types/-/graphql-language-service-types-1.5.2.tgz#bfd3b27a45dbc2457233c73cc1f8ff5da26795f8"
|
||||||
integrity sha512-WOFHBZX1K41svohPTmhOcKg+zz27d6ULFuZ8mzkiJ9nIpGKueAPyh7/xR0VZNBUAfDzTCbE6wQZxsPl5Kvd7IA==
|
integrity sha512-WOFHBZX1K41svohPTmhOcKg+zz27d6ULFuZ8mzkiJ9nIpGKueAPyh7/xR0VZNBUAfDzTCbE6wQZxsPl5Kvd7IA==
|
||||||
dependencies:
|
dependencies:
|
||||||
graphql-config "2.2.1"
|
graphql-config "2.2.1"
|
||||||
|
|
||||||
graphql-language-service-utils@^1.2.2:
|
graphql-language-service-utils@^2.3.3:
|
||||||
version "1.2.2"
|
version "2.3.3"
|
||||||
resolved "https://registry.yarnpkg.com/graphql-language-service-utils/-/graphql-language-service-utils-1.2.2.tgz#d31d4b4288085bd31d1bb8efc35790d69e496cae"
|
resolved "https://registry.yarnpkg.com/graphql-language-service-utils/-/graphql-language-service-utils-2.3.3.tgz#babfffecb754920f028525c4c094bb68638370a3"
|
||||||
integrity sha512-98hzn1Dg3sSAiB+TuvNwWAoBrzuHs8NylkTK26TFyBjozM5wBZttp+T08OvOt+9hCFYRa43yRPrWcrs78KH9Hw==
|
integrity sha512-uHLdIbQpKkE1V2WA12DRMXrUZpPD3ZKPOuH3MHlNg+j9AEe1y83chA4yP5DQqR+ARdMpefz4FJHvEjQr9alXYw==
|
||||||
dependencies:
|
dependencies:
|
||||||
graphql-config "2.0.1"
|
graphql-config "2.2.1"
|
||||||
graphql-language-service-types "^1.2.2"
|
graphql-language-service-types "^1.5.2"
|
||||||
|
|
||||||
|
graphql-playground-react@^1.7.22:
|
||||||
|
version "1.7.22"
|
||||||
|
resolved "https://registry.yarnpkg.com/graphql-playground-react/-/graphql-playground-react-1.7.22.tgz#73ef2be8ef6a0731bfbff58fcc89541507f0cec8"
|
||||||
|
integrity sha512-Fnu+t7kLGCEIbLNOh4FqZSuxsgPOJuQQVyQRrtKFCjppIy8RSxAzRMygFv2M/qizCw+C40byEQXjYZ19DWhw+g==
|
||||||
|
dependencies:
|
||||||
|
"@types/lru-cache" "^4.1.1"
|
||||||
|
apollo-link "^1.0.7"
|
||||||
|
apollo-link-http "^1.3.2"
|
||||||
|
apollo-link-ws "1.0.8"
|
||||||
|
calculate-size "^1.1.1"
|
||||||
|
codemirror "^5.52.2"
|
||||||
|
codemirror-graphql "^0.11.6"
|
||||||
|
copy-to-clipboard "^3.0.8"
|
||||||
|
cryptiles "4.1.2"
|
||||||
|
cuid "^1.3.8"
|
||||||
|
graphiql "^0.17.5"
|
||||||
|
graphql "^14.6.0"
|
||||||
|
immutable "^4.0.0-rc.9"
|
||||||
|
isomorphic-fetch "^2.2.1"
|
||||||
|
js-yaml "^3.10.0"
|
||||||
|
json-stable-stringify "^1.0.1"
|
||||||
|
keycode "^2.1.9"
|
||||||
|
lodash "^4.17.11"
|
||||||
|
lodash.debounce "^4.0.8"
|
||||||
|
markdown-it "^8.4.1"
|
||||||
|
marked "^0.3.19"
|
||||||
|
prettier "2.0.2"
|
||||||
|
prop-types "^15.7.2"
|
||||||
|
query-string "5"
|
||||||
|
react "16.13.1"
|
||||||
|
react-addons-shallow-compare "^15.6.2"
|
||||||
|
react-codemirror "^1.0.0"
|
||||||
|
react-copy-to-clipboard "^5.0.1"
|
||||||
|
react-display-name "^0.2.3"
|
||||||
|
react-dom "^16.13.1"
|
||||||
|
react-helmet "^5.2.0"
|
||||||
|
react-input-autosize "^2.2.1"
|
||||||
|
react-modal "^3.1.11"
|
||||||
|
react-redux "^7.2.0"
|
||||||
|
react-router-dom "^4.2.2"
|
||||||
|
react-sortable-hoc "^0.8.3"
|
||||||
|
react-transition-group "^2.2.1"
|
||||||
|
react-virtualized "^9.12.0"
|
||||||
|
redux "^4.0.5"
|
||||||
|
redux-actions "^2.6.5"
|
||||||
|
redux-immutable "^4.0.0"
|
||||||
|
redux-localstorage "^1.0.0-rc5"
|
||||||
|
redux-localstorage-debounce "^0.1.0"
|
||||||
|
redux-localstorage-filter "^0.1.1"
|
||||||
|
redux-saga "^1.1.3"
|
||||||
|
reselect "^4.0.0"
|
||||||
|
seamless-immutable "^7.0.1"
|
||||||
|
styled-components "^4.0.0"
|
||||||
|
subscriptions-transport-ws "^0.9.5"
|
||||||
|
utility-types "^1.0.0"
|
||||||
|
webpack-bundle-analyzer "^3.3.2"
|
||||||
|
zen-observable "^0.7.1"
|
||||||
|
|
||||||
graphql-request@^1.5.0:
|
graphql-request@^1.5.0:
|
||||||
version "1.8.2"
|
version "1.8.2"
|
||||||
@ -5168,7 +5124,7 @@ graphql-request@^1.5.0:
|
|||||||
dependencies:
|
dependencies:
|
||||||
cross-fetch "2.2.2"
|
cross-fetch "2.2.2"
|
||||||
|
|
||||||
graphql@^0.11.7, graphql@^14.1.1:
|
graphql@^14.1.1, graphql@^14.6.0:
|
||||||
version "14.6.0"
|
version "14.6.0"
|
||||||
resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.6.0.tgz#57822297111e874ea12f5cd4419616930cd83e49"
|
resolved "https://registry.yarnpkg.com/graphql/-/graphql-14.6.0.tgz#57822297111e874ea12f5cd4419616930cd83e49"
|
||||||
integrity sha512-VKzfvHEKybTKjQVpTFrA5yUq2S9ihcZvfJAtsDBBCuV6wauPu1xl/f9ehgVf0FcEJJs4vz6ysb/ZMkGigQZseg==
|
integrity sha512-VKzfvHEKybTKjQVpTFrA5yUq2S9ihcZvfJAtsDBBCuV6wauPu1xl/f9ehgVf0FcEJJs4vz6ysb/ZMkGigQZseg==
|
||||||
@ -5233,11 +5189,6 @@ has-symbols@^1.0.0, has-symbols@^1.0.1:
|
|||||||
resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
|
resolved "https://registry.yarnpkg.com/has-symbols/-/has-symbols-1.0.1.tgz#9f5214758a44196c406d9bd76cebf81ec2dd31e8"
|
||||||
integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==
|
integrity sha512-PLcsoqu++dmEIZB+6totNFKq/7Do+Z0u4oT0zKOJNl3lYK6vGwwu2hjHs+68OEZbTjiUE9bgOABXbP/GvrS0Kg==
|
||||||
|
|
||||||
has-unicode@^2.0.0:
|
|
||||||
version "2.0.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/has-unicode/-/has-unicode-2.0.1.tgz#e0e6fe6a28cf51138855e086d1691e771de2a8b9"
|
|
||||||
integrity sha1-4Ob+aijPUROIVeCG0Wkedx3iqLk=
|
|
||||||
|
|
||||||
has-value@^0.3.1:
|
has-value@^0.3.1:
|
||||||
version "0.3.1"
|
version "0.3.1"
|
||||||
resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f"
|
resolved "https://registry.yarnpkg.com/has-value/-/has-value-0.3.1.tgz#7b1f58bada62ca827ec0a2078025654845995e1f"
|
||||||
@ -5504,7 +5455,7 @@ https-browserify@^1.0.0:
|
|||||||
resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73"
|
resolved "https://registry.yarnpkg.com/https-browserify/-/https-browserify-1.0.0.tgz#ec06c10e0a34c0f2faf199f7fd7fc78fffd03c73"
|
||||||
integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=
|
integrity sha1-7AbBDgo0wPL68Zn3/X/Hj//QPHM=
|
||||||
|
|
||||||
iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@^0.4.4, iconv-lite@~0.4.13:
|
iconv-lite@0.4.24, iconv-lite@^0.4.24, iconv-lite@~0.4.13:
|
||||||
version "0.4.24"
|
version "0.4.24"
|
||||||
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
|
resolved "https://registry.yarnpkg.com/iconv-lite/-/iconv-lite-0.4.24.tgz#2022b4b25fbddc21d2f524974a474aafe733908b"
|
||||||
integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
|
integrity sha512-v3MXnZAcvnywkTUEZomIActle7RXXeedOR31wwl7VlyoXO4Qi9arvSenNQWne1TcRwhCL1HwLI21bEqdpj8/rA==
|
||||||
@ -5535,13 +5486,6 @@ iferr@^0.1.5:
|
|||||||
resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501"
|
resolved "https://registry.yarnpkg.com/iferr/-/iferr-0.1.5.tgz#c60eed69e6d8fdb6b3104a1fcbca1c192dc5b501"
|
||||||
integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE=
|
integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE=
|
||||||
|
|
||||||
ignore-walk@^3.0.1:
|
|
||||||
version "3.0.3"
|
|
||||||
resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37"
|
|
||||||
integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==
|
|
||||||
dependencies:
|
|
||||||
minimatch "^3.0.4"
|
|
||||||
|
|
||||||
ignore@^3.3.5:
|
ignore@^3.3.5:
|
||||||
version "3.3.10"
|
version "3.3.10"
|
||||||
resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
|
resolved "https://registry.yarnpkg.com/ignore/-/ignore-3.3.10.tgz#0a97fb876986e8081c631160f8f9f389157f0043"
|
||||||
@ -5643,7 +5587,7 @@ inherits@2.0.3:
|
|||||||
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
|
resolved "https://registry.yarnpkg.com/inherits/-/inherits-2.0.3.tgz#633c2c83e3da42a502f52466022480f4208261de"
|
||||||
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
|
integrity sha1-Yzwsg+PaQqUC9SRmAiSA9CCCYd4=
|
||||||
|
|
||||||
ini@^1.3.5, ini@~1.3.0:
|
ini@^1.3.5:
|
||||||
version "1.3.5"
|
version "1.3.5"
|
||||||
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927"
|
resolved "https://registry.yarnpkg.com/ini/-/ini-1.3.5.tgz#eee25f56db1c9ec6085e0c22778083f596abf927"
|
||||||
integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==
|
integrity sha512-RZY5huIKCMRWDUqZlEi72f/lmXKMvuszcMBduliQ3nnWbx9X/ZBQO7DijMEYS9EhHBb2qacRUMtC7svLwe0lcw==
|
||||||
@ -6503,9 +6447,9 @@ jest-worker@^24.6.0, jest-worker@^24.9.0:
|
|||||||
supports-color "^6.1.0"
|
supports-color "^6.1.0"
|
||||||
|
|
||||||
jest-worker@^25.1.0:
|
jest-worker@^25.1.0:
|
||||||
version "25.2.6"
|
version "25.4.0"
|
||||||
resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-25.2.6.tgz#d1292625326794ce187c38f51109faced3846c58"
|
resolved "https://registry.yarnpkg.com/jest-worker/-/jest-worker-25.4.0.tgz#ee0e2ceee5a36ecddf5172d6d7e0ab00df157384"
|
||||||
integrity sha512-FJn9XDUSxcOR4cwDzRfL1z56rUofNTFs539FGASpd50RHdb6EVkhxQqktodW2mI49l+W3H+tFJDotCHUQF6dmA==
|
integrity sha512-ghAs/1FtfYpMmYQ0AHqxV62XPvKdUDIBBApMZfly+E9JEmYh2K45G0R5dWxx986RN12pRCxsViwQVtGl+N4whw==
|
||||||
dependencies:
|
dependencies:
|
||||||
merge-stream "^2.0.0"
|
merge-stream "^2.0.0"
|
||||||
supports-color "^7.0.0"
|
supports-color "^7.0.0"
|
||||||
@ -6883,11 +6827,6 @@ locate-path@^5.0.0:
|
|||||||
dependencies:
|
dependencies:
|
||||||
p-locate "^4.1.0"
|
p-locate "^4.1.0"
|
||||||
|
|
||||||
lodash-es@^4.2.1:
|
|
||||||
version "4.17.15"
|
|
||||||
resolved "https://registry.yarnpkg.com/lodash-es/-/lodash-es-4.17.15.tgz#21bd96839354412f23d7a10340e5eac6ee455d78"
|
|
||||||
integrity sha512-rlrc3yU3+JNOpZ9zj5pQtxnx2THmvRykwL4Xlxoa8I9lHBlVbbyPhgyPMioxVZ4NqyxaVVtaJnzsyOidQIhyyQ==
|
|
||||||
|
|
||||||
lodash._getnative@^3.0.0:
|
lodash._getnative@^3.0.0:
|
||||||
version "3.9.1"
|
version "3.9.1"
|
||||||
resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5"
|
resolved "https://registry.yarnpkg.com/lodash._getnative/-/lodash._getnative-3.9.1.tgz#570bc7dede46d61cdcde687d65d3eecbaa3aaff5"
|
||||||
@ -6945,7 +6884,7 @@ lodash.uniq@^4.5.0:
|
|||||||
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
|
resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
|
||||||
integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=
|
integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=
|
||||||
|
|
||||||
"lodash@>=3.5 <5", lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.4, lodash@^4.17.5, lodash@^4.2.1:
|
"lodash@>=3.5 <5", lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.4, lodash@^4.17.5:
|
||||||
version "4.17.15"
|
version "4.17.15"
|
||||||
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
|
resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
|
||||||
integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
|
integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
|
||||||
@ -7022,7 +6961,18 @@ map-visit@^1.0.0:
|
|||||||
dependencies:
|
dependencies:
|
||||||
object-visit "^1.0.0"
|
object-visit "^1.0.0"
|
||||||
|
|
||||||
markdown-it@^8.4.0, markdown-it@^8.4.1:
|
markdown-it@^10.0.0:
|
||||||
|
version "10.0.0"
|
||||||
|
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-10.0.0.tgz#abfc64f141b1722d663402044e43927f1f50a8dc"
|
||||||
|
integrity sha512-YWOP1j7UbDNz+TumYP1kpwnP0aEa711cJjrAQrzd0UXlbJfc5aAq0F/PZHjiioqDC1NKgvIMX+o+9Bk7yuM2dg==
|
||||||
|
dependencies:
|
||||||
|
argparse "^1.0.7"
|
||||||
|
entities "~2.0.0"
|
||||||
|
linkify-it "^2.0.0"
|
||||||
|
mdurl "^1.0.1"
|
||||||
|
uc.micro "^1.0.5"
|
||||||
|
|
||||||
|
markdown-it@^8.4.1:
|
||||||
version "8.4.2"
|
version "8.4.2"
|
||||||
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-8.4.2.tgz#386f98998dc15a37722aa7722084f4020bdd9b54"
|
resolved "https://registry.yarnpkg.com/markdown-it/-/markdown-it-8.4.2.tgz#386f98998dc15a37722aa7722084f4020bdd9b54"
|
||||||
integrity sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==
|
integrity sha512-GcRz3AWTqSUphY3vsUqQSFMbgR38a4Lh3GWlHRh/7MRwz8mcu9n2IO7HOh+bXHrR9kOPDl5RNCaEsrneb+xhHQ==
|
||||||
@ -7245,14 +7195,6 @@ minipass-pipeline@^1.2.2:
|
|||||||
dependencies:
|
dependencies:
|
||||||
minipass "^3.0.0"
|
minipass "^3.0.0"
|
||||||
|
|
||||||
minipass@^2.6.0, minipass@^2.8.6, minipass@^2.9.0:
|
|
||||||
version "2.9.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6"
|
|
||||||
integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==
|
|
||||||
dependencies:
|
|
||||||
safe-buffer "^5.1.2"
|
|
||||||
yallist "^3.0.0"
|
|
||||||
|
|
||||||
minipass@^3.0.0, minipass@^3.1.1:
|
minipass@^3.0.0, minipass@^3.1.1:
|
||||||
version "3.1.1"
|
version "3.1.1"
|
||||||
resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.1.tgz#7607ce778472a185ad6d89082aa2070f79cedcd5"
|
resolved "https://registry.yarnpkg.com/minipass/-/minipass-3.1.1.tgz#7607ce778472a185ad6d89082aa2070f79cedcd5"
|
||||||
@ -7260,13 +7202,6 @@ minipass@^3.0.0, minipass@^3.1.1:
|
|||||||
dependencies:
|
dependencies:
|
||||||
yallist "^4.0.0"
|
yallist "^4.0.0"
|
||||||
|
|
||||||
minizlib@^1.2.1:
|
|
||||||
version "1.3.3"
|
|
||||||
resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d"
|
|
||||||
integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==
|
|
||||||
dependencies:
|
|
||||||
minipass "^2.9.0"
|
|
||||||
|
|
||||||
mississippi@^3.0.0:
|
mississippi@^3.0.0:
|
||||||
version "3.0.0"
|
version "3.0.0"
|
||||||
resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022"
|
resolved "https://registry.yarnpkg.com/mississippi/-/mississippi-3.0.0.tgz#ea0a3291f97e0b5e8776b363d5f0a12d94c67022"
|
||||||
@ -7299,7 +7234,7 @@ mixin-object@^2.0.1:
|
|||||||
for-in "^0.1.3"
|
for-in "^0.1.3"
|
||||||
is-extendable "^0.1.1"
|
is-extendable "^0.1.1"
|
||||||
|
|
||||||
mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@^0.5.3, mkdirp@~0.5.1:
|
mkdirp@^0.5.1, mkdirp@^0.5.3, mkdirp@~0.5.1:
|
||||||
version "0.5.5"
|
version "0.5.5"
|
||||||
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
|
resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.5.tgz#d91cefd62d1436ca0f41620e251288d420099def"
|
||||||
integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
|
integrity sha512-NKmAlESf6jMGym1++R0Ra7wvhV+wFW63FaSOFPwRahvea0gMUcGUhVeAg/0BC0wiv9ih5NYPB1Wn1UEI1/L+xQ==
|
||||||
@ -7352,9 +7287,9 @@ mute-stream@0.0.8:
|
|||||||
integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==
|
integrity sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==
|
||||||
|
|
||||||
nan@^2.12.1:
|
nan@^2.12.1:
|
||||||
version "2.14.0"
|
version "2.14.1"
|
||||||
resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.0.tgz#7818f722027b2459a86f0295d434d1fc2336c52c"
|
resolved "https://registry.yarnpkg.com/nan/-/nan-2.14.1.tgz#d7be34dfa3105b91494c3147089315eff8874b01"
|
||||||
integrity sha512-INOFj37C7k3AfaNTtX8RhsTw7qRy7eLET14cROi9+5HAVbbHuIWUHEauBv5qT4Av2tWasiTY1Jw6puUNqRJXQg==
|
integrity sha512-isWHgVjnFjh2x2yuJ/tj3JbwoHu3UC2dX5G/88Cm24yB6YopVgxvBObDY7n5xW6ExmFhJpSEQqFPvq9zaXc8Jw==
|
||||||
|
|
||||||
nanomatch@^1.2.9:
|
nanomatch@^1.2.9:
|
||||||
version "1.2.13"
|
version "1.2.13"
|
||||||
@ -7378,15 +7313,6 @@ natural-compare@^1.4.0:
|
|||||||
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
|
resolved "https://registry.yarnpkg.com/natural-compare/-/natural-compare-1.4.0.tgz#4abebfeed7541f2c27acfb29bdbbd15c8d5ba4f7"
|
||||||
integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
|
integrity sha1-Sr6/7tdUHywnrPspvbvRXI1bpPc=
|
||||||
|
|
||||||
needle@^2.2.1:
|
|
||||||
version "2.4.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/needle/-/needle-2.4.1.tgz#14af48732463d7475696f937626b1b993247a56a"
|
|
||||||
integrity sha512-x/gi6ijr4B7fwl6WYL9FwlCvRQKGlUNvnceho8wxkwXqN8jvVmmmATTmZPRRG7b/yC1eode26C2HO9jl78Du9g==
|
|
||||||
dependencies:
|
|
||||||
debug "^3.2.6"
|
|
||||||
iconv-lite "^0.4.4"
|
|
||||||
sax "^1.2.4"
|
|
||||||
|
|
||||||
negotiator@0.6.2:
|
negotiator@0.6.2:
|
||||||
version "0.6.2"
|
version "0.6.2"
|
||||||
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb"
|
resolved "https://registry.yarnpkg.com/negotiator/-/negotiator-0.6.2.tgz#feacf7ccf525a77ae9634436a64883ffeca346fb"
|
||||||
@ -7488,35 +7414,11 @@ node-notifier@^5.4.2:
|
|||||||
shellwords "^0.1.1"
|
shellwords "^0.1.1"
|
||||||
which "^1.3.0"
|
which "^1.3.0"
|
||||||
|
|
||||||
node-pre-gyp@*:
|
|
||||||
version "0.14.0"
|
|
||||||
resolved "https://registry.yarnpkg.com/node-pre-gyp/-/node-pre-gyp-0.14.0.tgz#9a0596533b877289bcad4e143982ca3d904ddc83"
|
|
||||||
integrity sha512-+CvDC7ZttU/sSt9rFjix/P05iS43qHCOOGzcr3Ry99bXG7VX953+vFyEuph/tfqoYu8dttBkE86JSKBO2OzcxA==
|
|
||||||
dependencies:
|
|
||||||
detect-libc "^1.0.2"
|
|
||||||
mkdirp "^0.5.1"
|
|
||||||
needle "^2.2.1"
|
|
||||||
nopt "^4.0.1"
|
|
||||||
npm-packlist "^1.1.6"
|
|
||||||
npmlog "^4.0.2"
|
|
||||||
rc "^1.2.7"
|
|
||||||
rimraf "^2.6.1"
|
|
||||||
semver "^5.3.0"
|
|
||||||
tar "^4.4.2"
|
|
||||||
|
|
||||||
node-releases@^1.1.52, node-releases@^1.1.53:
|
node-releases@^1.1.52, node-releases@^1.1.53:
|
||||||
version "1.1.53"
|
version "1.1.53"
|
||||||
resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.53.tgz#2d821bfa499ed7c5dffc5e2f28c88e78a08ee3f4"
|
resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.53.tgz#2d821bfa499ed7c5dffc5e2f28c88e78a08ee3f4"
|
||||||
integrity sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ==
|
integrity sha512-wp8zyQVwef2hpZ/dJH7SfSrIPD6YoJz6BDQDpGEkcA0s3LpAQoxBIYmfIq6QAhC1DhwsyCgTaTTcONwX8qzCuQ==
|
||||||
|
|
||||||
nopt@^4.0.1:
|
|
||||||
version "4.0.3"
|
|
||||||
resolved "https://registry.yarnpkg.com/nopt/-/nopt-4.0.3.tgz#a375cad9d02fd921278d954c2254d5aa57e15e48"
|
|
||||||
integrity sha512-CvaGwVMztSMJLOeXPrez7fyfObdZqNUK1cPAEzLHrTybIua9pMdmmPR5YwtfNftIOMv3DPUhFaxsZMNTQO20Kg==
|
|
||||||
dependencies:
|
|
||||||
abbrev "1"
|
|
||||||
osenv "^0.1.4"
|
|
||||||
|
|
||||||
normalize-package-data@^2.3.2:
|
normalize-package-data@^2.3.2:
|
||||||
version "2.5.0"
|
version "2.5.0"
|
||||||
resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
|
resolved "https://registry.yarnpkg.com/normalize-package-data/-/normalize-package-data-2.5.0.tgz#e66db1838b200c1dfc233225d12cb36520e234a8"
|
||||||
@ -7559,27 +7461,6 @@ normalize-url@^3.0.0:
|
|||||||
resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559"
|
resolved "https://registry.yarnpkg.com/normalize-url/-/normalize-url-3.3.0.tgz#b2e1c4dc4f7c6d57743df733a4f5978d18650559"
|
||||||
integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg==
|
integrity sha512-U+JJi7duF1o+u2pynbp2zXDW2/PADgC30f0GsHZtRh+HOcXHnw137TrNlyxxRvWW5fjKd3bcLHPxofWuCjaeZg==
|
||||||
|
|
||||||
npm-bundled@^1.0.1:
|
|
||||||
version "1.1.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/npm-bundled/-/npm-bundled-1.1.1.tgz#1edd570865a94cdb1bc8220775e29466c9fb234b"
|
|
||||||
integrity sha512-gqkfgGePhTpAEgUsGEgcq1rqPXA+tv/aVBlgEzfXwA1yiUJF7xtEt3CtVwOjNYQOVknDk0F20w58Fnm3EtG0fA==
|
|
||||||
dependencies:
|
|
||||||
npm-normalize-package-bin "^1.0.1"
|
|
||||||
|
|
||||||
npm-normalize-package-bin@^1.0.1:
|
|
||||||
version "1.0.1"
|
|
||||||
resolved "https://registry.yarnpkg.com/npm-normalize-package-bin/-/npm-normalize-package-bin-1.0.1.tgz#6e79a41f23fd235c0623218228da7d9c23b8f6e2"
|
|
||||||
integrity sha512-EPfafl6JL5/rU+ot6P3gRSCpPDW5VmIzX959Ob1+ySFUuuYHWHekXpwdUZcKP5C+DS4GEtdJluwBjnsNDl+fSA==
|
|
||||||
|
|
||||||
npm-packlist@^1.1.6:
|
|
||||||
version "1.4.8"
|
|
||||||
resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.8.tgz#56ee6cc135b9f98ad3d51c1c95da22bbb9b2ef3e"
|
|
||||||
integrity sha512-5+AZgwru5IevF5ZdnFglB5wNlHG1AOOuw28WhUq8/8emhBmLv6jX5by4WJCh7lW0uSYZYS6DXqIsyZVIXRZU9A==
|
|
||||||
dependencies:
|
|
||||||
ignore-walk "^3.0.1"
|
|
||||||
npm-bundled "^1.0.1"
|
|
||||||
npm-normalize-package-bin "^1.0.1"
|
|
||||||
|
|
||||||
npm-run-path@^2.0.0:
|
npm-run-path@^2.0.0:
|
||||||
version "2.0.2"
|
version "2.0.2"
|
||||||
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
|
resolved "https://registry.yarnpkg.com/npm-run-path/-/npm-run-path-2.0.2.tgz#35a9232dfa35d7067b4cb2ddf2357b1871536c5f"
|
||||||
@ -7587,16 +7468,6 @@ npm-run-path@^2.0.0:
|
|||||||
dependencies:
|
dependencies:
|
||||||
path-key "^2.0.0"
|
path-key "^2.0.0"
|
||||||
|
|
||||||
npmlog@^4.0.2:
|
|
||||||
version "4.1.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/npmlog/-/npmlog-4.1.2.tgz#08a7f2a8bf734604779a9efa4ad5cc717abb954b"
|
|
||||||
integrity sha512-2uUqazuKlTaSI/dC8AzicUck7+IrEaOnN/e0jd3Xtt1KcGpwx30v50mL7oPyr/h9bL3E4aZccVwpwP+5W9Vjkg==
|
|
||||||
dependencies:
|
|
||||||
are-we-there-yet "~1.1.2"
|
|
||||||
console-control-strings "~1.1.0"
|
|
||||||
gauge "~2.7.3"
|
|
||||||
set-blocking "~2.0.0"
|
|
||||||
|
|
||||||
nth-check@^1.0.2, nth-check@~1.0.1:
|
nth-check@^1.0.2, nth-check@~1.0.1:
|
||||||
version "1.0.2"
|
version "1.0.2"
|
||||||
resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c"
|
resolved "https://registry.yarnpkg.com/nth-check/-/nth-check-1.0.2.tgz#b2bd295c37e3dd58a3bf0700376663ba4d9cf05c"
|
||||||
@ -7811,11 +7682,6 @@ os-browserify@^0.3.0:
|
|||||||
resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27"
|
resolved "https://registry.yarnpkg.com/os-browserify/-/os-browserify-0.3.0.tgz#854373c7f5c2315914fc9bfc6bd8238fdda1ec27"
|
||||||
integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=
|
integrity sha1-hUNzx/XCMVkU/Jv8a9gjj92h7Cc=
|
||||||
|
|
||||||
os-homedir@^1.0.0:
|
|
||||||
version "1.0.2"
|
|
||||||
resolved "https://registry.yarnpkg.com/os-homedir/-/os-homedir-1.0.2.tgz#ffbc4988336e0e833de0c168c7ef152121aa7fb3"
|
|
||||||
integrity sha1-/7xJiDNuDoM94MFox+8VISGqf7M=
|
|
||||||
|
|
||||||
os-locale@^3.0.0:
|
os-locale@^3.0.0:
|
||||||
version "3.1.0"
|
version "3.1.0"
|
||||||
resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a"
|
resolved "https://registry.yarnpkg.com/os-locale/-/os-locale-3.1.0.tgz#a802a6ee17f24c10483ab9935719cef4ed16bf1a"
|
||||||
@ -7825,19 +7691,11 @@ os-locale@^3.0.0:
|
|||||||
lcid "^2.0.0"
|
lcid "^2.0.0"
|
||||||
mem "^4.0.0"
|
mem "^4.0.0"
|
||||||
|
|
||||||
os-tmpdir@^1.0.0, os-tmpdir@~1.0.2:
|
os-tmpdir@~1.0.2:
|
||||||
version "1.0.2"
|
version "1.0.2"
|
||||||
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
|
resolved "https://registry.yarnpkg.com/os-tmpdir/-/os-tmpdir-1.0.2.tgz#bbe67406c79aa85c5cfec766fe5734555dfa1274"
|
||||||
integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=
|
integrity sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=
|
||||||
|
|
||||||
osenv@^0.1.4:
|
|
||||||
version "0.1.5"
|
|
||||||
resolved "https://registry.yarnpkg.com/osenv/-/osenv-0.1.5.tgz#85cdfafaeb28e8677f416e287592b5f3f49ea410"
|
|
||||||
integrity sha512-0CWcCECdMVc2Rw3U5w9ZjqX6ga6ubk1xDVKxtBQPK7wis/0F2r9T6k4ydGYhecl7YUBxBVxhL5oisPsNxAPe2g==
|
|
||||||
dependencies:
|
|
||||||
os-homedir "^1.0.0"
|
|
||||||
os-tmpdir "^1.0.0"
|
|
||||||
|
|
||||||
p-defer@^1.0.0:
|
p-defer@^1.0.0:
|
||||||
version "1.0.0"
|
version "1.0.0"
|
||||||
resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c"
|
resolved "https://registry.yarnpkg.com/p-defer/-/p-defer-1.0.0.tgz#9f6eb182f6c9aa8cd743004a7d4f96b196b0fb0c"
|
||||||
@ -8900,10 +8758,10 @@ prepend-http@^1.0.0:
|
|||||||
resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc"
|
resolved "https://registry.yarnpkg.com/prepend-http/-/prepend-http-1.0.4.tgz#d4f4562b0ce3696e41ac52d0e002e57a635dc6dc"
|
||||||
integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=
|
integrity sha1-1PRWKwzjaW5BrFLQ4ALlemNdxtw=
|
||||||
|
|
||||||
prettier@^1.13.0:
|
prettier@2.0.2:
|
||||||
version "1.19.1"
|
version "2.0.2"
|
||||||
resolved "https://registry.yarnpkg.com/prettier/-/prettier-1.19.1.tgz#f7d7f5ff8a9cd872a7be4ca142095956a60797cb"
|
resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.0.2.tgz#1ba8f3eb92231e769b7fcd7cb73ae1b6b74ade08"
|
||||||
integrity sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==
|
integrity sha512-5xJQIPT8BraI7ZnaDwSbu5zLrB6vvi8hVV58yHQ+QK64qrY40dULy0HSRlQ2/2IdzeBpjhDkqdcFBnFeDEMVdg==
|
||||||
|
|
||||||
pretty-bytes@^5.1.0:
|
pretty-bytes@^5.1.0:
|
||||||
version "5.3.0"
|
version "5.3.0"
|
||||||
@ -9138,16 +8996,6 @@ raw-body@2.4.0:
|
|||||||
iconv-lite "0.4.24"
|
iconv-lite "0.4.24"
|
||||||
unpipe "1.0.0"
|
unpipe "1.0.0"
|
||||||
|
|
||||||
rc@^1.2.7:
|
|
||||||
version "1.2.8"
|
|
||||||
resolved "https://registry.yarnpkg.com/rc/-/rc-1.2.8.tgz#cd924bf5200a075b83c188cd6b9e211b7fc0d3ed"
|
|
||||||
integrity sha512-y3bGgqKj3QBdxLbLkomlohkvsA8gdAiUQlSBJnBhfn+BPxg4bc62d8TcBW15wavDfgexCgccckhcZvywyQYPOw==
|
|
||||||
dependencies:
|
|
||||||
deep-extend "^0.6.0"
|
|
||||||
ini "~1.3.0"
|
|
||||||
minimist "^1.2.0"
|
|
||||||
strip-json-comments "~2.0.1"
|
|
||||||
|
|
||||||
react-addons-shallow-compare@^15.6.2:
|
react-addons-shallow-compare@^15.6.2:
|
||||||
version "15.6.2"
|
version "15.6.2"
|
||||||
resolved "https://registry.yarnpkg.com/react-addons-shallow-compare/-/react-addons-shallow-compare-15.6.2.tgz#198a00b91fc37623db64a28fd17b596ba362702f"
|
resolved "https://registry.yarnpkg.com/react-addons-shallow-compare/-/react-addons-shallow-compare-15.6.2.tgz#198a00b91fc37623db64a28fd17b596ba362702f"
|
||||||
@ -9223,7 +9071,7 @@ react-display-name@^0.2.3:
|
|||||||
resolved "https://registry.yarnpkg.com/react-display-name/-/react-display-name-0.2.5.tgz#304c7cbfb59ee40389d436e1a822c17fe27936c6"
|
resolved "https://registry.yarnpkg.com/react-display-name/-/react-display-name-0.2.5.tgz#304c7cbfb59ee40389d436e1a822c17fe27936c6"
|
||||||
integrity sha512-I+vcaK9t4+kypiSgaiVWAipqHRXYmZIuAiS8vzFvXHHXVigg/sMKwlRgLy6LH2i3rmP+0Vzfl5lFsFRwF1r3pg==
|
integrity sha512-I+vcaK9t4+kypiSgaiVWAipqHRXYmZIuAiS8vzFvXHHXVigg/sMKwlRgLy6LH2i3rmP+0Vzfl5lFsFRwF1r3pg==
|
||||||
|
|
||||||
react-dom@^16.11.0, react-dom@^16.3.1:
|
react-dom@^16.11.0, react-dom@^16.13.1:
|
||||||
version "16.13.1"
|
version "16.13.1"
|
||||||
resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-16.13.1.tgz#c1bd37331a0486c078ee54c4740720993b2e0e7f"
|
resolved "https://registry.yarnpkg.com/react-dom/-/react-dom-16.13.1.tgz#c1bd37331a0486c078ee54c4740720993b2e0e7f"
|
||||||
integrity sha512-81PIMmVLnCNLO/fFOQxdQkvEq/+Hfpv24XNJfpyZhTRfO0QcmQIF/PgCa1zCOj2w1hrn12MFLyaJ/G0+Mxtfag==
|
integrity sha512-81PIMmVLnCNLO/fFOQxdQkvEq/+Hfpv24XNJfpyZhTRfO0QcmQIF/PgCa1zCOj2w1hrn12MFLyaJ/G0+Mxtfag==
|
||||||
@ -9260,7 +9108,7 @@ react-input-autosize@^2.2.1:
|
|||||||
dependencies:
|
dependencies:
|
||||||
prop-types "^15.5.8"
|
prop-types "^15.5.8"
|
||||||
|
|
||||||
react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1, react-is@^16.8.4:
|
react-is@^16.6.0, react-is@^16.7.0, react-is@^16.8.1, react-is@^16.8.4, react-is@^16.9.0:
|
||||||
version "16.13.1"
|
version "16.13.1"
|
||||||
resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
|
resolved "https://registry.yarnpkg.com/react-is/-/react-is-16.13.1.tgz#789729a4dc36de2999dc156dd6c1d9c18cea56a4"
|
||||||
integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
|
integrity sha512-24e6ynE2H+OKt4kqsOvNd8kBpV65zoxbA4BVsEOB3ARVWQki/DHzaUoC5KuON/BiccDaCCTZBuOcfZs70kR8bQ==
|
||||||
@ -9280,18 +9128,16 @@ react-modal@^3.1.11:
|
|||||||
react-lifecycles-compat "^3.0.0"
|
react-lifecycles-compat "^3.0.0"
|
||||||
warning "^4.0.3"
|
warning "^4.0.3"
|
||||||
|
|
||||||
react-redux@^5.0.6:
|
react-redux@^7.2.0:
|
||||||
version "5.1.2"
|
version "7.2.0"
|
||||||
resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-5.1.2.tgz#b19cf9e21d694422727bf798e934a916c4080f57"
|
resolved "https://registry.yarnpkg.com/react-redux/-/react-redux-7.2.0.tgz#f970f62192b3981642fec46fd0db18a074fe879d"
|
||||||
integrity sha512-Ns1G0XXc8hDyH/OcBHOxNgQx9ayH3SPxBnFCOidGKSle8pKihysQw2rG/PmciUQRoclhVBO8HMhiRmGXnDja9Q==
|
integrity sha512-EvCAZYGfOLqwV7gh849xy9/pt55rJXPwmYvI4lilPM5rUT/1NxuuN59ipdBksRVSvz0KInbPnp4IfoXJXCqiDA==
|
||||||
dependencies:
|
dependencies:
|
||||||
"@babel/runtime" "^7.1.2"
|
"@babel/runtime" "^7.5.5"
|
||||||
hoist-non-react-statics "^3.3.0"
|
hoist-non-react-statics "^3.3.0"
|
||||||
invariant "^2.2.4"
|
loose-envify "^1.4.0"
|
||||||
loose-envify "^1.1.0"
|
prop-types "^15.7.2"
|
||||||
prop-types "^15.6.1"
|
react-is "^16.9.0"
|
||||||
react-is "^16.6.0"
|
|
||||||
react-lifecycles-compat "^3.0.0"
|
|
||||||
|
|
||||||
react-router-dom@^4.2.2:
|
react-router-dom@^4.2.2:
|
||||||
version "4.3.1"
|
version "4.3.1"
|
||||||
@ -9416,7 +9262,7 @@ react-virtualized@^9.12.0:
|
|||||||
prop-types "^15.6.0"
|
prop-types "^15.6.0"
|
||||||
react-lifecycles-compat "^3.0.4"
|
react-lifecycles-compat "^3.0.4"
|
||||||
|
|
||||||
react@^16.13.0, react@^16.3.1:
|
react@16.13.1, react@^16.13.0:
|
||||||
version "16.13.1"
|
version "16.13.1"
|
||||||
resolved "https://registry.yarnpkg.com/react/-/react-16.13.1.tgz#2e818822f1a9743122c063d6410d85c1e3afe48e"
|
resolved "https://registry.yarnpkg.com/react/-/react-16.13.1.tgz#2e818822f1a9743122c063d6410d85c1e3afe48e"
|
||||||
integrity sha512-YMZQQq32xHLX0bz5Mnibv1/LHb3Sqzngu7xstSM+vrkE5Kzr9xE0yMByK5kMoTK30YVJE61WfbxIFFvfeDKT1w==
|
integrity sha512-YMZQQq32xHLX0bz5Mnibv1/LHb3Sqzngu7xstSM+vrkE5Kzr9xE0yMByK5kMoTK30YVJE61WfbxIFFvfeDKT1w==
|
||||||
@ -9459,7 +9305,7 @@ read-pkg@^3.0.0:
|
|||||||
normalize-package-data "^2.3.2"
|
normalize-package-data "^2.3.2"
|
||||||
path-type "^3.0.0"
|
path-type "^3.0.0"
|
||||||
|
|
||||||
"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
|
"readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
|
||||||
version "2.3.7"
|
version "2.3.7"
|
||||||
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
|
resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.7.tgz#1eca1cf711aef814c04f62252a36a62f6cb23b57"
|
||||||
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
|
integrity sha512-Ebho8K4jIbHAxnuxi7o42OrZgF/ZTNcsZj6nRKyUmkhLFq8CHItp/fy6hQZuZmP/n3yZ9VBUbp4zz/mX8hmYPw==
|
||||||
@ -9516,7 +9362,7 @@ reduce-reducers@^0.4.3:
|
|||||||
resolved "https://registry.yarnpkg.com/reduce-reducers/-/reduce-reducers-0.4.3.tgz#8e052618801cd8fc2714b4915adaa8937eb6d66c"
|
resolved "https://registry.yarnpkg.com/reduce-reducers/-/reduce-reducers-0.4.3.tgz#8e052618801cd8fc2714b4915adaa8937eb6d66c"
|
||||||
integrity sha512-+CNMnI8QhgVMtAt54uQs3kUxC3Sybpa7Y63HR14uGLgI9/QR5ggHvpxwhGGe3wmx5V91YwqQIblN9k5lspAmGw==
|
integrity sha512-+CNMnI8QhgVMtAt54uQs3kUxC3Sybpa7Y63HR14uGLgI9/QR5ggHvpxwhGGe3wmx5V91YwqQIblN9k5lspAmGw==
|
||||||
|
|
||||||
redux-actions@^2.2.1:
|
redux-actions@^2.6.5:
|
||||||
version "2.6.5"
|
version "2.6.5"
|
||||||
resolved "https://registry.yarnpkg.com/redux-actions/-/redux-actions-2.6.5.tgz#bdca548768ee99832a63910c276def85e821a27e"
|
resolved "https://registry.yarnpkg.com/redux-actions/-/redux-actions-2.6.5.tgz#bdca548768ee99832a63910c276def85e821a27e"
|
||||||
integrity sha512-pFhEcWFTYNk7DhQgxMGnbsB1H2glqhQJRQrtPb96kD3hWiZRzXHwwmFPswg6V2MjraXRXWNmuP9P84tvdLAJmw==
|
integrity sha512-pFhEcWFTYNk7DhQgxMGnbsB1H2glqhQJRQrtPb96kD3hWiZRzXHwwmFPswg6V2MjraXRXWNmuP9P84tvdLAJmw==
|
||||||
@@ -9544,25 +9390,25 @@ redux-localstorage-filter@^0.1.1:
   resolved "https://registry.yarnpkg.com/redux-localstorage-filter/-/redux-localstorage-filter-0.1.1.tgz#94c5ab68d8cda479bb3cc6cdf03569f8f63a188d"
   integrity sha1-lMWraNjNpHm7PMbN8DVp+PY6GI0=

-redux-localstorage@rc:
+redux-localstorage@^1.0.0-rc5:
   version "1.0.0-rc5"
   resolved "https://registry.yarnpkg.com/redux-localstorage/-/redux-localstorage-1.0.0-rc5.tgz#7067bc4cb0b03b5c791025ac33dde6175d50d5d1"
   integrity sha1-cGe8TLCwO1x5ECWsM93mF11Q1dE=

-redux-saga@^0.16.0:
-  version "0.16.2"
-  resolved "https://registry.yarnpkg.com/redux-saga/-/redux-saga-0.16.2.tgz#993662e86bc945d8509ac2b8daba3a8c615cc971"
-  integrity sha512-iIjKnRThI5sKPEASpUvySemjzwqwI13e3qP7oLub+FycCRDysLSAOwt958niZW6LhxfmS6Qm1BzbU70w/Koc4w==
-
-redux@^3.7.2:
-  version "3.7.2"
-  resolved "https://registry.yarnpkg.com/redux/-/redux-3.7.2.tgz#06b73123215901d25d065be342eb026bc1c8537b"
-  integrity sha512-pNqnf9q1hI5HHZRBkj3bAngGZW/JMCmexDlOxw4XagXY2o1327nHH54LoTjiPJ0gizoqPDRqWyX/00g0hD6w+A==
+redux-saga@^1.1.3:
+  version "1.1.3"
+  resolved "https://registry.yarnpkg.com/redux-saga/-/redux-saga-1.1.3.tgz#9f3e6aebd3c994bbc0f6901a625f9a42b51d1112"
+  integrity sha512-RkSn/z0mwaSa5/xH/hQLo8gNf4tlvT18qXDNvedihLcfzh+jMchDgaariQoehCpgRltEm4zHKJyINEz6aqswTw==
   dependencies:
-    lodash "^4.2.1"
-    lodash-es "^4.2.1"
-    loose-envify "^1.1.0"
-    symbol-observable "^1.0.3"
+    "@redux-saga/core" "^1.1.3"
+
+redux@^4.0.4, redux@^4.0.5:
+  version "4.0.5"
+  resolved "https://registry.yarnpkg.com/redux/-/redux-4.0.5.tgz#4db5de5816e17891de8a80c424232d06f051d93f"
+  integrity sha512-VSz1uMAH24DM6MF72vcojpYPtrTUu3ByVWfPL1nPfVRb5mZVTve5GnNCUV53QM/BZ66xfWrm0CTWoM+Xlz8V1w==
+  dependencies:
+    loose-envify "^1.4.0"
+    symbol-observable "^1.2.0"

 regenerate-unicode-properties@^8.2.0:
   version "8.2.0"
@@ -9742,10 +9588,10 @@ requires-port@^1.0.0:
   resolved "https://registry.yarnpkg.com/requires-port/-/requires-port-1.0.0.tgz#925d2601d39ac485e091cf0da5c6e694dc3dcaff"
   integrity sha1-kl0mAdOaxIXgkc8NpcbmlNw9yv8=

-reselect@^3.0.1:
-  version "3.0.1"
-  resolved "https://registry.yarnpkg.com/reselect/-/reselect-3.0.1.tgz#efdaa98ea7451324d092b2b2163a6a1d7a9a2147"
-  integrity sha1-79qpjqdFEyTQkrKyFjpqHXqaIUc=
+reselect@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/reselect/-/reselect-4.0.0.tgz#f2529830e5d3d0e021408b246a206ef4ea4437f7"
+  integrity sha512-qUgANli03jjAyGlnbYVAV5vvnOmJnODyABz51RdBN7M4WaVu8mecZWgyQNkG8Yqe3KRGRt0l4K4B3XVEULC4CA==

 resolve-cwd@^2.0.0:
   version "2.0.0"
@@ -9803,9 +9649,9 @@ resolve@1.15.0:
     path-parse "^1.0.6"

 resolve@^1.10.0, resolve@^1.12.0, resolve@^1.13.1, resolve@^1.15.1, resolve@^1.3.2, resolve@^1.8.1:
-  version "1.16.0"
-  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.16.0.tgz#063dc704fa3413e13ac1d0d1756a7cbfe95dd1a7"
-  integrity sha512-LarL/PIKJvc09k1jaeT4kQb/8/7P+qV4qSnN2K80AES+OHdfZELAKVOBjxsvtToT/uLOfFbvYvKfZmV8cee7nA==
+  version "1.16.1"
+  resolved "https://registry.yarnpkg.com/resolve/-/resolve-1.16.1.tgz#49fac5d8bacf1fd53f200fa51247ae736175832c"
+  integrity sha512-rmAglCSqWWMrrBv/XM6sW0NuRFiKViw/W4d9EbC4pt+49H8JwHy+mcGmALTEg504AUDcLTvb1T2q3E9AnmY+ig==
   dependencies:
     path-parse "^1.0.6"

@@ -9857,7 +9703,7 @@ rimraf@2.6.3:
   dependencies:
     glob "^7.1.3"

-rimraf@^2.5.4, rimraf@^2.6.1, rimraf@^2.6.3, rimraf@^2.7.1:
+rimraf@^2.5.4, rimraf@^2.6.3, rimraf@^2.7.1:
   version "2.7.1"
   resolved "https://registry.yarnpkg.com/rimraf/-/rimraf-2.7.1.tgz#35797f13a7fdadc566142c29d4f07ccad483e3ec"
   integrity sha512-uWjbaKIK3T1OSVptzX7Nl6PvQ3qAGtKEtVRjRuazjfL3Bx5eI409VZSqgND+4UNnmzLVdPj9FqFJNPqBZFve4w==
@@ -9981,9 +9827,9 @@ schema-utils@^1.0.0:
     ajv-keywords "^3.1.0"

 schema-utils@^2.5.0, schema-utils@^2.6.0, schema-utils@^2.6.1, schema-utils@^2.6.4, schema-utils@^2.6.5:
-  version "2.6.5"
-  resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.6.5.tgz#c758f0a7e624263073d396e29cd40aa101152d8a"
-  integrity sha512-5KXuwKziQrTVHh8j/Uxz+QUbxkaLW9X/86NBlx/gnKgtsZA2GIVMUn17qWhRFwF8jdYb3Dig5hRO/W5mZqy6SQ==
+  version "2.6.6"
+  resolved "https://registry.yarnpkg.com/schema-utils/-/schema-utils-2.6.6.tgz#299fe6bd4a3365dc23d99fd446caff8f1d6c330c"
+  integrity sha512-wHutF/WPSbIi9x6ctjGGk2Hvl0VOz5l3EKEuKbjPlB30mKZUzb9A5k9yEXRX3pwyqVLPvpfZZEllaFq/M718hA==
   dependencies:
     ajv "^6.12.0"
     ajv-keywords "^3.4.1"
@@ -10005,7 +9851,7 @@ selfsigned@^1.10.7:
   dependencies:
     node-forge "0.9.0"

-"semver@2 || 3 || 4 || 5", semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0:
+"semver@2 || 3 || 4 || 5", semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0:
   version "5.7.1"
   resolved "https://registry.yarnpkg.com/semver/-/semver-5.7.1.tgz#a954f931aeba508d307bbf069eff0c01c96116f7"
   integrity sha512-sauaDf/PZdVgrLTNYHRtpXa1iRiKcaebiKQ1BJdpQlWH2lCvexQdX55snPFyK7QzpudqbCI0qXFfOasHdyNDGQ==
@@ -10067,7 +9913,7 @@ serve-static@1.14.1:
     parseurl "~1.3.3"
     send "0.17.1"

-set-blocking@^2.0.0, set-blocking@~2.0.0:
+set-blocking@^2.0.0:
   version "2.0.0"
   resolved "https://registry.yarnpkg.com/set-blocking/-/set-blocking-2.0.0.tgz#045f9782d011ae9a6803ddd382b24392b3d890f7"
   integrity sha1-BF+XgtARrppoA93TgrJDkrPYkPc=
@@ -10284,9 +10130,9 @@ source-map-resolve@^0.5.0, source-map-resolve@^0.5.2:
     urix "^0.1.0"

 source-map-support@^0.5.6, source-map-support@~0.5.12:
-  version "0.5.16"
-  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042"
-  integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==
+  version "0.5.18"
+  resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.18.tgz#f5f33489e270bd7f7d7e7b8debf283f3a4066960"
+  integrity sha512-9luZr/BZ2QeU6tO2uG8N2aZpVSli4TSAOAqFOyTO51AJcD9P99c0K1h6dD6r6qo5dyT44BR5exweOaLLeldTkQ==
   dependencies:
     buffer-from "^1.0.0"
     source-map "^0.6.0"
@@ -10315,9 +10161,9 @@ spdx-correct@^3.0.0:
     spdx-license-ids "^3.0.0"

 spdx-exceptions@^2.1.0:
-  version "2.2.0"
-  resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.2.0.tgz#2ea450aee74f2a89bfb94519c07fcd6f41322977"
-  integrity sha512-2XQACfElKi9SlVb1CYadKDXvoajPgBVPn/gOQLrTvHdElaVhr7ZEbqJaRnJLVNeaI4cMEAgVCeBMKF6MWRDCRA==
+  version "2.3.0"
+  resolved "https://registry.yarnpkg.com/spdx-exceptions/-/spdx-exceptions-2.3.0.tgz#3f28ce1a77a00372683eade4a433183527a2163d"
+  integrity sha512-/tTrYOC7PPI1nUAgx34hUpqXuyJG+DTHJTnIULG4rDygi4xu/tfgmq1e1cIRwRzwZgo4NLySi+ricLkZkw4i5A==

 spdx-expression-parse@^3.0.0:
   version "3.0.0"
@@ -10487,7 +10333,7 @@ string-width@^1.0.1:
     is-fullwidth-code-point "^1.0.0"
     strip-ansi "^3.0.0"

-"string-width@^1.0.2 || 2", string-width@^2.0.0, string-width@^2.1.1:
+string-width@^2.0.0, string-width@^2.1.1:
   version "2.1.1"
   resolved "https://registry.yarnpkg.com/string-width/-/string-width-2.1.1.tgz#ab93f27a8dc13d28cac815c462143a6d9012ae9e"
   integrity sha512-nOqH59deCq9SRHlxq1Aw85Jnt4w6KvLKqWVik6oA9ZklXLNIOlqg4F2yrT1MVaTjAqvVwdfeZ7w7aCvJD7ugkw==
@@ -10633,11 +10479,6 @@ strip-json-comments@^3.0.1:
   resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-3.1.0.tgz#7638d31422129ecf4457440009fba03f9f9ac180"
   integrity sha512-e6/d0eBu7gHtdCqFt0xJr642LdToM5/cN4Qb9DbHjVx1CP5RyeM+zH7pbecEmDv/lBqb0QH+6Uqq75rxFPkM0w==

-strip-json-comments@~2.0.1:
-  version "2.0.1"
-  resolved "https://registry.yarnpkg.com/strip-json-comments/-/strip-json-comments-2.0.1.tgz#3c531942e908c2697c0ec344858c286c7ca0a60a"
-  integrity sha1-PFMZQukIwml8DsNEhYwobHygpgo=
-
 style-loader@0.23.1:
   version "0.23.1"
   resolved "https://registry.yarnpkg.com/style-loader/-/style-loader-0.23.1.tgz#cb9154606f3e771ab6c4ab637026a1049174d925"
@@ -10745,7 +10586,7 @@ svgo@^1.0.0, svgo@^1.2.2:
     unquote "~1.1.1"
     util.promisify "~1.0.0"

-symbol-observable@^1.0.3, symbol-observable@^1.0.4:
+symbol-observable@^1.0.4, symbol-observable@^1.2.0:
   version "1.2.0"
   resolved "https://registry.yarnpkg.com/symbol-observable/-/symbol-observable-1.2.0.tgz#c22688aed4eab3cdc2dfeacbb561660560a00804"
   integrity sha512-e900nM8RRtGhlV36KGEU9k65K3mPb1WV70OdjfxlG2EAuM1noi/E/BaW/uMhL7bPEssK8QV57vN3esixjUvcXQ==
@@ -10770,19 +10611,6 @@ tapable@^1.0.0, tapable@^1.1.3:
   resolved "https://registry.yarnpkg.com/tapable/-/tapable-1.1.3.tgz#a1fccc06b58db61fd7a45da2da44f5f3a3e67ba2"
   integrity sha512-4WK/bYZmj8xLr+HUCODHGF1ZFzsYffasLUgEiMBY4fgtltdO6B4WJtlSbPaDTLpYTcGVwM2qLnFTICEcNxs3kA==

-tar@^4.4.2:
-  version "4.4.13"
-  resolved "https://registry.yarnpkg.com/tar/-/tar-4.4.13.tgz#43b364bc52888d555298637b10d60790254ab525"
-  integrity sha512-w2VwSrBoHa5BsSyH+KxEqeQBAllHhccyMFVHtGtdMpF4W7IRWfZjFiQceJPChOeTsSDVUpER2T8FA93pr0L+QA==
-  dependencies:
-    chownr "^1.1.1"
-    fs-minipass "^1.2.5"
-    minipass "^2.8.6"
-    minizlib "^1.2.1"
-    mkdirp "^0.5.0"
-    safe-buffer "^5.1.2"
-    yallist "^3.0.3"
-
 terser-webpack-plugin@2.3.5:
   version "2.3.5"
   resolved "https://registry.yarnpkg.com/terser-webpack-plugin/-/terser-webpack-plugin-2.3.5.tgz#5ad971acce5c517440ba873ea4f09687de2f4a81"
@@ -11071,6 +10899,25 @@ typedarray@^0.0.6:
   resolved "https://registry.yarnpkg.com/typedarray/-/typedarray-0.0.6.tgz#867ac74e3864187b1d3d47d996a78ec5c8830777"
   integrity sha1-hnrHTjhkGHsdPUfZlqeOxciDB3c=

+typescript-compare@^0.0.2:
+  version "0.0.2"
+  resolved "https://registry.yarnpkg.com/typescript-compare/-/typescript-compare-0.0.2.tgz#7ee40a400a406c2ea0a7e551efd3309021d5f425"
+  integrity sha512-8ja4j7pMHkfLJQO2/8tut7ub+J3Lw2S3061eJLFQcvs3tsmJKp8KG5NtpLn7KcY2w08edF74BSVN7qJS0U6oHA==
+  dependencies:
+    typescript-logic "^0.0.0"
+
+typescript-logic@^0.0.0:
+  version "0.0.0"
+  resolved "https://registry.yarnpkg.com/typescript-logic/-/typescript-logic-0.0.0.tgz#66ebd82a2548f2b444a43667bec120b496890196"
+  integrity sha512-zXFars5LUkI3zP492ls0VskH3TtdeHCqu0i7/duGt60i5IGPIpAHE/DWo5FqJ6EjQ15YKXrt+AETjv60Dat34Q==
+
+typescript-tuple@^2.2.1:
+  version "2.2.1"
+  resolved "https://registry.yarnpkg.com/typescript-tuple/-/typescript-tuple-2.2.1.tgz#7d9813fb4b355f69ac55032e0363e8bb0f04dad2"
+  integrity sha512-Zcr0lbt8z5ZdEzERHAMAniTiIKerFCMgd7yjq1fPnDJ43et/k9twIFQMUYff9k5oXcsQ0WpvFcgzK2ZKASoW6Q==
+  dependencies:
+    typescript-compare "^0.0.2"
+
 ua-parser-js@^0.7.18:
   version "0.7.21"
   resolved "https://registry.yarnpkg.com/ua-parser-js/-/ua-parser-js-0.7.21.tgz#853cf9ce93f642f67174273cc34565ae6f308777"
@@ -11555,13 +11402,6 @@ which@^2.0.1:
   dependencies:
     isexe "^2.0.0"

-wide-align@^1.1.0:
-  version "1.1.3"
-  resolved "https://registry.yarnpkg.com/wide-align/-/wide-align-1.1.3.tgz#ae074e6bdc0c14a431e804e624549c633b000457"
-  integrity sha512-QGkOQc8XL6Bt5PwnsExKBPuMKBxnGxWWW3fU55Xt4feHozMUhdUMaBCk290qpm/wG5u/RSKzwdAC4i51YigihA==
-  dependencies:
-    string-width "^1.0.2 || 2"
-
 word-wrap@~1.2.3:
   version "1.2.3"
   resolved "https://registry.yarnpkg.com/word-wrap/-/word-wrap-1.2.3.tgz#610636f6b1f703891bd34771ccb17fb93b47079c"
@@ -11795,7 +11635,7 @@ xtend@^4.0.0, xtend@~4.0.1:
   resolved "https://registry.yarnpkg.com/y18n/-/y18n-4.0.0.tgz#95ef94f85ecc81d007c264e190a120f0a3c8566b"
   integrity sha512-r9S/ZyXu/Xu9q1tYlpsLIsa3EeLXXk0VwlxqTcFRfg9EhMW+17kbt9G0NrgCmhGb5vT2hyhJZLfDGx+7+5Uj/w==

-yallist@^3.0.0, yallist@^3.0.2, yallist@^3.0.3:
+yallist@^3.0.2:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
   integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
@@ -11806,11 +11646,11 @@ yallist@^4.0.0:
   integrity sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==

 yaml@^1.7.2:
-  version "1.8.3"
-  resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.8.3.tgz#2f420fca58b68ce3a332d0ca64be1d191dd3f87a"
-  integrity sha512-X/v7VDnK+sxbQ2Imq4Jt2PRUsRsP7UcpSl3Llg6+NRRqWLIvxkMFYtH1FmvwNGYRKKPa+EPA4qDBlI9WVG1UKw==
+  version "1.9.2"
+  resolved "https://registry.yarnpkg.com/yaml/-/yaml-1.9.2.tgz#f0cfa865f003ab707663e4f04b3956957ea564ed"
+  integrity sha512-HPT7cGGI0DuRcsO51qC1j9O16Dh1mZ2bnXwsi0jrSpsLz0WxOLSLXfkABVl6bZO629py3CU+OMJtpNHDLB97kg==
   dependencies:
-    "@babel/runtime" "^7.8.7"
+    "@babel/runtime" "^7.9.2"

 yargs-parser@^11.1.1:
   version "11.1.1"