fix: bug with shared pointer in new jit mode

Vikram Rangnekar
2020-06-03 18:19:07 -04:00
parent 0ce129de14
commit 82cc712a93
14 changed files with 75 additions and 56 deletions

View File

@@ -85,10 +85,10 @@ type SuperGraph struct {
 	allowList *allow.List
 	encKey [32]byte
 	hashSeed maphash.Seed
-	queries map[uint64]*query
+	queries map[uint64]query
 	roles map[string]*Role
 	getRole *sql.Stmt
-	rmap map[uint64]*resolvFn
+	rmap map[uint64]resolvFn
 	abacEnabled bool
 	anonExists bool
 	qc *qcode.Compiler
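The commit title calls out a shared-pointer bug: when several map keys hold the same *query pointer, preparing the statement through one key mutates the entry seen by every other key, so the struct now stores plain query and resolvFn values and each key gets its own copy. A minimal sketch of the aliasing difference, using a made-up stand-in struct rather than the real query type:

package main

import "fmt"

// stand-in for the real query struct; fields are illustrative only
type entry struct{ role string }

func main() {
	// pointer values: both keys alias the same entry
	byPtr := map[string]*entry{}
	e := &entry{role: "user"}
	byPtr["user"] = e
	byPtr["anon"] = e
	byPtr["user"].role = "prepared-for-user"
	fmt.Println(byPtr["anon"].role) // prints "prepared-for-user": both keys alias one entry

	// plain values: each key stores an independent copy
	byVal := map[string]entry{}
	byVal["user"] = entry{role: "user"}
	byVal["anon"] = entry{role: "anon"}
	v := byVal["user"]
	v.role = "prepared-for-user"
	byVal["user"] = v
	fmt.Println(byVal["anon"].role) // still "anon": the copy under "anon" is untouched
}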

View File

@@ -179,7 +179,7 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
 	}
 	if q.sd == nil {
-		q.Do(func() { c.sg.prepare(q, role) })
+		q.Do(func() { c.sg.prepare(&q, role) })
 		if q.err != nil {
 			return nil, nil, err
@@ -196,6 +196,8 @@ func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
 		return nil, nil, err
 	}
+	fmt.Println(">>", varsList)
 	if useTx {
 		row = tx.Stmt(q.sd).QueryRow(varsList...)
 	} else {
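Because the queries map now holds values, the q used here is presumably a local copy, and prepare receives &q so it can fill in the compiled statement and error on that copy rather than on a throwaway argument. A rough, hypothetical illustration of why the pointer matters (this prepare and its field are invented, not the real signature):

package main

import "fmt"

// stand-in for the query struct; sd mimics the prepared-statement field
type query struct{ sd string }

// prepare must take a pointer: with a value parameter it would
// only mutate its own copy and the caller would see nothing.
func prepare(q *query) { q.sd = "prepared" }

func main() {
	q := query{}      // local copy, as when reading from a map of values
	prepare(&q)       // pass the address so this copy itself is updated
	fmt.Println(q.sd) // "prepared"
}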

View File

@@ -63,7 +63,7 @@ func (sg *SuperGraph) initPrepared() error {
 		return fmt.Errorf("role query: %w", err)
 	}
-	sg.queries = make(map[uint64]*query)
+	sg.queries = make(map[uint64]query)
 	list, err := sg.allowList.Load()
 	if err != nil {
@@ -77,22 +77,19 @@ func (sg *SuperGraph) initPrepared() error {
 		if len(v.Query) == 0 {
 			continue
 		}
-		q := &query{ai: v, qt: qcode.GetQType(v.Query)}
+		qt := qcode.GetQType(v.Query)
-		switch q.qt {
+		switch qt {
 		case qcode.QTQuery:
-			sg.queries[queryID(&h, v.Name, "user")] = q
-			h.Reset()
+			sg.queries[queryID(&h, v.Name, "user")] = query{ai: v, qt: qt}
 			if sg.anonExists {
-				sg.queries[queryID(&h, v.Name, "anon")] = q
-				h.Reset()
+				sg.queries[queryID(&h, v.Name, "anon")] = query{ai: v, qt: qt}
 			}
 		case qcode.QTMutation:
 			for _, role := range sg.conf.Roles {
-				sg.queries[queryID(&h, v.Name, role.Name)] = q
-				h.Reset()
+				sg.queries[queryID(&h, v.Name, role.Name)] = query{ai: v, qt: qt}
 			}
 		}
 	}
@@ -166,5 +163,8 @@ func (sg *SuperGraph) initAllowList() error {
 func queryID(h *maphash.Hash, name string, role string) uint64 {
 	h.WriteString(name)
 	h.WriteString(role)
-	return h.Sum64()
+	v := h.Sum64()
+	h.Reset()
+	return v
 }
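queryID now resets the maphash.Hash itself after taking the sum, which is why the h.Reset() calls disappear from the switch above. A small standalone sketch of this seed/write/sum/reset pattern with the standard library's hash/maphash (the key names and seed here are made up):

package main

import (
	"fmt"
	"hash/maphash"
)

// queryID-style helper: hash two strings, then reset so the next
// call starts from a clean state on the same seeded hash.
func keyFor(h *maphash.Hash, name, role string) uint64 {
	h.WriteString(name)
	h.WriteString(role)
	v := h.Sum64()
	h.Reset() // without this, the next key would hash the concatenation of both
	return v
}

func main() {
	seed := maphash.MakeSeed()

	var h maphash.Hash
	h.SetSeed(seed)

	a := keyFor(&h, "getUsers", "user")
	b := keyFor(&h, "getUsers", "anon")
	fmt.Println(a == b) // false: same name, different role

	// Reset keeps the seed, so re-hashing the same inputs reproduces the value.
	c := keyFor(&h, "getUsers", "user")
	fmt.Println(a == c) // true
}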

View File

@@ -4,10 +4,10 @@ import (
 	"bytes"
 	"errors"
 	"fmt"
+	"hash/maphash"
 	"net/http"
 	"sync"
-	"github.com/cespare/xxhash/v2"
 	"github.com/dosco/super-graph/core/internal/qcode"
 	"github.com/dosco/super-graph/jsn"
 )
@@ -16,12 +16,13 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
 	var err error
 	sel := st.qc.Selects
-	h := xxhash.New()
+	h := maphash.Hash{}
+	h.SetSeed(sg.hashSeed)
 	// fetch the field name used within the db response json
 	// that are used to mark insertion points and the mapping between
 	// those field names and their select objects
-	fids, sfmap := sg.parentFieldIds(h, sel, st.md.Skipped)
+	fids, sfmap := sg.parentFieldIds(&h, sel, st.md.Skipped)
 	// fetch the field values of the marked insertion points
 	// these values contain the id to be used with fetching remote data
@@ -30,10 +31,10 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
 	switch {
 	case len(from) == 1:
-		to, err = sg.resolveRemote(hdr, h, from[0], sel, sfmap)
+		to, err = sg.resolveRemote(hdr, &h, from[0], sel, sfmap)
 	case len(from) > 1:
-		to, err = sg.resolveRemotes(hdr, h, from, sel, sfmap)
+		to, err = sg.resolveRemotes(hdr, &h, from, sel, sfmap)
 	default:
 		return nil, errors.New("something wrong no remote ids found in db response")
@@ -55,7 +56,7 @@ func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]
 func (sg *SuperGraph) resolveRemote(
 	hdr http.Header,
-	h *xxhash.Digest,
+	h *maphash.Hash,
 	field jsn.Field,
 	sel []qcode.Select,
 	sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@@ -66,7 +67,8 @@ func (sg *SuperGraph) resolveRemote(
 	to := toA[:1]
 	// use the json key to find the related Select object
-	k1 := xxhash.Sum64(field.Key)
+	h.Write(field.Key)
+	k1 := h.Sum64()
 	s, ok := sfmap[k1]
 	if !ok {
@@ -117,7 +119,7 @@ func (sg *SuperGraph) resolveRemote(
 func (sg *SuperGraph) resolveRemotes(
 	hdr http.Header,
-	h *xxhash.Digest,
+	h *maphash.Hash,
 	from []jsn.Field,
 	sel []qcode.Select,
 	sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
@@ -134,7 +136,8 @@ func (sg *SuperGraph) resolveRemotes(
 	for i, id := range from {
 		// use the json key to find the related Select object
-		k1 := xxhash.Sum64(id.Key)
+		h.Write(id.Key)
+		k1 := h.Sum64()
 		s, ok := sfmap[k1]
 		if !ok {
@@ -192,7 +195,7 @@ func (sg *SuperGraph) resolveRemotes(
 	return to, cerr
 }
-func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
+func (sg *SuperGraph) parentFieldIds(h *maphash.Hash, sel []qcode.Select, skipped uint32) (
 	[][]byte,
 	map[uint64]*qcode.Select) {
@@ -227,8 +230,8 @@ func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipp
 			fm[n] = r.IDField
 			n++
-			k := xxhash.Sum64(r.IDField)
-			sm[k] = s
+			h.Write(r.IDField)
+			sm[h.Sum64()] = s
 		}
 	}
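None of the following is repository code; it is a standalone sketch of the property the remote-join change relies on: the side that builds sfmap (parentFieldIds) and the side that looks keys up (resolveRemote / resolveRemotes) must hash field keys with a maphash.Hash carrying the same seed, which sg.hashSeed provides in the real code. The helper below also resets before each key, something the one-shot xxhash.Sum64 calls used to get implicitly:

package main

import (
	"fmt"
	"hash/maphash"
)

func sum(h *maphash.Hash, key []byte) uint64 {
	h.Reset() // start each key from the seed, as a one-shot xxhash.Sum64 would
	h.Write(key)
	return h.Sum64()
}

func main() {
	seed := maphash.MakeSeed()

	// "producer" side: build the map keyed by hashed field names
	var hp maphash.Hash
	hp.SetSeed(seed)
	sfmap := map[uint64]string{sum(&hp, []byte("products")): "products select"}

	// "consumer" side: a hash with the same seed finds the entry
	var hc maphash.Hash
	hc.SetSeed(seed)
	if s, ok := sfmap[sum(&hc, []byte("products"))]; ok {
		fmt.Println("found:", s)
	}

	// a differently seeded hash would not find it
	var other maphash.Hash
	other.SetSeed(maphash.MakeSeed())
	_, ok := sfmap[sum(&other, []byte("products"))]
	fmt.Println("found with different seed:", ok) // false (with overwhelming probability)
}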

View File

@@ -2,11 +2,11 @@ package core
 import (
 	"fmt"
+	"hash/maphash"
 	"io/ioutil"
 	"net/http"
 	"strings"
-	"github.com/cespare/xxhash/v2"
 	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/jsn"
 )
@@ -19,7 +19,7 @@ type resolvFn struct {
 func (sg *SuperGraph) initResolvers() error {
 	var err error
-	sg.rmap = make(map[uint64]*resolvFn)
+	sg.rmap = make(map[uint64]resolvFn)
 	for _, t := range sg.conf.Tables {
 		err = sg.initRemotes(t)
@@ -36,7 +36,8 @@ func (sg *SuperGraph) initResolvers() error {
 }
 func (sg *SuperGraph) initRemotes(t Table) error {
-	h := xxhash.New()
+	h := maphash.Hash{}
+	h.SetSeed(sg.hashSeed)
 	for _, r := range t.Remotes {
 		// defines the table column to be used as an id in the
@@ -75,17 +76,18 @@ func (sg *SuperGraph) initRemotes(t Table) error {
 			path = append(path, []byte(p))
 		}
-		rf := &resolvFn{
+		rf := resolvFn{
 			IDField: []byte(idk),
 			Path: path,
 			Fn: fn,
 		}
 		// index resolver obj by parent and child names
-		sg.rmap[mkkey(h, r.Name, t.Name)] = rf
+		sg.rmap[mkkey(&h, r.Name, t.Name)] = rf
 		// index resolver obj by IDField
-		sg.rmap[xxhash.Sum64(rf.IDField)] = rf
+		h.Write(rf.IDField)
+		sg.rmap[h.Sum64()] = rf
 	}
 	return nil
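initRemotes registers every resolver under two keys, a composite parent/child key built by mkkey and the hash of its IDField, so the resolver can be found both from the configured relationship and from an id field encountered in the response JSON; storing resolvFn by value simply makes those two entries independent copies, which is harmless because they are only read after init. A rough sketch of the double-indexing idea (the resolver type, fields, and names are invented for illustration):

package main

import (
	"fmt"
	"hash/maphash"
)

type resolver struct {
	idField []byte
	url     string
}

// mkkey-style composite key over two strings.
func mkkey(h *maphash.Hash, k1, k2 string) uint64 {
	h.Reset()
	h.WriteString(k1)
	h.WriteString(k2)
	return h.Sum64()
}

func main() {
	var h maphash.Hash
	h.SetSeed(maphash.MakeSeed())

	rmap := map[uint64]resolver{}
	rf := resolver{idField: []byte("payment_id"), url: "https://payments.example/api"}

	// index by parent/child names...
	rmap[mkkey(&h, "payments", "customers")] = rf

	// ...and by the id field, so a key seen in the DB response maps to the same resolver
	h.Reset()
	h.Write(rf.idField)
	rmap[h.Sum64()] = rf

	h.Reset()
	h.Write([]byte("payment_id"))
	fmt.Println(rmap[h.Sum64()].url) // both entries carry the same resolver config
}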

View File

@@ -1,11 +1,9 @@
 package core
-import (
-	"github.com/cespare/xxhash/v2"
-)
+import "hash/maphash"
 // nolint: errcheck
-func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
+func mkkey(h *maphash.Hash, k1 string, k2 string) uint64 {
 	h.WriteString(k1)
 	h.WriteString(k2)
 	v := h.Sum64()