Compare commits

2 Commits
v0.6 ... v0.7

Author SHA1 Message Date
d5dcff2810 Failure to prepare statements should be a warning 2019-09-29 18:53:51 -04:00
20ddfb26f3 Fix duplicate column bug 2019-09-29 15:20:59 -04:00
7 changed files with 125 additions and 81 deletions

View File

@@ -1,32 +1,5 @@
# http://localhost:8080/
variables {
"update": {
"name": "Hellooooo",
"description": "World",
"created_at": "now",
"updated_at": "now"
},
"user": 123
}
mutation {
products(update: $update, where: {id: {eq: 134}}) {
id
name
description
}
}
query {
me {
id
email
full_name
}
}
variables {
"update": {
"name": "Hellooooo",
@@ -67,3 +40,30 @@ query {
}
}
variables {
"update": {
"name": "Hellooooo",
"description": "World",
"created_at": "now",
"updated_at": "now"
},
"user": 123
}
mutation {
products(update: $update, where: {id: {eq: 134}}) {
id
name
description
}
}
query {
me {
id
email
full_name
}
}

View File

@@ -2,9 +2,35 @@ package psql
import (
"encoding/json"
"fmt"
"testing"
)
func simpleInsert(t *testing.T) {
gql := `mutation {
user(insert: $data) {
id
}
}`
sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO users (full_name, email) SELECT full_name, email FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "user_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "user"."id" FROM "users" AS "user" WHERE ((("user"."id") = {{user_id}})) LIMIT ('1') :: integer) AS "user_0" LIMIT ('1') :: integer) AS "done_1337";`
vars := map[string]json.RawMessage{
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
if err != nil {
t.Fatal(err)
}
fmt.Println(">", string(resSQL))
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func singleInsert(t *testing.T) {
gql := `mutation {
product(id: 15, insert: $insert) {
@@ -102,6 +128,7 @@ func delete(t *testing.T) {
}
func TestCompileInsert(t *testing.T) {
t.Run("simpleInsert", simpleInsert)
t.Run("singleInsert", singleInsert)
t.Run("bulkInsert", bulkInsert)
t.Run("singleUpdate", singleUpdate)

View File

@@ -17,21 +17,20 @@ type DBTable struct {
func GetTables(dbc *pgxpool.Conn) ([]*DBTable, error) {
sqlStmt := `
SELECT
c.relname as "name",
CASE c.relkind WHEN 'r' THEN 'table'
SELECT
c.relname as "name",
CASE c.relkind WHEN 'r' THEN 'table'
WHEN 'v' THEN 'view'
WHEN 'm' THEN 'materialized view'
WHEN 'f' THEN 'foreign table'
END as "type"
FROM pg_catalog.pg_class c
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
LEFT JOIN pg_catalog.pg_namespace n ON n.oid = c.relnamespace
WHERE c.relkind IN ('r','v','m','f','')
AND n.nspname <> 'pg_catalog'
AND n.nspname <> 'information_schema'
AND n.nspname !~ '^pg_toast'
AND pg_catalog.pg_table_is_visible(c.oid);
`
AND n.nspname <> 'pg_catalog'
AND n.nspname <> 'information_schema'
AND n.nspname !~ '^pg_toast'
AND pg_catalog.pg_table_is_visible(c.oid);`
var tables []*DBTable
@@ -67,41 +66,39 @@ type DBColumn struct {
func GetColumns(dbc *pgxpool.Conn, schema, table string) ([]*DBColumn, error) {
sqlStmt := `
SELECT
f.attnum AS id,
f.attname AS name,
f.attnotnull AS notnull,
pg_catalog.format_type(f.atttypid,f.atttypmod) AS type,
CASE
WHEN p.contype = 'p' THEN true
ELSE false
END AS primarykey,
CASE
WHEN p.contype = 'u' THEN true
ELSE false
END AS uniquekey,
CASE
WHEN p.contype = 'f' THEN g.relname
ELSE ''::text
END AS foreignkey,
CASE
WHEN p.contype = 'f' THEN p.confkey
ELSE ARRAY[]::int2[]
END AS foreignkey_fieldnum
FROM pg_attribute f
JOIN pg_class c ON c.oid = f.attrelid
JOIN pg_type t ON t.oid = f.atttypid
LEFT JOIN pg_attrdef d ON d.adrelid = c.oid AND d.adnum = f.attnum
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
LEFT JOIN pg_constraint p ON p.conrelid = c.oid AND f.attnum = ANY (p.conkey)
LEFT JOIN pg_class AS g ON p.confrelid = g.oid
WHERE c.relkind = 'r'::char
AND n.nspname = $1 -- Replace with Schema name
AND c.relname = $2 -- Replace with table name
AND f.attnum > 0 ORDER BY id;
`
var cols []*DBColumn
SELECT
f.attnum AS id,
f.attname AS name,
f.attnotnull AS notnull,
pg_catalog.format_type(f.atttypid,f.atttypmod) AS type,
CASE
WHEN p.contype = ('p'::char) THEN true
ELSE false
END AS primarykey,
CASE
WHEN p.contype = ('u'::char) THEN true
ELSE false
END AS uniquekey,
CASE
WHEN p.contype = ('f'::char) THEN g.relname
ELSE ''::text
END AS foreignkey,
CASE
WHEN p.contype = ('f'::char) THEN p.confkey
ELSE ARRAY[]::int2[]
END AS foreignkey_fieldnum
FROM pg_attribute f
JOIN pg_class c ON c.oid = f.attrelid
LEFT JOIN pg_attrdef d ON d.adrelid = c.oid AND d.adnum = f.attnum
LEFT JOIN pg_namespace n ON n.oid = c.relnamespace
LEFT JOIN pg_constraint p ON p.conrelid = c.oid AND f.attnum = ANY (p.conkey)
LEFT JOIN pg_class AS g ON p.confrelid = g.oid
WHERE c.relkind = ('r'::char)
AND n.nspname = $1 -- Replace with Schema name
AND c.relname = $2 -- Replace with table name
AND f.attnum > 0
AND f.attisdropped = false
ORDER BY id;`
rows, err := dbc.Query(context.Background(), sqlStmt, schema, table)
if err != nil {
@@ -109,6 +106,8 @@ WHERE c.relkind = 'r'::char
}
defer rows.Close()
cmap := make(map[int]*DBColumn)
for rows.Next() {
c := DBColumn{}
err = rows.Scan(&c.ID, &c.Name, &c.NotNull, &c.Type, &c.PrimaryKey, &c.UniqueKey,
@@ -117,7 +116,25 @@ WHERE c.relkind = 'r'::char
return nil, err
}
c.fKeyColID.AssignTo(&c.FKeyColID)
cols = append(cols, &c)
if v, ok := cmap[c.ID]; ok {
if c.PrimaryKey {
v.PrimaryKey = true
}
if c.NotNull {
v.NotNull = true
}
if c.UniqueKey {
v.UniqueKey = true
}
} else {
cmap[c.ID] = &c
}
}
cols := make([]*DBColumn, 0, len(cmap))
for _, v := range cmap {
cols = append(cols, v)
}
return cols, nil
@@ -193,14 +210,14 @@ func (s *DBSchema) updateSchema(
// Foreign key columns in current table
colByID := make(map[int]*DBColumn)
columns := make(map[string]*DBColumn, len(cols))
colNames := make([]string, len(cols))
colNames := make([]string, 0, len(cols))
for i := range cols {
c := cols[i]
name := strings.ToLower(c.Name)
columns[name] = cols[i]
columns[name] = c
colNames = append(colNames, name)
colByID[c.ID] = cols[i]
colByID[c.ID] = c
}
singular := strings.ToLower(flect.Singularize(t.Name))
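
For context on the "Fix duplicate column bug" commit: GetColumns joins pg_attribute against pg_constraint, so a column that participates in more than one constraint (for example, a primary key that is also part of a unique or foreign-key constraint) can come back once per constraint, and appending every row directly produced duplicate entries. The rewritten loop merges rows by column ID instead. Below is a minimal, self-contained sketch of that merge pattern; the col struct and the mergeColumns helper are simplified stand-ins for illustration, not the project's DBColumn or its actual code.

package main

import "fmt"

// col is a simplified stand-in for one row returned by the column query.
type col struct {
	ID         int
	Name       string
	PrimaryKey bool
	UniqueKey  bool
	NotNull    bool
}

// mergeColumns collapses duplicate rows (one per constraint) into a single
// column per ID, OR-ing the boolean flags together.
func mergeColumns(rows []col) []*col {
	cmap := make(map[int]*col)
	order := make([]int, 0, len(rows)) // first-seen order, to keep output deterministic

	for i := range rows {
		c := rows[i]
		if v, ok := cmap[c.ID]; ok {
			v.PrimaryKey = v.PrimaryKey || c.PrimaryKey
			v.UniqueKey = v.UniqueKey || c.UniqueKey
			v.NotNull = v.NotNull || c.NotNull
			continue
		}
		cmap[c.ID] = &c
		order = append(order, c.ID)
	}

	cols := make([]*col, 0, len(order))
	for _, id := range order {
		cols = append(cols, cmap[id])
	}
	return cols
}

func main() {
	rows := []col{
		{ID: 1, Name: "id", PrimaryKey: true},
		{ID: 1, Name: "id", UniqueKey: true}, // same column, second constraint
		{ID: 2, Name: "email", NotNull: true},
	}
	for _, c := range mergeColumns(rows) {
		fmt.Printf("%s pk=%v uniq=%v notnull=%v\n", c.Name, c.PrimaryKey, c.UniqueKey, c.NotNull)
	}
}

The diff itself ranges over the map when building the final slice, so column order there is not guaranteed; the order slice above is only to make the example's output stable.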

View File

@@ -69,7 +69,7 @@ func cmdDBCreate(cmd *cobra.Command, args []string) {
}
defer conn.Close(ctx)
sql := fmt.Sprintf("create database %s", conf.DB.DBName)
sql := fmt.Sprintf("CREATE DATABASE %s", conf.DB.DBName)
_, err = conn.Exec(ctx, sql)
if err != nil {
@@ -94,7 +94,7 @@ func cmdDBDrop(cmd *cobra.Command, args []string) {
}
defer conn.Close(ctx)
sql := fmt.Sprintf("drop database if exists %s", conf.DB.DBName)
sql := fmt.Sprintf(`DROP DATABASE IF EXISTS %s`, conf.DB.DBName)
_, err = conn.Exec(ctx, sql)
if err != nil {

View File

@@ -75,7 +75,7 @@ func cmdNew(cmd *cobra.Command, args []string) {
})
ifNotExists(path.Join(appConfigPath, "seed.js"), func(p string) error {
if v, err := tmpl.get("docker-compose.yml"); err == nil {
if v, err := tmpl.get("seed.js"); err == nil {
return ioutil.WriteFile(p, v, 0644)
} else {
return err

View File

@@ -30,7 +30,7 @@ func initPreparedList() {
for k, v := range _allowList.list {
err := prepareStmt(k, v.gql, v.vars)
if err != nil {
logger.Fatal().Err(err).Send()
logger.Warn().Err(err).Send()
}
}
}
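
This hunk is the whole of the "Failure to prepare statements should be a warning" commit: an allow-list entry that fails to prepare now logs a warning and the loop moves on, where logger.Fatal() would have written the error and exited the process, skipping every remaining statement. A minimal sketch of the same continue-on-error pattern with zerolog follows; the list contents and the prepareStmt stub are assumptions for illustration, not the project's actual code.

package main

import (
	"errors"
	"os"

	"github.com/rs/zerolog"
)

// prepareStmt stands in for the real statement-preparation step; it is assumed here.
func prepareStmt(name, gql string) error {
	if gql == "" {
		return errors.New("empty query")
	}
	return nil
}

func main() {
	logger := zerolog.New(os.Stderr).With().Timestamp().Logger()

	list := map[string]string{
		"getUser":  `query { me { id } }`,
		"badQuery": ``, // this entry fails to prepare
	}

	for name, gql := range list {
		if err := prepareStmt(name, gql); err != nil {
			// Warn and continue; logger.Fatal() would call os.Exit(1)
			// and the remaining statements would never be prepared.
			logger.Warn().Err(err).Str("name", name).Send()
			continue
		}
	}
}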

View File

@@ -4,8 +4,8 @@ var users = [];
for (i = 0; i < 10; i++) {
var data = {
full_name: fake.name(),
email: fake.email(),
full_name: fake.name(),
email: fake.email()
}
var res = graphql(" \