Compare commits

17 Commits:

cc687b1b2b
3033dcf1a9
0381982d19
2b0a798faa
8b6c562ac1
a1fb89b762
c82a7bff0d
7acf28bb3c
be5d4e976a
d1b884aec6
4be4ce860b
dfa4caf540
7763251fb7
51e105699e
90694f8803
ad82f5b267
99b37a9c50
Dockerfile (16 changed lines)

@@ -6,14 +6,19 @@ RUN yarn
RUN yarn build

# stage: 2
FROM golang:1.13.4-alpine as go-build
FROM golang:1.14-alpine as go-build
RUN apk update && \
apk add --no-cache make && \
apk add --no-cache git && \
apk add --no-cache jq && \
apk add --no-cache upx=3.95-r2

RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc

ARG SOPS_VERSION=3.5.0
ADD https://github.com/mozilla/sops/releases/download/v${SOPS_VERSION}/sops-v${SOPS_VERSION}.linux /usr/local/bin/sops
RUN chmod 755 /usr/local/bin/sops

WORKDIR /app
COPY . /app

@@ -36,10 +41,13 @@ RUN mkdir -p /config
COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=go-build /app/config/* /config/
COPY --from=go-build /app/super-graph .
COPY --from=go-build /app/scripts/start.sh .
COPY --from=go-build /usr/local/bin/sops .

RUN chmod +x /super-graph
RUN chmod +x /start.sh

USER nobody

EXPOSE 8080

CMD ./super-graph serv
ENTRYPOINT ["./start.sh"]
CMD ["./super-graph", "serv"]
Makefile (4 changed lines)

@@ -35,7 +35,7 @@ $(GORICE):

$(WEB_BUILD_DIR):
@echo "First install Yarn and create a build of the web UI found under ./web"
@echo "Command: cd web && yarn build"
@echo "Command: cd web && yarn && yarn build"
@exit 1

$(GITCHGLOG):
@@ -77,7 +77,7 @@ clean:
run: clean
@go run $(BUILD_FLAGS) main.go $(ARGS)

install:
install: gen
@echo $(GOPATH)
@echo "Commit Hash: `git rev-parse HEAD`"
@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"

@@ -93,7 +93,7 @@ database:
port: 5432
dbname: app_development
user: postgres
password: ''
password: postgres

#schema: "public"
#pool_size: 10

@@ -54,7 +54,7 @@ database:
port: 5432
dbname: app_production
user: postgres
password: ''
password: postgres
#pool_size: 10
#max_retries: 0
#log_level: "debug"
@@ -1,7 +1,10 @@
version: '3.4'
services:
db:
image: postgres
image: postgres:12
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
ports:
- "5432:5432"
@@ -34,6 +34,12 @@ Super Graph has a rich feature set like integrating with your existing Ruby on R
# clone the repository
git clone https://github.com/dosco/super-graph

# run db in background
docker-compose up -d db

# see logs and wait until DB is really UP
docker-compose logs db

# setup the demo rails app & database and run it
docker-compose run rails_app rake db:create db:migrate db:seed

@@ -137,7 +143,7 @@ What if I told you Super Graph will fetch all this data with a single SQL query

```graphql
query {
products(limit 5, where: { price: { gt: 12 } }) {
products(limit: 5, where: { price: { gt: 12 } }) {
id
name
description
@@ -153,7 +159,7 @@ query {
}
}
purchases(
limit 10,
limit: 10,
order_by: { created_at: desc } ,
where: { user_id: { eq: $user_id } }
) {
@@ -216,7 +222,7 @@ You can then add your database schema to the migrations, maybe create some seed
git clone https://github.com/dosco/super-graph && cd super-graph && make install
```

And then create and launch you're new app
And then create and launch your new app

```bash
# create a new app and change to it's directory
@@ -286,6 +292,12 @@ for (i = 0; i < 10; i++) {
}
```

If you want to import a lot of data using a CSV file is the best and fastest option. The `import_csv` command uses the `COPY FROM` Postgres method to load massive amounts of data into tables. The first line of the CSV file must be the header with column names.

```javascript
var post_count = import_csv("posts", "posts.csv")
```
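
For reference, the `COPY FROM` mechanism mentioned above corresponds to pgx's `CopyFrom` call, which is what the seeding code in this changeset uses (see the `db.CopyFrom` call in `serv/cmd_seed.go` later in this diff). Below is a minimal standalone sketch; the connection string and the `posts(id, title)` table are illustrative placeholders, not part of this changeset:

```go
package main

import (
	"context"
	"fmt"

	"github.com/jackc/pgx/v4"
)

func main() {
	ctx := context.Background()

	// Placeholder connection string; adjust for your local database.
	conn, err := pgx.Connect(ctx, "postgres://postgres:postgres@localhost:5432/app_development")
	if err != nil {
		panic(err)
	}
	defer conn.Close(ctx)

	// CopyFrom streams rows over the Postgres COPY protocol, which is much
	// faster than issuing row-by-row INSERTs for bulk loads.
	rows := [][]interface{}{
		{1, "First post"},
		{2, "Second post"},
	}
	n, err := conn.CopyFrom(ctx,
		pgx.Identifier{"posts"},
		[]string{"id", "title"},
		pgx.CopyFromRows(rows),
	)
	if err != nil {
		panic(err)
	}
	fmt.Println("rows copied:", n)
}
```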

You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example `fake.image_url()` will generate a fake image url or `fake.shuffle_strings(['hello', 'world', 'cool'])` will generate a randomly shuffled version of that array of strings or `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided.

```
@@ -1133,7 +1145,7 @@ query {

## Using Variables

Variables (`$product_id`) and their values (`"product_id": 5`) can be passed along side the GraphQL query. Using variables makes for better client side code as well as improved server side SQL query caching. The build-in web-ui also supports setting variables. Not having to manipulate your GraphQL query string to insert values into it makes for cleaner
Variables (`$product_id`) and their values (`"product_id": 5`) can be passed along side the GraphQL query. Using variables makes for better client side code as well as improved server side SQL query caching. The built-in web-ui also supports setting variables. Not having to manipulate your GraphQL query string to insert values into it makes for cleaner
and better client side code.

```javascript
@@ -1552,7 +1564,7 @@ roles:

This configuration is relatively simple to follow the `roles_query` parameter is the query that must be run to help figure out a users role. This query can be as complex as you like and include joins with other tables.

The individual roles are defined under the `roles` parameter and this includes each table the role has a custom setting for. The role is dynamically matched using the `match` parameter for example in the above case `users.id = 1` means that when the `roles_query` is executed a user with the id `1` willbe assigned the admin role and those that don't match get the `user` role if authenticated successfully or the `anon` role.
The individual roles are defined under the `roles` parameter and this includes each table the role has a custom setting for. The role is dynamically matched using the `match` parameter for example in the above case `users.id = 1` means that when the `roles_query` is executed a user with the id `1` will be assigned the admin role and those that don't match get the `user` role if authenticated successfully or the `anon` role.

## Remote Joins

@@ -1767,7 +1779,7 @@ database:
port: 5432
dbname: app_development
user: postgres
password: ''
password: postgres

#schema: "public"
#pool_size: 10
@@ -1969,7 +1981,7 @@ To use Yugabyte in your local development flow just uncomment the following line

## Developing Super Graph

If you want to build and run Super Graph from code then the below commands will build the web ui and launch Super Graph in developer mode with a watcher to rebuild on code changes. And the demo rails app is also launched to make it essier to test changes.
If you want to build and run Super Graph from code then the below commands will build the web ui and launch Super Graph in developer mode with a watcher to rebuild on code changes. And the demo rails app is also launched to make it easier to test changes.

```bash

@@ -13,7 +13,7 @@ Super Graph code is made up of a number of packages. We have done our best to ke

## QCODE

This package contains the core of the GraphQL conpiler it handling the lexing and parsing of the GraphQL query transforming it into an internal representation called
This package contains the core of the GraphQL compiler it handling the lexing and parsing of the GraphQL query transforming it into an internal representation called
`QCode`.

This is the first step of the compiling process the `func NewCompiler(c Config)` function creates a new instance of this compiler which has it's own config.
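
For orientation, here is a minimal sketch of driving this compiler from Go. `qcode.NewCompiler` is named in the text above; the empty `Config`, the `Compile` signature, and the role argument are assumptions made for illustration and may not match the package exactly:

```go
package main

import (
	"fmt"

	"github.com/dosco/super-graph/qcode"
)

func main() {
	// Create a compiler instance with its own config, as described above.
	// The empty Config is a placeholder; real callers set filters and limits.
	qc, err := qcode.NewCompiler(qcode.Config{})
	if err != nil {
		panic(err)
	}

	// Compile lexes and parses the GraphQL query into the internal QCode
	// representation (signature and role argument assumed for this sketch).
	query := []byte(`query { products(limit: 5) { id name } }`)
	code, err := qc.Compile(query, "user")
	if err != nil {
		panic(err)
	}

	fmt.Printf("%+v\n", code)
}
```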

@@ -71,7 +71,7 @@ item{itemObjOpen, 16, 20} // {
...
```

These tokens are then fed into the parser `parse.go` the parser does the work of generating an abstract syntax tree (AST) from the tokens. This AST is an internal representation (data structure) and is not exposed outside the package. Sinc the AST is a tree a stack `stack.go` is used to walk the tree and generate the QCode AST. The QCode data structure is also a tree (represented as an array). This is then returned to the caller of the compile function.
These tokens are then fed into the parser `parse.go` the parser does the work of generating an abstract syntax tree (AST) from the tokens. This AST is an internal representation (data structure) and is not exposed outside the package. Since the AST is a tree a stack `stack.go` is used to walk the tree and generate the QCode AST. The QCode data structure is also a tree (represented as an array). This is then returned to the caller of the compile function.

```go
type Operation struct {
@@ -238,4 +238,4 @@ ok github.com/dosco/super-graph/psql 2.530s

## Reach out

If you'd like me to explain other parts of the code please reach out over Twitter or Discord. I'll keep adding to this doc as I get time.
If you'd like me to explain other parts of the code please reach out over Twitter or Discord. I'll keep adding to this doc as I get time.

@@ -19,7 +19,7 @@ default: &default
encoding: unicode
host: db
username: postgres
password:
password: postgres
pool: 5

development:
go.mod (2 changed lines)

@@ -2,7 +2,6 @@ module github.com/dosco/super-graph

require (
github.com/GeertJohan/go.rice v1.0.0
github.com/Masterminds/semver v1.5.0
github.com/NYTimes/gziphandler v1.1.1
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
@@ -12,7 +11,6 @@ require (
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/dlclark/regexp2 v1.2.0 // indirect
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733
github.com/dvyukov/go-fuzz v0.0.0-20191206100749-a378175e205c // indirect
github.com/fsnotify/fsnotify v1.4.7
github.com/garyburd/redigo v1.6.0
github.com/go-sourcemap/sourcemap v2.1.2+incompatible // indirect
go.sum (4 changed lines)

@@ -5,8 +5,6 @@ github.com/GeertJohan/go.incremental v1.0.0 h1:7AH+pY1XUgQE4Y1HcXYaMqAI0m9yrFqo/
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=
github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=
github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@@ -54,8 +52,6 @@ github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733 h1:cyNc40Dx5YNEO94idePU8rhVd3dn+sd04Arh0kDBAaw=
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
github.com/dvyukov/go-fuzz v0.0.0-20191206100749-a378175e205c h1:/bXaeEuNG6V0HeyEGw11DYLW5BGsOPlcVRIXbHNUWSo=
github.com/dvyukov/go-fuzz v0.0.0-20191206100749-a378175e205c/go.mod h1:11Gm+ccJnvAhCNLlf5+cS9KjtbaD5I5zaZpFMsTHWTw=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc=
@@ -27,14 +27,20 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {

var k []byte
state := expectKey
instr := false

for i := 0; i < len(b); i++ {
if state == expectObjClose || state == expectListClose {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
if b[i-1] != '\\' && b[i] == '"' {
instr = !instr
}
if !instr {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
}
}
}
jsn/get.go (17 changed lines)

@@ -51,13 +51,20 @@ func Get(b []byte, keys [][]byte) []Field {
state := expectKey

n := 0
instr := false

for i := 0; i < len(b); i++ {
if state == expectObjClose || state == expectListClose {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
if b[i-1] != '\\' && b[i] == '"' {
instr = !instr
}
if !instr {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
}
}
}
@@ -2,6 +2,7 @@ package jsn

import (
"bytes"
"io/ioutil"
"testing"
)

@@ -161,6 +162,8 @@ var (

input6 = `
{"users" : [{"id" : 1, "email" : "vicram@gmail.com", "slug" : "vikram-rangnekar", "threads" : [], "threads_cursor" : null}, {"id" : 3, "email" : "marareilly@lang.name", "slug" : "raymundo-corwin", "threads" : [{"id" : 9, "title" : "Et alias et aut porro praesentium nam in voluptatem reiciendis quisquam perspiciatis inventore eos quia et et enim qui amet."}, {"id" : 25, "title" : "Ipsam quam nemo culpa tempore amet optio sit sed eligendi autem consequatur quaerat rem velit quibusdam quibusdam optio a voluptatem."}], "threads_cursor" : 25}], "users_cursor" : 3}`

input7, _ = ioutil.ReadFile("test.json")
)

func TestGet(t *testing.T) {
@@ -256,6 +259,15 @@ func TestGet2(t *testing.T) {
}
}

func TestGet3(t *testing.T) {
values := Get(input7, [][]byte{[]byte("data")})
v := values[0].Value

if !bytes.Equal(v[len(v)-11:], []byte(`Rangnekar"}`)) {
t.Fatal("corrupt ending")
}
}

func TestValue(t *testing.T) {
v1 := []byte("12345")
if !bytes.Equal(Value(v1), v1) {
jsn/keys.go (17 changed lines)

@@ -10,15 +10,20 @@ func Keys(b []byte) [][]byte {

st := NewStack()
ae := 0
instr := false

for i := 0; i < len(b); i++ {

if state == expectObjClose || state == expectListClose {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
if b[i-1] != '\\' && b[i] == '"' {
instr = !instr
}
if !instr {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
}
}
}
@@ -32,6 +32,8 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
state := expectKey
ws, we := -1, len(b)

instr := false

for i := 0; i < len(b); i++ {
// skip any left padding whitespace
if ws == -1 && (b[i] == '{' || b[i] == '[') {
@@ -39,11 +41,16 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
}

if state == expectObjClose || state == expectListClose {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
if b[i-1] != '\\' && b[i] == '"' {
instr = !instr
}
if !instr {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
}
}
}
jsn/strip.go (16 changed lines)

@@ -11,14 +11,20 @@ func Strip(b []byte, path [][]byte) []byte {
pi := 0
pm := false
state := expectKey
instr := false

for i := 0; i < len(b); i++ {
if state == expectObjClose || state == expectListClose {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
if b[i-1] != '\\' && b[i] == '"' {
instr = !instr
}
if !instr {
switch b[i] {
case '{', '[':
d++
case '}', ']':
d--
}
}
}
jsn/test.json (1 changed line, new file)
File diff suppressed because one or more lines are too long
@@ -1127,9 +1127,15 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *

switch ex.Type {
case qcode.ValVar:
if val, ok := vars[ex.Val]; ok {
val, ok := vars[ex.Val]
switch {
case ok && strings.HasPrefix(val, "sql:"):
io.WriteString(c.w, ` (`)
io.WriteString(c.w, val[4:])
io.WriteString(c.w, `)`)
case ok:
squoted(c.w, val)
} else {
default:
io.WriteString(c.w, ` '{{`)
io.WriteString(c.w, ex.Val)
io.WriteString(c.w, `}}'`)
@@ -4,8 +4,9 @@ import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"

"github.com/Masterminds/semver"
"github.com/adjust/gorails/marshal"
)

@@ -37,17 +38,20 @@ func NewAuth(version, secret string) (*Auth, error) {
AuthSalt: authSalt,
}

ver, err := semver.NewVersion(version)
if err != nil {
return nil, fmt.Errorf("rails auth: %s", err)
var v1, v2 int
var err error

sv := strings.Split(version, ".")
if len(sv) >= 2 {
if v1, err = strconv.Atoi(sv[0]); err != nil {
return nil, err
}
if v2, err = strconv.Atoi(sv[1]); err != nil {
return nil, err
}
}

gt52, err := semver.NewConstraint(">= 5.2")
if err != nil {
return nil, fmt.Errorf("rails auth: %s", err)
}

if gt52.Check(ver) {
if v1 >= 5 && v2 >= 2 {
ra.Cipher = railsCipher52
} else {
ra.Cipher = railsCipher
scripts/start.sh (13 changed lines, new executable file)

@@ -0,0 +1,13 @@
#!/bin/sh
if [ $1 = "secrets" ]
then
./sops --config ./config "${@:2}"
exit 0
fi

if test -f "./config/$SECRETS_FILE"
then
./sops --config ./config exec-env "./config/$SECRETS_FILE" "$*"
else
$@
fi
@@ -39,11 +39,16 @@ func argMap(ctx context.Context, vars []byte) func(w io.Writer, tag string) (int

}
v := fields[0].Value

// Open and close quotes
if len(v) >= 2 && v[0] == '"' && v[len(v)-1] == '"' {
fields[0].Value = v[1 : len(v)-1]
}

if tag == "cursor" {
if bytes.EqualFold(v, []byte("null")) {
return io.WriteString(w, ``)
}
v1, err := decrypt(string(fields[0].Value))
if err != nil {
return 0, err
serv/cmd_seed.go (105 changed lines)

@@ -3,6 +3,7 @@ package serv

import (
"bytes"
"context"
"encoding/csv"
"encoding/json"
"fmt"
"io"
@@ -10,9 +11,12 @@ import (
"math/rand"
"os"
"path"
"strconv"
"strings"

"github.com/brianvoe/gofakeit"
"github.com/dop251/goja"
"github.com/jackc/pgx/v4"
"github.com/spf13/cobra"
"github.com/valyala/fasttemplate"
)
@@ -42,6 +46,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {

vm := goja.New()
vm.Set("graphql", graphQLFunc)
vm.Set("import_csv", importCSV)

console := vm.NewObject()
console.Set("log", logFunc) //nolint: errcheck
@@ -129,6 +134,106 @@ func graphQLFunc(query string, data interface{}, opt map[string]string) map[stri

return val
}

type csvSource struct {
rows [][]string
i int
}

func NewCSVSource(filename string) (*csvSource, error) {
f, err := os.Open(filename)
if err != nil {
return nil, err
}
defer f.Close()

r := csv.NewReader(f)
rows, err := r.ReadAll()
if err != nil {
return nil, err
}

return &csvSource{rows: rows}, nil
}

func (c *csvSource) Next() bool {
return c.i < len(c.rows)
}

func (c *csvSource) Values() ([]interface{}, error) {
var vals []interface{}
var err error

for _, v := range c.rows[c.i] {
switch {
case len(v) == 0:
vals = append(vals, "")
case isDigit(v):
var n int
if n, err = strconv.Atoi(v); err == nil {
vals = append(vals, n)
}
case strings.EqualFold(v, "true") || strings.EqualFold(v, "false"):
var b bool
if b, err = strconv.ParseBool(v); err == nil {
vals = append(vals, b)
}
default:
vals = append(vals, v)
}

if err != nil {
return nil, fmt.Errorf("%w (line no %d)", err, c.i)
}
}
c.i++

return vals, nil
}

func isDigit(v string) bool {
for i := range v {
if v[i] < '0' || v[i] > '9' {
return false
}
}
return true
}

func (c *csvSource) Err() error {
return nil
}

func importCSV(table, filename string) int64 {
if filename[0] != '/' {
filename = path.Join(confPath, filename)
}

s, err := NewCSVSource(filename)
if err != nil {
errlog.Fatal().Err(err).Send()
}

var cols []string
colval, _ := s.Values()

for _, c := range colval {
cols = append(cols, c.(string))
}

n, err := db.CopyFrom(
context.Background(),
pgx.Identifier{table},
cols,
s)

if err != nil {
err = fmt.Errorf("%w (line no %d)", err, s.i)
errlog.Fatal().Err(err).Send()
}

return n
}

//nolint: errcheck
func logFunc(args ...interface{}) {
for _, arg := range args {
@@ -205,8 +205,8 @@ func newConfig(name string) *viper.Viper {
vi.SetDefault("env", "development")

vi.BindEnv("env", "GO_ENV") //nolint: errcheck
vi.BindEnv("HOST", "HOST") //nolint: errcheck
vi.BindEnv("PORT", "PORT") //nolint: errcheck
vi.BindEnv("host", "HOST") //nolint: errcheck
vi.BindEnv("port", "PORT") //nolint: errcheck

vi.SetDefault("auth.rails.max_idle", 80)
vi.SetDefault("auth.rails.max_active", 12000)
serv/init.go (13 changed lines)

@@ -5,6 +5,7 @@ import (
"crypto/sha256"
"fmt"
"os"
"time"

"github.com/dosco/super-graph/allow"
"github.com/dosco/super-graph/crypto"
@@ -135,7 +136,17 @@ func initDBPool(c *config) (*pgxpool.Pool, error) {
config.MaxConns = conf.DB.PoolSize
}

db, err := pgxpool.ConnectConfig(context.Background(), config)
var db *pgxpool.Pool
var err error

for i := 1; i < 10; i++ {
db, err = pgxpool.ConnectConfig(context.Background(), config)
if err == nil {
break
}
time.Sleep(time.Duration(i*100) * time.Millisecond)
}

if err != nil {
return nil, err
}
serv/rice-box.go (502 changed lines)
File diff suppressed because one or more lines are too long
tmpl/dev.yml (34 changed lines)

@@ -109,7 +109,7 @@ database:
port: 5432
dbname: {% app_name_slug %}_development
user: postgres
password: ''
password: postgres

#schema: "public"
#pool_size: 10
@@ -125,7 +125,9 @@ database:

# Define additional variables here to be used with filters
variables:
admin_account_id: "5"
#admin_account_id: "5"
admin_account_id: "sql:select id from users where admin = true limit 1"

# Field and table names that you wish to block
blocklist:
@@ -168,26 +170,14 @@ tables:
table: users

roles_query: "SELECT * FROM users WHERE id = $user_id"
#roles_query: "SELECT * FROM users WHERE id = $user_id"

roles:
- name: anon
tables:
- name: products
limit: 10

- name: users
query:
columns: ["id", "name", "description" ]
aggregation: false

insert:
block: false

update:
block: false

delete:
block: false
limit: 10

- name: user
tables:
@@ -215,8 +205,8 @@ roles:
delete:
block: true

- name: admin
match: id = 1000
tables:
- name: users
filters: []
# - name: admin
# match: id = 1000
# tables:
# - name: users
# filters: []
@@ -2,7 +2,10 @@ version: '3.4'
services:
# Postgres DB
db:
image: postgres:latest
image: postgres:12
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
ports:
- "5432:5432"
@@ -52,9 +52,9 @@ database:
type: postgres
host: db
port: 5432
dbname: {% app_name_slug %}_development
dbname: {% app_name_slug %}_production
user: postgres
password: ''
password: postgres
#pool_size: 10
#max_retries: 0
#log_level: "debug"