Compare commits

..

31 Commits

SHA1 Message Date
68a378c00f Fix issue with prepared statements skipped on error 2020-03-31 01:28:39 -04:00
d96eaf14f4 Fix bugs with escape char handling 2020-03-30 10:03:47 -04:00
01e488b69d Fix for bug blocking anon queries 2020-03-21 20:11:04 -04:00
7a450b16ba Fix issue with detecting many to many relationships 2020-03-18 20:19:56 -04:00
1ad8cbf15b Fix minor parser bug 2020-03-17 23:03:41 -04:00
f69f1c67d5 Fix to remove left over debug log 2020-03-16 01:43:26 -04:00
a172193955 Fix to ensure cursor fields can be defined in the query 2020-03-16 01:40:47 -04:00
81338b6123 Fix issues blocking Apollo client 2020-03-14 01:35:42 -04:00
265b93b203 Fix for encrypted cursor in production mode bug 2020-03-06 21:38:01 +05:30
6c240e21b4 Fix bug related to 'anon' role prepared statements 2020-03-06 15:39:15 +05:30
7930719eaa Add ability to set CORS headers 2020-03-06 09:47:51 +05:30
cc687b1b2b Fix issue with Docerfile CMD 2020-03-05 09:13:52 +05:30
3033dcf1a9 Fix issue with setting PORT env var 2020-03-04 15:39:53 +05:30
0381982d19 Fix upx version issue in Dockerfile 2020-03-04 12:27:07 +05:30
2b0a798faa Add 'secrets' command to startup script 2020-03-03 19:44:14 +05:30
8b6c562ac1 Add CSV import command to seed javascript 2020-03-03 13:45:47 +05:30
a1fb89b762 Add support for SQL in variables 2020-02-29 10:35:48 +05:30
c82a7bff0d Misprint (#43) 2020-02-24 10:48:50 +05:30
7acf28bb3c Fix issue with upgrading to postgres 12 docker image #36 2020-02-24 02:37:21 +05:30
be5d4e976a Misprint (#41) 2020-02-24 02:04:23 +05:30
d1b884aec6 Misprint (#40) 2020-02-24 02:03:57 +05:30
4be4ce860b Misprint (#39) 2020-02-24 02:03:40 +05:30
dfa4caf540 Misprint (#37) 2020-02-24 02:03:27 +05:30
7763251fb7 fix "Try the demo app" in docs (#38)
* fix "Try the demo app" in docs

* fix "Get Started" setup in docs
2020-02-24 02:02:22 +05:30
51e105699e Fix corrupt json bug in jsn package 2020-02-24 02:00:11 +05:30
90694f8803 Fix spelling in docs (#34) 2020-02-23 15:41:04 +05:30
ad82f5b267 Fix spelling in docs (#35) 2020-02-23 15:40:42 +05:30
99b37a9c50 Fix bug related to new Postgres docker image 2020-02-23 10:28:32 +05:30
7ec1f59224 Fix bug with cursors and multiple order by 2020-02-23 02:28:37 +05:30
d3ecb1d6cc Fix bug with multi root queries 2020-02-21 10:29:37 +05:30
aed4170e8e Fix bug with cursor filters 2020-02-20 22:53:29 +05:30
46 changed files with 1046 additions and 589 deletions

View File

@ -6,14 +6,19 @@ RUN yarn
RUN yarn build RUN yarn build
# stage: 2 # stage: 2
FROM golang:1.13.4-alpine as go-build FROM golang:1.14-alpine as go-build
RUN apk update && \ RUN apk update && \
apk add --no-cache make && \ apk add --no-cache make && \
apk add --no-cache git && \ apk add --no-cache git && \
apk add --no-cache jq && \
apk add --no-cache upx=3.95-r2 apk add --no-cache upx=3.95-r2
RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc
ARG SOPS_VERSION=3.5.0
ADD https://github.com/mozilla/sops/releases/download/v${SOPS_VERSION}/sops-v${SOPS_VERSION}.linux /usr/local/bin/sops
RUN chmod 755 /usr/local/bin/sops
WORKDIR /app WORKDIR /app
COPY . /app COPY . /app
@ -36,10 +41,15 @@ RUN mkdir -p /config
COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/ COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=go-build /app/config/* /config/ COPY --from=go-build /app/config/* /config/
COPY --from=go-build /app/super-graph . COPY --from=go-build /app/super-graph .
COPY --from=go-build /app/scripts/start.sh .
COPY --from=go-build /usr/local/bin/sops .
RUN chmod +x /super-graph RUN chmod +x /super-graph
RUN chmod +x /start.sh
USER nobody USER nobody
EXPOSE 8080 ENV GO_ENV production
CMD ./super-graph serv ENTRYPOINT ["./start.sh"]
CMD ["./super-graph", "serv"]

View File

@ -35,7 +35,7 @@ $(GORICE):
$(WEB_BUILD_DIR): $(WEB_BUILD_DIR):
@echo "First install Yarn and create a build of the web UI found under ./web" @echo "First install Yarn and create a build of the web UI found under ./web"
@echo "Command: cd web && yarn build" @echo "Command: cd web && yarn && yarn build"
@exit 1 @exit 1
$(GITCHGLOG): $(GITCHGLOG):
@ -77,7 +77,7 @@ clean:
run: clean run: clean
@go run $(BUILD_FLAGS) main.go $(ARGS) @go run $(BUILD_FLAGS) main.go $(ARGS)
install: install: gen
@echo $(GOPATH) @echo $(GOPATH)
@echo "Commit Hash: `git rev-parse HEAD`" @echo "Commit Hash: `git rev-parse HEAD`"
@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`" @echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"

View File

@ -36,6 +36,15 @@ migrations_path: ./config/migrations
# encrypting the cursor data # encrypting the cursor data
secret_key: supercalifajalistics secret_key: supercalifajalistics
# CORS: A list of origins a cross-domain request can be executed from.
# If the special * value is present in the list, all origins will be allowed.
# An origin may contain a wildcard (*) to replace 0 or more
# characters (i.e.: http://*.domain.com).
cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
cors_debug: true
# Postgres related environment Variables # Postgres related environment Variables
# SG_DATABASE_HOST # SG_DATABASE_HOST
# SG_DATABASE_PORT # SG_DATABASE_PORT
@ -93,7 +102,7 @@ database:
port: 5432 port: 5432
dbname: app_development dbname: app_development
user: postgres user: postgres
password: '' password: postgres
#schema: "public" #schema: "public"
#pool_size: 10 #pool_size: 10
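The new `cors_allowed_origins` and `cors_debug` settings above pair with the `github.com/rs/cors v1.7.0` dependency added to go.mod further down. A minimal sketch of how those options map onto that library (assumed wiring for illustration; the handler path and server setup are not Super Graph's actual code):

```go
package main

import (
	"log"
	"net/http"

	"github.com/rs/cors"
)

func main() {
	mux := http.NewServeMux()
	mux.HandleFunc("/api/v1/graphql", func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte(`{"data":{}}`))
	})

	// cors_allowed_origins: ["*"] and cors_debug: true from the config above.
	// An origin entry may contain a single wildcard, e.g. "http://*.domain.com".
	c := cors.New(cors.Options{
		AllowedOrigins: []string{"*"},
		Debug:          true,
	})

	log.Fatal(http.ListenAndServe(":8080", c.Handler(mux)))
}
```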

View File

@ -54,7 +54,7 @@ database:
port: 5432 port: 5432
dbname: app_production dbname: app_production
user: postgres user: postgres
password: '' password: postgres
#pool_size: 10 #pool_size: 10
#max_retries: 0 #max_retries: 0
#log_level: "debug" #log_level: "debug"

View File

@ -1,7 +1,10 @@
version: '3.4' version: '3.4'
services: services:
db: db:
image: postgres image: postgres:12
environment:
POSTGRES_USER: postgres
POSTGRES_PASSWORD: postgres
ports: ports:
- "5432:5432" - "5432:5432"

View File

@ -34,6 +34,12 @@ Super Graph has a rich feature set like integrating with your existing Ruby on R
# clone the repository # clone the repository
git clone https://github.com/dosco/super-graph git clone https://github.com/dosco/super-graph
# run db in background
docker-compose up -d db
# see logs and wait until DB is really UP
docker-compose logs db
# setup the demo rails app & database and run it # setup the demo rails app & database and run it
docker-compose run rails_app rake db:create db:migrate db:seed docker-compose run rails_app rake db:create db:migrate db:seed
@ -137,7 +143,7 @@ What if I told you Super Graph will fetch all this data with a single SQL query
```graphql ```graphql
query { query {
products(limit 5, where: { price: { gt: 12 } }) { products(limit: 5, where: { price: { gt: 12 } }) {
id id
name name
description description
@ -153,7 +159,7 @@ query {
} }
} }
purchases( purchases(
limit 10, limit: 10,
order_by: { created_at: desc } , order_by: { created_at: desc } ,
where: { user_id: { eq: $user_id } } where: { user_id: { eq: $user_id } }
) { ) {
@ -216,7 +222,7 @@ You can then add your database schema to the migrations, maybe create some seed
git clone https://github.com/dosco/super-graph && cd super-graph && make install git clone https://github.com/dosco/super-graph && cd super-graph && make install
``` ```
And then create and launch you're new app And then create and launch your new app
```bash ```bash
# create a new app and change to it's directory # create a new app and change to it's directory
@ -286,6 +292,12 @@ for (i = 0; i < 10; i++) {
} }
``` ```
If you want to import a lot of data, using a CSV file is the best and fastest option. The `import_csv` command uses the `COPY FROM` Postgres method to load massive amounts of data into tables. The first line of the CSV file must be the header with column names.
```javascript
var post_count = import_csv("posts", "posts.csv")
```
You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example `fake.image_url()` will generate a fake image url or `fake.shuffle_strings(['hello', 'world', 'cool'])` will generate a randomly shuffled version of that array of strings or `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided. You can generate the following fake data for your seeding purposes. Below is the list of fake data functions supported by the built-in fake data library. For example `fake.image_url()` will generate a fake image url or `fake.shuffle_strings(['hello', 'world', 'cool'])` will generate a randomly shuffled version of that array of strings or `fake.rand_string(['hello', 'world', 'cool'])` will return a random string from the array provided.
``` ```
@ -1133,7 +1145,7 @@ query {
## Using Variables ## Using Variables
Variables (`$product_id`) and their values (`"product_id": 5`) can be passed along side the GraphQL query. Using variables makes for better client side code as well as improved server side SQL query caching. The build-in web-ui also supports setting variables. Not having to manipulate your GraphQL query string to insert values into it makes for cleaner Variables (`$product_id`) and their values (`"product_id": 5`) can be passed along side the GraphQL query. Using variables makes for better client side code as well as improved server side SQL query caching. The built-in web-ui also supports setting variables. Not having to manipulate your GraphQL query string to insert values into it makes for cleaner
and better client side code. and better client side code.
```javascript ```javascript
@ -1767,7 +1779,7 @@ database:
port: 5432 port: 5432
dbname: app_development dbname: app_development
user: postgres user: postgres
password: '' password: postgres
#schema: "public" #schema: "public"
#pool_size: 10 #pool_size: 10
@ -1969,7 +1981,7 @@ To use Yugabyte in your local development flow just uncomment the following line
## Developing Super Graph ## Developing Super Graph
If you want to build and run Super Graph from code then the below commands will build the web ui and launch Super Graph in developer mode with a watcher to rebuild on code changes. And the demo rails app is also launched to make it essier to test changes. If you want to build and run Super Graph from code then the below commands will build the web ui and launch Super Graph in developer mode with a watcher to rebuild on code changes. And the demo rails app is also launched to make it easier to test changes.
```bash ```bash

View File

@ -13,7 +13,7 @@ Super Graph code is made up of a number of packages. We have done our best to ke
## QCODE ## QCODE
This package contains the core of the GraphQL conpiler it handling the lexing and parsing of the GraphQL query transforming it into an internal representation called This package contains the core of the GraphQL compiler it handling the lexing and parsing of the GraphQL query transforming it into an internal representation called
`QCode`. `QCode`.
This is the first step of the compiling process the `func NewCompiler(c Config)` function creates a new instance of this compiler which has it's own config. This is the first step of the compiling process the `func NewCompiler(c Config)` function creates a new instance of this compiler which has it's own config.
@ -71,7 +71,7 @@ item{itemObjOpen, 16, 20} // {
... ...
``` ```
These tokens are then fed into the parser `parse.go` the parser does the work of generating an abstract syntax tree (AST) from the tokens. This AST is an internal representation (data structure) and is not exposed outside the package. Sinc the AST is a tree a stack `stack.go` is used to walk the tree and generate the QCode AST. The QCode data structure is also a tree (represented as an array). This is then returned to the caller of the compile function. These tokens are then fed into the parser `parse.go` the parser does the work of generating an abstract syntax tree (AST) from the tokens. This AST is an internal representation (data structure) and is not exposed outside the package. Since the AST is a tree a stack `stack.go` is used to walk the tree and generate the QCode AST. The QCode data structure is also a tree (represented as an array). This is then returned to the caller of the compile function.
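A toy sketch of the stack-driven walk described above, flattening a parsed tree into a QCode-style array (names and types here are illustrative, not the qcode package's real API):

```go
package main

import "fmt"

// node is a stand-in for a parsed AST node.
type node struct {
	name     string
	children []*node
}

// flatten walks the tree with an explicit stack and emits the nodes as a
// flat array, the same general shape the QCode tree is stored in.
func flatten(root *node) []string {
	var out []string
	stack := []*node{root}
	for len(stack) > 0 {
		n := stack[len(stack)-1]
		stack = stack[:len(stack)-1]
		out = append(out, n.name)
		for i := len(n.children) - 1; i >= 0; i-- { // push in reverse to keep source order
			stack = append(stack, n.children[i])
		}
	}
	return out
}

func main() {
	// query { products { id name } }
	ast := &node{name: "query", children: []*node{
		{name: "products", children: []*node{{name: "id"}, {name: "name"}}},
	}}
	fmt.Println(flatten(ast)) // [query products id name]
}
```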
```go ```go
type Operation struct { type Operation struct {

View File

@ -19,7 +19,7 @@ default: &default
encoding: unicode encoding: unicode
host: db host: db
username: postgres username: postgres
password: password: postgres
pool: 5 pool: 5
development: development:

3
go.mod
View File

@ -2,7 +2,6 @@ module github.com/dosco/super-graph
require ( require (
github.com/GeertJohan/go.rice v1.0.0 github.com/GeertJohan/go.rice v1.0.0
github.com/Masterminds/semver v1.5.0
github.com/NYTimes/gziphandler v1.1.1 github.com/NYTimes/gziphandler v1.1.1
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3 github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
@ -12,7 +11,6 @@ require (
github.com/dgrijalva/jwt-go v3.2.0+incompatible github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/dlclark/regexp2 v1.2.0 // indirect github.com/dlclark/regexp2 v1.2.0 // indirect
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733 github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733
github.com/dvyukov/go-fuzz v0.0.0-20191206100749-a378175e205c // indirect
github.com/fsnotify/fsnotify v1.4.7 github.com/fsnotify/fsnotify v1.4.7
github.com/garyburd/redigo v1.6.0 github.com/garyburd/redigo v1.6.0
github.com/go-sourcemap/sourcemap v2.1.2+incompatible // indirect github.com/go-sourcemap/sourcemap v2.1.2+incompatible // indirect
@ -23,6 +21,7 @@ require (
github.com/magiconair/properties v1.8.1 // indirect github.com/magiconair/properties v1.8.1 // indirect
github.com/pelletier/go-toml v1.4.0 // indirect github.com/pelletier/go-toml v1.4.0 // indirect
github.com/pkg/errors v0.8.1 github.com/pkg/errors v0.8.1
github.com/rs/cors v1.7.0
github.com/rs/zerolog v1.15.0 github.com/rs/zerolog v1.15.0
github.com/spf13/afero v1.2.2 // indirect github.com/spf13/afero v1.2.2 // indirect
github.com/spf13/cobra v0.0.5 github.com/spf13/cobra v0.0.5

6
go.sum
View File

@ -5,8 +5,6 @@ github.com/GeertJohan/go.incremental v1.0.0 h1:7AH+pY1XUgQE4Y1HcXYaMqAI0m9yrFqo/
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0= github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ= github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=
github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0= github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0=
github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I= github.com/NYTimes/gziphandler v1.1.1 h1:ZUDjpQae29j0ryrS0u/B8HZfJBtBQHjqw2rQ2cqUQ3I=
github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c= github.com/NYTimes/gziphandler v1.1.1/go.mod h1:n/CVRwUEOgIxrgPvAQhUUr9oeUtvrhMomdKFjzJNB0c=
github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU= github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
@ -54,8 +52,6 @@ github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc= github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733 h1:cyNc40Dx5YNEO94idePU8rhVd3dn+sd04Arh0kDBAaw= github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733 h1:cyNc40Dx5YNEO94idePU8rhVd3dn+sd04Arh0kDBAaw=
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA= github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
github.com/dvyukov/go-fuzz v0.0.0-20191206100749-a378175e205c h1:/bXaeEuNG6V0HeyEGw11DYLW5BGsOPlcVRIXbHNUWSo=
github.com/dvyukov/go-fuzz v0.0.0-20191206100749-a378175e205c/go.mod h1:11Gm+ccJnvAhCNLlf5+cS9KjtbaD5I5zaZpFMsTHWTw=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I= github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo= github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc= github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc=
@ -184,6 +180,8 @@ github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA= github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU= github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg= github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
github.com/rs/cors v1.7.0/go.mod h1:gFx+x8UowdsKA9AchylcLynDq+nNFfI8FkUZdN/jGCU=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ= github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU= github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
github.com/rs/zerolog v1.15.0 h1:uPRuwkWF4J6fGsJ2R0Gn2jB1EQiav9k3S6CSdygQJXY= github.com/rs/zerolog v1.15.0 h1:uPRuwkWF4J6fGsJ2R0Gn2jB1EQiav9k3S6CSdygQJXY=

View File

@ -27,9 +27,21 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
var k []byte var k []byte
state := expectKey state := expectKey
instr := false
slash := 0
for i := 0; i < len(b); i++ { for i := 0; i < len(b); i++ {
if instr && b[i] == '\\' {
slash++
continue
}
if b[i] == '"' && (slash%2 == 0) {
instr = !instr
}
if state == expectObjClose || state == expectListClose { if state == expectObjClose || state == expectListClose {
if !instr {
switch b[i] { switch b[i] {
case '{', '[': case '{', '[':
d++ d++
@ -37,6 +49,7 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
d-- d--
} }
} }
}
if state == expectKey { if state == expectKey {
switch b[i] { switch b[i] {
@ -64,7 +77,7 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
state = expectKeyClose state = expectKeyClose
s = i s = i
case state == expectKeyClose && (b[i-1] != '\\' && b[i] == '"'): case state == expectKeyClose && (b[i] == '"' && (slash%2 == 0)):
state = expectColon state = expectColon
k = b[(s + 1):i] k = b[(s + 1):i]
@ -74,7 +87,7 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
case state == expectValue && b[i] == '"': case state == expectValue && b[i] == '"':
state = expectString state = expectString
case state == expectString && (b[i-1] != '\\' && b[i] == '"'): case state == expectString && (b[i] == '"' && (slash%2 == 0)):
e = i e = i
case state == expectValue && b[i] == '[': case state == expectValue && b[i] == '[':
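This diff and the matching ones in get.go, keys.go, replace.go and strip.go below all add the same guard: an `instr` flag records whether the scanner is currently inside a string and a `slash` counter tracks consecutive backslashes, so a quote only opens or closes a string when preceded by an even number of backslashes, and braces/brackets inside strings no longer affect the depth counter. A standalone sketch of that escape-handling pattern (hypothetical helper, not the jsn package's API):

```go
package main

import "fmt"

// stringSpans returns the start/end offsets of every JSON string in b,
// using the same even-number-of-backslashes rule added above so that an
// escaped quote (\") does not terminate the string.
func stringSpans(b []byte) [][2]int {
	var spans [][2]int
	instr := false
	slash := 0
	start := 0
	for i := 0; i < len(b); i++ {
		if instr && b[i] == '\\' {
			slash++
			continue
		}
		if b[i] == '"' && slash%2 == 0 {
			if instr {
				spans = append(spans, [2]int{start, i})
			} else {
				start = i
			}
			instr = !instr
		}
		slash = 0
	}
	return spans
}

func main() {
	in := []byte(`{"body": "a \"quoted\" word", "ok": true}`)
	for _, s := range stringSpans(in) {
		fmt.Printf("%s\n", in[s[0]:s[1]+1])
	}
	// "body"
	// "a \"quoted\" word"
	// "ok"
}
```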

View File

@ -51,8 +51,21 @@ func Get(b []byte, keys [][]byte) []Field {
state := expectKey state := expectKey
n := 0 n := 0
instr := false
slash := 0
for i := 0; i < len(b); i++ { for i := 0; i < len(b); i++ {
if instr && b[i] == '\\' {
slash++
continue
}
if b[i] == '"' && (slash%2 == 0) {
instr = !instr
}
if state == expectObjClose || state == expectListClose { if state == expectObjClose || state == expectListClose {
if !instr {
switch b[i] { switch b[i] {
case '{', '[': case '{', '[':
d++ d++
@ -60,13 +73,14 @@ func Get(b []byte, keys [][]byte) []Field {
d-- d--
} }
} }
}
switch { switch {
case state == expectKey && b[i] == '"': case state == expectKey && b[i] == '"':
state = expectKeyClose state = expectKeyClose
s = i s = i
case state == expectKeyClose && (b[i-1] != '\\' && b[i] == '"'): case state == expectKeyClose && (b[i] == '"' && (slash%2 == 0)):
state = expectColon state = expectColon
k = b[(s + 1):i] k = b[(s + 1):i]
@ -77,7 +91,7 @@ func Get(b []byte, keys [][]byte) []Field {
state = expectString state = expectString
s = i s = i
case state == expectString && (b[i-1] != '\\' && b[i] == '"'): case state == expectString && (b[i] == '"' && (slash%2 == 0)):
e = i e = i
case state == expectValue && b[i] == '[': case state == expectValue && b[i] == '[':
@ -148,6 +162,8 @@ func Get(b []byte, keys [][]byte) []Field {
state = expectKey state = expectKey
e = 0 e = 0
} }
slash = 0
} }
return res[:n] return res[:n]

View File

@ -2,6 +2,9 @@ package jsn
import ( import (
"bytes" "bytes"
"fmt"
"io/ioutil"
"strings"
"testing" "testing"
) )
@ -161,6 +164,10 @@ var (
input6 = ` input6 = `
{"users" : [{"id" : 1, "email" : "vicram@gmail.com", "slug" : "vikram-rangnekar", "threads" : [], "threads_cursor" : null}, {"id" : 3, "email" : "marareilly@lang.name", "slug" : "raymundo-corwin", "threads" : [{"id" : 9, "title" : "Et alias et aut porro praesentium nam in voluptatem reiciendis quisquam perspiciatis inventore eos quia et et enim qui amet."}, {"id" : 25, "title" : "Ipsam quam nemo culpa tempore amet optio sit sed eligendi autem consequatur quaerat rem velit quibusdam quibusdam optio a voluptatem."}], "threads_cursor" : 25}], "users_cursor" : 3}` {"users" : [{"id" : 1, "email" : "vicram@gmail.com", "slug" : "vikram-rangnekar", "threads" : [], "threads_cursor" : null}, {"id" : 3, "email" : "marareilly@lang.name", "slug" : "raymundo-corwin", "threads" : [{"id" : 9, "title" : "Et alias et aut porro praesentium nam in voluptatem reiciendis quisquam perspiciatis inventore eos quia et et enim qui amet."}, {"id" : 25, "title" : "Ipsam quam nemo culpa tempore amet optio sit sed eligendi autem consequatur quaerat rem velit quibusdam quibusdam optio a voluptatem."}], "threads_cursor" : 25}], "users_cursor" : 3}`
input7, _ = ioutil.ReadFile("test7.json")
input8, _ = ioutil.ReadFile("test8.json")
) )
func TestGet(t *testing.T) { func TestGet(t *testing.T) {
@ -256,6 +263,32 @@ func TestGet2(t *testing.T) {
} }
} }
func TestGet3(t *testing.T) {
values := Get(input7, [][]byte{[]byte("data")})
v := values[0].Value
if !bytes.Equal(v[len(v)-11:], []byte(`Rangnekar"}`)) {
t.Fatal("corrupt ending")
}
}
func TestGet4(t *testing.T) {
exp := `"# \n\n@@@java\npackage main\n\nimport (\n \"net/http\"\n \"strings\"\n\n \"github.com/gin-gonic/gin\"\n)\n\nfunc main() {\n r := gin.Default()\n r.LoadHTMLGlob(\"templates/*\")\n\n r.GET(\"/\", handleIndex)\n r.GET(\"/to/:name\", handleIndex)\n r.Run()\n}\n\n// Hello is page data for the template\ntype Hello struct {\n Name string\n}\n\nfunc handleIndex(c *gin.Context) {\n name := c.Param(\"name\")\n if name != \"\" {\n name = strings.TrimPrefix(c.Param(\"name\"), \"/\")\n }\n c.HTML(http.StatusOK, \"hellofly.tmpl\", gin.H{\"Name\": name})\n}\n@@@\n\n\\"`
exp = strings.ReplaceAll(exp, "@", "`")
values := Get(input8, [][]byte{[]byte("body")})
if string(values[0].Key) != "body" {
t.Fatal("unexpected key")
}
if string(values[0].Value) != exp {
fmt.Println(string(values[0].Value))
t.Fatal("unexpected value")
}
}
func TestValue(t *testing.T) { func TestValue(t *testing.T) {
v1 := []byte("12345") v1 := []byte("12345")
if !bytes.Equal(Value(v1), v1) { if !bytes.Equal(Value(v1), v1) {

View File

@ -10,10 +10,21 @@ func Keys(b []byte) [][]byte {
st := NewStack() st := NewStack()
ae := 0 ae := 0
instr := false
slash := 0
for i := 0; i < len(b); i++ { for i := 0; i < len(b); i++ {
if instr && b[i] == '\\' {
slash++
continue
}
if b[i] == '"' && (slash%2 == 0) {
instr = !instr
}
if state == expectObjClose || state == expectListClose { if state == expectObjClose || state == expectListClose {
if !instr {
switch b[i] { switch b[i] {
case '{', '[': case '{', '[':
d++ d++
@ -21,6 +32,7 @@ func Keys(b []byte) [][]byte {
d-- d--
} }
} }
}
si := st.Peek() si := st.Peek()
@ -47,7 +59,7 @@ func Keys(b []byte) [][]byte {
state = expectKeyClose state = expectKeyClose
s = i s = i
case state == expectKeyClose && (b[i-1] != '\\' && b[i] == '"'): case state == expectKeyClose && (b[i] == '"' && (slash%2 == 0)):
state = expectColon state = expectColon
k = b[(s + 1):i] k = b[(s + 1):i]
@ -58,7 +70,7 @@ func Keys(b []byte) [][]byte {
state = expectString state = expectString
s = i s = i
case state == expectString && (b[i-1] != '\\' && b[i] == '"'): case state == expectString && (b[i] == '"' && (slash%2 == 0)):
e = i e = i
case state == expectValue && b[i] == '{': case state == expectValue && b[i] == '{':
@ -130,6 +142,7 @@ func Keys(b []byte) [][]byte {
e = 0 e = 0
} }
slash = 0
} }
return res return res

View File

@ -12,6 +12,11 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
return errors.New("'from' and 'to' must be of the same length") return errors.New("'from' and 'to' must be of the same length")
} }
if len(from) == 0 || len(to) == 0 {
_, err := w.Write(b)
return err
}
h := xxhash.New() h := xxhash.New()
tmap := make(map[uint64]int, len(from)) tmap := make(map[uint64]int, len(from))
@ -32,13 +37,26 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
state := expectKey state := expectKey
ws, we := -1, len(b) ws, we := -1, len(b)
instr := false
slash := 0
for i := 0; i < len(b); i++ { for i := 0; i < len(b); i++ {
if instr && b[i] == '\\' {
slash++
continue
}
// skip any left padding whitespace // skip any left padding whitespace
if ws == -1 && (b[i] == '{' || b[i] == '[') { if ws == -1 && (b[i] == '{' || b[i] == '[') {
ws = i ws = i
} }
if b[i] == '"' && (slash%2 == 0) {
instr = !instr
}
if state == expectObjClose || state == expectListClose { if state == expectObjClose || state == expectListClose {
if !instr {
switch b[i] { switch b[i] {
case '{', '[': case '{', '[':
d++ d++
@ -46,13 +64,14 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
d-- d--
} }
} }
}
switch { switch {
case state == expectKey && b[i] == '"': case state == expectKey && b[i] == '"':
state = expectKeyClose state = expectKeyClose
s = i s = i
case state == expectKeyClose && (b[i-1] != '\\' && b[i] == '"'): case state == expectKeyClose && (b[i] == '"' && (slash%2 == 0)):
state = expectColon state = expectColon
if _, err := h.Write(b[(s + 1):i]); err != nil { if _, err := h.Write(b[(s + 1):i]); err != nil {
return err return err
@ -66,7 +85,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
state = expectString state = expectString
s = i s = i
case state == expectString && (b[i-1] != '\\' && b[i] == '"'): case state == expectString && (b[i] == '"' && (slash%2 == 0)):
e = i e = i
case state == expectValue && b[i] == '[': case state == expectValue && b[i] == '[':
@ -160,6 +179,8 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
e = 0 e = 0
d = 0 d = 0
} }
slash = 0
} }
if ws == -1 || (ws == 0 && we == len(b)) { if ws == -1 || (ws == 0 && we == len(b)) {

View File

@ -11,9 +11,21 @@ func Strip(b []byte, path [][]byte) []byte {
pi := 0 pi := 0
pm := false pm := false
state := expectKey state := expectKey
instr := false
slash := 0
for i := 0; i < len(b); i++ { for i := 0; i < len(b); i++ {
if instr && b[i] == '\\' {
slash++
continue
}
if b[i] == '"' && (slash%2 == 0) {
instr = !instr
}
if state == expectObjClose || state == expectListClose { if state == expectObjClose || state == expectListClose {
if !instr {
switch b[i] { switch b[i] {
case '{', '[': case '{', '[':
d++ d++
@ -21,13 +33,14 @@ func Strip(b []byte, path [][]byte) []byte {
d-- d--
} }
} }
}
switch { switch {
case state == expectKey && b[i] == '"': case state == expectKey && b[i] == '"':
state = expectKeyClose state = expectKeyClose
s = i s = i
case state == expectKeyClose && (b[i-1] != '\\' && b[i] == '"'): case state == expectKeyClose && (b[i] == '"' && (slash%2 == 0)):
state = expectColon state = expectColon
if pi == len(path) { if pi == len(path) {
pi = 0 pi = 0
@ -44,7 +57,7 @@ func Strip(b []byte, path [][]byte) []byte {
state = expectString state = expectString
s = i s = i
case state == expectString && (b[i-1] != '\\' && b[i] == '"'): case state == expectString && (b[i] == '"' && (slash%2 == 0)):
e = i e = i
case state == expectValue && b[i] == '[': case state == expectValue && b[i] == '[':
@ -101,6 +114,8 @@ func Strip(b []byte, path [][]byte) []byte {
state = expectKey state = expectKey
e = 0 e = 0
} }
slash = 0
} }
return ob return ob

1
jsn/test7.json Normal file

File diff suppressed because one or more lines are too long

7
jsn/test8.json Normal file
View File

@ -0,0 +1,7 @@
{
"data": {
"slug": "javapackage-mainimport-nethttp-strings-githubcomgi-2786",
"published": true,
"body": "# \n\n```java\npackage main\n\nimport (\n \"net/http\"\n \"strings\"\n\n \"github.com/gin-gonic/gin\"\n)\n\nfunc main() {\n r := gin.Default()\n r.LoadHTMLGlob(\"templates/*\")\n\n r.GET(\"/\", handleIndex)\n r.GET(\"/to/:name\", handleIndex)\n r.Run()\n}\n\n// Hello is page data for the template\ntype Hello struct {\n Name string\n}\n\nfunc handleIndex(c *gin.Context) {\n name := c.Param(\"name\")\n if name != \"\" {\n name = strings.TrimPrefix(c.Param(\"name\"), \"/\")\n }\n c.HTML(http.StatusOK, \"hellofly.tmpl\", gin.H{\"Name\": name})\n}\n```\n\n\\"
}
}

View File

@ -35,34 +35,38 @@ func (c *compilerContext) renderBaseColumns(
c.renderComma(i) c.renderComma(i)
realColsRendered = append(realColsRendered, n) realColsRendered = append(realColsRendered, n)
colWithTable(c.w, ti.Name, cn) colWithTable(c.w, ti.Name, cn)
i++
continue
}
if isSearch && !isRealCol { } else {
switch { switch {
case cn == "search_rank": case isSearch && cn == "search_rank":
if err := c.renderColumnSearchRank(sel, ti, col, i); err != nil { if err := c.renderColumnSearchRank(sel, ti, col, i); err != nil {
return nil, false, err return nil, false, err
} }
i++
case strings.HasPrefix(cn, "search_headline_"): case isSearch && strings.HasPrefix(cn, "search_headline_"):
if err := c.renderColumnSearchHeadline(sel, ti, col, i); err != nil { if err := c.renderColumnSearchHeadline(sel, ti, col, i); err != nil {
return nil, false, err return nil, false, err
} }
i++
case cn == "__typename":
if err := c.renderColumnTypename(sel, ti, col, i); err != nil {
return nil, false, err
} }
} else {
case strings.HasSuffix(cn, "_cursor"):
continue
default:
if err := c.renderColumnFunction(sel, ti, col, i); err != nil { if err := c.renderColumnFunction(sel, ti, col, i); err != nil {
return nil, false, err return nil, false, err
} }
isAgg = true isAgg = true
}
}
i++ i++
} }
}
if isCursorPaged { if isCursorPaged {
if _, ok := colmap[ti.PrimaryCol.Key]; !ok { if _, ok := colmap[ti.PrimaryCol.Key]; !ok {
@ -148,6 +152,20 @@ func (c *compilerContext) renderColumnSearchHeadline(sel *qcode.Select, ti *DBTa
return nil return nil
} }
func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInfo, col qcode.Column, columnsRendered int) error {
if isColumnBlocked(sel, col.Name) {
return nil
}
c.renderComma(columnsRendered)
io.WriteString(c.w, `(`)
squoted(c.w, ti.Name)
io.WriteString(c.w, ` :: text)`)
alias(c.w, col.Name)
return nil
}
func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInfo, col qcode.Column, columnsRendered int) error { func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInfo, col qcode.Column, columnsRendered int) error {
pl := funcPrefixLen(col.Name) pl := funcPrefixLen(col.Name)
// if pl == 0 { // if pl == 0 {
@ -168,7 +186,7 @@ func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInf
return nil return nil
} }
fn := cn[0 : pl-1] fn := col.Name[:pl-1]
c.renderComma(columnsRendered) c.renderComma(columnsRendered)

View File

@ -17,6 +17,10 @@ const (
closeBlock = 500 closeBlock = 500
) )
var (
ErrAllTablesSkipped = errors.New("all tables skipped. cannot render query")
)
type Variables map[string]json.RawMessage type Variables map[string]json.RawMessage
type Config struct { type Config struct {
@ -89,7 +93,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
io.WriteString(c.w, `SELECT json_build_object(`) io.WriteString(c.w, `SELECT json_build_object(`)
for _, id := range qc.Roots { for _, id := range qc.Roots {
root := &qc.Selects[id] root := &qc.Selects[id]
if root.SkipRender { if root.SkipRender || len(root.Cols) == 0 {
continue continue
} }
@ -100,19 +104,14 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
io.WriteString(c.w, `, `) io.WriteString(c.w, `, `)
} }
ti, err := c.schema.GetTable(root.Name) c.renderRootSelect(root)
if err != nil {
return 0, err
}
c.renderRootSelect(root, ti)
i++ i++
} }
io.WriteString(c.w, `) as "__root" FROM `) io.WriteString(c.w, `) as "__root" FROM `)
if i == 0 { if i == 0 {
return 0, errors.New("all tables skipped. cannot render query") return 0, ErrAllTablesSkipped
} }
var ignored uint32 var ignored uint32
@ -127,6 +126,10 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
if id < closeBlock { if id < closeBlock {
sel := &c.s[id] sel := &c.s[id]
if len(sel.Cols) == 0 {
continue
}
ti, err := c.schema.GetTable(sel.Name) ti, err := c.schema.GetTable(sel.Name)
if err != nil { if err != nil {
return 0, err return 0, err
@ -136,11 +139,11 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
io.WriteString(c.w, `(`) io.WriteString(c.w, `(`)
} else { } else {
c.renderLateralJoin(sel) c.renderLateralJoin(sel)
}
if !ti.Singular { if !ti.Singular {
c.renderPluralSelect(sel, ti) c.renderPluralSelect(sel, ti)
} }
}
skipped, err := c.renderSelect(sel, ti, vars) skipped, err := c.renderSelect(sel, ti, vars)
if err != nil { if err != nil {
@ -169,6 +172,11 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
return 0, err return 0, err
} }
if !ti.Singular {
io.WriteString(c.w, `)`)
aliasWithID(c.w, "__sel", sel.ID)
}
if sel.ParentID == -1 { if sel.ParentID == -1 {
io.WriteString(c.w, `)`) io.WriteString(c.w, `)`)
aliasWithID(c.w, "__sel", sel.ID) aliasWithID(c.w, "__sel", sel.ID)
@ -177,10 +185,6 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
io.WriteString(c.w, `, `) io.WriteString(c.w, `, `)
} }
} else { } else {
if !ti.Singular {
io.WriteString(c.w, `)`)
aliasWithID(c.w, "__sel", sel.ID)
}
c.renderLateralJoinClose(sel) c.renderLateralJoinClose(sel)
} }
@ -233,50 +237,25 @@ func (c *compilerContext) renderPluralSelect(sel *qcode.Select, ti *DBTableInfo)
return nil return nil
} }
func (c *compilerContext) renderRootSelect(sel *qcode.Select, ti *DBTableInfo) error { func (c *compilerContext) renderRootSelect(sel *qcode.Select) error {
io.WriteString(c.w, `'`) io.WriteString(c.w, `'`)
io.WriteString(c.w, sel.FieldName) io.WriteString(c.w, sel.FieldName)
io.WriteString(c.w, `', `) io.WriteString(c.w, `', `)
if ti.Singular {
io.WriteString(c.w, `"__sel_`) io.WriteString(c.w, `"__sel_`)
int2string(c.w, sel.ID) int2string(c.w, sel.ID)
io.WriteString(c.w, `"."json"`) io.WriteString(c.w, `"."json"`)
} else {
io.WriteString(c.w, `coalesce(json_agg("__sel_`)
int2string(c.w, sel.ID)
io.WriteString(c.w, `"."json"), '[]')`)
if sel.Paging.Type != qcode.PtOffset { if sel.Paging.Type != qcode.PtOffset {
n := 0
// check if primary key already included in order by
// query argument
for _, ob := range sel.OrderBy {
if ob.Col == ti.PrimaryCol.Key {
n = 1
break
}
}
if n == 1 {
n = len(sel.OrderBy)
} else {
n = len(sel.OrderBy) + 1
}
io.WriteString(c.w, `, '`) io.WriteString(c.w, `, '`)
io.WriteString(c.w, sel.FieldName) io.WriteString(c.w, sel.FieldName)
io.WriteString(c.w, `_cursor', CONCAT_WS(','`) io.WriteString(c.w, `_cursor', `)
for i := 0; i < n; i++ {
io.WriteString(c.w, `, max("__cur_`) io.WriteString(c.w, `"__sel_`)
int2string(c.w, int32(i)) int2string(c.w, sel.ID)
io.WriteString(c.w, `")`) io.WriteString(c.w, `"."cursor"`)
}
io.WriteString(c.w, `)`)
} }
}
return nil return nil
} }
@ -296,56 +275,27 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
if sel.Paging.Type != qcode.PtOffset { if sel.Paging.Type != qcode.PtOffset {
colmap[ti.PrimaryCol.Key] = struct{}{} colmap[ti.PrimaryCol.Key] = struct{}{}
addPrimaryKey := true
addToOrderBy := true
for _, ob := range sel.OrderBy { for _, ob := range sel.OrderBy {
if ob.Col == ti.PrimaryCol.Key { if ob.Col == ti.PrimaryCol.Key {
addToOrderBy = false addPrimaryKey = false
} break
if sel.Paging.Cursor {
fil := qcode.AddFilter(sel)
fil.Col = ob.Col
fil.Type = qcode.ValRef
fil.Table = "__cur"
fil.Val = ob.Col
switch ob.Order {
case qcode.OrderAsc:
fil.Op = qcode.OpGreaterThan
case qcode.OrderDesc:
fil.Op = qcode.OpLesserThan
}
} }
} }
if addToOrderBy { if addPrimaryKey {
var op qcode.ExpOp ob := &qcode.OrderBy{Col: ti.PrimaryCol.Name, Order: qcode.OrderAsc}
ob := &qcode.OrderBy{Col: ti.PrimaryCol.Name} if sel.Paging.Type == qcode.PtBackward {
sel.OrderBy = append(sel.OrderBy, ob)
switch sel.Paging.Type {
case qcode.PtForward:
op = qcode.OpGreaterThan
ob.Order = qcode.OrderAsc
case qcode.PtBackward:
op = qcode.OpLesserThan
ob.Order = qcode.OrderDesc ob.Order = qcode.OrderDesc
} }
sel.OrderBy = append(sel.OrderBy, ob)
}
}
if sel.Paging.Cursor { if sel.Paging.Cursor {
fil := qcode.AddFilter(sel) c.addSeekPredicate(sel)
fil.Op = op
fil.Col = ti.PrimaryCol.Name
fil.Type = qcode.ValRef
fil.Table = "__cur"
fil.Val = ti.PrimaryCol.Name
}
}
} }
for _, id := range sel.Children { for _, id := range sel.Children {
@ -393,6 +343,72 @@ func (c *compilerContext) initSelect(sel *qcode.Select, ti *DBTableInfo, vars Va
return skipped, cols, nil return skipped, cols, nil
} }
// This
// (A, B, C) >= (X, Y, Z)
//
// Becomes
// (A > X)
// OR ((A = X) AND (B > Y))
// OR ((A = X) AND (B = Y) AND (C > Z))
// OR ((A = X) AND (B = Y) AND (C = Z))
func (c *compilerContext) addSeekPredicate(sel *qcode.Select) error {
var or, and *qcode.Exp
obLen := len(sel.OrderBy)
if obLen > 1 {
or = qcode.NewFilter()
or.Op = qcode.OpOr
}
for i := 0; i < obLen; i++ {
if i > 0 {
and = qcode.NewFilter()
and.Op = qcode.OpAnd
}
for n, ob := range sel.OrderBy {
f := qcode.NewFilter()
f.Col = ob.Col
f.Type = qcode.ValRef
f.Table = "__cur"
f.Val = ob.Col
if obLen == 1 {
qcode.AddFilter(sel, f)
return nil
}
switch {
case i > 0 && n != i:
f.Op = qcode.OpEquals
case ob.Order == qcode.OrderDesc:
f.Op = qcode.OpLesserThan
default:
f.Op = qcode.OpGreaterThan
}
if and != nil {
and.Children = append(and.Children, f)
} else {
or.Children = append(or.Children, f)
}
if n == i {
break
}
}
if and != nil {
or.Children = append(or.Children, and)
}
}
qcode.AddFilter(sel, or)
return nil
}
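As a concrete instance of the expansion in the comment above: for `ORDER BY price DESC, id ASC` and a cursor row `(curPrice, curID)`, the generated filter boils down to the predicate sketched below (plain Go used only to illustrate the logic; the compiler of course emits SQL):

```go
package main

import "fmt"

// afterCursor mirrors the seek predicate for ORDER BY price DESC, id ASC:
// (price < curPrice) OR (price = curPrice AND id > curID).
func afterCursor(price, id, curPrice, curID int) bool {
	return price < curPrice || // first order-by column is DESC, so "lesser than"
		(price == curPrice && id > curID) // tie on price: fall back to id ASC
}

func main() {
	fmt.Println(afterCursor(9, 7, 10, 5))  // true: price is past the cursor row
	fmt.Println(afterCursor(10, 7, 10, 5)) // true: same price, id is past the cursor
	fmt.Println(afterCursor(10, 3, 10, 5)) // false: not past the cursor yet
}
```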
func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) (uint32, error) { func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars Variables) (uint32, error) {
var rel *DBRel var rel *DBRel
var err error var err error
@ -428,31 +444,6 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
} }
} }
//if !ti.Singular {
// io.WriteString(c.w, `SELECT coalesce(json_agg(json_build_object(`)
// if err := c.renderColumns(sel, ti, skipped); err != nil {
// return 0, err
// }
// io.WriteString(c.w, `)), '[]') AS "json"`)
// if sel.Paging.Type != qcode.PtOffset {
// for i, ob := range sel.OrderBy {
// io.WriteString(c.w, `, LAST_VALUE(`)
// colWithTableID(c.w, ti.Name, sel.ID, ob.Col)
// io.WriteString(c.w, `) OVER() AS "__cur_`)
// int2string(c.w, int32(i))
// io.WriteString(c.w, `"`)
// }
// io.WriteString(c.w, `LAST_VALUE(`)
// colWithTableID(c.w, ti.Name, sel.ID, ti.PrimaryCol.Name)
// io.WriteString(c.w, `) OVER() AS "__cursor_`)
// int2string(c.w, int32(len(sel.OrderBy)))
// io.WriteString(c.w, `"`)
//}
//}
io.WriteString(c.w, ` FROM (`) io.WriteString(c.w, ` FROM (`)
// FROM (SELECT .... ) // FROM (SELECT .... )
@ -519,23 +510,26 @@ func (c *compilerContext) renderJoinByName(table, parent string, id int32) error
func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32) error { func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32) error {
i := 0 i := 0
var cn string
for _, col := range sel.Cols { for _, col := range sel.Cols {
n := funcPrefixLen(col.Name) if n := funcPrefixLen(col.Name); n != 0 {
if n != 0 {
if !sel.Functions { if !sel.Functions {
continue continue
} }
if len(sel.Allowed) != 0 { cn = col.Name[n:]
if _, ok := sel.Allowed[col.Name[n:]]; !ok {
continue
}
}
} else { } else {
if len(sel.Allowed) != 0 { cn = col.Name
if _, ok := sel.Allowed[col.Name]; !ok {
if strings.HasSuffix(cn, "_cursor") {
continue continue
} }
} }
if len(sel.Allowed) != 0 {
if _, ok := sel.Allowed[cn]; !ok {
continue
}
} }
if i != 0 { if i != 0 {
@ -586,9 +580,6 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
continue continue
} }
childSel := &c.s[id] childSel := &c.s[id]
if childSel.SkipRender {
continue
}
if i != 0 { if i != 0 {
io.WriteString(c.w, ", ") io.WriteString(c.w, ", ")
@ -596,6 +587,11 @@ func (c *compilerContext) renderJoinColumns(sel *qcode.Select, ti *DBTableInfo,
squoted(c.w, childSel.FieldName) squoted(c.w, childSel.FieldName)
if childSel.SkipRender {
io.WriteString(c.w, `, NULL`)
continue
}
io.WriteString(c.w, `, "__sel_`) io.WriteString(c.w, `, "__sel_`)
int2string(c.w, childSel.ID) int2string(c.w, childSel.ID)
io.WriteString(c.w, `"."json"`) io.WriteString(c.w, `"."json"`)
@ -988,8 +984,12 @@ func (c *compilerContext) renderOp(ex *qcode.Exp, ti *DBTableInfo) error {
switch ex.Op { switch ex.Op {
case qcode.OpEquals: case qcode.OpEquals:
io.WriteString(c.w, `IS NOT DISTINCT FROM`) io.WriteString(c.w, `=`)
case qcode.OpNotEquals: case qcode.OpNotEquals:
io.WriteString(c.w, `!=`)
case qcode.OpNotDistinct:
io.WriteString(c.w, `IS NOT DISTINCT FROM`)
case qcode.OpDistinct:
io.WriteString(c.w, `IS DISTINCT FROM`) io.WriteString(c.w, `IS DISTINCT FROM`)
case qcode.OpGreaterOrEquals: case qcode.OpGreaterOrEquals:
io.WriteString(c.w, `>=`) io.WriteString(c.w, `>=`)
@ -1140,9 +1140,15 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
switch ex.Type { switch ex.Type {
case qcode.ValVar: case qcode.ValVar:
if val, ok := vars[ex.Val]; ok { val, ok := vars[ex.Val]
switch {
case ok && strings.HasPrefix(val, "sql:"):
io.WriteString(c.w, ` (`)
io.WriteString(c.w, val[4:])
io.WriteString(c.w, `)`)
case ok:
squoted(c.w, val) squoted(c.w, val)
} else { default:
io.WriteString(c.w, ` '{{`) io.WriteString(c.w, ` '{{`)
io.WriteString(c.w, ex.Val) io.WriteString(c.w, ex.Val)
io.WriteString(c.w, `}}'`) io.WriteString(c.w, `}}'`)
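The `renderVal` change above implements commit a1fb89b762 ("Add support for SQL in variables"): a variable whose value starts with `sql:` is inlined as a parenthesized SQL expression rather than a quoted literal. Roughly (assumed illustration of the rule, not the compiler's API):

```go
package main

import (
	"fmt"
	"strings"
)

// renderVar mimics the branch added in renderVal: "sql:"-prefixed values are
// inlined as a parenthesized expression, everything else is a quoted literal.
func renderVar(val string) string {
	if strings.HasPrefix(val, "sql:") {
		return "(" + val[4:] + ")"
	}
	return "'" + val + "'"
}

func main() {
	fmt.Println(renderVar("5"))                                // '5'
	fmt.Println(renderVar("sql:select id from users limit 1")) // (select id from users limit 1)
}
```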

View File

@ -327,7 +327,7 @@ func jsonColumnAsTable(t *testing.T) {
compileGQLToPSQL(t, gql, nil, "admin") compileGQLToPSQL(t, gql, nil, "admin")
} }
func skipUserIDForAnonRole(t *testing.T) { func nullForAuthRequiredInAnon(t *testing.T) {
gql := `query { gql := `query {
products { products {
id id
@ -387,7 +387,7 @@ func TestCompileQuery(t *testing.T) {
t.Run("multiRoot", multiRoot) t.Run("multiRoot", multiRoot)
t.Run("jsonColumnAsTable", jsonColumnAsTable) t.Run("jsonColumnAsTable", jsonColumnAsTable)
t.Run("withCursor", withCursor) t.Run("withCursor", withCursor)
t.Run("skipUserIDForAnonRole", skipUserIDForAnonRole) t.Run("nullForAuthRequiredInAnon", nullForAuthRequiredInAnon)
t.Run("blockedQuery", blockedQuery) t.Run("blockedQuery", blockedQuery)
t.Run("blockedFunctions", blockedFunctions) t.Run("blockedFunctions", blockedFunctions)
} }

View File

@ -66,7 +66,14 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
} }
for i, t := range info.Tables { for i, t := range info.Tables {
err := schema.updateRelationships(t, info.Columns[i]) err := schema.firstDegreeRels(t, info.Columns[i])
if err != nil {
return nil, err
}
}
for i, t := range info.Tables {
err := schema.secondDegreeRels(t, info.Columns[i])
if err != nil { if err != nil {
return nil, err return nil, err
} }
@ -131,8 +138,7 @@ func (s *DBSchema) addTable(
return nil return nil
} }
func (s *DBSchema) updateRelationships(t DBTable, cols []DBColumn) error { func (s *DBSchema) firstDegreeRels(t DBTable, cols []DBColumn) error {
jcols := make([]DBColumn, 0, len(cols))
ct := t.Key ct := t.Key
cti, ok := s.t[ct] cti, ok := s.t[ct]
if !ok { if !ok {
@ -230,6 +236,51 @@ func (s *DBSchema) updateRelationships(t DBTable, cols []DBColumn) error {
if err := s.SetRel(ft, ct, rel2); err != nil { if err := s.SetRel(ft, ct, rel2); err != nil {
return err return err
} }
}
return nil
}
func (s *DBSchema) secondDegreeRels(t DBTable, cols []DBColumn) error {
jcols := make([]DBColumn, 0, len(cols))
ct := t.Key
cti, ok := s.t[ct]
if !ok {
return fmt.Errorf("invalid foreign key table '%s'", ct)
}
for i := range cols {
c := cols[i]
if len(c.FKeyTable) == 0 {
continue
}
// Foreign key column name
ft := strings.ToLower(c.FKeyTable)
ti, ok := s.t[ft]
if !ok {
return fmt.Errorf("invalid foreign key table '%s'", ft)
}
// This is an embedded relationship like when a json/jsonb column
// is exposed as a table
if c.Name == c.FKeyTable && len(c.FKeyColID) == 0 {
continue
}
if len(c.FKeyColID) == 0 {
continue
}
// Foreign key column id
fcid := c.FKeyColID[0]
if _, ok := ti.ColIDMap[fcid]; !ok {
return fmt.Errorf("invalid foreign key column id '%d' for table '%s'",
fcid, ti.Name)
}
jcols = append(jcols, c) jcols = append(jcols, c)
} }
@ -322,6 +373,9 @@ func (s *DBSchema) GetTable(table string) (*DBTableInfo, error) {
} }
func (s *DBSchema) SetRel(child, parent string, rel *DBRel) error { func (s *DBSchema) SetRel(child, parent string, rel *DBRel) error {
sp := strings.ToLower(flect.Singularize(parent))
pp := strings.ToLower(flect.Pluralize(parent))
sc := strings.ToLower(flect.Singularize(child)) sc := strings.ToLower(flect.Singularize(child))
pc := strings.ToLower(flect.Pluralize(child)) pc := strings.ToLower(flect.Pluralize(child))
@ -333,9 +387,6 @@ func (s *DBSchema) SetRel(child, parent string, rel *DBRel) error {
s.rm[pc] = make(map[string]*DBRel) s.rm[pc] = make(map[string]*DBRel)
} }
sp := strings.ToLower(flect.Singularize(parent))
pp := strings.ToLower(flect.Pluralize(parent))
if _, ok := s.rm[sc][sp]; !ok { if _, ok := s.rm[sc][sp]; !ok {
s.rm[sc][sp] = rel s.rm[sc][sp] = rel
} }
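Splitting `updateRelationships` into `firstDegreeRels` and `secondDegreeRels` registers every direct foreign-key relationship before any many-to-many ("through") relationship is derived, which appears to be what fixes the detection issue from commit 7a450b16ba. A toy sketch of the two-pass idea (hypothetical types; the real code works on DBTable/DBColumn and the schema's relationship maps):

```go
package main

import "fmt"

// table is a stand-in for DBTable/DBColumn: fks maps a column to the table it references.
type table struct {
	name string
	fks  map[string]string
}

func main() {
	tables := []table{
		{name: "purchases", fks: map[string]string{"customer_id": "customers", "product_id": "products"}},
		{name: "customers"},
		{name: "products"},
	}

	// Pass 1: record every direct (first-degree) relationship.
	direct := map[string][]string{}
	for _, t := range tables {
		for _, ref := range t.fks {
			direct[t.name] = append(direct[t.name], ref)
		}
	}

	// Pass 2: only now derive "through" relationships via join tables, so the
	// result no longer depends on the order in which tables were scanned.
	for _, t := range tables {
		if refs := direct[t.name]; len(refs) == 2 {
			fmt.Printf("%s <--(through %s)--> %s\n", refs[0], t.name, refs[1])
		}
	}
}
```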

View File

@ -19,6 +19,10 @@ func (rt RelType) String() string {
} }
func (re *DBRel) String() string { func (re *DBRel) String() string {
if re.Type == RelOneToManyThrough {
return fmt.Sprintf("'%s.%s' --(Through: %s)--> '%s.%s'",
re.Left.Table, re.Left.Col, re.Through, re.Right.Table, re.Right.Col)
}
return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'", return fmt.Sprintf("'%s.%s' --(%s)--> '%s.%s'",
re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col) re.Left.Table, re.Left.Col, re.Type, re.Right.Table, re.Right.Col)
} }

View File

@ -92,7 +92,14 @@ func getTestSchema() *DBSchema {
} }
for i, t := range tables { for i, t := range tables {
err := schema.updateRelationships(t, columns[i]) err := schema.firstDegreeRels(t, columns[i])
if err != nil {
log.Fatal(err)
}
}
for i, t := range tables {
err := schema.secondDegreeRels(t, columns[i])
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
} }

View File

@ -39,62 +39,62 @@ WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT IN
=== RUN TestCompileMutate/bulkUpsert === RUN TestCompileMutate/bulkUpsert
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_recordset(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sel_0" WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_recordset(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sel_0"
=== RUN TestCompileMutate/delete === RUN TestCompileMutate/delete
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") IS NOT DISTINCT FROM '1' :: bigint)) RETURNING "products".*) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sel_0" WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sel_0"
--- PASS: TestCompileMutate (0.00s) --- PASS: TestCompileMutate (0.01s)
--- PASS: TestCompileMutate/singleUpsert (0.00s) --- PASS: TestCompileMutate/singleUpsert (0.00s)
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s) --- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
--- PASS: TestCompileMutate/bulkUpsert (0.00s) --- PASS: TestCompileMutate/bulkUpsert (0.00s)
--- PASS: TestCompileMutate/delete (0.00s) --- PASS: TestCompileMutate/delete (0.00s)
=== RUN TestCompileQuery === RUN TestCompileQuery
=== RUN TestCompileQuery/withComplexArgs === RUN TestCompileQuery/withComplexArgs
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'price', "products_0"."price") AS "json" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'price', "products_0"."price") AS "json" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/withWhereAndList === RUN TestCompileQuery/withWhereAndList
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'price', "products_0"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'price', "products_0"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/withWhereIsNull === RUN TestCompileQuery/withWhereIsNull
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'price', "products_0"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'price', "products_0"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/withWhereMultiOr === RUN TestCompileQuery/withWhereMultiOr
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'price', "products_0"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'price', "products_0"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/fetchByID === RUN TestCompileQuery/fetchByID
SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sel_0"
=== RUN TestCompileQuery/searchQuery
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'search_rank', "products_0"."search_rank", 'search_headline_description', "products_0"."search_headline_description") AS "json" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'search_rank', "products_0"."search_rank", 'search_headline_description', "products_0"."search_headline_description") AS "json" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/oneToMany
SELECT json_build_object('users', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('email', "users_0"."email", 'products', "__sel_1"."json") AS "json" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_1"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_1"."name", 'price', "products_1"."price") AS "json" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sel_1") AS "__sel_1" ON ('true')) AS "__sel_0" SELECT json_build_object('users', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "users_0"."email", 'products', "__sel_1"."json") AS "json" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_1"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_1"."name", 'price', "products_1"."price") AS "json" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sel_1") AS "__sel_1" ON ('true')) AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/oneToManyReverse
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('name', "products_0"."name", 'price', "products_0"."price", 'users', "__sel_1"."json") AS "json" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_1"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "users_1"."email") AS "json" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1") AS "__sel_1") AS "__sel_1" ON ('true')) AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_0"."name", 'price', "products_0"."price", 'users', "__sel_1"."json") AS "json" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_1"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "users_1"."email") AS "json" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1") AS "__sel_1") AS "__sel_1" ON ('true')) AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/oneToManyArray
SELECT json_build_object('tags', coalesce(json_agg("__sel_0"."json"), '[]'), 'product', "__sel_2"."json") as "__root" FROM (SELECT json_build_object('name', "products_2"."name", 'price', "products_2"."price", 'tags', "__sel_3"."json") AS "json" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_3"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "tags_3"."id", 'name', "tags_3"."name") AS "json" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sel_3") AS "__sel_3" ON ('true')) AS "__sel_2", (SELECT json_build_object('name', "tags_0"."name", 'product', "__sel_1"."json") AS "json" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('name', "products_1"."name") AS "json" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sel_1" ON ('true')) AS "__sel_0" SELECT json_build_object('tags', "__sel_0"."json", 'product', "__sel_2"."json") as "__root" FROM (SELECT json_build_object('name', "products_2"."name", 'price', "products_2"."price", 'tags', "__sel_3"."json") AS "json" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_3"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "tags_3"."id", 'name', "tags_3"."name") AS "json" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sel_3") AS "__sel_3" ON ('true')) AS "__sel_2", (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "tags_0"."name", 'product', "__sel_1"."json") AS "json" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('name', "products_1"."name") AS "json" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sel_1" ON ('true')) AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/manyToMany
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('name', "products_0"."name", 'customers', "__sel_1"."json") AS "json" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_1"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "customers_1"."email", 'full_name', "customers_1"."full_name") AS "json" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sel_1") AS "__sel_1" ON ('true')) AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_0"."name", 'customers', "__sel_1"."json") AS "json" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_1"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "customers_1"."email", 'full_name', "customers_1"."full_name") AS "json" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sel_1") AS "__sel_1" ON ('true')) AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/manyToManyReverse
SELECT json_build_object('customers', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('email', "customers_0"."email", 'full_name', "customers_0"."full_name", 'products', "__sel_1"."json") AS "json" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_1"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_1"."name") AS "json" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sel_1") AS "__sel_1" ON ('true')) AS "__sel_0" SELECT json_build_object('customers', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "customers_0"."email", 'full_name', "customers_0"."full_name", 'products', "__sel_1"."json") AS "json" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_1"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_1"."name") AS "json" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sel_1") AS "__sel_1" ON ('true')) AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/aggFunction
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('name', "products_0"."name", 'count_price', "products_0"."count_price") AS "json" FROM (SELECT "products"."name", price("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_0"."name", 'count_price', "products_0"."count_price") AS "json" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/aggFunctionBlockedByCol
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('name', "products_0"."name") AS "json" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_0"."name") AS "json" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/aggFunctionDisabled
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('name', "products_0"."name") AS "json" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "products_0"."name") AS "json" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/aggFunctionWithFilter
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'max_price', "products_0"."max_price") AS "json" FROM (SELECT "products"."id", pri("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "products_0"."id", 'max_price', "products_0"."max_price") AS "json" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/syntheticTables
SELECT json_build_object('me', "__sel_0"."json") as "__root" FROM (SELECT json_build_object() AS "json" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") IS NOT DISTINCT FROM '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sel_0" SELECT json_build_object('me', "__sel_0"."json") as "__root" FROM (SELECT json_build_object() AS "json" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sel_0"
=== RUN TestCompileQuery/queryWithVariables
SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") IS NOT DISTINCT FROM '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sel_0"
=== RUN TestCompileQuery/withWhereOnRelations
SELECT json_build_object('users', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "users_0"."id", 'email', "users_0"."email") AS "json" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sel_0" SELECT json_build_object('users', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "users_0"."id", 'email', "users_0"."email") AS "json" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/multiRoot
SELECT json_build_object('customer', "__sel_0"."json", 'user', "__sel_1"."json", 'product', "__sel_2"."json") as "__root" FROM (SELECT json_build_object('id', "products_2"."id", 'name', "products_2"."name", 'customers', "__sel_3"."json", 'customer', "__sel_4"."json") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT json_build_object('email', "customers_4"."email") AS "json" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sel_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_3"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "customers_3"."email") AS "json" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sel_3") AS "__sel_3" ON ('true')) AS "__sel_2", (SELECT json_build_object('id', "users_1"."id", 'email', "users_1"."email") AS "json" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sel_1", (SELECT json_build_object('id', "customers_0"."id") AS "json" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sel_0" SELECT json_build_object('customer', "__sel_0"."json", 'user', "__sel_1"."json", 'product', "__sel_2"."json") as "__root" FROM (SELECT json_build_object('id', "products_2"."id", 'name', "products_2"."name", 'customers', "__sel_3"."json", 'customer', "__sel_4"."json") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT json_build_object('email', "customers_4"."email") AS "json" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sel_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_3"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "customers_3"."email") AS "json" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sel_3") AS "__sel_3" ON ('true')) AS "__sel_2", (SELECT json_build_object('id', "users_1"."id", 'email', "users_1"."email") AS "json" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sel_1", (SELECT json_build_object('id', "customers_0"."id") AS "json" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sel_0"
=== RUN TestCompileQuery/jsonColumnAsTable
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'tag_count', "__sel_1"."json") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('count', "tag_count_1"."count", 'tags', "__sel_2"."json") AS "json" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_2"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "tags_2"."name") AS "json" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sel_2") AS "__sel_2" ON ('true')) AS "__sel_1" ON ('true')) AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'tag_count', "__sel_1"."json") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('count', "tag_count_1"."count", 'tags', "__sel_2"."json") AS "json" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("__sel_2"."json"), '[]') as "json" FROM (SELECT json_build_object('name', "tags_2"."name") AS "json" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sel_2") AS "__sel_2" ON ('true')) AS "__sel_1" ON ('true')) AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/withCursor
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]'), 'products_cursor', CONCAT_WS(',', max("__cur_0"), max("__cur_1"))) as "__root" FROM (SELECT json_build_object('name', "products_0"."name") AS "json", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."id") > "__cur"."id" :: bigint) AND (("products"."price") < "__cur"."price" :: numeric(7,2)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json", 'products_cursor', "__sel_0"."cursor") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT json_build_object('name', "products_0"."name") AS "json", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/skipUserIDForAnonRole
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
SELECT json_build_object('products', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0" SELECT json_build_object('products', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'user', NULL) AS "json" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sel_0") AS "__sel_0"
=== RUN TestCompileQuery/blockedQuery
SELECT json_build_object('user', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "users_0"."id", 'full_name', "users_0"."full_name", 'email', "users_0"."email") AS "json" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sel_0" SELECT json_build_object('user', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "users_0"."id", 'full_name', "users_0"."full_name", 'email', "users_0"."email") AS "json" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sel_0"
=== RUN TestCompileQuery/blockedFunctions
SELECT json_build_object('users', coalesce(json_agg("__sel_0"."json"), '[]')) as "__root" FROM (SELECT json_build_object('email', "users_0"."email") AS "json" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sel_0" SELECT json_build_object('users', "__sel_0"."json") as "__root" FROM (SELECT coalesce(json_agg("__sel_0"."json"), '[]') as "json" FROM (SELECT json_build_object('email', "users_0"."email") AS "json" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sel_0") AS "__sel_0"
--- PASS: TestCompileQuery (0.02s)
--- PASS: TestCompileQuery (0.03s)
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
--- PASS: TestCompileQuery/withWhereIsNull (0.00s)
@ -116,19 +116,19 @@ SELECT json_build_object('users', coalesce(json_agg("__sel_0"."json"), '[]')) as
--- PASS: TestCompileQuery/multiRoot (0.00s)
--- PASS: TestCompileQuery/jsonColumnAsTable (0.00s)
--- PASS: TestCompileQuery/withCursor (0.00s)
--- PASS: TestCompileQuery/skipUserIDForAnonRole (0.00s)
--- PASS: TestCompileQuery/nullForAuthRequiredInAnon (0.00s)
--- PASS: TestCompileQuery/blockedQuery (0.00s)
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
=== RUN TestCompileUpdate
=== RUN TestCompileUpdate/singleUpdate
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE ((("products"."id") IS NOT DISTINCT FROM '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sel_0" WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name") AS "json" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sel_0"
=== RUN TestCompileUpdate/simpleUpdateWithPresets
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT "t"."name", "t"."price", 'now' :: timestamp without time zone FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") IS NOT DISTINCT FROM '{{user_id}}' :: bigint) RETURNING "products".*) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id") AS "json" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sel_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT "t"."name", "t"."price", 'now' :: timestamp without time zone FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id") AS "json" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sel_0"
=== RUN TestCompileUpdate/nestedUpdateManyToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT json_build_object('purchase', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('sale_type', "purchases_0"."sale_type", 'quantity', "purchases_0"."quantity", 'due_date', "purchases_0"."due_date", 'product', "__sel_1"."json", 'customer', "__sel_2"."json") AS "json" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "customers_2"."id", 'full_name', "customers_2"."full_name", 'email', "customers_2"."email") AS "json" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sel_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "products_1"."id", 'name', "products_1"."name", 'price', "products_1"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sel_1" ON ('true')) AS "__sel_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT json_build_object('purchase', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('sale_type', "purchases_0"."sale_type", 'quantity', "purchases_0"."quantity", 'due_date', "purchases_0"."due_date", 'product', "__sel_1"."json", 'customer', "__sel_2"."json") AS "json" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "customers_2"."id", 'full_name', 
"customers_2"."full_name", 'email', "customers_2"."email") AS "json" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sel_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "products_1"."id", 'name', "products_1"."name", 'price', "products_1"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sel_1" ON ('true')) AS "__sel_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT json_build_object('purchase', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('sale_type', "purchases_0"."sale_type", 'quantity', "purchases_0"."quantity", 'due_date', "purchases_0"."due_date", 'product', "__sel_1"."json", 'customer', "__sel_2"."json") AS "json" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "customers_2"."id", 'full_name', "customers_2"."full_name", 'email', "customers_2"."email") AS "json" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sel_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "products_1"."id", 'name', "products_1"."name", 'price', "products_1"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sel_1" ON ('true')) AS "__sel_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT json_build_object('purchase', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('sale_type', "purchases_0"."sale_type", 'quantity', "purchases_0"."quantity", 'due_date', "purchases_0"."due_date", 'product', "__sel_1"."json", 'customer', "__sel_2"."json") AS "json" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "customers_2"."id", 'full_name', 
"customers_2"."full_name", 'email', "customers_2"."email") AS "json" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sel_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "products_1"."id", 'name', "products_1"."name", 'price', "products_1"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sel_1" ON ('true')) AS "__sel_0"
=== RUN TestCompileUpdate/nestedUpdateOneToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t) WHERE (("users"."id") IS NOT DISTINCT FROM '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT json_build_object('user', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "users_0"."id", 'full_name', "users_0"."full_name", 'email', "users_0"."email", 'product', "__sel_1"."json") AS "json" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "products_1"."id", 'name', "products_1"."name", 'price', "products_1"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sel_1" ON ('true')) AS "__sel_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT json_build_object('user', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "users_0"."id", 'full_name', "users_0"."full_name", 'email', "users_0"."email", 'product', "__sel_1"."json") AS "json" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "products_1"."id", 'name', "products_1"."name", 'price', "products_1"."price") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sel_1" ON ('true')) AS "__sel_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOne
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT "t"."email" FROM "_sg_input" i, json_populate_record(NULL::users, i.j->'user') t) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'user', "__sel_1"."json") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "users_1"."id", 'full_name', "users_1"."full_name", 'email', "users_1"."email") AS "json" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sel_1" ON ('true')) AS "__sel_0" WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT "t"."email" FROM "_sg_input" i, json_populate_record(NULL::users, i.j->'user') t) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT json_build_object('product', "__sel_0"."json") as "__root" FROM (SELECT json_build_object('id', "products_0"."id", 'name', "products_0"."name", 'user', "__sel_1"."json") AS "json" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT json_build_object('id', "users_1"."id", 'full_name', "users_1"."full_name", 'email', "users_1"."email") AS "json" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sel_1" ON ('true')) AS "__sel_0"
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
@ -148,4 +148,4 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FR
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
ok github.com/dosco/super-graph/psql 0.130s
ok github.com/dosco/super-graph/psql (cached)


@ -222,6 +222,10 @@ func (c *compilerContext) renderDelete(qc *qcode.QCode, w io.Writer,
quoted(c.w, ti.Name)
io.WriteString(c.w, ` WHERE `)
if root.Where == nil {
return 0, errors.New("'where' clause missing in delete mutation")
}
if err := c.renderWhere(root, ti); err != nil {
return 0, err
}


@ -17,7 +17,7 @@ type parserType int32
const (
maxFields = 100
maxArgs = 10
maxArgs = 25
)
const (
@ -242,7 +242,8 @@ func (p *Parser) parseOp() (*Operation, error) {
if p.peek(itemArgsOpen) {
p.ignore()
op.Args, err = p.parseArgs(op.Args)
op.Args, err = p.parseOpParams(op.Args)
if err != nil {
return nil, err
}
@ -338,6 +339,13 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
if p.peek(itemObjOpen) {
p.ignore()
st.Push(f.ID)
} else if p.peek(itemObjClose) {
if st.Len() == 0 {
break
} else {
continue
}
}
}
@ -371,6 +379,22 @@ func (p *Parser) parseField(f *Field) error {
return nil
}
func (p *Parser) parseOpParams(args []Arg) ([]Arg, error) {
for {
if len(args) >= maxArgs {
return nil, fmt.Errorf("too many args (max %d)", maxArgs)
}
if p.peek(itemArgsClose) {
p.ignore()
break
}
p.next()
}
return args, nil
}
func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
var err error
@ -383,6 +407,7 @@ func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
p.ignore()
break
}
if !p.peek(itemName) {
return nil, errors.New("expecting an argument name")
}
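A short note on what this parser change amounts to: parseOpParams simply consumes the tokens of an operation header up to the closing parenthesis, so variable declarations such as "query getProducts($price: Float!, $currency: String) { ... }" (an illustrative query, not one taken from the test suite) are no longer fed through parseArgs, and the per-field argument limit is raised from 10 to 25.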


@ -131,6 +131,8 @@ const (
OpEqID
OpTsQuery
OpFalse
OpNotDistinct
OpDistinct
)
type ValType int
@ -195,10 +197,9 @@ func NewCompiler(c Config) (*Compiler, error) {
return co, nil
}
func AddFilter(sel *Select) *Exp {
func NewFilter() *Exp {
ex := expPool.Get().(*Exp)
ex.Reset()
addFilter(sel, ex)
return ex
@ -363,8 +364,8 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
return err
}
// Order is important addFilters must come after compileArgs
// Order is important AddFilters must come after compileArgs
com.addFilters(qc, s, role)
com.AddFilters(qc, s, role)
if s.ParentID == -1 {
qc.Roots = append(qc.Roots, s.ID)
@ -410,7 +411,7 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
return nil
}
func (com *Compiler) addFilters(qc *QCode, sel *Select, role string) {
func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
var fil *Exp
var nu bool
@ -435,7 +436,7 @@ func (com *Compiler) addFilters(qc *QCode, sel *Select, role string) {
case OpFalse:
sel.Where = fil
default:
addFilter(sel, fil)
AddFilter(sel, fil)
}
}
@ -659,7 +660,7 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
}
sel.Args[arg.Name] = arg.Val
addFilter(sel, ex)
AddFilter(sel, ex)
return nil, true
}
@ -676,7 +677,7 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg, role string) (error,
if nu && role == "anon" {
sel.SkipRender = true
}
addFilter(sel, ex)
AddFilter(sel, ex)
return nil, true
}
@ -820,11 +821,11 @@ func (com *Compiler) getRole(role, field string) *trval {
}
}
func addFilter(sel *Select, fil *Exp) {
func AddFilter(sel *Select, fil *Exp) {
if sel.Where != nil {
ow := sel.Where
if sel.Where.Op != OpAnd {
if sel.Where.Op != OpAnd || !sel.Where.doFree {
sel.Where = expPool.Get().(*Exp)
sel.Where.Reset()
sel.Where.Op = OpAnd
@ -937,6 +938,12 @@ func newExp(st *util.Stack, node *Node, usePool bool) (*Exp, error) {
case "is_null":
ex.Op = OpIsNull
ex.Val = node.Val
case "null_eq", "ndis", "not_distinct":
ex.Op = OpNotDistinct
ex.Val = node.Val
case "null_neq", "dis", "distinct":
ex.Op = OpDistinct
ex.Val = node.Val
default:
pushChildren(st, node.exp, node)
return nil, nil // skip node
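NewFilter, AddFilter and AddFilters are now exported, so code outside the qcode package can attach an extra predicate to a compiled selection. A minimal sketch under that assumption, where sel is taken to be a *qcode.Select from a compiled query; only the Op field is set here because Op and Val are the fields this diff shows, and OpFalse mirrors the deny-all case AddFilters itself handles:

fil := qcode.NewFilter()
// force an always-false predicate on this selection
fil.Op = qcode.OpFalse
// ANDs the new expression with any existing where clause,
// creating the wrapping OpAnd node when one is needed
qcode.AddFilter(sel, fil)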


@ -4,8 +4,9 @@ import (
"encoding/json"
"errors"
"fmt"
"strconv"
"strings"
"github.com/Masterminds/semver"
"github.com/adjust/gorails/marshal"
)
@ -37,17 +38,20 @@ func NewAuth(version, secret string) (*Auth, error) {
AuthSalt: authSalt,
}
ver, err := semver.NewVersion(version)
var v1, v2 int
if err != nil {
var err error
return nil, fmt.Errorf("rails auth: %s", err)
sv := strings.Split(version, ".")
if len(sv) >= 2 {
if v1, err = strconv.Atoi(sv[0]); err != nil {
return nil, err
}
if v2, err = strconv.Atoi(sv[1]); err != nil {
return nil, err
}
}
gt52, err := semver.NewConstraint(">= 5.2")
if v1 >= 5 && v2 >= 2 {
if err != nil {
return nil, fmt.Errorf("rails auth: %s", err)
}
if gt52.Check(ver) {
ra.Cipher = railsCipher52
} else {
ra.Cipher = railsCipher

scripts/start.sh (new executable file, 13 lines)

@ -0,0 +1,13 @@
#!/bin/sh
if [ $1 = "secrets" ]
then
./sops --config ./config "${@:2}"
exit 0
fi
if test -f "./config/$SECRETS_FILE"
then
./sops --config ./config exec-env "./config/$SECRETS_FILE" "$*"
else
$@
fi
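In short: invoking the script as ./start.sh secrets <args> hands the remaining arguments to the bundled sops binary with ./config as its config directory; otherwise, if the file named by $SECRETS_FILE exists under ./config, the requested command is run through sops exec-env so the decrypted values are injected as environment variables, and if no secrets file is present the command is executed directly.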


@ -39,11 +39,16 @@ func argMap(ctx context.Context, vars []byte) func(w io.Writer, tag string) (int
}
v := fields[0].Value
// Open and close quotes
if len(v) >= 2 && v[0] == '"' && v[len(v)-1] == '"' {
fields[0].Value = v[1 : len(v)-1]
}
if tag == "cursor" {
if bytes.EqualFold(v, []byte("null")) {
return io.WriteString(w, ``)
}
v1, err := decrypt(string(fields[0].Value))
if err != nil {
return 0, err


@ -3,6 +3,7 @@ package serv
import (
"bytes"
"context"
"encoding/csv"
"encoding/json"
"fmt"
"io"
@ -10,9 +11,12 @@ import (
"math/rand"
"os"
"path"
"strconv"
"strings"
"github.com/brianvoe/gofakeit"
"github.com/dop251/goja"
"github.com/jackc/pgx/v4"
"github.com/spf13/cobra"
"github.com/valyala/fasttemplate"
)
@ -42,6 +46,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
vm := goja.New()
vm.Set("graphql", graphQLFunc)
vm.Set("import_csv", importCSV)
console := vm.NewObject()
console.Set("log", logFunc) //nolint: errcheck
@ -129,6 +134,106 @@ func graphQLFunc(query string, data interface{}, opt map[string]string) map[stri
return val
}
type csvSource struct {
rows [][]string
i int
}
func NewCSVSource(filename string) (*csvSource, error) {
f, err := os.Open(filename)
if err != nil {
return nil, err
}
defer f.Close()
r := csv.NewReader(f)
rows, err := r.ReadAll()
if err != nil {
return nil, err
}
return &csvSource{rows: rows}, nil
}
func (c *csvSource) Next() bool {
return c.i < len(c.rows)
}
func (c *csvSource) Values() ([]interface{}, error) {
var vals []interface{}
var err error
for _, v := range c.rows[c.i] {
switch {
case len(v) == 0:
vals = append(vals, "")
case isDigit(v):
var n int
if n, err = strconv.Atoi(v); err == nil {
vals = append(vals, n)
}
case strings.EqualFold(v, "true") || strings.EqualFold(v, "false"):
var b bool
if b, err = strconv.ParseBool(v); err == nil {
vals = append(vals, b)
}
default:
vals = append(vals, v)
}
if err != nil {
return nil, fmt.Errorf("%w (line no %d)", err, c.i)
}
}
c.i++
return vals, nil
}
func isDigit(v string) bool {
for i := range v {
if v[i] < '0' || v[i] > '9' {
return false
}
}
return true
}
func (c *csvSource) Err() error {
return nil
}
func importCSV(table, filename string) int64 {
if filename[0] != '/' {
filename = path.Join(confPath, filename)
}
s, err := NewCSVSource(filename)
if err != nil {
errlog.Fatal().Err(err).Send()
}
var cols []string
colval, _ := s.Values()
for _, c := range colval {
cols = append(cols, c.(string))
}
n, err := db.CopyFrom(
context.Background(),
pgx.Identifier{table},
cols,
s)
if err != nil {
err = fmt.Errorf("%w (line no %d)", err, s.i)
errlog.Fatal().Err(err).Send()
}
return n
}
//nolint: errcheck
func logFunc(args ...interface{}) {
for _, arg := range args {
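The seed script gains an import_csv(table, filename) helper built on the csvSource type above: a relative filename is resolved against the config directory, the first row of the file is read as the column list, and the remaining rows are streamed to PostgreSQL through pgx's CopyFrom. A minimal sketch of that convention from the Go side, using a hypothetical customers.csv whose header row names the columns:

// "customers.csv" is only an illustration; NewCSVSource, confPath and errlog
// come from the code shown above
src, err := NewCSVSource(path.Join(confPath, "customers.csv"))
if err != nil {
errlog.Fatal().Err(err).Send()
}
cols, _ := src.Values() // first row: column names, e.g. id, full_name, email
fmt.Println("columns:", cols) // later rows are consumed via Next/Values by pgx CopyFrom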


@ -19,11 +19,13 @@ func cmdServ(cmd *cobra.Command, args []string) {
fatalInProd(err, "failed to connect to database")
}
if conf != nil && db != nil {
initCrypto()
initCompiler()
initResolvers()
initAllowList(confPath)
initPreparedList(confPath)
}
startHTTP()
}


@ -31,6 +31,8 @@ type config struct {
SeedFile string `mapstructure:"seed_file"`
MigrationsPath string `mapstructure:"migrations_path"`
SecretKey string `mapstructure:"secret_key"`
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
DebugCORS bool `mapstructure:"cors_debug"`
Inflections map[string]string
@ -205,8 +207,8 @@ func newConfig(name string) *viper.Viper {
vi.SetDefault("env", "development")
vi.BindEnv("env", "GO_ENV") //nolint: errcheck
vi.BindEnv("HOST", "HOST") //nolint: errcheck
vi.BindEnv("host", "HOST") //nolint: errcheck
vi.BindEnv("PORT", "PORT") //nolint: errcheck
vi.BindEnv("port", "PORT") //nolint: errcheck
vi.SetDefault("auth.rails.max_idle", 80)
vi.SetDefault("auth.rails.max_active", 12000)


@ -152,6 +152,10 @@ func (c *coreContext) resolvePreparedSQL() ([]byte, *stmt, error) {
}
}
if root, err = encryptCursor(ps.st.qc, root); err != nil {
return nil, nil, err
}
return root, &ps.st, nil
}


@ -59,12 +59,6 @@ func buildRoleStmt(gql, vars []byte, role string) ([]stmt, error) {
return nil, err
}
// For the 'anon' role in production only compile
// queries for tables defined in the config file.
if conf.Production && ro.Name == "anon" && !hasTablesWithConfig(qc, ro) {
return nil, errors.New("query contains tables with no 'anon' role config")
}
stmts := []stmt{stmt{role: ro, qc: qc}} stmts := []stmt{stmt{role: ro, qc: qc}}
w := &bytes.Buffer{} w := &bytes.Buffer{}
@ -90,7 +84,7 @@ func buildMultiStmt(gql, vars []byte) ([]stmt, error) {
} }
if len(conf.RolesQuery) == 0 { if len(conf.RolesQuery) == 0 {
return buildRoleStmt(gql, vars, "user") return nil, errors.New("roles_query not defined")
} }
stmts := make([]stmt, 0, len(conf.Roles)) stmts := make([]stmt, 0, len(conf.Roles))
@ -99,6 +93,7 @@ func buildMultiStmt(gql, vars []byte) ([]stmt, error) {
for i := 0; i < len(conf.Roles); i++ { for i := 0; i < len(conf.Roles); i++ {
role := &conf.Roles[i] role := &conf.Roles[i]
// skip anon as it's not included in the combined multi-statement
if role.Name == "anon" { if role.Name == "anon" {
continue continue
} }

View File

@@ -36,15 +36,20 @@ func encryptCursor(qc *qcode.QCode, data []byte) ([]byte, error) {
  continue
  }
+ var buf bytes.Buffer
+ if len(f.Value) > 2 {
  v, err := crypto.Encrypt(f.Value[1:len(f.Value)-1], &internalKey)
  if err != nil {
  return nil, err
  }
- var buf bytes.Buffer
  buf.WriteByte('"')
  buf.WriteString(base64.StdEncoding.EncodeToString(v))
  buf.WriteByte('"')
+ } else {
+ buf.WriteString(`null`)
+ }
  to[i].Value = buf.Bytes()
  }
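The change above makes encryptCursor emit a JSON null when a cursor field has no payload to encrypt (the quoted value is empty), instead of always writing a quoted base64 string. A small sketch of just that encoding step, with the actual encryption left out and a raw byte slice standing in for the ciphertext:

package main

import (
	"bytes"
	"encoding/base64"
	"fmt"
)

// writeCursorValue renders an (already encrypted) cursor as a JSON value:
// a quoted base64 string when there is ciphertext, a JSON null otherwise.
// Illustrative only; the real code calls crypto.Encrypt on the raw value first.
func writeCursorValue(ciphertext []byte) []byte {
	var buf bytes.Buffer
	if len(ciphertext) != 0 {
		buf.WriteByte('"')
		buf.WriteString(base64.StdEncoding.EncodeToString(ciphertext))
		buf.WriteByte('"')
	} else {
		buf.WriteString(`null`)
	}
	return buf.Bytes()
}

func main() {
	fmt.Println(string(writeCursorValue([]byte("secret-bytes")))) // "c2VjcmV0LWJ5dGVz"
	fmt.Println(string(writeCursorValue(nil)))                    // null
}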

View File

@@ -8,6 +8,8 @@ import (
  "net/http"
  "strings"
  "time"
+ "github.com/rs/cors"
  )

  const (

@@ -61,6 +63,20 @@ type resolver struct {
  Duration time.Duration `json:"duration"`
  }

+ func apiV1Handler() http.Handler {
+ 	h := withAuth(http.HandlerFunc(apiV1), conf.Auth)
+ 	if len(conf.AllowedOrigins) != 0 {
+ 		c := cors.New(cors.Options{
+ 			AllowedOrigins:   conf.AllowedOrigins,
+ 			AllowCredentials: true,
+ 			Debug:            conf.DebugCORS,
+ 		})
+ 		h = c.Handler(h)
+ 	}
+ 	return h
+ }

  func apiV1(w http.ResponseWriter, r *http.Request) {
  ctx := &coreContext{Context: r.Context()}

@@ -101,7 +117,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
  }
  if err != nil {
- errlog.Error().Err(err).Msg("failed to handle request")
+ errlog.Error().Err(err).Msg(ctx.req.Query)
  errorResp(w, err)
  return
  }
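apiV1Handler above only wraps the GraphQL endpoint in the rs/cors middleware when cors_allowed_origins is set. A standalone sketch of the same wrapping, with an illustrative handler and origin list standing in for the values read from the config:

package main

import (
	"log"
	"net/http"

	"github.com/rs/cors"
)

func main() {
	api := http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
		w.Write([]byte(`{"data":{}}`)) //nolint: errcheck
	})

	// Same options as apiV1Handler; the origin list here is illustrative.
	c := cors.New(cors.Options{
		AllowedOrigins:   []string{"https://*.example.com"},
		AllowCredentials: true,
		Debug:            false,
	})

	log.Fatal(http.ListenAndServe(":8080", c.Handler(api)))
}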

View File

@@ -5,6 +5,7 @@ import (
  "crypto/sha256"
  "fmt"
  "os"
+ "time"

  "github.com/dosco/super-graph/allow"
  "github.com/dosco/super-graph/crypto"

@@ -135,7 +136,17 @@ func initDBPool(c *config) (*pgxpool.Pool, error) {
  config.MaxConns = conf.DB.PoolSize
  }

- db, err := pgxpool.ConnectConfig(context.Background(), config)
+ var db *pgxpool.Pool
+ var err error
+
+ for i := 1; i < 10; i++ {
+ 	db, err = pgxpool.ConnectConfig(context.Background(), config)
+ 	if err == nil {
+ 		break
+ 	}
+ 	time.Sleep(time.Duration(i*100) * time.Millisecond)
+ }

  if err != nil {
  return nil, err
  }
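initDBPool now retries the initial connection up to nine times with a linearly growing delay, which covers the common case of the database container coming up slightly after the app. The same shape as a reusable helper — this helper does not exist in the repo, it is just a sketch of the pattern:

package main

import (
	"errors"
	"fmt"
	"time"
)

// retry runs fn up to attempts times, sleeping a little longer after each
// failure, and returns the last error if all attempts fail.
func retry(attempts int, fn func() error) error {
	var err error
	for i := 1; i <= attempts; i++ {
		if err = fn(); err == nil {
			return nil
		}
		time.Sleep(time.Duration(i*100) * time.Millisecond)
	}
	return err
}

func main() {
	calls := 0
	err := retry(9, func() error {
		calls++
		if calls < 3 {
			return errors.New("database not ready")
		}
		return nil
	})
	fmt.Println(calls, err) // 3 <nil>
}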

View File

@@ -7,6 +7,7 @@ import (
  "io"

  "github.com/dosco/super-graph/allow"
+ "github.com/dosco/super-graph/psql"
  "github.com/dosco/super-graph/qcode"
  "github.com/jackc/pgconn"
  "github.com/jackc/pgx/v4"

@@ -82,12 +83,6 @@ func prepareStmt(item allow.Item) error {
  qt := qcode.GetQType(gql)
  q := []byte(gql)
- if len(vars) == 0 {
- 	logger.Debug().Msgf("Prepared statement:\n%s\n", gql)
- } else {
- 	logger.Debug().Msgf("Prepared statement:\n%s\n%s\n", vars, gql)
- }
  tx, err := db.Begin(context.Background())
  if err != nil {
  return err

@@ -109,7 +104,7 @@ func prepareStmt(item allow.Item) error {
  return err
  }
- logger.Debug().Msg("Prepared statement role: user")
+ logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
  err = prepare(tx, stmts1, stmtHash(item.Name, "user"))
  if err != nil {

@@ -117,9 +112,12 @@ func prepareStmt(item allow.Item) error {
  }
  if conf.isAnonRoleDefined() {
- logger.Debug().Msg("Prepared statement for role: anon")
+ logger.Debug().Msgf("Prepared statement 'query %s' (anon)", item.Name)
  stmts2, err := buildRoleStmt(q, vars, "anon")
+ if err == psql.ErrAllTablesSkipped {
+ 	return nil
+ }
  if err != nil {
  return err
  }

@@ -132,11 +130,17 @@ func prepareStmt(item allow.Item) error {
  case qcode.QTMutation:
  for _, role := range conf.Roles {
- logger.Debug().Msgf("Prepared statement for role: %s", role.Name)
+ logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
  stmts, err := buildRoleStmt(q, vars, role.Name)
  if err != nil {
- 	return err
+ 	if len(item.Vars) == 0 {
+ 		logger.Warn().Err(err).Msg(item.Query)
+ 	} else {
+ 		logger.Warn().Err(err).Msgf("%s %s", item.Vars, item.Query)
+ 	}
+ 	continue
  }
  err = prepare(tx, stmts, stmtHash(item.Name, role.Name))
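prepareStmt builds one SQL statement per role and prepares it inside a transaction under a name derived from the query name and role. A reduced sketch of that prepare-in-a-transaction step using pgx directly; the naming scheme and SQL below are illustrative and may not match the repo's stmtHash or its generated statements:

package main

import (
	"context"
	"crypto/sha256"
	"encoding/hex"
	"log"

	"github.com/jackc/pgx/v4/pgxpool"
)

// stmtName derives a stable statement name from a query name and role.
// Illustrative only; the real hashing scheme in the repo may differ.
func stmtName(name, role string) string {
	h := sha256.Sum256([]byte(name + role))
	return hex.EncodeToString(h[:])
}

func main() {
	ctx := context.Background()
	db, err := pgxpool.Connect(ctx, "postgres://postgres:postgres@localhost:5432/app_development")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	tx, err := db.Begin(ctx)
	if err != nil {
		log.Fatal(err)
	}
	defer tx.Rollback(ctx) //nolint: errcheck

	// Illustrative SQL; the real statements are generated from GraphQL.
	if _, err := tx.Prepare(ctx, stmtName("products", "user"),
		"SELECT id, name FROM products LIMIT $1"); err != nil {
		log.Fatal(err)
	}

	if err := tx.Commit(ctx); err != nil {
		log.Fatal(err)
	}
}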

View File

@@ -108,7 +108,11 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
  // Ensure that we use the correct events, as they are not uniform across
  // platforms. See https://github.com/fsnotify/fsnotify/issues/74
- if conf != nil && !conf.Production && strings.HasSuffix(event.Name, "/allow.list") {
+ if conf != nil && strings.HasSuffix(event.Name, "/allow.list") {
+ 	continue
+ }
+
+ if conf.Production {
  continue
  }
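The watcher now ignores writes to allow.list and skips hot reloads entirely in production. A minimal fsnotify loop with the same two guards, using a local boolean where the server consults conf.Production and an illustrative watch directory:

package main

import (
	"log"
	"strings"

	"github.com/fsnotify/fsnotify"
)

func main() {
	production := false // in the server this comes from conf.Production

	w, err := fsnotify.NewWatcher()
	if err != nil {
		log.Fatal(err)
	}
	defer w.Close()

	if err := w.Add("./config"); err != nil {
		log.Fatal(err)
	}

	for event := range w.Events {
		// Mirror the logic above: never react to allow.list updates,
		// and never hot-reload at all in production.
		if strings.HasSuffix(event.Name, "/allow.list") {
			continue
		}
		if production {
			continue
		}
		log.Printf("config changed (%s), reloading", event.Name)
	}
}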

File diff suppressed because one or more lines are too long

View File

@@ -154,7 +154,7 @@ func routeHandler() (http.Handler, error) {
  routes := map[string]http.Handler{
  "/health": http.HandlerFunc(health),
- "/api/v1/graphql": withAuth(http.HandlerFunc(apiV1), conf.Auth),
+ "/api/v1/graphql": apiV1Handler(),
  }
  if err := setActionRoutes(routes); err != nil {

View File

@@ -36,6 +36,15 @@ migrations_path: ./config/migrations
  # encrypting the cursor data
  secret_key: supercalifajalistics

+ # CORS: A list of origins a cross-domain request can be executed from.
+ # If the special * value is present in the list, all origins will be allowed.
+ # An origin may contain a wildcard (*) to replace 0 or more
+ # characters (i.e.: http://*.domain.com).
+ cors_allowed_origins: ["*"]
+
+ # Debug Cross Origin Resource Sharing requests
+ cors_debug: false

  # Postgres related environment Variables
  # SG_DATABASE_HOST
  # SG_DATABASE_PORT

@@ -109,7 +118,7 @@ database:
  port: 5432
  dbname: {% app_name_slug %}_development
  user: postgres
- password: ''
+ password: postgres
  #schema: "public"
  #pool_size: 10

@@ -125,7 +134,9 @@ database:
  # Define additional variables here to be used with filters
  variables:
- admin_account_id: "5"
+ #admin_account_id: "5"
+ admin_account_id: "sql:select id from users where admin = true limit 1"

  # Field and table names that you wish to block
  blocklist:

@@ -168,26 +179,14 @@ tables:
  table: users

- roles_query: "SELECT * FROM users WHERE id = $user_id"
+ #roles_query: "SELECT * FROM users WHERE id = $user_id"

  roles:
  - name: anon
    tables:
-     - name: products
+     - name: users
-       limit: 10
        query:
-         columns: ["id", "name", "description" ]
+         limit: 10
-         aggregation: false
-       insert:
-         block: false
-       update:
-         block: false
-       delete:
-         block: false
  - name: user
    tables:

@@ -215,8 +214,8 @@ roles:
    delete:
      block: true

- - name: admin
+ # - name: admin
-   match: id = 1000
+ #   match: id = 1000
-   tables:
+ #   tables:
-     - name: users
+ #     - name: users
-       filters: []
+ #       filters: []

View File

@@ -2,7 +2,10 @@ version: '3.4'
  services:
    # Postgres DB
    db:
-     image: postgres:latest
+     image: postgres:12
+     environment:
+       POSTGRES_USER: postgres
+       POSTGRES_PASSWORD: postgres
      ports:
        - "5432:5432"

View File

@@ -24,7 +24,11 @@ auth_fail_block: true
  # Latency tracing for database queries and remote joins
  # the resulting latency information is returned with the
  # response
- enable_tracing: true
+ enable_tracing: false
+
+ # Watch the config folder and reload Super Graph
+ # with the new configs when a change is detected
+ reload_on_config_change: false

  # File that points to the database seeding script
  # seed_file: seed.js

@@ -36,6 +40,15 @@ enable_tracing: true
  # encrypting the cursor data
  # secret_key: supercalifajalistics

+ # CORS: A list of origins a cross-domain request can be executed from.
+ # If the special * value is present in the list, all origins will be allowed.
+ # An origin may contain a wildcard (*) to replace 0 or more
+ # characters (i.e.: http://*.domain.com).
+ # cors_allowed_origins: ["*"]
+
+ # Debug Cross Origin Resource Sharing requests
+ # cors_debug: false

  # Postgres related environment Variables
  # SG_DATABASE_HOST
  # SG_DATABASE_PORT

@@ -52,9 +65,9 @@ database:
  type: postgres
  host: db
  port: 5432
- dbname: {% app_name_slug %}_development
+ dbname: {% app_name_slug %}_production
  user: postgres
- password: ''
+ password: postgres
  #pool_size: 10
  #max_retries: 0
  #log_level: "debug"