Compare commits
36 Commits

3bf9f02a9f
533c767e1d
84d55dbc8a
5aafff6310
840aaf64ff
7bbb56a328
394b08b2fe
842252f9e2
279f5616d1
04bb88f74b
38ed6dbc5f
ec2f8d0c58
9b51065414
1a70603b1a
505335d872
bdc8c65a09
03fe29b088
5857efdd70
bdffe7b14e
ae7cde0433
6293d37e73
7a3fe5a1df
2a32c179ba
0a02bde219
966aa9ce8c
6f18d56ca0
c400461835
a6691de1b7
e6934cda02
4cf7956ff5
5356455904
074aded5c0
c7557f761f
09d6460a13
40c99e9ef3
75ff5510d4
@@ -5,18 +5,18 @@ info:
  repository_url: https://github.com/dosco/super-graph
options:
  commits:
-    # filters:
-    #   Type:
-    #     - feat
-    #     - fix
-    #     - perf
-    #     - refactor
+    filters:
+      Type:
+        - feat
+        - fix
+        - perf
+        - refactor
  commit_groups:
-    # title_maps:
-    #   feat: Features
-    #   fix: Bug Fixes
-    #   perf: Performance Improvements
-    #   refactor: Code Refactoring
+    title_maps:
+      feat: Features
+      fix: Bug Fixes
+      perf: Performance Improvements
+      refactor: Code Refactoring
  header:
    pattern: "^((\\w+)\\s.*)$"
    pattern_maps:
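The `pattern` above is a plain regular expression: it captures the whole commit subject and its leading word, which the `filters` and `title_maps` sections then treat as the commit type. A minimal Go sketch (not part of this repository) exercising the same expression:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as the changelog config's header rule: capture the
	// full subject and the leading word (the commit "Type").
	re := regexp.MustCompile(`^((\w+)\s.*)$`)

	for _, subject := range []string{
		"feat add config driven custom table relationships",
		"fix bug with remote join example",
	} {
		m := re.FindStringSubmatch(subject)
		fmt.Printf("header=%q type=%q\n", m[1], m[2])
	}
}
```

A subject whose leading word is not one of `feat`, `fix`, `perf`, or `refactor` still matches the pattern but is dropped by the `filters` block, which is why uncommenting those lines changes which commits appear in the generated CHANGELOG.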
8  .deepsource.toml  Normal file
@@ -0,0 +1,8 @@
+version = 1
+
+[[analyzers]]
+name = "go"
+enabled = true
+
+[analyzers.meta]
+import_path = "github.com/dosco/super-graph"
3  .gitignore  vendored
@@ -23,6 +23,8 @@
/tmp/runner-build
/demo/tmp

.idea
*.iml
.vscode
.DS_Store
.swp
@@ -35,4 +37,5 @@ suppressions
release
.gofuzz
*-fuzz.zip
*.test
@@ -7,7 +7,7 @@ rules:
  - name: run
    match: \.go$
    ignore: web|examples|docs|_test\.go$
-   command: go run cmd/main.go serv
+   command: go run main.go serv
  - name: test
    match: _test\.go$
    command: go test -cover {PKG}
570  CHANGELOG.md
@@ -1,401 +1,371 @@
<a name="unreleased"></a>
## [Unreleased]

### Add
- Add config driven custom table relationships
- Add support for `websearch_to_tsquery` in PG 11

### Create
- Create CODE_OF_CONDUCT.md

<a name="v0.13.22"></a>
## [v0.13.22] - 2020-05-01

### Fix
- Fix bug with remote join example
- Fix grammer / syntax

<a name="v0.13.21"></a>
## [v0.13.21] - 2020-04-24

### Update
- Update issue templates
- Update CONTRIBUTING.md
- Update issue templates
- Update feature_request.md

<a name="v0.13.20"></a>
## [v0.13.20] - 2020-04-24

<a name="v0.13.19"></a>
## [v0.13.19] - 2020-04-23

<a name="v0.13.18"></a>
## [v0.13.18] - 2020-04-23

<a name="v0.13.17"></a>
## [v0.13.17] - 2020-04-22

<a name="v0.13.16"></a>
## [v0.13.16] - 2020-04-21

### Features
- feat : improve the generated introspection schema and avoid the chirino/graphql api leaking through the core api. ([#53](https://github.com/dosco/super-graph/issues/53))

<a name="v0.13.15"></a>
## [v0.13.15] - 2020-04-20

<a name="v0.13.14"></a>
## [v0.13.14] - 2020-04-19

<a name="v0.13.13"></a>
## [v0.13.13] - 2020-04-19

<a name="v0.13.12"></a>
## [v0.13.12] - 2020-04-19

<a name="v0.13.11"></a>
## [v0.13.11] - 2020-04-18

<a name="v0.13.10"></a>
## [v0.13.10] - 2020-04-17

<a name="v0.13.9"></a>
## [v0.13.9] - 2020-04-16

<a name="v0.13.8"></a>
## [v0.13.8] - 2020-04-16

<a name="v0.13.7"></a>
## [v0.13.7] - 2020-04-16

<a name="v0.13.6"></a>
## [v0.13.6] - 2020-04-13

<a name="v0.13.5"></a>
## [v0.13.5] - 2020-04-13

<a name="v0.13.4"></a>
## [v0.13.4] - 2020-04-12

<a name="v0.13.3"></a>
## [v0.13.3] - 2020-04-12

<a name="v0.13.2"></a>
## [v0.13.2] - 2020-04-11

<a name="v0.13.1"></a>
## [v0.13.1] - 2020-04-11

<a name="v0.13.0"></a>
## [v0.13.0] - 2020-04-10

<a name="v0.12.49"></a>
## [v0.12.49] - 2020-04-01

<a name="v0.12.48"></a>
## [v0.12.48] - 2020-03-31

<a name="v0.12.47"></a>
## [v0.12.47] - 2020-03-30

<a name="v0.12.46"></a>
## [v0.12.46] - 2020-03-21

<a name="v0.12.45"></a>
## [v0.12.45] - 2020-03-18

<a name="v0.12.44"></a>
## [v0.12.44] - 2020-03-16

<a name="v0.12.43"></a>
## [v0.12.43] - 2020-03-16

<a name="v0.12.42"></a>
## [v0.12.42] - 2020-03-14

<a name="v0.12.41"></a>
## [v0.12.41] - 2020-03-06

<a name="v0.12.40"></a>
## [v0.12.40] - 2020-03-06

<a name="v0.12.39"></a>
## [v0.12.39] - 2020-03-06

<a name="v0.12.38"></a>
## [v0.12.38] - 2020-03-05

<a name="v0.12.37"></a>
## [v0.12.37] - 2020-03-04

<a name="v0.12.36"></a>
## [v0.12.36] - 2020-03-04

<a name="v0.12.35"></a>
## [v0.12.35] - 2020-03-03

<a name="v0.12.34"></a>
## [v0.12.34] - 2020-03-03

<a name="v0.12.33"></a>
## [v0.12.33] - 2020-02-29

<a name="v0.12.32"></a>
## [v0.12.32] - 2020-02-24

### Bug Fixes
- fix "Try the demo app" in docs ([#38](https://github.com/dosco/super-graph/issues/38))

<a name="v0.12.31"></a>
## [v0.12.31] - 2020-02-23

<a name="v0.12.30"></a>
## [v0.12.30] - 2020-02-23

<a name="v0.12.29"></a>
## [v0.12.29] - 2020-02-21

<a name="v0.12.28"></a>
## [v0.12.28] - 2020-02-20

<a name="v0.12.27"></a>
## [v0.12.27] - 2020-02-19

<a name="v0.12.26"></a>
## [v0.12.26] - 2020-02-11

<a name="v0.12.25"></a>
## [v0.12.25] - 2020-02-10

<a name="v0.12.24"></a>
## [v0.12.24] - 2020-02-03

<a name="v0.12.23"></a>
## [v0.12.23] - 2020-02-02

<a name="v0.12.22"></a>
## [v0.12.22] - 2020-02-01

<a name="v0.12.21"></a>
## [v0.12.21] - 2020-01-31

<a name="v0.12.20"></a>
## [v0.12.20] - 2020-01-28

<a name="v0.12.19"></a>
## [v0.12.19] - 2020-01-26

<a name="v0.12.18"></a>
## [v0.12.18] - 2020-01-20

<a name="v0.12.17"></a>
## [v0.12.17] - 2020-01-20

<a name="v0.12.16"></a>
## [v0.12.16] - 2020-01-19

<a name="v0.12.15"></a>
## [v0.12.15] - 2020-01-17

<a name="v0.12.14"></a>
## [v0.12.14] - 2020-01-17

<a name="v0.12.13"></a>
## [v0.12.13] - 2020-01-16

<a name="v0.12.12"></a>
## [v0.12.12] - 2020-01-15

<a name="v0.12.11"></a>
## [v0.12.11] - 2020-01-14

<a name="v0.12.10"></a>
## [v0.12.10] - 2020-01-14

<a name="v0.12.9"></a>
## [v0.12.9] - 2020-01-14

<a name="v0.12.8"></a>
## [v0.12.8] - 2020-01-13

<a name="v0.12.7"></a>
## [v0.12.7] - 2020-01-11

### Pull Requests
- Merge pull request [#22](https://github.com/dosco/super-graph/issues/22) from bhaskarmurthy/fix-grammer-syntax

<a name="v0.12.6"></a>
## [v0.12.6] - 2019-12-02

### Add
- Add support for `websearch_to_tsquery` in PG 11

<a name="v0.12.5"></a>
## [v0.12.5] - 2019-11-30

### Add
- Add a guide to the internals of the codebase
- Add a CONTRIBUTING.md guide for contributors
- Add a CHANGLOG.md
- Add issue templates

### Fix
- Fix for missing filters on nested selectors

### Refactor
- Refactor rename 'Select.Table` to `Select.Name`

<a name="v0.12.4"></a>
## [v0.12.4] - 2019-11-28

### Move
- Move license from MIT to Apache 2.0. Add Makefile

<a name="v0.12.3"></a>
## [v0.12.3] - 2019-11-26

### Added
- Added support for query names to the allow.list

<a name="v0.12.2"></a>
## [v0.12.2] - 2019-11-25

### Fix
- Fix bug with compiling anon queries

<a name="v0.12.1"></a>
## [v0.12.1] - 2019-11-22

### Move
- Move sql query logging from info to debug

<a name="v0.12.0"></a>
## [v0.12.0] - 2019-11-22

### Use
- Use logger error instead of panic in goja handlers

<a name="v0.11.9"></a>
## [v0.11.9] - 2019-11-22

### Add
- Add a db:reset command only for dev mode

<a name="v0.11.8"></a>
## [v0.11.8] - 2019-11-21

### Optimize
- Optimize db queries limit use of transactions

<a name="v0.11.7"></a>
## [v0.11.7] - 2019-11-19

### Added
- Added support for multi-root queries

<a name="v0.11.6"></a>
## [v0.11.6] - 2019-11-15

### Fix
- Fix issues with JWT auth
- Fix bug with migration filename generation
- Fix bug with migration file name

<a name="v0.11.5"></a>
## [v0.11.5] - 2019-11-10

### Fix
- Fix bug with migration template name

<a name="v0.11.4"></a>
## [v0.11.4] - 2019-11-10

### Fix
- Fix bug with creating new migrations

<a name="v0.11.3"></a>
## [v0.11.3] - 2019-11-09

### Fix
- Fix macro syntax bug in app templates

<a name="v0.11.2"></a>
## [v0.11.2] - 2019-11-07

### Fix
- Fix bugs and add new production mode

<a name="v0.11.1"></a>
## [v0.11.1] - 2019-11-05

### Add
- Add nested where clause to filter based on related tables

### Block
- Block unauthorized requests when 'anon' role is not defined

### Update
- Update docs and website with new features

<a name="v0.11"></a>
## [v0.11] - 2019-11-01

### Add
- Add config driven presets for insert, update and upsert
- Add config driven presets for insert, update and upserta
- Add RBAC option to disable functions eg. count
- Add fuzz testing to 'serv' for the GQL hash parser
- Add fuzz testing to 'jsn' and 'qcode'
- Add ability to block queries and mutations by role
- Add built in 'anon' and 'user' roles
- Add role based access control

### Allow
- Allow config files to inherit from other config files

### Change
- Change config key inherit to inherits

### Get
- Get RBAC working for queries and mutations

### Optimize
- Optimize prepared statement flow for RBAC

### Preserve
- Preserve allow.list ordering on save

### Update
- Update filters section in guide

### Pull Requests
- Merge pull request [#11](https://github.com/dosco/super-graph/issues/11) from dosco/rbac

<a name="v0.10.1"></a>
## [v0.10.1] - 2019-10-06

### Add
- Add ability to set filters per operation / action
- Add upsert mutation

### Pull Requests
- Merge pull request [#10](https://github.com/dosco/super-graph/issues/10) from FourSigma/sm-examples-folder

<a name="v0.10"></a>
## [v0.10] - 2019-10-04

### Fix
- Fix return values for bulk mutations and delete
- Fix issues with mutation SQL
- Fix broken demo app
- Fix typo in 'across'

### Remove
- Remove extra link from README

### Update
- Update docs, getting started guide and mutations

### Pull Requests
- Merge pull request [#6](https://github.com/dosco/super-graph/issues/6) from muesli/typo-fixes

<a name="v0.9"></a>
## [v0.9] - 2019-10-01

### Fix
- Fix demo rails app broken build

<a name="v0.8"></a>
## [v0.8] - 2019-09-30

### Fix
- Fix invalid import bug

### Update
- Update documentation site

<a name="v0.7"></a>
## [v0.7] - 2019-09-29

### Failure
- Failure to prepare statements should be a warning

### Fix
- Fix duplicte column bug

<a name="v0.6"></a>
## [v0.6] - 2019-09-29

### Add
- Add database setup commands
- Add binary compression back to Dockerfile
- Add initialization command to setup new apps
- Add migrate command
- Add database seeding capability
- Add session variable for user id
- Add delete mutation
- Add update mutation
- Add insert mutation with bulk insert
- Add GoTO Aug, 19 presentation
- Add support for prepared statements
- Add end-to-end benchmaking
- Add object pooling for parser expressions
- Add request / response debugging for remote joins
- Add a presentation about GraphQL
- Add validation for remote JSON
- Add tracing for API stitching
- Add REST API stitching
- Add SQL query cacheing
- Add support for GraphQL variables
- Add fuzz testing to qcode
- Add test for Rails Redis cookie store integration
- Add an install guide

### Change
- Change fuzz test name to qcode
- Change logo from PNG to SVG

### Enabke
- Enabke reload on config change

### Fix
- Fix missing config name bug
- Fix new app templates
- Fix help message for migrate
- Fix session variable bug
- Fix test failures in `psql` and `serv`
- Fix demo docker services startup order
- Fix wrong value for false token bug. Reported by [@ThisIsMissEm](https://github.com/ThisIsMissEm)
- Fix allow.list file discovery bug
- Fix bug with allow list path
- Fix wrong value for use_allow_list in dev config
- Fix startup bug in demo script
- Fix url bug in allow list
- Fix bug [#676](https://github.com/dosco/super-graph/issues/676) found by fuzzer
- Fix race-condition in remote joins
- Fix cookie passing in web ui
- Fix bug with passing cookies in web ui
- Fix null pointer with invalid argument values
- Fix infinite loop bug in lexer
- Fix null pointer issue found by fuzz test
- Fix issue with fuzzbuzz config
- Fix demo to run as memory only
- Fix auth documentation
- Fix issue with web ui sizing
- Fix issue preventing docker-compose deploy
- Fix try demo documentation

### Futher
- Futher reduce allocations across hot paths
- Futher reduce allocations on the compiler hot path
- Futher optimize json parsing and editing performance

### Highlight
- Highlight top features better on the site

### Improve
- Improve readability of json parser code
- Improve the motivation section in the readme
- Improve the demo experience

### Make
- Make remote joins use parallel http requests

### Merge
- Merge branch 'master' into optimize-psql

### New
- New low allocation fast json parsing and editing library

### Optimize
- Optimize lexer and fix bugs
- Optimize the sql generator hot path

### Reduce
- Reduce alllocations done by the stack
- Reduce steps to run the demo
- Reduce allocations and improve perf over 50%

### Remove
- Remove unused packages
- Remove the 'hello' test app folder
- Remove other allocations in psql

### Use
- Use hash's as ids for table relationships

### Watch
- Watch and reload on config changes

<a name="v0.5"></a>
## [v0.5] - 2019-04-10

### Add
- Add supprt for new Rails 5.2 aes-256-gcm cookies
- Add query support for ts_rank and ts_headline
- Add full text search support using TSV indexes
- Add missing assets folder
- Add fetch by ID feature
- Add documentation

### Cleanup
- Cleanup and redesign config files

### Fix
- Fix bug with auth config parsing

### Redesign
- Redesign config file architecture

### Reduce
- Reduce realloc of maps and slices

### Update
- Update docs with full-text search information

<a name="v0.4"></a>
## [v0.4] - 2019-04-01

<a name="v0.3"></a>
## [v0.3] - 2019-04-01

### Add
- Add SQL execution timing and tracing
- Add support for HAVING with aggregate queries
- Add aggregrate functions to GQL queries
- Add Auth0 JWT support
- Add React UI building to the docker build flow
- Add compiler profiling
- Add bechmarks for GQL to SQL compile
- Add tests for gql to sql compile

### Cleanup
- Cleanup Dockerfile

### Fix
- Fix recurring packer issue docker hub builds
- Fix issue with asset packer breaking Docker builds
- Fix missing git package in Dockerfile
- Fix docker ignore values
- Fix image build failure on docker hub
- Fix build issue in Dockerfile
- Fix bugs and document the 'where' clause
- Fix perf issue with inflections

### Optimize
- Optimize docker image

### Pack
- Pack web UI with app into a single binary

### Upgrade
- Upgrade web UI packages

<a name="0.3"></a>
## 0.3 - 2019-03-24

### First
- First commit

### Fix
- Fix license to MIT

-[Unreleased]: https://github.com/dosco/super-graph/compare/v0.12.6...HEAD
+[Unreleased]: https://github.com/dosco/super-graph/compare/v0.13.22...HEAD
[v0.13.22]: https://github.com/dosco/super-graph/compare/v0.13.21...v0.13.22
[v0.13.21]: https://github.com/dosco/super-graph/compare/v0.13.20...v0.13.21
[v0.13.20]: https://github.com/dosco/super-graph/compare/v0.13.19...v0.13.20
[v0.13.19]: https://github.com/dosco/super-graph/compare/v0.13.18...v0.13.19
[v0.13.18]: https://github.com/dosco/super-graph/compare/v0.13.17...v0.13.18
[v0.13.17]: https://github.com/dosco/super-graph/compare/v0.13.16...v0.13.17
[v0.13.16]: https://github.com/dosco/super-graph/compare/v0.13.15...v0.13.16
[v0.13.15]: https://github.com/dosco/super-graph/compare/v0.13.14...v0.13.15
[v0.13.14]: https://github.com/dosco/super-graph/compare/v0.13.13...v0.13.14
[v0.13.13]: https://github.com/dosco/super-graph/compare/v0.13.12...v0.13.13
[v0.13.12]: https://github.com/dosco/super-graph/compare/v0.13.11...v0.13.12
[v0.13.11]: https://github.com/dosco/super-graph/compare/v0.13.10...v0.13.11
[v0.13.10]: https://github.com/dosco/super-graph/compare/v0.13.9...v0.13.10
[v0.13.9]: https://github.com/dosco/super-graph/compare/v0.13.8...v0.13.9
[v0.13.8]: https://github.com/dosco/super-graph/compare/v0.13.7...v0.13.8
[v0.13.7]: https://github.com/dosco/super-graph/compare/v0.13.6...v0.13.7
[v0.13.6]: https://github.com/dosco/super-graph/compare/v0.13.5...v0.13.6
[v0.13.5]: https://github.com/dosco/super-graph/compare/v0.13.4...v0.13.5
[v0.13.4]: https://github.com/dosco/super-graph/compare/v0.13.3...v0.13.4
[v0.13.3]: https://github.com/dosco/super-graph/compare/v0.13.2...v0.13.3
[v0.13.2]: https://github.com/dosco/super-graph/compare/v0.13.1...v0.13.2
[v0.13.1]: https://github.com/dosco/super-graph/compare/v0.13.0...v0.13.1
[v0.13.0]: https://github.com/dosco/super-graph/compare/v0.12.49...v0.13.0
[v0.12.49]: https://github.com/dosco/super-graph/compare/v0.12.48...v0.12.49
[v0.12.48]: https://github.com/dosco/super-graph/compare/v0.12.47...v0.12.48
[v0.12.47]: https://github.com/dosco/super-graph/compare/v0.12.46...v0.12.47
[v0.12.46]: https://github.com/dosco/super-graph/compare/v0.12.45...v0.12.46
[v0.12.45]: https://github.com/dosco/super-graph/compare/v0.12.44...v0.12.45
[v0.12.44]: https://github.com/dosco/super-graph/compare/v0.12.43...v0.12.44
[v0.12.43]: https://github.com/dosco/super-graph/compare/v0.12.42...v0.12.43
[v0.12.42]: https://github.com/dosco/super-graph/compare/v0.12.41...v0.12.42
[v0.12.41]: https://github.com/dosco/super-graph/compare/v0.12.40...v0.12.41
[v0.12.40]: https://github.com/dosco/super-graph/compare/v0.12.39...v0.12.40
[v0.12.39]: https://github.com/dosco/super-graph/compare/v0.12.38...v0.12.39
[v0.12.38]: https://github.com/dosco/super-graph/compare/v0.12.37...v0.12.38
[v0.12.37]: https://github.com/dosco/super-graph/compare/v0.12.36...v0.12.37
[v0.12.36]: https://github.com/dosco/super-graph/compare/v0.12.35...v0.12.36
[v0.12.35]: https://github.com/dosco/super-graph/compare/v0.12.34...v0.12.35
[v0.12.34]: https://github.com/dosco/super-graph/compare/v0.12.33...v0.12.34
[v0.12.33]: https://github.com/dosco/super-graph/compare/v0.12.32...v0.12.33
[v0.12.32]: https://github.com/dosco/super-graph/compare/v0.12.31...v0.12.32
[v0.12.31]: https://github.com/dosco/super-graph/compare/v0.12.30...v0.12.31
[v0.12.30]: https://github.com/dosco/super-graph/compare/v0.12.29...v0.12.30
[v0.12.29]: https://github.com/dosco/super-graph/compare/v0.12.28...v0.12.29
[v0.12.28]: https://github.com/dosco/super-graph/compare/v0.12.27...v0.12.28
[v0.12.27]: https://github.com/dosco/super-graph/compare/v0.12.26...v0.12.27
[v0.12.26]: https://github.com/dosco/super-graph/compare/v0.12.25...v0.12.26
[v0.12.25]: https://github.com/dosco/super-graph/compare/v0.12.24...v0.12.25
[v0.12.24]: https://github.com/dosco/super-graph/compare/v0.12.23...v0.12.24
[v0.12.23]: https://github.com/dosco/super-graph/compare/v0.12.22...v0.12.23
[v0.12.22]: https://github.com/dosco/super-graph/compare/v0.12.21...v0.12.22
[v0.12.21]: https://github.com/dosco/super-graph/compare/v0.12.20...v0.12.21
[v0.12.20]: https://github.com/dosco/super-graph/compare/v0.12.19...v0.12.20
[v0.12.19]: https://github.com/dosco/super-graph/compare/v0.12.18...v0.12.19
[v0.12.18]: https://github.com/dosco/super-graph/compare/v0.12.17...v0.12.18
[v0.12.17]: https://github.com/dosco/super-graph/compare/v0.12.16...v0.12.17
[v0.12.16]: https://github.com/dosco/super-graph/compare/v0.12.15...v0.12.16
[v0.12.15]: https://github.com/dosco/super-graph/compare/v0.12.14...v0.12.15
[v0.12.14]: https://github.com/dosco/super-graph/compare/v0.12.13...v0.12.14
[v0.12.13]: https://github.com/dosco/super-graph/compare/v0.12.12...v0.12.13
[v0.12.12]: https://github.com/dosco/super-graph/compare/v0.12.11...v0.12.12
[v0.12.11]: https://github.com/dosco/super-graph/compare/v0.12.10...v0.12.11
[v0.12.10]: https://github.com/dosco/super-graph/compare/v0.12.9...v0.12.10
[v0.12.9]: https://github.com/dosco/super-graph/compare/v0.12.8...v0.12.9
[v0.12.8]: https://github.com/dosco/super-graph/compare/v0.12.7...v0.12.8
[v0.12.7]: https://github.com/dosco/super-graph/compare/v0.12.6...v0.12.7
[v0.12.6]: https://github.com/dosco/super-graph/compare/v0.12.5...v0.12.6
[v0.12.5]: https://github.com/dosco/super-graph/compare/v0.12.4...v0.12.5
[v0.12.4]: https://github.com/dosco/super-graph/compare/v0.12.3...v0.12.4
@@ -1,7 +1,7 @@
# stage: 1
FROM node:10 as react-build
WORKDIR /web
-COPY /cmd/internal/serv/web/ ./
+COPY /internal/serv/web/ ./
RUN yarn
RUN yarn build

@@ -24,8 +24,8 @@ RUN chmod 755 /usr/local/bin/sops
WORKDIR /app
COPY . /app

-RUN mkdir -p /app/cmd/internal/serv/web/build
-COPY --from=react-build /web/build/ ./cmd/internal/serv/web/build
+RUN mkdir -p /app/internal/serv/web/build
+COPY --from=react-build /web/build/ ./internal/serv/web/build

RUN go mod vendor
RUN make build

@@ -45,7 +45,7 @@ RUN mkdir -p /config
COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
COPY --from=go-build /app/config/* /config/
COPY --from=go-build /app/super-graph .
-COPY --from=go-build /app/cmd/scripts/start.sh .
+COPY --from=go-build /app/internal/scripts/start.sh .
COPY --from=go-build /usr/local/bin/sops .

RUN chmod +x /super-graph
24  Makefile
@@ -12,30 +12,30 @@ endif
export GO111MODULE := on

# Build-time Go variables
-version = github.com/dosco/super-graph/serv.version
-gitBranch = github.com/dosco/super-graph/serv.gitBranch
-lastCommitSHA = github.com/dosco/super-graph/serv.lastCommitSHA
-lastCommitTime = github.com/dosco/super-graph/serv.lastCommitTime
+version = github.com/dosco/super-graph/internal/serv.version
+gitBranch = github.com/dosco/super-graph/internal/serv.gitBranch
+lastCommitSHA = github.com/dosco/super-graph/internal/serv.lastCommitSHA
+lastCommitTime = github.com/dosco/super-graph/internal/serv.lastCommitTime

BUILD_FLAGS ?= -ldflags '-s -w -X ${lastCommitSHA}=${BUILD} -X "${lastCommitTime}=${BUILD_DATE}" -X "${version}=${BUILD_VERSION}" -X ${gitBranch}=${BUILD_BRANCH}'

.PHONY: all build gen clean test run lint changlog release version help $(PLATFORMS)

test:
-	@go test -v ./...
+	@go test -v -short -race ./...

BIN_DIR := $(GOPATH)/bin
GORICE := $(BIN_DIR)/rice
GOLANGCILINT := $(BIN_DIR)/golangci-lint
GITCHGLOG := $(BIN_DIR)/git-chglog
-WEB_BUILD_DIR := ./cmd/internal/serv/web/build/manifest.json
+WEB_BUILD_DIR := ./internal/serv/web/build/manifest.json

$(GORICE):
	@GO111MODULE=off go get -u github.com/GeertJohan/go.rice/rice

$(WEB_BUILD_DIR):
	@echo "First install Yarn and create a build of the web UI then re-run make install"
-	@echo "Run this command: yarn --cwd cmd/internal/serv/web/ build"
+	@echo "Run this command: yarn --cwd internal/serv/web/ build"
	@exit 1

$(GITCHGLOG):
@@ -45,7 +45,7 @@ changelog: $(GITCHGLOG)
	@git-chglog $(ARGS)

$(GOLANGCILINT):
-	@GO111MODULE=off curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s -- -b $(GOPATH)/bin v1.21.0
+	@GO111MODULE=off curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s -- -b $(GOPATH)/bin v1.25.1

lint: $(GOLANGCILINT)
	@golangci-lint run ./... --skip-dirs-use-default
@@ -57,7 +57,7 @@ os = $(word 1, $@)
$(PLATFORMS): lint test
	@mkdir -p release
-	@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64 cmd/main.go
+	@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64 main.go

release: windows linux darwin
@@ -69,7 +69,7 @@ gen: $(GORICE) $(WEB_BUILD_DIR)
	@go generate ./...

$(BINARY): clean
-	@go build $(BUILD_FLAGS) -o $(BINARY) cmd/main.go
+	@go build $(BUILD_FLAGS) -o $(BINARY) main.go

clean:
	@rm -f $(BINARY)
@@ -77,10 +77,10 @@ clean:
run: clean
	@go run $(BUILD_FLAGS) main.go $(ARGS)

-install: build
-	@mv $(BINARY) $(GOPATH)/bin/$(BINARY)
+install: clean build
+	@echo "Commit Hash: `git rev-parse HEAD`"
+	@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
+	@mv $(BINARY) $(GOPATH)/bin/$(BINARY)
+	@echo "New Hash:" `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`

uninstall: clean
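For context on the `BUILD_FLAGS` change above: Go's linker can overwrite package-level string variables at link time via `-X`, so the updated variable paths simply track the `serv` package's move under `internal/`; no code change is needed beyond the new import path. A sketch of the receiving side, assuming variables along these lines exist in `internal/serv` (the variable names are taken from the Makefile, the rest is illustrative):

```go
// Hypothetical shape of the variables the Makefile's -X flags target,
// in the package path the Makefile names: .../internal/serv.
package serv

import "fmt"

// Set at build time, e.g.:
//   go build -ldflags '-X github.com/dosco/super-graph/internal/serv.version=v0.13.22'
var (
	version        string
	gitBranch      string
	lastCommitSHA  string
	lastCommitTime string
)

// BuildInfo reports whatever the linker injected; all fields are empty
// strings when the binary is built without the -X flags.
func BuildInfo() string {
	return fmt.Sprintf("version=%s branch=%s sha=%s time=%s",
		version, gitBranch, lastCommitSHA, lastCommitTime)
}
```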
36  README.md
@@ -1,6 +1,6 @@
<img src="docs/guide/.vuepress/public/super-graph.png" width="250" />

-### Build web products faster. Secure high performance GraphQL
+### Build web products faster. Secure high-performance GraphQL

[](https://pkg.go.dev/github.com/dosco/super-graph/core?tab=doc)
@@ -10,15 +10,12 @@
## What's Super Graph?

-Designed to 100x your developer productivity. Super Graph will instantly and without you writing code provide you a high performance GraphQL API for Postgres DB. GraphQL queries are compiled into a single fast SQL query. Super Graph is a GO library and a service, use it in your own code or run it as a seperate service.
+Designed to 100x your developer productivity. Super Graph will instantly, and without you writing any code, provide a high performance GraphQL API for your PostgresSQL DB. GraphQL queries are compiled into a single fast SQL query. Super Graph is a Go library and a service, use it in your own code or run it as a separate service.

## Using it as a service

```console
git clone https://github.com/dosco/super-graph
cd ./super-graph
make install

go get github.com/dosco/super-graph
super-graph new <app_name>
```

@@ -38,17 +35,12 @@ import (
func main() {
	db, err := sql.Open("pgx", "postgres://postgrs:@localhost:5432/example_db")
	if err != nil {
-		log.Fatalf(err)
+		log.Fatal(err)
	}

-	conf, err := core.ReadInConfig("./config/dev.yml")
-	if err != nil {
-		log.Fatalf(err)
-	}
-
-	sg, err = core.NewSuperGraph(conf, db)
+	sg, err := core.NewSuperGraph(nil, db)
	if err != nil {
-		log.Fatalf(err)
+		log.Fatal(err)
	}

	query := `
@@ -61,7 +53,7 @@ func main() {
	res, err := sg.GraphQL(context.Background(), query, nil)
	if err != nil {
-		log.Fatalf(err)
+		log.Fatal(err)
	}

	fmt.Println(string(res.Data))
@@ -70,7 +62,7 @@
## About Super Graph

-After working on several products through my career I find that we spend way too much time on building API backends. Most APIs also require constant updating, this costs real time and money.
+After working on several products through my career I found that we spend way too much time on building API backends. Most APIs also need constant updating, and this costs time and money.

It's always the same thing, figure out what the UI needs then build an endpoint for it. Most API code involves struggling with an ORM to query a database and mangle the data into a shape that the UI expects to see.

@@ -78,28 +70,27 @@ I didn't want to write this code anymore, I wanted the computer to do it. Enter
Having worked with compilers before I saw this as a compiler problem. Why not build a compiler that converts GraphQL to highly efficient SQL.

-This compiler is what sits at the heart of Super Graph with layers of useful functionality around it like authentication, remote joins, rails integration, database migrations and everything else needed for you to build production ready apps with it.
+This compiler is what sits at the heart of Super Graph, with layers of useful functionality around it like authentication, remote joins, rails integration, database migrations, and everything else needed for you to build production-ready apps with it.

## Features

- Complex nested queries and mutations
- Auto learns database tables and relationships
-- Role and Attribute based access control
-- Opaque cursor based efficient pagination
-- Full text search and aggregations
+- Role and Attribute-based access control
+- Opaque cursor-based efficient pagination
+- Full-text search and aggregations
- JWT tokens supported (Auth0, etc)
- Join database queries with remote REST APIs
- Also works with existing Ruby-On-Rails apps
- Rails authentication supported (Redis, Memcache, Cookie)
- A simple config file
-- High performance GO codebase
+- High performance Go codebase
- Tiny docker image and low memory requirements
- Fuzz tested for security
- Database migrations tool
- Database seeding tool
- Works with Postgres and YugabyteDB

## Documentation

[supergraph.dev](https://supergraph.dev)
@@ -119,4 +110,3 @@ Twitter or Discord.

Copyright (c) 2019-present Vikram Rangnekar
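Putting the README fragments above together: a self-contained sketch of the library usage shown in this diff. The query body is a hypothetical placeholder (the real one is elided from the hunk), and the `pgx` stdlib driver import is an assumption, since the README's import block is not shown in full:

```go
// Assembled from the README fragments visible in this diff.
package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib" // assumed driver registration for "pgx"
)

func main() {
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	// nil config: Super Graph discovers tables and relationships itself.
	sg, err := core.NewSuperGraph(nil, db)
	if err != nil {
		log.Fatal(err)
	}

	// Hypothetical query; the README's actual query body is not shown
	// in the hunk above. Any table Super Graph has discovered works.
	query := `
	query {
		products(limit: 3) {
			id
			name
		}
	}`

	res, err := sg.GraphQL(context.Background(), query, nil)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(string(res.Data))
}
```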
@@ -1,37 +0,0 @@
package serv

import "net/http"

//nolint: errcheck
func introspect(w http.ResponseWriter) {
	w.Header().Set("Content-Type", "application/json")
	w.Write([]byte(`{
	"data": {
		"__schema": {
			"queryType": {
				"name": "Query"
			},
			"mutationType": null,
			"subscriptionType": null
		}
	},
	"extensions":{
		"tracing":{
			"version":1,
			"startTime":"2019-06-04T19:53:31.093Z",
			"endTime":"2019-06-04T19:53:31.108Z",
			"duration":15219720,
			"execution": {
				"resolvers": [{
					"path": ["__schema"],
					"parentType": "Query",
					"fieldName": "__schema",
					"returnType": "__Schema!",
					"startOffset": 50950,
					"duration": 17187
				}]
			}
		}
	}
}`))
}
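Because the deleted `introspect` handler writes a fixed JSON payload to any `http.ResponseWriter`, its behavior is easy to pin down with `httptest`. A minimal sketch, assuming the test sits in the same `serv` package the handler lived in:

```go
package serv

import (
	"encoding/json"
	"net/http/httptest"
	"testing"
)

// TestIntrospectPayload checks the canned response is valid JSON and
// reports "Query" as the schema's query type, as the literal above does.
func TestIntrospectPayload(t *testing.T) {
	rec := httptest.NewRecorder()
	introspect(rec)

	var body struct {
		Data struct {
			Schema struct {
				QueryType struct{ Name string } `json:"queryType"`
			} `json:"__schema"`
		} `json:"data"`
	}
	if err := json.Unmarshal(rec.Body.Bytes(), &body); err != nil {
		t.Fatalf("invalid JSON: %v", err)
	}
	if body.Data.Schema.QueryType.Name != "Query" {
		t.Errorf("unexpected query type: %q", body.Data.Schema.QueryType.Name)
	}
}
```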
@@ -1,30 +0,0 @@
{
  "files": {
    "main.css": "/static/css/main.c6b5c55c.chunk.css",
    "main.js": "/static/js/main.04d74040.chunk.js",
    "main.js.map": "/static/js/main.04d74040.chunk.js.map",
    "runtime-main.js": "/static/js/runtime-main.4aea9da3.js",
    "runtime-main.js.map": "/static/js/runtime-main.4aea9da3.js.map",
    "static/js/2.03370bd3.chunk.js": "/static/js/2.03370bd3.chunk.js",
    "static/js/2.03370bd3.chunk.js.map": "/static/js/2.03370bd3.chunk.js.map",
    "index.html": "/index.html",
    "precache-manifest.e33bc3c7c6774d7032c490820c96901d.js": "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js",
    "service-worker.js": "/service-worker.js",
    "static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
    "static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
    "static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
    "static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
    "static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
    "static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
    "static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
    "static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
    "static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
    "static/media/logo.png": "/static/media/logo.57ee3b60.png"
  },
  "entrypoints": [
    "static/js/runtime-main.4aea9da3.js",
    "static/js/2.03370bd3.chunk.js",
    "static/css/main.c6b5c55c.chunk.css",
    "static/js/main.04d74040.chunk.js"
  ]
}
@@ -1 +0,0 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="manifest" href="/manifest.json"/><link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet"><title>Super Graph - GraphQL API for Rails</title><link href="/static/css/main.c6b5c55c.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(i){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],l=0,f=[];l<n.length;l++)t=n[l],Object.prototype.hasOwnProperty.call(p,t)&&p[t]&&f.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(i[r]=o[r]);for(s&&s(e);f.length;)f.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=l(l.s=t[0]))}return e}var t={},p={1:0},c=[];function l(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return i[e].call(r.exports,r,r.exports,l),r.l=!0,r.exports}l.m=i,l.c=t,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(r,e){if(1&e&&(r=l(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)l.d(t,n,function(e){return r[e]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var r=this.webpackJsonpweb=this.webpackJsonpweb||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/2.03370bd3.chunk.js"></script><script src="/static/js/main.04d74040.chunk.js"></script></body></html>
@@ -1,58 +0,0 @@
self.__precacheManifest = (self.__precacheManifest || []).concat([
  {
    "revision": "ecdae64182d05c64e7f7f200ed03a4ed",
    "url": "/index.html"
  },
  {
    "revision": "6e9467dc213a3e2b84ea",
    "url": "/static/css/main.c6b5c55c.chunk.css"
  },
  {
    "revision": "c156a125990ddf5dcc51",
    "url": "/static/js/2.03370bd3.chunk.js"
  },
  {
    "revision": "6e9467dc213a3e2b84ea",
    "url": "/static/js/main.04d74040.chunk.js"
  },
  {
    "revision": "427262b6771d3f49a7c5",
    "url": "/static/js/runtime-main.4aea9da3.js"
  },
  {
    "revision": "5ab204b9b95c06640dbefae9a65b1db2",
    "url": "/static/media/GraphQLLanguageService.js.5ab204b9.flow"
  },
  {
    "revision": "4ce7ba191f7ebee4426768f246b2f0e0",
    "url": "/static/media/autocompleteUtils.js.4ce7ba19.flow"
  },
  {
    "revision": "7f98f032085704c8943ec2d1925c7c84",
    "url": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow"
  },
  {
    "revision": "4dbec62f1d8e8417afb9cbd19f1268c3",
    "url": "/static/media/getDefinition.js.4dbec62f.flow"
  },
  {
    "revision": "65b0979ac23feca49e4411883fd8eaab",
    "url": "/static/media/getDiagnostics.js.65b0979a.flow"
  },
  {
    "revision": "d94118379d362fc161aa1246bcc14d43",
    "url": "/static/media/getHoverInformation.js.d9411837.flow"
  },
  {
    "revision": "c04e3998712b37a96f0bfd283fa06b52",
    "url": "/static/media/getOutline.js.c04e3998.flow"
  },
  {
    "revision": "02c24280c5e4a7eb3c6cfcb079a8f1e3",
    "url": "/static/media/index.js.02c24280.flow"
  },
  {
    "revision": "57ee3b6084cb9d3c754cc12d25a98035",
    "url": "/static/media/logo.57ee3b60.png"
  }
]);
@@ -1,2 +0,0 @@
body{margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica Neue,sans-serif;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;background-color:#0f202d}code{font-family:source-code-pro,Menlo,Monaco,Consolas,Courier New,monospace}.playground>div:nth-child(2){height:calc(100vh - 131px)}
/*# sourceMappingURL=main.c6b5c55c.chunk.css.map */

File diff suppressed because one or more lines are too long
@@ -1,2 +0,0 @@
(this.webpackJsonpweb=this.webpackJsonpweb||[]).push([[0],{163:function(e,t,n){var r={".":61,"./":61,"./GraphQLLanguageService":117,"./GraphQLLanguageService.js":117,"./GraphQLLanguageService.js.flow":315,"./autocompleteUtils":91,"./autocompleteUtils.js":91,"./autocompleteUtils.js.flow":316,"./getAutocompleteSuggestions":77,"./getAutocompleteSuggestions.js":77,"./getAutocompleteSuggestions.js.flow":317,"./getDefinition":92,"./getDefinition.js":92,"./getDefinition.js.flow":318,"./getDiagnostics":94,"./getDiagnostics.js":94,"./getDiagnostics.js.flow":319,"./getHoverInformation":95,"./getHoverInformation.js":95,"./getHoverInformation.js.flow":320,"./getOutline":116,"./getOutline.js":116,"./getOutline.js.flow":321,"./index":61,"./index.js":61,"./index.js.flow":322};function o(e){var t=a(e);return n(t)}function a(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}o.keys=function(){return Object.keys(r)},o.resolve=a,e.exports=o,o.id=163},190:function(e,t,n){"use strict";(function(e){var r=n(100),o=n(101),a=n(201),i=n(191),s=n(202),l=n(5),c=n.n(l),u=n(20),g=n(130),f=(n(441),window.fetch);window.fetch=function(){return arguments[1].credentials="include",Promise.resolve(f.apply(e,arguments))};var p=function(e){function t(){return Object(r.a)(this,t),Object(a.a)(this,Object(i.a)(t).apply(this,arguments))}return Object(s.a)(t,e),Object(o.a)(t,[{key:"render",value:function(){return c.a.createElement("div",null,c.a.createElement("header",{style:{background:"#09141b",color:"#03a9f4",letterSpacing:"0.15rem",height:"65px",display:"flex",alignItems:"center"}},c.a.createElement("h3",{style:{textDecoration:"none",margin:"0px",fontSize:"18px"}},c.a.createElement("span",{style:{textTransform:"uppercase",marginLeft:"20px",paddingRight:"10px",borderRight:"1px solid #fff"}},"Super Graph"),c.a.createElement("span",{style:{fontSize:"16px",marginLeft:"10px",color:"#fff"}},"Instant GraphQL"))),c.a.createElement(u.Provider,{store:g.store},c.a.createElement(g.Playground,{endpoint:"/api/v1/graphql",settings:"{ 'schema.polling.enable': false, 'request.credentials': 'include', 'general.betaUpdates': true, 'editor.reuseHeaders': true, 'editor.theme': 'dark' }"})))}}]),t}(l.Component);t.a=p}).call(this,n(32))},205:function(e,t,n){e.exports=n(206)},206:function(e,t,n){"use strict";n.r(t);var r=n(5),o=n.n(r),a=n(52),i=n.n(a),s=n(190);i.a.render(o.a.createElement(s.a,null),document.getElementById("root"))},441:function(e,t,n){}},[[205,1,2]]]);
//# sourceMappingURL=main.04d74040.chunk.js.map

File diff suppressed because one or more lines are too long
@@ -1,328 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  DocumentNode,
  FragmentSpreadNode,
  FragmentDefinitionNode,
  OperationDefinitionNode,
  TypeDefinitionNode,
  NamedTypeNode,
} from 'graphql';
import type {
  CompletionItem,
  DefinitionQueryResult,
  Diagnostic,
  GraphQLCache,
  GraphQLConfig,
  GraphQLProjectConfig,
  Uri,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import type {Hover} from 'vscode-languageserver-types';

import {Kind, parse, print} from 'graphql';
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
import {getHoverInformation} from './getHoverInformation';
import {validateQuery, getRange, SEVERITY} from './getDiagnostics';
import {
  getDefinitionQueryResultForFragmentSpread,
  getDefinitionQueryResultForDefinitionNode,
  getDefinitionQueryResultForNamedType,
} from './getDefinition';
import {getASTNodeAtPosition} from 'graphql-language-service-utils';

const {
  FRAGMENT_DEFINITION,
  OBJECT_TYPE_DEFINITION,
  INTERFACE_TYPE_DEFINITION,
  ENUM_TYPE_DEFINITION,
  UNION_TYPE_DEFINITION,
  SCALAR_TYPE_DEFINITION,
  INPUT_OBJECT_TYPE_DEFINITION,
  SCALAR_TYPE_EXTENSION,
  OBJECT_TYPE_EXTENSION,
  INTERFACE_TYPE_EXTENSION,
  UNION_TYPE_EXTENSION,
  ENUM_TYPE_EXTENSION,
  INPUT_OBJECT_TYPE_EXTENSION,
  DIRECTIVE_DEFINITION,
  FRAGMENT_SPREAD,
  OPERATION_DEFINITION,
  NAMED_TYPE,
} = Kind;

export class GraphQLLanguageService {
  _graphQLCache: GraphQLCache;
  _graphQLConfig: GraphQLConfig;

  constructor(cache: GraphQLCache) {
    this._graphQLCache = cache;
    this._graphQLConfig = cache.getGraphQLConfig();
  }

  async getDiagnostics(
    query: string,
    uri: Uri,
    isRelayCompatMode?: boolean,
  ): Promise<Array<Diagnostic>> {
    // Perform syntax diagnostics first, as this doesn't require
    // schema/fragment definitions, even the project configuration.
    let queryHasExtensions = false;
    const projectConfig = this._graphQLConfig.getConfigForFile(uri);
    const schemaPath = projectConfig.schemaPath;
    try {
      const queryAST = parse(query);
      if (!schemaPath || uri !== schemaPath) {
        queryHasExtensions = queryAST.definitions.some(definition => {
          switch (definition.kind) {
            case OBJECT_TYPE_DEFINITION:
            case INTERFACE_TYPE_DEFINITION:
            case ENUM_TYPE_DEFINITION:
            case UNION_TYPE_DEFINITION:
            case SCALAR_TYPE_DEFINITION:
            case INPUT_OBJECT_TYPE_DEFINITION:
            case SCALAR_TYPE_EXTENSION:
            case OBJECT_TYPE_EXTENSION:
            case INTERFACE_TYPE_EXTENSION:
            case UNION_TYPE_EXTENSION:
            case ENUM_TYPE_EXTENSION:
            case INPUT_OBJECT_TYPE_EXTENSION:
            case DIRECTIVE_DEFINITION:
              return true;
          }
          return false;
        });
      }
    } catch (error) {
      const range = getRange(error.locations[0], query);
      return [
        {
          severity: SEVERITY.ERROR,
          message: error.message,
          source: 'GraphQL: Syntax',
          range,
        },
      ];
    }

    // If there's a matching config, proceed to prepare to run validation
    let source = query;
    const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
      projectConfig,
    );
    const fragmentDependencies = await this._graphQLCache.getFragmentDependencies(
      query,
      fragmentDefinitions,
    );
    const dependenciesSource = fragmentDependencies.reduce(
      (prev, cur) => `${prev} ${print(cur.definition)}`,
      '',
    );

    source = `${source} ${dependenciesSource}`;

    let validationAst = null;
    try {
      validationAst = parse(source);
    } catch (error) {
      // the query string is already checked to be parsed properly - errors
      // from this parse must be from corrupted fragment dependencies.
      // For IDEs we don't care for errors outside of the currently edited
      // query, so we return an empty array here.
      return [];
    }

    // Check if there are custom validation rules to be used
    let customRules;
    const customRulesModulePath =
      projectConfig.extensions.customValidationRules;
    if (customRulesModulePath) {
      /* eslint-disable no-implicit-coercion */
      const rulesPath = require.resolve(`${customRulesModulePath}`);
      if (rulesPath) {
        customRules = require(`${rulesPath}`)(this._graphQLConfig);
      }
      /* eslint-enable no-implicit-coercion */
    }

    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName, queryHasExtensions)
      .catch(() => null);

    if (!schema) {
      return [];
    }

    return validateQuery(validationAst, schema, customRules, isRelayCompatMode);
  }

  async getAutocompleteSuggestions(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<Array<CompletionItem>> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName)
      .catch(() => null);

    if (schema) {
      return getAutocompleteSuggestions(schema, query, position);
    }
    return [];
  }

  async getHoverInformation(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<Hover.contents> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName)
      .catch(() => null);

    if (schema) {
      return getHoverInformation(schema, query, position);
    }
    return '';
  }

  async getDefinition(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<?DefinitionQueryResult> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);

    let ast;
    try {
      ast = parse(query);
    } catch (error) {
      return null;
    }

    const node = getASTNodeAtPosition(query, ast, position);
    if (node) {
      switch (node.kind) {
        case FRAGMENT_SPREAD:
          return this._getDefinitionForFragmentSpread(
            query,
            ast,
            node,
            filePath,
            projectConfig,
          );
        case FRAGMENT_DEFINITION:
        case OPERATION_DEFINITION:
          return getDefinitionQueryResultForDefinitionNode(
            filePath,
            query,
            (node: FragmentDefinitionNode | OperationDefinitionNode),
          );
        case NAMED_TYPE:
          return this._getDefinitionForNamedType(
            query,
            ast,
            node,
            filePath,
            projectConfig,
          );
      }
    }
    return null;
  }

  async _getDefinitionForNamedType(
    query: string,
    ast: DocumentNode,
    node: NamedTypeNode,
    filePath: Uri,
    projectConfig: GraphQLProjectConfig,
  ): Promise<?DefinitionQueryResult> {
    const objectTypeDefinitions = await this._graphQLCache.getObjectTypeDefinitions(
      projectConfig,
    );

    const dependencies = await this._graphQLCache.getObjectTypeDependenciesForAST(
      ast,
      objectTypeDefinitions,
    );

    const localObjectTypeDefinitions = ast.definitions.filter(
      definition =>
        definition.kind === OBJECT_TYPE_DEFINITION ||
        definition.kind === INPUT_OBJECT_TYPE_DEFINITION ||
        definition.kind === ENUM_TYPE_DEFINITION,
    );

    const typeCastedDefs = ((localObjectTypeDefinitions: any): Array<
      TypeDefinitionNode,
    >);

    const localOperationDefinationInfos = typeCastedDefs.map(
      (definition: TypeDefinitionNode) => ({
        filePath,
        content: query,
        definition,
      }),
    );

    const result = await getDefinitionQueryResultForNamedType(
      query,
      node,
      dependencies.concat(localOperationDefinationInfos),
    );

    return result;
  }

  async _getDefinitionForFragmentSpread(
    query: string,
    ast: DocumentNode,
    node: FragmentSpreadNode,
    filePath: Uri,
    projectConfig: GraphQLProjectConfig,
  ): Promise<?DefinitionQueryResult> {
    const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
      projectConfig,
    );

    const dependencies = await this._graphQLCache.getFragmentDependenciesForAST(
      ast,
      fragmentDefinitions,
    );

    const localFragDefinitions = ast.definitions.filter(
      definition => definition.kind === FRAGMENT_DEFINITION,
    );

    const typeCastedDefs = ((localFragDefinitions: any): Array<
      FragmentDefinitionNode,
    >);

    const localFragInfos = typeCastedDefs.map(
      (definition: FragmentDefinitionNode) => ({
        filePath,
        content: query,
        definition,
      }),
    );

    const result = await getDefinitionQueryResultForFragmentSpread(
      query,
      node,
      dependencies.concat(localFragInfos),
    );

    return result;
  }
}
@ -1,204 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {GraphQLField, GraphQLSchema, GraphQLType} from 'graphql';
import {isCompositeType} from 'graphql';
import {
  SchemaMetaFieldDef,
  TypeMetaFieldDef,
  TypeNameMetaFieldDef,
} from 'graphql/type/introspection';
import type {
  CompletionItem,
  ContextToken,
  State,
  TypeInfo,
} from 'graphql-language-service-types';

// Utility for returning the state representing the Definition this token state
// is within, if any.
export function getDefinitionState(tokenState: State): ?State {
  let definitionState;

  forEachState(tokenState, state => {
    switch (state.kind) {
      case 'Query':
      case 'ShortQuery':
      case 'Mutation':
      case 'Subscription':
      case 'FragmentDefinition':
        definitionState = state;
        break;
    }
  });

  return definitionState;
}

// Gets the field definition given a type and field name
export function getFieldDef(
  schema: GraphQLSchema,
  type: GraphQLType,
  fieldName: string,
): ?GraphQLField<*, *> {
  if (fieldName === SchemaMetaFieldDef.name && schema.getQueryType() === type) {
    return SchemaMetaFieldDef;
  }
  if (fieldName === TypeMetaFieldDef.name && schema.getQueryType() === type) {
    return TypeMetaFieldDef;
  }
  if (fieldName === TypeNameMetaFieldDef.name && isCompositeType(type)) {
    return TypeNameMetaFieldDef;
  }
  if (type.getFields && typeof type.getFields === 'function') {
    return (type.getFields()[fieldName]: any);
  }

  return null;
}

// Utility for iterating through a CodeMirror parse state stack bottom-up.
export function forEachState(
  stack: State,
  fn: (state: State) => ?TypeInfo,
): void {
  const reverseStateStack = [];
  let state = stack;
  while (state && state.kind) {
    reverseStateStack.push(state);
    state = state.prevState;
  }
  for (let i = reverseStateStack.length - 1; i >= 0; i--) {
    fn(reverseStateStack[i]);
  }
}

export function objectValues(object: Object): Array<any> {
  const keys = Object.keys(object);
  const len = keys.length;
  const values = new Array(len);
  for (let i = 0; i < len; ++i) {
    values[i] = object[keys[i]];
  }
  return values;
}

// Create the expected hint response given a possible list and a token
export function hintList(
  token: ContextToken,
  list: Array<CompletionItem>,
): Array<CompletionItem> {
  return filterAndSortList(list, normalizeText(token.string));
}

// Given a list of hint entries and currently typed text, sort and filter to
// provide a concise list.
function filterAndSortList(
  list: Array<CompletionItem>,
  text: string,
): Array<CompletionItem> {
  if (!text) {
    return filterNonEmpty(list, entry => !entry.isDeprecated);
  }

  const byProximity = list.map(entry => ({
    proximity: getProximity(normalizeText(entry.label), text),
    entry,
  }));

  const conciseMatches = filterNonEmpty(
    filterNonEmpty(byProximity, pair => pair.proximity <= 2),
    pair => !pair.entry.isDeprecated,
  );

  const sortedMatches = conciseMatches.sort(
    (a, b) =>
      (a.entry.isDeprecated ? 1 : 0) - (b.entry.isDeprecated ? 1 : 0) ||
      a.proximity - b.proximity ||
      a.entry.label.length - b.entry.label.length,
  );

  return sortedMatches.map(pair => pair.entry);
}

// Filters the array by the predicate, unless it results in an empty array,
// in which case return the original array.
function filterNonEmpty(
  array: Array<Object>,
  predicate: (entry: Object) => boolean,
): Array<Object> {
  const filtered = array.filter(predicate);
  return filtered.length === 0 ? array : filtered;
}

function normalizeText(text: string): string {
  return text.toLowerCase().replace(/\W/g, '');
}

// Determine a numeric proximity for a suggestion based on current text.
function getProximity(suggestion: string, text: string): number {
  // start with lexical distance
  let proximity = lexicalDistance(text, suggestion);
  if (suggestion.length > text.length) {
    // do not penalize long suggestions.
    proximity -= suggestion.length - text.length - 1;
    // penalize suggestions not starting with this phrase
    proximity += suggestion.indexOf(text) === 0 ? 0 : 0.5;
  }
  return proximity;
}

/**
 * Computes the lexical distance between strings A and B.
 *
 * The "distance" between two strings is given by counting the minimum number
 * of edits needed to transform string A into string B. An edit can be an
 * insertion, deletion, or substitution of a single character, or a swap of two
 * adjacent characters.
 *
 * This distance can be useful for detecting typos in input or sorting
 *
 * @param {string} a
 * @param {string} b
 * @return {int} distance in number of edits
 */
function lexicalDistance(a: string, b: string): number {
  let i;
  let j;
  const d = [];
  const aLength = a.length;
  const bLength = b.length;

  for (i = 0; i <= aLength; i++) {
    d[i] = [i];
  }

  for (j = 1; j <= bLength; j++) {
    d[0][j] = j;
  }

  for (i = 1; i <= aLength; i++) {
    for (j = 1; j <= bLength; j++) {
      const cost = a[i - 1] === b[j - 1] ? 0 : 1;

      d[i][j] = Math.min(
        d[i - 1][j] + 1,
        d[i][j - 1] + 1,
        d[i - 1][j - 1] + cost,
      );

      if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
        d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
      }
    }
  }

  return d[aLength][bLength];
}
@ -1,665 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  FragmentDefinitionNode,
  GraphQLDirective,
  GraphQLSchema,
} from 'graphql';
import type {
  CompletionItem,
  ContextToken,
  State,
  TypeInfo,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';

import {
  GraphQLBoolean,
  GraphQLEnumType,
  GraphQLInputObjectType,
  GraphQLList,
  SchemaMetaFieldDef,
  TypeMetaFieldDef,
  TypeNameMetaFieldDef,
  assertAbstractType,
  doTypesOverlap,
  getNamedType,
  getNullableType,
  isAbstractType,
  isCompositeType,
  isInputType,
} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
  forEachState,
  getDefinitionState,
  getFieldDef,
  hintList,
  objectValues,
} from './autocompleteUtils';

/**
 * Given GraphQLSchema, queryText, and context of the current position within
 * the source text, provide a list of typeahead entries.
 */
export function getAutocompleteSuggestions(
  schema: GraphQLSchema,
  queryText: string,
  cursor: Position,
  contextToken?: ContextToken,
): Array<CompletionItem> {
  const token = contextToken || getTokenAtPosition(queryText, cursor);

  const state =
    token.state.kind === 'Invalid' ? token.state.prevState : token.state;

  // relieve flow errors by checking if `state` exists
  if (!state) {
    return [];
  }

  const kind = state.kind;
  const step = state.step;
  const typeInfo = getTypeInfo(schema, token.state);

  // Definition kinds
  if (kind === 'Document') {
    return hintList(token, [
      {label: 'query'},
      {label: 'mutation'},
      {label: 'subscription'},
      {label: 'fragment'},
      {label: '{'},
    ]);
  }

  // Field names
  if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
    return getSuggestionsForFieldNames(token, typeInfo, schema);
  }

  // Argument names
  if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
    const argDefs = typeInfo.argDefs;
    if (argDefs) {
      return hintList(
        token,
        argDefs.map(argDef => ({
          label: argDef.name,
          detail: String(argDef.type),
          documentation: argDef.description,
        })),
      );
    }
  }

  // Input Object fields
  if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
    if (typeInfo.objectFieldDefs) {
      const objectFields = objectValues(typeInfo.objectFieldDefs);
      return hintList(
        token,
        objectFields.map(field => ({
          label: field.name,
          detail: String(field.type),
          documentation: field.description,
        })),
      );
    }
  }

  // Input values: Enum and Boolean
  if (
    kind === 'EnumValue' ||
    (kind === 'ListValue' && step === 1) ||
    (kind === 'ObjectField' && step === 2) ||
    (kind === 'Argument' && step === 2)
  ) {
    return getSuggestionsForInputValues(token, typeInfo);
  }

  // Fragment type conditions
  if (
    (kind === 'TypeCondition' && step === 1) ||
    (kind === 'NamedType' &&
      state.prevState != null &&
      state.prevState.kind === 'TypeCondition')
  ) {
    return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
  }

  // Fragment spread names
  if (kind === 'FragmentSpread' && step === 1) {
    return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
  }

  // Variable definition types
  if (
    (kind === 'VariableDefinition' && step === 2) ||
    (kind === 'ListType' && step === 1) ||
    (kind === 'NamedType' &&
      state.prevState &&
      (state.prevState.kind === 'VariableDefinition' ||
        state.prevState.kind === 'ListType'))
  ) {
    return getSuggestionsForVariableDefinition(token, schema);
  }

  // Directive names
  if (kind === 'Directive') {
    return getSuggestionsForDirective(token, state, schema);
  }

  return [];
}

// Helper functions to get suggestions for each kind
function getSuggestionsForFieldNames(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  if (typeInfo.parentType) {
    const parentType = typeInfo.parentType;
    const fields =
      parentType.getFields instanceof Function
        ? objectValues(parentType.getFields())
        : [];
    if (isAbstractType(parentType)) {
      fields.push(TypeNameMetaFieldDef);
    }
    if (parentType === schema.getQueryType()) {
      fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
    }
    return hintList(
      token,
      fields.map(field => ({
        label: field.name,
        detail: String(field.type),
        documentation: field.description,
        isDeprecated: field.isDeprecated,
        deprecationReason: field.deprecationReason,
      })),
    );
  }
  return [];
}

function getSuggestionsForInputValues(
  token: ContextToken,
  typeInfo: TypeInfo,
): Array<CompletionItem> {
  const namedInputType = getNamedType(typeInfo.inputType);
  if (namedInputType instanceof GraphQLEnumType) {
    const values = namedInputType.getValues();
    return hintList(
      token,
      values.map(value => ({
        label: value.name,
        detail: String(namedInputType),
        documentation: value.description,
        isDeprecated: value.isDeprecated,
        deprecationReason: value.deprecationReason,
      })),
    );
  } else if (namedInputType === GraphQLBoolean) {
    return hintList(token, [
      {
        label: 'true',
        detail: String(GraphQLBoolean),
        documentation: 'Not false.',
      },
      {
        label: 'false',
        detail: String(GraphQLBoolean),
        documentation: 'Not true.',
      },
    ]);
  }

  return [];
}

function getSuggestionsForFragmentTypeConditions(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  let possibleTypes;
  if (typeInfo.parentType) {
    if (isAbstractType(typeInfo.parentType)) {
      const abstractType = assertAbstractType(typeInfo.parentType);
      // Collect both the possible Object types as well as the interfaces
      // they implement.
      const possibleObjTypes = schema.getPossibleTypes(abstractType);
      const possibleIfaceMap = Object.create(null);
      possibleObjTypes.forEach(type => {
        type.getInterfaces().forEach(iface => {
          possibleIfaceMap[iface.name] = iface;
        });
      });
      possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
    } else {
      // The parent type is a non-abstract Object type, so the only possible
      // type that can be used is that same type.
      possibleTypes = [typeInfo.parentType];
    }
  } else {
    const typeMap = schema.getTypeMap();
    possibleTypes = objectValues(typeMap).filter(isCompositeType);
  }
  return hintList(
    token,
    possibleTypes.map(type => {
      const namedType = getNamedType(type);
      return {
        label: String(type),
        documentation: (namedType && namedType.description) || '',
      };
    }),
  );
}

function getSuggestionsForFragmentSpread(
  token: ContextToken,
  typeInfo: TypeInfo,
  schema: GraphQLSchema,
  queryText: string,
): Array<CompletionItem> {
  const typeMap = schema.getTypeMap();
  const defState = getDefinitionState(token.state);
  const fragments = getFragmentDefinitions(queryText);

  // Filter down to only the fragments which may exist here.
  const relevantFrags = fragments.filter(
    frag =>
      // Only include fragments with known types.
      typeMap[frag.typeCondition.name.value] &&
      // Only include fragments which are not cyclic.
      !(
        defState &&
        defState.kind === 'FragmentDefinition' &&
        defState.name === frag.name.value
      ) &&
      // Only include fragments which could possibly be spread here.
      isCompositeType(typeInfo.parentType) &&
      isCompositeType(typeMap[frag.typeCondition.name.value]) &&
      doTypesOverlap(
        schema,
        typeInfo.parentType,
        typeMap[frag.typeCondition.name.value],
      ),
  );

  return hintList(
    token,
    relevantFrags.map(frag => ({
      label: frag.name.value,
      detail: String(typeMap[frag.typeCondition.name.value]),
      documentation: `fragment ${frag.name.value} on ${
        frag.typeCondition.name.value
      }`,
    })),
  );
}

function getFragmentDefinitions(
  queryText: string,
): Array<FragmentDefinitionNode> {
  const fragmentDefs = [];
  runOnlineParser(queryText, (_, state) => {
    if (state.kind === 'FragmentDefinition' && state.name && state.type) {
      fragmentDefs.push({
        kind: 'FragmentDefinition',
        name: {
          kind: 'Name',
          value: state.name,
        },
        selectionSet: {
          kind: 'SelectionSet',
          selections: [],
        },
        typeCondition: {
          kind: 'NamedType',
          name: {
            kind: 'Name',
            value: state.type,
          },
        },
      });
    }
  });

  return fragmentDefs;
}

function getSuggestionsForVariableDefinition(
  token: ContextToken,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  const inputTypeMap = schema.getTypeMap();
  const inputTypes = objectValues(inputTypeMap).filter(isInputType);
  return hintList(
    token,
    inputTypes.map(type => ({
      label: type.name,
      documentation: type.description,
    })),
  );
}

function getSuggestionsForDirective(
  token: ContextToken,
  state: State,
  schema: GraphQLSchema,
): Array<CompletionItem> {
  if (state.prevState && state.prevState.kind) {
    const directives = schema
      .getDirectives()
      .filter(directive => canUseDirective(state.prevState, directive));
    return hintList(
      token,
      directives.map(directive => ({
        label: directive.name,
        documentation: directive.description || '',
      })),
    );
  }
  return [];
}

export function getTokenAtPosition(
  queryText: string,
  cursor: Position,
): ContextToken {
  let styleAtCursor = null;
  let stateAtCursor = null;
  let stringAtCursor = null;
  const token = runOnlineParser(queryText, (stream, state, style, index) => {
    if (index === cursor.line) {
      if (stream.getCurrentPosition() >= cursor.character) {
        styleAtCursor = style;
        stateAtCursor = {...state};
        stringAtCursor = stream.current();
        return 'BREAK';
      }
    }
  });

  // Return the state/style of parsed token in case those at cursor aren't
  // available.
  return {
    start: token.start,
    end: token.end,
    string: stringAtCursor || token.string,
    state: stateAtCursor || token.state,
    style: styleAtCursor || token.style,
  };
}

/**
 * Provides a utility function to parse a given query text and construct a
 * `token` context object.
 * A token context provides useful information about the token/style that
 * CharacterStream currently possesses, as well as the end state and style
 * of the token.
 */
type callbackFnType = (
  stream: CharacterStream,
  state: State,
  style: string,
  index: number,
) => void | 'BREAK';

function runOnlineParser(
  queryText: string,
  callback: callbackFnType,
): ContextToken {
  const lines = queryText.split('\n');
  const parser = onlineParser();
  let state = parser.startState();
  let style = '';

  let stream: CharacterStream = new CharacterStream('');

  for (let i = 0; i < lines.length; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      style = parser.token(stream, state);
      const code = callback(stream, state, style, i);
      if (code === 'BREAK') {
        break;
      }
    }

    // Above while loop won't run if there is an empty line.
    // Run the callback one more time to catch this.
    callback(stream, state, style, i);

    if (!state.kind) {
      state = parser.startState();
    }
  }

  return {
    start: stream.getStartOfToken(),
    end: stream.getCurrentPosition(),
    string: stream.current(),
    state,
    style,
  };
}

function canUseDirective(
  state: $PropertyType<State, 'prevState'>,
  directive: GraphQLDirective,
): boolean {
  if (!state || !state.kind) {
    return false;
  }
  const kind = state.kind;
  const locations = directive.locations;
  switch (kind) {
    case 'Query':
      return locations.indexOf('QUERY') !== -1;
    case 'Mutation':
      return locations.indexOf('MUTATION') !== -1;
    case 'Subscription':
      return locations.indexOf('SUBSCRIPTION') !== -1;
    case 'Field':
    case 'AliasedField':
      return locations.indexOf('FIELD') !== -1;
    case 'FragmentDefinition':
      return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
    case 'FragmentSpread':
      return locations.indexOf('FRAGMENT_SPREAD') !== -1;
    case 'InlineFragment':
      return locations.indexOf('INLINE_FRAGMENT') !== -1;

    // Schema Definitions
    case 'SchemaDef':
      return locations.indexOf('SCHEMA') !== -1;
    case 'ScalarDef':
      return locations.indexOf('SCALAR') !== -1;
    case 'ObjectTypeDef':
      return locations.indexOf('OBJECT') !== -1;
    case 'FieldDef':
      return locations.indexOf('FIELD_DEFINITION') !== -1;
    case 'InterfaceDef':
      return locations.indexOf('INTERFACE') !== -1;
    case 'UnionDef':
      return locations.indexOf('UNION') !== -1;
    case 'EnumDef':
      return locations.indexOf('ENUM') !== -1;
    case 'EnumValue':
      return locations.indexOf('ENUM_VALUE') !== -1;
    case 'InputDef':
      return locations.indexOf('INPUT_OBJECT') !== -1;
    case 'InputValueDef':
      const prevStateKind = state.prevState && state.prevState.kind;
      switch (prevStateKind) {
        case 'ArgumentsDef':
          return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
        case 'InputDef':
          return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
      }
  }
  return false;
}

// Utility for collecting rich type information given any token's state
// from the graphql-mode parser.
export function getTypeInfo(
  schema: GraphQLSchema,
  tokenState: State,
): TypeInfo {
  let argDef;
  let argDefs;
  let directiveDef;
  let enumValue;
  let fieldDef;
  let inputType;
  let objectFieldDefs;
  let parentType;
  let type;

  forEachState(tokenState, state => {
    switch (state.kind) {
      case 'Query':
      case 'ShortQuery':
        type = schema.getQueryType();
        break;
      case 'Mutation':
        type = schema.getMutationType();
        break;
      case 'Subscription':
        type = schema.getSubscriptionType();
        break;
      case 'InlineFragment':
      case 'FragmentDefinition':
        if (state.type) {
          type = schema.getType(state.type);
        }
        break;
      case 'Field':
      case 'AliasedField':
        if (!type || !state.name) {
          fieldDef = null;
        } else {
          fieldDef = parentType
            ? getFieldDef(schema, parentType, state.name)
            : null;
          type = fieldDef ? fieldDef.type : null;
        }
        break;
      case 'SelectionSet':
        parentType = getNamedType(type);
        break;
      case 'Directive':
        directiveDef = state.name ? schema.getDirective(state.name) : null;
        break;
      case 'Arguments':
        if (!state.prevState) {
          argDefs = null;
        } else {
          switch (state.prevState.kind) {
            case 'Field':
              argDefs = fieldDef && fieldDef.args;
              break;
            case 'Directive':
              argDefs = directiveDef && directiveDef.args;
              break;
            case 'AliasedField':
              const name = state.prevState && state.prevState.name;
              if (!name) {
                argDefs = null;
                break;
              }
              const field = parentType
                ? getFieldDef(schema, parentType, name)
                : null;
              if (!field) {
                argDefs = null;
                break;
              }
              argDefs = field.args;
              break;
            default:
              argDefs = null;
              break;
          }
        }
        break;
      case 'Argument':
        if (argDefs) {
          for (let i = 0; i < argDefs.length; i++) {
            if (argDefs[i].name === state.name) {
              argDef = argDefs[i];
              break;
            }
          }
        }
        inputType = argDef && argDef.type;
        break;
      case 'EnumValue':
        const enumType = getNamedType(inputType);
        enumValue =
          enumType instanceof GraphQLEnumType
            ? find(enumType.getValues(), val => val.value === state.name)
            : null;
        break;
      case 'ListValue':
        const nullableType = getNullableType(inputType);
        inputType =
          nullableType instanceof GraphQLList ? nullableType.ofType : null;
        break;
      case 'ObjectValue':
        const objectType = getNamedType(inputType);
        objectFieldDefs =
          objectType instanceof GraphQLInputObjectType
            ? objectType.getFields()
            : null;
        break;
      case 'ObjectField':
        const objectField =
          state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
        inputType = objectField && objectField.type;
        break;
      case 'NamedType':
        if (state.name) {
          type = schema.getType(state.name);
        }
        break;
    }
  });

  return {
    argDef,
    argDefs,
    directiveDef,
    enumValue,
    fieldDef,
    inputType,
    objectFieldDefs,
    parentType,
    type,
  };
}

// Returns the first item in the array which causes predicate to return truthy.
function find(array, predicate) {
  for (let i = 0; i < array.length; i++) {
    if (predicate(array[i])) {
      return array[i];
    }
  }
  return null;
}
@ -1,136 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  ASTNode,
  FragmentSpreadNode,
  FragmentDefinitionNode,
  OperationDefinitionNode,
  NamedTypeNode,
  TypeDefinitionNode,
} from 'graphql';
import type {
  Definition,
  DefinitionQueryResult,
  FragmentInfo,
  Position,
  Range,
  Uri,
  ObjectTypeInfo,
} from 'graphql-language-service-types';
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
import invariant from 'assert';

export const LANGUAGE = 'GraphQL';

function getRange(text: string, node: ASTNode): Range {
  const location = node.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return locToRange(text, location);
}

function getPosition(text: string, node: ASTNode): Position {
  const location = node.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return offsetToPosition(text, location.start);
}

export async function getDefinitionQueryResultForNamedType(
  text: string,
  node: NamedTypeNode,
  dependencies: Array<ObjectTypeInfo>,
): Promise<DefinitionQueryResult> {
  const name = node.name.value;
  const defNodes = dependencies.filter(
    ({definition}) => definition.name && definition.name.value === name,
  );
  if (defNodes.length === 0) {
    process.stderr.write(`Definition not found for GraphQL type ${name}`);
    return {queryRange: [], definitions: []};
  }
  const definitions: Array<Definition> = defNodes.map(
    ({filePath, content, definition}) =>
      getDefinitionForNodeDefinition(filePath || '', content, definition),
  );
  return {
    definitions,
    queryRange: definitions.map(_ => getRange(text, node)),
  };
}

export async function getDefinitionQueryResultForFragmentSpread(
  text: string,
  fragment: FragmentSpreadNode,
  dependencies: Array<FragmentInfo>,
): Promise<DefinitionQueryResult> {
  const name = fragment.name.value;
  const defNodes = dependencies.filter(
    ({definition}) => definition.name.value === name,
  );
  if (defNodes.length === 0) {
    process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
    return {queryRange: [], definitions: []};
  }
  const definitions: Array<Definition> = defNodes.map(
    ({filePath, content, definition}) =>
      getDefinitionForFragmentDefinition(filePath || '', content, definition),
  );
  return {
    definitions,
    queryRange: definitions.map(_ => getRange(text, fragment)),
  };
}

export function getDefinitionQueryResultForDefinitionNode(
  path: Uri,
  text: string,
  definition: FragmentDefinitionNode | OperationDefinitionNode,
): DefinitionQueryResult {
  return {
    definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
    queryRange: definition.name ? [getRange(text, definition.name)] : [],
  };
}

function getDefinitionForFragmentDefinition(
  path: Uri,
  text: string,
  definition: FragmentDefinitionNode | OperationDefinitionNode,
): Definition {
  const name = definition.name;
  invariant(name, 'Expected ASTNode to have a Name.');
  return {
    path,
    position: getPosition(text, definition),
    range: getRange(text, definition),
    name: name.value || '',
    language: LANGUAGE,
    // This is a file inside the project root, good enough for now
    projectRoot: path,
  };
}

function getDefinitionForNodeDefinition(
  path: Uri,
  text: string,
  definition: TypeDefinitionNode,
): Definition {
  const name = definition.name;
  invariant(name, 'Expected ASTNode to have a Name.');
  return {
    path,
    position: getPosition(text, definition),
    range: getRange(text, definition),
    name: name.value || '',
    language: LANGUAGE,
    // This is a file inside the project root, good enough for now
    projectRoot: path,
  };
}
@ -1,172 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  ASTNode,
  DocumentNode,
  GraphQLError,
  GraphQLSchema,
  Location,
  SourceLocation,
} from 'graphql';
import type {
  Diagnostic,
  CustomValidationRule,
} from 'graphql-language-service-types';

import invariant from 'assert';
import {findDeprecatedUsages, parse} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
  Position,
  Range,
  validateWithCustomRules,
} from 'graphql-language-service-utils';

export const SEVERITY = {
  ERROR: 1,
  WARNING: 2,
  INFORMATION: 3,
  HINT: 4,
};

export function getDiagnostics(
  query: string,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  let ast = null;
  try {
    ast = parse(query);
  } catch (error) {
    const range = getRange(error.locations[0], query);
    return [
      {
        severity: SEVERITY.ERROR,
        message: error.message,
        source: 'GraphQL: Syntax',
        range,
      },
    ];
  }

  return validateQuery(ast, schema, customRules, isRelayCompatMode);
}

export function validateQuery(
  ast: DocumentNode,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  // We cannot validate the query unless a schema is provided.
  if (!schema) {
    return [];
  }

  const validationErrorAnnotations = mapCat(
    validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
    error => annotations(error, SEVERITY.ERROR, 'Validation'),
  );
  // Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
  // support older versions of graphql-js.
  const deprecationWarningAnnotations = !findDeprecatedUsages
    ? []
    : mapCat(findDeprecatedUsages(schema, ast), error =>
        annotations(error, SEVERITY.WARNING, 'Deprecation'),
      );
  return validationErrorAnnotations.concat(deprecationWarningAnnotations);
}

// General utility for map-cating (aka flat-mapping).
function mapCat<T>(
  array: Array<T>,
  mapper: (item: T) => Array<any>,
): Array<any> {
  return Array.prototype.concat.apply([], array.map(mapper));
}

function annotations(
  error: GraphQLError,
  severity: number,
  type: string,
): Array<Diagnostic> {
  if (!error.nodes) {
    return [];
  }
  return error.nodes.map(node => {
    const highlightNode =
      node.kind !== 'Variable' && node.name
        ? node.name
        : node.variable
        ? node.variable
        : node;

    invariant(error.locations, 'GraphQL validation error requires locations.');
    const loc = error.locations[0];
    const highlightLoc = getLocation(highlightNode);
    const end = loc.column + (highlightLoc.end - highlightLoc.start);
    return {
      source: `GraphQL: ${type}`,
      message: error.message,
      severity,
      range: new Range(
        new Position(loc.line - 1, loc.column - 1),
        new Position(loc.line - 1, end),
      ),
    };
  });
}

export function getRange(location: SourceLocation, queryText: string) {
  const parser = onlineParser();
  const state = parser.startState();
  const lines = queryText.split('\n');

  invariant(
    lines.length >= location.line,
    'Query text must have more lines than where the error happened',
  );

  let stream = null;

  for (let i = 0; i < location.line; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      const style = parser.token(stream, state);
      if (style === 'invalidchar') {
        break;
      }
    }
  }

  invariant(stream, 'Expected Parser stream to be available.');

  const line = location.line - 1;
  const start = stream.getStartOfToken();
  const end = stream.getCurrentPosition();

  return new Range(new Position(line, start), new Position(line, end));
}

/**
 * Get location info from a node in a type-safe way.
 *
 * The only way a node could not have a location is if we initialized the parser
 * (and therefore the lexer) with the `noLocation` option, but we always
 * call `parse` without options above.
 */
function getLocation(node: any): Location {
  const typeCastedNode = (node: ASTNode);
  const location = typeCastedNode.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return location;
}
@ -1,186 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

/**
 * Ported from codemirror-graphql
 * https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
 */

import type {GraphQLSchema} from 'graphql';
import type {ContextToken} from 'graphql-language-service-types';
import type {Hover} from 'vscode-languageserver-types';
import type {Position} from 'graphql-language-service-utils';
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
import {GraphQLNonNull, GraphQLList} from 'graphql';

export function getHoverInformation(
  schema: GraphQLSchema,
  queryText: string,
  cursor: Position,
  contextToken?: ContextToken,
): Hover.contents {
  const token = contextToken || getTokenAtPosition(queryText, cursor);

  if (!schema || !token || !token.state) {
    return [];
  }

  const state = token.state;
  const kind = state.kind;
  const step = state.step;
  const typeInfo = getTypeInfo(schema, token.state);
  const options = {schema};

  // Given a Schema and a Token, produce the contents of an info tooltip.
  // To do this, create a div element that we will render "into" and then pass
  // it to various rendering functions.
  if (
    (kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
    (kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
  ) {
    const into = [];
    renderField(into, typeInfo, options);
    renderDescription(into, options, typeInfo.fieldDef);
    return into.join('').trim();
  } else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
    const into = [];
    renderDirective(into, typeInfo, options);
    renderDescription(into, options, typeInfo.directiveDef);
    return into.join('').trim();
  } else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
    const into = [];
    renderArg(into, typeInfo, options);
    renderDescription(into, options, typeInfo.argDef);
    return into.join('').trim();
  } else if (
    kind === 'EnumValue' &&
    typeInfo.enumValue &&
    typeInfo.enumValue.description
  ) {
    const into = [];
    renderEnumValue(into, typeInfo, options);
    renderDescription(into, options, typeInfo.enumValue);
    return into.join('').trim();
  } else if (
    kind === 'NamedType' &&
    typeInfo.type &&
    typeInfo.type.description
  ) {
    const into = [];
    renderType(into, typeInfo, options, typeInfo.type);
    renderDescription(into, options, typeInfo.type);
    return into.join('').trim();
  }
}

function renderField(into, typeInfo, options) {
  renderQualifiedField(into, typeInfo, options);
  renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
}

function renderQualifiedField(into, typeInfo, options) {
  if (!typeInfo.fieldDef) {
    return;
  }
  const fieldName = (typeInfo.fieldDef.name: string);
  if (fieldName.slice(0, 2) !== '__') {
    renderType(into, typeInfo, options, typeInfo.parentType);
    text(into, '.');
  }
  text(into, fieldName);
}

function renderDirective(into, typeInfo, options) {
  if (!typeInfo.directiveDef) {
    return;
  }
  const name = '@' + typeInfo.directiveDef.name;
  text(into, name);
}

function renderArg(into, typeInfo, options) {
  if (typeInfo.directiveDef) {
    renderDirective(into, typeInfo, options);
  } else if (typeInfo.fieldDef) {
    renderQualifiedField(into, typeInfo, options);
  }

  if (!typeInfo.argDef) {
    return;
  }

  const name = typeInfo.argDef.name;
  text(into, '(');
  text(into, name);
  renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
  text(into, ')');
}

function renderTypeAnnotation(into, typeInfo, options, t) {
  text(into, ': ');
  renderType(into, typeInfo, options, t);
}

function renderEnumValue(into, typeInfo, options) {
  if (!typeInfo.enumValue) {
    return;
  }
  const name = typeInfo.enumValue.name;
  renderType(into, typeInfo, options, typeInfo.inputType);
  text(into, '.');
  text(into, name);
}

function renderType(into, typeInfo, options, t) {
  if (!t) {
    return;
  }
  if (t instanceof GraphQLNonNull) {
    renderType(into, typeInfo, options, t.ofType);
    text(into, '!');
  } else if (t instanceof GraphQLList) {
    text(into, '[');
    renderType(into, typeInfo, options, t.ofType);
    text(into, ']');
  } else {
    text(into, t.name);
  }
}

function renderDescription(into, options, def) {
  if (!def) {
    return;
  }
  const description =
    typeof def.description === 'string' ? def.description : null;
  if (description) {
    text(into, '\n\n');
    text(into, description);
  }
  renderDeprecation(into, options, def);
}

function renderDeprecation(into, options, def) {
  if (!def) {
    return;
  }
  const reason =
    typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
  if (!reason) {
    return;
  }
  text(into, '\n\n');
  text(into, 'Deprecated: ');
  text(into, reason);
}

function text(into: string[], content: string) {
  into.push(content);
}
@ -1,121 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  Outline,
  TextToken,
  TokenKind,
} from 'graphql-language-service-types';

import {Kind, parse, visit} from 'graphql';
import {offsetToPosition} from 'graphql-language-service-utils';

const {INLINE_FRAGMENT} = Kind;

const OUTLINEABLE_KINDS = {
  Field: true,
  OperationDefinition: true,
  Document: true,
  SelectionSet: true,
  Name: true,
  FragmentDefinition: true,
  FragmentSpread: true,
  InlineFragment: true,
};

type OutlineTreeConverterType = {[name: string]: Function};

export function getOutline(queryText: string): ?Outline {
  let ast;
  try {
    ast = parse(queryText);
  } catch (error) {
    return null;
  }

  const visitorFns = outlineTreeConverter(queryText);
  const outlineTrees = visit(ast, {
    leave(node) {
      if (
        OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
        visitorFns[node.kind]
      ) {
        return visitorFns[node.kind](node);
      }
      return null;
    },
  });
  return {outlineTrees};
}

function outlineTreeConverter(docText: string): OutlineTreeConverterType {
  const meta = node => ({
    representativeName: node.name,
    startPosition: offsetToPosition(docText, node.loc.start),
    endPosition: offsetToPosition(docText, node.loc.end),
    children: node.selectionSet || [],
  });
  return {
    Field: node => {
      const tokenizedText = node.alias
        ? [buildToken('plain', node.alias), buildToken('plain', ': ')]
        : [];
      tokenizedText.push(buildToken('plain', node.name));
      return {tokenizedText, ...meta(node)};
    },
    OperationDefinition: node => ({
      tokenizedText: [
        buildToken('keyword', node.operation),
        buildToken('whitespace', ' '),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    Document: node => node.definitions,
    SelectionSet: node =>
      concatMap(node.selections, child => {
        return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
      }),
    Name: node => node.value,
    FragmentDefinition: node => ({
      tokenizedText: [
        buildToken('keyword', 'fragment'),
        buildToken('whitespace', ' '),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    FragmentSpread: node => ({
      tokenizedText: [
        buildToken('plain', '...'),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    InlineFragment: node => node.selectionSet,
  };
}

function buildToken(kind: TokenKind, value: string): TextToken {
  return {kind, value};
}

function concatMap(arr: Array<any>, fn: Function): Array<any> {
  const res = [];
  for (let i = 0; i < arr.length; i++) {
    const x = fn(arr[i], i);
    if (Array.isArray(x)) {
      res.push(...x);
    } else {
      res.push(x);
    }
  }
  return res;
}
@ -1,31 +0,0 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

export {
  getDefinitionState,
  getFieldDef,
  forEachState,
  objectValues,
  hintList,
} from './autocompleteUtils';

export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';

export {
  LANGUAGE,
  getDefinitionQueryResultForFragmentSpread,
  getDefinitionQueryResultForDefinitionNode,
} from './getDefinition';

export {getDiagnostics, validateQuery} from './getDiagnostics';
export {getOutline} from './getOutline';
export {getHoverInformation} from './getHoverInformation';

export {GraphQLLanguageService} from './GraphQLLanguageService';
@ -1,7 +0,0 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 841.9 595.3">
<g fill="#61DAFB">
<path d="M666.3 296.5c0-32.5-40.7-63.3-103.1-82.4 14.4-63.6 8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6 0 8.3.9 11.4 2.6 13.6 7.8 19.5 37.5 14.9 75.7-1.1 9.4-2.9 19.3-5.1 29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50 32.6-30.3 63.2-46.9 84-46.9V78c-27.5 0-63.5 19.6-99.9 53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7 0 51.4 16.5 84 46.6-14 14.7-28 31.4-41.3 49.9-22.6 2.4-44 6.1-63.6 11-2.3-10-4-19.7-5.2-29-4.7-38.2 1.1-67.9 14.6-75.8 3-1.8 6.9-2.6 11.5-2.6V78.5c-8.4 0-16 1.8-22.6 5.6-28.1 16.2-34.4 66.7-19.9 130.1-62.2 19.2-102.7 49.9-102.7 82.3 0 32.5 40.7 63.3 103.1 82.4-14.4 63.6-8 114.2 20.2 130.4 6.5 3.8 14.1 5.6 22.5 5.6 27.5 0 63.5-19.6 99.9-53.6 36.4 33.8 72.4 53.2 99.9 53.2 8.4 0 16-1.8 22.6-5.6 28.1-16.2 34.4-66.7 19.9-130.1 62-19.1 102.5-49.9 102.5-82.3zm-130.2-66.7c-3.7 12.9-8.3 26.2-13.5 39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4 14.2 2.1 27.9 4.7 41 7.9zm-45.8 106.5c-7.8 13.5-15.8 26.3-24.1 38.2-14.9 1.3-30 2-45.2 2-15.1 0-30.2-.7-45-1.9-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8 6.2-13.4 13.2-26.8 20.7-39.9 7.8-13.5 15.8-26.3 24.1-38.2 14.9-1.3 30-2 45.2-2 15.1 0 30.2.7 45 1.9 8.3 11.9 16.4 24.6 24.2 38 7.6 13.1 14.5 26.4 20.8 39.8-6.3 13.4-13.2 26.8-20.7 39.9zm32.3-13c5.4 13.4 10 26.8 13.8 39.8-13.1 3.2-26.9 5.9-41.2 8 4.9-7.7 9.8-15.6 14.4-23.7 4.6-8 8.9-16.1 13-24.1zM421.2 430c-9.3-9.6-18.6-20.3-27.8-32 9 .4 18.2.7 27.5.7 9.4 0 18.7-.2 27.8-.7-9 11.7-18.3 22.4-27.5 32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9 3.7-12.9 8.3-26.2 13.5-39.5 4.1 8 8.4 16 13.1 24 4.7 8 9.5 15.8 14.4 23.4zM420.7 163c9.3 9.6 18.6 20.3 27.8 32-9-.4-18.2-.7-27.5-.7-9.4 0-18.7.2-27.8.7 9-11.7 18.3-22.4 27.5-32zm-74 58.9c-4.9 7.7-9.8 15.6-14.4 23.7-4.6 8-8.9 16-13 24-5.4-13.4-10-26.8-13.8-39.8 13.1-3.1 26.9-5.8 41.2-7.9zm-90.5 125.2c-35.4-15.1-58.3-34.9-58.3-50.6 0-15.7 22.9-35.6 58.3-50.6 8.6-3.7 18-7 27.7-10.1 5.7 19.6 13.2 40 22.5 60.9-9.2 20.8-16.6 41.1-22.2 60.6-9.9-3.1-19.3-6.5-28-10.2zM310 490c-13.6-7.8-19.5-37.5-14.9-75.7 1.1-9.4 2.9-19.3 5.1-29.4 19.6 4.8 41 8.5 63.5 10.9 13.5 18.5 27.5 35.3 41.6 50-32.6 30.3-63.2 46.9-84 46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7 38.2-1.1 67.9-14.6 75.8-3 1.8-6.9 2.6-11.5 2.6-20.7 0-51.4-16.5-84-46.6 14-14.7 28-31.4 41.3-49.9 22.6-2.4 44-6.1 63.6-11 2.3 10.1 4.1 19.8 5.2 29.1zm38.5-66.7c-8.6 3.7-18 7-27.7 10.1-5.7-19.6-13.2-40-22.5-60.9 9.2-20.8 16.6-41.1 22.2-60.6 9.9 3.1 19.3 6.5 28.1 10.2 35.4 15.1 58.3 34.9 58.3 50.6-.1 15.7-23 35.6-58.4 50.6zM320.8 78.4z"/>
<circle cx="420.9" cy="296.5" r="45.7"/>
<path d="M520.5 78.1z"/>
</g>
</svg>
@ -30,7 +30,7 @@ reload_on_config_change: true
# seed_file: seed.js

# Path pointing to where the migrations can be found
migrations_path: ./config/migrations
migrations_path: ./migrations

# Secret key for general encryption operations like
# encrypting the cursor data
@ -45,6 +45,13 @@ cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
cors_debug: true

# Default API path prefix is /api; you can change it if you like
# api_path: "/data"

# Cache-Control header can help cache queries if your CDN supports cache-control
# on POST requests (does not work with mutations)
# cache_control: "public, max-age=300, s-maxage=600"

# Postgres-related environment variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT
@ -116,18 +123,18 @@ database:
  # database ping timeout is used for db health checking
  ping_timeout: 1m

  # Define additional variables here to be used with filters
  variables:
    admin_account_id: "5"
# Define additional variables here to be used with filters
variables:
  admin_account_id: "5"

  # Field and table names that you wish to block
  blocklist:
    - ar_internal_metadata
    - schema_migrations
    - secret
    - password
    - encrypted
    - token
# Field and table names that you wish to block
blocklist:
  - ar_internal_metadata
  - schema_migrations
  - secret
  - password
  - encrypted
  - token

tables:
  - name: customers
@ -30,7 +30,7 @@ enable_tracing: true
# seed_file: seed.js

# Path pointing to where the migrations can be found
# migrations_path: migrations
# migrations_path: ./migrations

# Secret key for general encryption operations like
# encrypting the cursor data
core/api.go
@ -16,17 +16,12 @@
func main() {
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatalf(err)
		log.Fatal(err)
	}

	conf, err := core.ReadInConfig("./config/dev.yml")
	sg, err := core.NewSuperGraph(nil, db)
	if err != nil {
		log.Fatalf(err)
	}

	sg, err = core.NewSuperGraph(conf, db)
	if err != nil {
		log.Fatalf(err)
		log.Fatal(err)
	}

	query := `
@ -39,7 +34,7 @@

	res, err := sg.GraphQL(context.Background(), query, nil)
	if err != nil {
		log.Fatalf(err)
		log.Fatal(err)
	}

	fmt.Println(string(res.Data))
@ -55,6 +50,7 @@ import (
	_log "log"
	"os"

	"github.com/chirino/graphql"
	"github.com/dosco/super-graph/core/internal/allow"
	"github.com/dosco/super-graph/core/internal/crypto"
	"github.com/dosco/super-graph/core/internal/psql"
@ -81,6 +77,7 @@ type SuperGraph struct {
	conf      *Config
	db        *sql.DB
	log       *_log.Logger
	dbinfo    *psql.DBInfo
	schema    *psql.DBSchema
	allowList *allow.List
	encKey    [32]byte
@ -92,15 +89,26 @@ type SuperGraph struct {
	anonExists bool
	qc         *qcode.Compiler
	pc         *psql.Compiler
	ge         *graphql.Engine
}

// NewSuperGraph creates the SuperGraph struct; this involves querying the database to learn its
// schemas and relationships
func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
	return newSuperGraph(conf, db, nil)
}

// newSuperGraph helps with writing tests and benchmarks
func newSuperGraph(conf *Config, db *sql.DB, dbinfo *psql.DBInfo) (*SuperGraph, error) {
	if conf == nil {
		conf = &Config{}
	}

	sg := &SuperGraph{
		conf: conf,
		db:   db,
		log:  _log.New(os.Stdout, "", 0),
		conf:   conf,
		db:     db,
		dbinfo: dbinfo,
		log:    _log.New(os.Stdout, "", 0),
	}

	if err := sg.initConfig(); err != nil {
@ -123,6 +131,10 @@ func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
		return nil, err
	}

	if err := sg.initGraphQLEgine(); err != nil {
		return nil, err
	}

	if len(conf.SecretKey) != 0 {
		sk := sha256.Sum256([]byte(conf.SecretKey))
		conf.SecretKey = ""
@ -154,7 +166,24 @@ type Result struct {
// In developer mode all named queries are saved into a file `allow.list` and in production mode only
// queries from this file can be run.
func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMessage) (*Result, error) {
	ct := scontext{Context: c, sg: sg, query: query, vars: vars}
	var res Result

	res.op = qcode.GetQType(query)
	res.name = allow.QueryName(query)

	// use the chirino/graphql library for introspection queries
	// disabled when allow list is enforced
	if !sg.conf.UseAllowList && res.name == "IntrospectionQuery" {
		r := sg.ge.ServeGraphQL(&graphql.Request{Query: query})
		res.Data = r.Data

		if r.Error() != nil {
			res.Error = r.Error().Error()
		}
		return &res, r.Error()
	}

	ct := scontext{Context: c, sg: sg, query: query, vars: vars, res: res}

	if len(vars) <= 2 {
		ct.vars = nil
@ -166,9 +195,6 @@ func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMessage) (*Result, error) {
		ct.role = "anon"
	}

	ct.res.op = qcode.GetQType(query)
	ct.res.name = allow.QueryName(query)

	data, err := ct.execQuery()
	if err != nil {
		return &ct.res, err
@ -178,3 +204,9 @@ func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMessage) (*Result, error) {

	return &ct.res, nil
}

// GraphQLSchema function returns the GraphQL schema for the underlying database connected
// to this instance of Super Graph
func (sg *SuperGraph) GraphQLSchema() (string, error) {
	return sg.ge.Schema.String(), nil
}
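
A minimal sketch of calling the new GraphQLSchema method (assuming sg was created with core.NewSuperGraph as in the example above; the print and error handling here are illustrative, not part of this change):

	schema, err := sg.GraphQLSchema()
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(schema)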
core/api_test.go (new file)
@ -0,0 +1,62 @@
package core

import (
	"context"
	"fmt"
	"testing"

	"github.com/DATA-DOG/go-sqlmock"
	"github.com/dosco/super-graph/core/internal/psql"
)

func BenchmarkGraphQL(b *testing.B) {
	ct := context.WithValue(context.Background(), UserIDKey, "1")

	db, _, err := sqlmock.New()
	if err != nil {
		b.Fatal(err)
	}
	defer db.Close()

	// mock.ExpectQuery(`^SELECT jsonb_build_object`).WithArgs()
	c := &Config{DefaultBlock: true}
	sg, err := newSuperGraph(c, db, psql.GetTestDBInfo())
	if err != nil {
		b.Fatal(err)
	}

	query := `
	query {
		products {
			id
			name
			user {
				full_name
				phone
				email
			}
			customers {
				id
				email
			}
		}
		users {
			id
			name
		}
	}`

	b.ResetTimer()
	b.ReportAllocs()

	b.RunParallel(func(pb *testing.PB) {
		for pb.Next() {
			_, err = sg.GraphQL(ct, query, nil)
		}
	})

	fmt.Println(err)

	//fmt.Println(mock.ExpectationsWereMet())
}
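
To try this benchmark locally, an invocation along these lines should work (an assumed command, not part of this change):

	go test -bench=GraphQL -benchmem ./core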
core/args.go
@ -9,6 +9,8 @@ import (
	"github.com/dosco/super-graph/jsn"
)

// argMap function is used by the fasttemplate code to replace
// variable placeholders with their values
func (c *scontext) argMap() func(w io.Writer, tag string) (int, error) {
	return func(w io.Writer, tag string) (int, error) {
		switch tag {
@ -56,10 +58,13 @@ func (c *scontext) argMap() func(w io.Writer, tag string) (int, error) {
			return w.Write(v1)
		}

		return w.Write(escQuote(fields[0].Value))
		return w.Write(escSQuote(fields[0].Value))
	}
}

// argList function is used to create a list of arguments to pass
// to a prepared statement. FYI no escaping of single quotes is
// needed here
func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
	vars := make([]interface{}, len(args))

@ -113,7 +118,7 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
		if v, ok := fields[string(av)]; ok {
			switch v[0] {
			case '[', '{':
				vars[i] = escQuote(v)
				vars[i] = v
			default:
				var val interface{}
				if err := json.Unmarshal(v, &val); err != nil {
@ -132,27 +137,25 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
	return vars, nil
}

func escQuote(b []byte) []byte {
	f := false
	for i := range b {
		if b[i] == '\'' {
			f = true
			break
		}
	}
	if !f {
		return b
	}

	buf := &bytes.Buffer{}
// escSQuote escapes embedded single quotes by doubling them (SQL-style)
func escSQuote(b []byte) []byte {
	var buf *bytes.Buffer
	s := 0
	for i := range b {
		if b[i] == '\'' {
			if buf == nil {
				buf = &bytes.Buffer{}
			}
			buf.Write(b[s:i])
			buf.WriteString(`''`)
			s = i + 1
		}
	}

	if buf == nil {
		return b
	}

	l := len(b)
	if s < (l - 1) {
		buf.Write(b[s:l])
core/args_test.go (new file)
@ -0,0 +1,13 @@
package core

import "testing"

func TestEscQuote(t *testing.T) {
	val := "That's the worst, don''t be calling me's again"
	exp := "That''s the worst, don''''t be calling me''s again"
	ret := escSQuote([]byte(val))

	if exp != string(ret) {
		t.Errorf("escSQuote failed: %s", string(ret))
	}
}
@ -167,16 +167,16 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
	return w.String(), nil
}

func (sg *SuperGraph) hasTablesWithConfig(qc *qcode.QCode, role *Role) bool {
	for _, id := range qc.Roots {
		t, err := sg.schema.GetTable(qc.Selects[id].Name)
		if err != nil {
			return false
		}
// func (sg *SuperGraph) hasTablesWithConfig(qc *qcode.QCode, role *Role) bool {
// 	for _, id := range qc.Roots {
// 		t, err := sg.schema.GetTable(qc.Selects[id].Name)
// 		if err != nil {
// 			return false
// 		}

		if r := role.GetTable(t.Name); r == nil {
			return false
		}
	}
	return true
}
// 	if r := role.GetTable(t.Name); r == nil {
// 		return false
// 	}
// 	}
// 	return true
// }
@@ -3,6 +3,7 @@ package core
 import (
 	"fmt"
 	"path"
+	"path/filepath"
 	"strings"
 
 	"github.com/spf13/viper"
@@ -10,16 +11,56 @@ import (
 
 // Config struct contains core specific config values
 type Config struct {
-	SecretKey     string            `mapstructure:"secret_key"`
-	UseAllowList  bool              `mapstructure:"use_allow_list"`
-	AllowListFile string            `mapstructure:"allow_list_file"`
-	SetUserID     bool              `mapstructure:"set_user_id"`
-	Vars          map[string]string `mapstructure:"variables"`
-	Blocklist     []string
-	Tables        []Table
-	RolesQuery    string `mapstructure:"roles_query"`
-	Roles         []Role
-	Inflections   map[string]string
+	// SecretKey is used to encrypt opaque values such as
+	// the cursor. Auto-generated if not set
+	SecretKey string `mapstructure:"secret_key"`
+
+	// UseAllowList (aka production mode) when set to true ensures
+	// only queries listed in the allow.list file can be used. All
+	// queries are pre-prepared so no compiling happens and things are
+	// very fast.
+	UseAllowList bool `mapstructure:"use_allow_list"`
+
+	// AllowListFile is the path to the allow list file; if not set the
+	// path is assumed to be the same as the config path (allow.list)
+	AllowListFile string `mapstructure:"allow_list_file"`
+
+	// SetUserID forces the database session variable `user.id` to
+	// be set to the user id. This variable can be used by triggers
+	// or other database functions
+	SetUserID bool `mapstructure:"set_user_id"`
+
+	// DefaultBlock ensures only tables configured under the `anon` role
+	// config can be queried by the `anon` role. For example if the table
+	// `users` is not listed under the anon role then it will be filtered
+	// out of any unauthenticated queries that mention it.
+	DefaultBlock bool `mapstructure:"default_block"`
+
+	// Vars is a map of hardcoded variables that can be leveraged in your
+	// queries (eg variable admin_id will be $admin_id in the query)
+	Vars map[string]string `mapstructure:"variables"`
+
+	// Blocklist is a list of tables and columns that should be filtered
+	// out from any and all queries
+	Blocklist []string
+
+	// Tables contains all table specific configuration such as aliased
+	// tables, creating relationships between tables, etc
+	Tables []Table
+
+	// RolesQuery if set enables attribute-based access control. This query
+	// is used to fetch the user attributes that then dynamically define the
+	// user's role.
+	RolesQuery string `mapstructure:"roles_query"`
+
+	// Roles contains all the configuration for all the roles you want to support
+	// `user` and `anon` are two default roles. User role is for when a user ID is
+	// available and Anon when it's not.
+	Roles []Role
+
+	// Inflections adds additional singular-to-plural mappings
+	// to the engine (eg. sheep: sheep)
+	Inflections map[string]string `mapstructure:"inflections"`
 }
 
 // Table struct defines a database table
@@ -155,9 +196,13 @@ func newViper(configPath, configFile string) *viper.Viper {
 	vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
 	vi.AutomaticEnv()
 
-	vi.SetConfigName(configFile)
-	vi.AddConfigPath(configPath)
-	vi.AddConfigPath("./config")
+	if len(filepath.Ext(configFile)) != 0 {
+		vi.SetConfigFile(configFile)
+	} else {
+		vi.SetConfigName(configFile)
+		vi.AddConfigPath(configPath)
+		vi.AddConfigPath("./config")
+	}
 
 	return vi
 }
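A quick sketch of the two call shapes the new newViper logic supports; the paths and file names here are made-up examples, not part of the diff:

vi := newViper("./config", "dev.yml") // has an extension: loaded as that exact file
vi = newViper("./config", "dev")      // bare name: searched in configPath, then ./config
_ = vi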
56 core/core.go
@@ -14,6 +14,11 @@ import (
 	"github.com/valyala/fasttemplate"
 )
 
+const (
+	OpQuery int = iota
+	OpMutation
+)
+
 type extensions struct {
 	Tracing *trace `json:"tracing,omitempty"`
 }
@@ -50,26 +55,33 @@ type scontext struct {
 }
 
 func (sg *SuperGraph) initCompilers() error {
-	di, err := psql.GetDBInfo(sg.db)
-	if err != nil {
-		return err
-	}
+	var err error
+
+	// If sg.dbinfo is not nil then it's probably already
+	// set for tests
+	if sg.dbinfo == nil {
+		sg.dbinfo, err = psql.GetDBInfo(sg.db)
+		if err != nil {
+			return err
+		}
+	}
 
-	if err = addTables(sg.conf, di); err != nil {
+	if err = addTables(sg.conf, sg.dbinfo); err != nil {
 		return err
 	}
 
-	if err = addForeignKeys(sg.conf, di); err != nil {
+	if err = addForeignKeys(sg.conf, sg.dbinfo); err != nil {
 		return err
 	}
 
-	sg.schema, err = psql.NewDBSchema(di, getDBTableAliases(sg.conf))
+	sg.schema, err = psql.NewDBSchema(sg.dbinfo, getDBTableAliases(sg.conf))
 	if err != nil {
 		return err
 	}
 
 	sg.qc, err = qcode.NewCompiler(qcode.Config{
-		Blocklist: sg.conf.Blocklist,
+		Blocklist:    sg.conf.Blocklist,
+		DefaultBlock: sg.conf.DefaultBlock,
 	})
 	if err != nil {
 		return err
@@ -94,15 +106,12 @@ func (c *scontext) execQuery() ([]byte, error) {
 
 	if c.sg.conf.UseAllowList {
 		data, st, err = c.resolvePreparedSQL()
-		if err != nil {
-			return nil, err
-		}
-
 	} else {
 		data, st, err = c.resolveSQL()
-		if err != nil {
-			return nil, err
-		}
+	}
+
+	if err != nil {
+		return nil, err
 	}
 
 	if len(data) == 0 || st.skipped == 0 {
@@ -325,7 +334,20 @@ func (c *scontext) executeRoleQuery(tx *sql.Tx) (string, error) {
 	return role, nil
 }
 
-func (r *Result) Operation() string {
+func (r *Result) Operation() int {
 	switch r.op {
 	case qcode.QTQuery:
 		return OpQuery
+
+	case qcode.QTMutation, qcode.QTInsert, qcode.QTUpdate, qcode.QTUpsert, qcode.QTDelete:
+		return OpMutation
+
+	default:
+		return -1
+	}
+}
 
 func (r *Result) OperationName() string {
 	return r.op.String()
 }
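Callers of Result.Operation switch on the new int constants instead of comparing strings; a minimal sketch, assuming res is a *core.Result returned by a GraphQL call:

switch res.Operation() {
case core.OpQuery:
	fmt.Println("query:", res.OperationName())
case core.OpMutation:
	fmt.Println("mutation:", res.OperationName())
default:
	fmt.Println("unknown operation")
}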
@@ -5,8 +5,8 @@ import (
 	"encoding/base64"
 
 	"github.com/dosco/super-graph/core/internal/crypto"
-	"github.com/dosco/super-graph/jsn"
 	"github.com/dosco/super-graph/core/internal/qcode"
+	"github.com/dosco/super-graph/jsn"
 )
 
 func (sg *SuperGraph) encryptCursor(qc *qcode.QCode, data []byte) ([]byte, error) {
13 core/init.go
@@ -70,8 +70,18 @@ func (sg *SuperGraph) initConfig() error {
 		sg.roles["user"] = &ur
 	}
 
+	// If the anon role is not defined and DefaultBlock is not set then create it
+	if _, ok := sg.roles["anon"]; !ok && !c.DefaultBlock {
+		ur := Role{
+			Name: "anon",
+			tm:   make(map[string]*RoleTable),
+		}
+		c.Roles = append(c.Roles, ur)
+		sg.roles["anon"] = &ur
+	}
+
 	// Roles: validate and sanitize
-	c.RolesQuery = sanitize(c.RolesQuery)
+	c.RolesQuery = sanitizeVars(c.RolesQuery)
 
 	if len(c.RolesQuery) == 0 {
 		sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
@@ -108,6 +118,7 @@ func addTables(c *Config, di *psql.DBInfo) error {
 		if err := addTable(di, t.Columns, t); err != nil {
 			return err
 		}
+
 	}
 	return nil
 }
@@ -9,6 +9,8 @@ import (
 	"os"
 	"sort"
 	"strings"
+
+	"github.com/dosco/super-graph/jsn"
 )
 
 const (
@@ -230,6 +232,8 @@ func (al *List) Load() ([]Item, error) {
 }
 
 func (al *List) save(item Item) error {
+	var buf bytes.Buffer
+
 	item.Name = QueryName(item.Query)
 	item.key = strings.ToLower(item.Name)
 
@@ -298,9 +302,16 @@ func (al *List) save(item Item) error {
 	}
 
 	if len(v.Vars) != 0 && !bytes.Equal(v.Vars, []byte("{}")) {
-		vj, err := json.MarshalIndent(v.Vars, "", " ")
+		buf.Reset()
+
+		if err := jsn.Clear(&buf, v.Vars); err != nil {
+			return fmt.Errorf("failed to clean vars: %w", err)
+		}
+		vj := json.RawMessage(buf.Bytes())
+
+		vj, err = json.MarshalIndent(vj, "", " ")
 		if err != nil {
-			return fmt.Errorf("failed to marshal vars: %v", err)
+			return fmt.Errorf("failed to marshal vars: %w", err)
 		}
 
 		_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
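The save path above now scrubs variable values before writing them to allow.list. A sketch of the round trip, under the assumption that jsn.Clear keeps the JSON shape while blanking the values:

var buf bytes.Buffer
vars := []byte(`{"id": 5001, "update": {"quantity": 20}}`)

if err := jsn.Clear(&buf, vars); err != nil { // assumption: values blanked, shape kept
	log.Fatal(err)
}
pretty, err := json.MarshalIndent(json.RawMessage(buf.Bytes()), "", " ")
if err != nil {
	log.Fatal(err)
}
fmt.Printf("variables %s\n\n", pretty)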
@@ -0,0 +1,88 @@
package cockraochdb_test

import (
	"database/sql"
	"fmt"
	"io/ioutil"
	"log"
	"os"
	"os/exec"
	"regexp"
	"sync/atomic"
	"testing"

	integration_tests "github.com/dosco/super-graph/core/internal/integration_tests"
	_ "github.com/jackc/pgx/v4/stdlib"
	"github.com/stretchr/testify/require"
)

func TestCockroachDB(t *testing.T) {

	dir, err := ioutil.TempDir("", "temp-cockraochdb-")
	if err != nil {
		log.Fatal(err)
	}

	cmd := exec.Command("cockroach", "start", "--insecure", "--listen-addr", ":0", "--http-addr", ":0", "--store=path="+dir)
	finder := &urlFinder{
		c: make(chan bool),
	}
	cmd.Stdout = finder
	cmd.Stderr = ioutil.Discard

	err = cmd.Start()
	if err != nil {
		t.Skip("is CockroachDB installed?: " + err.Error())
	}
	fmt.Println("started temporary cockroach db")

	stopped := int32(0)
	stopDatabase := func() {
		fmt.Println("stopping temporary cockroach db")
		if atomic.CompareAndSwapInt32(&stopped, 0, 1) {
			if err := cmd.Process.Kill(); err != nil {
				log.Fatal(err)
			}
			if _, err := cmd.Process.Wait(); err != nil {
				log.Fatal(err)
			}
			os.RemoveAll(dir)
		}
	}
	defer stopDatabase()

	// Wait till we figure out the URL we should connect to...
	<-finder.c
	db, err := sql.Open("pgx", finder.URL)
	if err != nil {
		stopDatabase()
		require.NoError(t, err)
	}
	integration_tests.SetupSchema(t, db)

	integration_tests.TestSuperGraph(t, db, func(t *testing.T) {
		if t.Name() == "TestCockroachDB/nested_insert" {
			t.Skip("nested inserts currently not working yet on cockroach db")
		}
	})
}

type urlFinder struct {
	c    chan bool
	done bool
	URL  string
}

func (finder *urlFinder) Write(p []byte) (n int, err error) {
	s := string(p)
	urlRegex := regexp.MustCompile(`\nsql:\s+(postgresql:[^\s]+)\n`)
	if !finder.done {
		submatch := urlRegex.FindAllStringSubmatch(s, -1)
		if submatch != nil {
			finder.URL = submatch[0][1]
			finder.done = true
			close(finder.c)
		}
	}
	return len(p), nil
}
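The urlFinder writer simply scans cockroach's startup banner for the SQL connection URL; a sketch with an assumed example banner line (the exact banner format is an assumption):

f := &urlFinder{c: make(chan bool)}
go f.Write([]byte("\nsql:     postgresql://root@127.0.0.1:26257?sslmode=disable\n")) // assumed banner
<-f.c
fmt.Println(f.URL) // postgresql://root@127.0.0.1:26257?sslmode=disable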
260 core/internal/integration_tests/integration_tests.go Normal file
@@ -0,0 +1,260 @@
package integration_tests

import (
	"context"
	"database/sql"
	"encoding/json"
	"io/ioutil"
	"testing"

	"github.com/dosco/super-graph/core"
	"github.com/stretchr/testify/assert"
	"github.com/stretchr/testify/require"
)

func SetupSchema(t *testing.T, db *sql.DB) {

	_, err := db.Exec(`
		CREATE TABLE users (
			id integer PRIMARY KEY,
			full_name text
		)`)
	require.NoError(t, err)

	_, err = db.Exec(`CREATE TABLE product (
		id integer PRIMARY KEY,
		name text,
		weight float
	)`)
	require.NoError(t, err)

	_, err = db.Exec(`CREATE TABLE line_item (
		id integer PRIMARY KEY,
		product integer REFERENCES product(id),
		quantity integer,
		price float
	)`)
	require.NoError(t, err)
}

func DropSchema(t *testing.T, db *sql.DB) {

	_, err := db.Exec(`DROP TABLE IF EXISTS line_item`)
	require.NoError(t, err)

	_, err = db.Exec(`DROP TABLE IF EXISTS product`)
	require.NoError(t, err)

	_, err = db.Exec(`DROP TABLE IF EXISTS users`)
	require.NoError(t, err)
}

func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
	config := core.Config{DefaultBlock: true}
	config.UseAllowList = false
	config.AllowListFile = "./allow.list"
	config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`

	config.Roles = []core.Role{
		core.Role{
			Name: "anon",
			Tables: []core.RoleTable{
				core.RoleTable{Name: "users", Query: core.Query{Limit: 100}},
				core.RoleTable{Name: "product", Query: core.Query{Limit: 100}},
				core.RoleTable{Name: "line_item", Query: core.Query{Limit: 100}},
			},
		},
	}

	sg, err := core.NewSuperGraph(&config, db)
	require.NoError(t, err)
	ctx := context.Background()

	t.Run("seed fixtures", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`mutation { products (insert: $products) { id } }`,
			json.RawMessage(`{"products":[
				{"id":1, "name":"Charmin Ultra Soft", "weight": 0.5},
				{"id":2, "name":"Hand Sanitizer", "weight": 0.2},
				{"id":3, "name":"Case of Corona", "weight": 1.2}
			]}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"products": [{"id": 1}, {"id": 2}, {"id": 3}]}`, string(res.Data))

		res, err = sg.GraphQL(ctx,
			`mutation { line_items (insert: $line_items) { id } }`,
			json.RawMessage(`{"line_items":[
				{"id":5001, "product":1, "price":6.95, "quantity":10},
				{"id":5002, "product":2, "price":10.99, "quantity":2}
			]}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_items": [{"id": 5001}, {"id": 5002}]}`, string(res.Data))
	})

	t.Run("get line item", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`query { line_item(id:$id) { id, price, quantity } }`,
			json.RawMessage(`{"id":5001}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_item": {"id": 5001, "price": 6.95, "quantity": 10}}`, string(res.Data))
	})

	t.Run("get line items", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`query { line_items { id, price, quantity } }`,
			json.RawMessage(`{}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_items": [{"id": 5001, "price": 6.95, "quantity": 10}, {"id": 5002, "price": 10.99, "quantity": 2}]}`, string(res.Data))
	})

	t.Run("update line item", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`mutation { line_item(update:$update, id:$id) { id } }`,
			json.RawMessage(`{"id":5001, "update":{"quantity":20}}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_item": {"id": 5001}}`, string(res.Data))

		res, err = sg.GraphQL(ctx,
			`query { line_item(id:$id) { id, price, quantity } }`,
			json.RawMessage(`{"id":5001}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_item": {"id": 5001, "price": 6.95, "quantity": 20}}`, string(res.Data))
	})

	t.Run("delete line item", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`mutation { line_item(delete:true, id:$id) { id } }`,
			json.RawMessage(`{"id":5002}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_item": {"id": 5002}}`, string(res.Data))

		res, err = sg.GraphQL(ctx,
			`query { line_items { id, price, quantity } }`,
			json.RawMessage(`{}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_items": [{"id": 5001, "price": 6.95, "quantity": 20}]}`, string(res.Data))
	})

	t.Run("nested insert", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`mutation { line_items (insert: $line_item) { id, product { name } } }`,
			json.RawMessage(`{"line_item":
				{"id":5003, "product": { "connect": { "id": 1} }, "price":10.95, "quantity":15}
			}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_items": [{"id": 5003, "product": {"name": "Charmin Ultra Soft"}}]}`, string(res.Data))
	})

	t.Run("schema introspection", func(t *testing.T) {
		before(t)
		schema, err := sg.GraphQLSchema()
		require.NoError(t, err)
		// Uncomment the following line if you need to regenerate the expected schema.
		//ioutil.WriteFile("../introspection.graphql", []byte(schema), 0644)
		expected, err := ioutil.ReadFile("../introspection.graphql")
		require.NoError(t, err)
		assert.Equal(t, string(expected), schema)
	})

	res, err := sg.GraphQL(ctx, introspectionQuery, json.RawMessage(``))
	assert.NoError(t, err)
	assert.Contains(t, string(res.Data),
		`{"queryType":{"name":"Query"},"mutationType":{"name":"Mutation"},"subscriptionType":null,"types":`)
}

const introspectionQuery = `
  query IntrospectionQuery {
    __schema {
      queryType { name }
      mutationType { name }
      subscriptionType { name }
      types {
        ...FullType
      }
      directives {
        name
        description
        locations
        args {
          ...InputValue
        }
      }
    }
  }
  fragment FullType on __Type {
    kind
    name
    description
    fields(includeDeprecated: true) {
      name
      description
      args {
        ...InputValue
      }
      type {
        ...TypeRef
      }
      isDeprecated
      deprecationReason
    }
    inputFields {
      ...InputValue
    }
    interfaces {
      ...TypeRef
    }
    enumValues(includeDeprecated: true) {
      name
      description
      isDeprecated
      deprecationReason
    }
    possibleTypes {
      ...TypeRef
    }
  }
  fragment InputValue on __InputValue {
    name
    description
    type { ...TypeRef }
    defaultValue
  }
  fragment TypeRef on __Type {
    kind
    name
    ofType {
      kind
      name
      ofType {
        kind
        name
        ofType {
          kind
          name
          ofType {
            kind
            name
            ofType {
              kind
              name
              ofType {
                kind
                name
                ofType {
                  kind
                  name
                }
              }
            }
          }
        }
      }
    }
  }
`
319 core/internal/integration_tests/introspection.graphql Normal file
@@ -0,0 +1,319 @@
input FloatExpression {
  contained_in:String!
  contains:[Float!]!
  eq:Float!
  equals:Float!
  greater_or_equals:Float!
  greater_than:Float!
  gt:Float!
  gte:Float!
  has_key:Float!
  has_key_all:[Float!]!
  has_key_any:[Float!]!
  ilike:String!
  in:[Float!]!
  is_null:Boolean!
  lesser_or_equals:Float!
  lesser_than:Float!
  like:String!
  lt:Float!
  lte:Float!
  neq:Float!
  nilike:String!
  nin:[Float!]!
  nlike:String!
  not_equals:Float!
  not_ilike:String!
  not_in:[Float!]!
  not_like:String!
  not_similar:String!
  nsimilar:String!
  similar:String!
}
input IntExpression {
  contained_in:String!
  contains:[Int!]!
  eq:Int!
  equals:Int!
  greater_or_equals:Int!
  greater_than:Int!
  gt:Int!
  gte:Int!
  has_key:Int!
  has_key_all:[Int!]!
  has_key_any:[Int!]!
  ilike:String!
  in:[Int!]!
  is_null:Boolean!
  lesser_or_equals:Int!
  lesser_than:Int!
  like:String!
  lt:Int!
  lte:Int!
  neq:Int!
  nilike:String!
  nin:[Int!]!
  nlike:String!
  not_equals:Int!
  not_ilike:String!
  not_in:[Int!]!
  not_like:String!
  not_similar:String!
  nsimilar:String!
  similar:String!
}
type Mutation {
  line_item(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!, insert:line_itemInput, update:line_itemInput, upsert:line_itemInput
  ):line_itemOutput
  line_items(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!, insert:line_itemInput, update:line_itemInput, upsert:line_itemInput, inserts:[line_itemInput!]!, updates:[line_itemInput!]!, upserts:[line_itemInput!]!
  ):line_itemOutput
  product(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!, insert:productInput, update:productInput, upsert:productInput
  ):productOutput
  products(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!, insert:productInput, update:productInput, upsert:productInput, inserts:[productInput!]!, updates:[productInput!]!, upserts:[productInput!]!
  ):productOutput
  user(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!, insert:userInput, update:userInput, upsert:userInput
  ):userOutput
  users(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!, insert:userInput, update:userInput, upsert:userInput, inserts:[userInput!]!, updates:[userInput!]!, upserts:[userInput!]!
  ):userOutput
}
enum OrderDirection {
  asc
  desc
}
type Query {
  line_item(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!
  ):line_itemOutput
  line_items(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!
  ):[line_itemOutput!]!
  product(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!
  ):productOutput
  products(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!
  ):[productOutput!]!
  user(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!
  ):userOutput
  users(
    "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
    order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
    "Finds the record by the primary key"
    id:Int!
  ):[userOutput!]!
}
input StringExpression {
  contained_in:String!
  contains:[String!]!
  eq:String!
  equals:String!
  greater_or_equals:String!
  greater_than:String!
  gt:String!
  gte:String!
  has_key:String!
  has_key_all:[String!]!
  has_key_any:[String!]!
  ilike:String!
  in:[String!]!
  is_null:Boolean!
  lesser_or_equals:String!
  lesser_than:String!
  like:String!
  lt:String!
  lte:String!
  neq:String!
  nilike:String!
  nin:[String!]!
  nlike:String!
  not_equals:String!
  not_ilike:String!
  not_in:[String!]!
  not_like:String!
  not_similar:String!
  nsimilar:String!
  similar:String!
}
input line_itemExpression {
  and:line_itemExpression!
  id:IntExpression!
  not:line_itemExpression!
  or:line_itemExpression!
  price:FloatExpression!
  product:IntExpression!
  quantity:IntExpression!
}
input line_itemInput {
  id:Int!
  price:Float
  product:Int
  quantity:Int
}
input line_itemOrderBy {
  id:OrderDirection!
  price:OrderDirection!
  product:OrderDirection!
  quantity:OrderDirection!
}
type line_itemOutput {
  avg_id:Int!
  avg_price:Float
  avg_product:Int
  avg_quantity:Int
  count_id:Int!
  count_price:Float
  count_product:Int
  count_quantity:Int
  id:Int!
  max_id:Int!
  max_price:Float
  max_product:Int
  max_quantity:Int
  min_id:Int!
  min_price:Float
  min_product:Int
  min_quantity:Int
  price:Float
  product:Int
  quantity:Int
  stddev_id:Int!
  stddev_pop_id:Int!
  stddev_pop_price:Float
  stddev_pop_product:Int
  stddev_pop_quantity:Int
  stddev_price:Float
  stddev_product:Int
  stddev_quantity:Int
  stddev_samp_id:Int!
  stddev_samp_price:Float
  stddev_samp_product:Int
  stddev_samp_quantity:Int
  var_pop_id:Int!
  var_pop_price:Float
  var_pop_product:Int
  var_pop_quantity:Int
  var_samp_id:Int!
  var_samp_price:Float
  var_samp_product:Int
  var_samp_quantity:Int
  variance_id:Int!
  variance_price:Float
  variance_product:Int
  variance_quantity:Int
}
input productExpression {
  and:productExpression!
  id:IntExpression!
  name:StringExpression!
  not:productExpression!
  or:productExpression!
  weight:FloatExpression!
}
input productInput {
  id:Int!
  name:String
  weight:Float
}
input productOrderBy {
  id:OrderDirection!
  name:OrderDirection!
  weight:OrderDirection!
}
type productOutput {
  avg_id:Int!
  avg_weight:Float
  count_id:Int!
  count_weight:Float
  id:Int!
  max_id:Int!
  max_weight:Float
  min_id:Int!
  min_weight:Float
  name:String
  stddev_id:Int!
  stddev_pop_id:Int!
  stddev_pop_weight:Float
  stddev_samp_id:Int!
  stddev_samp_weight:Float
  stddev_weight:Float
  var_pop_id:Int!
  var_pop_weight:Float
  var_samp_id:Int!
  var_samp_weight:Float
  variance_id:Int!
  variance_weight:Float
  weight:Float
}
input userExpression {
  and:userExpression!
  full_name:StringExpression!
  id:IntExpression!
  not:userExpression!
  or:userExpression!
}
input userInput {
  full_name:String
  id:Int!
}
input userOrderBy {
  full_name:OrderDirection!
  id:OrderDirection!
}
type userOutput {
  avg_id:Int!
  count_id:Int!
  full_name:String
  id:Int!
  max_id:Int!
  min_id:Int!
  stddev_id:Int!
  stddev_pop_id:Int!
  stddev_samp_id:Int!
  var_pop_id:Int!
  var_samp_id:Int!
  variance_id:Int!
}
schema {
  mutation: Mutation
  query: Query
}
@@ -0,0 +1,27 @@
package cockraochdb_test

import (
	"database/sql"
	"os"
	"testing"

	integration_tests "github.com/dosco/super-graph/core/internal/integration_tests"
	_ "github.com/jackc/pgx/v4/stdlib"
	"github.com/stretchr/testify/require"
)

func TestCockroachDB(t *testing.T) {

	url, found := os.LookupEnv("SG_POSTGRESQL_TEST_URL")
	if !found {
		t.Skip("set the SG_POSTGRESQL_TEST_URL env variable if you want to run integration tests against a PostgreSQL database")
	}

	db, err := sql.Open("pgx", url)
	require.NoError(t, err)

	integration_tests.DropSchema(t, db)
	integration_tests.SetupSchema(t, db)
	integration_tests.TestSuperGraph(t, db, func(t *testing.T) {
	})
}
@@ -167,7 +167,7 @@ func (c *compilerContext) renderColumnTypename(sel *qcode.Select, ti *DBTableInfo
 }
 
 func (c *compilerContext) renderColumnFunction(sel *qcode.Select, ti *DBTableInfo, col qcode.Column, columnsRendered int) error {
-	pl := funcPrefixLen(col.Name)
+	pl := funcPrefixLen(c.schema.fm, col.Name)
 	// if pl == 0 {
 	// 	//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
 	// 	io.WriteString(c.w, `'`)
@@ -10,7 +10,7 @@ import (
 var (
 	qcompileTest, _ = qcode.NewCompiler(qcode.Config{})
 
-	schema = getTestSchema()
+	schema = GetTestSchema()
 
 	vars = NewVariables(map[string]string{
 		"admin_account_id": "5",
@@ -21,9 +21,17 @@ func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
 		return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar)
 	}
 
-	io.WriteString(c.w, `WITH "_sg_input" AS (SELECT '{{`)
+	io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
+	if insert[0] == '[' {
+		io.WriteString(c.w, `json_array_elements(`)
+	}
+	io.WriteString(c.w, `'{{`)
 	io.WriteString(c.w, qc.ActionVar)
-	io.WriteString(c.w, `}}' :: json AS j)`)
+	io.WriteString(c.w, `}}' :: json`)
+	if insert[0] == '[' {
+		io.WriteString(c.w, `)`)
+	}
+	io.WriteString(c.w, ` AS j)`)
 
 	st := util.NewStack()
 	st.Push(kvitem{_type: itemInsert, key: ti.Name, val: insert, ti: ti})
@@ -90,26 +98,9 @@ func (c *compilerContext) renderInsertStmt(qc *qcode.QCode, w io.Writer, item re
 	renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
 	renderNestedInsertRelColumns(w, item.kvitem, true)
 
-	io.WriteString(w, ` FROM "_sg_input" i, `)
+	io.WriteString(w, ` FROM "_sg_input" i`)
 	renderNestedInsertRelTables(w, item.kvitem)
-
-	if item.array {
-		io.WriteString(w, `json_populate_recordset`)
-	} else {
-		io.WriteString(w, `json_populate_record`)
-	}
-
-	io.WriteString(w, `(NULL::`)
-	io.WriteString(w, ti.Name)
-
-	if len(item.path) == 0 {
-		io.WriteString(w, `, i.j) t RETURNING *)`)
-	} else {
-		io.WriteString(w, `, i.j->`)
-		joinPath(w, item.path)
-		io.WriteString(w, `) t RETURNING *)`)
-	}
-
+	io.WriteString(w, ` RETURNING *)`)
 	return nil
 }
 
@@ -172,21 +163,21 @@ func renderNestedInsertRelColumns(w io.Writer, item kvitem, values bool) error {
 func renderNestedInsertRelTables(w io.Writer, item kvitem) error {
 	if len(item.items) == 0 {
 		if item.relPC != nil && item.relPC.Type == RelOneToMany {
-			quoted(w, item.relPC.Left.Table)
-			io.WriteString(w, `, `)
+			io.WriteString(w, `, `)
+			quoted(w, item.relPC.Left.Table)
 		}
 	} else {
 		// Render tables needed to set values if child-to-parent
 		// relationship is one-to-many
 		for _, v := range item.items {
 			if v.relCP.Type == RelOneToMany {
+				io.WriteString(w, `, `)
 				if v._ctype > 0 {
 					io.WriteString(w, `"_x_`)
 					io.WriteString(w, v.relCP.Left.Table)
-					io.WriteString(w, `", `)
+					io.WriteString(w, `"`)
 				} else {
 					quoted(w, v.relCP.Left.Table)
-					io.WriteString(w, `, `)
 				}
 			}
 		}
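To make the renderInsert change concrete, here is the input-CTE prefix logic pulled out into a standalone helper for illustration (buildInputCTE is hypothetical; the same SQL shapes appear verbatim in the expected test output near the end of this diff):

import "strings"

// buildInputCTE mirrors the branch added above: array input is unrolled
// into one row per element via json_array_elements, object input is
// selected as-is.
func buildInputCTE(actionVar string, insert []byte) string {
	var b strings.Builder
	b.WriteString(`WITH "_sg_input" AS (SELECT `)
	if insert[0] == '[' {
		b.WriteString(`json_array_elements(`)
	}
	b.WriteString(`'{{` + actionVar + `}}' :: json`)
	if insert[0] == '[' {
		b.WriteString(`)`)
	}
	b.WriteString(` AS j)`)
	return b.String()
}

// buildInputCTE("data", []byte(`{"id":1}`))   => WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j)
// buildInputCTE("data", []byte(`[{"id":1}]`)) => WITH "_sg_input" AS (SELECT json_array_elements('{{data}}' :: json) AS j)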
@@ -1,4 +1,4 @@
-package psql
+package psql_test
 
 import (
 	"encoding/json"
@@ -7,9 +7,9 @@ import (
 	"fmt"
 	"io"
 
-	"github.com/dosco/super-graph/jsn"
 	"github.com/dosco/super-graph/core/internal/qcode"
 	"github.com/dosco/super-graph/core/internal/util"
+	"github.com/dosco/super-graph/jsn"
 )
 
 type itemType int
@@ -396,7 +396,12 @@ func renderInsertUpdateColumns(w io.Writer,
 	}
 
 	if values {
-		colWithTable(w, "t", cn.Name)
+		io.WriteString(w, `CAST( i.j ->>`)
+		io.WriteString(w, `'`)
+		io.WriteString(w, cn.Name)
+		io.WriteString(w, `' AS `)
+		io.WriteString(w, cn.Type)
+		io.WriteString(w, `)`)
 	} else {
 		quoted(w, cn.Name)
 	}
@@ -1,4 +1,4 @@
-package psql
+package psql_test
 
 import (
 	"encoding/json"
@@ -1,4 +1,4 @@
-package psql
+package psql_test
 
 import (
 	"fmt"
@@ -8,6 +8,7 @@ import (
 	"strings"
 	"testing"
 
+	"github.com/dosco/super-graph/core/internal/psql"
 	"github.com/dosco/super-graph/core/internal/qcode"
 )
 
@@ -19,7 +20,7 @@ const (
 
 var (
 	qcompile *qcode.Compiler
-	pcompile *Compiler
+	pcompile *psql.Compiler
 	expected map[string][]string
 )
 
@@ -133,13 +134,16 @@ func TestMain(m *testing.M) {
 		log.Fatal(err)
 	}
 
-	schema := getTestSchema()
+	schema, err := psql.GetTestSchema()
+	if err != nil {
+		log.Fatal(err)
+	}
 
-	vars := NewVariables(map[string]string{
+	vars := psql.NewVariables(map[string]string{
 		"admin_account_id": "5",
 	})
 
-	pcompile = NewCompiler(Config{
+	pcompile = psql.NewCompiler(psql.Config{
 		Schema: schema,
 		Vars:   vars,
 	})
@@ -173,7 +177,7 @@ func TestMain(m *testing.M) {
 	os.Exit(m.Run())
 }
 
-func compileGQLToPSQL(t *testing.T, gql string, vars Variables, role string) {
+func compileGQLToPSQL(t *testing.T, gql string, vars psql.Variables, role string) {
 	generateTestFile := false
 
 	if generateTestFile {
@@ -141,7 +141,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
 		c.renderLateralJoin(sel)
 	}
 
-	if !ti.Singular {
+	if !ti.IsSingular {
 		c.renderPluralSelect(sel, ti)
 	}
 
@@ -178,7 +178,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
 	io.WriteString(c.w, `)`)
 	aliasWithID(c.w, "__sj", sel.ID)
 
-	if !ti.Singular {
+	if !ti.IsSingular {
 		io.WriteString(c.w, `)`)
 		aliasWithID(c.w, "__sj", sel.ID)
 	}
@@ -438,7 +438,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
 
 	io.WriteString(c.w, `SELECT to_jsonb("__sr_`)
 	int2string(c.w, sel.ID)
-	io.WriteString(c.w, `") `)
+	io.WriteString(c.w, `".*) `)
 
 	if sel.Paging.Type != qcode.PtOffset {
 		for i := range sel.OrderBy {
@@ -543,7 +543,7 @@ func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo, skip
 	var cn string
 
 	for _, col := range sel.Cols {
-		if n := funcPrefixLen(col.Name); n != 0 {
+		if n := funcPrefixLen(c.schema.fm, col.Name); n != 0 {
 			if !sel.Functions {
 				continue
 			}
@@ -706,7 +706,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
 	}
 
 	switch {
-	case ti.Singular:
+	case ti.IsSingular:
 		io.WriteString(c.w, ` LIMIT ('1') :: integer`)
 
 	case len(sel.Paging.Limit) != 0:
@@ -921,8 +921,6 @@ func (c *compilerContext) renderExp(ex *qcode.Exp, ti *DBTableInfo, skipNested b
 		st.Push('(')
 
 	case qcode.OpNot:
-		//fmt.Printf("1> %s %d %s %s\n", val.Op, len(val.Children), val.Children[0].Op, val.Children[1].Op)
-
 		st.Push(val.Children[0])
 		st.Push(qcode.OpNot)
 
@@ -1193,7 +1191,7 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *
 	io.WriteString(c.w, col.Type)
 }
 
-func funcPrefixLen(fn string) int {
+func funcPrefixLen(fm map[string]*DBFunction, fn string) int {
 	switch {
 	case strings.HasPrefix(fn, "avg_"):
 		return 4
@@ -1218,6 +1216,14 @@ func funcPrefixLen(fn string) int {
 	case strings.HasPrefix(fn, "var_samp_"):
 		return 9
 	}
+	fnLen := len(fn)
+
+	for k := range fm {
+		kLen := len(k)
+		if kLen < fnLen && k[0] == fn[0] && strings.HasPrefix(fn, k) && fn[kLen] == '_' {
+			return kLen + 1
+		}
+	}
 	return 0
 }
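funcPrefixLen now falls through to the fm map, so single-argument custom SQL functions are recognized as column-function prefixes alongside the built-in aggregates; a small sketch (the lower function is a hypothetical example):

fm := map[string]*DBFunction{"lower": {Name: "lower"}} // hypothetical custom function

funcPrefixLen(fm, "max_price")   // 4: matches the built-in "max_" prefix
funcPrefixLen(fm, "lower_email") // 6: len("lower") plus the underscore
funcPrefixLen(fm, "price")       // 0: not a function column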
@@ -1,4 +1,4 @@
-package psql
+package psql_test
 
 import (
 	"bytes"
@@ -11,17 +11,20 @@ type DBSchema struct {
 	ver int
 	t   map[string]*DBTableInfo
 	rm  map[string]map[string]*DBRel
+	fm  map[string]*DBFunction
 }
 
 type DBTableInfo struct {
 	Name       string
 	Type       string
-	Singular   bool
+	IsSingular bool
 	Columns    []DBColumn
 	PrimaryCol *DBColumn
 	TSVCol     *DBColumn
 	ColMap     map[string]*DBColumn
 	ColIDMap   map[int16]*DBColumn
+	Singular   string
+	Plural     string
 }
 
 type RelType int
@@ -54,8 +57,10 @@ type DBRel struct {
 
 func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
 	schema := &DBSchema{
-		t:  make(map[string]*DBTableInfo),
-		rm: make(map[string]map[string]*DBRel),
+		ver: info.Version,
+		t:   make(map[string]*DBTableInfo),
+		rm:  make(map[string]map[string]*DBRel),
+		fm:  make(map[string]*DBFunction, len(info.Functions)),
 	}
 
 	for i, t := range info.Tables {
@@ -79,6 +84,12 @@ func NewDBSchema(info *DBInfo, aliases map[string][]string) (*DBSchema, error) {
 		}
 	}
 
+	for k, f := range info.Functions {
+		if len(f.Params) == 1 {
+			schema.fm[strings.ToLower(f.Name)] = &info.Functions[k]
+		}
+	}
+
 	return schema, nil
 }
 
@@ -89,23 +100,28 @@ func (s *DBSchema) addTable(
 	colidmap := make(map[int16]*DBColumn, len(cols))
 
 	singular := flect.Singularize(t.Key)
+	plural := flect.Pluralize(t.Key)
+
 	s.t[singular] = &DBTableInfo{
-		Name:     t.Name,
-		Type:     t.Type,
-		Singular: true,
-		Columns:  cols,
-		ColMap:   colmap,
-		ColIDMap: colidmap,
+		Name:       t.Name,
+		Type:       t.Type,
+		IsSingular: true,
+		Columns:    cols,
+		ColMap:     colmap,
+		ColIDMap:   colidmap,
+		Singular:   singular,
+		Plural:     plural,
 	}
 
-	plural := flect.Pluralize(t.Key)
 	s.t[plural] = &DBTableInfo{
-		Name:     t.Name,
-		Type:     t.Type,
-		Singular: false,
-		Columns:  cols,
-		ColMap:   colmap,
-		ColIDMap: colidmap,
+		Name:       t.Name,
+		Type:       t.Type,
+		IsSingular: false,
+		Columns:    cols,
+		ColMap:     colmap,
+		ColIDMap:   colidmap,
+		Singular:   singular,
+		Plural:     plural,
 	}
 
 	if al, ok := aliases[t.Key]; ok {
@@ -364,6 +380,14 @@ func (s *DBSchema) updateSchemaOTMT(
 	return nil
 }
 
+func (s *DBSchema) GetTableNames() []string {
+	var names []string
+	for name := range s.t {
+		names = append(names, name)
+	}
+	return names
+}
+
 func (s *DBSchema) GetTable(table string) (*DBTableInfo, error) {
 	t, ok := s.t[table]
 	if !ok {
@@ -424,3 +448,11 @@ func (s *DBSchema) GetRel(child, parent string) (*DBRel, error) {
 	}
 	return rel, nil
 }
+
+func (s *DBSchema) GetFunctions() []*DBFunction {
+	var funcs []*DBFunction
+	for _, f := range s.fm {
+		funcs = append(funcs, f)
+	}
+	return funcs
+}
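With the schema change above, each table is registered twice, once under its singular key and once under its plural key, and IsSingular tells the two entries apart; a sketch, assuming schema is a *DBSchema built by NewDBSchema over a users table:

ti, _ := schema.GetTable("user")   // ti.IsSingular == true
tis, _ := schema.GetTable("users") // tis.IsSingular == false
fmt.Println(ti.Name, ti.Singular, ti.Plural) // users user users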
@@ -10,10 +10,11 @@ import (
 )
 
 type DBInfo struct {
-	Version int
-	Tables  []DBTable
-	Columns [][]DBColumn
-	colmap  map[string]map[string]*DBColumn
+	Version   int
+	Tables    []DBTable
+	Columns   [][]DBColumn
+	Functions []DBFunction
+	colMap    map[string]map[string]*DBColumn
 }
 
 func GetDBInfo(db *sql.DB) (*DBInfo, error) {
@@ -35,41 +36,56 @@ func GetDBInfo(db *sql.DB) (*DBInfo, error) {
 		return nil, err
 	}
 
-	di.colmap = make(map[string]map[string]*DBColumn, len(di.Tables))
-
-	for i, t := range di.Tables {
+	for _, t := range di.Tables {
 		cols, err := GetColumns(db, "public", t.Name)
 		if err != nil {
 			return nil, err
 		}
 
 		di.Columns = append(di.Columns, cols)
-		di.colmap[t.Key] = make(map[string]*DBColumn, len(cols))
+	}
 
-		for n, c := range di.Columns[i] {
-			di.colmap[t.Key][c.Key] = &di.Columns[i][n]
-		}
+	di.colMap = newColMap(di.Tables, di.Columns)
+
+	di.Functions, err = GetFunctions(db)
+	if err != nil {
+		return nil, err
 	}
 
 	return di, nil
 }
 
+func newColMap(tables []DBTable, columns [][]DBColumn) map[string]map[string]*DBColumn {
+	cm := make(map[string]map[string]*DBColumn, len(tables))
+
+	for i, t := range tables {
+		cols := columns[i]
+		cm[t.Key] = make(map[string]*DBColumn, len(cols))
+
+		for n, c := range cols {
+			cm[t.Key][c.Key] = &columns[i][n]
+		}
+	}
+
+	return cm
+}
+
 func (di *DBInfo) AddTable(t DBTable, cols []DBColumn) {
 	t.ID = di.Tables[len(di.Tables)-1].ID
 
 	di.Tables = append(di.Tables, t)
-	di.colmap[t.Key] = make(map[string]*DBColumn, len(cols))
+	di.colMap[t.Key] = make(map[string]*DBColumn, len(cols))
 
 	for i := range cols {
 		cols[i].ID = int16(i)
 		c := &cols[i]
-		di.colmap[t.Key][c.Key] = c
+		di.colMap[t.Key][c.Key] = c
 	}
 	di.Columns = append(di.Columns, cols)
 }
 
 func (di *DBInfo) GetColumn(table, column string) (*DBColumn, bool) {
-	v, ok := di.colmap[strings.ToLower(table)][strings.ToLower(column)]
+	v, ok := di.colMap[strings.ToLower(table)][strings.ToLower(column)]
 	return v, ok
 }
 
@@ -237,6 +253,71 @@ ORDER BY id;`
 	return cols, nil
 }
 
+type DBFunction struct {
+	Name   string
+	Params []DBFuncParam
+}
+
+type DBFuncParam struct {
+	ID   int
+	Name sql.NullString
+	Type string
+}
+
+func GetFunctions(db *sql.DB) ([]DBFunction, error) {
+	sqlStmt := `
+SELECT
+	routines.routine_name,
+	parameters.specific_name,
+	parameters.data_type,
+	parameters.parameter_name,
+	parameters.ordinal_position
+FROM
+	information_schema.routines
+RIGHT JOIN
+	information_schema.parameters
+	ON (routines.specific_name = parameters.specific_name and parameters.ordinal_position IS NOT NULL)
+WHERE
+	routines.specific_schema = 'public'
+ORDER BY
+	routines.routine_name, parameters.ordinal_position;`
+
+	rows, err := db.Query(sqlStmt)
+	if err != nil {
+		return nil, fmt.Errorf("Error fetching functions: %s", err)
+	}
+	defer rows.Close()
+
+	var funcs []DBFunction
+	fm := make(map[string]int)
+
+	parameterIndex := 1
+	for rows.Next() {
+		var fn, fid string
+		fp := DBFuncParam{}
+
+		err = rows.Scan(&fn, &fid, &fp.Type, &fp.Name, &fp.ID)
+		if err != nil {
+			return nil, err
+		}
+
+		if !fp.Name.Valid {
+			fp.Name.String = string(parameterIndex)
+			fp.Name.Valid = true
+		}
+
+		if i, ok := fm[fid]; ok {
+			funcs[i].Params = append(funcs[i].Params, fp)
+		} else {
+			funcs = append(funcs, DBFunction{Name: fn, Params: []DBFuncParam{fp}})
+			fm[fid] = len(funcs) - 1
+		}
+		parameterIndex++
+	}
+
+	return funcs, nil
+}
+
 // func GetValType(type string) qcode.ValType {
 // 	switch {
 // 	case "bigint", "integer", "smallint", "numeric", "bigserial":
@@ -1,11 +1,10 @@
 package psql
 
 import (
-	"log"
 	"strings"
 )
 
-func getTestSchema() *DBSchema {
+func GetTestDBInfo() *DBInfo {
 	tables := []DBTable{
 		DBTable{Name: "customers", Type: "table"},
 		DBTable{Name: "users", Type: "table"},
@@ -74,36 +73,19 @@ func getTestSchema() *DBSchema {
 		}
 	}
 
-	schema := &DBSchema{
-		ver: 110000,
-		t:   make(map[string]*DBTableInfo),
-		rm:  make(map[string]map[string]*DBRel),
+	return &DBInfo{
+		Version:   110000,
+		Tables:    tables,
+		Columns:   columns,
+		Functions: []DBFunction{},
+		colMap:    newColMap(tables, columns),
 	}
+}
 
+func GetTestSchema() (*DBSchema, error) {
 	aliases := map[string][]string{
 		"users": []string{"mes"},
 	}
 
-	for i, t := range tables {
-		err := schema.addTable(t, columns[i], aliases)
-		if err != nil {
-			log.Fatal(err)
-		}
-	}
-
-	for i, t := range tables {
-		err := schema.firstDegreeRels(t, columns[i])
-		if err != nil {
-			log.Fatal(err)
-		}
-	}
-
-	for i, t := range tables {
-		err := schema.secondDegreeRels(t, columns[i])
-		if err != nil {
-			log.Fatal(err)
-		}
-	}
-
-	return schema
+	return NewDBSchema(GetTestDBInfo(), aliases)
 }
@ -1,25 +1,25 @@
|
||||
=== RUN TestCompileInsert
|
||||
=== RUN TestCompileInsert/simpleInsert
|
||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||
=== RUN TestCompileInsert/singleInsert
|
||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT "t"."name", "t"."description", "t"."price", "t"."user_id" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||
=== RUN TestCompileInsert/bulkInsert
|
||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||
=== RUN TestCompileInsert/simpleInsertWithPresets
|
||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||
=== RUN TestCompileInsert/nestedInsertManyToMany
|
||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT "t"."sale_type", "t"."quantity", "t"."due_date", "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products", json_populate_record(NULL::purchases, i.j) t RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT "t"."sale_type", "t"."quantity", "t"."due_date", "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers", json_populate_record(NULL::purchases, i.j) t RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
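The four variants above (two CTE orderings, each in the older json_populate_record form and the newer per-field CAST form) all rely on the same pattern: every INSERT runs as a data-modifying CTE, so the final INSERT INTO purchases can read the ids generated for the product and customer earlier in the same statement. A minimal standalone sketch of that pattern, assuming a cut-down schema and illustrative values rather than the test fixtures:

    -- Chained data-modifying CTEs: one round trip inserts three related rows.
    WITH new_product AS (
      INSERT INTO products (name, price) VALUES ('Pen', 1.25) RETURNING id
    ), new_customer AS (
      INSERT INTO customers (full_name, email) VALUES ('Ada', 'ada@example.com') RETURNING id
    )
    INSERT INTO purchases (sale_type, quantity, product_id, customer_id)
    SELECT 'retail', 2, new_product.id, new_customer.id
    FROM new_product, new_customer
    RETURNING *;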
=== RUN TestCompileInsert/nestedInsertOneToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j->'product') t RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
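In the older form above, the JSON payload is decoded with json_populate_record, which maps matching keys onto the columns of an existing row type. A standalone sketch, assuming the fixtures' users table exists with full_name and email columns:

    -- json_populate_record yields one typed row from a JSON object;
    -- keys that match column names are filled, the rest stay NULL.
    SELECT t.full_name, t.email
    FROM json_populate_record(
           NULL::users,
           '{"full_name": "Ada", "email": "ada@example.com"}'::json
         ) AS t;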
=== RUN TestCompileInsert/nestedInsertOneToOne
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j->'user') t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
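The newer form drops json_populate_record in favour of an explicit CAST per field, which makes the target type of every column visible in the generated SQL. The casting step in isolation, with a made-up payload:

    -- ->> extracts a JSON field as text; CAST converts it to the column type.
    SELECT CAST(i.j->>'price' AS numeric(7,2))                     AS price,
           CAST(i.j->>'created_at' AS timestamp without time zone) AS created_at
    FROM (SELECT '{"price": "3.50", "created_at": "2020-05-01 10:00:00"}'::json AS j) AS i;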
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
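connect does not insert the child row at all: the generated SQL attaches an existing product to the new user with an UPDATE inside the same CTE chain. A simplified sketch with an illustrative product id:

    -- Create the parent, then point an existing child row at it.
    WITH new_user AS (
      INSERT INTO users (full_name, email) VALUES ('Ada', 'ada@example.com') RETURNING id
    )
    UPDATE products
    SET user_id = new_user.id
    FROM new_user
    WHERE products.id = 7   -- stands in for the id taken from the connect payload
    RETURNING products.*;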
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
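When connect carries an array of ids, the generated SQL unpacks the JSON array with json_array_elements_text and matches with = ANY. The matching step on its own, with a literal array standing in for the payload:

    -- Pick the first user whose id appears in the JSON id list.
    SELECT id
    FROM users
    WHERE id = ANY (
      SELECT a::bigint
      FROM json_array_elements_text('[1, 2, 3]'::json) AS a
    )
    LIMIT 1;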
--- PASS: TestCompileInsert (0.02s)
--- PASS: TestCompileInsert/simpleInsert (0.00s)
--- PASS: TestCompileInsert/singleInsert (0.00s)
@@ -33,67 +33,67 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id"
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
=== RUN TestCompileMutate
=== RUN TestCompileMutate/singleUpsert
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
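The upsert core is INSERT ... ON CONFLICT (id) DO UPDATE, where EXCLUDED names the row that failed to insert. A minimal standalone form with illustrative values:

    INSERT INTO products (id, name, description)
    VALUES (1, 'Pen', 'Blue ink')
    ON CONFLICT (id) DO UPDATE
    SET name = EXCLUDED.name, description = EXCLUDED.description
    RETURNING *;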
=== RUN TestCompileMutate/singleUpsertWhere
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
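Here the conflict target carries a WHERE clause, which PostgreSQL uses to infer a partial unique index; the DO UPDATE branch then only applies to conflicts on that index. A sketch under the assumption of such an index (the real fixture schema may differ):

    -- Illustrative partial unique index for the inference to succeed.
    CREATE UNIQUE INDEX products_id_pricey ON products (id) WHERE price > 3;

    INSERT INTO products (id, name, description, price)
    VALUES (1, 'Pen', 'Blue ink', 5.00)
    ON CONFLICT (id) WHERE price > 3 DO UPDATE
    SET name = EXCLUDED.name, description = EXCLUDED.description
    RETURNING *;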
=== RUN TestCompileMutate/bulkUpsert
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_recordset(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
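Bulk upsert swaps json_populate_record for json_populate_recordset, which yields one typed row per element of a JSON array. The decoding step alone, assuming the fixtures' products table:

    SELECT t.name, t.description
    FROM json_populate_recordset(
           NULL::products,
           '[{"name": "Pen"}, {"name": "Ink", "description": "Black"}]'::json
         ) AS t;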
=== RUN TestCompileMutate/delete
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileMutate (0.01s)
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileMutate (0.00s)
--- PASS: TestCompileMutate/singleUpsert (0.00s)
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
--- PASS: TestCompileMutate/delete (0.00s)
=== RUN TestCompileQuery
=== RUN TestCompileQuery/withComplexArgs
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
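The complex-args query compiles to PostgreSQL's DISTINCT ON, which keeps one row per distinct value of the listed expression and requires that expression to lead the ORDER BY. The inner query above, cut down:

    SELECT DISTINCT ON (price) id, name, price
    FROM products
    WHERE id >= 20 AND id < 28
    ORDER BY price DESC
    LIMIT 30;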
=== RUN TestCompileQuery/withWhereAndList
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withWhereIsNull
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/withWhereMultiOr
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/fetchByID
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/searchQuery
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
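The search argument maps onto PostgreSQL full-text search: websearch_to_tsquery (PostgreSQL 11+) parses the user's query, @@ filters, ts_rank scores, and ts_headline highlights. A standalone form, assuming the fixtures' tsv tsvector column:

    SELECT id, name,
           ts_rank(tsv, websearch_to_tsquery('red pen'))              AS search_rank,
           ts_headline(description, websearch_to_tsquery('red pen'))  AS search_headline_description
    FROM products
    WHERE tsv @@ websearch_to_tsquery('red pen')
    LIMIT 20;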
=== RUN TestCompileQuery/oneToMany
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
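Nesting is built from LEFT OUTER JOIN LATERAL: for each parent row, a lateral subquery aggregates its children into a JSON array, coalesced to '[]' when there are none. The shape in miniature:

    SELECT u.email, c.products
    FROM users u
    LEFT JOIN LATERAL (
      SELECT coalesce(jsonb_agg(jsonb_build_object('name', p.name, 'price', p.price)), '[]') AS products
      FROM products p
      WHERE p.user_id = u.id
    ) c ON true
    LIMIT 20;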
=== RUN TestCompileQuery/oneToManyReverse
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "__sj_1"."json" AS "users" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."email" AS "email" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "__sj_1"."json" AS "users" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."email" AS "email" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/oneToManyArray
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3") AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/manyToMany
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/manyToManyReverse
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunction
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunctionBlockedByCol
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunctionDisabled
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/aggFunctionWithFilter
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/syntheticTables
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/queryWithVariables
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/withWhereOnRelations
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
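Filtering one table by conditions on a related one compiles to NOT EXISTS, which keeps qualifying parents without ever joining the children into the result. In isolation:

    -- Users who have no product priced above 3.
    SELECT u.id, u.email
    FROM users u
    WHERE NOT EXISTS (
      SELECT 1
      FROM products p
      WHERE p.user_id = u.id AND p.price > 3
    )
    LIMIT 20;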
=== RUN TestCompileQuery/multiRoot
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4") AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3") AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/jsonColumnAsTable
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
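A JSON column is exposed as a nested table via json_to_recordset, which expands a JSON array into rows with declared column types. The expansion step alone, assuming the fixtures' products.tag_count json column:

    SELECT tc.tag_id, tc.count
    FROM products,
         json_to_recordset(products.tag_count) AS tc(tag_id bigint, count int);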
=== RUN TestCompileQuery/withCursor
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0") - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
=== RUN TestCompileQuery/blockedQuery
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileQuery/blockedFunctions
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
--- PASS: TestCompileQuery (0.02s)
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
@ -121,23 +121,23 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
=== RUN TestCompileUpdate
=== RUN TestCompileUpdate/singleUpdate
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/simpleUpdateWithPresets
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT "t"."name", "t"."price", 'now' :: timestamp without time zone FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateManyToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToMany
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOne
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT "t"."email" FROM "_sg_input" i, json_populate_record(NULL::users, i.j->'user') t) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
--- PASS: TestCompileUpdate (0.02s)
--- PASS: TestCompileUpdate/singleUpdate (0.00s)
--- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s)
@ -148,4 +148,4 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FR
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
PASS
ok  github.com/dosco/super-graph/core/internal/psql 0.320s
ok  github.com/dosco/super-graph/core/internal/psql 0.306s
@ -91,25 +91,9 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
  renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
  renderNestedUpdateRelColumns(w, item.kvitem, true)

  io.WriteString(w, ` FROM "_sg_input" i, `)
  io.WriteString(w, ` FROM "_sg_input" i`)
  renderNestedUpdateRelTables(w, item.kvitem)

  if item.array {
    io.WriteString(w, `json_populate_recordset`)
  } else {
    io.WriteString(w, `json_populate_record`)
  }

  io.WriteString(w, `(NULL::`)
  io.WriteString(w, ti.Name)

  if len(item.path) == 0 {
    io.WriteString(w, `, i.j) t)`)
  } else {
    io.WriteString(w, `, i.j->`)
    joinPath(w, item.path)
    io.WriteString(w, `) t) `)
  }
  io.WriteString(w, `) `)

  if item.id != 0 {
    // Render sql to set id values if child-to-parent
@ -137,9 +121,11 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
    io.WriteString(w, `)`)

  } else {
    io.WriteString(w, ` WHERE `)
    if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
      return err
    if qc.Selects[0].Where != nil {
      io.WriteString(w, ` WHERE `)
      if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
        return err
      }
    }
  }

@ -202,9 +188,9 @@ func renderNestedUpdateRelTables(w io.Writer, item kvitem) error {
  // relationship is one-to-many
  for _, v := range item.items {
    if v._ctype > 0 && v.relCP.Type == RelOneToMany {
      io.WriteString(w, `"_x_`)
      io.WriteString(w, `, "_x_`)
      io.WriteString(w, v.relCP.Left.Table)
      io.WriteString(w, `", `)
      io.WriteString(w, `"`)
    }
  }

@ -1,4 +1,4 @@
package psql
package psql_test

import (
  "encoding/json"
17
core/internal/qcode/bench.9
Normal file
@ -0,0 +1,17 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile
BenchmarkQCompile-16        129614   8649 ns/op   3756 B/op   28 allocs/op
BenchmarkQCompileP
BenchmarkQCompileP-16       487488   2525 ns/op   3792 B/op   28 allocs/op
BenchmarkParse
BenchmarkParse-16           127582   8731 ns/op   3902 B/op   18 allocs/op
BenchmarkParseP
BenchmarkParseP-16          561373   2223 ns/op   3903 B/op   18 allocs/op
BenchmarkSchemaParse
BenchmarkSchemaParse-16     209142   5523 ns/op   3968 B/op   57 allocs/op
BenchmarkSchemaParseP
BenchmarkSchemaParseP-16    716437   1734 ns/op   3968 B/op   57 allocs/op
PASS
ok  github.com/dosco/super-graph/core/internal/qcode 8.483s
@ -7,7 +7,8 @@ import (
)

type Config struct {
  Blocklist []string
  Blocklist    []string
  DefaultBlock bool
}

type QueryConfig struct {
@ -602,7 +602,7 @@ func (t parserType) String() string {
// nodePool.Put(n)
// freeList = append(freeList, Frees{n, loc})
// } else {
// fmt.Printf(">>>>(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// }
// }

@ -2,6 +2,7 @@ package qcode

import (
  "errors"
  "github.com/chirino/graphql/schema"
  "testing"
)

@ -130,7 +131,7 @@ updateThread {
}

var gql = []byte(`
  products(
  {products(
  # returns only 30 items
  limit: 30,

@ -148,7 +149,7 @@ var gql = []byte(`
  id
  name
  price
  }`)
  }}`)

func BenchmarkQCompile(b *testing.B) {
  qcompile, _ := NewCompiler(Config{})
@ -181,3 +182,59 @@ func BenchmarkQCompileP(b *testing.B) {
    }
  })
}

func BenchmarkParse(b *testing.B) {

  b.ResetTimer()
  b.ReportAllocs()
  for n := 0; n < b.N; n++ {
    _, err := Parse(gql)

    if err != nil {
      b.Fatal(err)
    }
  }
}

func BenchmarkParseP(b *testing.B) {
  b.ResetTimer()
  b.ReportAllocs()

  b.RunParallel(func(pb *testing.PB) {
    for pb.Next() {
      _, err := Parse(gql)

      if err != nil {
        b.Fatal(err)
      }
    }
  })
}

func BenchmarkSchemaParse(b *testing.B) {

  b.ResetTimer()
  b.ReportAllocs()
  for n := 0; n < b.N; n++ {
    doc := schema.QueryDocument{}
    err := doc.Parse(string(gql))
    if err != nil {
      b.Fatal(err)
    }
  }
}

func BenchmarkSchemaParseP(b *testing.B) {
  b.ResetTimer()
  b.ReportAllocs()

  b.RunParallel(func(pb *testing.PB) {
    for pb.Next() {
      doc := schema.QueryDocument{}
      err := doc.Parse(string(gql))
      if err != nil {
        b.Fatal(err)
      }
    }
  })
}
@ -170,6 +170,7 @@ const (
)

type Compiler struct {
  db bool // default block tables if not defined in anon role
  tr map[string]map[string]*trval
  bl map[string]struct{}
}
@ -179,7 +180,7 @@ var expPool = sync.Pool{
}

func NewCompiler(c Config) (*Compiler, error) {
  co := &Compiler{}
  co := &Compiler{db: c.DefaultBlock}
  co.tr = make(map[string]map[string]*trval)
  co.bl = make(map[string]struct{}, len(c.Blocklist))

@ -413,12 +414,12 @@ func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {

func (com *Compiler) AddFilters(qc *QCode, sel *Select, role string) {
  var fil *Exp
  var nu bool
  var nu bool // user required (or not) in this filter

  if trv, ok := com.tr[role][sel.Name]; ok {
    fil, nu = trv.filter(qc.Type)

  } else if role == "anon" {
  } else if com.db && role == "anon" {
    // Tables not defined under the anon role will not be rendered
    sel.SkipRender = true
  }
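Taken together with the new Config.DefaultBlock field above, skipping tables for the anon role is now opt-in rather than unconditional. A minimal usage sketch (the Config fields come from the hunks above; the blocklist entries are illustrative, and qcode is an internal package, so this only compiles inside the repo):

// With DefaultBlock set, tables that carry no explicit "anon" role config
// are skipped for anonymous queries; with it unset they render as before.
qc, err := qcode.NewCompiler(qcode.Config{
  Blocklist:    []string{"password", "token"}, // illustrative values
  DefaultBlock: true,
})
if err != nil {
  log.Fatal(err)
}
_ = qc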
@ -1,12 +1,17 @@
package qcode

func GetQType(gql string) QType {
  ic := false
  for i := range gql {
    b := gql[i]
    if b == '{' {
    switch {
    case b == '#':
      ic = true
    case b == '\n':
      ic = false
    case !ic && b == '{':
      return QTQuery
    }
    if al(b) {
    case !ic && al(b):
      switch b {
      case 'm', 'M':
        return QTMutation
50
core/internal/qcode/utils_test.go
Normal file
@ -0,0 +1,50 @@
package qcode

import "testing"

func TestGetQType(t *testing.T) {
  type args struct {
    gql string
  }
  type ts struct {
    name string
    args args
    want QType
  }
  tests := []ts{
    ts{
      name: "query",
      args: args{gql: " query {"},
      want: QTQuery,
    },
    ts{
      name: "mutation",
      args: args{gql: " mutation {"},
      want: QTMutation,
    },
    ts{
      name: "default query",
      args: args{gql: " {"},
      want: QTQuery,
    },
    ts{
      name: "default query with comment",
      args: args{gql: `# query is good
      {`},
      want: QTQuery,
    },
    ts{
      name: "failed query with comment",
      args: args{gql: `# query is good query {`},
      want: -1,
    },
  }

  for _, tt := range tests {
    t.Run(tt.name, func(t *testing.T) {
      if got := GetQType(tt.args.gql); got != tt.want {
        t.Errorf("GetQType() = %v, want %v", got, tt.want)
      }
    })
  }
}
490
core/introspec.go
Normal file
@ -0,0 +1,490 @@
package core

import (
  "strings"

  "github.com/chirino/graphql"
  "github.com/chirino/graphql/resolvers"
  "github.com/chirino/graphql/schema"
  "github.com/dosco/super-graph/core/internal/psql"
)

var typeMap map[string]string = map[string]string{
  "smallint":         "Int",
  "integer":          "Int",
  "bigint":           "Int",
  "smallserial":      "Int",
  "serial":           "Int",
  "bigserial":        "Int",
  "decimal":          "Float",
  "numeric":          "Float",
  "real":             "Float",
  "double precision": "Float",
  "money":            "Float",
  "boolean":          "Boolean",
}

func (sg *SuperGraph) initGraphQLEgine() error {
  engine := graphql.New()
  engineSchema := engine.Schema
  dbSchema := sg.schema

  if err := engineSchema.Parse(`enum OrderDirection { asc desc }`); err != nil {
    return err
  }

  gqltype := func(col psql.DBColumn) schema.Type {
    typeName := typeMap[strings.ToLower(col.Type)]
    if typeName == "" {
      typeName = "String"
    }
    var t schema.Type = &schema.TypeName{Name: typeName}
    if col.NotNull {
      t = &schema.NonNull{OfType: t}
    }
    return t
  }

  query := &schema.Object{
    Name:   "Query",
    Fields: schema.FieldList{},
  }
  mutation := &schema.Object{
    Name:   "Mutation",
    Fields: schema.FieldList{},
  }
  engineSchema.Types[query.Name] = query
  engineSchema.Types[mutation.Name] = mutation
  engineSchema.EntryPoints[schema.Query] = query
  engineSchema.EntryPoints[schema.Mutation] = mutation

  //validGraphQLIdentifierRegex := regexp.MustCompile(`^[A-Za-z_][A-Za-z_0-9]*$`)

  scalarExpressionTypesNeeded := map[string]bool{}
  tableNames := dbSchema.GetTableNames()
  funcs := dbSchema.GetFunctions()

  for _, table := range tableNames {
    ti, err := dbSchema.GetTable(table)
    if err != nil {
      return err
    }

    if !ti.IsSingular {
      continue
    }

    singularName := ti.Singular
    // if !validGraphQLIdentifierRegex.MatchString(singularName) {
    //   return errors.New("table name is not a valid GraphQL identifier: " + singularName)
    // }
    pluralName := ti.Plural
    // if !validGraphQLIdentifierRegex.MatchString(pluralName) {
    //   return errors.New("table name is not a valid GraphQL identifier: " + pluralName)
    // }

    outputType := &schema.Object{
      Name:   singularName + "Output",
      Fields: schema.FieldList{},
    }
    engineSchema.Types[outputType.Name] = outputType

    inputType := &schema.InputObject{
      Name:   singularName + "Input",
      Fields: schema.InputValueList{},
    }
    engineSchema.Types[inputType.Name] = inputType

    orderByType := &schema.InputObject{
      Name:   singularName + "OrderBy",
      Fields: schema.InputValueList{},
    }
    engineSchema.Types[orderByType.Name] = orderByType

    expressionTypeName := singularName + "Expression"
    expressionType := &schema.InputObject{
      Name: expressionTypeName,
      Fields: schema.InputValueList{
        &schema.InputValue{
          Name: "and",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: expressionTypeName}},
        },
        &schema.InputValue{
          Name: "or",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: expressionTypeName}},
        },
        &schema.InputValue{
          Name: "not",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: expressionTypeName}},
        },
      },
    }
    engineSchema.Types[expressionType.Name] = expressionType

    for _, col := range ti.Columns {
      colName := col.Name
      // if !validGraphQLIdentifierRegex.MatchString(colName) {
      //   return errors.New("column name is not a valid GraphQL identifier: " + colName)
      // }

      colType := gqltype(col)
      nullableColType := ""
      if x, ok := colType.(*schema.NonNull); ok {
        nullableColType = x.OfType.(*schema.TypeName).Name
      } else {
        nullableColType = colType.(*schema.TypeName).Name
      }

      outputType.Fields = append(outputType.Fields, &schema.Field{
        Name: colName,
        Type: colType,
      })

      for _, f := range funcs {
        if col.Type != f.Params[0].Type {
          continue
        }
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: f.Name + "_" + colName,
          Type: colType,
        })
      }

      // If it's a numeric type...
      if nullableColType == "Float" || nullableColType == "Int" {
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "avg_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "count_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "max_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "min_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "stddev_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "stddev_pop_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "stddev_samp_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "variance_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "var_pop_" + colName,
          Type: colType,
        })
        outputType.Fields = append(outputType.Fields, &schema.Field{
          Name: "var_samp_" + colName,
          Type: colType,
        })
      }

      inputType.Fields = append(inputType.Fields, &schema.InputValue{
        Name: colName,
        Type: colType,
      })
      orderByType.Fields = append(orderByType.Fields, &schema.InputValue{
        Name: colName,
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: "OrderDirection"}},
      })

      scalarExpressionTypesNeeded[nullableColType] = true

      expressionType.Fields = append(expressionType.Fields, &schema.InputValue{
        Name: colName,
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: nullableColType + "Expression"}},
      })
    }

    outputTypeName := &schema.TypeName{Name: outputType.Name}
    inputTypeName := &schema.TypeName{Name: inputType.Name}
    pluralOutputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: outputType.Name}}}}
    pluralInputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: inputType.Name}}}}

    args := schema.InputValueList{
      &schema.InputValue{
        Desc: schema.Description{Text: "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."},
        Name: "order_by",
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: orderByType.Name}},
      },
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "where",
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: expressionType.Name}},
      },
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "limit",
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Int"}},
      },
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "offset",
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Int"}},
      },
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "first",
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Int"}},
      },
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "last",
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Int"}},
      },
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "before",
        Type: &schema.TypeName{Name: "String"},
      },
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "after",
        Type: &schema.TypeName{Name: "String"},
      },
    }
    if ti.PrimaryCol != nil {
      t := gqltype(*ti.PrimaryCol)
      if _, ok := t.(*schema.NonNull); !ok {
        t = &schema.NonNull{OfType: t}
      }
      args = append(args, &schema.InputValue{
        Desc: schema.Description{Text: "Finds the record by the primary key"},
        Name: "id",
        Type: t,
      })
    }

    if ti.TSVCol != nil {
      args = append(args, &schema.InputValue{
        Desc: schema.Description{Text: "Performs full text search using a TSV index"},
        Name: "search",
        Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
      })
    }

    query.Fields = append(query.Fields, &schema.Field{
      Desc: schema.Description{Text: ""},
      Name: singularName,
      Type: outputTypeName,
      Args: args,
    })
    query.Fields = append(query.Fields, &schema.Field{
      Desc: schema.Description{Text: ""},
      Name: pluralName,
      Type: pluralOutputTypeName,
      Args: args,
    })

    mutationArgs := append(args, schema.InputValueList{
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "insert",
        Type: inputTypeName,
      },
      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "update",
        Type: inputTypeName,
      },

      &schema.InputValue{
        Desc: schema.Description{Text: ""},
        Name: "upsert",
        Type: inputTypeName,
      },
    }...)

    mutation.Fields = append(mutation.Fields, &schema.Field{
      Name: singularName,
      Args: mutationArgs,
      Type: outputType,
    })
    mutation.Fields = append(mutation.Fields, &schema.Field{
      Name: pluralName,
      Args: append(mutationArgs, schema.InputValueList{
        &schema.InputValue{
          Desc: schema.Description{Text: ""},
          Name: "inserts",
          Type: pluralInputTypeName,
        },
        &schema.InputValue{
          Desc: schema.Description{Text: ""},
          Name: "updates",
          Type: pluralInputTypeName,
        },
        &schema.InputValue{
          Desc: schema.Description{Text: ""},
          Name: "upserts",
          Type: pluralInputTypeName,
        },
      }...),
      Type: outputType,
    })
  }

  for typeName := range scalarExpressionTypesNeeded {
    expressionType := &schema.InputObject{
      Name: typeName + "Expression",
      Fields: schema.InputValueList{
        &schema.InputValue{
          Name: "eq",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "equals",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "neq",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "not_equals",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "gt",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "greater_than",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "lt",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "lesser_than",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "gte",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "greater_or_equals",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "lte",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "lesser_or_equals",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "in",
          Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
        },
        &schema.InputValue{
          Name: "nin",
          Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
        },
        &schema.InputValue{
          Name: "not_in",
          Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
        },

        &schema.InputValue{
          Name: "like",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "nlike",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "not_like",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "ilike",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "nilike",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "not_ilike",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "similar",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "nsimilar",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "not_similar",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "has_key",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
        },
        &schema.InputValue{
          Name: "has_key_any",
          Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
        },
        &schema.InputValue{
          Name: "has_key_all",
          Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
        },
        &schema.InputValue{
          Name: "contains",
          Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
        },
        &schema.InputValue{
          Name: "contained_in",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
        },
        &schema.InputValue{
          Name: "is_null",
          Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Boolean"}},
        },
      },
    }
    engineSchema.Types[expressionType.Name] = expressionType
  }

  if err := engineSchema.ResolveTypes(); err != nil {
    return err
  }

  engine.Resolver = resolvers.Func(func(request *resolvers.ResolveRequest, next resolvers.Resolution) resolvers.Resolution {
    resolver := resolvers.MetadataResolver.Resolve(request, next)
    if resolver != nil {
      return resolver
    }
    resolver = resolvers.MethodResolver.Resolve(request, next) // needed by the MetadataResolver
    if resolver != nil {
      return resolver
    }

    return nil
  })

  sg.ge = engine
  return nil
}
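To make the file above concrete: for a hypothetical "products" table with a not-null bigint id and a nullable numeric price, the generated introspection schema would contain types along these lines. This is an abbreviated sketch held in a Go string for reference only; argument and field lists are trimmed, and the authoritative shapes are the ones built in the code above.

// Hypothetical output for a "products" table; abbreviated and illustrative.
const exampleGeneratedSDL = `
type Query {
  product(id: Int!, where: productExpression!, limit: Int!, order_by: productOrderBy!): productOutput
  products(where: productExpression!, limit: Int!, order_by: productOrderBy!): [productOutput!]!
}

type productOutput {
  id: Int!
  price: Float
  avg_price: Float  # numeric columns also get count/max/min/stddev/variance variants
}

input productInput {
  id: Int!
  price: Float
}
`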
@ -3,7 +3,7 @@ package core
import (
  "bytes"
  "context"
  "crypto/sha1"
  "crypto/sha256"
  "database/sql"
  "encoding/hex"
  "fmt"
@ -58,21 +58,14 @@ func (sg *SuperGraph) initPrepared() error {
    }

    err := sg.prepareStmt(v)
    if err == nil {
    if err != nil {
      sg.log.Printf("WRN %s: %v", v.Name, err)
    } else {
      success++
      continue
    }

    // if len(v.Vars) == 0 {
    //   logger.Warn().Err(err).Msg(v.Query)
    // } else {
    //   logger.Warn().Err(err).Msgf("%s %s", v.Vars, v.Query)
    // }
  }

  // logger.Info().
  //   Msgf("Registered %d of %d queries from allow.list as prepared statements",
  //   success, len(list))
  sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))

  return nil
}
@ -84,13 +77,6 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {

  qt := qcode.GetQType(query)
  ct := context.Background()

  tx, err := sg.db.BeginTx(ct, nil)
  if err != nil {
    return err
  }
  defer tx.Rollback() //nolint: errcheck

  switch qt {
  case qcode.QTQuery:
    var stmts1 []stmt
@ -108,7 +94,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {

    //logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)

    err = sg.prepare(ct, tx, stmts1, stmtHash(item.Name, "user"))
    err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
    if err != nil {
      return err
    }
@ -124,7 +110,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
      return err
    }

    err = sg.prepare(ct, tx, stmts2, stmtHash(item.Name, "anon"))
    err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
    if err != nil {
      return err
    }
@ -135,36 +121,29 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
      // logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)

      stmts, err := sg.buildRoleStmt(qb, vars, role.Name)

      if err != nil {
        // if len(item.Vars) == 0 {
        //   logger.Warn().Err(err).Msg(item.Query)
        // } else {
        //   logger.Warn().Err(err).Msgf("%s %s", item.Vars, item.Query)
        // }
      if err == psql.ErrAllTablesSkipped {
        continue
      }
      if err != nil {
        return err
      }

      err = sg.prepare(ct, tx, stmts, stmtHash(item.Name, role.Name))
      err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
      if err != nil {
        return err
      }
    }
  }

  if err := tx.Commit(); err != nil {
    return err
  }

  return nil
}

func (sg *SuperGraph) prepare(ct context.Context, tx *sql.Tx, st []stmt, key string) error {
func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
  finalSQL, am := processTemplate(st[0].sql)

  sd, err := tx.Prepare(finalSQL)
  sd, err := sg.db.Prepare(finalSQL)
  if err != nil {
    return err
    return fmt.Errorf("prepare failed: %v: %s", err, finalSQL)
  }

  sg.prepared[key] = &preparedItem{
@ -256,7 +235,9 @@ func (sg *SuperGraph) initAllowList() error {
|
||||
sg.log.Printf("WRN allow list disabled no file specified")
|
||||
}
|
||||
|
||||
	if sg.conf.UseAllowList {
	// When the list is not enabled it is still created and
	// new queries are saved to it.
	if !sg.conf.UseAllowList {
		ac = allow.Config{CreateIfNotExists: true, Persist: true}
	}

@@ -270,7 +251,7 @@ func (sg *SuperGraph) initAllowList() error {

// nolint: errcheck
func stmtHash(name string, role string) string {
	h := sha1.New()
	h := sha256.New()
	io.WriteString(h, strings.ToLower(name))
	io.WriteString(h, role)
	return hex.EncodeToString(h.Sum(nil))
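The net effect of the `sha1` to `sha256` switch above is just a longer, stronger statement key; a self-contained sketch of the updated helper (mirroring the code shown, not a drop-in from the package):

```go
package main

import (
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"io"
	"strings"
)

// stmtHash derives a stable map key for a prepared statement from the
// query name (lowercased) and the role it was planned for.
// nolint: errcheck
func stmtHash(name string, role string) string {
	h := sha256.New()
	io.WriteString(h, strings.ToLower(name))
	io.WriteString(h, role)
	return hex.EncodeToString(h.Sum(nil))
}

func main() {
	fmt.Println(stmtHash("getProducts", "user")) // 64 hex chars with SHA-256
}
```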
@@ -120,20 +120,20 @@ func buildFn(r Remote) func(http.Header, []byte) ([]byte, error) {
		}
		defer res.Body.Close()

		if r.Debug {
			// reqDump, err := httputil.DumpRequestOut(req, true)
			// if err != nil {
			// 	return nil, err
			// }
		// if r.Debug {
		// 	reqDump, err := httputil.DumpRequestOut(req, true)
		// 	if err != nil {
		// 		return nil, err
		// 	}

			// resDump, err := httputil.DumpResponse(res, true)
			// if err != nil {
			// 	return nil, err
			// }
		// 	resDump, err := httputil.DumpResponse(res, true)
		// 	if err != nil {
		// 		return nil, err
		// 	}

			// logger.Debug().Msgf("Remote Request Debug:\n%s\n%s",
			// 	reqDump, resDump)
		}
		// 	logger.Debug().Msgf("Remote Request Debug:\n%s\n%s",
		// 		reqDump, resDump)
		// }

		if res.StatusCode != 200 {
			return nil,
@@ -104,7 +104,7 @@ query {
</div>

<div class="text-2xl md:text-3xl">
Super Graph is a library and service that fetches data from any Postgres database using just GraphQL. No more struggling with ORMs and SQL to wrangle data out of the database. No more having to figure out the right joins or making ineffiient queries. However complex the GraphQL, Super Graph will always generate just one single efficient SQL query. The goal is to save you time and money so you can focus on you're apps core value.
Super Graph is a library and service that fetches data from any Postgres database using just GraphQL. No more struggling with ORMs and SQL to wrangle data out of the database. No more having to figure out the right joins or making inefficient queries. However complex the GraphQL, Super Graph will always generate just one single efficient SQL query. The goal is to save you time and money so you can focus on your app's core value.
</div>
</div>
</div>
@@ -145,17 +145,12 @@ import (
func main() {
	db, err := sql.Open("pgx", "postgres://postgrs:@localhost:5432/example_db")
	if err != nil {
		log.Fatalf(err)
		log.Fatal(err)
	}

	conf, err := config.NewConfig("./config")
	sg, err := core.NewSuperGraph(nil, db)
	if err != nil {
		log.Fatalf(err)
	}

	sg, err = core.NewSuperGraph(conf, db)
	if err != nil {
		log.Fatalf(err)
		log.Fatal(err)
	}

	graphqlQuery := `
@@ -168,7 +163,7 @@ func main() {

	res, err := sg.GraphQL(context.Background(), graphqlQuery, nil)
	if err != nil {
		log.Fatalf(err)
		log.Fatal(err)
	}

	fmt.Println(string(res.Data))
@@ -10,7 +10,7 @@ longTagline: Get an instant high performance GraphQL API for Postgres. No code n
actionText: Get Started, Free, Open Source →
actionLink: /guide

description: Super Graph can automatically learn a Postgres database and instantly serve it as a fast and secured GraphQL API. It comes with tools to create a new app and manage it's database. You get it all, a very productive developer and a highly scalable app backend. It's designed to work well on serverless platforms by Google, AWS, Microsoft, etc. The goal is to save you a ton of time and money so you can focus on you're apps core value.
description: Super Graph can automatically learn a Postgres database and instantly serve it as a fast and secured GraphQL API. It comes with tools to create a new app and manage its database. You get it all, a very productive developer and a highly scalable app backend. It's designed to work well on serverless platforms by Google, AWS, Microsoft, etc. The goal is to save you a ton of time and money so you can focus on your app's core value.

features:
- title: Simple
@@ -32,7 +32,7 @@ For this to work you have to ensure that the option `:domain => :all` is added t

### With an NGINX loadbalancer

If you're infrastructure is fronted by NGINX then it should be configured so that all requests to your GraphQL API path are proxyed to Super Graph. In the example NGINX config below all requests to the path `/api/v1/graphql` are routed to wherever you have Super Graph installed within your architecture. This example is derived from the config file example at [/microservices-nginx-gateway/nginx.conf](https://github.com/launchany/microservices-nginx-gateway/blob/master/nginx.conf)
If your infrastructure is fronted by NGINX then it should be configured so that all requests to your GraphQL API path are proxied to Super Graph. In the example NGINX config below all requests to the path `/api/v1/graphql` are routed to wherever you have Super Graph installed within your architecture. This example is derived from the config file example at [/microservices-nginx-gateway/nginx.conf](https://github.com/launchany/microservices-nginx-gateway/blob/master/nginx.conf)

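The core of such a config is a single `location` block; this is a minimal sketch (the upstream name `super-graph` and port `8080` are assumptions, not taken from the linked file):

```
location /api/v1/graphql {
    proxy_pass http://super-graph:8080;
    proxy_set_header Host $host;
}
```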
::: tip NGINX with sub-domain
Yes, NGINX is very flexible and you can configure it to keep Super Graph on a subdomain instead of on the same top-level domain. I'm sure a little Googling will get you some great example configs for that.
@@ -347,12 +347,10 @@ beer_style
beer_yeast

// Cars
vehicle
vehicle_type
car
car_type
car_maker
car_model
fuel_type
transmission_gear_type

// Text
word
@@ -438,8 +436,8 @@ hipster_paragraph
hipster_sentence

// File
extension
mine_type
file_extension
file_mine_type

// Numbers
number
@@ -463,11 +461,18 @@ mac_address
digit
letter
lexify
rand_string
shuffle_strings
numerify
```

Other utility functions

```
shuffle_strings(string_array)
make_slug(text)
make_slug_lang(text, lang)
```

### Migrations

Easy database migrations are the most important thing when building products backed by a relational database. We make it super easy to manage and migrate your database.
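For a concrete sense of the format, a migration is just a plain SQL file under the configured migrations path; the file name and table here are hypothetical:

```sql
-- 0_create_users.sql
CREATE TABLE users (
  id BIGSERIAL PRIMARY KEY,
  email TEXT NOT NULL UNIQUE
);
```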
@@ -725,6 +730,32 @@ query {
}
```

### Custom Functions

Any function defined in the database, like the `add_five` function below that adds 5 to any number given to it, can be used within your query. The one limitation is that it should be a function that only accepts a single argument. The function is used within your GraphQL in a similar way to how aggregations are used above. Example below:

```graphql
query {
  thread(id: 5) {
    id
    total_votes
    add_five_total_votes
  }
}
```

Postgres user-defined function `add_five`:

```sql
CREATE OR REPLACE FUNCTION add_five(a integer) RETURNS integer AS $$
BEGIN
  RETURN a + 5;
END;
$$ LANGUAGE plpgsql;
```

In GraphQL, mutations are the operation type for when you need to modify data. Super Graph supports `insert`, `update`, `upsert` and `delete` mutations. You can also do complex nested inserts and updates.

When using mutations the data must be passed as variables, since Super Graph compiles the query into a prepared statement in the database for maximum speed. Prepared statements are like functions in your code: when called they accept arguments, and your variables are passed in as those arguments.
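A sketch of what that looks like in practice (the `products` table and its columns are illustrative assumptions, as is the exact argument shape):

```graphql
mutation {
  products(insert: $data) {
    id
    name
  }
}
```

with the variables sent alongside, e.g. `{ "data": { "name": "Pen", "price": 1.99 } }`.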
@@ -1038,7 +1069,7 @@ mutation {

### Pagination

This is a must have feature of any API. When you want your users to go thought a list page by page or implement some fancy infinite scroll you're going to need pagination. There are two ways to paginate in Super Graph.
This is a must-have feature of any API. When you want your users to go through a list page by page or implement some fancy infinite scroll you're going to need pagination. There are two ways to paginate in Super Graph.

Limit-Offset
This is simple enough but also inefficient when working with a large number of total items. Limit limits the number of items fetched, and offset is the point you want to fetch from. The below query will fetch 10 results at a time starting with the 100th item. You will have to keep updating offset (110, 120, 130, etc.) to walk through the results, so make offset a variable.
@@ -1054,7 +1085,7 @@ query {
```

#### Cursor
This is a powerful and highly efficient way to paginate though a large number of results. Infact it does not matter how many total results there are this will always be lighting fast. You can use a cursor to walk forward of backward though the results. If you plan to implement infinite scroll this is the option you should choose.
This is a powerful and highly efficient way to paginate a large number of results. In fact, it does not matter how many total results there are; this will always be lightning fast. You can use a cursor to walk forward or backward through the results. If you plan to implement infinite scroll this is the option you should choose.

When going this route the results will contain a cursor value; this is an encrypted string that you don't have to worry about, just pass it back in the next API call and you'll receive the next set of results. The cursor value is encrypted since its contents should only matter to Super Graph and not the client. Also, since the primary key is used for this feature, it's possible you might not want to leak its value to clients.

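A sketch of the flow (argument and field names are illustrative assumptions):

```graphql
query {
  products(first: 10, after: $cursor) {
    id
    name
  }
}
```

Pass `null` as `$cursor` for the first page, then hand back the cursor value returned with each response to fetch the next one.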
@@ -1704,7 +1735,7 @@ reload_on_config_change: true
# seed_file: seed.js

# Path pointing to where the migrations can be found
migrations_path: ./config/migrations
migrations_path: ./migrations

# Postgres related environment Variables
# SG_DATABASE_HOST
@@ -1790,18 +1821,37 @@ database:
  # Enable this if you need the user id in triggers, etc
  set_user_id: false

  # Define additional variables here to be used with filters
  variables:
    admin_account_id: "5"
  # database ping timeout is used for db health checking
  ping_timeout: 1m

  # Field and table names that you wish to block
  blocklist:
    - ar_internal_metadata
    - schema_migrations
    - secret
    - password
    - encrypted
    - token
  # Set up a secure tls encrypted db connection
  enable_tls: false

  # Required for tls. For example with Google Cloud SQL it's
  # <gcp-project-id>:<cloud-sql-instance>"
  # server_name: blah

  # Required for tls. Can be a file path or the contents of the pem file
  # server_cert: ./server-ca.pem

  # Required for tls. Can be a file path or the contents of the pem file
  # client_cert: ./client-cert.pem

  # Required for tls. Can be a file path or the contents of the pem file
  # client_key: ./client-key.pem

  # Define additional variables here to be used with filters
  variables:
    admin_account_id: "5"

  # Field and table names that you wish to block
  blocklist:
    - ar_internal_metadata
    - schema_migrations
    - secret
    - password
    - encrypted
    - token

# Create custom actions with their own api endpoints
# For example the below action will be available at /api/v1/actions/refresh_leaderboard_users
@@ -3,6 +3,11 @@ services:
  db:
    image: postgres
    tmpfs: /var/lib/postgresql/data
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
    ports:
      - "5432:5432"

  rails_app:
    image: dosco/super-graph-demo:latest
43 go.mod
@@ -1,36 +1,43 @@
module github.com/dosco/super-graph

require (
	github.com/DATA-DOG/go-sqlmock v1.4.1
	github.com/GeertJohan/go.rice v1.0.0
	github.com/NYTimes/gziphandler v1.1.1
	github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
	github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
	github.com/brianvoe/gofakeit v3.18.0+incompatible
	github.com/cespare/xxhash v1.1.0
	github.com/cespare/xxhash/v2 v2.1.0
	github.com/brianvoe/gofakeit/v5 v5.2.0
	github.com/cespare/xxhash/v2 v2.1.1
	github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a
	github.com/daaku/go.zipexe v1.0.1 // indirect
	github.com/dgrijalva/jwt-go v3.2.0+incompatible
	github.com/dlclark/regexp2 v1.2.0 // indirect
	github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733
	github.com/fsnotify/fsnotify v1.4.7
	github.com/dop251/goja v0.0.0-20200424152103-d0b8fda54cd0
	github.com/fsnotify/fsnotify v1.4.9
	github.com/garyburd/redigo v1.6.0
	github.com/go-sourcemap/sourcemap v2.1.2+incompatible // indirect
	github.com/gobuffalo/flect v0.1.6
	github.com/jackc/pgtype v1.0.1
	github.com/jackc/pgx/v4 v4.0.1
	github.com/magiconair/properties v1.8.1 // indirect
	github.com/pelletier/go-toml v1.4.0 // indirect
	github.com/pkg/errors v0.8.1
	github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
	github.com/gobuffalo/flect v0.2.1
	github.com/gosimple/slug v1.9.0
	github.com/jackc/pgtype v1.3.0
	github.com/jackc/pgx/v4 v4.6.0
	github.com/mitchellh/mapstructure v1.2.2 // indirect
	github.com/pelletier/go-toml v1.7.0 // indirect
	github.com/pkg/errors v0.9.1
	github.com/rs/cors v1.7.0
	github.com/spf13/afero v1.2.2 // indirect
	github.com/spf13/cobra v0.0.5
	github.com/spf13/cast v1.3.1 // indirect
	github.com/spf13/cobra v1.0.0
	github.com/spf13/jwalterweatherman v1.1.0 // indirect
	github.com/spf13/pflag v1.0.5 // indirect
	github.com/spf13/viper v1.4.0
	github.com/valyala/fasttemplate v1.0.1
	github.com/spf13/viper v1.6.3
	github.com/stretchr/testify v1.5.1
	github.com/valyala/fasttemplate v1.1.0
	go.uber.org/zap v1.14.1
	golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad
	golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9 // indirect
	gopkg.in/yaml.v2 v2.2.7 // indirect
	golang.org/x/crypto v0.0.0-20200414173820-0848c9571904
	golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
	golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 // indirect
	gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
	gopkg.in/ini.v1 v1.55.0 // indirect
)

go 1.13
155 go.sum
@@ -1,6 +1,8 @@
cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
github.com/DATA-DOG/go-sqlmock v1.4.1 h1:ThlnYciV1iM/V0OSF/dtkqWb6xo5qITT1TJBG1MRDJM=
github.com/DATA-DOG/go-sqlmock v1.4.1/go.mod h1:f/Ixk793poVmq4qj/V1dPUg2JEAKC73Q5eFN3EC/SaM=
github.com/GeertJohan/go.incremental v1.0.0 h1:7AH+pY1XUgQE4Y1HcXYaMqAI0m9yrFqo/jt0CW30vsg=
github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=
@@ -19,24 +21,27 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pOvUGt1D1lA5KjYhPBAN/3iWdP7xeFS9F0=
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
github.com/brianvoe/gofakeit v3.18.0+incompatible h1:wDOmHc9DLG4nRjUVVaxA+CEglKOW72Y5+4WNxUIkjM8=
github.com/brianvoe/gofakeit v3.18.0+incompatible/go.mod h1:kfwdRA90vvNhPutZWfH7WPaDzUjz+CZFqG+rPkOjGOc=
github.com/brianvoe/gofakeit/v5 v5.2.0 h1:De9X+2PQum9U2zCaIDxLV7wx0YBL6c7RN2sFBImzHGI=
github.com/brianvoe/gofakeit/v5 v5.2.0/go.mod h1:/ZENnKqX+XrN8SORLe/fu5lZDIo1tuPncWuRD+eyhSI=
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.0 h1:yTUvW7Vhb89inJ+8irsUqiWjh8iT6sQPZiQzI6ReGkA=
github.com/cespare/xxhash/v2 v2.1.0/go.mod h1:dgIUBU3pDso/gPgZ1osOZ0iQf77oPR28Tjxl5dIMyVM=
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a h1:WVu7r2vwlrBVmunbSSU+9/3M3AgsQyhE49CKDjHiFq4=
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a/go.mod h1:wQjjxFMFyMlsWh4Z3nMuHQtevD4Ul9UVQSnz1JOLuP8=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd h1:qMd81Ts1T2OTKmB4acZcyKaMtRnY5Y44NuXGX2GFJ1w=
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
github.com/daaku/go.zipexe v1.0.0 h1:VSOgZtH418pH9L16hC/JrgSNJbbAL26pj7lmD1+CGdY=
github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E=
@@ -50,21 +55,24 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733 h1:cyNc40Dx5YNEO94idePU8rhVd3dn+sd04Arh0kDBAaw=
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
github.com/dop251/goja v0.0.0-20200424152103-d0b8fda54cd0 h1:EfFAcaAwGai/wlDCWwIObHBm3T2C2CCPX/SaS0fpOJ4=
github.com/dop251/goja v0.0.0-20200424152103-d0b8fda54cd0/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
github.com/friendsofgo/graphiql v0.2.2/go.mod h1:8Y2kZ36AoTGWs78+VRpvATyt3LJBx0SZXmay80ZTRWo=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc=
github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
github.com/go-sourcemap/sourcemap v2.1.2+incompatible h1:0b/xya7BKGhXuqFESKM4oIiRo9WOt2ebz7KxfreD6ug=
github.com/go-sourcemap/sourcemap v2.1.2+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
github.com/gobuffalo/flect v0.1.6 h1:D7KWNRFiCknJKA495/e1BO7oxqf8tbieaLv/ehoZ/+g=
github.com/gobuffalo/flect v0.1.6/go.mod h1:W3K3X9ksuZfir8f/LrfVtWmCDQFfayuylOJ7sz/Fj80=
github.com/gobuffalo/flect v0.2.1 h1:GPoRjEN0QObosV4XwuoWvSd5uSiL0N3e91/xqyY4crQ=
github.com/gobuffalo/flect v0.2.1/go.mod h1:vmkQwuZYhN5Pc4ljYQZzP+1sq+NEkK+lh20jmEmX3jc=
github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE=
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
@@ -76,9 +84,15 @@ github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gosimple/slug v1.9.0 h1:r5vDcYrFz9BmfIAMC829un9hq7hKM4cHUrsv36LbEqs=
github.com/gosimple/slug v1.9.0/go.mod h1:AMZ+sOVe65uByN3kgEyf9WEBKBCSS+dJjMX9x4vDJbg=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
@@ -90,11 +104,13 @@ github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZb
github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
github.com/jackc/chunkreader/v2 v2.0.0 h1:DUwgMQuuPnS0rhMXenUtZpqZqrR/30NWY+qQvTpSvEs=
github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8=
github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA=
github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE=
github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s=
github.com/jackc/pgconn v1.0.1 h1:ZANo4pIkeHKIVD1cQMcxu8fwrwIICLblzi9HCjooZeQ=
github.com/jackc/pgconn v1.0.1/go.mod h1:GgY/Lbj1VonNaVdNUHs9AwWom3yP2eymFQ1C8z9r/Lk=
github.com/jackc/pgconn v1.5.0 h1:oFSOilzIZkyg787M1fEmyMfOUUvwj0daqYMfaWwNL4o=
github.com/jackc/pgconn v1.5.0/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI=
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2 h1:JVX6jT/XfzNqIjye4717ITLaNwV9mWbJx0dLCpcRzdA=
@@ -107,25 +123,31 @@ github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
github.com/jackc/pgproto3/v2 v2.0.0 h1:FApgMJ/GtaXfI0s8Lvd0kaLaRwMOhs4VH92pwkwQQvU=
github.com/jackc/pgproto3/v2 v2.0.0/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
github.com/jackc/pgproto3/v2 v2.0.1 h1:Rdjp4NFjwHnEslx2b66FfCI2S0LhO4itac3hXz6WX9M=
github.com/jackc/pgproto3/v2 v2.0.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8 h1:Q3tB+ExeflWUW7AFcAhXqk40s9mnNYLk1nOkKNZ5GnU=
github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
github.com/jackc/pgtype v1.0.1 h1:7GWB9n3DdnO3TIbj59wMAE9QcHPL4cy/Bbtk5P1Noow=
github.com/jackc/pgtype v1.0.1/go.mod h1:5m2OfMh1wTK7x+Fk952IDmI4nw3nPrvtQdM0ZT4WpC0=
github.com/jackc/pgtype v1.3.0 h1:l8JvKrby3RI7Kg3bYEeU9TA4vqC38QDpFCfcrC7KuN0=
github.com/jackc/pgtype v1.3.0/go.mod h1:b0JqxHvPmljG+HQ5IsvQ0yqeSi4nGcDTVjFoiLDb0Ik=
github.com/jackc/pgx v3.6.2+incompatible h1:2zP5OD7kiyR3xzRYMhOcXVvkDZsImVXfj+yIyTQf3/o=
github.com/jackc/pgx v3.6.2+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
github.com/jackc/pgx/v4 v4.0.1 h1:NNrG0MX2AVEJw1NNDYg+ixSXycCfWWKeqMuQHQkAngc=
github.com/jackc/pgx/v4 v4.0.1/go.mod h1:NeQ64VJooukJGFLX2r01sJL/gRbKlpvsO2giBvjfgrY=
github.com/jackc/pgx/v4 v4.6.0 h1:Fh0O9GdlG4gYpjpwOqjdEodJUQM9jzN3Hdv7PN0xmm0=
github.com/jackc/pgx/v4 v4.6.0/go.mod h1:vPh43ZzxijXUVJ+t/EmXBtFmbFVO72cuneCT9oAlxAg=
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.0.0 h1:rbjAshlgKscNa7j0jAM0uNQflis5o2XUogPMVAwtcsM=
github.com/jackc/puddle v1.0.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jackc/puddle v1.1.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
@@ -158,17 +180,26 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
github.com/mitchellh/mapstructure v1.2.2 h1:dxe5oCinTXiTIcfgmZecdCzPmAJKd46KsCWc35r0TV4=
github.com/mitchellh/mapstructure v1.2.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229 h1:E2B8qYyeSgv5MXpmzZXRNp8IAQ4vjxIjhpAf5hv/tAg=
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
github.com/pelletier/go-toml v1.7.0 h1:7utD74fnzVc/cpcyy8sjrlFr5vYpypUixARcHIMIGuI=
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
@@ -180,6 +211,8 @@ github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be h1:ta7tUOvsPHVHGom5hKW5VXNc2xZIkfCKP8iaqOyYtUQ=
github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be/go.mod h1:MIDFMn7db1kT65GmV94GzpX9Qdi7N/pQlwb+AN8wh+Q=
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
@@ -188,14 +221,22 @@ github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
github.com/rs/zerolog v1.15.0 h1:uPRuwkWF4J6fGsJ2R0Gn2jB1EQiav9k3S6CSdygQJXY=
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/segmentio/ksuid v1.0.2 h1:9yBfKyw4ECGTdALaF09Snw3sLJmYIX6AbPJrAy6MrDc=
github.com/segmentio/ksuid v1.0.2/go.mod h1:BXuJDr2byAiHuQaQtSKoXh1J0YmUDurywOXgB2w+OSU=
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE=
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=
@@ -204,18 +245,22 @@ github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s=
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
github.com/spf13/cobra v1.0.0 h1:6m/oheQuQ13N9ks4hubMG6BnvwOeaJrqSPLahSnczz8=
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M=
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
github.com/spf13/viper v1.6.3 h1:pDDu1OyEDTKzpJwdq4TiuLyMsUgRa/BT5cn5O62NoHs=
github.com/spf13/viper v1.6.3/go.mod h1:jUMtyi0/lB5yZH/FjyGAoH7IMNrIhlBf6pXZmbMDvzw=
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
@@ -224,13 +269,24 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
github.com/uber-go/atomic v1.3.2 h1:Azu9lPBWRNKzYXSIwRfgRuDuS0YKsK4NFhiQv98gkxo=
github.com/uber-go/atomic v1.3.2/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex1PFV8g=
github.com/uber/jaeger-client-go v2.14.1-0.20180928181052-40fb3b2c4120+incompatible h1:Dw0AFQs6RGO8RxMPGP2LknN/VtHolVH82P9PP0Ni+9w=
github.com/uber/jaeger-client-go v2.14.1-0.20180928181052-40fb3b2c4120+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
github.com/uber/jaeger-lib v1.5.0 h1:OHbgr8l656Ub3Fw5k9SWnBfIEwvoHQ+W2y+Aa9D1Uyo=
github.com/uber/jaeger-lib v1.5.0/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8=
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/valyala/fasttemplate v1.1.0 h1:RZqt0yGBsps8NGvLSGW804QQqCUYYLsaOjTVHy1Ocw4=
github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
@@ -249,22 +305,21 @@ go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
go.uber.org/zap v1.14.1 h1:nYDKopTbvAPq/NrUVZwT15y2lpROBiLLyoRTbXOYWOo=
go.uber.org/zap v1.14.1/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc=
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9 h1:mKdxBk7AujPs8kU4m80U72y/zjbZ3UcXC7dClwKbUI0=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7 h1:0hQKqeLdqlt5iIwVOBErRisrHJAN57yOiPRQItI20fU=
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad h1:5E5raQxcv+6CZ11RrBYQe5WRbUIWpScjh0kvHZkZIrQ=
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 h1:bXoxMPcSLOq08zI3/c5dEBT6lE4eh+jOh886GHrn6V8=
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs=
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
@@ -285,8 +340,6 @@ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5h
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A=
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8=
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
@@ -296,8 +349,9 @@ golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9 h1:ZBzSG/7F4eNKz2L3GE9o300RX0Az1Bw5HF7PDraD+qU=
golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
@@ -307,16 +361,22 @@ golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGm
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5 h1:hKsoRgsbwY1NafxrwTs+k64bikrLBkAgPir1TNCj3Zs=
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
golang.org/x/tools v0.0.0-20200128220307-520188d60f50 h1:0qnG0gwzB6QPiLDow10WJDdB38c+hQ7ArxO26Qc1boM=
golang.org/x/tools v0.0.0-20200128220307-520188d60f50/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
@@ -326,15 +386,22 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/ini.v1 v1.55.0 h1:E8yzL5unfpW3M6fz/eB7Cb5MQAYSZ7GKo4Qth+N2sgQ=
gopkg.in/ini.v1 v1.55.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099 h1:XJP7lxbSxWLOMNdBE4B/STaqVy6L73o0knwj2vIlxnw=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
@@ -23,7 +23,7 @@ func newAction(a *Action) (http.Handler, error) {

	httpFn := func(w http.ResponseWriter, r *http.Request) {
		if err := fn(w, r); err != nil {
			renderErr(w, err, nil)
			renderErr(w, err)
		}
	}
@@ -3,8 +3,8 @@ package serv
import (
	"time"

	"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
	"github.com/dosco/super-graph/core"
	"github.com/dosco/super-graph/internal/serv/internal/auth"

	"github.com/spf13/viper"
)
@@ -45,6 +45,8 @@ type Serv struct {
	MigrationsPath string   `mapstructure:"migrations_path"`
	AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
	DebugCORS      bool     `mapstructure:"cors_debug"`
	APIPath        string   `mapstructure:"api_path"`
	CacheControl   string   `mapstructure:"cache_control"`

	Auth  auth.Auth
	Auths []auth.Auth
@@ -60,6 +62,11 @@ type Serv struct {
		PoolSize    int32         `mapstructure:"pool_size"`
		MaxRetries  int           `mapstructure:"max_retries"`
		PingTimeout time.Duration `mapstructure:"ping_timeout"`
		EnableTLS   bool          `mapstructure:"enable_tls"`
		ServerName  string        `mapstructure:"server_name"`
		ServerCert  string        `mapstructure:"server_cert"`
		ClientCert  string        `mapstructure:"client_cert"`
		ClientKey   string        `mapstructure:"client_key"`
	} `mapstructure:"database"`

	Actions []Action
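For context on those `mapstructure` tags: they are the field names viper matches when decoding the YAML config into this struct. A standalone sketch of that mechanism, with a hypothetical config path and a trimmed-down struct:

```go
package main

import (
	"fmt"
	"log"
	"time"

	"github.com/spf13/viper"
)

// Trimmed-down mirror of the database section above.
type dbConf struct {
	PingTimeout time.Duration `mapstructure:"ping_timeout"`
	EnableTLS   bool          `mapstructure:"enable_tls"`
	ServerName  string        `mapstructure:"server_name"`
}

func main() {
	v := viper.New()
	v.SetConfigFile("./config/dev.yml") // hypothetical path
	if err := v.ReadInConfig(); err != nil {
		log.Fatal(err)
	}

	var db dbConf
	// Decode only the `database:` block; viper's default decode hooks
	// parse a string like "1m" into a time.Duration.
	if err := v.UnmarshalKey("database", &db); err != nil {
		log.Fatal(err)
	}
	fmt.Println(db.PingTimeout, db.EnableTLS, db.ServerName)
}
```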
@@ -26,13 +26,12 @@ var (
 )

 var (
-	log       *_log.Logger // logger
-	zlog      *zap.Logger  // fast logger
-	logLevel  int          // log level
-	conf      *Config      // parsed config
-	confPath  string       // path to the config file
-	db        *sql.DB      // database connection pool
-	secretKey [32]byte     // encryption key
+	log      *_log.Logger // logger
+	zlog     *zap.Logger  // fast logger
+	logLevel int          // log level
+	conf     *Config      // parsed config
+	confPath string       // path to the config file
+	db       *sql.DB      // database connection pool
 )

 func Cmd() {
@@ -156,6 +155,20 @@ func cmdVersion(cmd *cobra.Command, args []string) {
 }

 func BuildDetails() string {
+	if len(version) == 0 {
+		return fmt.Sprintf(`
+Super Graph (unknown version)
+For documentation, visit https://supergraph.dev
+
+To build with version information please use the Makefile
+> git clone https://github.com/dosco/super-graph
+> cd super-graph && make install
+
+Licensed under the Apache Public License 2.0
+Copyright 2020, Vikram Rangnekar
+`)
+	}
+
 	return fmt.Sprintf(`
 Super Graph %v
 For documentation, visit https://supergraph.dev
@@ -166,7 +179,7 @@ Branch : %v
 Go version : %v

 Licensed under the Apache Public License 2.0
-Copyright 2020, Vikram Rangnekar.
+Copyright 2020, Vikram Rangnekar
 `,
 		version,
 		lastCommitSHA,
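The `version` and `lastCommitSHA` values reported by BuildDetails are blank unless set at link time; the Makefile mentioned in the fallback message presumably injects them with `-ldflags`. A hedged sketch of that mechanism (the exact `-X` variable paths are an assumption, not read from the repo's Makefile):

```go
// Build with (illustrative values, not the repo's actual Makefile line):
//
//   go build -ldflags "-X main.version=v0.13.22 -X main.lastCommitSHA=3bf9f02" .
package main

import "fmt"

// Left empty in source; the linker overwrites them when -X is passed.
var (
	version       string
	lastCommitSHA string
)

func main() {
	if version == "" {
		fmt.Println("built without version info")
		return
	}
	fmt.Println("version:", version, "sha:", lastCommitSHA)
}
```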
@@ -9,7 +9,7 @@ import (
 	"strings"
 	"time"

-	"github.com/dosco/super-graph/cmd/internal/serv/internal/migrate"
+	"github.com/dosco/super-graph/internal/serv/internal/migrate"
 	"github.com/spf13/cobra"
 )

@@ -55,7 +55,7 @@ func cmdDBReset(cmd *cobra.Command, args []string) {
 func cmdDBCreate(cmd *cobra.Command, args []string) {
 	initConfOnce()

-	db, err := initDB(conf)
+	db, err := initDB(conf, false)
 	if err != nil {
 		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
@@ -74,7 +74,7 @@ func cmdDBCreate(cmd *cobra.Command, args []string) {
 func cmdDBDrop(cmd *cobra.Command, args []string) {
 	initConfOnce()

-	db, err := initDB(conf)
+	db, err := initDB(conf, false)
 	if err != nil {
 		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
@@ -98,8 +98,9 @@ func cmdDBNew(cmd *cobra.Command, args []string) {

 	initConfOnce()
 	name := args[0]
+	migrationsPath := conf.relPath(conf.MigrationsPath)

-	m, err := migrate.FindMigrations(conf.MigrationsPath)
+	m, err := migrate.FindMigrations(migrationsPath)
 	if err != nil {
 		log.Fatalf("ERR error loading migrations: %s", err)
 	}
@@ -107,8 +108,8 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
 	mname := fmt.Sprintf("%d_%s.sql", len(m), name)

 	// Write new migration
-	mpath := filepath.Join(conf.MigrationsPath, mname)
-	mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
+	mpath := filepath.Join(migrationsPath, mname)
+	mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0600)
 	if err != nil {
 		log.Fatalf("ERR %s", err)
 	}
@@ -131,7 +132,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
 	initConfOnce()
 	dest := args[0]

-	conn, err := initDB(conf)
+	conn, err := initDB(conf, true)
 	if err != nil {
 		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
@@ -144,7 +145,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {

 	m.Data = getMigrationVars()

-	err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
+	err = m.LoadMigrations(conf.relPath(conf.MigrationsPath))
 	if err != nil {
 		log.Fatalf("ERR failed to load migrations: %s", err)
 	}
@@ -223,7 +224,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
 func cmdDBStatus(cmd *cobra.Command, args []string) {
 	initConfOnce()

-	db, err := initDB(conf)
+	db, err := initDB(conf, true)
 	if err != nil {
 		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
@@ -236,7 +237,7 @@ func cmdDBStatus(cmd *cobra.Command, args []string) {

 	m.Data = getMigrationVars()

-	err = m.LoadMigrations(conf.MigrationsPath)
+	err = m.LoadMigrations(conf.relPath(conf.MigrationsPath))
 	if err != nil {
 		log.Fatalf("ERR failed to load migrations: %s", err)
 	}
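Both cmdDBNew changes lean on the same standard-library idiom: `os.OpenFile` with `O_CREATE|O_EXCL` fails if the file already exists, so an existing migration is never truncated, and the 0666 to 0600 change tightens the new file to owner read/write. A standalone illustration (file name is hypothetical):

```go
package main

import (
	"fmt"
	"os"
	"path/filepath"
)

func main() {
	mpath := filepath.Join(os.TempDir(), "1_create_users.sql")

	// O_EXCL: creation fails with an error if mpath already exists.
	// 0600: only the owner can read or write the new migration.
	mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0600)
	if err != nil {
		fmt.Println("not overwriting:", err) // hit on a second run
		return
	}
	defer mfile.Close()
	fmt.Println("created", mpath)
}
```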
@@ -13,9 +13,10 @@ import (
 	"strconv"
 	"strings"

-	"github.com/brianvoe/gofakeit"
+	"github.com/brianvoe/gofakeit/v5"
 	"github.com/dop251/goja"
 	"github.com/dosco/super-graph/core"
+	"github.com/gosimple/slug"
 	"github.com/spf13/cobra"
 )

@@ -28,7 +29,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {

 	conf.Production = false

-	db, err = initDB(conf)
+	db, err = initDB(conf, true)
 	if err != nil {
 		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
@@ -61,6 +62,10 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
 	setFakeFuncs(fake)
 	vm.Set("fake", fake)

+	util := vm.NewObject()
+	setUtilFuncs(util)
+	vm.Set("util", util)
+
 	_, err = vm.RunScript("seed.js", string(b))
 	if err != nil {
 		log.Fatalf("ERR failed to execute script: %s", err)
@@ -232,6 +237,10 @@ func imageURL(width int, height int) string {
 	return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
 }

+func getRandValue(values []string) string {
+	return values[rand.Intn(len(values))]
+}
+
 //nolint: errcheck
 func setFakeFuncs(f *goja.Object) {
 	gofakeit.Seed(0)
@@ -259,7 +268,6 @@ func setFakeFuncs(f *goja.Object) {
 	f.Set("country_abr", gofakeit.CountryAbr)
 	f.Set("state", gofakeit.State)
 	f.Set("state_abr", gofakeit.StateAbr)
-	f.Set("status_code", gofakeit.StatusCode)
 	f.Set("street", gofakeit.Street)
 	f.Set("street_name", gofakeit.StreetName)
 	f.Set("street_number", gofakeit.StreetNumber)
@@ -282,12 +290,10 @@ func setFakeFuncs(f *goja.Object) {
 	f.Set("beer_yeast", gofakeit.BeerYeast)

 	// Cars
-	f.Set("vehicle", gofakeit.Vehicle)
-	f.Set("vehicle_type", gofakeit.VehicleType)
+	f.Set("car", gofakeit.Car)
+	f.Set("car_type", gofakeit.CarType)
 	f.Set("car_maker", gofakeit.CarMaker)
 	f.Set("car_model", gofakeit.CarModel)
-	f.Set("fuel_type", gofakeit.FuelType)
-	f.Set("transmission_gear_type", gofakeit.TransmissionGearType)

 	// Text
 	f.Set("word", gofakeit.Word)
@@ -315,7 +321,6 @@ func setFakeFuncs(f *goja.Object) {
 	f.Set("domain_suffix", gofakeit.DomainSuffix)
 	f.Set("ipv4_address", gofakeit.IPv4Address)
 	f.Set("ipv6_address", gofakeit.IPv6Address)
-	f.Set("simple_status_code", gofakeit.SimpleStatusCode)
 	f.Set("http_method", gofakeit.HTTPMethod)
 	f.Set("user_agent", gofakeit.UserAgent)
 	f.Set("user_agent_firefox", gofakeit.FirefoxUserAgent)
@@ -379,8 +384,8 @@ func setFakeFuncs(f *goja.Object) {
 	//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)

 	// File
-	f.Set("extension", gofakeit.Extension)
-	f.Set("mine_type", gofakeit.MimeType)
+	f.Set("file_extension", gofakeit.FileExtension)
+	f.Set("file_mine_type", gofakeit.FileMimeType)

 	// Numbers
 	f.Set("number", gofakeit.Number)
@@ -404,10 +409,16 @@ func setFakeFuncs(f *goja.Object) {
 	f.Set("digit", gofakeit.Digit)
 	f.Set("letter", gofakeit.Letter)
 	f.Set("lexify", gofakeit.Lexify)
-	f.Set("rand_string", gofakeit.RandString)
-	f.Set("shuffle_strings", gofakeit.ShuffleStrings)
+	f.Set("rand_string", getRandValue)
 	f.Set("numerify", gofakeit.Numerify)

 	//f.Set("programming_language", gofakeit.ProgrammingLanguage)
-
 }
+
+//nolint: errcheck
+func setUtilFuncs(f *goja.Object) {
+	// Slugs
+	f.Set("make_slug", slug.Make)
+	f.Set("make_slug_lang", slug.MakeLang)
+	f.Set("shuffle_strings", gofakeit.ShuffleStrings)
+}
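The setUtilFuncs wiring above exposes Go helpers to the seed script under a `util` object. A minimal standalone sketch of that goja pattern (the script body is illustrative; a real seed.js would also use the `fake` object):

```go
package main

import (
	"fmt"

	"github.com/dop251/goja"
	"github.com/gosimple/slug"
)

func main() {
	vm := goja.New()

	// Same shape as the diff: bind Go funcs onto an object, then
	// expose that object to the JS runtime under a single name.
	util := vm.NewObject()
	util.Set("make_slug", slug.Make) // callable from JS as util.make_slug(...)
	vm.Set("util", util)

	v, err := vm.RunString(`util.make_slug("Hello World")`)
	if err != nil {
		panic(err)
	}
	fmt.Println(v.String()) // hello-world
}
```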
@@ -19,15 +19,11 @@ func cmdServ(cmd *cobra.Command, args []string) {

 	initWatcher()

-	db, err = initDB(conf)
+	db, err = initDB(conf, true)
 	if err != nil {
 		fatalInProd(err, "failed to connect to database")
 	}

-	// if conf != nil && db != nil {
-	// 	initResolvers()
-	// }
-
 	sg, err = core.NewSuperGraph(&conf.Core, db)
 	if err != nil {
 		fatalInProd(err, "failed to initialize Super Graph")
@@ -4,6 +4,7 @@ import (
 	"fmt"
 	"os"
 	"path"
+	"path/filepath"
 	"strings"

 	"github.com/spf13/viper"
@@ -48,10 +49,6 @@ func ReadInConfig(configFile string) (*Config, error) {
 		return nil, fmt.Errorf("failed to decode config, %v", err)
 	}

-	if len(c.Core.AllowListFile) == 0 {
-		c.Core.AllowListFile = path.Join(cpath, "allow.list")
-	}
-
 	return c, nil
 }

@@ -113,3 +110,11 @@ func GetConfigName() string {

 	return ge
 }
+
+func (c *Config) relPath(p string) string {
+	if filepath.IsAbs(p) {
+		return p
+	}
+
+	return path.Join(c.cpath, p)
+}
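The new relPath helper centralizes the "resolve relative to the config directory" rule that the cmd_db and init changes above now rely on. A standalone sketch of its behavior, reproducing the logic outside the package since `cpath` is unexported:

```go
package main

import (
	"fmt"
	"path"
	"path/filepath"
)

// relPath mirrors the helper added above: absolute paths pass through,
// relative ones are joined onto the config directory.
func relPath(cpath, p string) string {
	if filepath.IsAbs(p) {
		return p
	}
	return path.Join(cpath, p)
}

func main() {
	fmt.Println(relPath("/etc/super-graph", "migrations"))      // /etc/super-graph/migrations
	fmt.Println(relPath("/etc/super-graph", "/var/migrations")) // /var/migrations, unchanged
}
```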
@@ -6,10 +6,9 @@ import (
 	"io"
 	"io/ioutil"
 	"net/http"
-	"strings"

-	"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
 	"github.com/dosco/super-graph/core"
+	"github.com/dosco/super-graph/internal/serv/internal/auth"
 	"github.com/rs/cors"
 	"go.uber.org/zap"
 )
@@ -30,7 +29,7 @@ type gqlReq struct {
 }

 type errorResp struct {
-	Error error `json:"error"`
+	Error string `json:"error"`
 }

 func apiV1Handler() http.Handler {
@@ -56,13 +55,13 @@ func apiV1(w http.ResponseWriter, r *http.Request) {

 	//nolint: errcheck
 	if conf.AuthFailBlock && !auth.IsAuth(ct) {
-		renderErr(w, errUnauthorized, nil)
+		renderErr(w, errUnauthorized)
 		return
 	}

 	b, err := ioutil.ReadAll(io.LimitReader(r.Body, maxReadBytes))
 	if err != nil {
-		renderErr(w, err, nil)
+		renderErr(w, err)
 		return
 	}
 	defer r.Body.Close()
@@ -71,54 +70,57 @@ func apiV1(w http.ResponseWriter, r *http.Request) {

 	err = json.Unmarshal(b, &req)
 	if err != nil {
-		renderErr(w, err, nil)
-		return
-	}
-
-	if strings.EqualFold(req.OpName, introspectionQuery) {
-		introspect(w)
+		renderErr(w, err)
 		return
 	}

+	doLog := true
 	res, err := sg.GraphQL(ct, req.Query, req.Vars)

-	if logLevel >= LogLevelDebug {
-		log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
+	if !conf.Production && res.QueryName() == introspectionQuery {
+		doLog = false
 	}

-	if err != nil {
-		renderErr(w, err, res)
-		return
+	if doLog && logLevel >= LogLevelDebug {
+		log.Printf("DBG query %s: %s", res.QueryName(), res.SQL())
 	}

-	json.NewEncoder(w).Encode(res)
-
-	if logLevel >= LogLevelInfo {
-		zlog.Info("success",
-			zap.String("op", res.Operation()),
-			zap.String("name", res.QueryName()),
-			zap.String("role", res.Role()),
-		)
+	if err == nil {
+		if len(conf.CacheControl) != 0 && res.Operation() == core.OpQuery {
+			w.Header().Set("Cache-Control", conf.CacheControl)
+		}
+		//nolint: errcheck
+		json.NewEncoder(w).Encode(res)
+
+		if doLog && logLevel >= LogLevelInfo {
+			zlog.Info("success",
+				zap.String("op", res.OperationName()),
+				zap.String("name", res.QueryName()),
+				zap.String("role", res.Role()),
+			)
+		}
+
+	} else {
+		renderErr(w, err)
+
+		if doLog && logLevel >= LogLevelInfo {
+			zlog.Error("error",
+				zap.String("op", res.OperationName()),
+				zap.String("name", res.QueryName()),
+				zap.String("role", res.Role()),
+				zap.Error(err),
+			)
+		}
 	}
 }

 //nolint: errcheck
-func renderErr(w http.ResponseWriter, err error, res *core.Result) {
+func renderErr(w http.ResponseWriter, err error) {
 	if err == errUnauthorized {
 		w.WriteHeader(http.StatusUnauthorized)
 	}

-	json.NewEncoder(w).Encode(&errorResp{err})
-
-	if logLevel >= LogLevelError {
-		if res != nil {
-			zlog.Error(err.Error(),
-				zap.String("op", res.Operation()),
-				zap.String("name", res.QueryName()),
-				zap.String("role", res.Role()),
-			)
-		} else {
-			zlog.Error(err.Error())
-		}
-	}
+	json.NewEncoder(w).Encode(errorResp{err.Error()})
 }
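The errorResp change from `Error error` to `Error string` fixes a real serialization problem: encoding/json renders most error values as an empty object, because the concrete types behind the error interface export no fields. A standalone demonstration (not code from this repo):

```go
package main

import (
	"encoding/json"
	"errors"
	"fmt"
)

func main() {
	err := errors.New("not found")

	// errors.New returns a struct with only unexported fields,
	// so json.Marshal produces an empty object for it.
	bad, _ := json.Marshal(struct {
		Error error `json:"error"`
	}{err})
	fmt.Println(string(bad)) // {"error":{}}

	// Converting to a string first preserves the message.
	good, _ := json.Marshal(struct {
		Error string `json:"error"`
	}{err.Error()})
	fmt.Println(string(good)) // {"error":"not found"}
}
```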
@@ -1,8 +1,15 @@
 package serv

 import (
+	"crypto/tls"
+	"crypto/x509"
 	"database/sql"
+	"errors"
 	"fmt"
+	"io/ioutil"
 	"path"
+	"path/filepath"
+	"strings"
 	"time"

 	"github.com/jackc/pgx/v4"
@@ -10,8 +17,17 @@ import (
 	//_ "github.com/jackc/pgx/v4/stdlib"
 )

+const (
+	PEM_SIG = "--BEGIN "
+)
+
 func initConf() (*Config, error) {
-	c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
+	cp, err := filepath.Abs(confPath)
+	if err != nil {
+		return nil, err
+	}
+
+	c, err := ReadInConfig(path.Join(cp, GetConfigName()))
 	if err != nil {
 		return nil, err
 	}
@@ -76,38 +92,27 @@ func initConf() (*Config, error) {
 		c.AuthFailBlock = false
 	}

+	if len(c.AllowListFile) == 0 {
+		c.AllowListFile = c.relPath("./allow.list")
+	}
+
 	if c.Production {
 		c.UseAllowList = true
 	}

+	// In anon role block all tables that are not defined in the role
+	c.DefaultBlock = true
+
 	return c, nil
 }

-func initDB(c *Config) (*sql.DB, error) {
+func initDB(c *Config, useDB bool) (*sql.DB, error) {
 	var db *sql.DB
 	var err error

-	// cs := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s",
-	// 	c.DB.Host, c.DB.Port,
-	// 	c.DB.User, c.DB.Password,
-	// 	c.DB.DBName)
-
-	// fmt.Println(">>", cs)
-
-	// for i := 1; i < 10; i++ {
-	// 	db, err = sql.Open("pgx", cs)
-	// 	if err == nil {
-	// 		break
-	// 	}
-	// 	time.Sleep(time.Duration(i*100) * time.Millisecond)
-	// }
-
-	// if err != nil {
-	// 	return nil, err
-	// }
-
-	// return db, nil
-
 	config, _ := pgx.ParseConfig("")
 	config.Host = c.DB.Host
 	config.Port = c.DB.Port
-	config.Database = c.DB.DBName
 	config.User = c.DB.User
 	config.Password = c.DB.Password
 	config.RuntimeParams = map[string]string{
@@ -115,6 +120,63 @@ func initDB(c *Config) (*sql.DB, error) {
 		"search_path": c.DB.Schema,
 	}

+	if useDB {
+		config.Database = c.DB.DBName
+	}
+
+	if c.DB.EnableTLS {
+		if len(c.DB.ServerName) == 0 {
+			return nil, errors.New("server_name is required")
+		}
+		if len(c.DB.ServerCert) == 0 {
+			return nil, errors.New("server_cert is required")
+		}
+		if len(c.DB.ClientCert) == 0 {
+			return nil, errors.New("client_cert is required")
+		}
+		if len(c.DB.ClientKey) == 0 {
+			return nil, errors.New("client_key is required")
+		}
+
+		rootCertPool := x509.NewCertPool()
+		var pem []byte
+		var err error
+
+		if strings.Contains(c.DB.ServerCert, PEM_SIG) {
+			pem = []byte(c.DB.ServerCert)
+		} else {
+			pem, err = ioutil.ReadFile(c.relPath(c.DB.ServerCert))
+		}
+
+		if err != nil {
+			return nil, fmt.Errorf("db tls: %w", err)
+		}
+
+		if ok := rootCertPool.AppendCertsFromPEM(pem); !ok {
+			return nil, errors.New("db tls: failed to append pem")
+		}
+
+		clientCert := make([]tls.Certificate, 0, 1)
+		var certs tls.Certificate
+
+		if strings.Contains(c.DB.ClientCert, PEM_SIG) {
+			certs, err = tls.X509KeyPair([]byte(c.DB.ClientCert), []byte(c.DB.ClientKey))
+		} else {
+			certs, err = tls.LoadX509KeyPair(c.relPath(c.DB.ClientCert), c.relPath(c.DB.ClientKey))
+		}
+
+		if err != nil {
+			return nil, fmt.Errorf("db tls: %w", err)
+		}
+
+		clientCert = append(clientCert, certs)
+		config.TLSConfig = &tls.Config{
+			RootCAs:      rootCertPool,
+			Certificates: clientCert,
+			ServerName:   c.DB.ServerName,
+		}
+	}
+
 	// switch c.LogLevel {
 	// case "debug":
 	// 	config.LogLevel = pgx.LogLevelDebug
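The diff truncates before the assembled `*pgx.ConnConfig` becomes a `*sql.DB`, and the stdlib import above is shown commented out, so the following is only a guess at one way that step can be done with pgx/v4's database/sql adapter:

```go
package main

import (
	"database/sql"
	"log"

	"github.com/jackc/pgx/v4"
	"github.com/jackc/pgx/v4/stdlib" // registers the "pgx" driver
)

func open() (*sql.DB, error) {
	config, err := pgx.ParseConfig("")
	if err != nil {
		return nil, err
	}
	config.Host = "localhost" // plus TLSConfig etc., as built above

	// RegisterConnConfig hands back a DSN that sql.Open can use.
	connStr := stdlib.RegisterConnConfig(config)
	return sql.Open("pgx", connStr)
}

func main() {
	db, err := open()
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()
}
```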
@@ -9,8 +9,8 @@ import (
 	"strings"

 	"github.com/bradfitz/gomemcache/memcache"
-	"github.com/dosco/super-graph/cmd/internal/serv/internal/rails"
 	"github.com/dosco/super-graph/core"
+	"github.com/dosco/super-graph/internal/serv/internal/rails"
 	"github.com/garyburd/redigo/redis"
 )
Some files were not shown because too many files have changed in this diff.