Compare commits


15 Commits

Author SHA1 Message Date
d3e32f944a fix: json content type breaks web ui 2020-05-11 21:09:12 -04:00
3bf9f02a9f fix: bug with reading config file by name 2020-05-10 11:26:48 -04:00
533c767e1d fix: benchmark was failing. Also added a benchmark for the chirino/graphql version gql parser to compare results. (#62) 2020-05-07 10:48:01 -04:00
84d55dbc8a feat: remove data from variables saved to allow.list 2020-05-07 10:27:40 -04:00
5aafff6310 chore: add InteliJ editor project files to the .gitignore list. (#61) 2020-05-07 10:24:29 -04:00
840aaf64ff fix: return response as application/json (#59) 2020-05-07 10:24:12 -04:00
7bbb56a328 fix get functions parameters without name (#60) 2020-05-07 03:04:37 -04:00
394b08b2fe chore: update changelog 2020-05-03 21:01:16 -04:00
842252f9e2 fix: fix issue with skipping prepared statements for some roles on error 2020-05-03 20:52:26 -04:00
279f5616d1 fix: fix for issues reported by deepsource 2020-05-03 16:08:34 -04:00
04bb88f74b Add .deepsource.toml 2020-05-03 19:57:42 +00:00
38ed6dbc5f fix: bug with single quote escape in production mode 2020-05-01 02:20:45 -04:00
ec2f8d0c58 chore: pickup latest version of chirino/graphql module for its schema api simplifications. (#58) 2020-05-01 02:03:35 -04:00
9b51065414 fix: grammatical errors (#57) 2020-04-25 09:57:59 -04:00
1a70603b1a feat: add option to set the cache-control header 2020-04-24 20:45:03 -04:00
44 changed files with 925 additions and 630 deletions

View File

@ -5,18 +5,18 @@ info:
repository_url: https://github.com/dosco/super-graph
options:
commits:
# filters:
# Type:
# - feat
# - fix
# - perf
# - refactor
filters:
Type:
- feat
- fix
- perf
- refactor
commit_groups:
# title_maps:
# feat: Features
# fix: Bug Fixes
# perf: Performance Improvements
# refactor: Code Refactoring
title_maps:
feat: Features
fix: Bug Fixes
perf: Performance Improvements
refactor: Code Refactoring
header:
pattern: "^((\\w+)\\s.*)$"
pattern_maps:

.deepsource.toml Normal file
View File

@ -0,0 +1,8 @@
version = 1
[[analyzers]]
name = "go"
enabled = true
[analyzers.meta]
import_path = "github.com/dosco/super-graph"

.gitignore vendored
View File

@ -23,6 +23,8 @@
/tmp/runner-build
/demo/tmp
.idea
*.iml
.vscode
.DS_Store
.swp

View File

@ -1,401 +1,371 @@
<a name="unreleased"></a>
## [Unreleased]
### Add
- Add config driven custom table relationships
- Add support for `websearch_to_tsquery` in PG 11
### Create
- Create CODE_OF_CONDUCT.md
<a name="v0.13.22"></a>
## [v0.13.22] - 2020-05-01
### Fix
- Fix bug with remote join example
- Fix grammar / syntax
<a name="v0.13.21"></a>
## [v0.13.21] - 2020-04-24
### Update
- Update issue templates
- Update CONTRIBUTING.md
- Update issue templates
- Update feature_request.md
<a name="v0.13.20"></a>
## [v0.13.20] - 2020-04-24
<a name="v0.13.19"></a>
## [v0.13.19] - 2020-04-23
<a name="v0.13.18"></a>
## [v0.13.18] - 2020-04-23
<a name="v0.13.17"></a>
## [v0.13.17] - 2020-04-22
<a name="v0.13.16"></a>
## [v0.13.16] - 2020-04-21
### Features
- feat : improve the generated introspection schema and avoid the chirino/graphql api leaking through the core api. ([#53](https://github.com/dosco/super-graph/issues/53))
<a name="v0.13.15"></a>
## [v0.13.15] - 2020-04-20
<a name="v0.13.14"></a>
## [v0.13.14] - 2020-04-19
<a name="v0.13.13"></a>
## [v0.13.13] - 2020-04-19
<a name="v0.13.12"></a>
## [v0.13.12] - 2020-04-19
<a name="v0.13.11"></a>
## [v0.13.11] - 2020-04-18
<a name="v0.13.10"></a>
## [v0.13.10] - 2020-04-17
<a name="v0.13.9"></a>
## [v0.13.9] - 2020-04-16
<a name="v0.13.8"></a>
## [v0.13.8] - 2020-04-16
<a name="v0.13.7"></a>
## [v0.13.7] - 2020-04-16
<a name="v0.13.6"></a>
## [v0.13.6] - 2020-04-13
<a name="v0.13.5"></a>
## [v0.13.5] - 2020-04-13
<a name="v0.13.4"></a>
## [v0.13.4] - 2020-04-12
<a name="v0.13.3"></a>
## [v0.13.3] - 2020-04-12
<a name="v0.13.2"></a>
## [v0.13.2] - 2020-04-11
<a name="v0.13.1"></a>
## [v0.13.1] - 2020-04-11
<a name="v0.13.0"></a>
## [v0.13.0] - 2020-04-10
<a name="v0.12.49"></a>
## [v0.12.49] - 2020-04-01
<a name="v0.12.48"></a>
## [v0.12.48] - 2020-03-31
<a name="v0.12.47"></a>
## [v0.12.47] - 2020-03-30
<a name="v0.12.46"></a>
## [v0.12.46] - 2020-03-21
<a name="v0.12.45"></a>
## [v0.12.45] - 2020-03-18
<a name="v0.12.44"></a>
## [v0.12.44] - 2020-03-16
<a name="v0.12.43"></a>
## [v0.12.43] - 2020-03-16
<a name="v0.12.42"></a>
## [v0.12.42] - 2020-03-14
<a name="v0.12.41"></a>
## [v0.12.41] - 2020-03-06
<a name="v0.12.40"></a>
## [v0.12.40] - 2020-03-06
<a name="v0.12.39"></a>
## [v0.12.39] - 2020-03-06
<a name="v0.12.38"></a>
## [v0.12.38] - 2020-03-05
<a name="v0.12.37"></a>
## [v0.12.37] - 2020-03-04
<a name="v0.12.36"></a>
## [v0.12.36] - 2020-03-04
<a name="v0.12.35"></a>
## [v0.12.35] - 2020-03-03
<a name="v0.12.34"></a>
## [v0.12.34] - 2020-03-03
<a name="v0.12.33"></a>
## [v0.12.33] - 2020-02-29
<a name="v0.12.32"></a>
## [v0.12.32] - 2020-02-24
### Bug Fixes
- fix "Try the demo app" in docs ([#38](https://github.com/dosco/super-graph/issues/38))
<a name="v0.12.31"></a>
## [v0.12.31] - 2020-02-23
<a name="v0.12.30"></a>
## [v0.12.30] - 2020-02-23
<a name="v0.12.29"></a>
## [v0.12.29] - 2020-02-21
<a name="v0.12.28"></a>
## [v0.12.28] - 2020-02-20
<a name="v0.12.27"></a>
## [v0.12.27] - 2020-02-19
<a name="v0.12.26"></a>
## [v0.12.26] - 2020-02-11
<a name="v0.12.25"></a>
## [v0.12.25] - 2020-02-10
<a name="v0.12.24"></a>
## [v0.12.24] - 2020-02-03
<a name="v0.12.23"></a>
## [v0.12.23] - 2020-02-02
<a name="v0.12.22"></a>
## [v0.12.22] - 2020-02-01
<a name="v0.12.21"></a>
## [v0.12.21] - 2020-01-31
<a name="v0.12.20"></a>
## [v0.12.20] - 2020-01-28
<a name="v0.12.19"></a>
## [v0.12.19] - 2020-01-26
<a name="v0.12.18"></a>
## [v0.12.18] - 2020-01-20
<a name="v0.12.17"></a>
## [v0.12.17] - 2020-01-20
<a name="v0.12.16"></a>
## [v0.12.16] - 2020-01-19
<a name="v0.12.15"></a>
## [v0.12.15] - 2020-01-17
<a name="v0.12.14"></a>
## [v0.12.14] - 2020-01-17
<a name="v0.12.13"></a>
## [v0.12.13] - 2020-01-16
<a name="v0.12.12"></a>
## [v0.12.12] - 2020-01-15
<a name="v0.12.11"></a>
## [v0.12.11] - 2020-01-14
<a name="v0.12.10"></a>
## [v0.12.10] - 2020-01-14
<a name="v0.12.9"></a>
## [v0.12.9] - 2020-01-14
<a name="v0.12.8"></a>
## [v0.12.8] - 2020-01-13
<a name="v0.12.7"></a>
## [v0.12.7] - 2020-01-11
### Pull Requests
- Merge pull request [#22](https://github.com/dosco/super-graph/issues/22) from bhaskarmurthy/fix-grammer-syntax
<a name="v0.12.6"></a>
## [v0.12.6] - 2019-12-02
### Add
- Add support for `websearch_to_tsquery` in PG 11
<a name="v0.12.5"></a>
## [v0.12.5] - 2019-11-30
### Add
- Add a guide to the internals of the codebase
- Add a CONTRIBUTING.md guide for contributors
- Add a CHANGELOG.md
- Add issue templates
### Fix
- Fix for missing filters on nested selectors
### Refactor
- Refactor rename `Select.Table` to `Select.Name`
<a name="v0.12.4"></a>
## [v0.12.4] - 2019-11-28
### Move
- Move license from MIT to Apache 2.0. Add Makefile
<a name="v0.12.3"></a>
## [v0.12.3] - 2019-11-26
### Added
- Added support for query names to the allow.list
<a name="v0.12.2"></a>
## [v0.12.2] - 2019-11-25
### Fix
- Fix bug with compiling anon queries
<a name="v0.12.1"></a>
## [v0.12.1] - 2019-11-22
### Move
- Move sql query logging from info to debug
<a name="v0.12.0"></a>
## [v0.12.0] - 2019-11-22
### Use
- Use logger error instead of panic in goja handlers
<a name="v0.11.9"></a>
## [v0.11.9] - 2019-11-22
### Add
- Add a db:reset command only for dev mode
<a name="v0.11.8"></a>
## [v0.11.8] - 2019-11-21
### Optimize
- Optimize db queries limit use of transactions
<a name="v0.11.7"></a>
## [v0.11.7] - 2019-11-19
### Added
- Added support for multi-root queries
<a name="v0.11.6"></a>
## [v0.11.6] - 2019-11-15
### Fix
- Fix issues with JWT auth
- Fix bug with migration filename generation
- Fix bug with migration file name
<a name="v0.11.5"></a>
## [v0.11.5] - 2019-11-10
### Fix
- Fix bug with migration template name
<a name="v0.11.4"></a>
## [v0.11.4] - 2019-11-10
### Fix
- Fix bug with creating new migrations
<a name="v0.11.3"></a>
## [v0.11.3] - 2019-11-09
### Fix
- Fix macro syntax bug in app templates
<a name="v0.11.2"></a>
## [v0.11.2] - 2019-11-07
### Fix
- Fix bugs and add new production mode
<a name="v0.11.1"></a>
## [v0.11.1] - 2019-11-05
### Add
- Add nested where clause to filter based on related tables
### Block
- Block unauthorized requests when 'anon' role is not defined
### Update
- Update docs and website with new features
<a name="v0.11"></a>
## [v0.11] - 2019-11-01
### Add
- Add config driven presets for insert, update and upsert
- Add RBAC option to disable functions eg. count
- Add fuzz testing to 'serv' for the GQL hash parser
- Add fuzz testing to 'jsn' and 'qcode'
- Add ability to block queries and mutations by role
- Add built in 'anon' and 'user' roles
- Add role based access control
### Allow
- Allow config files to inherit from other config files
### Change
- Change config key inherit to inherits
### Get
- Get RBAC working for queries and mutations
### Optimize
- Optimize prepared statement flow for RBAC
### Preserve
- Preserve allow.list ordering on save
### Update
- Update filters section in guide
### Pull Requests
- Merge pull request [#11](https://github.com/dosco/super-graph/issues/11) from dosco/rbac
<a name="v0.10.1"></a>
## [v0.10.1] - 2019-10-06
### Add
- Add ability to set filters per operation / action
- Add upsert mutation
### Pull Requests
- Merge pull request [#10](https://github.com/dosco/super-graph/issues/10) from FourSigma/sm-examples-folder
<a name="v0.10"></a>
## [v0.10] - 2019-10-04
### Fix
- Fix return values for bulk mutations and delete
- Fix issues with mutation SQL
- Fix broken demo app
- Fix typo in 'across'
### Remove
- Remove extra link from README
### Update
- Update docs, getting started guide and mutations
### Pull Requests
- Merge pull request [#6](https://github.com/dosco/super-graph/issues/6) from muesli/typo-fixes
<a name="v0.9"></a>
## [v0.9] - 2019-10-01
### Fix
- Fix demo rails app broken build
<a name="v0.8"></a>
## [v0.8] - 2019-09-30
### Fix
- Fix invalid import bug
### Update
- Update documentation site
<a name="v0.7"></a>
## [v0.7] - 2019-09-29
### Failure
- Failure to prepare statements should be a warning
### Fix
- Fix duplicate column bug
<a name="v0.6"></a>
## [v0.6] - 2019-09-29
### Add
- Add database setup commands
- Add binary compression back to Dockerfile
- Add initialization command to setup new apps
- Add migrate command
- Add database seeding capability
- Add session variable for user id
- Add delete mutation
- Add update mutation
- Add insert mutation with bulk insert
- Add GoTO Aug, 19 presentation
- Add support for prepared statements
- Add end-to-end benchmarking
- Add object pooling for parser expressions
- Add request / response debugging for remote joins
- Add a presentation about GraphQL
- Add validation for remote JSON
- Add tracing for API stitching
- Add REST API stitching
- Add SQL query cacheing
- Add support for GraphQL variables
- Add fuzz testing to qcode
- Add test for Rails Redis cookie store integration
- Add an install guide
### Change
- Change fuzz test name to qcode
- Change logo from PNG to SVG
### Enable
- Enable reload on config change
### Fix
- Fix missing config name bug
- Fix new app templates
- Fix help message for migrate
- Fix session variable bug
- Fix test failures in `psql` and `serv`
- Fix demo docker services startup order
- Fix wrong value for false token bug. Reported by [@ThisIsMissEm](https://github.com/ThisIsMissEm)
- Fix allow.list file discovery bug
- Fix bug with allow list path
- Fix wrong value for use_allow_list in dev config
- Fix startup bug in demo script
- Fix url bug in allow list
- Fix bug [#676](https://github.com/dosco/super-graph/issues/676) found by fuzzer
- Fix race-condition in remote joins
- Fix cookie passing in web ui
- Fix bug with passing cookies in web ui
- Fix null pointer with invalid argument values
- Fix infinite loop bug in lexer
- Fix null pointer issue found by fuzz test
- Fix issue with fuzzbuzz config
- Fix demo to run as memory only
- Fix auth documentation
- Fix issue with web ui sizing
- Fix issue preventing docker-compose deploy
- Fix try demo documentation
### Further
- Further reduce allocations across hot paths
- Further reduce allocations on the compiler hot path
- Further optimize json parsing and editing performance
### Highlight
- Highlight top features better on the site
### Improve
- Improve readability of json parser code
- Improve the motivation section in the readme
- Improve the demo experience
### Make
- Make remote joins use parallel http requests
### Merge
- Merge branch 'master' into optimize-psql
### New
- New low allocation fast json parsing and editing library
### Optimize
- Optimize lexer and fix bugs
- Optimize the sql generator hot path
### Reduce
- Reduce allocations done by the stack
- Reduce steps to run the demo
- Reduce allocations and improve perf over 50%
### Remove
- Remove unused packages
- Remove the 'hello' test app folder
- Remove other allocations in psql
### Use
- Use hashes as ids for table relationships
### Watch
- Watch and reload on config changes
<a name="v0.5"></a>
## [v0.5] - 2019-04-10
### Add
- Add support for new Rails 5.2 aes-256-gcm cookies
- Add query support for ts_rank and ts_headline
- Add full text search support using TSV indexes
- Add missing assets folder
- Add fetch by ID feature
- Add documentation
### Cleanup
- Cleanup and redesign config files
### Fix
- Fix bug with auth config parsing
### Redesign
- Redesign config file architecture
### Reduce
- Reduce realloc of maps and slices
### Update
- Update docs with full-text search information
<a name="v0.4"></a>
## [v0.4] - 2019-04-01
<a name="v0.3"></a>
## [v0.3] - 2019-04-01
### Add
- Add SQL execution timing and tracing
- Add support for HAVING with aggregate queries
- Add aggregate functions to GQL queries
- Add Auth0 JWT support
- Add React UI building to the docker build flow
- Add compiler profiling
- Add benchmarks for GQL to SQL compile
- Add tests for gql to sql compile
### Cleanup
- Cleanup Dockerfile
### Fix
- Fix recurring packer issue docker hub builds
- Fix issue with asset packer breaking Docker builds
- Fix missing git package in Dockerfile
- Fix docker ignore values
- Fix image build failure on docker hub
- Fix build issue in Dockerfile
- Fix bugs and document the 'where' clause
- Fix perf issue with inflections
### Optimize
- Optimize docker image
### Pack
- Pack web UI with app into a single binary
### Upgrade
- Upgrade web UI packages
<a name="0.3"></a>
## 0.3 - 2019-03-24
### First
- First commit
### Fix
- Fix license to MIT
[Unreleased]: https://github.com/dosco/super-graph/compare/v0.12.6...HEAD
[Unreleased]: https://github.com/dosco/super-graph/compare/v0.13.22...HEAD
[v0.13.22]: https://github.com/dosco/super-graph/compare/v0.13.21...v0.13.22
[v0.13.21]: https://github.com/dosco/super-graph/compare/v0.13.20...v0.13.21
[v0.13.20]: https://github.com/dosco/super-graph/compare/v0.13.19...v0.13.20
[v0.13.19]: https://github.com/dosco/super-graph/compare/v0.13.18...v0.13.19
[v0.13.18]: https://github.com/dosco/super-graph/compare/v0.13.17...v0.13.18
[v0.13.17]: https://github.com/dosco/super-graph/compare/v0.13.16...v0.13.17
[v0.13.16]: https://github.com/dosco/super-graph/compare/v0.13.15...v0.13.16
[v0.13.15]: https://github.com/dosco/super-graph/compare/v0.13.14...v0.13.15
[v0.13.14]: https://github.com/dosco/super-graph/compare/v0.13.13...v0.13.14
[v0.13.13]: https://github.com/dosco/super-graph/compare/v0.13.12...v0.13.13
[v0.13.12]: https://github.com/dosco/super-graph/compare/v0.13.11...v0.13.12
[v0.13.11]: https://github.com/dosco/super-graph/compare/v0.13.10...v0.13.11
[v0.13.10]: https://github.com/dosco/super-graph/compare/v0.13.9...v0.13.10
[v0.13.9]: https://github.com/dosco/super-graph/compare/v0.13.8...v0.13.9
[v0.13.8]: https://github.com/dosco/super-graph/compare/v0.13.7...v0.13.8
[v0.13.7]: https://github.com/dosco/super-graph/compare/v0.13.6...v0.13.7
[v0.13.6]: https://github.com/dosco/super-graph/compare/v0.13.5...v0.13.6
[v0.13.5]: https://github.com/dosco/super-graph/compare/v0.13.4...v0.13.5
[v0.13.4]: https://github.com/dosco/super-graph/compare/v0.13.3...v0.13.4
[v0.13.3]: https://github.com/dosco/super-graph/compare/v0.13.2...v0.13.3
[v0.13.2]: https://github.com/dosco/super-graph/compare/v0.13.1...v0.13.2
[v0.13.1]: https://github.com/dosco/super-graph/compare/v0.13.0...v0.13.1
[v0.13.0]: https://github.com/dosco/super-graph/compare/v0.12.49...v0.13.0
[v0.12.49]: https://github.com/dosco/super-graph/compare/v0.12.48...v0.12.49
[v0.12.48]: https://github.com/dosco/super-graph/compare/v0.12.47...v0.12.48
[v0.12.47]: https://github.com/dosco/super-graph/compare/v0.12.46...v0.12.47
[v0.12.46]: https://github.com/dosco/super-graph/compare/v0.12.45...v0.12.46
[v0.12.45]: https://github.com/dosco/super-graph/compare/v0.12.44...v0.12.45
[v0.12.44]: https://github.com/dosco/super-graph/compare/v0.12.43...v0.12.44
[v0.12.43]: https://github.com/dosco/super-graph/compare/v0.12.42...v0.12.43
[v0.12.42]: https://github.com/dosco/super-graph/compare/v0.12.41...v0.12.42
[v0.12.41]: https://github.com/dosco/super-graph/compare/v0.12.40...v0.12.41
[v0.12.40]: https://github.com/dosco/super-graph/compare/v0.12.39...v0.12.40
[v0.12.39]: https://github.com/dosco/super-graph/compare/v0.12.38...v0.12.39
[v0.12.38]: https://github.com/dosco/super-graph/compare/v0.12.37...v0.12.38
[v0.12.37]: https://github.com/dosco/super-graph/compare/v0.12.36...v0.12.37
[v0.12.36]: https://github.com/dosco/super-graph/compare/v0.12.35...v0.12.36
[v0.12.35]: https://github.com/dosco/super-graph/compare/v0.12.34...v0.12.35
[v0.12.34]: https://github.com/dosco/super-graph/compare/v0.12.33...v0.12.34
[v0.12.33]: https://github.com/dosco/super-graph/compare/v0.12.32...v0.12.33
[v0.12.32]: https://github.com/dosco/super-graph/compare/v0.12.31...v0.12.32
[v0.12.31]: https://github.com/dosco/super-graph/compare/v0.12.30...v0.12.31
[v0.12.30]: https://github.com/dosco/super-graph/compare/v0.12.29...v0.12.30
[v0.12.29]: https://github.com/dosco/super-graph/compare/v0.12.28...v0.12.29
[v0.12.28]: https://github.com/dosco/super-graph/compare/v0.12.27...v0.12.28
[v0.12.27]: https://github.com/dosco/super-graph/compare/v0.12.26...v0.12.27
[v0.12.26]: https://github.com/dosco/super-graph/compare/v0.12.25...v0.12.26
[v0.12.25]: https://github.com/dosco/super-graph/compare/v0.12.24...v0.12.25
[v0.12.24]: https://github.com/dosco/super-graph/compare/v0.12.23...v0.12.24
[v0.12.23]: https://github.com/dosco/super-graph/compare/v0.12.22...v0.12.23
[v0.12.22]: https://github.com/dosco/super-graph/compare/v0.12.21...v0.12.22
[v0.12.21]: https://github.com/dosco/super-graph/compare/v0.12.20...v0.12.21
[v0.12.20]: https://github.com/dosco/super-graph/compare/v0.12.19...v0.12.20
[v0.12.19]: https://github.com/dosco/super-graph/compare/v0.12.18...v0.12.19
[v0.12.18]: https://github.com/dosco/super-graph/compare/v0.12.17...v0.12.18
[v0.12.17]: https://github.com/dosco/super-graph/compare/v0.12.16...v0.12.17
[v0.12.16]: https://github.com/dosco/super-graph/compare/v0.12.15...v0.12.16
[v0.12.15]: https://github.com/dosco/super-graph/compare/v0.12.14...v0.12.15
[v0.12.14]: https://github.com/dosco/super-graph/compare/v0.12.13...v0.12.14
[v0.12.13]: https://github.com/dosco/super-graph/compare/v0.12.12...v0.12.13
[v0.12.12]: https://github.com/dosco/super-graph/compare/v0.12.11...v0.12.12
[v0.12.11]: https://github.com/dosco/super-graph/compare/v0.12.10...v0.12.11
[v0.12.10]: https://github.com/dosco/super-graph/compare/v0.12.9...v0.12.10
[v0.12.9]: https://github.com/dosco/super-graph/compare/v0.12.8...v0.12.9
[v0.12.8]: https://github.com/dosco/super-graph/compare/v0.12.7...v0.12.8
[v0.12.7]: https://github.com/dosco/super-graph/compare/v0.12.6...v0.12.7
[v0.12.6]: https://github.com/dosco/super-graph/compare/v0.12.5...v0.12.6
[v0.12.5]: https://github.com/dosco/super-graph/compare/v0.12.4...v0.12.5
[v0.12.4]: https://github.com/dosco/super-graph/compare/v0.12.3...v0.12.4

View File

@ -22,7 +22,7 @@ BUILD_FLAGS ?= -ldflags '-s -w -X ${lastCommitSHA}=${BUILD} -X "${lastCommitTime
.PHONY: all build gen clean test run lint changlog release version help $(PLATFORMS)
test:
@go test -v ./...
@go test -v -short -race ./...
BIN_DIR := $(GOPATH)/bin
GORICE := $(BIN_DIR)/rice
@ -39,13 +39,13 @@ $(WEB_BUILD_DIR):
@exit 1
$(GITCHGLOG):
@GO111MODULE=off go get -u github.com/git-chglog/git-chglog/git-chglog
@GO111MODULE=off go get -u github.com/git-chglog/git-chglog/cmd/git-chglog
changelog: $(GITCHGLOG)
@git-chglog $(ARGS)
$(GOLANGCILINT):
@GO111MODULE=off curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s -- -b $(GOPATH)/bin v1.21.0
@GO111MODULE=off curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh| sh -s -- -b $(GOPATH)/bin v1.25.1
lint: $(GOLANGCILINT)
@golangci-lint run ./... --skip-dirs-use-default

View File

@ -45,6 +45,13 @@ cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
cors_debug: true
# Default API path prefix is /api you can change it if you like
# api_path: "/data"
# Cache-Control header can help cache queries if your CDN supports cache-control
# on POST requests (does not apply to mutations)
# cache_control: "public, max-age=300, s-maxage=600"
# Postgres related environment Variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT

View File

@ -174,7 +174,7 @@ func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMess
// use the chirino/graphql library for introspection queries
// disabled when allow list is enforced
if !sg.conf.UseAllowList && res.name == "IntrospectionQuery" {
r := sg.ge.ExecuteOne(&graphql.EngineRequest{Query: query})
r := sg.ge.ServeGraphQL(&graphql.Request{Query: query})
res.Data = r.Data
if r.Error() != nil {

View File

@ -9,6 +9,8 @@ import (
"github.com/dosco/super-graph/jsn"
)
// argMap function is used to string replace variables with values by
// the fasttemplate code
func (c *scontext) argMap() func(w io.Writer, tag string) (int, error) {
return func(w io.Writer, tag string) (int, error) {
switch tag {
@ -56,10 +58,13 @@ func (c *scontext) argMap() func(w io.Writer, tag string) (int, error) {
return w.Write(v1)
}
return w.Write(escQuote(fields[0].Value))
return w.Write(escSQuote(fields[0].Value))
}
}
// argList function is used to create a list of arguments to pass
// to a prepared statement. FYI no escaping of single quotes is
// needed here
func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
vars := make([]interface{}, len(args))
@ -113,7 +118,7 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
if v, ok := fields[string(av)]; ok {
switch v[0] {
case '[', '{':
vars[i] = escQuote(v)
vars[i] = v
default:
var val interface{}
if err := json.Unmarshal(v, &val); err != nil {
@ -132,27 +137,25 @@ func (c *scontext) argList(args [][]byte) ([]interface{}, error) {
return vars, nil
}
func escQuote(b []byte) []byte {
f := false
for i := range b {
if b[i] == '\'' {
f = true
break
}
}
if !f {
return b
}
buf := &bytes.Buffer{}
//
func escSQuote(b []byte) []byte {
var buf *bytes.Buffer
s := 0
for i := range b {
if b[i] == '\'' {
if buf == nil {
buf = &bytes.Buffer{}
}
buf.Write(b[s:i])
buf.WriteString(`''`)
s = i + 1
}
}
if buf == nil {
return b
}
l := len(b)
if s < (l - 1) {
buf.Write(b[s:l])

core/args_test.go Normal file
View File

@ -0,0 +1,13 @@
package core
import "testing"
func TestEscQuote(t *testing.T) {
val := "That's the worst, don''t be calling me's again"
exp := "That''s the worst, don''''t be calling me''s again"
ret := escSQuote([]byte(val))
if exp != string(ret) {
t.Errorf("escSQuote failed: %s", string(ret))
}
}
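For reference, the doubling of single quotes seen above is the standard way to escape string literals in Postgres SQL. Below is a minimal sketch of the same rule using only the standard library; it is an illustration, not the repository's helper, and, like escSQuote in the test above, it doubles already-doubled quotes again:

package main

import (
	"fmt"
	"strings"
)

// escape doubles every single quote, mirroring the behaviour exercised by
// TestEscQuote above.
func escape(s string) string {
	return strings.ReplaceAll(s, "'", "''")
}

func main() {
	fmt.Println(escape("That's the worst, don''t be calling me's again"))
	// prints: That''s the worst, don''''t be calling me''s again
}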

View File

@ -167,16 +167,16 @@ func (sg *SuperGraph) renderUserQuery(stmts []stmt) (string, error) {
return w.String(), nil
}
func (sg *SuperGraph) hasTablesWithConfig(qc *qcode.QCode, role *Role) bool {
for _, id := range qc.Roots {
t, err := sg.schema.GetTable(qc.Selects[id].Name)
if err != nil {
return false
}
// func (sg *SuperGraph) hasTablesWithConfig(qc *qcode.QCode, role *Role) bool {
// for _, id := range qc.Roots {
// t, err := sg.schema.GetTable(qc.Selects[id].Name)
// if err != nil {
// return false
// }
if r := role.GetTable(t.Name); r == nil {
return false
}
}
return true
}
// if r := role.GetTable(t.Name); r == nil {
// return false
// }
// }
// return true
// }

View File

@ -3,6 +3,7 @@ package core
import (
"fmt"
"path"
"path/filepath"
"strings"
"github.com/spf13/viper"
@ -195,9 +196,13 @@ func newViper(configPath, configFile string) *viper.Viper {
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
vi.AutomaticEnv()
vi.SetConfigName(configFile)
vi.AddConfigPath(configPath)
vi.AddConfigPath("./config")
if len(filepath.Ext(configFile)) != 0 {
vi.SetConfigFile(configFile)
} else {
vi.SetConfigName(configFile)
vi.AddConfigPath(configPath)
vi.AddConfigPath("./config")
}
return vi
}
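The change above corresponds to the commit "fix: bug with reading config file by name": a name that carries a file extension is now treated as an explicit config file, while a bare name is searched for in the config directories. A hedged sketch of the same viper usage as a standalone program (the surrounding Super Graph wiring is assumed, not shown here):

package main

import (
	"fmt"
	"path/filepath"

	"github.com/spf13/viper"
)

// loadConfig mirrors the logic in newViper: an explicit file such as
// "./config/dev.yml" is loaded directly, while a bare name such as "dev"
// is searched for in configPath and ./config.
func loadConfig(configPath, configFile string) (*viper.Viper, error) {
	vi := viper.New()
	if filepath.Ext(configFile) != "" {
		vi.SetConfigFile(configFile)
	} else {
		vi.SetConfigName(configFile)
		vi.AddConfigPath(configPath)
		vi.AddConfigPath("./config")
	}
	if err := vi.ReadInConfig(); err != nil {
		return nil, err
	}
	return vi, nil
}

func main() {
	if _, err := loadConfig("./config", "dev"); err != nil {
		fmt.Println("config not loaded:", err)
	}
}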

View File

@ -14,6 +14,11 @@ import (
"github.com/valyala/fasttemplate"
)
const (
OpQuery int = iota
OpMutation
)
type extensions struct {
Tracing *trace `json:"tracing,omitempty"`
}
@ -329,7 +334,20 @@ func (c *scontext) executeRoleQuery(tx *sql.Tx) (string, error) {
return role, nil
}
func (r *Result) Operation() string {
func (r *Result) Operation() int {
switch r.op {
case qcode.QTQuery:
return OpQuery
case qcode.QTMutation, qcode.QTInsert, qcode.QTUpdate, qcode.QTUpsert, qcode.QTDelete:
return OpMutation
default:
return -1
}
}
func (r *Result) OperationName() string {
return r.op.String()
}
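With this change Result reports the operation kind as an integer constant (OpQuery, OpMutation, or -1) in addition to its name. A hedged usage sketch; how the *core.Result is produced (via sg.GraphQL) is assumed from context rather than shown in this diff:

package example

import (
	"fmt"

	"github.com/dosco/super-graph/core"
)

// HandleResult branches on the new integer operation codes, e.g. to decide
// whether a response may be cached.
func HandleResult(res *core.Result) {
	switch res.Operation() {
	case core.OpQuery:
		fmt.Println("query:", res.OperationName()) // safe to cache
	case core.OpMutation:
		fmt.Println("mutation:", res.OperationName()) // never cache
	default:
		fmt.Println("unknown operation kind")
	}
}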

View File

@ -9,6 +9,8 @@ import (
"os"
"sort"
"strings"
"github.com/dosco/super-graph/jsn"
)
const (
@ -230,6 +232,8 @@ func (al *List) Load() ([]Item, error) {
}
func (al *List) save(item Item) error {
var buf bytes.Buffer
item.Name = QueryName(item.Query)
item.key = strings.ToLower(item.Name)
@ -298,9 +302,16 @@ func (al *List) save(item Item) error {
}
if len(v.Vars) != 0 && !bytes.Equal(v.Vars, []byte("{}")) {
vj, err := json.MarshalIndent(v.Vars, "", " ")
buf.Reset()
if err := jsn.Clear(&buf, v.Vars); err != nil {
return fmt.Errorf("failed to clean vars: %w", err)
}
vj := json.RawMessage(buf.Bytes())
vj, err = json.MarshalIndent(vj, "", " ")
if err != nil {
return fmt.Errorf("failed to marshal vars: %v", err)
return fmt.Errorf("failed to marshal vars: %w", err)
}
_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
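Per the commit "feat: remove data from variables saved to allow.list", variable values are now blanked with jsn.Clear before being written out, so the allow list records the shape of the variables without user data. A hedged sketch of that call in isolation; the exact form of the cleared output is an assumption:

package example

import (
	"bytes"
	"fmt"

	"github.com/dosco/super-graph/jsn"
)

// clearVars strips values from a variables document before it is persisted,
// following the save path shown above.
func clearVars(vars []byte) ([]byte, error) {
	var buf bytes.Buffer
	if err := jsn.Clear(&buf, vars); err != nil {
		return nil, fmt.Errorf("failed to clean vars: %w", err)
	}
	return buf.Bytes(), nil
}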

View File

@ -40,8 +40,12 @@ func TestCockroachDB(t *testing.T) {
stopDatabase := func() {
fmt.Println("stopping temporary cockroach db")
if atomic.CompareAndSwapInt32(&stopped, 0, 1) {
cmd.Process.Kill()
cmd.Process.Wait()
if err := cmd.Process.Kill(); err != nil {
log.Fatal(err)
}
if _, err := cmd.Process.Wait(); err != nil {
log.Fatal(err)
}
os.RemoveAll(dir)
}
}

View File

@ -921,8 +921,6 @@ func (c *compilerContext) renderExp(ex *qcode.Exp, ti *DBTableInfo, skipNested b
st.Push('(')
case qcode.OpNot:
//fmt.Printf("1> %s %d %s %s\n", val.Op, len(val.Children), val.Children[0].Op, val.Children[1].Op)
st.Push(val.Children[0])
st.Push(qcode.OpNot)

View File

@ -382,7 +382,7 @@ func (s *DBSchema) updateSchemaOTMT(
func (s *DBSchema) GetTableNames() []string {
var names []string
for name, _ := range s.t {
for name := range s.t {
names = append(names, name)
}
return names

View File

@ -260,7 +260,7 @@ type DBFunction struct {
type DBFuncParam struct {
ID int
Name string
Name sql.NullString
Type string
}
@ -291,6 +291,7 @@ ORDER BY
var funcs []DBFunction
fm := make(map[string]int)
parameterIndex := 1
for rows.Next() {
var fn, fid string
fp := DBFuncParam{}
@ -300,12 +301,18 @@ ORDER BY
return nil, err
}
if !fp.Name.Valid {
fp.Name.String = string(parameterIndex)
fp.Name.Valid = true
}
if i, ok := fm[fid]; ok {
funcs[i].Params = append(funcs[i].Params, fp)
} else {
funcs = append(funcs, DBFunction{Name: fn, Params: []DBFuncParam{fp}})
fm[fid] = len(funcs) - 1
}
parameterIndex++
}
return funcs, nil

View File

@ -0,0 +1,17 @@
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/core/internal/qcode
BenchmarkQCompile
BenchmarkQCompile-16 129614 8649 ns/op 3756 B/op 28 allocs/op
BenchmarkQCompileP
BenchmarkQCompileP-16 487488 2525 ns/op 3792 B/op 28 allocs/op
BenchmarkParse
BenchmarkParse-16 127582 8731 ns/op 3902 B/op 18 allocs/op
BenchmarkParseP
BenchmarkParseP-16 561373 2223 ns/op 3903 B/op 18 allocs/op
BenchmarkSchemaParse
BenchmarkSchemaParse-16 209142 5523 ns/op 3968 B/op 57 allocs/op
BenchmarkSchemaParseP
BenchmarkSchemaParseP-16 716437 1734 ns/op 3968 B/op 57 allocs/op
PASS
ok github.com/dosco/super-graph/core/internal/qcode 8.483s

View File

@ -602,7 +602,7 @@ func (t parserType) String() string {
// nodePool.Put(n)
// freeList = append(freeList, Frees{n, loc})
// } else {
// fmt.Printf(">>>>(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// fmt.Printf("(%d) RE_FREE %d %p %s %s\n", loc, freeList[j].loc, freeList[j].n, n.Name, n.Type)
// }
// }

View File

@ -2,6 +2,7 @@ package qcode
import (
"errors"
"github.com/chirino/graphql/schema"
"testing"
)
@ -130,7 +131,7 @@ updateThread {
}
var gql = []byte(`
products(
{products(
# returns only 30 items
limit: 30,
@ -148,7 +149,7 @@ var gql = []byte(`
id
name
price
}`)
}}`)
func BenchmarkQCompile(b *testing.B) {
qcompile, _ := NewCompiler(Config{})
@ -181,3 +182,59 @@ func BenchmarkQCompileP(b *testing.B) {
}
})
}
func BenchmarkParse(b *testing.B) {
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
_, err := Parse(gql)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkParseP(b *testing.B) {
b.ResetTimer()
b.ReportAllocs()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
_, err := Parse(gql)
if err != nil {
b.Fatal(err)
}
}
})
}
func BenchmarkSchemaParse(b *testing.B) {
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
doc := schema.QueryDocument{}
err := doc.Parse(string(gql))
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkSchemaParseP(b *testing.B) {
b.ResetTimer()
b.ReportAllocs()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
doc := schema.QueryDocument{}
err := doc.Parse(string(gql))
if err != nil {
b.Fatal(err)
}
}
})
}

View File

@ -29,19 +29,16 @@ func (sg *SuperGraph) initGraphQLEgine() error {
engineSchema := engine.Schema
dbSchema := sg.schema
engineSchema.Parse(`
enum OrderDirection {
asc
desc
}
`)
if err := engineSchema.Parse(`enum OrderDirection { asc desc }`); err != nil {
return err
}
gqltype := func(col psql.DBColumn) schema.Type {
typeName := typeMap[strings.ToLower(col.Type)]
if typeName == "" {
typeName = "String"
}
var t schema.Type = &schema.TypeName{Ident: schema.Ident{Text: typeName}}
var t schema.Type = &schema.TypeName{Name: typeName}
if col.NotNull {
t = &schema.NonNull{OfType: t}
}
@ -109,16 +106,16 @@ enum OrderDirection {
Name: expressionTypeName,
Fields: schema.InputValueList{
&schema.InputValue{
Name: schema.Ident{Text: "and"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
Name: "and",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: expressionTypeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "or"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
Name: "or",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: expressionTypeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "not"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
Name: "not",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: expressionTypeName}},
},
},
}
@ -133,9 +130,9 @@ enum OrderDirection {
colType := gqltype(col)
nullableColType := ""
if x, ok := colType.(*schema.NonNull); ok {
nullableColType = x.OfType.(*schema.TypeName).Ident.Text
nullableColType = x.OfType.(*schema.TypeName).Name
} else {
nullableColType = colType.(*schema.TypeName).Ident.Text
nullableColType = colType.(*schema.TypeName).Name
}
outputType.Fields = append(outputType.Fields, &schema.Field{
@ -198,67 +195,67 @@ enum OrderDirection {
}
inputType.Fields = append(inputType.Fields, &schema.InputValue{
Name: schema.Ident{Text: colName},
Name: colName,
Type: colType,
})
orderByType.Fields = append(orderByType.Fields, &schema.InputValue{
Name: schema.Ident{Text: colName},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "OrderDirection"}}},
Name: colName,
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "OrderDirection"}},
})
scalarExpressionTypesNeeded[nullableColType] = true
expressionType.Fields = append(expressionType.Fields, &schema.InputValue{
Name: schema.Ident{Text: colName},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: nullableColType + "Expression"}}},
Name: colName,
Type: &schema.NonNull{OfType: &schema.TypeName{Name: nullableColType + "Expression"}},
})
}
outputTypeName := &schema.TypeName{Ident: schema.Ident{Text: outputType.Name}}
inputTypeName := &schema.TypeName{Ident: schema.Ident{Text: inputType.Name}}
pluralOutputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: outputType.Name}}}}}
pluralInputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: inputType.Name}}}}}
outputTypeName := &schema.TypeName{Name: outputType.Name}
inputTypeName := &schema.TypeName{Name: inputType.Name}
pluralOutputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: outputType.Name}}}}
pluralInputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: inputType.Name}}}}
args := schema.InputValueList{
&schema.InputValue{
Desc: &schema.Description{Text: "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."},
Name: schema.Ident{Text: "order_by"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: orderByType.Name}}},
Desc: schema.Description{Text: "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."},
Name: "order_by",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: orderByType.Name}},
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "where"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionType.Name}}},
Desc: schema.Description{Text: ""},
Name: "where",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: expressionType.Name}},
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "limit"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
Desc: schema.Description{Text: ""},
Name: "limit",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Int"}},
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "offset"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
Desc: schema.Description{Text: ""},
Name: "offset",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Int"}},
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "first"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
Desc: schema.Description{Text: ""},
Name: "first",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Int"}},
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "last"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
Desc: schema.Description{Text: ""},
Name: "last",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Int"}},
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "before"},
Type: &schema.TypeName{Ident: schema.Ident{Text: "String"}},
Desc: schema.Description{Text: ""},
Name: "before",
Type: &schema.TypeName{Name: "String"},
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "after"},
Type: &schema.TypeName{Ident: schema.Ident{Text: "String"}},
Desc: schema.Description{Text: ""},
Name: "after",
Type: &schema.TypeName{Name: "String"},
},
}
if ti.PrimaryCol != nil {
@ -267,28 +264,28 @@ enum OrderDirection {
t = &schema.NonNull{OfType: t}
}
args = append(args, &schema.InputValue{
Desc: &schema.Description{Text: "Finds the record by the primary key"},
Name: schema.Ident{Text: "id"},
Desc: schema.Description{Text: "Finds the record by the primary key"},
Name: "id",
Type: t,
})
}
if ti.TSVCol != nil {
args = append(args, &schema.InputValue{
Desc: &schema.Description{Text: "Performs full text search using a TSV index"},
Name: schema.Ident{Text: "search"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Desc: schema.Description{Text: "Performs full text search using a TSV index"},
Name: "search",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
})
}
query.Fields = append(query.Fields, &schema.Field{
Desc: &schema.Description{Text: ""},
Desc: schema.Description{Text: ""},
Name: singularName,
Type: outputTypeName,
Args: args,
})
query.Fields = append(query.Fields, &schema.Field{
Desc: &schema.Description{Text: ""},
Desc: schema.Description{Text: ""},
Name: pluralName,
Type: pluralOutputTypeName,
Args: args,
@ -296,19 +293,19 @@ enum OrderDirection {
mutationArgs := append(args, schema.InputValueList{
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "insert"},
Desc: schema.Description{Text: ""},
Name: "insert",
Type: inputTypeName,
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "update"},
Desc: schema.Description{Text: ""},
Name: "update",
Type: inputTypeName,
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "upsert"},
Desc: schema.Description{Text: ""},
Name: "upsert",
Type: inputTypeName,
},
}...)
@ -322,18 +319,18 @@ enum OrderDirection {
Name: pluralName,
Args: append(mutationArgs, schema.InputValueList{
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "inserts"},
Desc: schema.Description{Text: ""},
Name: "inserts",
Type: pluralInputTypeName,
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "updates"},
Desc: schema.Description{Text: ""},
Name: "updates",
Type: pluralInputTypeName,
},
&schema.InputValue{
Desc: &schema.Description{Text: ""},
Name: schema.Ident{Text: "upserts"},
Desc: schema.Description{Text: ""},
Name: "upserts",
Type: pluralInputTypeName,
},
}...),
@ -341,138 +338,137 @@ enum OrderDirection {
})
}
for typeName, _ := range scalarExpressionTypesNeeded {
for typeName := range scalarExpressionTypesNeeded {
expressionType := &schema.InputObject{
Name: typeName + "Expression",
Fields: schema.InputValueList{
&schema.InputValue{
Name: schema.Ident{Text: "eq"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "eq",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "equals"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "equals",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "neq"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "neq",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "not_equals"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "not_equals",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "gt"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "gt",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "greater_than"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "greater_than",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "lt"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "lt",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "lesser_than"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "lesser_than",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "gte"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "gte",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "greater_or_equals"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "greater_or_equals",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "lte"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "lte",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "lesser_or_equals"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "lesser_or_equals",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "in"},
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
Name: "in",
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
},
&schema.InputValue{
Name: schema.Ident{Text: "nin"},
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
Name: "nin",
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
},
&schema.InputValue{
Name: schema.Ident{Text: "not_in"},
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
Name: "not_in",
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
},
&schema.InputValue{
Name: schema.Ident{Text: "like"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "like",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "nlike"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "nlike",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "not_like"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "not_like",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "ilike"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "ilike",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "nilike"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "nilike",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "not_ilike"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "not_ilike",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "similar"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "similar",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "nsimilar"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "nsimilar",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "not_similar"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "not_similar",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "has_key"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
Name: "has_key",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}},
},
&schema.InputValue{
Name: schema.Ident{Text: "has_key_any"},
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
Name: "has_key_any",
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
},
&schema.InputValue{
Name: schema.Ident{Text: "has_key_all"},
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
Name: "has_key_all",
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
},
&schema.InputValue{
Name: schema.Ident{Text: "contains"},
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
Name: "contains",
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Name: typeName}}}},
},
&schema.InputValue{
Name: schema.Ident{Text: "contained_in"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
Name: "contained_in",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "String"}},
},
&schema.InputValue{
Name: schema.Ident{Text: "is_null"},
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Boolean"}}},
Name: "is_null",
Type: &schema.NonNull{OfType: &schema.TypeName{Name: "Boolean"}},
},
},
}
engineSchema.Types[expressionType.Name] = expressionType
}
err := engineSchema.ResolveTypes()
if err != nil {
if err := engineSchema.ResolveTypes(); err != nil {
return err
}

View File

@ -3,7 +3,7 @@ package core
import (
"bytes"
"context"
"crypto/sha1"
"crypto/sha256"
"database/sql"
"encoding/hex"
"fmt"
@ -122,7 +122,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
if err == psql.ErrAllTablesSkipped {
return nil
continue
}
if err != nil {
return err
@ -251,7 +251,7 @@ func (sg *SuperGraph) initAllowList() error {
// nolint: errcheck
func stmtHash(name string, role string) string {
h := sha1.New()
h := sha256.New()
io.WriteString(h, strings.ToLower(name))
io.WriteString(h, role)
return hex.EncodeToString(h.Sum(nil))

View File

@ -120,20 +120,20 @@ func buildFn(r Remote) func(http.Header, []byte) ([]byte, error) {
}
defer res.Body.Close()
if r.Debug {
// reqDump, err := httputil.DumpRequestOut(req, true)
// if err != nil {
// return nil, err
// }
// if r.Debug {
// reqDump, err := httputil.DumpRequestOut(req, true)
// if err != nil {
// return nil, err
// }
// resDump, err := httputil.DumpResponse(res, true)
// if err != nil {
// return nil, err
// }
// resDump, err := httputil.DumpResponse(res, true)
// if err != nil {
// return nil, err
// }
// logger.Debug().Msgf("Remote Request Debug:\n%s\n%s",
// reqDump, resDump)
}
// logger.Debug().Msgf("Remote Request Debug:\n%s\n%s",
// reqDump, resDump)
// }
if res.StatusCode != 200 {
return nil,

View File

@ -104,7 +104,7 @@ query {
</div>
<div class="text-2xl md:text-3xl">
Super Graph is a library and service that fetches data from any Postgres database using just GraphQL. No more struggling with ORMs and SQL to wrangle data out of the database. No more having to figure out the right joins or making ineffiient queries. However complex the GraphQL, Super Graph will always generate just one single efficient SQL query. The goal is to save you time and money so you can focus on you're apps core value.
Super Graph is a library and service that fetches data from any Postgres database using just GraphQL. No more struggling with ORMs and SQL to wrangle data out of the database. No more having to figure out the right joins or making inefficient queries. However complex the GraphQL, Super Graph will always generate just one single efficient SQL query. The goal is to save you time and money so you can focus on your app's core value.
</div>
</div>
</div>

View File

@ -10,7 +10,7 @@ longTagline: Get an instant high performance GraphQL API for Postgres. No code n
actionText: Get Started, Free, Open Source →
actionLink: /guide
description: Super Graph can automatically learn a Postgres database and instantly serve it as a fast and secured GraphQL API. It comes with tools to create a new app and manage it's database. You get it all, a very productive developer and a highly scalable app backend. It's designed to work well on serverless platforms by Google, AWS, Microsoft, etc. The goal is to save you a ton of time and money so you can focus on you're apps core value.
description: Super Graph can automatically learn a Postgres database and instantly serve it as a fast and secured GraphQL API. It comes with tools to create a new app and manage its database. You get it all, a very productive developer and a highly scalable app backend. It's designed to work well on serverless platforms by Google, AWS, Microsoft, etc. The goal is to save you a ton of time and money so you can focus on your app's core value.
features:
- title: Simple

View File

@ -32,7 +32,7 @@ For this to work you have to ensure that the option `:domain => :all` is added t
### With an NGINX loadbalancer
If you're infrastructure is fronted by NGINX then it should be configured so that all requests to your GraphQL API path are proxyed to Super Graph. In the example NGINX config below all requests to the path `/api/v1/graphql` are routed to wherever you have Super Graph installed within your architecture. This example is derived from the config file example at [/microservices-nginx-gateway/nginx.conf](https://github.com/launchany/microservices-nginx-gateway/blob/master/nginx.conf)
If your infrastructure is fronted by NGINX then it should be configured so that all requests to your GraphQL API path are proxied to Super Graph. In the example NGINX config below all requests to the path `/api/v1/graphql` are routed to wherever you have Super Graph installed within your architecture. This example is derived from the config file example at [/microservices-nginx-gateway/nginx.conf](https://github.com/launchany/microservices-nginx-gateway/blob/master/nginx.conf)
::: tip NGINX with sub-domain
Yes, NGINX is very flexible and you can configure it to keep Super Graph on a subdomain instead of on the same top level domain. I'm sure a little Googling will get you some great example configs for that.

View File

@ -1069,7 +1069,7 @@ mutation {
### Pagination
This is a must have feature of any API. When you want your users to go thought a list page by page or implement some fancy infinite scroll you're going to need pagination. There are two ways to paginate in Super Graph.
This is a must-have feature of any API. When you want your users to go through a list page by page or implement some fancy infinite scroll, you're going to need pagination. There are two ways to paginate in Super Graph.
Limit-Offset
This is simple enough but also inefficient when working with a large number of total items. Limit limits the number of items fetched and offset is the point you want to fetch from. The below query will fetch 10 results at a time starting with the 100th item. You will have to keep updating offset (110, 120, 130, etc.) to walk through the results, so make offset a variable.
@ -1085,7 +1085,7 @@ query {
```
#### Cursor
This is a powerful and highly efficient way to paginate though a large number of results. Infact it does not matter how many total results there are this will always be lighting fast. You can use a cursor to walk forward of backward though the results. If you plan to implement infinite scroll this is the option you should choose.
This is a powerful and highly efficient way to paginate a large number of results. In fact it does not matter how many total results there are; this will always be lightning fast. You can use a cursor to walk forward or backward through the results. If you plan to implement infinite scroll this is the option you should choose.
When going this route the results will contain a cursor value; this is an encrypted string that you don't have to worry about, just pass it back in the next API call and you'll receive the next set of results. The cursor value is encrypted since its contents should only matter to Super Graph and not the client. Also, since the primary key is used for this feature, it's possible you might not want to leak its value to clients.
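To make the limit-offset description above concrete, here is a hedged Go sketch of driving such a paginated query through the core API touched elsewhere in this compare view; the exact return values of sg.GraphQL and the query text are assumptions based on context, not verbatim from the repository:

package example

import (
	"context"
	"encoding/json"
	"fmt"

	"github.com/dosco/super-graph/core"
)

// FetchPage runs a limit/offset query; offset is passed as a variable so the
// same query can be reused (and prepared) for every page.
func FetchPage(ctx context.Context, sg *core.SuperGraph, offset int) error {
	query := `query { products(limit: 10, offset: $offset) { id name price } }`
	vars := json.RawMessage(fmt.Sprintf(`{"offset": %d}`, offset))

	res, err := sg.GraphQL(ctx, query, vars)
	if err != nil {
		return err
	}
	fmt.Println(string(res.Data))
	return nil
}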

go.mod
View File

@ -8,11 +8,11 @@ require (
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
github.com/brianvoe/gofakeit/v5 v5.2.0
github.com/cespare/xxhash/v2 v2.1.1
github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a
github.com/daaku/go.zipexe v1.0.1 // indirect
github.com/dgrijalva/jwt-go v3.2.0+incompatible
github.com/dlclark/regexp2 v1.2.0 // indirect
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65
github.com/dop251/goja v0.0.0-20200424152103-d0b8fda54cd0
github.com/fsnotify/fsnotify v1.4.9
github.com/garyburd/redigo v1.6.0
github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
@ -34,7 +34,6 @@ require (
github.com/valyala/fasttemplate v1.1.0
go.uber.org/zap v1.14.1
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e // indirect
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 // indirect
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect

go.sum
View File

@ -27,8 +27,8 @@ github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d h1:JnYHwwRhFmQ8DeyfqmIrzpkkxnZ+iT5V1CUd3Linin0=
github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d/go.mod h1:+34LPrbHFfKVDPsNfi445UArMEjbeTlCm7C+OpdC7IU=
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a h1:WVu7r2vwlrBVmunbSSU+9/3M3AgsQyhE49CKDjHiFq4=
github.com/chirino/graphql v0.0.0-20200430165312-293648399b1a/go.mod h1:wQjjxFMFyMlsWh4Z3nMuHQtevD4Ul9UVQSnz1JOLuP8=
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
@ -55,8 +55,8 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65 h1:Nud597JuGCF/MScrb6NNVDRgmuk8X7w3pFc5GvSsm5E=
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
github.com/dop251/goja v0.0.0-20200424152103-d0b8fda54cd0 h1:EfFAcaAwGai/wlDCWwIObHBm3T2C2CCPX/SaS0fpOJ4=
github.com/dop251/goja v0.0.0-20200424152103-d0b8fda54cd0/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
github.com/friendsofgo/graphiql v0.2.2/go.mod h1:8Y2kZ36AoTGWs78+VRpvATyt3LJBx0SZXmay80ZTRWo=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
@ -90,7 +90,7 @@ github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGa
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gorilla/websocket v1.4.2/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
github.com/gosimple/slug v1.9.0 h1:r5vDcYrFz9BmfIAMC829un9hq7hKM4cHUrsv36LbEqs=
github.com/gosimple/slug v1.9.0/go.mod h1:AMZ+sOVe65uByN3kgEyf9WEBKBCSS+dJjMX9x4vDJbg=
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
@ -189,8 +189,8 @@ github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRW
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229 h1:E2B8qYyeSgv5MXpmzZXRNp8IAQ4vjxIjhpAf5hv/tAg=
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
github.com/opentracing/opentracing-go v1.0.2 h1:3jA2P6O1F9UOrWVpwrIo17pu01KWvNWg4X946/Y5Zwg=
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/opentracing/opentracing-go v1.1.0 h1:pWlfV3Bxv7k65HYwkikxat0+s3pV4bsqf19k25Ur8rU=
github.com/opentracing/opentracing-go v1.1.0/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/pelletier/go-toml v1.7.0 h1:7utD74fnzVc/cpcyy8sjrlFr5vYpypUixARcHIMIGuI=
@ -330,8 +330,6 @@ golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7 h1:fHDIZ2oxGnUZRN6WgWFCbYBjH9uqVPRCUVUDhs0wnbA=
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e h1:3G+cUijn7XD+S4eJFddp53Pv7+slrESplyjG25HgL+k=
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@ -352,7 +350,6 @@ golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=

View File

@ -46,6 +46,7 @@ type Serv struct {
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
DebugCORS bool `mapstructure:"cors_debug"`
APIPath string `mapstructure:"api_path"`
CacheControl string `mapstructure:"cache_control"`
Auth auth.Auth
Auths []auth.Auth

View File

@ -26,13 +26,12 @@ var (
)
var (
log *_log.Logger // logger
zlog *zap.Logger // fast logger
logLevel int // log level
conf *Config // parsed config
confPath string // path to the config file
db *sql.DB // database connection pool
secretKey [32]byte // encryption key
log *_log.Logger // logger
zlog *zap.Logger // fast logger
logLevel int // log level
conf *Config // parsed config
confPath string // path to the config file
db *sql.DB // database connection pool
)
func Cmd() {

View File

@ -109,7 +109,7 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
// Write new migration
mpath := filepath.Join(migrationsPath, mname)
mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0600)
if err != nil {
log.Fatalf("ERR %s", err)
}

View File

@ -415,6 +415,7 @@ func setFakeFuncs(f *goja.Object) {
//f.Set("programming_language", gofakeit.ProgrammingLanguage)
}
//nolint: errcheck
func setUtilFuncs(f *goja.Object) {
// Slugs
f.Set("make_slug", slug.Make)

View File

@ -7,6 +7,7 @@ import (
"io/ioutil"
"net/http"
"github.com/dosco/super-graph/core"
"github.com/dosco/super-graph/internal/serv/internal/auth"
"github.com/rs/cors"
"go.uber.org/zap"
@ -51,6 +52,7 @@ func apiV1Handler() http.Handler {
func apiV1(w http.ResponseWriter, r *http.Request) {
ct := r.Context()
w.Header().Set("Content-Type", "application/json")
//nolint: errcheck
if conf.AuthFailBlock && !auth.IsAuth(ct) {
@ -76,7 +78,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
doLog := true
res, err := sg.GraphQL(ct, req.Query, req.Vars)
if !conf.Production && res.QueryName() == "IntrospectionQuery" {
if !conf.Production && res.QueryName() == introspectionQuery {
doLog = false
}
@ -84,19 +86,35 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
log.Printf("DBG query %s: %s", res.QueryName(), res.SQL())
}
if err != nil {
renderErr(w, err)
} else {
if err == nil {
if len(conf.CacheControl) != 0 && res.Operation() == core.OpQuery {
w.Header().Set("Cache-Control", conf.CacheControl)
}
//nolint: errcheck
json.NewEncoder(w).Encode(res)
if doLog && logLevel >= LogLevelInfo {
zlog.Info("success",
zap.String("op", res.OperationName()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
)
}
} else {
renderErr(w, err)
if doLog && logLevel >= LogLevelInfo {
zlog.Error("error",
zap.String("op", res.OperationName()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
zap.Error(err),
)
}
}
if doLog && logLevel >= LogLevelInfo {
zlog.Info("success",
zap.String("op", res.Operation()),
zap.String("name", res.QueryName()),
zap.String("role", res.Role()),
)
}
}
//nolint: errcheck

View File

@ -1,37 +0,0 @@
package serv
import "net/http"
//nolint: errcheck
func introspect(w http.ResponseWriter) {
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{
"data": {
"__schema": {
"queryType": {
"name": "Query"
},
"mutationType": null,
"subscriptionType": null
}
},
"extensions":{
"tracing":{
"version":1,
"startTime":"2019-06-04T19:53:31.093Z",
"endTime":"2019-06-04T19:53:31.108Z",
"duration":15219720,
"execution": {
"resolvers": [{
"path": ["__schema"],
"parentType": "Query",
"fieldName": "__schema",
"returnType": "__Schema!",
"startOffset": 50950,
"duration": 17187
}]
}
}
}
}`))
}

View File

@ -46,6 +46,13 @@ cors_allowed_origins: ["*"]
# Debug Cross Origin Resource Sharing requests
cors_debug: false
# Default API path prefix is /api; you can change it if you like
# api_path: "/data"
# Cache-Control header can help cache queries if your CDN supports
# cache-control on POST requests (it applies only to queries, not mutations)
# cache_control: "public, max-age=300, s-maxage=600"
# Postgres related environment Variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT

View File

@ -49,6 +49,13 @@ reload_on_config_change: false
# Debug Cross Origin Resource Sharing requests
# cors_debug: false
# Default API path prefix is /api; you can change it if you like
# api_path: "/data"
# Cache-Control header can help cache queries if your CDN supports
# cache-control on POST requests (it applies only to queries, not mutations)
# cache_control: "public, max-age=300, s-maxage=600"
# Postgres related environment Variables
# SG_DATABASE_HOST
# SG_DATABASE_PORT

View File

@ -2,7 +2,7 @@ package serv
import (
"bytes"
"crypto/sha1"
"crypto/sha256"
"encoding/hex"
"io"
"os"
@ -16,7 +16,7 @@ import (
// nolint: errcheck
func gqlHash(b string, vars []byte, role string) string {
b = strings.TrimSpace(b)
h := sha1.New()
h := sha256.New()
query := "query"
s, e := 0, 0

112
jsn/clear.go Normal file
View File

@ -0,0 +1,112 @@
package jsn
import (
"bytes"
"encoding/json"
"io"
)
// Clear wipes all scalar values from the JSON, including those directly inside an array
func Clear(w *bytes.Buffer, v []byte) error {
dec := json.NewDecoder(bytes.NewReader(v))
st := newIntStack()
isValue := false
inArray := false
n := 0
for {
var t json.Token
var err error
if t, err = dec.Token(); err == io.EOF {
break
} else if err != nil {
return err
}
switch v1 := t.(type) {
case int:
if isValue && !inArray {
w.WriteByte('0')
isValue = false
n++
}
case float64:
if isValue && !inArray {
w.WriteString(`0.0`)
isValue = false
n++
}
case bool:
if isValue && !inArray {
w.WriteString(`false`)
isValue = false
n++
}
case json.Number:
if isValue && !inArray {
w.WriteString(`0`)
isValue = false
n++
}
case nil:
if isValue && !inArray {
w.WriteString(`null`)
isValue = false
n++
}
case string:
if !isValue {
if n != 0 {
w.WriteByte(',')
}
io := int(dec.InputOffset())
w.Write(v[io-len(v1)-2 : io])
w.WriteString(`:`)
isValue = true
} else if !inArray {
w.WriteString(`""`)
isValue = false
n++
}
case json.Delim:
switch t.(json.Delim) {
case '[':
st.Push(n)
inArray = true
n = 0
case ']':
n = st.Pop()
inArray = false
isValue = false
n++
case '{':
if n != 0 && !isValue {
w.WriteByte(',')
}
st.Push(n)
inArray = false
isValue = false
n = 0
case '}':
n = st.Pop()
isValue = false
n++
}
w.WriteByte(v[dec.InputOffset()-1])
}
dec.More()
}
return nil
}
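A quick, illustrative call site for the new Clear helper; the input and expected output are modeled on the TestClear case added further down (assume the usual bytes/fmt/log imports):

    var buf bytes.Buffer
    src := []byte(`{"name": "Hello", "user": 123, "tags": [1, 2]}`)

    // Structure and key names are kept; every scalar is blanked out, and
    // scalar elements sitting directly inside an array are dropped.
    if err := jsn.Clear(&buf, src); err != nil {
        log.Fatal(err)
    }
    fmt.Println(buf.String()) // {"name":"","user":0.0,"tags":[]}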

47
jsn/intstack.go Normal file
View File

@ -0,0 +1,47 @@
package jsn
type intStack struct {
stA [20]int
st []int
top int
}
// Create a new intStack
func newIntStack() *intStack {
s := &intStack{top: -1}
s.st = s.stA[:0]
return s
}
// Return the number of items in the intStack
func (s *intStack) Len() int {
return (s.top + 1)
}
// View the top item on the intStack
func (s *intStack) Peek() int {
if s.top == -1 {
return -1
}
return s.st[s.top]
}
// Pop the top item of the intStack and return it
func (s *intStack) Pop() int {
if s.top == -1 {
return -1
}
s.top--
return s.st[(s.top + 1)]
}
// Push a value onto the top of the intStack
func (s *intStack) Push(value int) {
s.top++
if len(s.st) <= s.top {
s.st = append(s.st, value)
} else {
s.st[s.top] = value
}
}
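The two small stacks added in this change (intStack above, siStack below) share the same design: a fixed 20-element backing array with a slice over it, so pushes up to 20 levels deep reuse the embedded array instead of growing a heap-allocated slice, and Pop/Peek return a sentinel (-1 or nil) when the stack is empty. A minimal, illustrative use from inside the jsn package:

    st := newIntStack()
    st.Push(3) // e.g. remember the element count before descending into an array, as Clear does
    st.Push(7)
    fmt.Println(st.Len())  // 2
    fmt.Println(st.Pop())  // 7
    fmt.Println(st.Peek()) // 3
    fmt.Println(st.Pop())  // 3
    fmt.Println(st.Pop())  // -1: empty-stack sentinel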

View File

@ -509,6 +509,34 @@ func TestKeys3(t *testing.T) {
}
}
func TestClear(t *testing.T) {
var buf bytes.Buffer
json := `{
"insert": {
"created_at": "now",
"test_1a": { "type1": "a", "type2": [{ "a": 2 }] },
"name": "Hello",
"updated_at": "now",
"description": "World"
},
"user": 123,
"tags": [1, 2, "what"]
}`
expected := `{"insert":{"created_at":"","test_1a":{"type1":"","type2":[{"a":0.0}]},"name":"","updated_at":"","description":""},"user":0.0,"tags":[]}`
err := Clear(&buf, []byte(json))
if err != nil {
t.Fatal(err)
}
if buf.String() != expected {
t.Log(buf.String())
t.Error("Does not match expected json")
}
}
func BenchmarkGet(b *testing.B) {
b.ReportAllocs()

View File

@ -10,7 +10,7 @@ func Keys(b []byte) [][]byte {
var k []byte
state := expectValue
st := newStack()
st := newSkipInfoStack()
ae := 0
instr := false
slash := 0

51
jsn/sistack.go Normal file
View File

@ -0,0 +1,51 @@
package jsn
type skipInfo struct {
ss, se int
}
type siStack struct {
stA [20]skipInfo
st []skipInfo
top int
}
// Create a new siStack
func newSkipInfoStack() *siStack {
s := &siStack{top: -1}
s.st = s.stA[:0]
return s
}
// Return the number of items in the siStack
func (s *siStack) Len() int {
return (s.top + 1)
}
// View the top item on the siStack
func (s *siStack) Peek() *skipInfo {
if s.top == -1 {
return nil
}
return &s.st[s.top]
}
// Pop the top item of the siStack and return it
func (s *siStack) Pop() *skipInfo {
if s.top == -1 {
return nil
}
s.top--
return &s.st[(s.top + 1)]
}
// Push a value onto the top of the siStack
func (s *siStack) Push(value skipInfo) {
s.top++
if len(s.st) <= s.top {
s.st = append(s.st, value)
} else {
s.st[s.top] = value
}
}

View File

@ -1,51 +0,0 @@
package jsn
type skipInfo struct {
ss, se int
}
type stack struct {
stA [20]skipInfo
st []skipInfo
top int
}
// Create a new stack
func newStack() *stack {
s := &stack{top: -1}
s.st = s.stA[:0]
return s
}
// Return the number of items in the stack
func (s *stack) Len() int {
return (s.top + 1)
}
// View the top item on the stack
func (s *stack) Peek() *skipInfo {
if s.top == -1 {
return nil
}
return &s.st[s.top]
}
// Pop the top item of the stack and return it
func (s *stack) Pop() *skipInfo {
if s.top == -1 {
return nil
}
s.top--
return &s.st[(s.top + 1)]
}
// Push a value onto the top of the stack
func (s *stack) Push(value skipInfo) {
s.top++
if len(s.st) <= s.top {
s.st = append(s.st, value)
} else {
s.st[s.top] = value
}
}