Compare commits

101 Commits

| SHA1 |
|---|
| ef50c1957b |
| 41ea6ef6f5 |
| a266517d17 |
| 7831d27345 |
| e102da839e |
| 68a378c00f |
| d96eaf14f4 |
| 01e488b69d |
| 7a450b16ba |
| 1ad8cbf15b |
| f69f1c67d5 |
| a172193955 |
| 81338b6123 |
| 265b93b203 |
| 6c240e21b4 |
| 7930719eaa |
| cc687b1b2b |
| 3033dcf1a9 |
| 0381982d19 |
| 2b0a798faa |
| 8b6c562ac1 |
| a1fb89b762 |
| c82a7bff0d |
| 7acf28bb3c |
| be5d4e976a |
| d1b884aec6 |
| 4be4ce860b |
| dfa4caf540 |
| 7763251fb7 |
| 51e105699e |
| 90694f8803 |
| ad82f5b267 |
| 99b37a9c50 |
| 7ec1f59224 |
| d3ecb1d6cc |
| aed4170e8e |
| c33e93ab37 |
| 3d3e5d9c2b |
| 67b4a4d945 |
| 7413813138 |
| 12007db76e |
| c85d379fe2 |
| 62fd1eac55 |
| 1a3d74e1ce |
| 3a4d885987 |
| 3bd9b199dd |
| 4ffa1483a4 |
| 52f3b1c7a2 |
| 2d466bfb12 |
| a0b8907c3c |
| 8097ca3b8f |
| 0e498b0e94 |
| 3eb5b83070 |
| e3c94d17d1 |
| 7240b27214 |
| f37d867e32 |
| 5e75cc7b83 |
| d4dca86267 |
| 76340ab008 |
| 3f5727c22b |
| 7c02226016 |
| 1e31e33707 |
| 0d0d63d8d1 |
| c40ff38b05 |
| 7a5cf47486 |
| 5803395bd5 |
| a40bd7fca5 |
| 343589c3bd |
| 482203ba05 |
| 6831d3f56f |
| 96ed3413fc |
| bf4c496756 |
| 66055516d2 |
| 2d3e3cbae1 |
| 0e16eee93b |
| 679dd1fc83 |
| 3a14a644ce |
| 5da79d91bf |
| 5aceb337d6 |
| 5593c66996 |
| 0f9f8bbf0d |
| cbfedb6fd2 |
| 37f2417c0b |
| 768e8774e7 |
| 9140e597e1 |
| 94593c9cce |
| a96c211fe5 |
| 6d47f0df8e |
| e82bdbed65 |
| ef51b99fc7 |
| 9ebd03fa8c |
| aff2a13ba4 |
| f518d5fc69 |
| 7583326d21 |
| 8024a8420b |
| 6bd27d7c79 |
| a4c09dedd5 |
| 176514b5f1 |
| 396f4bcfd8 |
| 51c5f037f4 |
| c576f1ae16 |
**.chglog/CHANGELOG.tpl.md** (new executable file, 49 lines)
```
{{ if .Versions -}}
<a name="unreleased"></a>
## [Unreleased]

{{ if .Unreleased.CommitGroups -}}
{{ range .Unreleased.CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}

{{ range .Versions }}
<a name="{{ .Tag.Name }}"></a>
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]{{ else }}{{ .Tag.Name }}{{ end }} - {{ datetime "2006-01-02" .Tag.Date }}
{{ range .CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }}
{{ end }}
{{ end -}}

{{- if .MergeCommits -}}
### Pull Requests
{{ range .MergeCommits -}}
- {{ .Header }}
{{ end }}
{{ end -}}

{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}
{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}

{{- if .Versions }}
[Unreleased]: {{ .Info.RepositoryURL }}/compare/{{ $latest := index .Versions 0 }}{{ $latest.Tag.Name }}...HEAD
{{ range .Versions -}}
{{ if .Tag.Previous -}}
[{{ .Tag.Name }}]: {{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}
{{ end -}}
{{ end -}}
{{ end -}}
```
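In the version heading above, `datetime "2006-01-02" .Tag.Date` formats the tag date using Go's reference-time layout, which can be confusing on first read. A minimal Go sketch of what that layout string does (the tag date below is just a sample value, not taken from this diff):

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Go date layouts are written as the reference time
	// "Mon Jan 2 15:04:05 MST 2006", so the layout string "2006-01-02"
	// used by the template renders a date as YYYY-MM-DD.
	tagDate := time.Date(2019, time.December, 2, 0, 0, 0, 0, time.UTC)
	fmt.Println(tagDate.Format("2006-01-02")) // prints: 2019-12-02
}
```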
**.chglog/config.yml** (new executable file, 27 lines)
```yaml
style: github
template: CHANGELOG.tpl.md
info:
  title: CHANGELOG
  repository_url: https://github.com/dosco/super-graph
options:
  commits:
    # filters:
    #   Type:
    #     - feat
    #     - fix
    #     - perf
    #     - refactor
  commit_groups:
    # title_maps:
    #   feat: Features
    #   fix: Bug Fixes
    #   perf: Performance Improvements
    #   refactor: Code Refactoring
  header:
    pattern: "^((\\w+)\\s.*)$"
    pattern_maps:
      - Subject
      - Type
  notes:
    keywords:
      - BREAKING CHANGE
```
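The `header` pattern in this config is what splits a one-line commit message into the `Subject` and `Type` fields the template renders; capture groups map to `pattern_maps` in order. A minimal Go sketch of the same regex against a sample commit subject:

```go
package main

import (
	"fmt"
	"regexp"
)

func main() {
	// Same pattern as .chglog/config.yml: group 1 becomes the Subject,
	// group 2 (the first word) becomes the Type used for commit grouping.
	re := regexp.MustCompile(`^((\w+)\s.*)$`)

	m := re.FindStringSubmatch("Add support for multi-root queries")
	fmt.Println("Subject:", m[1]) // Subject: Add support for multi-root queries
	fmt.Println("Type:   ", m[2]) // Type:    Add
}
```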
**.github/ISSUE_TEMPLATE/bug_report.md** (new file, vendored, 24 lines)
```md
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: bug
assignees: ''

---

<!-- If you suspect this could be a bug, follow the template. -->

### What version of Super Graph are you using? `super-graph version`


### Have you tried reproducing the issue with the latest release?


### What is the hardware spec (RAM, OS)?


### Steps to reproduce the issue (config used to run Super Graph).


### Expected behaviour and actual result.
```
**.github/ISSUE_TEMPLATE/documentation.md** (new file, vendored, 12 lines)
```md
---
name: Documentation
about: Suggest how we can improve documentation
title: ''
labels: bug, docs
assignees: ''

---

<!-- If you think the Super Graph documentation falls short https://supergraph.dev/guide.html please suggest ways we can improve it. -->

<!-- explain it here. -->
```
**.github/ISSUE_TEMPLATE/feature_request.md** (new file, vendored, 14 lines)
```md
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: enhancement
assignees: ''

---

<!-- Please only use this template for submitting feature requests -->

**What would you like to be added**:

**Why is this needed**:
```
**.gitignore** (vendored, 5 lines changed)
```diff
@@ -24,11 +24,14 @@
/demo/tmp

.vscode
main
.DS_Store
.swp
.release
main
super-graph
*-fuzz.zip
crashers
suppressions
release
.gofuzz
*-fuzz.zip
```
```diff
@@ -7,7 +7,7 @@ rules:
   - name: run
     match: \.go$
     ignore: web|examples|docs|_test\.go$
-    command: go run main.go serv
+    command: go run cmd/main.go serv
   - name: test
     match: _test\.go$
     command: go test -cover {PKG}
```
**CHANGELOG.md** (new file, 424 lines)
<a name="unreleased"></a>
## [Unreleased]

### Add
- Add config driven custom table relationships
- Add support for `websearch_to_tsquery` in PG 11

### Create
- Create CODE_OF_CONDUCT.md

### Fix
- Fix bug with remote join example
- Fix grammar / syntax

### Update
- Update issue templates
- Update CONTRIBUTING.md
- Update issue templates
- Update feature_request.md


<a name="v0.12.6"></a>
## [v0.12.6] - 2019-12-02
### Add
- Add support for `websearch_to_tsquery` in PG 11


<a name="v0.12.5"></a>
## [v0.12.5] - 2019-11-30
### Add
- Add a guide to the internals of the codebase
- Add a CONTRIBUTING.md guide for contributors
- Add a CHANGELOG.md
- Add issue templates

### Fix
- Fix for missing filters on nested selectors

### Refactor
- Refactor rename `Select.Table` to `Select.Name`


<a name="v0.12.4"></a>
## [v0.12.4] - 2019-11-28
### Move
- Move license from MIT to Apache 2.0. Add Makefile


<a name="v0.12.3"></a>
## [v0.12.3] - 2019-11-26
### Added
- Added support for query names to the allow.list


<a name="v0.12.2"></a>
## [v0.12.2] - 2019-11-25
### Fix
- Fix bug with compiling anon queries


<a name="v0.12.1"></a>
## [v0.12.1] - 2019-11-22
### Move
- Move sql query logging from info to debug


<a name="v0.12.0"></a>
## [v0.12.0] - 2019-11-22
### Use
- Use logger error instead of panic in goja handlers


<a name="v0.11.9"></a>
## [v0.11.9] - 2019-11-22
### Add
- Add a db:reset command only for dev mode


<a name="v0.11.8"></a>
## [v0.11.8] - 2019-11-21
### Optimize
- Optimize db queries limit use of transactions


<a name="v0.11.7"></a>
## [v0.11.7] - 2019-11-19
### Added
- Added support for multi-root queries


<a name="v0.11.6"></a>
## [v0.11.6] - 2019-11-15
### Fix
- Fix issues with JWT auth
- Fix bug with migration filename generation
- Fix bug with migration file name


<a name="v0.11.5"></a>
## [v0.11.5] - 2019-11-10
### Fix
- Fix bug with migration template name


<a name="v0.11.4"></a>
## [v0.11.4] - 2019-11-10
### Fix
- Fix bug with creating new migrations


<a name="v0.11.3"></a>
## [v0.11.3] - 2019-11-09
### Fix
- Fix macro syntax bug in app templates


<a name="v0.11.2"></a>
## [v0.11.2] - 2019-11-07
### Fix
- Fix bugs and add new production mode


<a name="v0.11.1"></a>
## [v0.11.1] - 2019-11-05
### Add
- Add nested where clause to filter based on related tables

### Block
- Block unauthorized requests when 'anon' role is not defined

### Update
- Update docs and website with new features


<a name="v0.11"></a>
## [v0.11] - 2019-11-01
### Add
- Add config driven presets for insert, update and upsert
- Add config driven presets for insert, update and upsert
- Add RBAC option to disable functions eg. count
- Add fuzz testing to 'serv' for the GQL hash parser
- Add fuzz testing to 'jsn' and 'qcode'
- Add ability to block queries and mutations by role
- Add built in 'anon' and 'user' roles
- Add role based access control

### Allow
- Allow config files to inherit from other config files

### Change
- Change config key inherit to inherits

### Get
- Get RBAC working for queries and mutations

### Optimize
- Optimize prepared statement flow for RBAC

### Preserve
- Preserve allow.list ordering on save

### Update
- Update filters section in guide

### Pull Requests
- Merge pull request [#11](https://github.com/dosco/super-graph/issues/11) from dosco/rbac


<a name="v0.10.1"></a>
## [v0.10.1] - 2019-10-06
### Add
- Add ability to set filters per operation / action
- Add upsert mutation

### Pull Requests
- Merge pull request [#10](https://github.com/dosco/super-graph/issues/10) from FourSigma/sm-examples-folder


<a name="v0.10"></a>
## [v0.10] - 2019-10-04
### Fix
- Fix return values for bulk mutations and delete
- Fix issues with mutation SQL
- Fix broken demo app
- Fix typo in 'across'

### Remove
- Remove extra link from README

### Update
- Update docs, getting started guide and mutations

### Pull Requests
- Merge pull request [#6](https://github.com/dosco/super-graph/issues/6) from muesli/typo-fixes


<a name="v0.9"></a>
## [v0.9] - 2019-10-01
### Fix
- Fix demo rails app broken build


<a name="v0.8"></a>
## [v0.8] - 2019-09-30
### Fix
- Fix invalid import bug

### Update
- Update documentation site


<a name="v0.7"></a>
## [v0.7] - 2019-09-29
### Failure
- Failure to prepare statements should be a warning

### Fix
- Fix duplicate column bug


<a name="v0.6"></a>
## [v0.6] - 2019-09-29
### Add
- Add database setup commands
- Add binary compression back to Dockerfile
- Add initialization command to setup new apps
- Add migrate command
- Add database seeding capability
- Add session variable for user id
- Add delete mutation
- Add update mutation
- Add insert mutation with bulk insert
- Add GoTO Aug, 19 presentation
- Add support for prepared statements
- Add end-to-end benchmarking
- Add object pooling for parser expressions
- Add request / response debugging for remote joins
- Add a presentation about GraphQL
- Add validation for remote JSON
- Add tracing for API stitching
- Add REST API stitching
- Add SQL query caching
- Add support for GraphQL variables
- Add fuzz testing to qcode
- Add test for Rails Redis cookie store integration
- Add an install guide

### Change
- Change fuzz test name to qcode
- Change logo from PNG to SVG

### Enable
- Enable reload on config change

### Fix
- Fix missing config name bug
- Fix new app templates
- Fix help message for migrate
- Fix session variable bug
- Fix test failures in `psql` and `serv`
- Fix demo docker services startup order
- Fix wrong value for false token bug. Reported by [@ThisIsMissEm](https://github.com/ThisIsMissEm)
- Fix allow.list file discovery bug
- Fix bug with allow list path
- Fix wrong value for use_allow_list in dev config
- Fix startup bug in demo script
- Fix url bug in allow list
- Fix bug [#676](https://github.com/dosco/super-graph/issues/676) found by fuzzer
- Fix race-condition in remote joins
- Fix cookie passing in web ui
- Fix bug with passing cookies in web ui
- Fix null pointer with invalid argument values
- Fix infinite loop bug in lexer
- Fix null pointer issue found by fuzz test
- Fix issue with fuzzbuzz config
- Fix demo to run as memory only
- Fix auth documentation
- Fix issue with web ui sizing
- Fix issue preventing docker-compose deploy
- Fix try demo documentation

### Further
- Further reduce allocations across hot paths
- Further reduce allocations on the compiler hot path
- Further optimize json parsing and editing performance

### Highlight
- Highlight top features better on the site

### Improve
- Improve readability of json parser code
- Improve the motivation section in the readme
- Improve the demo experience

### Make
- Make remote joins use parallel http requests

### Merge
- Merge branch 'master' into optimize-psql

### New
- New low allocation fast json parsing and editing library

### Optimize
- Optimize lexer and fix bugs
- Optimize the sql generator hot path

### Reduce
- Reduce allocations done by the stack
- Reduce steps to run the demo
- Reduce allocations and improve perf over 50%

### Remove
- Remove unused packages
- Remove the 'hello' test app folder
- Remove other allocations in psql

### Use
- Use hashes as ids for table relationships

### Watch
- Watch and reload on config changes


<a name="v0.5"></a>
## [v0.5] - 2019-04-10
### Add
- Add support for new Rails 5.2 aes-256-gcm cookies
- Add query support for ts_rank and ts_headline
- Add full text search support using TSV indexes
- Add missing assets folder
- Add fetch by ID feature
- Add documentation

### Cleanup
- Cleanup and redesign config files

### Fix
- Fix bug with auth config parsing

### Redesign
- Redesign config file architecture

### Reduce
- Reduce realloc of maps and slices

### Update
- Update docs with full-text search information


<a name="v0.4"></a>
## [v0.4] - 2019-04-01

<a name="v0.3"></a>
## [v0.3] - 2019-04-01
### Add
- Add SQL execution timing and tracing
- Add support for HAVING with aggregate queries
- Add aggregate functions to GQL queries
- Add Auth0 JWT support
- Add React UI building to the docker build flow
- Add compiler profiling
- Add benchmarks for GQL to SQL compile
- Add tests for gql to sql compile

### Cleanup
- Cleanup Dockerfile

### Fix
- Fix recurring packer issue docker hub builds
- Fix issue with asset packer breaking Docker builds
- Fix missing git package in Dockerfile
- Fix docker ignore values
- Fix image build failure on docker hub
- Fix build issue in Dockerfile
- Fix bugs and document the 'where' clause
- Fix perf issue with inflections

### Optimize
- Optimize docker image

### Pack
- Pack web UI with app into a single binary

### Upgrade
- Upgrade web UI packages


<a name="0.3"></a>
## 0.3 - 2019-03-24
### First
- First commit

### Fix
- Fix license to MIT


[Unreleased]: https://github.com/dosco/super-graph/compare/v0.12.6...HEAD
[v0.12.6]: https://github.com/dosco/super-graph/compare/v0.12.5...v0.12.6
[v0.12.5]: https://github.com/dosco/super-graph/compare/v0.12.4...v0.12.5
[v0.12.4]: https://github.com/dosco/super-graph/compare/v0.12.3...v0.12.4
[v0.12.3]: https://github.com/dosco/super-graph/compare/v0.12.2...v0.12.3
[v0.12.2]: https://github.com/dosco/super-graph/compare/v0.12.1...v0.12.2
[v0.12.1]: https://github.com/dosco/super-graph/compare/v0.12.0...v0.12.1
[v0.12.0]: https://github.com/dosco/super-graph/compare/v0.11.9...v0.12.0
[v0.11.9]: https://github.com/dosco/super-graph/compare/v0.11.8...v0.11.9
[v0.11.8]: https://github.com/dosco/super-graph/compare/v0.11.7...v0.11.8
[v0.11.7]: https://github.com/dosco/super-graph/compare/v0.11.6...v0.11.7
[v0.11.6]: https://github.com/dosco/super-graph/compare/v0.11.5...v0.11.6
[v0.11.5]: https://github.com/dosco/super-graph/compare/v0.11.4...v0.11.5
[v0.11.4]: https://github.com/dosco/super-graph/compare/v0.11.3...v0.11.4
[v0.11.3]: https://github.com/dosco/super-graph/compare/v0.11.2...v0.11.3
[v0.11.2]: https://github.com/dosco/super-graph/compare/v0.11.1...v0.11.2
[v0.11.1]: https://github.com/dosco/super-graph/compare/v0.11...v0.11.1
[v0.11]: https://github.com/dosco/super-graph/compare/v0.10.1...v0.11
[v0.10.1]: https://github.com/dosco/super-graph/compare/v0.10...v0.10.1
[v0.10]: https://github.com/dosco/super-graph/compare/v0.9...v0.10
[v0.9]: https://github.com/dosco/super-graph/compare/v0.8...v0.9
[v0.8]: https://github.com/dosco/super-graph/compare/v0.7...v0.8
[v0.7]: https://github.com/dosco/super-graph/compare/v0.6...v0.7
[v0.6]: https://github.com/dosco/super-graph/compare/v0.5...v0.6
[v0.5]: https://github.com/dosco/super-graph/compare/v0.4...v0.5
[v0.4]: https://github.com/dosco/super-graph/compare/v0.3...v0.4
[v0.3]: https://github.com/dosco/super-graph/compare/0.3...v0.3
**CODE_OF_CONDUCT.md** (new file, 3 lines)
# Code of Conduct

Be excellent to each other. Treat others the way you'd like to be treated. We are all here to learn, build great software and make new friends.
**CONTRIBUTING.md** (new file, 82 lines)
# Contributing to Super Graph

Super Graph is a very approachable code-base and a project that is easy for almost
anyone with basic Go knowledge to start contributing to. It is also a young project,
so a lot of high-value work is there for the taking.

Even the GraphQL to SQL compiler that is at the heart of Super Graph is essentially a textbook compiler with clean and easy-to-read code. The data structures used by the lexer, parser and SQL generator are easy to understand and modify.

Finally, we do have a lot of tests for critical parts of the codebase, which makes it easy for you to modify with confidence. I'm always available for questions or any sort of guidance, so feel free to reach out over Twitter or Discord.

* [Getting Started](#getting-started)
* [Setting Up the Development Environment](#setup-development-environment)
  * [Prerequisites](#prerequisites)
  * [Get the Super Graph source](#get-source-code)
  * [Start the development environment](#start-the-development-environment)
  * [Testing](#testing-and-linting)
* [Contributing](#contributing)
  * [Guidelines](#guidelines)
  * [Code style](#code-style)

## Getting Started

- Read the [Getting Started Guide](https://supergraph.dev/guide.html#get-started)

## Setup Development Environment

### Prerequisites

- Install [Git](https://git-scm.com/) (may be already installed on your system, or available through your OS package manager)
- Install [Go 1.13 or above](https://golang.org/doc/install)
- Install [Docker](https://docs.docker.com/v17.09/engine/installation/)

### Get source code

The entire build flow uses the `Makefile`; there is a whole list of sub-commands you
can use to build, test, install, lint, etc.

```bash
git clone https://github.com/dosco/super-graph
cd ./super-graph
make help
```

### Start the development environment

The entire development flow is packaged into a `docker-compose` workflow. The `up` command below will launch a Postgres database, an example e-commerce app in Rails and Super Graph in development mode. The `db:seed` Rails task will insert sample data into Postgres.

```bash
docker-compose -f demo.yml run rails_app rake db:create db:migrate db:seed
docker-compose up
```

### Learn how the code works

[Super Graph codebase explained](https://supergraph.dev/internals.html)

### Testing and Linting

```
make lint test
```

## Contributing

### Guidelines

- **Pull requests are welcome**, as long as you're willing to put in the effort to meet the guidelines.
- Aim for clear, well written, maintainable code.
- Simple and minimal approach to features, like Go.
- Refactoring existing code now for better performance, better readability or better testability wins over adding a new feature.
- Don't add a function to a module that you don't use right now, or that doesn't clearly enable planned functionality.
- Don't ship a half-done feature, which would require significant alterations to work fully.
- Avoid [technical debt](https://en.wikipedia.org/wiki/Technical_debt) like cancer.
- Leave the code cleaner than when you began.

### Code style

- We're following [Go Code Review](https://github.com/golang/go/wiki/CodeReviewComments).
- Use `go fmt` to format your code before committing.
- If you see *any code* which clearly violates the style guide, please fix it and send a pull request. No need to ask for permission.
- Avoid unnecessary vertical spaces. Use your judgment or follow the code review comments.
- Wrap your code and comments to 100 characters, unless doing so makes the code less legible.
**Dockerfile** (36 lines changed)
```diff
@@ -1,33 +1,40 @@
 # stage: 1
 FROM node:10 as react-build
 WORKDIR /web
-COPY web/ ./
+COPY /cmd/internal/serv/web/ ./
 RUN yarn
 RUN yarn build

 # stage: 2
-FROM golang:1.13beta1-alpine as go-build
+FROM golang:1.14-alpine as go-build
 RUN apk update && \
     apk add --no-cache make && \
     apk add --no-cache git && \
     apk add --no-cache jq && \
     apk add --no-cache upx=3.95-r2

-RUN go get -u github.com/rafaelsq/wtc && \
-    go get -u github.com/GeertJohan/go.rice/rice
+RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc

 ARG SOPS_VERSION=3.5.0
 ADD https://github.com/mozilla/sops/releases/download/v${SOPS_VERSION}/sops-v${SOPS_VERSION}.linux /usr/local/bin/sops
 RUN chmod 755 /usr/local/bin/sops

 WORKDIR /app
 COPY . /app

-RUN mkdir -p /app/web/build
-COPY --from=react-build /web/build/ ./web/build/
+RUN mkdir -p /app/cmd/internal/serv/web/build
+COPY --from=react-build /web/build/ ./cmd/internal/serv/web/build

 ENV GO111MODULE=on
 RUN go mod vendor
-RUN go generate ./... && \
-    CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o super-graph && \
-    echo "Compressing binary, will take a bit of time..." && \
+RUN make build
+RUN echo "Compressing binary, will take a bit of time..." && \
     upx --ultra-brute -qq super-graph && \
     upx -t super-graph

 # stage: 3
 FROM alpine:latest
 WORKDIR /
@@ -38,10 +45,15 @@ RUN mkdir -p /config
 COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
 COPY --from=go-build /app/config/* /config/
 COPY --from=go-build /app/super-graph .
 COPY --from=go-build /app/cmd/scripts/start.sh .
 COPY --from=go-build /usr/local/bin/sops .

 RUN chmod +x /super-graph
 RUN chmod +x /start.sh

 USER nobody

 EXPOSE 8080
 ENV GO_ENV production

-CMD ./super-graph serv
+ENTRYPOINT ["./start.sh"]
+CMD ["./super-graph", "serv"]
```
LICENSE
189
LICENSE
@ -1,21 +1,176 @@
The MIT license text was removed:

```text
The MIT License (MIT)

Copyright (c) 2019-present Vikram Rangnekar. twitter.com/dosco

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
```

and replaced with the Apache License 2.0 text:

```text
                                 Apache License
                           Version 2.0, January 2004
                        http://www.apache.org/licenses/

   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

   1. Definitions.

      "License" shall mean the terms and conditions for use, reproduction,
      and distribution as defined by Sections 1 through 9 of this document.

      "Licensor" shall mean the copyright owner or entity authorized by
      the copyright owner that is granting the License.

      "Legal Entity" shall mean the union of the acting entity and all
      other entities that control, are controlled by, or are under common
      control with that entity. For the purposes of this definition,
      "control" means (i) the power, direct or indirect, to cause the
      direction or management of such entity, whether by contract or
      otherwise, or (ii) ownership of fifty percent (50%) or more of the
      outstanding shares, or (iii) beneficial ownership of such entity.

      "You" (or "Your") shall mean an individual or Legal Entity
      exercising permissions granted by this License.

      "Source" form shall mean the preferred form for making modifications,
      including but not limited to software source code, documentation
      source, and configuration files.

      "Object" form shall mean any form resulting from mechanical
      transformation or translation of a Source form, including but
      not limited to compiled object code, generated documentation,
      and conversions to other media types.

      "Work" shall mean the work of authorship, whether in Source or
      Object form, made available under the License, as indicated by a
      copyright notice that is included in or attached to the work
      (an example is provided in the Appendix below).

      "Derivative Works" shall mean any work, whether in Source or Object
      form, that is based on (or derived from) the Work and for which the
      editorial revisions, annotations, elaborations, or other modifications
      represent, as a whole, an original work of authorship. For the purposes
      of this License, Derivative Works shall not include works that remain
      separable from, or merely link (or bind by name) to the interfaces of,
      the Work and Derivative Works thereof.

      "Contribution" shall mean any work of authorship, including
      the original version of the Work and any modifications or additions
      to that Work or Derivative Works thereof, that is intentionally
      submitted to Licensor for inclusion in the Work by the copyright owner
      or by an individual or Legal Entity authorized to submit on behalf of
      the copyright owner. For the purposes of this definition, "submitted"
      means any form of electronic, verbal, or written communication sent
      to the Licensor or its representatives, including but not limited to
      communication on electronic mailing lists, source code control systems,
      and issue tracking systems that are managed by, or on behalf of, the
      Licensor for the purpose of discussing and improving the Work, but
      excluding communication that is conspicuously marked or otherwise
      designated in writing by the copyright owner as "Not a Contribution."

      "Contributor" shall mean Licensor and any individual or Legal Entity
      on behalf of whom a Contribution has been received by Licensor and
      subsequently incorporated within the Work.

   2. Grant of Copyright License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      copyright license to reproduce, prepare Derivative Works of,
      publicly display, publicly perform, sublicense, and distribute the
      Work and such Derivative Works in Source or Object form.

   3. Grant of Patent License. Subject to the terms and conditions of
      this License, each Contributor hereby grants to You a perpetual,
      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
      (except as stated in this section) patent license to make, have made,
      use, offer to sell, sell, import, and otherwise transfer the Work,
      where such license applies only to those patent claims licensable
      by such Contributor that are necessarily infringed by their
      Contribution(s) alone or by combination of their Contribution(s)
      with the Work to which such Contribution(s) was submitted. If You
      institute patent litigation against any entity (including a
      cross-claim or counterclaim in a lawsuit) alleging that the Work
      or a Contribution incorporated within the Work constitutes direct
      or contributory patent infringement, then any patent licenses
      granted to You under this License for that Work shall terminate
      as of the date such litigation is filed.

   4. Redistribution. You may reproduce and distribute copies of the
      Work or Derivative Works thereof in any medium, with or without
      modifications, and in Source or Object form, provided that You
      meet the following conditions:

      (a) You must give any other recipients of the Work or
          Derivative Works a copy of this License; and

      (b) You must cause any modified files to carry prominent notices
          stating that You changed the files; and

      (c) You must retain, in the Source form of any Derivative Works
          that You distribute, all copyright, patent, trademark, and
          attribution notices from the Source form of the Work,
          excluding those notices that do not pertain to any part of
          the Derivative Works; and

      (d) If the Work includes a "NOTICE" text file as part of its
          distribution, then any Derivative Works that You distribute must
          include a readable copy of the attribution notices contained
          within such NOTICE file, excluding those notices that do not
          pertain to any part of the Derivative Works, in at least one
          of the following places: within a NOTICE text file distributed
          as part of the Derivative Works; within the Source form or
          documentation, if provided along with the Derivative Works; or,
          within a display generated by the Derivative Works, if and
          wherever such third-party notices normally appear. The contents
          of the NOTICE file are for informational purposes only and
          do not modify the License. You may add Your own attribution
          notices within Derivative Works that You distribute, alongside
          or as an addendum to the NOTICE text from the Work, provided
          that such additional attribution notices cannot be construed
          as modifying the License.

      You may add Your own copyright statement to Your modifications and
      may provide additional or different license terms and conditions
      for use, reproduction, or distribution of Your modifications, or
      for any such Derivative Works as a whole, provided Your use,
      reproduction, and distribution of the Work otherwise complies with
      the conditions stated in this License.

   5. Submission of Contributions. Unless You explicitly state otherwise,
      any Contribution intentionally submitted for inclusion in the Work
      by You to the Licensor shall be under the terms and conditions of
      this License, without any additional terms or conditions.
      Notwithstanding the above, nothing herein shall supersede or modify
      the terms of any separate license agreement you may have executed
      with Licensor regarding such Contributions.

   6. Trademarks. This License does not grant permission to use the trade
      names, trademarks, service marks, or product names of the Licensor,
      except as required for reasonable and customary use in describing the
      origin of the Work and reproducing the content of the NOTICE file.

   7. Disclaimer of Warranty. Unless required by applicable law or
      agreed to in writing, Licensor provides the Work (and each
      Contributor provides its Contributions) on an "AS IS" BASIS,
      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
      implied, including, without limitation, any warranties or conditions
      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
      PARTICULAR PURPOSE. You are solely responsible for determining the
      appropriateness of using or redistributing the Work and assume any
      risks associated with Your exercise of permissions under this License.

   8. Limitation of Liability. In no event and under no legal theory,
      whether in tort (including negligence), contract, or otherwise,
      unless required by applicable law (such as deliberate and grossly
      negligent acts) or agreed to in writing, shall any Contributor be
      liable to You for damages, including any direct, indirect, special,
      incidental, or consequential damages of any character arising as a
      result of this License or out of the use or inability to use the
      Work (including but not limited to damages for loss of goodwill,
      work stoppage, computer failure or malfunction, or any and all
      other commercial damages or losses), even if such Contributor
      has been advised of the possibility of such damages.

   9. Accepting Warranty or Additional Liability. While redistributing
      the Work or Derivative Works thereof, You may choose to offer,
      and charge a fee for, acceptance of support, warranty, indemnity,
      or other liability obligations and/or rights consistent with this
      License. However, in accepting such obligations, You may act only
      on Your own behalf and on Your sole responsibility, not on behalf
      of any other Contributor, and only if You agree to indemnify,
      defend, and hold each Contributor harmless for any liability
      incurred by, or claims asserted against, such Contributor by reason
      of your accepting any such warranty or additional liability.

   END OF TERMS AND CONDITIONS
```
**Makefile** (new file, 109 lines)
```make
BUILD ?= $(shell git rev-parse --short HEAD)
BUILD_DATE ?= $(shell git log -1 --format=%ci)
BUILD_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
BUILD_VERSION ?= $(shell git describe --always --tags)

GOPATH ?= $(shell go env GOPATH)

ifndef GOPATH
override GOPATH = $(HOME)/go
endif

export GO111MODULE := on

# Build-time Go variables
version = github.com/dosco/super-graph/serv.version
gitBranch = github.com/dosco/super-graph/serv.gitBranch
lastCommitSHA = github.com/dosco/super-graph/serv.lastCommitSHA
lastCommitTime = github.com/dosco/super-graph/serv.lastCommitTime

BUILD_FLAGS ?= -ldflags '-s -w -X ${lastCommitSHA}=${BUILD} -X "${lastCommitTime}=${BUILD_DATE}" -X "${version}=${BUILD_VERSION}" -X ${gitBranch}=${BUILD_BRANCH}'

.PHONY: all build gen clean test run lint changelog release version help $(PLATFORMS)

test:
	@go test -v ./...

BIN_DIR := $(GOPATH)/bin
GORICE := $(BIN_DIR)/rice
GOLANGCILINT := $(BIN_DIR)/golangci-lint
GITCHGLOG := $(BIN_DIR)/git-chglog
WEB_BUILD_DIR := ./cmd/internal/serv/web/build/manifest.json

$(GORICE):
	@GO111MODULE=off go get -u github.com/GeertJohan/go.rice/rice

$(WEB_BUILD_DIR):
	@echo "First install Yarn and create a build of the web UI then re-run make install"
	@echo "Run this command: yarn --cwd cmd/internal/serv/web/ build"
	@exit 1

$(GITCHGLOG):
	@GO111MODULE=off go get -u github.com/git-chglog/git-chglog/cmd/git-chglog

changelog: $(GITCHGLOG)
	@git-chglog $(ARGS)

$(GOLANGCILINT):
	@GO111MODULE=off curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOPATH)/bin v1.21.0

lint: $(GOLANGCILINT)
	@golangci-lint run ./... --skip-dirs-use-default

BINARY := super-graph
LDFLAGS := -s -w
PLATFORMS := windows linux darwin
os = $(word 1, $@)

$(PLATFORMS): lint test
	@mkdir -p release
	@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64 cmd/main.go

release: windows linux darwin

all: lint test $(BINARY)

build: $(BINARY)

gen: $(GORICE) $(WEB_BUILD_DIR)
	@go generate ./...

$(BINARY): clean
	@go build $(BUILD_FLAGS) -o $(BINARY) cmd/main.go

clean:
	@rm -f $(BINARY)

run: clean
	@go run $(BUILD_FLAGS) main.go $(ARGS)

install:
	@echo $(GOPATH)
	@echo "Commit Hash: `git rev-parse HEAD`"
	@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
	@go install $(BUILD_FLAGS) cmd
	@echo "New Hash:" `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`

uninstall: clean
	@go clean -i -x

version:
	@echo Super Graph ${BUILD_VERSION}
	@echo Build: ${BUILD}
	@echo Build date: ${BUILD_DATE}
	@echo Branch: ${BUILD_BRANCH}
	@echo Go version: $(shell go version)

help:
	@echo
	@echo Build commands:
	@echo " make build - Build supergraph binary"
	@echo " make install - Install supergraph binary"
	@echo " make uninstall - Uninstall supergraph binary"
	@echo " make [platform] - Build for platform [linux|darwin|windows]"
	@echo " make release - Build all platforms"
	@echo " make run - Run supergraph (eg. make run ARGS=\"help\")"
	@echo " make test - Run all tests"
	@echo " make changelog - Generate changelog (eg. make changelog ARGS=\"help\")"
	@echo " make help - This help"
	@echo
```
**NOTICE** (new file, 13 lines)
```text
Copyright 2019 Vikram Rangnekar

Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at

    http://www.apache.org/licenses/LICENSE-2.0

Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
```
**README.md** (93 lines changed)
````diff
@@ -1,17 +1,74 @@
<a href="https://supergraph.dev"><img src="https://supergraph.dev/hologram.svg" width="100" height="100" align="right" /></a>
<img src="docs/guide/.vuepress/public/super-graph.png" width="250" />

# Super Graph - Instant GraphQL APIs for your apps.
### Build web products faster. Secure high performance GraphQL

## Build web products faster. No code needed. GraphQL automatically transformed into efficient database queries.




[](https://pkg.go.dev/github.com/dosco/super-graph/core?tab=doc)



[](https://discord.gg/6pSWCTZ)



## What's Super Graph?
## The story of Super Graph?

Designed to 100x your developer productivity. Super Graph will instantly, and without you writing code, provide you a high performance GraphQL API for your Postgres database. GraphQL queries are compiled into a single fast SQL query. Super Graph is a Go library and a service; use it in your own code or run it as a separate service.

## Using it as a service

```console
git clone https://github.com/dosco/super-graph
cd ./super-graph
make install

super-graph new <app_name>
```

## Using it in your own code

```golang
package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	conf, err := core.ReadInConfig("./config/dev.yml")
	if err != nil {
		log.Fatal(err)
	}

	sg, err := core.NewSuperGraph(conf, db)
	if err != nil {
		log.Fatal(err)
	}

	query := `
		query {
			posts {
				id
				title
			}
		}`

	res, err := sg.GraphQL(context.Background(), query, nil)
	if err != nil {
		log.Fatal(err)
	}

	fmt.Println(string(res.Data))
}
```

## About Super Graph

After working on several products through my career I find that we spend way too much time on building API backends. Most APIs also require constant updating; this costs real time and money.

@@ -25,21 +82,23 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun

## Features

- Role based access control
- Works with Ruby-On-Rails databases
- Automatically learns database schemas and relationships
- Complex nested queries and mutations
- Auto learns database tables and relationships
- Role and Attribute based access control
- Opaque cursor based efficient pagination
- Full text search and aggregations
- Rails authentication supported (Redis, Memcache, Cookie)
- JWT tokens supported (Auth0, etc)
- Join database with remote REST APIs
- Highly optimized and fast Postgres SQL queries
- GraphQL queries and mutations
- Join database queries with remote REST APIs
- Also works with existing Ruby-On-Rails apps
- Rails authentication supported (Redis, Memcache, Cookie)
- A simple config file
- High performance Go codebase
- Tiny docker image and low memory requirements
- Fuzz tested for security
- Database migrations tool
- Database seeding tool
- Works with Postgres and YugabyteDB

## Documentation

@@ -56,7 +115,7 @@ Twitter or Discord.

## License

-[MIT](http://opensource.org/licenses/MIT)
+[Apache Public License 2.0](https://opensource.org/licenses/Apache-2.0)

Copyright (c) 2019-present Vikram Rangnekar
````
**cmd/internal/serv/actions.go** (new file, 40 lines)
@ -0,0 +1,40 @@
|
||||
package serv
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
)
|
||||
|
||||
type actionFn func(w http.ResponseWriter, r *http.Request) error
|
||||
|
||||
func newAction(a *Action) (http.Handler, error) {
|
||||
var fn actionFn
|
||||
var err error
|
||||
|
||||
if len(a.SQL) != 0 {
|
||||
fn, err = newSQLAction(a)
|
||||
} else {
|
||||
return nil, fmt.Errorf("invalid config for action '%s'", a.Name)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
httpFn := func(w http.ResponseWriter, r *http.Request) {
|
||||
if err := fn(w, r); err != nil {
|
||||
renderErr(w, err, nil)
|
||||
}
|
||||
}
|
||||
|
||||
return http.HandlerFunc(httpFn), nil
|
||||
}
|
||||
|
||||
func newSQLAction(a *Action) (actionFn, error) {
|
||||
fn := func(w http.ResponseWriter, r *http.Request) error {
|
||||
_, err := db.ExecContext(r.Context(), a.SQL)
|
||||
return err
|
||||
}
|
||||
|
||||
return fn, nil
|
||||
}
|
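Because `newAction` returns a plain `http.Handler`, an action can be mounted like any other handler. A minimal sketch within this package; the action name, SQL and route prefix here are illustrative assumptions, not values from this diff:

```go
// Sketch only: build a handler for a hypothetical action and mount it.
a := &Action{Name: "refresh_sales", SQL: "REFRESH MATERIALIZED VIEW sales_summary"}

h, err := newAction(a)
if err != nil {
	log.Fatalf("ERR failed to create action: %s", err)
}

mux := http.NewServeMux()
mux.Handle("/api/v1/actions/"+a.Name, h)
```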
**cmd/internal/serv/api.go** (new file, 106 lines)
@ -0,0 +1,106 @@
|
||||
package serv
|
||||
|
||||
import (
|
||||
"time"
|
||||
|
||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
||||
"github.com/dosco/super-graph/core"
|
||||
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
const (
|
||||
LogLevelNone int = iota
|
||||
LogLevelInfo
|
||||
LogLevelWarn
|
||||
LogLevelError
|
||||
LogLevelDebug
|
||||
)
|
||||
|
||||
type Core = core.Config
|
||||
|
||||
// Config struct holds the Super Graph config values
|
||||
type Config struct {
|
||||
Core `mapstructure:",squash"`
|
||||
Serv `mapstructure:",squash"`
|
||||
|
||||
cpath string
|
||||
vi *viper.Viper
|
||||
}
|
||||
|
||||
// Serv struct contains config values used by the Super Graph service
|
||||
type Serv struct {
|
||||
AppName string `mapstructure:"app_name"`
|
||||
Production bool
|
||||
LogLevel string `mapstructure:"log_level"`
|
||||
HostPort string `mapstructure:"host_port"`
|
||||
Host string
|
||||
Port string
|
||||
HTTPGZip bool `mapstructure:"http_compress"`
|
||||
WebUI bool `mapstructure:"web_ui"`
|
||||
EnableTracing bool `mapstructure:"enable_tracing"`
|
||||
WatchAndReload bool `mapstructure:"reload_on_config_change"`
|
||||
AuthFailBlock bool `mapstructure:"auth_fail_block"`
|
||||
SeedFile string `mapstructure:"seed_file"`
|
||||
MigrationsPath string `mapstructure:"migrations_path"`
|
||||
AllowedOrigins []string `mapstructure:"cors_allowed_origins"`
|
||||
DebugCORS bool `mapstructure:"cors_debug"`
|
||||
|
||||
Auth auth.Auth
|
||||
Auths []auth.Auth
|
||||
|
||||
DB struct {
|
||||
Type string
|
||||
Host string
|
||||
Port uint16
|
||||
DBName string
|
||||
User string
|
||||
Password string
|
||||
Schema string
|
||||
PoolSize int32 `mapstructure:"pool_size"`
|
||||
MaxRetries int `mapstructure:"max_retries"`
|
||||
PingTimeout time.Duration `mapstructure:"ping_timeout"`
|
||||
} `mapstructure:"database"`
|
||||
|
||||
Actions []Action
|
||||
}
|
||||
|
||||
// Auth struct contains authentication related config values used by the Super Graph service
|
||||
type Auth struct {
|
||||
Name string
|
||||
Type string
|
||||
Cookie string
|
||||
CredsInHeader bool `mapstructure:"creds_in_header"`
|
||||
|
||||
Rails struct {
|
||||
Version string
|
||||
SecretKeyBase string `mapstructure:"secret_key_base"`
|
||||
URL string
|
||||
Password string
|
||||
MaxIdle int `mapstructure:"max_idle"`
|
||||
MaxActive int `mapstructure:"max_active"`
|
||||
Salt string
|
||||
SignSalt string `mapstructure:"sign_salt"`
|
||||
AuthSalt string `mapstructure:"auth_salt"`
|
||||
}
|
||||
|
||||
JWT struct {
|
||||
Provider string
|
||||
Secret string
|
||||
PubKeyFile string `mapstructure:"public_key_file"`
|
||||
PubKeyType string `mapstructure:"public_key_type"`
|
||||
}
|
||||
|
||||
Header struct {
|
||||
Name string
|
||||
Value string
|
||||
Exists bool
|
||||
}
|
||||
}
|
||||
|
||||
// Action struct contains config values for a Super Graph service action
|
||||
type Action struct {
|
||||
Name string
|
||||
SQL string
|
||||
AuthName string `mapstructure:"auth_name"`
|
||||
}
|
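These `mapstructure` tags are what tie YAML config keys to struct fields when the file is decoded. A minimal sketch of how such a `Config` could be loaded with viper (the file path is an assumption based on the service's `./config` default; `viper.Unmarshal` honors `mapstructure` tags, including the `squash` of `Core` and `Serv`):

```go
// Sketch, not code from this diff: a YAML key like
// "reload_on_config_change" decodes into Serv.WatchAndReload, and the
// "database" block into Serv.DB, via the mapstructure tags above.
func readConfig(path string) (*Config, error) {
	vi := viper.New()
	vi.SetConfigFile(path) // e.g. "./config/dev.yml" (assumed path)

	if err := vi.ReadInConfig(); err != nil {
		return nil, err
	}

	c := &Config{vi: vi}
	if err := vi.Unmarshal(c); err != nil {
		return nil, err
	}
	return c, nil
}
```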
**cmd/internal/serv/cmd.go** (new file, 176 lines)
@ -0,0 +1,176 @@
|
||||
package serv
|
||||
|
||||
import (
|
||||
"database/sql"
|
||||
"fmt"
|
||||
_log "log"
|
||||
"os"
|
||||
"runtime"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"go.uber.org/zap"
|
||||
)
|
||||
|
||||
//go:generate rice embed-go
|
||||
|
||||
const (
|
||||
serverName = "Super Graph"
|
||||
)
|
||||
|
||||
var (
|
||||
// These variables are set using -ldflags
|
||||
version string
|
||||
gitBranch string
|
||||
lastCommitSHA string
|
||||
lastCommitTime string
|
||||
)
|
||||
|
||||
var (
|
||||
log *_log.Logger // logger
|
||||
zlog *zap.Logger // fast logger
|
||||
logLevel int // log level
|
||||
conf *Config // parsed config
|
||||
confPath string // path to the config file
|
||||
db *sql.DB // database connection pool
|
||||
secretKey [32]byte // encryption key
|
||||
)
|
||||
|
||||
func Cmd() {
|
||||
log = _log.New(os.Stdout, "", 0)
|
||||
zlog = zap.NewExample()
|
||||
|
||||
rootCmd := &cobra.Command{
|
||||
Use: "super-graph",
|
||||
Short: BuildDetails(),
|
||||
}
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "serv",
|
||||
Short: "Run the super-graph service",
|
||||
Run: cmdServ,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "db:create",
|
||||
Short: "Create database",
|
||||
Run: cmdDBCreate,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "db:drop",
|
||||
Short: "Drop database",
|
||||
Run: cmdDBDrop,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "db:seed",
|
||||
Short: "Run the seed script to seed the database",
|
||||
Run: cmdDBSeed,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "db:migrate",
|
||||
Short: "Migrate the database",
|
||||
Long: `Migrate the database to destination migration version.
|
||||
|
||||
Destination migration version can be one of the following value types:
|
||||
|
||||
Migrate to the most recent migration.
|
||||
e.g. db:migrate up
|
||||
|
||||
Rollback the most recent migration.
|
||||
e.g. db:migrate down
|
||||
|
||||
Migrate to a specific migration.
|
||||
e.g. db:migrate 42
|
||||
|
||||
Migrate forward N steps.
|
||||
e.g. db:migrate +3
|
||||
|
||||
Migrate backward N steps.
|
||||
e.g. db:migrate -2
|
||||
|
||||
Redo previous N steps (migrate backward N steps then forward N steps).
|
||||
e.g. db:migrate -+1
|
||||
`,
|
||||
Run: cmdDBMigrate,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "db:status",
|
||||
Short: "Print current migration status",
|
||||
Run: cmdDBStatus,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "db:new NAME",
|
||||
Short: "Generate a new migration",
|
||||
Long: "Generate a new migration with the next sequence number and provided name",
|
||||
Run: cmdDBNew,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "db:setup",
|
||||
Short: "Setup database",
|
||||
Long: "This command will create, migrate and seed the database",
|
||||
Run: cmdDBSetup,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "db:reset",
|
||||
Short: "Reset database",
|
||||
Long: "This command will drop, create, migrate and seed the database (won't run in production)",
|
||||
Run: cmdDBReset,
|
||||
})
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "new APP-NAME",
|
||||
Short: "Create a new application",
|
||||
Long: "Generate all the required files to start on a new Super Graph app",
|
||||
Run: cmdNew,
|
||||
})
|
||||
|
||||
// rootCmd.AddCommand(&cobra.Command{
|
||||
// Use: fmt.Sprintf("conf:dump [%s]", strings.Join(viper.SupportedExts, "|")),
|
||||
// Short: "Dump config to file",
|
||||
// Long: "Dump current config to a file in the selected format",
|
||||
// Run: cmdConfDump,
|
||||
// })
|
||||
|
||||
rootCmd.AddCommand(&cobra.Command{
|
||||
Use: "version",
|
||||
Short: "Super Graph binary version information",
|
||||
Run: cmdVersion,
|
||||
})
|
||||
|
||||
rootCmd.PersistentFlags().StringVar(&confPath,
|
||||
"path", "./config", "path to config files")
|
||||
|
||||
if err := rootCmd.Execute(); err != nil {
|
||||
log.Fatalf("ERR %s", err)
|
||||
}
|
||||
}
|
||||
|
||||
func cmdVersion(cmd *cobra.Command, args []string) {
|
||||
fmt.Printf("%s\n", BuildDetails())
|
||||
}
|
||||
|
||||
func BuildDetails() string {
|
||||
return fmt.Sprintf(`
|
||||
Super Graph %v
|
||||
For documentation, visit https://supergraph.dev
|
||||
|
||||
Commit SHA-1 : %v
|
||||
Commit timestamp : %v
|
||||
Branch : %v
|
||||
Go version : %v
|
||||
|
||||
Licensed under the Apache Public License 2.0
|
||||
Copyright 2020, Vikram Rangnekar.
|
||||
`,
|
||||
version,
|
||||
lastCommitSHA,
|
||||
lastCommitTime,
|
||||
gitBranch,
|
||||
runtime.Version())
|
||||
}
|
**cmd/internal/serv/cmd_conf.go** (new file, 21 lines)
@ -0,0 +1,21 @@
|
||||
package serv
|
||||
|
||||
// func cmdConfDump(cmd *cobra.Command, args []string) {
|
||||
// if len(args) != 1 {
|
||||
// cmd.Help() //nolint: errcheck
|
||||
// os.Exit(1)
|
||||
// }
|
||||
|
||||
// fname := fmt.Sprintf("%s.%s", config.GetConfigName(), args[0])
|
||||
|
||||
// conf, err := initConf()
|
||||
// if err != nil {
|
||||
// log.Fatalf("ERR failed to read config: %s", err)
|
||||
// }
|
||||
|
||||
// if err := conf.WriteConfigAs(fname); err != nil {
|
||||
// log.Fatalf("ERR failed to write config: %s", err)
|
||||
// }
|
||||
|
||||
// log.Printf("INF config dumped to ./%s", fname)
|
||||
// }
|
@@ -1,7 +1,6 @@
 package serv

 import (
 	"context"
 	"fmt"
 	"os"
 	"path"
@@ -10,23 +9,10 @@ import (
 	"strings"
 	"time"

-	"github.com/dosco/super-graph/migrate"
+	"github.com/dosco/super-graph/cmd/internal/serv/internal/migrate"
 	"github.com/spf13/cobra"
 )

-var sampleMigration = `-- This is a sample migration.
-
-create table users(
-	id serial primary key,
-	fullname varchar not null,
-	email varchar not null
-);
-
----- create above / drop below ----
-
-drop table users;
-`
-
 var newMigrationText = `-- Write your migrate up statements here

 ---- create above / drop below ----
@@ -36,10 +22,11 @@ var newMigrationText = `-- Write your migrate up statements here
 `

 func cmdDBSetup(cmd *cobra.Command, args []string) {
+	initConfOnce()
 	cmdDBCreate(cmd, []string{})
 	cmdDBMigrate(cmd, []string{"up"})

-	sfile := path.Join(confPath, conf.SeedFile)
+	sfile := path.Join(conf.cpath, conf.SeedFile)
 	_, err := os.Stat(sfile)

 	if err == nil {
@@ -47,140 +34,127 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
 		return
 	}

-	if os.IsNotExist(err) == false {
-		logger.Fatal().Err(err).Msgf("unable to check if '%s' exists", sfile)
+	if !os.IsNotExist(err) {
+		log.Fatalf("ERR unable to check if '%s' exists: %s", sfile, err)
 	}

-	logger.Warn().Msgf("failed to read seed file '%s'", sfile)
+	log.Printf("WRN failed to read seed file '%s'", sfile)
 }

 func cmdDBReset(cmd *cobra.Command, args []string) {
+	initConfOnce()
+
 	if conf.Production {
 		log.Fatalln("ERR db:reset does not work in production")
 	}

 	cmdDBDrop(cmd, []string{})
 	cmdDBSetup(cmd, []string{})
 }

 func cmdDBCreate(cmd *cobra.Command, args []string) {
-	var err error
+	initConfOnce()

-	if conf, err = initConf(); err != nil {
-		logger.Fatal().Err(err).Msg("failed to read config")
-	}
-
-	ctx := context.Background()
-
-	conn, err := initDB(conf, false)
+	db, err := initDB(conf)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to connect to database")
+		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
-	defer conn.Close(ctx)
+	defer db.Close()

-	sql := fmt.Sprintf("CREATE DATABASE %s", conf.DB.DBName)
+	sql := fmt.Sprintf(`CREATE DATABASE "%s"`, conf.DB.DBName)

-	_, err = conn.Exec(ctx, sql)
+	_, err = db.Exec(sql)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to create database")
+		log.Fatalf("ERR failed to create database: %s", err)
 	}

-	logger.Info().Msgf("created database '%s'", conf.DB.DBName)
+	log.Printf("INF created database '%s'", conf.DB.DBName)
 }

 func cmdDBDrop(cmd *cobra.Command, args []string) {
-	var err error
+	initConfOnce()

-	if conf, err = initConf(); err != nil {
-		logger.Fatal().Err(err).Msg("failed to read config")
-	}
-
-	ctx := context.Background()
-
-	conn, err := initDB(conf, false)
+	db, err := initDB(conf)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to connect to database")
+		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
-	defer conn.Close(ctx)
+	defer db.Close()

-	sql := fmt.Sprintf(`DROP DATABASE IF EXISTS %s`, conf.DB.DBName)
+	sql := fmt.Sprintf(`DROP DATABASE IF EXISTS "%s"`, conf.DB.DBName)

-	_, err = conn.Exec(ctx, sql)
+	_, err = db.Exec(sql)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to create database")
+		log.Fatalf("ERR failed to drop database: %s", err)
 	}

-	logger.Info().Msgf("dropped database '%s'", conf.DB.DBName)
+	log.Printf("INF dropped database '%s'", conf.DB.DBName)
 }

 func cmdDBNew(cmd *cobra.Command, args []string) {
 	if len(args) != 1 {
-		cmd.Help()
+		cmd.Help() //nolint: errcheck
 		os.Exit(1)
 	}

-	var err error
-
-	if conf, err = initConf(); err != nil {
-		logger.Fatal().Err(err).Msg("failed to read config")
-	}
-
+	initConfOnce()
 	name := args[0]

 	m, err := migrate.FindMigrations(conf.MigrationsPath)
 	if err != nil {
-		fmt.Fprintf(os.Stderr, "Error loading migrations:\n  %v\n", err)
-		os.Exit(1)
+		log.Fatalf("ERR error loading migrations: %s", err)
 	}

-	mname := fmt.Sprintf("%03d_%s.sql", (len(m) + 1), name)
+	mname := fmt.Sprintf("%d_%s.sql", len(m), name)

 	// Write new migration
 	mpath := filepath.Join(conf.MigrationsPath, mname)
 	mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
 	if err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		os.Exit(1)
+		log.Fatalf("ERR %s", err)
 	}
 	defer mfile.Close()

 	_, err = mfile.WriteString(newMigrationText)
 	if err != nil {
-		fmt.Fprintln(os.Stderr, err)
-		os.Exit(1)
+		log.Fatalf("ERR %s", err)
 	}
-	logger.Info().Msgf("created migration '%s'", mpath)
+
+	log.Printf("INF created migration '%s'", mpath)
 }

 func cmdDBMigrate(cmd *cobra.Command, args []string) {
-	var err error
-
 	if len(args) == 0 {
-		cmd.Help()
+		cmd.Help() //nolint: errcheck
 		os.Exit(1)
 	}

+	initConfOnce()
 	dest := args[0]

-	if conf, err = initConf(); err != nil {
-		logger.Fatal().Err(err).Msg("failed to read config")
-	}
-
-	conn, err := initDB(conf, true)
+	conn, err := initDB(conf)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to connect to database")
+		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
-	defer conn.Close(context.Background())
+	defer conn.Close()

 	m, err := migrate.NewMigrator(conn, "schema_version")
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to initialize migrator")
+		log.Fatalf("ERR failed to initialize migrator: %s", err)
 	}

 	m.Data = getMigrationVars()

-	err = m.LoadMigrations(conf.MigrationsPath)
+	err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to load migrations")
+		log.Fatalf("ERR failed to load migrations: %s", err)
 	}

 	if len(m.Migrations) == 0 {
-		logger.Fatal().Msg("No migrations found")
+		log.Fatalf("ERR no migrations found")
 	}

 	m.OnStart = func(sequence int32, name, direction, sql string) {
-		logger.Info().Msgf("%s executing %s %s\n%s\n\n",
+		log.Printf("INF %s executing %s %s\n%s\n\n",
 			time.Now().Format("2006-01-02 15:04:05"), name, direction, sql)
 	}

@@ -195,7 +169,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
 		var n int64
 		n, err = strconv.ParseInt(d, 10, 32)
 		if err != nil {
-			logger.Fatal().Err(err).Msg("invalid destination")
+			log.Fatalf("ERR invalid destination: %s", err)
 		}
 		return int32(n)
 	}
@@ -219,69 +193,61 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
 		err = m.MigrateTo(currentVersion + mustParseDestination(dest[1:]))

 	} else {
-		cmd.Help()
+		cmd.Help() //nolint: errcheck
 		os.Exit(1)
 	}

 	if err != nil {
-		logger.Info().Err(err).Send()
+		// logger.Info().Err(err).Send()
+		log.Fatalf("ERR %s", err)

 		// if err, ok := err.(m.MigrationPgError); ok {
 		// 	if err.Detail != "" {
-		// 		logger.Info().Err(err).Msg(err.Detail)
+		// 		log.Fatalf("ERR %s", err.Detail)
 		// 	}

 		// 	if err.Position != 0 {
 		// 		ele, err := ExtractErrorLine(err.Sql, int(err.Position))
 		// 		if err != nil {
-		// 			logger.Fatal().Err(err).Send()
+		// 			log.Fatalf("ERR %s", err)
 		// 		}

-		// 		prefix := fmt.Sprintf()
-		// 		logger.Info().Msgf("line %d, %s%s", ele.LineNum, prefix, ele.Text)
+		// 		log.Fatalf("INF line %d, %s%s", ele.LineNum, ele.Text)
 		// 	}

 		// }
-		// os.Exit(1)
 	}

-	logger.Info().Msg("migration done")
-
+	log.Println("INF migration done")
 }

 func cmdDBStatus(cmd *cobra.Command, args []string) {
-	var err error
+	initConfOnce()

-	if conf, err = initConf(); err != nil {
-		logger.Fatal().Err(err).Msg("failed to read config")
-	}
-
-	conn, err := initDB(conf, true)
+	db, err := initDB(conf)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to connect to database")
+		log.Fatalf("ERR failed to connect to database: %s", err)
 	}
-	defer conn.Close(context.Background())
+	defer db.Close()

-	m, err := migrate.NewMigrator(conn, "schema_version")
+	m, err := migrate.NewMigrator(db, "schema_version")
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to initialize migrator")
+		log.Fatalf("ERR failed to initialize migrator: %s", err)
 	}

 	m.Data = getMigrationVars()

 	err = m.LoadMigrations(conf.MigrationsPath)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to load migrations")
+		log.Fatalf("ERR failed to load migrations: %s", err)
 	}

 	if len(m.Migrations) == 0 {
-		logger.Fatal().Msg("no migrations found")
+		log.Fatalf("ERR no migrations found")
 	}

 	mver, err := m.GetCurrentVersion()
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to retrieve migration")
+		log.Fatalf("ERR failed to retrieve migration: %s", err)
 	}

 	var status string
@@ -292,10 +258,8 @@ func cmdDBStatus(cmd *cobra.Command, args []string) {
 		status = "migration(s) pending"
 	}

-	fmt.Println("status:  ", status)
-	fmt.Printf("version: %d of %d\n", mver, len(m.Migrations))
-	fmt.Println("host:    ", conf.DB.Host)
-	fmt.Println("database:", conf.DB.DBName)
+	log.Printf("INF status: %s, version: %d of %d, host: %s, database: %s",
+		status, mver, len(m.Migrations), conf.DB.Host, conf.DB.DBName)
 }

 type ErrorLineExtract struct {
@@ -338,3 +302,16 @@ func getMigrationVars() map[string]interface{} {
 		"env": strings.ToLower(os.Getenv("GO_ENV")),
 	}
 }
+
+func initConfOnce() {
+	var err error
+
+	if conf != nil {
+		return
+	}
+
+	conf, err = initConf()
+	if err != nil {
+		log.Fatalf("ERR failed to read config: %s", err)
+	}
+}
@@ -16,7 +16,7 @@ import (

 func cmdNew(cmd *cobra.Command, args []string) {
 	if len(args) != 1 {
-		cmd.Help()
+		cmd.Help() //nolint: errcheck
 		os.Exit(1)
 	}

@@ -90,15 +90,15 @@ func cmdNew(cmd *cobra.Command, args []string) {
 		return os.Mkdir(p, os.ModePerm)
 	})

-	ifNotExists(path.Join(appMigrationsPath, "100_init.sql"), func(p string) error {
-		if v, err := tmpl.get("100_init.sql"); err == nil {
+	ifNotExists(path.Join(appMigrationsPath, "0_init.sql"), func(p string) error {
+		if v, err := tmpl.get("0_init.sql"); err == nil {
 			return ioutil.WriteFile(p, v, 0644)
 		} else {
 			return err
 		}
 	})

-	logger.Info().Msgf("app '%s' initialized", name)
+	log.Printf("INF app '%s' initialized", name)
 }

 type Templ struct {
@@ -107,7 +107,7 @@ type Templ struct {
 }

 func newTempl(data map[string]string) *Templ {
-	return &Templ{rice.MustFindBox("../tmpl"), data}
+	return &Templ{rice.MustFindBox("./tmpl"), data}
 }

 func (t *Templ) get(name string) ([]byte, error) {
@@ -115,13 +115,17 @@ func (t *Templ) get(name string) ([]byte, error) {
 	b := bytes.Buffer{}
 	tmpl := fasttemplate.New(v, "{%", "%}")

-	tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
+	_, err := tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
 		if val, ok := t.data[strings.TrimSpace(tag)]; ok {
 			return w.Write([]byte(val))
 		}
 		return 0, fmt.Errorf("unknown template variable '%s'", tag)
 	})

+	if err != nil {
+		return nil, err
+	}
+
 	return b.Bytes(), nil
 }

@@ -129,18 +133,18 @@ func ifNotExists(filePath string, doFn func(string) error) {
 	_, err := os.Stat(filePath)

 	if err == nil {
-		logger.Info().Err(err).Msgf("create skipped '%s' exists", filePath)
+		log.Printf("ERR create skipped '%s' exists", filePath)
 		return
 	}

-	if os.IsNotExist(err) == false {
-		logger.Fatal().Err(err).Msgf("unable to check if '%s' exists", filePath)
+	if !os.IsNotExist(err) {
+		log.Fatalf("ERR unable to check if '%s' exists", filePath)
 	}

 	err = doFn(filePath)
 	if err != nil {
-		logger.Fatal().Err(err).Msgf("unable to create '%s'", filePath)
+		log.Fatalf("ERR unable to create '%s'", filePath)
 	}

-	logger.Info().Msgf("created '%s'", filePath)
+	log.Printf("INF created '%s'", filePath)
 }
@@ -2,15 +2,20 @@ package serv

 import (
 	"context"
 	"encoding/csv"
 	"encoding/json"
 	"fmt"
 	"io"
 	"io/ioutil"
 	"math/rand"
 	"os"
 	"path"
 	"strconv"
 	"strings"

 	"github.com/brianvoe/gofakeit"
 	"github.com/dop251/goja"
 	"github.com/dosco/super-graph/core"
 	"github.com/spf13/cobra"
 )

@@ -18,30 +23,38 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
 	var err error

 	if conf, err = initConf(); err != nil {
-		logger.Fatal().Err(err).Msg("failed to read config")
+		log.Fatalf("ERR failed to read config: %s", err)
 	}

 	conf.Production = false

-	db, err = initDBPool(conf)
+	db, err = initDB(conf)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to connect to database")
+		log.Fatalf("ERR failed to connect to database: %s", err)
 	}

-	initCompiler()
-
-	sfile := path.Join(confPath, conf.SeedFile)
-
-	b, err := ioutil.ReadFile(path.Join(confPath, conf.SeedFile))
+	sfile := path.Join(conf.cpath, conf.SeedFile)
+
+	b, err := ioutil.ReadFile(sfile)
 	if err != nil {
-		logger.Fatal().Err(err).Msgf("failed to read seed file '%s'", sfile)
+		log.Fatalf("ERR failed to read seed file %s: %s", sfile, err)
 	}

+	sg, err = core.NewSuperGraph(&conf.Core, db)
+	if err != nil {
+		log.Fatalf("ERR failed to initialize Super Graph: %s", err)
+	}
+
+	graphQLFn := func(query string, data interface{}, opt map[string]string) map[string]interface{} {
+		return graphQLFunc(sg, query, data, opt)
+	}
+
 	vm := goja.New()
-	vm.Set("graphql", graphQLFunc)
+	vm.Set("graphql", graphQLFn)
 	//vm.Set("import_csv", importCSV)

 	console := vm.NewObject()
-	console.Set("log", logFunc)
+	console.Set("log", logFunc) //nolint: errcheck
 	vm.Set("console", console)

 	fake := vm.NewObject()
@@ -50,39 +63,148 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {

 	_, err = vm.RunScript("seed.js", string(b))
 	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to execute script")
+		log.Fatalf("ERR failed to execute script: %s", err)
 	}

-	logger.Info().Msg("seed script done")
+	log.Println("INF seed script done")
 }

-//func runFunc(call goja.FunctionCall) {
-func graphQLFunc(query string, data interface{}) map[string]interface{} {
-	b, err := json.Marshal(data)
-	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to json serialize")
-	}
+// func runFunc(call goja.FunctionCall) {
+func graphQLFunc(sg *core.SuperGraph, query string, data interface{}, opt map[string]string) map[string]interface{} {
+	ct := context.Background()

-	c := &coreContext{Context: context.Background()}
-	c.req.Query = query
-	c.req.Vars = b
-	c.req.role = "user"
+	if v, ok := opt["user_id"]; ok && len(v) != 0 {
+		ct = context.WithValue(ct, core.UserIDKey, v)
+	}

-	res, err := c.execQuery()
+	// var role string
+
+	// if v, ok := opt["role"]; ok && len(v) != 0 {
+	// 	role = v
+	// } else {
+	// 	role = "user"
+	// }
+
+	var vars []byte
+	var err error
+
+	if vars, err = json.Marshal(data); err != nil {
+		log.Fatalf("ERR %s", err)
+	}
+
+	res, err := sg.GraphQL(ct, query, vars)
 	if err != nil {
-		logger.Fatal().Err(err).Msg("graphql query failed")
+		log.Fatalf("ERR %s", err)
 	}

 	val := make(map[string]interface{})

-	err = json.Unmarshal(res, &val)
-	if err != nil {
-		logger.Fatal().Err(err).Msg("failed to deserialize json")
+	if err = json.Unmarshal(res.Data, &val); err != nil {
+		log.Fatalf("ERR %s", err)
 	}

 	return val
 }

+type csvSource struct {
+	rows [][]string
+	i    int
+}
+
+func NewCSVSource(filename string) (*csvSource, error) {
+	f, err := os.Open(filename)
+	if err != nil {
+		return nil, err
+	}
+	defer f.Close()
+
+	r := csv.NewReader(f)
+	rows, err := r.ReadAll()
+	if err != nil {
+		return nil, err
+	}
+
+	return &csvSource{rows: rows}, nil
+}
+
+func (c *csvSource) Next() bool {
+	return c.i < len(c.rows)
+}
+
+func (c *csvSource) Values() ([]interface{}, error) {
+	var vals []interface{}
+	var err error
+
+	for _, v := range c.rows[c.i] {
+		switch {
+		case len(v) == 0:
+			vals = append(vals, "")
+		case isDigit(v):
+			var n int
+			if n, err = strconv.Atoi(v); err == nil {
+				vals = append(vals, n)
+			}
+		case strings.EqualFold(v, "true") || strings.EqualFold(v, "false"):
+			var b bool
+			if b, err = strconv.ParseBool(v); err == nil {
+				vals = append(vals, b)
+			}
+		default:
+			vals = append(vals, v)
+		}
+
+		if err != nil {
+			return nil, fmt.Errorf("%w (line no %d)", err, c.i)
+		}
+	}
+	c.i++
+
+	return vals, nil
+}
+
+func isDigit(v string) bool {
+	for i := range v {
+		if v[i] < '0' || v[i] > '9' {
+			return false
+		}
+	}
+	return true
+}
+
+func (c *csvSource) Err() error {
+	return nil
+}
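csvSource implements the Next/Values/Err iteration shape used by COPY-style row sources. A small sketch of driving it by hand from within this package; the file name is illustrative, and Values converts "42" to an int and "true"/"false" to bools as shown above:

	// Iterate a CSV file row by row. "people.csv" is a hypothetical path.
	src, err := NewCSVSource("people.csv")
	if err != nil {
		log.Fatalf("ERR %s", err)
	}

	for src.Next() {
		vals, err := src.Values() // typed values: int, bool or string
		if err != nil {
			log.Fatalf("ERR %s", err)
		}
		fmt.Println(vals)
	}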
 // func importCSV(table, filename string) int64 {
 // 	if filename[0] != '/' {
 // 		filename = path.Join(conf.ConfigPathUsed(), filename)
 // 	}

 // 	s, err := NewCSVSource(filename)
 // 	if err != nil {
 // 		log.Fatalf("ERR %s", err)
 // 	}

 // 	var cols []string
 // 	colval, _ := s.Values()

 // 	for _, c := range colval {
 // 		cols = append(cols, c.(string))
 // 	}

 // 	n, err := db.Exec(fmt.Sprintf("COPY %s FROM STDIN WITH "),
 // 		cols,
 // 		s)

 // 	if err != nil {
 // 		err = fmt.Errorf("%w (line no %d)", err, s.i)
 // 		log.Fatalf("ERR %s", err)
 // 	}

 // 	return n
 // }

+//nolint: errcheck
 func logFunc(args ...interface{}) {
 	for _, arg := range args {
 		if _, ok := arg.(map[string]interface{}); ok {
@@ -99,6 +221,18 @@ func logFunc(args ...interface{}) {
 	}
 }

+func avatarURL(size int) string {
+	if size == 0 {
+		size = 200
+	}
+	return fmt.Sprintf("https://i.pravatar.cc/%d?%d", size, rand.Intn(5000))
+}
+
+func imageURL(width int, height int) string {
+	return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
+}
+
+//nolint: errcheck
 func setFakeFuncs(f *goja.Object) {
 	gofakeit.Seed(0)

@@ -156,10 +290,9 @@ func setFakeFuncs(f *goja.Object) {
 	f.Set("transmission_gear_type", gofakeit.TransmissionGearType)

 	// Text
-
 	f.Set("word", gofakeit.Word)
 	f.Set("sentence", gofakeit.Sentence)
-	f.Set("paragrph", gofakeit.Paragraph)
+	f.Set("paragraph", gofakeit.Paragraph)
 	f.Set("question", gofakeit.Question)
 	f.Set("quote", gofakeit.Quote)

@@ -176,7 +309,8 @@ func setFakeFuncs(f *goja.Object) {

 	// Internet
 	f.Set("url", gofakeit.URL)
-	f.Set("image_url", gofakeit.ImageURL)
+	f.Set("image_url", imageURL)
+	f.Set("avatar_url", avatarURL)
 	f.Set("domain_name", gofakeit.DomainName)
 	f.Set("domain_suffix", gofakeit.DomainSuffix)
 	f.Set("ipv4_address", gofakeit.IPv4Address)
37
cmd/internal/serv/cmd_serv.go
Normal file
@@ -0,0 +1,37 @@
package serv

import (
	"github.com/dosco/super-graph/core"
	"github.com/spf13/cobra"
)

var (
	sg *core.SuperGraph
)

func cmdServ(cmd *cobra.Command, args []string) {
	var err error

	conf, err = initConf()
	if err != nil {
		fatalInProd(err, "failed to read config")
	}

	initWatcher()

	db, err = initDB(conf)
	if err != nil {
		fatalInProd(err, "failed to connect to database")
	}

	// if conf != nil && db != nil {
	// 	initResolvers()
	// }

	sg, err = core.NewSuperGraph(&conf.Core, db)
	if err != nil {
		fatalInProd(err, "failed to initialize Super Graph")
	}

	startHTTP()
}
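cmdServ wires config, watcher, database and the core engine before serving HTTP, and the same core API can be embedded directly. A minimal standalone sketch, assuming a reachable Postgres; the connection string and query are illustrative, and whether NewSuperGraph accepts a nil config (rather than the &conf.Core the server passes) is an assumption:

package main

import (
	"context"
	"database/sql"
	"fmt"
	"log"

	"github.com/dosco/super-graph/core"
	_ "github.com/jackc/pgx/v4/stdlib"
)

func main() {
	// Hypothetical connection string; adjust for your environment.
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	// The server passes &conf.Core here; nil-config support is an assumption.
	sg, err := core.NewSuperGraph(nil, db)
	if err != nil {
		log.Fatal(err)
	}

	res, err := sg.GraphQL(context.Background(),
		`query { users { id } }`, nil)
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println(string(res.Data))
}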
115
cmd/internal/serv/config.go
Normal file
@@ -0,0 +1,115 @@
package serv

import (
	"fmt"
	"os"
	"path"
	"strings"

	"github.com/spf13/viper"
)

// ReadInConfig function reads in the config file for the environment specified in the GO_ENV
// environment variable. This is the best way to create a new Super Graph config.
func ReadInConfig(configFile string) (*Config, error) {
	cpath := path.Dir(configFile)
	cfile := path.Base(configFile)
	vi := newViper(cpath, cfile)

	if err := vi.ReadInConfig(); err != nil {
		return nil, err
	}

	inherits := vi.GetString("inherits")

	if len(inherits) != 0 {
		vi = newViper(cpath, inherits)

		if err := vi.ReadInConfig(); err != nil {
			return nil, err
		}

		if vi.IsSet("inherits") {
			return nil, fmt.Errorf("inherited config (%s) cannot itself inherit (%s)",
				inherits,
				vi.GetString("inherits"))
		}

		vi.SetConfigName(cfile)

		if err := vi.MergeInConfig(); err != nil {
			return nil, err
		}
	}

	c := &Config{cpath: cpath, vi: vi}

	if err := vi.Unmarshal(&c); err != nil {
		return nil, fmt.Errorf("failed to decode config, %v", err)
	}

	if len(c.Core.AllowListFile) == 0 {
		c.Core.AllowListFile = path.Join(cpath, "allow.list")
	}

	return c, nil
}

func newViper(configPath, configFile string) *viper.Viper {
	vi := viper.New()

	vi.SetEnvPrefix("SG")
	vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
	vi.AutomaticEnv()

	vi.AddConfigPath(configPath)
	vi.SetConfigName(configFile)
	vi.AddConfigPath("./config")

	vi.SetDefault("host_port", "0.0.0.0:8080")
	vi.SetDefault("web_ui", false)
	vi.SetDefault("enable_tracing", false)
	vi.SetDefault("auth_fail_block", "always")
	vi.SetDefault("seed_file", "seed.js")

	vi.SetDefault("database.type", "postgres")
	vi.SetDefault("database.host", "localhost")
	vi.SetDefault("database.port", 5432)
	vi.SetDefault("database.user", "postgres")
	vi.SetDefault("database.schema", "public")

	vi.SetDefault("env", "development")

	vi.BindEnv("env", "GO_ENV") //nolint: errcheck
	vi.BindEnv("host", "HOST")  //nolint: errcheck
	vi.BindEnv("port", "PORT")  //nolint: errcheck

	vi.SetDefault("auth.rails.max_idle", 80)
	vi.SetDefault("auth.rails.max_active", 12000)

	return vi
}

func GetConfigName() string {
	if len(os.Getenv("GO_ENV")) == 0 {
		return "dev"
	}

	ge := strings.ToLower(os.Getenv("GO_ENV"))

	switch {
	case strings.HasPrefix(ge, "pro"):
		return "prod"

	case strings.HasPrefix(ge, "sta"):
		return "stage"

	case strings.HasPrefix(ge, "tes"):
		return "test"

	case strings.HasPrefix(ge, "dev"):
		return "dev"
	}

	return ge
}
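ReadInConfig supports one level of inheritance via the `inherits` key (a dev.yml can inherit a base file, but the base cannot itself inherit). A minimal sketch of loading the environment-specific file using the helpers above, assuming the standard ./config layout:

	// Load ./config/dev.yml (or prod.yml etc., per GO_ENV), merging
	// the inherited base file when the config declares `inherits`.
	conf, err := ReadInConfig(path.Join("./config", GetConfigName()))
	if err != nil {
		log.Fatalf("ERR failed to read config: %s", err)
	}
	fmt.Println(conf.Core.AllowListFile) // defaults to <config path>/allow.list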
7
cmd/internal/serv/core.go
Normal file
@@ -0,0 +1,7 @@
package serv

// func (c *coreContext) handleReq(w io.Writer, req *http.Request) error {

// 	return nil

// }
@@ -4,7 +4,7 @@ package serv

 func Fuzz(data []byte) int {
 	gql := string(data)
-	isMutation(gql)
+	QueryName(gql)
 	gqlHash(gql, nil, "")

 	return 1
@@ -10,7 +10,6 @@ func TestFuzzCrashers(t *testing.T) {
 	}

 	for _, f := range crashers {
-		isMutation(f)
 		gqlHash(f, nil, "")
 	}
 }
25
cmd/internal/serv/health.go
Normal file
@@ -0,0 +1,25 @@
package serv

import (
	"context"
	"net/http"
)

var healthyResponse = []byte("All's Well")

func health(w http.ResponseWriter, _ *http.Request) {
	ct, cancel := context.WithTimeout(context.Background(), conf.DB.PingTimeout)
	defer cancel()

	if err := db.PingContext(ct); err != nil {
		log.Printf("ERR error pinging database: %s", err)
		w.WriteHeader(http.StatusInternalServerError)
		return
	}

	if _, err := w.Write(healthyResponse); err != nil {
		log.Printf("ERR error writing healthy response: %s", err)
		w.WriteHeader(http.StatusInternalServerError)
		return
	}
}
124
cmd/internal/serv/http.go
Normal file
@@ -0,0 +1,124 @@
package serv

import (
	"encoding/json"
	"errors"
	"io"
	"io/ioutil"
	"net/http"
	"strings"

	"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
	"github.com/dosco/super-graph/core"
	"github.com/rs/cors"
	"go.uber.org/zap"
)

const (
	maxReadBytes       = 100000 // 100Kb
	introspectionQuery = "IntrospectionQuery"
)

var (
	errUnauthorized = errors.New("not authorized")
)

type gqlReq struct {
	OpName string          `json:"operationName"`
	Query  string          `json:"query"`
	Vars   json.RawMessage `json:"variables"`
}

type errorResp struct {
	Error error `json:"error"`
}

func apiV1Handler() http.Handler {
	h, err := auth.WithAuth(http.HandlerFunc(apiV1), &conf.Auth)
	if err != nil {
		log.Fatalf("ERR %s", err)
	}

	if len(conf.AllowedOrigins) != 0 {
		c := cors.New(cors.Options{
			AllowedOrigins:   conf.AllowedOrigins,
			AllowCredentials: true,
			Debug:            conf.DebugCORS,
		})
		h = c.Handler(h)
	}

	return h
}

func apiV1(w http.ResponseWriter, r *http.Request) {
	ct := r.Context()

	//nolint: errcheck
	if conf.AuthFailBlock && !auth.IsAuth(ct) {
		renderErr(w, errUnauthorized, nil)
		return
	}

	b, err := ioutil.ReadAll(io.LimitReader(r.Body, maxReadBytes))
	if err != nil {
		renderErr(w, err, nil)
		return
	}
	defer r.Body.Close()

	req := gqlReq{}

	err = json.Unmarshal(b, &req)
	if err != nil {
		renderErr(w, err, nil)
		return
	}

	if strings.EqualFold(req.OpName, introspectionQuery) {
		introspect(w)
		return
	}

	res, err := sg.GraphQL(ct, req.Query, req.Vars)

	if logLevel >= LogLevelDebug {
		log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
	}

	if err != nil {
		renderErr(w, err, res)
		return
	}

	json.NewEncoder(w).Encode(res)

	if logLevel >= LogLevelInfo {
		zlog.Info("success",
			zap.String("op", res.Operation()),
			zap.String("name", res.QueryName()),
			zap.String("role", res.Role()),
		)
	}
}

//nolint: errcheck
func renderErr(w http.ResponseWriter, err error, res *core.Result) {
	if err == errUnauthorized {
		w.WriteHeader(http.StatusUnauthorized)
	}

	json.NewEncoder(w).Encode(&errorResp{err})

	if logLevel >= LogLevelError {
		if res != nil {
			zlog.Error(err.Error(),
				zap.String("op", res.Operation()),
				zap.String("name", res.QueryName()),
				zap.String("role", res.Role()),
			)
		} else {
			zlog.Error(err.Error())
		}
	}
}
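apiV1 accepts the standard GraphQL-over-HTTP body shape declared by gqlReq (operationName, query, variables). A matching client-side sketch; the host, operation name and query are illustrative, and the /api/v1/graphql route comes from the route map in serv.go below:

package main

import (
	"bytes"
	"encoding/json"
	"log"
	"net/http"
)

func main() {
	// Any operationName other than "IntrospectionQuery" reaches the engine.
	body, _ := json.Marshal(map[string]interface{}{
		"operationName": "getUsers",
		"query":         `query getUsers { users { id } }`,
		"variables":     map[string]interface{}{},
	})

	res, err := http.Post("http://localhost:8080/api/v1/graphql",
		"application/json", bytes.NewReader(body))
	if err != nil {
		log.Fatal(err)
	}
	defer res.Body.Close()
}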
154
cmd/internal/serv/init.go
Normal file
@@ -0,0 +1,154 @@
package serv

import (
	"database/sql"
	"path"
	"time"

	"github.com/jackc/pgx/v4"
	"github.com/jackc/pgx/v4/stdlib"
	//_ "github.com/jackc/pgx/v4/stdlib"
)

func initConf() (*Config, error) {
	c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
	if err != nil {
		return nil, err
	}

	switch c.LogLevel {
	case "debug":
		logLevel = LogLevelDebug
	case "error":
		logLevel = LogLevelError
	case "warn":
		logLevel = LogLevelWarn
	case "info":
		logLevel = LogLevelInfo
	default:
		logLevel = LogLevelNone
	}

	// Auths: validate and sanitize
	am := make(map[string]struct{})

	for i := 0; i < len(c.Auths); i++ {
		a := &c.Auths[i]
		a.Name = sanitize(a.Name)

		if _, ok := am[a.Name]; ok {
			c.Auths = append(c.Auths[:i], c.Auths[i+1:]...)
			log.Printf("WRN duplicate auth found: %s", a.Name)
		}
		am[a.Name] = struct{}{}
	}

	// Actions: validate and sanitize
	axm := make(map[string]struct{})

	for i := 0; i < len(c.Actions); i++ {
		a := &c.Actions[i]
		a.Name = sanitize(a.Name)
		a.AuthName = sanitize(a.AuthName)

		if _, ok := axm[a.Name]; ok {
			c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
			log.Printf("WRN duplicate action found: %s", a.Name)
		}

		if _, ok := am[a.AuthName]; !ok {
			c.Actions = append(c.Actions[:i], c.Actions[i+1:]...)
			log.Printf("WRN invalid auth_name '%s' for auth: %s", a.AuthName, a.Name)
		}
		axm[a.Name] = struct{}{}
	}

	var anonFound bool

	for _, r := range c.Roles {
		if sanitize(r.Name) == "anon" {
			anonFound = true
		}
	}

	if !anonFound {
		log.Printf("WRN unauthenticated requests will be blocked. no role 'anon' defined")
		c.AuthFailBlock = false
	}

	return c, nil
}

func initDB(c *Config) (*sql.DB, error) {
	var db *sql.DB
	var err error

	// cs := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s",
	// 	c.DB.Host, c.DB.Port,
	// 	c.DB.User, c.DB.Password,
	// 	c.DB.DBName)

	// fmt.Println(">>", cs)

	// for i := 1; i < 10; i++ {
	// 	db, err = sql.Open("pgx", cs)
	// 	if err == nil {
	// 		break
	// 	}
	// 	time.Sleep(time.Duration(i*100) * time.Millisecond)
	// }

	// if err != nil {
	// 	return nil, err
	// }

	// return db, nil

	config, _ := pgx.ParseConfig("")
	config.Host = c.DB.Host
	config.Port = c.DB.Port
	config.Database = c.DB.DBName
	config.User = c.DB.User
	config.Password = c.DB.Password
	config.RuntimeParams = map[string]string{
		"application_name": c.AppName,
		"search_path":      c.DB.Schema,
	}

	// switch c.LogLevel {
	// case "debug":
	// 	config.LogLevel = pgx.LogLevelDebug
	// case "info":
	// 	config.LogLevel = pgx.LogLevelInfo
	// case "warn":
	// 	config.LogLevel = pgx.LogLevelWarn
	// case "error":
	// 	config.LogLevel = pgx.LogLevelError
	// default:
	// 	config.LogLevel = pgx.LogLevelNone
	// }

	//config.Logger = NewSQLLogger(logger)

	// if c.DB.MaxRetries != 0 {
	// 	opt.MaxRetries = c.DB.MaxRetries
	// }

	// if c.DB.PoolSize != 0 {
	// 	config.MaxConns = conf.DB.PoolSize
	// }

	for i := 1; i < 10; i++ {
		db = stdlib.OpenDB(*config)
		if db != nil { // fixed: stop retrying once we have a handle (original tested == nil)
			break
		}
		time.Sleep(time.Duration(i*100) * time.Millisecond)
	}

	if err != nil {
		return nil, err
	}

	return db, nil
}
127
cmd/internal/serv/internal/auth/auth.go
Normal file
@@ -0,0 +1,127 @@
package auth

import (
	"context"
	"fmt"
	"net/http"

	"github.com/dosco/super-graph/core"
)

// Auth struct contains authentication related config values used by the Super Graph service
type Auth struct {
	Name          string
	Type          string
	Cookie        string
	CredsInHeader bool `mapstructure:"creds_in_header"`

	Rails struct {
		Version       string
		SecretKeyBase string `mapstructure:"secret_key_base"`
		URL           string
		Password      string
		MaxIdle       int `mapstructure:"max_idle"`
		MaxActive     int `mapstructure:"max_active"`
		Salt          string
		SignSalt      string `mapstructure:"sign_salt"`
		AuthSalt      string `mapstructure:"auth_salt"`
	}

	JWT struct {
		Provider   string
		Secret     string
		PubKeyFile string `mapstructure:"public_key_file"`
		PubKeyType string `mapstructure:"public_key_type"`
	}

	Header struct {
		Name   string
		Value  string
		Exists bool
	}
}

func SimpleHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	return func(w http.ResponseWriter, r *http.Request) {
		ctx := r.Context()

		userIDProvider := r.Header.Get("X-User-ID-Provider")
		if len(userIDProvider) != 0 {
			ctx = context.WithValue(ctx, core.UserIDProviderKey, userIDProvider)
		}

		userID := r.Header.Get("X-User-ID")
		if len(userID) != 0 {
			ctx = context.WithValue(ctx, core.UserIDKey, userID)
		}

		userRole := r.Header.Get("X-User-Role")
		if len(userRole) != 0 {
			ctx = context.WithValue(ctx, core.UserRoleKey, userRole)
		}

		next.ServeHTTP(w, r.WithContext(ctx))
	}, nil
}

func HeaderHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	hdr := ac.Header

	if len(hdr.Name) == 0 {
		return nil, fmt.Errorf("auth '%s': no header.name defined", ac.Name)
	}

	if !hdr.Exists && len(hdr.Value) == 0 {
		return nil, fmt.Errorf("auth '%s': no header.value defined", ac.Name)
	}

	return func(w http.ResponseWriter, r *http.Request) {
		var fo1 bool
		value := r.Header.Get(hdr.Name)

		switch {
		case hdr.Exists:
			fo1 = (len(value) == 0)

		default:
			fo1 = (value != hdr.Value)
		}

		if fo1 {
			http.Error(w, "401 unauthorized", http.StatusUnauthorized)
			return
		}

		next.ServeHTTP(w, r)
	}, nil
}

func WithAuth(next http.Handler, ac *Auth) (http.Handler, error) {
	var err error

	if ac.CredsInHeader {
		next, err = SimpleHandler(ac, next)
	}

	if err != nil {
		return nil, err
	}

	switch ac.Type {
	case "rails":
		return RailsHandler(ac, next)

	case "jwt":
		return JwtHandler(ac, next)

	case "header":
		return HeaderHandler(ac, next)
	}

	return next, nil
}

func IsAuth(ct context.Context) bool {
	return ct.Value(core.UserIDKey) != nil
}
@@ -1,4 +1,4 @@
-package serv
+package auth

 import (
 	"context"
@@ -7,26 +7,26 @@ import (
 	"strings"

 	jwt "github.com/dgrijalva/jwt-go"
+	"github.com/dosco/super-graph/core"
 )

 const (
 	authHeader = "Authorization"
-	jwtBase int = iota
-	jwtAuth0
+	jwtAuth0 int = iota + 1
 )

-func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
+func JwtHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
 	var key interface{}
 	var jwtProvider int

-	cookie := conf.Auth.Cookie
+	cookie := ac.Cookie

-	if conf.Auth.JWT.Provider == "auth0" {
+	if ac.JWT.Provider == "auth0" {
 		jwtProvider = jwtAuth0
 	}

-	secret := conf.Auth.JWT.Secret
-	publicKeyFile := conf.Auth.JWT.PubKeyFile
+	secret := ac.JWT.Secret
+	publicKeyFile := ac.JWT.PubKeyFile

 	switch {
 	case len(secret) != 0:
@@ -35,10 +35,10 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
 	case len(publicKeyFile) != 0:
 		kd, err := ioutil.ReadFile(publicKeyFile)
 		if err != nil {
-			logger.Fatal().Err(err).Send()
+			return nil, err
 		}

-		switch conf.Auth.JWT.PubKeyType {
+		switch ac.JWT.PubKeyType {
 		case "ecdsa":
 			key, err = jwt.ParseECPublicKeyFromPEM(kd)

@@ -51,7 +51,7 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
 		}

 		if err != nil {
-			logger.Fatal().Err(err).Send()
+			return nil, err
 		}
 	}

@@ -89,14 +89,17 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
 		if jwtProvider == jwtAuth0 {
 			sub := strings.Split(claims.Subject, "|")
 			if len(sub) != 2 {
-				ctx = context.WithValue(ctx, userIDProviderKey, sub[0])
-				ctx = context.WithValue(ctx, userIDKey, sub[1])
+				ctx = context.WithValue(ctx, core.UserIDProviderKey, sub[0])
+				ctx = context.WithValue(ctx, core.UserIDKey, sub[1])
 			}
 		} else {
-			ctx = context.WithValue(ctx, userIDKey, claims.Subject)
+			ctx = context.WithValue(ctx, core.UserIDKey, claims.Subject)
 		}

 		next.ServeHTTP(w, r.WithContext(ctx))
 		return
 	}

 	next.ServeHTTP(w, r)
-	}
+	}, nil
 }
190
cmd/internal/serv/internal/auth/rails.go
Normal file
@@ -0,0 +1,190 @@
package auth

import (
	"context"
	"errors"
	"fmt"
	"net/http"
	"net/url"
	"strings"

	"github.com/bradfitz/gomemcache/memcache"
	"github.com/dosco/super-graph/cmd/internal/serv/internal/rails"
	"github.com/dosco/super-graph/core"
	"github.com/garyburd/redigo/redis"
)

func RailsHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	ru := ac.Rails.URL

	if strings.HasPrefix(ru, "memcache:") {
		return RailsMemcacheHandler(ac, next)
	}

	if strings.HasPrefix(ru, "redis:") {
		return RailsRedisHandler(ac, next)
	}

	return RailsCookieHandler(ac, next)
}

func RailsRedisHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	cookie := ac.Cookie

	if len(cookie) == 0 {
		return nil, fmt.Errorf("no auth.cookie defined")
	}

	if len(ac.Rails.URL) == 0 {
		return nil, fmt.Errorf("no auth.rails.url defined")
	}

	rp := &redis.Pool{
		MaxIdle:   ac.Rails.MaxIdle,
		MaxActive: ac.Rails.MaxActive,
		Dial: func() (redis.Conn, error) {
			c, err := redis.DialURL(ac.Rails.URL)
			if err != nil {
				return nil, err
			}

			pwd := ac.Rails.Password
			if len(pwd) != 0 {
				if _, err := c.Do("AUTH", pwd); err != nil {
					return nil, err
				}
			}

			return c, nil
		},
	}

	return func(w http.ResponseWriter, r *http.Request) {
		ck, err := r.Cookie(cookie)
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		key := fmt.Sprintf("session:%s", ck.Value)
		sessionData, err := redis.Bytes(rp.Get().Do("GET", key))
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		userID, err := rails.ParseCookie(string(sessionData))
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
		next.ServeHTTP(w, r.WithContext(ctx))
	}, nil
}

func RailsMemcacheHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	cookie := ac.Cookie

	if len(cookie) == 0 {
		return nil, fmt.Errorf("no auth.cookie defined")
	}

	if len(ac.Rails.URL) == 0 {
		return nil, fmt.Errorf("no auth.rails.url defined")
	}

	rURL, err := url.Parse(ac.Rails.URL)
	if err != nil {
		return nil, err
	}

	mc := memcache.New(rURL.Host)

	return func(w http.ResponseWriter, r *http.Request) {
		ck, err := r.Cookie(cookie)
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		key := fmt.Sprintf("session:%s", ck.Value)
		item, err := mc.Get(key)
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		userID, err := rails.ParseCookie(string(item.Value))
		if err != nil {
			next.ServeHTTP(w, r)
			return
		}

		ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
		next.ServeHTTP(w, r.WithContext(ctx))
	}, nil
}

func RailsCookieHandler(ac *Auth, next http.Handler) (http.HandlerFunc, error) {
	cookie := ac.Cookie
	if len(cookie) == 0 {
		return nil, fmt.Errorf("no auth.cookie defined")
	}

	ra, err := railsAuth(ac)
	if err != nil {
		return nil, err
	}

	return func(w http.ResponseWriter, r *http.Request) {
		ck, err := r.Cookie(cookie)
		if err != nil || len(ck.Value) == 0 {
			// logger.Warn().Err(err).Msg("rails cookie missing")
			next.ServeHTTP(w, r)
			return
		}

		userID, err := ra.ParseCookie(ck.Value)
		if err != nil {
			// logger.Warn().Err(err).Msg("failed to parse rails cookie")
			next.ServeHTTP(w, r)
			return
		}

		ctx := context.WithValue(r.Context(), core.UserIDKey, userID)
		next.ServeHTTP(w, r.WithContext(ctx))
	}, nil
}

func railsAuth(ac *Auth) (*rails.Auth, error) {
	secret := ac.Rails.SecretKeyBase
	if len(secret) == 0 {
		return nil, errors.New("no auth.rails.secret_key_base defined")
	}

	version := ac.Rails.Version
	if len(version) == 0 {
		return nil, errors.New("no auth.rails.version defined")
	}

	ra, err := rails.NewAuth(version, secret)
	if err != nil {
		return nil, err
	}

	if len(ac.Rails.Salt) != 0 {
		ra.Salt = ac.Rails.Salt
	}

	if len(ac.Rails.SignSalt) != 0 {
		ra.SignSalt = ac.Rails.SignSalt
	}

	if len(ac.Rails.AuthSalt) != 0 {
		ra.AuthSalt = ac.Rails.AuthSalt
	}

	return ra, nil
}
@@ -3,6 +3,7 @@ package migrate
 import (
 	"bytes"
 	"context"
+	"database/sql"
 	"fmt"
 	"io/ioutil"
 	"os"
@@ -12,7 +13,6 @@ import (
 	"strings"
 	"text/template"

-	"github.com/jackc/pgx/v4"
 	"github.com/pkg/errors"
 )

@@ -62,7 +62,7 @@ type MigratorOptions struct {
 }

 type Migrator struct {
-	conn         *pgx.Conn
+	db           *sql.DB
 	versionTable string
 	options      *MigratorOptions
 	Migrations   []*Migration
@@ -70,12 +70,12 @@ type Migrator struct {
 	Data         map[string]interface{} // Data available to use in migrations
 }

-func NewMigrator(conn *pgx.Conn, versionTable string) (m *Migrator, err error) {
-	return NewMigratorEx(conn, versionTable, &MigratorOptions{MigratorFS: defaultMigratorFS{}})
+func NewMigrator(db *sql.DB, versionTable string) (m *Migrator, err error) {
+	return NewMigratorEx(db, versionTable, &MigratorOptions{MigratorFS: defaultMigratorFS{}})
 }

-func NewMigratorEx(conn *pgx.Conn, versionTable string, opts *MigratorOptions) (m *Migrator, err error) {
-	m = &Migrator{conn: conn, versionTable: versionTable, options: opts}
+func NewMigratorEx(db *sql.DB, versionTable string, opts *MigratorOptions) (m *Migrator, err error) {
+	m = &Migrator{db: db, versionTable: versionTable, options: opts}
 	err = m.ensureSchemaVersionTableExists()
 	m.Migrations = make([]*Migration, 0)
 	m.Data = make(map[string]interface{})
@@ -244,7 +244,6 @@ func (m *Migrator) AppendMigration(name, upSQL, downSQL string) {
 		UpSQL:   upSQL,
 		DownSQL: downSQL,
 	})
-	return
 }

 // Migrate runs pending migrations
@@ -255,14 +254,13 @@ func (m *Migrator) Migrate() error {

 // MigrateTo migrates to targetVersion
 func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
-	ctx := context.Background()
 	// Lock to ensure multiple migrations cannot occur simultaneously
 	lockNum := int64(9628173550095224) // arbitrary random number
-	if _, lockErr := m.conn.Exec(ctx, "select pg_advisory_lock($1)", lockNum); lockErr != nil {
+	if _, lockErr := m.db.Exec("select pg_try_advisory_lock($1)", lockNum); lockErr != nil {
 		return lockErr
 	}
 	defer func() {
-		_, unlockErr := m.conn.Exec(ctx, "select pg_advisory_unlock($1)", lockNum)
+		_, unlockErr := m.db.Exec("select pg_advisory_unlock($1)", lockNum)
 		if err == nil && unlockErr != nil {
 			err = unlockErr
 		}
@@ -311,11 +309,11 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {

 	ctx := context.Background()

-	tx, err := m.conn.Begin(ctx)
+	tx, err := m.db.BeginTx(ctx, nil)
 	if err != nil {
 		return err
 	}
-	defer tx.Rollback(ctx)
+	defer tx.Rollback() //nolint: errcheck

 	// Fire on start callback
 	if m.OnStart != nil {
@@ -323,7 +321,7 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
 	}

 	// Execute the migration
-	_, err = tx.Exec(ctx, sql)
+	_, err = tx.Exec(sql)
 	if err != nil {
 		// if err, ok := err.(pgx.PgError); ok {
 		// 	return MigrationPgError{Sql: sql, PgError: err}
@@ -332,15 +330,17 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
 	}

 	// Reset all database connection settings. Important to do before updating version as search_path may have been changed.
-	tx.Exec(ctx, "reset all")
+	// if _, err := tx.Exec(ctx, "reset all"); err != nil {
+	// 	return err
+	// }

 	// Add one to the version
-	_, err = tx.Exec(ctx, "update "+m.versionTable+" set version=$1", sequence)
+	_, err = tx.Exec("update "+m.versionTable+" set version=$1", sequence)
 	if err != nil {
 		return err
 	}

-	err = tx.Commit(ctx)
+	err = tx.Commit()
 	if err != nil {
 		return err
 	}
@@ -352,16 +352,13 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
 }

 func (m *Migrator) GetCurrentVersion() (v int32, err error) {
-	ctx := context.Background()
-
-	err = m.conn.QueryRow(ctx, "select version from "+m.versionTable).Scan(&v)
+	err = m.db.QueryRow("select version from " + m.versionTable).Scan(&v)
 	return v, err
 }

 func (m *Migrator) ensureSchemaVersionTableExists() (err error) {
-	ctx := context.Background()
-
-	_, err = m.conn.Exec(ctx, fmt.Sprintf(`
+	_, err = m.db.Exec(fmt.Sprintf(`
 	create table if not exists %s(version int4 not null);

 	insert into %s(version)
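With the Migrator now taking a *sql.DB, driving it from any database/sql connection is straightforward. A minimal sketch using the API shown above; the connection string and migrations path are illustrative:

	// Open a pgx-backed *sql.DB and run all pending migrations.
	db, err := sql.Open("pgx", "postgres://postgres:@localhost:5432/example_db")
	if err != nil {
		log.Fatal(err)
	}

	m, err := migrate.NewMigrator(db, "schema_version")
	if err != nil {
		log.Fatal(err)
	}

	if err := m.LoadMigrations("./config/migrations"); err != nil {
		log.Fatal(err)
	}

	if err := m.Migrate(); err != nil { // or m.MigrateTo(42) for a specific version
		log.Fatal(err)
	}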
@@ -4,8 +4,9 @@ import (
 	"encoding/json"
 	"errors"
 	"fmt"
+	"strconv"
 	"strings"

-	"github.com/Masterminds/semver"
 	"github.com/adjust/gorails/marshal"
 )

@@ -37,17 +38,20 @@ func NewAuth(version, secret string) (*Auth, error) {
 		AuthSalt: authSalt,
 	}

-	ver, err := semver.NewVersion(version)
-	if err != nil {
-		return nil, fmt.Errorf("rails auth: %s", err)
-	}
+	var v1, v2 int
+	var err error
+
+	sv := strings.Split(version, ".")
+	if len(sv) >= 2 {
+		if v1, err = strconv.Atoi(sv[0]); err != nil {
+			return nil, err
+		}
+		if v2, err = strconv.Atoi(sv[1]); err != nil {
+			return nil, err
+		}
+	}

-	gt52, err := semver.NewConstraint(">= 5.2")
-	if err != nil {
-		return nil, fmt.Errorf("rails auth: %s", err)
-	}
-
-	if gt52.Check(ver) {
+	if v1 >= 5 && v2 >= 2 {
 		ra.Cipher = railsCipher52
 	} else {
 		ra.Cipher = railsCipher
|
||||
|
||||
import "net/http"
|
||||
|
||||
//nolint: errcheck
|
||||
func introspect(w http.ResponseWriter) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Write([]byte(`{
|
@@ -108,11 +108,15 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
 			// Ensure that we use the correct events, as they are not uniform across
 			// platforms. See https://github.com/fsnotify/fsnotify/issues/74

-			if conf.Production == false && strings.HasSuffix(event.Name, "/allow.list") {
+			if conf != nil && strings.HasSuffix(event.Name, "/allow.list") {
 				continue
 			}

-			logger.Info().Msgf("Reloading, file changed detected '%s'", event)
+			if conf.Production {
+				continue
+			}
+
+			log("INF Reloading, file changed detected: %s", event)

 			var trigger bool
 			switch runtime.GOOS {
@@ -168,7 +172,7 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
 func ReExec() {
 	err := syscall.Exec(binSelf, append([]string{binSelf}, os.Args[1:]...), os.Environ())
 	if err != nil {
-		logger.Fatal().Err(err).Msg("cannot restart")
+		log.Fatalf("ERR cannot restart: %s", err)
 	}
 }
338
cmd/internal/serv/rice-box.go
Normal file
File diff suppressed because one or more lines are too long
179
cmd/internal/serv/serv.go
Normal file
@@ -0,0 +1,179 @@
package serv

import (
	"context"
	"fmt"
	"net/http"
	"os"
	"os/signal"
	"strings"
	"time"

	rice "github.com/GeertJohan/go.rice"
	"github.com/NYTimes/gziphandler"
	"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
)

func initWatcher() {
	if conf != nil && !conf.WatchAndReload {
		return
	}

	var cpath string
	if conf != nil {
		cpath = conf.cpath // read cpath only after the nil check; conf can be nil here
	}

	var d dir
	if len(cpath) == 0 || cpath == "./" {
		d = Dir("./config", ReExec)
	} else {
		d = Dir(cpath, ReExec)
	}

	go func() {
		err := Do(log.Printf, d)
		if err != nil {
			log.Fatalf("ERR %s", err)
		}
	}()
}

func startHTTP() {
	var hostPort string
	var appName string

	defaultHP := "0.0.0.0:8080"
	env := os.Getenv("GO_ENV")

	if conf != nil {
		appName = conf.AppName
		hp := strings.SplitN(conf.HostPort, ":", 2)

		if len(hp) == 2 {
			if len(conf.Host) != 0 {
				hp[0] = conf.Host
			}

			if len(conf.Port) != 0 {
				hp[1] = conf.Port
			}

			hostPort = fmt.Sprintf("%s:%s", hp[0], hp[1])
		}
	}

	if len(hostPort) == 0 {
		hostPort = defaultHP
	}

	routes, err := routeHandler()
	if err != nil {
		log.Fatalf("ERR %s", err)
	}

	srv := &http.Server{
		Addr:           hostPort,
		Handler:        routes,
		ReadTimeout:    5 * time.Second,
		WriteTimeout:   10 * time.Second,
		MaxHeaderBytes: 1 << 20,
	}

	idleConnsClosed := make(chan struct{})
	go func() {
		sigint := make(chan os.Signal, 1)
		signal.Notify(sigint, os.Interrupt)
		<-sigint

		if err := srv.Shutdown(context.Background()); err != nil {
			log.Fatalln("INF shutdown signal received")
		}
		close(idleConnsClosed)
	}()

	srv.RegisterOnShutdown(func() {
		db.Close()
	})

	log.Printf("INF version: %s, git-branch: %s, host-port: %s, app-name: %s, env: %s\n",
		version, gitBranch, hostPort, appName, env)

	log.Printf("INF %s started\n", serverName)

	if err := srv.ListenAndServe(); err != http.ErrServerClosed {
		log.Fatalln("INF server closed")
	}

	<-idleConnsClosed
}

func routeHandler() (http.Handler, error) {
	mux := http.NewServeMux()

	if conf == nil {
		return mux, nil
	}

	routes := map[string]http.Handler{
		"/health":         http.HandlerFunc(health),
		"/api/v1/graphql": apiV1Handler(),
	}

	if err := setActionRoutes(routes); err != nil {
		return nil, err
	}

	if conf.WebUI {
		routes["/"] = http.FileServer(rice.MustFindBox("./web/build").HTTPBox())
	}

	if conf.HTTPGZip {
		gz := gziphandler.MustNewGzipLevelHandler(6)
		for k, v := range routes {
			routes[k] = gz(v)
		}
	}

	for k, v := range routes {
		mux.Handle(k, v)
	}

	fn := func(w http.ResponseWriter, r *http.Request) {
		w.Header().Set("Server", serverName)
		mux.ServeHTTP(w, r)
	}

	return http.HandlerFunc(fn), nil
}

func setActionRoutes(routes map[string]http.Handler) error {
	var err error

	for _, a := range conf.Actions {
		var fn http.Handler

		fn, err = newAction(&a)
		if err != nil {
			break
		}

		p := fmt.Sprintf("/api/v1/actions/%s", strings.ToLower(a.Name))

		if ac := findAuth(a.AuthName); ac != nil {
			routes[p], err = auth.WithAuth(fn, ac)
		} else {
			routes[p] = fn
		}

		if err != nil {
			return err
		}
	}
	return nil
}

func findAuth(name string) *auth.Auth {
	for _, a := range conf.Auths {
		if strings.EqualFold(a.Name, name) {
			return &a
		}
	}
	return nil
}
43
cmd/internal/serv/sqllog.go
Normal file
43
cmd/internal/serv/sqllog.go
Normal file
@ -0,0 +1,43 @@
|
||||
package serv
|
||||
|
||||
// import (
|
||||
// "context"
|
||||
|
||||
// "github.com/jackc/pgx/v4"
|
||||
// "github.com/rs/zerolog"
|
||||
// )
|
||||
|
||||
// type Logger struct {
|
||||
// logger zerolog.Logger
|
||||
// }
|
||||
|
||||
// // NewLogger accepts a zerolog.Logger as input and returns a new custom pgx
|
||||
// // logging fascade as output.
|
||||
// func NewSQLLogger(logger zerolog.Logger) *Logger {
|
||||
// return &Logger{
|
||||
// logger: // logger.With().Logger(),
|
||||
// }
|
||||
// }
|
||||
|
||||
// func (pl *Logger) Log(ctx context.Context, level pgx.LogLevel, msg string, data map[string]interface{}) {
|
||||
// var zlevel zerolog.Level
|
||||
// switch level {
|
||||
// case pgx.LogLevelNone:
|
||||
// zlevel = zerolog.NoLevel
|
||||
// case pgx.LogLevelError:
|
||||
// zlevel = zerolog.ErrorLevel
|
||||
// case pgx.LogLevelWarn:
|
||||
// zlevel = zerolog.WarnLevel
|
||||
// case pgx.LogLevelDebug, pgx.LogLevelInfo:
|
||||
// zlevel = zerolog.DebugLevel
|
||||
// default:
|
||||
// zlevel = zerolog.DebugLevel
|
||||
// }
|
||||
|
||||
// if sql, ok := data["sql"]; ok {
|
||||
// delete(data, "sql")
|
||||
// pl.// logger.WithLevel(zlevel).Fields(data).Msg(sql.(string))
|
||||
// } else {
|
||||
// pl.// logger.WithLevel(zlevel).Fields(data).Msg(msg)
|
||||
// }
|
||||
// }
|
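If this commented-out facade were re-enabled, wiring it into pgx would presumably look like the sketch below. This is an assumption based on the pgx v4 `ConnConfig.Logger` field, not code from this commit, and it presupposes the adapter above is uncommented and importable:

package main

import (
	"context"
	"os"

	"github.com/jackc/pgx/v4"
	"github.com/rs/zerolog"
)

func main() {
	// Hypothetical DSN; substitute real credentials.
	config, err := pgx.ParseConfig("postgres://postgres:postgres@localhost:5432/app")
	if err != nil {
		panic(err)
	}

	// NewSQLLogger is the (currently commented-out) adapter above: it maps
	// pgx log levels onto zerolog levels and logs the "sql" field as the message.
	config.Logger = NewSQLLogger(zerolog.New(os.Stdout))

	conn, err := pgx.ConnectConfig(context.Background(), config)
	if err != nil {
		panic(err)
	}
	defer conn.Close(context.Background())
}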
@ -2,8 +2,11 @@ app_name: "{% app_name %} Development"
 host_port: 0.0.0.0:8080
 web_ui: true

-# debug, info, warn, error, fatal, panic
-log_level: "debug"
+# debug, error, warn, info
+log_level: "info"
+
+# enable or disable http compression (uses gzip)
+http_compress: true

 # When production mode is 'true' only queries
 # from the allow list are permitted.
@ -27,7 +30,21 @@ reload_on_config_change: true
 # seed_file: seed.js

 # Path pointing to where the migrations can be found
-migrations_path: ./config/migrations
+# this must be a relative path under the config path
+migrations_path: ./migrations
+
+# Secret key for general encryption operations like
+# encrypting the cursor data
+secret_key: supercalifajalistics
+
+# CORS: A list of origins a cross-domain request can be executed from.
+# If the special * value is present in the list, all origins will be allowed.
+# An origin may contain a wildcard (*) to replace 0 or more
+# characters (i.e.: http://*.domain.com).
+cors_allowed_origins: ["*"]
+
+# Debug Cross Origin Resource Sharing requests
+cors_debug: false

 # Postgres related environment Variables
 # SG_DATABASE_HOST
@ -46,7 +63,7 @@ migrations_path: ./config/migrations
 #   sheep: sheep

 auth:
-  # Can be 'rails' or 'jwt'
+  # Can be 'rails', 'jwt' or 'header'
   type: rails
   cookie: _{% app_name_slug %}_session

@ -80,13 +97,29 @@ auth:
   # public_key_file: /secrets/public_key.pem
   # public_key_type: ecdsa #rsa

+  # header:
+  #   name: dnt
+  #   exists: true
+  #   value: localhost:8080
+
+# You can add additional named auths to use with actions
+# In this example actions using this auth can only be
+# called from the Google Appengine Cron service that
+# sets a special header to all it's requests
+auths:
+  - name: from_taskqueue
+    type: header
+    header:
+      name: X-Appengine-Cron
+      exists: true
+
 database:
   type: postgres
   host: db
   port: 5432
   dbname: {% app_name_slug %}_development
   user: postgres
-  password: ''
+  password: postgres

   #schema: "public"
   #pool_size: 10
@ -97,23 +130,33 @@ database:
   # Enable this if you need the user id in triggers, etc
   set_user_id: false

-# Define variables here that you want to use in filters
-# sub-queries must be wrapped in ()
+# database ping timeout is used for db health checking
+ping_timeout: 1m
+
+# Define additional variables here to be used with filters
 variables:
   account_id: "(select account_id from users where id = $user_id)"
-  #admin_account_id: "5"
+  admin_account_id: "sql:select id from users where admin = true limit 1"

-# Define defaults to for the field key and values below
-defaults:
-  # filters: ["{ user_id: { eq: $user_id } }"]
-
-# Field and table names that you wish to block
-blocklist:
-  - ar_internal_metadata
-  - schema_migrations
-  - secret
-  - password
-  - encrypted
-  - token
+# Field and table names that you wish to block
+blocklist:
+  - ar_internal_metadata
+  - schema_migrations
+  - secret
+  - password
+  - encrypted
+  - token
+
+# Create custom actions with their own api endpoints
+# For example the below action will be available at /api/v1/actions/refresh_leaderboard_users
+# A request to this url will execute the configured SQL query
+# which in this case refreshes a materialized view in the database.
+# The auth_name is from one of the configured auths
+actions:
+  - name: refresh_leaderboard_users
+    sql: REFRESH MATERIALIZED VIEW CONCURRENTLY "leaderboard_users"
+    auth_name: from_taskqueue

 tables:
   - name: customers
@ -136,26 +179,15 @@ tables:
       name: me
       table: users

-roles_query: "SELECT * FROM users WHERE id = $user_id"
+#roles_query: "SELECT * FROM users WHERE id = $user_id"

 roles:
   - name: anon
     tables:
       - name: products
+        limit: 10

-      - name: users
-        query:
-          columns: ["id", "name", "description" ]
-          aggregation: false
-
-        insert:
-          block: false
-
-        update:
-          block: false
-
-        delete:
-          block: false
-        limit: 10

   - name: user
     tables:
@ -167,29 +199,24 @@ roles:
       query:
         limit: 50
         filters: ["{ user_id: { eq: $user_id } }"]
         columns: ["id", "name", "description" ]
         disable_functions: false

       insert:
         filters: ["{ user_id: { eq: $user_id } }"]
         columns: ["id", "name", "description" ]
         presets:
           - user_id: "$user_id"
           - created_at: "now"

       update:
         filters: ["{ user_id: { eq: $user_id } }"]
         columns:
           - id
           - name
         presets:
           - updated_at: "now"

       delete:
-        deny: true
+        block: true

-  - name: admin
-    match: id = 1
-    tables:
-      - name: users
-        # query:
-        #   filters: ["{ account_id: { _eq: $account_id } }"]
+# - name: admin
+#   match: id = 1000
+#   tables:
+#     - name: users
+#       filters: []
59
cmd/internal/serv/tmpl/docker-compose.yml
Normal file
@ -0,0 +1,59 @@
version: '3.4'
services:
  # Postgres DB
  db:
    image: postgres:12
    environment:
      POSTGRES_USER: postgres
      POSTGRES_PASSWORD: postgres
    ports:
      - "5432:5432"

  # Yugabyte DB
  # yb-master:
  #   image: yugabytedb/yugabyte:latest
  #   container_name: yb-master-n1
  #   command: [ "/home/yugabyte/bin/yb-master",
  #             "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
  #             "--master_addresses=yb-master-n1:7100",
  #             "--replication_factor=1",
  #             "--enable_ysql=true"]
  #   ports:
  #     - "7000:7000"
  #   environment:
  #     SERVICE_7000_NAME: yb-master

  # db:
  #   image: yugabytedb/yugabyte:latest
  #   container_name: yb-tserver-n1
  #   command: [ "/home/yugabyte/bin/yb-tserver",
  #             "--fs_data_dirs=/mnt/disk0,/mnt/disk1",
  #             "--start_pgsql_proxy",
  #             "--tserver_master_addrs=yb-master-n1:7100"]
  #   ports:
  #     - "9042:9042"
  #     - "6379:6379"
  #     - "5433:5433"
  #     - "9000:9000"
  #   environment:
  #     SERVICE_5433_NAME: ysql
  #     SERVICE_9042_NAME: ycql
  #     SERVICE_6379_NAME: yedis
  #     SERVICE_9000_NAME: yb-tserver
  #   depends_on:
  #     - yb-master

  {% app_name_slug %}_api:
    image: dosco/super-graph:latest
    environment:
      GO_ENV: "development"
      # Uncomment below for Yugabyte DB
      # SG_DATABASE_PORT: 5433
      # SG_DATABASE_USER: yugabyte
      # SG_DATABASE_PASSWORD: yugabyte
    volumes:
      - ./config:/config
    ports:
      - "8080:8080"
    depends_on:
      - db
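With this template rendered into a project, `docker-compose up` should bring up the Postgres service and the API container together; the commented-out blocks sketch an alternative Yugabyte DB setup, with the matching alternate port, user, and password left commented in the API service's environment.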
@ -6,8 +6,12 @@ app_name: "{% app_name %} Production"
 host_port: 0.0.0.0:8080
 web_ui: false

-# debug, info, warn, error, fatal, panic, disable
-log_level: "info"
+# debug, error, warn, info
+log_level: "warn"
+
+# enable or disable http compression (uses gzip)
+http_compress: true

 # When production mode is 'true' only queries
 # from the allow list are permitted.
 # When it's 'false' all queries are saved to the
@ -20,7 +24,11 @@ auth_fail_block: true
 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the
 # response
-enable_tracing: true
+enable_tracing: false
+
+# Watch the config folder and reload Super Graph
+# with the new configs when a change is detected
+reload_on_config_change: false

 # File that points to the database seeding script
 # seed_file: seed.js
@ -28,6 +36,19 @@ enable_tracing: true
 # Path pointing to where the migrations can be found
 # migrations_path: migrations

+# Secret key for general encryption operations like
+# encrypting the cursor data
+# secret_key: supercalifajalistics
+
+# CORS: A list of origins a cross-domain request can be executed from.
+# If the special * value is present in the list, all origins will be allowed.
+# An origin may contain a wildcard (*) to replace 0 or more
+# characters (i.e.: http://*.domain.com).
+# cors_allowed_origins: ["*"]
+
+# Debug Cross Origin Resource Sharing requests
+# cors_debug: false
+
 # Postgres related environment Variables
 # SG_DATABASE_HOST
 # SG_DATABASE_PORT
@ -40,47 +61,13 @@ enable_tracing: true
 # SG_AUTH_RAILS_REDIS_PASSWORD
 # SG_AUTH_JWT_PUBLIC_KEY_FILE

-# inflections:
-#   person: people
-#   sheep: sheep
-
-auth:
-  # Can be 'rails' or 'jwt'
-  type: rails
-  cookie: _{% app_name_slug %}_session
-
-  rails:
-    # Rails version this is used for reading the
-    # various cookies formats.
-    version: 5.2
-
-    # Found in 'Rails.application.config.secret_key_base'
-    secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566
-
-    # Remote cookie store. (memcache or redis)
-    # url: redis://127.0.0.1:6379
-    # password: test
-    # max_idle: 80,
-    # max_active: 12000,
-
-    # In most cases you don't need these
-    # salt: "encrypted cookie"
-    # sign_salt: "signed encrypted cookie"
-    # auth_salt: "authenticated encrypted cookie"
-
-# jwt:
-#   provider: auth0
-#   secret: abc335bfcfdb04e50db5bb0a4d67ab9
-#   public_key_file: /secrets/public_key.pem
-#   public_key_type: ecdsa #rsa
-
 database:
   type: postgres
   host: db
   port: 5432
-  dbname: {% app_name_slug %}_development
+  dbname: {% app_name_slug %}_production
   user: postgres
-  password: ''
+  password: postgres
   #pool_size: 10
   #max_retries: 0
   #log_level: "debug"
@ -88,3 +75,6 @@ database:
 # Set session variable "user.id" to the user id
 # Enable this if you need the user id in triggers, etc
 set_user_id: false
+
+# database ping timeout is used for db health checking
+ping_timeout: 5m
@ -5,13 +5,16 @@ import (
 	"crypto/sha1"
 	"encoding/hex"
 	"io"
+	"os"
 	"sort"
 	"strings"
+	"sync"

 	"github.com/cespare/xxhash/v2"
 	"github.com/dosco/super-graph/jsn"
 )

+// nolint: errcheck
 func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
 	h.WriteString(k1)
 	h.WriteString(k2)
@ -21,6 +24,7 @@ func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
 	return v
 }

+// nolint: errcheck
 func gqlHash(b string, vars []byte, role string) string {
 	b = strings.TrimSpace(b)
 	h := sha1.New()
@ -64,7 +68,7 @@ func gqlHash(b string, vars []byte, role string) string {
 		} else {
 			starting = false
 			s = e
-			for e < len(b) && ws(b[e]) == false {
+			for e < len(b) && !ws(b[e]) {
 				e++
 			}
 			if e != 0 {
@ -81,7 +85,7 @@ func gqlHash(b string, vars []byte, role string) string {
 		io.WriteString(h, role)
 	}

-	if vars == nil || len(vars) == 0 {
+	if len(vars) == 0 {
 		return hex.EncodeToString(h.Sum(nil))
 	}

@ -106,25 +110,23 @@ func al(b byte) bool {
 	return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
 }

-func isMutation(sql string) bool {
-	for i := range sql {
-		b := sql[i]
-		if b == '{' {
-			return false
-		}
-		if al(b) {
-			return (b == 'm' || b == 'M')
-		}
-	}
-	return false
-}
+func fatalInProd(err error, msg string) {
+	var wg sync.WaitGroup
+
+	if isDev() {
+		log.Printf("ERR %s: %s", msg, err)
+	} else {
+		log.Fatalf("ERR %s: %s", msg, err)
+	}
+
+	wg.Add(1)
+	wg.Wait()
+}

-func findStmt(role string, stmts []stmt) *stmt {
-	for i := range stmts {
-		if stmts[i].role.Name != role {
-			continue
-		}
-		return &stmts[i]
-	}
-	return nil
-}
+func isDev() bool {
+	return strings.HasPrefix(os.Getenv("GO_ENV"), "dev")
+}
+
+func sanitize(value string) string {
+	return strings.ToLower(strings.TrimSpace(value))
+}
@ -7,7 +7,7 @@
 /coverage

 # production
-/build
+# /build

 # development
 /src/components/dataviz/core/*.js.map
30
cmd/internal/serv/web/build/asset-manifest.json
Normal file
@ -0,0 +1,30 @@
{
  "files": {
    "main.css": "/static/css/main.c6b5c55c.chunk.css",
    "main.js": "/static/js/main.04d74040.chunk.js",
    "main.js.map": "/static/js/main.04d74040.chunk.js.map",
    "runtime-main.js": "/static/js/runtime-main.4aea9da3.js",
    "runtime-main.js.map": "/static/js/runtime-main.4aea9da3.js.map",
    "static/js/2.03370bd3.chunk.js": "/static/js/2.03370bd3.chunk.js",
    "static/js/2.03370bd3.chunk.js.map": "/static/js/2.03370bd3.chunk.js.map",
    "index.html": "/index.html",
    "precache-manifest.e33bc3c7c6774d7032c490820c96901d.js": "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js",
    "service-worker.js": "/service-worker.js",
    "static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
    "static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
    "static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
    "static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
    "static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
    "static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
    "static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
    "static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
    "static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
    "static/media/logo.png": "/static/media/logo.57ee3b60.png"
  },
  "entrypoints": [
    "static/js/runtime-main.4aea9da3.js",
    "static/js/2.03370bd3.chunk.js",
    "static/css/main.c6b5c55c.chunk.css",
    "static/js/main.04d74040.chunk.js"
  ]
}
(binary image changed: Before 15 KiB, After 15 KiB)
1
cmd/internal/serv/web/build/index.html
Normal file
@ -0,0 +1 @@
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="manifest" href="/manifest.json"/><link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet"><title>Super Graph - GraphQL API for Rails</title><link href="/static/css/main.c6b5c55c.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(i){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],l=0,f=[];l<n.length;l++)t=n[l],Object.prototype.hasOwnProperty.call(p,t)&&p[t]&&f.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(i[r]=o[r]);for(s&&s(e);f.length;)f.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=l(l.s=t[0]))}return e}var t={},p={1:0},c=[];function l(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return i[e].call(r.exports,r,r.exports,l),r.l=!0,r.exports}l.m=i,l.c=t,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(r,e){if(1&e&&(r=l(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)l.d(t,n,function(e){return r[e]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var r=this.webpackJsonpweb=this.webpackJsonpweb||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/2.03370bd3.chunk.js"></script><script src="/static/js/main.04d74040.chunk.js"></script></body></html>
@ -0,0 +1,58 @@
self.__precacheManifest = (self.__precacheManifest || []).concat([
  {
    "revision": "ecdae64182d05c64e7f7f200ed03a4ed",
    "url": "/index.html"
  },
  {
    "revision": "6e9467dc213a3e2b84ea",
    "url": "/static/css/main.c6b5c55c.chunk.css"
  },
  {
    "revision": "c156a125990ddf5dcc51",
    "url": "/static/js/2.03370bd3.chunk.js"
  },
  {
    "revision": "6e9467dc213a3e2b84ea",
    "url": "/static/js/main.04d74040.chunk.js"
  },
  {
    "revision": "427262b6771d3f49a7c5",
    "url": "/static/js/runtime-main.4aea9da3.js"
  },
  {
    "revision": "5ab204b9b95c06640dbefae9a65b1db2",
    "url": "/static/media/GraphQLLanguageService.js.5ab204b9.flow"
  },
  {
    "revision": "4ce7ba191f7ebee4426768f246b2f0e0",
    "url": "/static/media/autocompleteUtils.js.4ce7ba19.flow"
  },
  {
    "revision": "7f98f032085704c8943ec2d1925c7c84",
    "url": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow"
  },
  {
    "revision": "4dbec62f1d8e8417afb9cbd19f1268c3",
    "url": "/static/media/getDefinition.js.4dbec62f.flow"
  },
  {
    "revision": "65b0979ac23feca49e4411883fd8eaab",
    "url": "/static/media/getDiagnostics.js.65b0979a.flow"
  },
  {
    "revision": "d94118379d362fc161aa1246bcc14d43",
    "url": "/static/media/getHoverInformation.js.d9411837.flow"
  },
  {
    "revision": "c04e3998712b37a96f0bfd283fa06b52",
    "url": "/static/media/getOutline.js.c04e3998.flow"
  },
  {
    "revision": "02c24280c5e4a7eb3c6cfcb079a8f1e3",
    "url": "/static/media/index.js.02c24280.flow"
  },
  {
    "revision": "57ee3b6084cb9d3c754cc12d25a98035",
    "url": "/static/media/logo.57ee3b60.png"
  }
]);
39
cmd/internal/serv/web/build/service-worker.js
Normal file
@ -0,0 +1,39 @@
/**
 * Welcome to your Workbox-powered service worker!
 *
 * You'll need to register this file in your web app and you should
 * disable HTTP caching for this file too.
 * See https://goo.gl/nhQhGp
 *
 * The rest of the code is auto-generated. Please don't update this file
 * directly; instead, make changes to your Workbox build configuration
 * and re-run your build process.
 * See https://goo.gl/2aRDsh
 */

importScripts("https://storage.googleapis.com/workbox-cdn/releases/4.3.1/workbox-sw.js");

importScripts(
  "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js"
);

self.addEventListener('message', (event) => {
  if (event.data && event.data.type === 'SKIP_WAITING') {
    self.skipWaiting();
  }
});

workbox.core.clientsClaim();

/**
 * The workboxSW.precacheAndRoute() method efficiently caches and responds to
 * requests for URLs in the manifest.
 * See https://goo.gl/S9QRab
 */
self.__precacheManifest = [].concat(self.__precacheManifest || []);
workbox.precaching.precacheAndRoute(self.__precacheManifest, {});

workbox.routing.registerNavigationRoute(workbox.precaching.getCacheKeyForURL("/index.html"), {

  blacklist: [/^\/_/,/\/[^/?]+\.[^/]+$/],
});
@ -0,0 +1,2 @@
body{margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica Neue,sans-serif;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;background-color:#0f202d}code{font-family:source-code-pro,Menlo,Monaco,Consolas,Courier New,monospace}.playground>div:nth-child(2){height:calc(100vh - 131px)}
/*# sourceMappingURL=main.c6b5c55c.chunk.css.map */
@ -0,0 +1 @@
{"version":3,"sources":["index.css"],"names":[],"mappings":"AAAA,KACE,QAAS,CACT,SAAU,CACV,mIAEY,CACZ,kCAAmC,CACnC,iCAAkC,CAClC,wBACF,CAEA,KACE,uEAEF,CAEA,6BACE,0BACF","file":"main.c6b5c55c.chunk.css","sourcesContent":["body {\n margin: 0;\n padding: 0;\n font-family: -apple-system, BlinkMacSystemFont, \"Segoe UI\", \"Roboto\", \"Oxygen\",\n \"Ubuntu\", \"Cantarell\", \"Fira Sans\", \"Droid Sans\", \"Helvetica Neue\",\n sans-serif;\n -webkit-font-smoothing: antialiased;\n -moz-osx-font-smoothing: grayscale;\n background-color: #0f202d;\n}\n\ncode {\n font-family: source-code-pro, Menlo, Monaco, Consolas, \"Courier New\",\n monospace;\n}\n\n.playground > div:nth-child(2) {\n height: calc(100vh - 131px);\n}\n"]}
File diff suppressed because one or more lines are too long
File diff suppressed because one or more lines are too long
@ -0,0 +1,2 @@
(this.webpackJsonpweb=this.webpackJsonpweb||[]).push([[0],{163:function(e,t,n){var r={".":61,"./":61,"./GraphQLLanguageService":117,"./GraphQLLanguageService.js":117,"./GraphQLLanguageService.js.flow":315,"./autocompleteUtils":91,"./autocompleteUtils.js":91,"./autocompleteUtils.js.flow":316,"./getAutocompleteSuggestions":77,"./getAutocompleteSuggestions.js":77,"./getAutocompleteSuggestions.js.flow":317,"./getDefinition":92,"./getDefinition.js":92,"./getDefinition.js.flow":318,"./getDiagnostics":94,"./getDiagnostics.js":94,"./getDiagnostics.js.flow":319,"./getHoverInformation":95,"./getHoverInformation.js":95,"./getHoverInformation.js.flow":320,"./getOutline":116,"./getOutline.js":116,"./getOutline.js.flow":321,"./index":61,"./index.js":61,"./index.js.flow":322};function o(e){var t=a(e);return n(t)}function a(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}o.keys=function(){return Object.keys(r)},o.resolve=a,e.exports=o,o.id=163},190:function(e,t,n){"use strict";(function(e){var r=n(100),o=n(101),a=n(201),i=n(191),s=n(202),l=n(5),c=n.n(l),u=n(20),g=n(130),f=(n(441),window.fetch);window.fetch=function(){return arguments[1].credentials="include",Promise.resolve(f.apply(e,arguments))};var p=function(e){function t(){return Object(r.a)(this,t),Object(a.a)(this,Object(i.a)(t).apply(this,arguments))}return Object(s.a)(t,e),Object(o.a)(t,[{key:"render",value:function(){return c.a.createElement("div",null,c.a.createElement("header",{style:{background:"#09141b",color:"#03a9f4",letterSpacing:"0.15rem",height:"65px",display:"flex",alignItems:"center"}},c.a.createElement("h3",{style:{textDecoration:"none",margin:"0px",fontSize:"18px"}},c.a.createElement("span",{style:{textTransform:"uppercase",marginLeft:"20px",paddingRight:"10px",borderRight:"1px solid #fff"}},"Super Graph"),c.a.createElement("span",{style:{fontSize:"16px",marginLeft:"10px",color:"#fff"}},"Instant GraphQL"))),c.a.createElement(u.Provider,{store:g.store},c.a.createElement(g.Playground,{endpoint:"/api/v1/graphql",settings:"{ 'schema.polling.enable': false, 'request.credentials': 'include', 'general.betaUpdates': true, 'editor.reuseHeaders': true, 'editor.theme': 'dark' }"})))}}]),t}(l.Component);t.a=p}).call(this,n(32))},205:function(e,t,n){e.exports=n(206)},206:function(e,t,n){"use strict";n.r(t);var r=n(5),o=n.n(r),a=n(52),i=n.n(a),s=n(190);i.a.render(o.a.createElement(s.a,null),document.getElementById("root"))},441:function(e,t,n){}},[[205,1,2]]]);
//# sourceMappingURL=main.04d74040.chunk.js.map
File diff suppressed because one or more lines are too long
@ -0,0 +1,2 @@
!function(e){function r(r){for(var n,l,f=r[0],i=r[1],a=r[2],c=0,s=[];c<f.length;c++)l=f[c],Object.prototype.hasOwnProperty.call(o,l)&&o[l]&&s.push(o[l][0]),o[l]=0;for(n in i)Object.prototype.hasOwnProperty.call(i,n)&&(e[n]=i[n]);for(p&&p(r);s.length;)s.shift()();return u.push.apply(u,a||[]),t()}function t(){for(var e,r=0;r<u.length;r++){for(var t=u[r],n=!0,f=1;f<t.length;f++){var i=t[f];0!==o[i]&&(n=!1)}n&&(u.splice(r--,1),e=l(l.s=t[0]))}return e}var n={},o={1:0},u=[];function l(r){if(n[r])return n[r].exports;var t=n[r]={i:r,l:!1,exports:{}};return e[r].call(t.exports,t,t.exports,l),t.l=!0,t.exports}l.m=e,l.c=n,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!==typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(e,r){if(1&r&&(e=l(e)),8&r)return e;if(4&r&&"object"===typeof e&&e&&e.__esModule)return e;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:e}),2&r&&"string"!=typeof e)for(var n in e)l.d(t,n,function(r){return e[r]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var f=this.webpackJsonpweb=this.webpackJsonpweb||[],i=f.push.bind(f);f.push=r,f=f.slice();for(var a=0;a<f.length;a++)r(f[a]);var p=i;t()}([]);
//# sourceMappingURL=runtime-main.4aea9da3.js.map
File diff suppressed because one or more lines are too long
@ -0,0 +1,328 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  DocumentNode,
  FragmentSpreadNode,
  FragmentDefinitionNode,
  OperationDefinitionNode,
  TypeDefinitionNode,
  NamedTypeNode,
} from 'graphql';
import type {
  CompletionItem,
  DefinitionQueryResult,
  Diagnostic,
  GraphQLCache,
  GraphQLConfig,
  GraphQLProjectConfig,
  Uri,
} from 'graphql-language-service-types';
import type {Position} from 'graphql-language-service-utils';
import type {Hover} from 'vscode-languageserver-types';

import {Kind, parse, print} from 'graphql';
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
import {getHoverInformation} from './getHoverInformation';
import {validateQuery, getRange, SEVERITY} from './getDiagnostics';
import {
  getDefinitionQueryResultForFragmentSpread,
  getDefinitionQueryResultForDefinitionNode,
  getDefinitionQueryResultForNamedType,
} from './getDefinition';
import {getASTNodeAtPosition} from 'graphql-language-service-utils';

const {
  FRAGMENT_DEFINITION,
  OBJECT_TYPE_DEFINITION,
  INTERFACE_TYPE_DEFINITION,
  ENUM_TYPE_DEFINITION,
  UNION_TYPE_DEFINITION,
  SCALAR_TYPE_DEFINITION,
  INPUT_OBJECT_TYPE_DEFINITION,
  SCALAR_TYPE_EXTENSION,
  OBJECT_TYPE_EXTENSION,
  INTERFACE_TYPE_EXTENSION,
  UNION_TYPE_EXTENSION,
  ENUM_TYPE_EXTENSION,
  INPUT_OBJECT_TYPE_EXTENSION,
  DIRECTIVE_DEFINITION,
  FRAGMENT_SPREAD,
  OPERATION_DEFINITION,
  NAMED_TYPE,
} = Kind;

export class GraphQLLanguageService {
  _graphQLCache: GraphQLCache;
  _graphQLConfig: GraphQLConfig;

  constructor(cache: GraphQLCache) {
    this._graphQLCache = cache;
    this._graphQLConfig = cache.getGraphQLConfig();
  }

  async getDiagnostics(
    query: string,
    uri: Uri,
    isRelayCompatMode?: boolean,
  ): Promise<Array<Diagnostic>> {
    // Perform syntax diagnostics first, as this doesn't require
    // schema/fragment definitions, even the project configuration.
    let queryHasExtensions = false;
    const projectConfig = this._graphQLConfig.getConfigForFile(uri);
    const schemaPath = projectConfig.schemaPath;
    try {
      const queryAST = parse(query);
      if (!schemaPath || uri !== schemaPath) {
        queryHasExtensions = queryAST.definitions.some(definition => {
          switch (definition.kind) {
            case OBJECT_TYPE_DEFINITION:
            case INTERFACE_TYPE_DEFINITION:
            case ENUM_TYPE_DEFINITION:
            case UNION_TYPE_DEFINITION:
            case SCALAR_TYPE_DEFINITION:
            case INPUT_OBJECT_TYPE_DEFINITION:
            case SCALAR_TYPE_EXTENSION:
            case OBJECT_TYPE_EXTENSION:
            case INTERFACE_TYPE_EXTENSION:
            case UNION_TYPE_EXTENSION:
            case ENUM_TYPE_EXTENSION:
            case INPUT_OBJECT_TYPE_EXTENSION:
            case DIRECTIVE_DEFINITION:
              return true;
          }
          return false;
        });
      }
    } catch (error) {
      const range = getRange(error.locations[0], query);
      return [
        {
          severity: SEVERITY.ERROR,
          message: error.message,
          source: 'GraphQL: Syntax',
          range,
        },
      ];
    }

    // If there's a matching config, proceed to prepare to run validation
    let source = query;
    const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
      projectConfig,
    );
    const fragmentDependencies = await this._graphQLCache.getFragmentDependencies(
      query,
      fragmentDefinitions,
    );
    const dependenciesSource = fragmentDependencies.reduce(
      (prev, cur) => `${prev} ${print(cur.definition)}`,
      '',
    );

    source = `${source} ${dependenciesSource}`;

    let validationAst = null;
    try {
      validationAst = parse(source);
    } catch (error) {
      // the query string is already checked to be parsed properly - errors
      // from this parse must be from corrupted fragment dependencies.
      // For IDEs we don't care for errors outside of the currently edited
      // query, so we return an empty array here.
      return [];
    }

    // Check if there are custom validation rules to be used
    let customRules;
    const customRulesModulePath =
      projectConfig.extensions.customValidationRules;
    if (customRulesModulePath) {
      /* eslint-disable no-implicit-coercion */
      const rulesPath = require.resolve(`${customRulesModulePath}`);
      if (rulesPath) {
        customRules = require(`${rulesPath}`)(this._graphQLConfig);
      }
      /* eslint-enable no-implicit-coercion */
    }

    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName, queryHasExtensions)
      .catch(() => null);

    if (!schema) {
      return [];
    }

    return validateQuery(validationAst, schema, customRules, isRelayCompatMode);
  }

  async getAutocompleteSuggestions(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<Array<CompletionItem>> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName)
      .catch(() => null);

    if (schema) {
      return getAutocompleteSuggestions(schema, query, position);
    }
    return [];
  }

  async getHoverInformation(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<Hover.contents> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
    const schema = await this._graphQLCache
      .getSchema(projectConfig.projectName)
      .catch(() => null);

    if (schema) {
      return getHoverInformation(schema, query, position);
    }
    return '';
  }

  async getDefinition(
    query: string,
    position: Position,
    filePath: Uri,
  ): Promise<?DefinitionQueryResult> {
    const projectConfig = this._graphQLConfig.getConfigForFile(filePath);

    let ast;
    try {
      ast = parse(query);
    } catch (error) {
      return null;
    }

    const node = getASTNodeAtPosition(query, ast, position);
    if (node) {
      switch (node.kind) {
        case FRAGMENT_SPREAD:
          return this._getDefinitionForFragmentSpread(
            query,
            ast,
            node,
            filePath,
            projectConfig,
          );
        case FRAGMENT_DEFINITION:
        case OPERATION_DEFINITION:
          return getDefinitionQueryResultForDefinitionNode(
            filePath,
            query,
            (node: FragmentDefinitionNode | OperationDefinitionNode),
          );
        case NAMED_TYPE:
          return this._getDefinitionForNamedType(
            query,
            ast,
            node,
            filePath,
            projectConfig,
          );
      }
    }
    return null;
  }

  async _getDefinitionForNamedType(
    query: string,
    ast: DocumentNode,
    node: NamedTypeNode,
    filePath: Uri,
    projectConfig: GraphQLProjectConfig,
  ): Promise<?DefinitionQueryResult> {
    const objectTypeDefinitions = await this._graphQLCache.getObjectTypeDefinitions(
      projectConfig,
    );

    const dependencies = await this._graphQLCache.getObjectTypeDependenciesForAST(
      ast,
      objectTypeDefinitions,
    );

    const localObjectTypeDefinitions = ast.definitions.filter(
      definition =>
        definition.kind === OBJECT_TYPE_DEFINITION ||
        definition.kind === INPUT_OBJECT_TYPE_DEFINITION ||
        definition.kind === ENUM_TYPE_DEFINITION,
    );

    const typeCastedDefs = ((localObjectTypeDefinitions: any): Array<
      TypeDefinitionNode,
    >);

    const localOperationDefinationInfos = typeCastedDefs.map(
      (definition: TypeDefinitionNode) => ({
        filePath,
        content: query,
        definition,
      }),
    );

    const result = await getDefinitionQueryResultForNamedType(
      query,
      node,
      dependencies.concat(localOperationDefinationInfos),
    );

    return result;
  }

  async _getDefinitionForFragmentSpread(
    query: string,
    ast: DocumentNode,
    node: FragmentSpreadNode,
    filePath: Uri,
    projectConfig: GraphQLProjectConfig,
  ): Promise<?DefinitionQueryResult> {
    const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
      projectConfig,
    );

    const dependencies = await this._graphQLCache.getFragmentDependenciesForAST(
      ast,
      fragmentDefinitions,
    );

    const localFragDefinitions = ast.definitions.filter(
      definition => definition.kind === FRAGMENT_DEFINITION,
    );

    const typeCastedDefs = ((localFragDefinitions: any): Array<
      FragmentDefinitionNode,
    >);

    const localFragInfos = typeCastedDefs.map(
      (definition: FragmentDefinitionNode) => ({
        filePath,
        content: query,
        definition,
      }),
    );

    const result = await getDefinitionQueryResultForFragmentSpread(
      query,
      node,
      dependencies.concat(localFragInfos),
    );

    return result;
  }
}
@ -0,0 +1,204 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {GraphQLField, GraphQLSchema, GraphQLType} from 'graphql';
import {isCompositeType} from 'graphql';
import {
  SchemaMetaFieldDef,
  TypeMetaFieldDef,
  TypeNameMetaFieldDef,
} from 'graphql/type/introspection';
import type {
  CompletionItem,
  ContextToken,
  State,
  TypeInfo,
} from 'graphql-language-service-types';

// Utility for returning the state representing the Definition this token state
// is within, if any.
export function getDefinitionState(tokenState: State): ?State {
  let definitionState;

  forEachState(tokenState, state => {
    switch (state.kind) {
      case 'Query':
      case 'ShortQuery':
      case 'Mutation':
      case 'Subscription':
      case 'FragmentDefinition':
        definitionState = state;
        break;
    }
  });

  return definitionState;
}

// Gets the field definition given a type and field name
export function getFieldDef(
  schema: GraphQLSchema,
  type: GraphQLType,
  fieldName: string,
): ?GraphQLField<*, *> {
  if (fieldName === SchemaMetaFieldDef.name && schema.getQueryType() === type) {
    return SchemaMetaFieldDef;
  }
  if (fieldName === TypeMetaFieldDef.name && schema.getQueryType() === type) {
    return TypeMetaFieldDef;
  }
  if (fieldName === TypeNameMetaFieldDef.name && isCompositeType(type)) {
    return TypeNameMetaFieldDef;
  }
  if (type.getFields && typeof type.getFields === 'function') {
    return (type.getFields()[fieldName]: any);
  }

  return null;
}

// Utility for iterating through a CodeMirror parse state stack bottom-up.
export function forEachState(
  stack: State,
  fn: (state: State) => ?TypeInfo,
): void {
  const reverseStateStack = [];
  let state = stack;
  while (state && state.kind) {
    reverseStateStack.push(state);
    state = state.prevState;
  }
  for (let i = reverseStateStack.length - 1; i >= 0; i--) {
    fn(reverseStateStack[i]);
  }
}

export function objectValues(object: Object): Array<any> {
  const keys = Object.keys(object);
  const len = keys.length;
  const values = new Array(len);
  for (let i = 0; i < len; ++i) {
    values[i] = object[keys[i]];
  }
  return values;
}

// Create the expected hint response given a possible list and a token
export function hintList(
  token: ContextToken,
  list: Array<CompletionItem>,
): Array<CompletionItem> {
  return filterAndSortList(list, normalizeText(token.string));
}

// Given a list of hint entries and currently typed text, sort and filter to
// provide a concise list.
function filterAndSortList(
  list: Array<CompletionItem>,
  text: string,
): Array<CompletionItem> {
  if (!text) {
    return filterNonEmpty(list, entry => !entry.isDeprecated);
  }

  const byProximity = list.map(entry => ({
    proximity: getProximity(normalizeText(entry.label), text),
    entry,
  }));

  const conciseMatches = filterNonEmpty(
    filterNonEmpty(byProximity, pair => pair.proximity <= 2),
    pair => !pair.entry.isDeprecated,
  );

  const sortedMatches = conciseMatches.sort(
    (a, b) =>
      (a.entry.isDeprecated ? 1 : 0) - (b.entry.isDeprecated ? 1 : 0) ||
      a.proximity - b.proximity ||
      a.entry.label.length - b.entry.label.length,
  );

  return sortedMatches.map(pair => pair.entry);
}

// Filters the array by the predicate, unless it results in an empty array,
// in which case return the original array.
function filterNonEmpty(
  array: Array<Object>,
  predicate: (entry: Object) => boolean,
): Array<Object> {
  const filtered = array.filter(predicate);
  return filtered.length === 0 ? array : filtered;
}

function normalizeText(text: string): string {
  return text.toLowerCase().replace(/\W/g, '');
}

// Determine a numeric proximity for a suggestion based on current text.
function getProximity(suggestion: string, text: string): number {
  // start with lexical distance
  let proximity = lexicalDistance(text, suggestion);
  if (suggestion.length > text.length) {
    // do not penalize long suggestions.
    proximity -= suggestion.length - text.length - 1;
    // penalize suggestions not starting with this phrase
    proximity += suggestion.indexOf(text) === 0 ? 0 : 0.5;
  }
  return proximity;
}

/**
 * Computes the lexical distance between strings A and B.
 *
 * The "distance" between two strings is given by counting the minimum number
 * of edits needed to transform string A into string B. An edit can be an
 * insertion, deletion, or substitution of a single character, or a swap of two
 * adjacent characters.
 *
 * This distance can be useful for detecting typos in input or sorting
 *
 * @param {string} a
 * @param {string} b
 * @return {int} distance in number of edits
 */
function lexicalDistance(a: string, b: string): number {
  let i;
  let j;
  const d = [];
  const aLength = a.length;
  const bLength = b.length;

  for (i = 0; i <= aLength; i++) {
    d[i] = [i];
  }

  for (j = 1; j <= bLength; j++) {
    d[0][j] = j;
  }

  for (i = 1; i <= aLength; i++) {
    for (j = 1; j <= bLength; j++) {
      const cost = a[i - 1] === b[j - 1] ? 0 : 1;

      d[i][j] = Math.min(
        d[i - 1][j] + 1,
        d[i][j - 1] + 1,
        d[i - 1][j - 1] + cost,
      );

      if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
        d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
      }
    }
  }

  return d[aLength][bLength];
}
@ -0,0 +1,665 @@
|
||||
/**
|
||||
* Copyright (c) Facebook, Inc.
|
||||
* All rights reserved.
|
||||
*
|
||||
* This source code is licensed under the license found in the
|
||||
* LICENSE file in the root directory of this source tree.
|
||||
*
|
||||
* @flow
|
||||
*/
|
||||
|
||||
import type {
|
||||
FragmentDefinitionNode,
|
||||
GraphQLDirective,
|
||||
GraphQLSchema,
|
||||
} from 'graphql';
|
||||
import type {
|
||||
CompletionItem,
|
||||
ContextToken,
|
||||
State,
|
||||
TypeInfo,
|
||||
} from 'graphql-language-service-types';
|
||||
import type {Position} from 'graphql-language-service-utils';
|
||||
|
||||
import {
|
||||
GraphQLBoolean,
|
||||
GraphQLEnumType,
|
||||
GraphQLInputObjectType,
|
||||
GraphQLList,
|
||||
SchemaMetaFieldDef,
|
||||
TypeMetaFieldDef,
|
||||
TypeNameMetaFieldDef,
|
||||
assertAbstractType,
|
||||
doTypesOverlap,
|
||||
getNamedType,
|
||||
getNullableType,
|
||||
isAbstractType,
|
||||
isCompositeType,
|
||||
isInputType,
|
||||
} from 'graphql';
|
||||
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
|
||||
import {
|
||||
forEachState,
|
||||
getDefinitionState,
|
||||
getFieldDef,
|
||||
hintList,
|
||||
objectValues,
|
||||
} from './autocompleteUtils';
|
||||
|
||||
/**
|
||||
* Given GraphQLSchema, queryText, and context of the current position within
|
||||
* the source text, provide a list of typeahead entries.
|
||||
*/
|
||||
export function getAutocompleteSuggestions(
|
||||
schema: GraphQLSchema,
|
||||
queryText: string,
|
||||
cursor: Position,
|
||||
contextToken?: ContextToken,
|
||||
): Array<CompletionItem> {
|
||||
const token = contextToken || getTokenAtPosition(queryText, cursor);
|
||||
|
||||
const state =
|
||||
token.state.kind === 'Invalid' ? token.state.prevState : token.state;
|
||||
|
||||
// relieve flow errors by checking if `state` exists
|
||||
if (!state) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const kind = state.kind;
|
||||
const step = state.step;
|
||||
const typeInfo = getTypeInfo(schema, token.state);
|
||||
|
||||
// Definition kinds
|
||||
if (kind === 'Document') {
|
||||
return hintList(token, [
|
||||
{label: 'query'},
|
||||
{label: 'mutation'},
|
||||
{label: 'subscription'},
|
||||
{label: 'fragment'},
|
||||
{label: '{'},
|
||||
]);
|
||||
}
|
||||
|
||||
// Field names
|
||||
if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
|
||||
return getSuggestionsForFieldNames(token, typeInfo, schema);
|
||||
}
|
||||
|
||||
// Argument names
|
||||
if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
|
||||
const argDefs = typeInfo.argDefs;
|
||||
if (argDefs) {
|
||||
return hintList(
|
||||
token,
|
||||
argDefs.map(argDef => ({
|
||||
label: argDef.name,
|
||||
detail: String(argDef.type),
|
||||
documentation: argDef.description,
|
||||
})),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Input Object fields
|
||||
if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
|
||||
if (typeInfo.objectFieldDefs) {
|
||||
const objectFields = objectValues(typeInfo.objectFieldDefs);
|
||||
return hintList(
|
||||
token,
|
||||
objectFields.map(field => ({
|
||||
label: field.name,
|
||||
detail: String(field.type),
|
||||
documentation: field.description,
|
||||
})),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// Input values: Enum and Boolean
|
||||
if (
|
||||
kind === 'EnumValue' ||
|
||||
(kind === 'ListValue' && step === 1) ||
|
||||
(kind === 'ObjectField' && step === 2) ||
|
||||
(kind === 'Argument' && step === 2)
|
||||
) {
|
||||
return getSuggestionsForInputValues(token, typeInfo);
|
||||
}
|
||||
|
||||
// Fragment type conditions
|
||||
if (
|
||||
(kind === 'TypeCondition' && step === 1) ||
|
||||
(kind === 'NamedType' &&
|
||||
state.prevState != null &&
|
||||
state.prevState.kind === 'TypeCondition')
|
||||
) {
|
||||
return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
|
||||
}
|
||||
|
||||
// Fragment spread names
|
||||
if (kind === 'FragmentSpread' && step === 1) {
|
||||
return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
|
||||
}
|
||||
|
||||
// Variable definition types
|
||||
if (
|
||||
(kind === 'VariableDefinition' && step === 2) ||
|
||||
(kind === 'ListType' && step === 1) ||
|
||||
(kind === 'NamedType' &&
|
||||
state.prevState &&
|
||||
(state.prevState.kind === 'VariableDefinition' ||
|
||||
state.prevState.kind === 'ListType'))
|
||||
) {
|
||||
return getSuggestionsForVariableDefinition(token, schema);
|
||||
}
|
||||
|
||||
// Directive names
|
||||
if (kind === 'Directive') {
|
||||
return getSuggestionsForDirective(token, state, schema);
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
// Helper functions to get suggestions for each kinds
|
||||
function getSuggestionsForFieldNames(
|
||||
token: ContextToken,
|
||||
typeInfo: TypeInfo,
|
||||
schema: GraphQLSchema,
|
||||
): Array<CompletionItem> {
|
||||
if (typeInfo.parentType) {
|
||||
const parentType = typeInfo.parentType;
|
||||
const fields =
|
||||
parentType.getFields instanceof Function
|
||||
? objectValues(parentType.getFields())
|
||||
: [];
|
||||
if (isAbstractType(parentType)) {
|
||||
fields.push(TypeNameMetaFieldDef);
|
||||
}
|
||||
if (parentType === schema.getQueryType()) {
|
||||
fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
|
||||
}
|
||||
return hintList(
|
||||
token,
|
||||
fields.map(field => ({
|
||||
label: field.name,
|
||||
detail: String(field.type),
|
||||
documentation: field.description,
|
||||
isDeprecated: field.isDeprecated,
|
||||
deprecationReason: field.deprecationReason,
|
||||
})),
|
||||
);
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
function getSuggestionsForInputValues(
|
||||
token: ContextToken,
|
||||
typeInfo: TypeInfo,
|
||||
): Array<CompletionItem> {
|
||||
const namedInputType = getNamedType(typeInfo.inputType);
|
||||
if (namedInputType instanceof GraphQLEnumType) {
|
||||
const values = namedInputType.getValues();
|
||||
return hintList(
|
||||
token,
|
||||
values.map(value => ({
|
||||
label: value.name,
|
||||
detail: String(namedInputType),
|
||||
documentation: value.description,
|
||||
isDeprecated: value.isDeprecated,
|
||||
deprecationReason: value.deprecationReason,
|
||||
})),
|
||||
);
|
||||
} else if (namedInputType === GraphQLBoolean) {
|
||||
return hintList(token, [
|
||||
{
|
||||
label: 'true',
|
||||
detail: String(GraphQLBoolean),
|
||||
documentation: 'Not false.',
|
||||
},
|
||||
{
|
||||
label: 'false',
|
||||
detail: String(GraphQLBoolean),
|
||||
documentation: 'Not true.',
|
||||
},
|
||||
]);
|
||||
}
|
||||
|
||||
return [];
|
||||
}
|
||||
|
||||
function getSuggestionsForFragmentTypeConditions(
|
||||
token: ContextToken,
|
||||
typeInfo: TypeInfo,
|
||||
schema: GraphQLSchema,
|
||||
): Array<CompletionItem> {
|
||||
let possibleTypes;
|
||||
if (typeInfo.parentType) {
|
||||
if (isAbstractType(typeInfo.parentType)) {
|
||||
const abstractType = assertAbstractType(typeInfo.parentType);
|
||||
// Collect both the possible Object types as well as the interfaces
|
||||
// they implement.
|
||||
const possibleObjTypes = schema.getPossibleTypes(abstractType);
|
||||
const possibleIfaceMap = Object.create(null);
|
||||
possibleObjTypes.forEach(type => {
|
||||
type.getInterfaces().forEach(iface => {
|
||||
possibleIfaceMap[iface.name] = iface;
|
||||
});
|
||||
});
|
||||
possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
|
||||
} else {
|
||||
// The parent type is a non-abstract Object type, so the only possible
|
||||
// type that can be used is that same type.
|
||||
possibleTypes = [typeInfo.parentType];
|
||||
}
|
||||
} else {
|
||||
const typeMap = schema.getTypeMap();
|
||||
possibleTypes = objectValues(typeMap).filter(isCompositeType);
|
||||
}
|
||||
return hintList(
|
||||
token,
|
||||
possibleTypes.map(type => {
|
||||
const namedType = getNamedType(type);
|
||||
return {
|
||||
label: String(type),
|
||||
documentation: (namedType && namedType.description) || '',
|
||||
};
|
||||
}),
|
||||
);
|
||||
}
|
||||
|
||||
function getSuggestionsForFragmentSpread(
|
||||
token: ContextToken,
|
||||
typeInfo: TypeInfo,
|
||||
schema: GraphQLSchema,
|
||||
queryText: string,
|
||||
): Array<CompletionItem> {
|
||||
const typeMap = schema.getTypeMap();
|
||||
const defState = getDefinitionState(token.state);
|
||||
const fragments = getFragmentDefinitions(queryText);
|
||||
|
||||
// Filter down to only the fragments which may exist here.
|
||||
const relevantFrags = fragments.filter(
|
||||
frag =>
|
||||
// Only include fragments with known types.
|
||||
typeMap[frag.typeCondition.name.value] &&
|
||||
// Only include fragments which are not cyclic.
|
||||
!(
|
||||
defState &&
|
||||
defState.kind === 'FragmentDefinition' &&
|
||||
defState.name === frag.name.value
|
||||
) &&
|
||||
// Only include fragments which could possibly be spread here.
|
||||
isCompositeType(typeInfo.parentType) &&
|
||||
isCompositeType(typeMap[frag.typeCondition.name.value]) &&
|
||||
doTypesOverlap(
|
||||
schema,
|
||||
typeInfo.parentType,
|
||||
typeMap[frag.typeCondition.name.value],
|
||||
),
|
||||
);
|
||||
|
||||
return hintList(
|
||||
token,
|
||||
relevantFrags.map(frag => ({
|
||||
label: frag.name.value,
|
||||
detail: String(typeMap[frag.typeCondition.name.value]),
|
||||
documentation: `fragment ${frag.name.value} on ${
|
||||
frag.typeCondition.name.value
|
||||
}`,
|
||||
})),
|
||||
);
|
||||
}
|
||||
|
||||
function getFragmentDefinitions(
|
||||
queryText: string,
|
||||
): Array<FragmentDefinitionNode> {
|
||||
const fragmentDefs = [];
|
||||
runOnlineParser(queryText, (_, state) => {
|
||||
if (state.kind === 'FragmentDefinition' && state.name && state.type) {
|
||||
fragmentDefs.push({
|
||||
kind: 'FragmentDefinition',
|
||||
name: {
|
||||
kind: 'Name',
|
||||
value: state.name,
|
||||
},
|
||||
selectionSet: {
|
||||
kind: 'SelectionSet',
|
||||
selections: [],
|
||||
},
|
||||
typeCondition: {
|
||||
kind: 'NamedType',
|
||||
name: {
|
||||
kind: 'Name',
|
||||
value: state.type,
|
||||
},
|
||||
},
|
||||
});
|
||||
}
|
||||
});
|
||||
|
||||
return fragmentDefs;
|
||||
}
|
||||
|
||||
function getSuggestionsForVariableDefinition(
|
||||
token: ContextToken,
|
||||
schema: GraphQLSchema,
|
||||
): Array<CompletionItem> {
|
||||
const inputTypeMap = schema.getTypeMap();
|
||||
const inputTypes = objectValues(inputTypeMap).filter(isInputType);
|
||||
return hintList(
|
||||
token,
|
||||
inputTypes.map(type => ({
|
||||
label: type.name,
|
||||
documentation: type.description,
|
||||
})),
|
||||
);
|
||||
}
|
||||
|
||||
function getSuggestionsForDirective(
|
||||
token: ContextToken,
|
||||
state: State,
|
||||
schema: GraphQLSchema,
|
||||
): Array<CompletionItem> {
|
||||
if (state.prevState && state.prevState.kind) {
|
||||
const directives = schema
|
||||
.getDirectives()
|
||||
.filter(directive => canUseDirective(state.prevState, directive));
|
||||
return hintList(
|
||||
token,
|
||||
directives.map(directive => ({
|
||||
label: directive.name,
|
||||
documentation: directive.description || '',
|
||||
})),
|
||||
);
|
||||
}
|
||||
return [];
|
||||
}
|
||||
|
||||
export function getTokenAtPosition(
  queryText: string,
  cursor: Position,
): ContextToken {
  let styleAtCursor = null;
  let stateAtCursor = null;
  let stringAtCursor = null;
  const token = runOnlineParser(queryText, (stream, state, style, index) => {
    if (index === cursor.line) {
      if (stream.getCurrentPosition() >= cursor.character) {
        styleAtCursor = style;
        stateAtCursor = {...state};
        stringAtCursor = stream.current();
        return 'BREAK';
      }
    }
  });

  // Fall back to the state/style of the last parsed token when the values
  // at the cursor aren't available.
  return {
    start: token.start,
    end: token.end,
    string: stringAtCursor || token.string,
    state: stateAtCursor || token.state,
    style: styleAtCursor || token.style,
  };
}
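For orientation, a sketch of a typical call (the query text and cursor are hypothetical; the cursor is just `{line, character}`, zero-indexed, matching the `cursor.line`/`cursor.character` reads above):

// Hypothetical usage sketch: the cursor sits on `hero` in line 0.
const token = getTokenAtPosition('{ hero { name } }', {line: 0, character: 4});
// token.string — the token text under (or last parsed before) the cursor
// token.state  — parser state, later fed to getTypeInfo(schema, token.state)
// token.style  — the parser's style classification for that token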
/**
 * Provides a utility function to parse a given query text and construct a
 * `token` context object.
 * A token context provides useful information about the token/style that
 * CharacterStream currently possesses, as well as the end state and style
 * of the token.
 */
type callbackFnType = (
  stream: CharacterStream,
  state: State,
  style: string,
  index: number,
) => void | 'BREAK';

function runOnlineParser(
  queryText: string,
  callback: callbackFnType,
): ContextToken {
  const lines = queryText.split('\n');
  const parser = onlineParser();
  let state = parser.startState();
  let style = '';

  let stream: CharacterStream = new CharacterStream('');

  for (let i = 0; i < lines.length; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      style = parser.token(stream, state);
      const code = callback(stream, state, style, i);
      if (code === 'BREAK') {
        break;
      }
    }

    // The above while loop won't run when a line is empty; run the callback
    // once more so empty lines are still observed.
    callback(stream, state, style, i);

    if (!state.kind) {
      state = parser.startState();
    }
  }

  return {
    start: stream.getStartOfToken(),
    end: stream.getCurrentPosition(),
    string: stream.current(),
    state,
    style,
  };
}
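The callback contract is worth spelling out: it fires once per lexed token, plus once more per line (the post-loop call that exists to observe empty lines), and returning the string 'BREAK' stops scanning the current line. A small sketch, assuming the same module scope:

// Hypothetical sketch: collect every (style, string) pair the online parser
// emits for a one-line document.
const tokens = [];
runOnlineParser('query { id }', (stream, state, style) => {
  tokens.push({style, string: stream.current()});
});
// One entry per lexed token, plus one trailing repeat per line from the
// extra post-loop invocation noted above.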
function canUseDirective(
  state: $PropertyType<State, 'prevState'>,
  directive: GraphQLDirective,
): boolean {
  if (!state || !state.kind) {
    return false;
  }
  const kind = state.kind;
  const locations = directive.locations;
  switch (kind) {
    case 'Query':
      return locations.indexOf('QUERY') !== -1;
    case 'Mutation':
      return locations.indexOf('MUTATION') !== -1;
    case 'Subscription':
      return locations.indexOf('SUBSCRIPTION') !== -1;
    case 'Field':
    case 'AliasedField':
      return locations.indexOf('FIELD') !== -1;
    case 'FragmentDefinition':
      return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
    case 'FragmentSpread':
      return locations.indexOf('FRAGMENT_SPREAD') !== -1;
    case 'InlineFragment':
      return locations.indexOf('INLINE_FRAGMENT') !== -1;

    // Schema Definitions
    case 'SchemaDef':
      return locations.indexOf('SCHEMA') !== -1;
    case 'ScalarDef':
      return locations.indexOf('SCALAR') !== -1;
    case 'ObjectTypeDef':
      return locations.indexOf('OBJECT') !== -1;
    case 'FieldDef':
      return locations.indexOf('FIELD_DEFINITION') !== -1;
    case 'InterfaceDef':
      return locations.indexOf('INTERFACE') !== -1;
    case 'UnionDef':
      return locations.indexOf('UNION') !== -1;
    case 'EnumDef':
      return locations.indexOf('ENUM') !== -1;
    case 'EnumValue':
      return locations.indexOf('ENUM_VALUE') !== -1;
    case 'InputDef':
      return locations.indexOf('INPUT_OBJECT') !== -1;
    case 'InputValueDef':
      const prevStateKind = state.prevState && state.prevState.kind;
      switch (prevStateKind) {
        case 'ArgumentsDef':
          return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
        case 'InputDef':
          return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
      }
  }
  return false;
}
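The switch above simply maps the online parser's state kinds onto the DirectiveLocation names from the GraphQL spec. A sketch of the intent (the inline state object is a stand-in for a real parser state, and `schema` is assumed to be in scope):

// Hypothetical sketch: the built-in @deprecated directive declares the
// ENUM_VALUE location, so it may be suggested while parsing an enum value.
const deprecated = schema
  .getDirectives()
  .find(directive => directive.name === 'deprecated');
canUseDirective({kind: 'EnumValue'}, deprecated); // => true
canUseDirective({kind: 'Query'}, deprecated);     // => false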
// Utility for collecting rich type information given any token's state
// from the graphql-mode parser.
export function getTypeInfo(
  schema: GraphQLSchema,
  tokenState: State,
): TypeInfo {
  let argDef;
  let argDefs;
  let directiveDef;
  let enumValue;
  let fieldDef;
  let inputType;
  let objectFieldDefs;
  let parentType;
  let type;

  forEachState(tokenState, state => {
    switch (state.kind) {
      case 'Query':
      case 'ShortQuery':
        type = schema.getQueryType();
        break;
      case 'Mutation':
        type = schema.getMutationType();
        break;
      case 'Subscription':
        type = schema.getSubscriptionType();
        break;
      case 'InlineFragment':
      case 'FragmentDefinition':
        if (state.type) {
          type = schema.getType(state.type);
        }
        break;
      case 'Field':
      case 'AliasedField':
        if (!type || !state.name) {
          fieldDef = null;
        } else {
          fieldDef = parentType
            ? getFieldDef(schema, parentType, state.name)
            : null;
          type = fieldDef ? fieldDef.type : null;
        }
        break;
      case 'SelectionSet':
        parentType = getNamedType(type);
        break;
      case 'Directive':
        directiveDef = state.name ? schema.getDirective(state.name) : null;
        break;
      case 'Arguments':
        if (!state.prevState) {
          argDefs = null;
        } else {
          switch (state.prevState.kind) {
            case 'Field':
              argDefs = fieldDef && fieldDef.args;
              break;
            case 'Directive':
              argDefs = directiveDef && directiveDef.args;
              break;
            case 'AliasedField':
              const name = state.prevState && state.prevState.name;
              if (!name) {
                argDefs = null;
                break;
              }
              const field = parentType
                ? getFieldDef(schema, parentType, name)
                : null;
              if (!field) {
                argDefs = null;
                break;
              }
              argDefs = field.args;
              break;
            default:
              argDefs = null;
              break;
          }
        }
        break;
      case 'Argument':
        if (argDefs) {
          for (let i = 0; i < argDefs.length; i++) {
            if (argDefs[i].name === state.name) {
              argDef = argDefs[i];
              break;
            }
          }
        }
        inputType = argDef && argDef.type;
        break;
      case 'EnumValue':
        const enumType = getNamedType(inputType);
        enumValue =
          enumType instanceof GraphQLEnumType
            ? find(enumType.getValues(), val => val.value === state.name)
            : null;
        break;
      case 'ListValue':
        const nullableType = getNullableType(inputType);
        inputType =
          nullableType instanceof GraphQLList ? nullableType.ofType : null;
        break;
      case 'ObjectValue':
        const objectType = getNamedType(inputType);
        objectFieldDefs =
          objectType instanceof GraphQLInputObjectType
            ? objectType.getFields()
            : null;
        break;
      case 'ObjectField':
        const objectField =
          state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
        inputType = objectField && objectField.type;
        break;
      case 'NamedType':
        if (state.name) {
          type = schema.getType(state.name);
        }
        break;
    }
  });

  return {
    argDef,
    argDefs,
    directiveDef,
    enumValue,
    fieldDef,
    inputType,
    objectFieldDefs,
    parentType,
    type,
  };
}
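Because each parser state links back to its `prevState`, `forEachState` can replay the path from the outermost definition down to the token, which lets the switch above accumulate schema context step by step. Roughly, under the same module scope (query text and schema hypothetical):

// Hypothetical sketch: cursor inside the selection set of `hero`.
const token = getTokenAtPosition('{ hero { name } }', {line: 0, character: 10});
const info = getTypeInfo(schema, token.state);
// info.parentType — the composite type whose selection set encloses the cursor
// info.fieldDef   — the matching field definition, when it can be resolved
// info.inputType  — the expected input type while inside an argument value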
// Returns the first item in the array which causes predicate to return truthy.
function find(array, predicate) {
  for (let i = 0; i < array.length; i++) {
    if (predicate(array[i])) {
      return array[i];
    }
  }
  return null;
}
@ -0,0 +1,136 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  ASTNode,
  FragmentSpreadNode,
  FragmentDefinitionNode,
  OperationDefinitionNode,
  NamedTypeNode,
  TypeDefinitionNode,
} from 'graphql';
import type {
  Definition,
  DefinitionQueryResult,
  FragmentInfo,
  Position,
  Range,
  Uri,
  ObjectTypeInfo,
} from 'graphql-language-service-types';
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
import invariant from 'assert';

export const LANGUAGE = 'GraphQL';

function getRange(text: string, node: ASTNode): Range {
  const location = node.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return locToRange(text, location);
}

function getPosition(text: string, node: ASTNode): Position {
  const location = node.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return offsetToPosition(text, location.start);
}

export async function getDefinitionQueryResultForNamedType(
  text: string,
  node: NamedTypeNode,
  dependencies: Array<ObjectTypeInfo>,
): Promise<DefinitionQueryResult> {
  const name = node.name.value;
  const defNodes = dependencies.filter(
    ({definition}) => definition.name && definition.name.value === name,
  );
  if (defNodes.length === 0) {
    process.stderr.write(`Definition not found for GraphQL type ${name}`);
    return {queryRange: [], definitions: []};
  }
  const definitions: Array<Definition> = defNodes.map(
    ({filePath, content, definition}) =>
      getDefinitionForNodeDefinition(filePath || '', content, definition),
  );
  return {
    definitions,
    queryRange: definitions.map(_ => getRange(text, node)),
  };
}

export async function getDefinitionQueryResultForFragmentSpread(
  text: string,
  fragment: FragmentSpreadNode,
  dependencies: Array<FragmentInfo>,
): Promise<DefinitionQueryResult> {
  const name = fragment.name.value;
  const defNodes = dependencies.filter(
    ({definition}) => definition.name.value === name,
  );
  if (defNodes.length === 0) {
    process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
    return {queryRange: [], definitions: []};
  }
  const definitions: Array<Definition> = defNodes.map(
    ({filePath, content, definition}) =>
      getDefinitionForFragmentDefinition(filePath || '', content, definition),
  );
  return {
    definitions,
    queryRange: definitions.map(_ => getRange(text, fragment)),
  };
}
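A consumer-side sketch of the function above (variable names are placeholders; `FragmentInfo` values are produced elsewhere in the language service and pair a `FragmentDefinitionNode` with its file path and content):

// Hypothetical usage sketch, inside an async function:
// jump-to-definition for a `...PersonFields` spread.
const result = await getDefinitionQueryResultForFragmentSpread(
  queryText,          // text the spread appears in
  fragmentSpreadNode, // a FragmentSpreadNode from the parsed query
  projectFragments,   // Array<FragmentInfo> gathered from the project
);
// result.definitions[0].path — file that defines the fragment
// result.queryRange[0]       — range of the spread inside queryText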
export function getDefinitionQueryResultForDefinitionNode(
  path: Uri,
  text: string,
  definition: FragmentDefinitionNode | OperationDefinitionNode,
): DefinitionQueryResult {
  return {
    definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
    queryRange: definition.name ? [getRange(text, definition.name)] : [],
  };
}

function getDefinitionForFragmentDefinition(
  path: Uri,
  text: string,
  definition: FragmentDefinitionNode | OperationDefinitionNode,
): Definition {
  const name = definition.name;
  invariant(name, 'Expected ASTNode to have a Name.');
  return {
    path,
    position: getPosition(text, definition),
    range: getRange(text, definition),
    name: name.value || '',
    language: LANGUAGE,
    // This is a file inside the project root, good enough for now
    projectRoot: path,
  };
}

function getDefinitionForNodeDefinition(
  path: Uri,
  text: string,
  definition: TypeDefinitionNode,
): Definition {
  const name = definition.name;
  invariant(name, 'Expected ASTNode to have a Name.');
  return {
    path,
    position: getPosition(text, definition),
    range: getRange(text, definition),
    name: name.value || '',
    language: LANGUAGE,
    // This is a file inside the project root, good enough for now
    projectRoot: path,
  };
}
@ -0,0 +1,172 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  ASTNode,
  DocumentNode,
  GraphQLError,
  GraphQLSchema,
  Location,
  SourceLocation,
} from 'graphql';
import type {
  Diagnostic,
  CustomValidationRule,
} from 'graphql-language-service-types';

import invariant from 'assert';
import {findDeprecatedUsages, parse} from 'graphql';
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
import {
  Position,
  Range,
  validateWithCustomRules,
} from 'graphql-language-service-utils';

export const SEVERITY = {
  ERROR: 1,
  WARNING: 2,
  INFORMATION: 3,
  HINT: 4,
};

export function getDiagnostics(
  query: string,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  let ast = null;
  try {
    ast = parse(query);
  } catch (error) {
    const range = getRange(error.locations[0], query);
    return [
      {
        severity: SEVERITY.ERROR,
        message: error.message,
        source: 'GraphQL: Syntax',
        range,
      },
    ];
  }

  return validateQuery(ast, schema, customRules, isRelayCompatMode);
}
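In short: a parse failure short-circuits into a single syntax diagnostic, while everything else is delegated to validateQuery below, which is a no-op without a schema. A sketch (the schema and field names are hypothetical):

// Hypothetical sketch: syntax errors need no schema; validation does.
getDiagnostics('query {');               // [{severity: SEVERITY.ERROR, ...}]
getDiagnostics('{ goodField }');         // [] — no schema, nothing to validate
getDiagnostics('{ oldField }', schema);  // may add WARNING deprecation entries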
export function validateQuery(
  ast: DocumentNode,
  schema: ?GraphQLSchema = null,
  customRules?: Array<CustomValidationRule>,
  isRelayCompatMode?: boolean,
): Array<Diagnostic> {
  // We cannot validate the query unless a schema is provided.
  if (!schema) {
    return [];
  }

  const validationErrorAnnotations = mapCat(
    validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
    error => annotations(error, SEVERITY.ERROR, 'Validation'),
  );
  // Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
  // support older versions of graphql-js.
  const deprecationWarningAnnotations = !findDeprecatedUsages
    ? []
    : mapCat(findDeprecatedUsages(schema, ast), error =>
        annotations(error, SEVERITY.WARNING, 'Deprecation'),
      );
  return validationErrorAnnotations.concat(deprecationWarningAnnotations);
}

// General utility for map-cating (aka flat-mapping).
function mapCat<T>(
  array: Array<T>,
  mapper: (item: T) => Array<any>,
): Array<any> {
  return Array.prototype.concat.apply([], array.map(mapper));
}
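mapCat is the classic flat-map with one level of flattening, equivalent to Array.prototype.flatMap on newer runtimes:

mapCat([1, 2, 3], n => [n, n * 10]); // => [1, 10, 2, 20, 3, 30]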
function annotations(
  error: GraphQLError,
  severity: number,
  type: string,
): Array<Diagnostic> {
  if (!error.nodes) {
    return [];
  }
  return error.nodes.map(node => {
    const highlightNode =
      node.kind !== 'Variable' && node.name
        ? node.name
        : node.variable
        ? node.variable
        : node;

    invariant(error.locations, 'GraphQL validation error requires locations.');
    const loc = error.locations[0];
    const highlightLoc = getLocation(highlightNode);
    const end = loc.column + (highlightLoc.end - highlightLoc.start);
    return {
      source: `GraphQL: ${type}`,
      message: error.message,
      severity,
      range: new Range(
        new Position(loc.line - 1, loc.column - 1),
        new Position(loc.line - 1, end),
      ),
    };
  });
}

export function getRange(location: SourceLocation, queryText: string) {
  const parser = onlineParser();
  const state = parser.startState();
  const lines = queryText.split('\n');

  invariant(
    lines.length >= location.line,
    'Query text must have more lines than where the error happened',
  );

  let stream = null;

  for (let i = 0; i < location.line; i++) {
    stream = new CharacterStream(lines[i]);
    while (!stream.eol()) {
      const style = parser.token(stream, state);
      if (style === 'invalidchar') {
        break;
      }
    }
  }

  invariant(stream, 'Expected Parser stream to be available.');

  const line = location.line - 1;
  const start = stream.getStartOfToken();
  const end = stream.getCurrentPosition();

  return new Range(new Position(line, start), new Position(line, end));
}

/**
 * Get location info from a node in a type-safe way.
 *
 * The only way a node could not have a location is if we initialized the parser
 * (and therefore the lexer) with the `noLocation` option, but we always
 * call `parse` without options above.
 */
function getLocation(node: any): Location {
  const typeCastedNode = (node: ASTNode);
  const location = typeCastedNode.loc;
  invariant(location, 'Expected ASTNode to have a location.');
  return location;
}
@ -0,0 +1,186 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

/**
 * Ported from codemirror-graphql
 * https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
 */

import type {GraphQLSchema} from 'graphql';
import type {ContextToken} from 'graphql-language-service-types';
import type {Hover} from 'vscode-languageserver-types';
import type {Position} from 'graphql-language-service-utils';
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
import {GraphQLNonNull, GraphQLList} from 'graphql';

export function getHoverInformation(
  schema: GraphQLSchema,
  queryText: string,
  cursor: Position,
  contextToken?: ContextToken,
): Hover.contents {
  const token = contextToken || getTokenAtPosition(queryText, cursor);

  if (!schema || !token || !token.state) {
    return [];
  }

  const state = token.state;
  const kind = state.kind;
  const step = state.step;
  const typeInfo = getTypeInfo(schema, token.state);
  const options = {schema};

  // Given a Schema and a Token, produce the contents of an info tooltip.
  // To do this, accumulate strings into an array ("into") and pass it
  // through the various rendering functions below.
  if (
    (kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
    (kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
  ) {
    const into = [];
    renderField(into, typeInfo, options);
    renderDescription(into, options, typeInfo.fieldDef);
    return into.join('').trim();
  } else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
    const into = [];
    renderDirective(into, typeInfo, options);
    renderDescription(into, options, typeInfo.directiveDef);
    return into.join('').trim();
  } else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
    const into = [];
    renderArg(into, typeInfo, options);
    renderDescription(into, options, typeInfo.argDef);
    return into.join('').trim();
  } else if (
    kind === 'EnumValue' &&
    typeInfo.enumValue &&
    typeInfo.enumValue.description
  ) {
    const into = [];
    renderEnumValue(into, typeInfo, options);
    renderDescription(into, options, typeInfo.enumValue);
    return into.join('').trim();
  } else if (
    kind === 'NamedType' &&
    typeInfo.type &&
    typeInfo.type.description
  ) {
    const into = [];
    renderType(into, typeInfo, options, typeInfo.type);
    renderDescription(into, options, typeInfo.type);
    return into.join('').trim();
  }
}
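A usage sketch for the function above (the schema, query, and resulting string are hypothetical; the return value is the plain-text tooltip, or an empty array when there is nothing to show):

// Hypothetical sketch: hovering the `name` field of a Character type.
const contents = getHoverInformation(
  schema,
  '{ hero { name } }',
  {line: 0, character: 10},
);
// e.g. 'Character.name: String!\n\nThe name of the character.'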
function renderField(into, typeInfo, options) {
  renderQualifiedField(into, typeInfo, options);
  renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
}

function renderQualifiedField(into, typeInfo, options) {
  if (!typeInfo.fieldDef) {
    return;
  }
  const fieldName = (typeInfo.fieldDef.name: string);
  if (fieldName.slice(0, 2) !== '__') {
    renderType(into, typeInfo, options, typeInfo.parentType);
    text(into, '.');
  }
  text(into, fieldName);
}

function renderDirective(into, typeInfo, options) {
  if (!typeInfo.directiveDef) {
    return;
  }
  const name = '@' + typeInfo.directiveDef.name;
  text(into, name);
}

function renderArg(into, typeInfo, options) {
  if (typeInfo.directiveDef) {
    renderDirective(into, typeInfo, options);
  } else if (typeInfo.fieldDef) {
    renderQualifiedField(into, typeInfo, options);
  }

  if (!typeInfo.argDef) {
    return;
  }

  const name = typeInfo.argDef.name;
  text(into, '(');
  text(into, name);
  renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
  text(into, ')');
}

function renderTypeAnnotation(into, typeInfo, options, t) {
  text(into, ': ');
  renderType(into, typeInfo, options, t);
}

function renderEnumValue(into, typeInfo, options) {
  if (!typeInfo.enumValue) {
    return;
  }
  const name = typeInfo.enumValue.name;
  renderType(into, typeInfo, options, typeInfo.inputType);
  text(into, '.');
  text(into, name);
}

function renderType(into, typeInfo, options, t) {
  if (!t) {
    return;
  }
  if (t instanceof GraphQLNonNull) {
    renderType(into, typeInfo, options, t.ofType);
    text(into, '!');
  } else if (t instanceof GraphQLList) {
    text(into, '[');
    renderType(into, typeInfo, options, t.ofType);
    text(into, ']');
  } else {
    text(into, t.name);
  }
}

function renderDescription(into, options, def) {
  if (!def) {
    return;
  }
  const description =
    typeof def.description === 'string' ? def.description : null;
  if (description) {
    text(into, '\n\n');
    text(into, description);
  }
  renderDeprecation(into, options, def);
}

function renderDeprecation(into, options, def) {
  if (!def) {
    return;
  }
  const reason =
    typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
  if (!reason) {
    return;
  }
  text(into, '\n\n');
  text(into, 'Deprecated: ');
  text(into, reason);
}

function text(into: string[], content: string) {
  into.push(content);
}
@ -0,0 +1,121 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

import type {
  Outline,
  TextToken,
  TokenKind,
} from 'graphql-language-service-types';

import {Kind, parse, visit} from 'graphql';
import {offsetToPosition} from 'graphql-language-service-utils';

const {INLINE_FRAGMENT} = Kind;

const OUTLINEABLE_KINDS = {
  Field: true,
  OperationDefinition: true,
  Document: true,
  SelectionSet: true,
  Name: true,
  FragmentDefinition: true,
  FragmentSpread: true,
  InlineFragment: true,
};

type OutlineTreeConverterType = {[name: string]: Function};

export function getOutline(queryText: string): ?Outline {
  let ast;
  try {
    ast = parse(queryText);
  } catch (error) {
    return null;
  }

  const visitorFns = outlineTreeConverter(queryText);
  const outlineTrees = visit(ast, {
    leave(node) {
      if (
        OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
        visitorFns[node.kind]
      ) {
        return visitorFns[node.kind](node);
      }
      return null;
    },
  });
  return {outlineTrees};
}
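A sketch of the resulting shape (the query text is hypothetical; unparseable input yields null):

const outline = getOutline('query Hero { hero { name } }');
// outline.outlineTrees[0].tokenizedText ≈ [
//   {kind: 'keyword', value: 'query'},
//   {kind: 'whitespace', value: ' '},
//   {kind: 'class-name', value: 'Hero'},
// ]
// startPosition/endPosition come from offsetToPosition over node.loc.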
function outlineTreeConverter(docText: string): OutlineTreeConverterType {
  const meta = node => ({
    representativeName: node.name,
    startPosition: offsetToPosition(docText, node.loc.start),
    endPosition: offsetToPosition(docText, node.loc.end),
    children: node.selectionSet || [],
  });
  return {
    Field: node => {
      const tokenizedText = node.alias
        ? [buildToken('plain', node.alias), buildToken('plain', ': ')]
        : [];
      tokenizedText.push(buildToken('plain', node.name));
      return {tokenizedText, ...meta(node)};
    },
    OperationDefinition: node => ({
      tokenizedText: [
        buildToken('keyword', node.operation),
        buildToken('whitespace', ' '),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    Document: node => node.definitions,
    SelectionSet: node =>
      concatMap(node.selections, child => {
        return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
      }),
    Name: node => node.value,
    FragmentDefinition: node => ({
      tokenizedText: [
        buildToken('keyword', 'fragment'),
        buildToken('whitespace', ' '),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    FragmentSpread: node => ({
      tokenizedText: [
        buildToken('plain', '...'),
        buildToken('class-name', node.name),
      ],
      ...meta(node),
    }),
    InlineFragment: node => node.selectionSet,
  };
}

function buildToken(kind: TokenKind, value: string): TextToken {
  return {kind, value};
}

function concatMap(arr: Array<any>, fn: Function): Array<any> {
  const res = [];
  for (let i = 0; i < arr.length; i++) {
    const x = fn(arr[i], i);
    if (Array.isArray(x)) {
      res.push(...x);
    } else {
      res.push(x);
    }
  }
  return res;
}
@ -0,0 +1,31 @@
/**
 * Copyright (c) Facebook, Inc.
 * All rights reserved.
 *
 * This source code is licensed under the license found in the
 * LICENSE file in the root directory of this source tree.
 *
 * @flow
 */

export {
  getDefinitionState,
  getFieldDef,
  forEachState,
  objectValues,
  hintList,
} from './autocompleteUtils';

export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';

export {
  LANGUAGE,
  getDefinitionQueryResultForFragmentSpread,
  getDefinitionQueryResultForDefinitionNode,
} from './getDefinition';

export {getDiagnostics, validateQuery} from './getDiagnostics';
export {getOutline} from './getOutline';
export {getHoverInformation} from './getHoverInformation';

export {GraphQLLanguageService} from './GraphQLLanguageService';
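Taken together, this index is the public surface of the language-service interface. A consumer-side sketch (the package name is assumed from the upstream graphql-language-service project; in this repo the module is vendored under an internal path):

// Hypothetical import sketch of the re-exported surface.
import {
  getAutocompleteSuggestions,
  getDiagnostics,
  getHoverInformation,
  getOutline,
} from 'graphql-language-service-interface';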
BIN cmd/internal/serv/web/build/static/media/logo.57ee3b60.png (new file)
Binary file not shown. After: 31 KiB image.
@ -3,12 +3,12 @@
   "version": "0.1.0",
   "private": true,
   "dependencies": {
-    "@apollographql/graphql-playground-react": "^1.7.30",
+    "@apollographql/graphql-playground-react": "^1.7.31",
     "apollo-link-ws": "^1.0.8",
     "graphql": "^14.1.1",
-    "react": "^16.8.3",
-    "react-dom": "^16.8.3",
-    "react-scripts": "2.1.5",
+    "react": "^16.11.0",
+    "react-dom": "^16.11.0",
+    "react-scripts": "3.2.0",
     "subscriptions-transport-ws": "^0.9.14"
   },
   "scripts": {
BIN cmd/internal/serv/web/public/favicon.ico (new executable file)
Binary file not shown. After: 15 KiB image.
15 cmd/internal/serv/web/public/manifest.json Executable file
@ -0,0 +1,15 @@
{
  "short_name": "Super Graph",
  "name": "Super Graph - GraphQL API for Rails",
  "icons": [
    {
      "src": "favicon.ico",
      "sizes": "64x64 32x32 24x24 16x16",
      "type": "image/x-icon"
    }
  ],
  "start_url": ".",
  "display": "standalone",
  "theme_color": "#000000",
  "background_color": "#ffffff"
}
@ -15,8 +15,8 @@ class App extends Component {
     return (
       <div>
         <header style={{
-          background: '#4d2692',
-          color: '#f1e9ff',
+          background: '#09141b',
+          color: '#03a9f4',
           letterSpacing: '0.15rem',
           height: '65px',
           display: 'flex',
@ -32,14 +32,16 @@ class App extends Component {
         >
           <span style={{
             textTransform: 'uppercase',
-            marginLeft: '20px'
+            marginLeft: '20px',
+            paddingRight: '10px',
+            borderRight: '1px solid #fff'
           }}>
             Super Graph
           </span>
           <span style={{
             fontSize: '16px',
-            marginLeft: '20px',
-            color: '#b48aff'
+            marginLeft: '10px',
+            color: '#fff'
           }}>
             Instant GraphQL</span>
         </h3>
Some files were not shown because too many files have changed in this diff.