Compare commits


40 Commits

SHA1 Message Date
6029c5e05c Add support for websearch_to_tsquery in PG 11 2019-12-02 10:52:22 -05:00
9140e597e1 Add a guide to the internals of the codebase 2019-11-30 10:45:24 -05:00
94593c9cce Add a CONTRIBUTING.md guide for contributors 2019-11-29 22:29:31 -05:00
a96c211fe5 Refactor rename `Select.Table` to `Select.Name` 2019-11-29 01:38:23 -05:00
6d47f0df8e Fix for missing filters on nested selectors 2019-11-29 00:14:05 -05:00
e82bdbed65 Add a CHANGELOG.md 2019-11-29 00:14:05 -05:00
ef51b99fc7 Add issue templates 2019-11-28 16:47:56 -05:00
9ebd03fa8c Move license from MIT to Apache 2.0. Add Makefile 2019-11-28 01:25:46 -05:00
aff2a13ba4 Added support for query names to the allow.list 2019-11-26 01:36:19 -05:00
f518d5fc69 Fix bug with compiling anon queries 2019-11-25 02:22:33 -05:00
7583326d21 Move sql query logging from info to debug 2019-11-22 01:32:09 -05:00
8024a8420b Use logger error instead of panic in goja handlers 2019-11-22 00:58:03 -05:00
6bd27d7c79 Add a db:reset command only for dev mode 2019-11-22 00:07:06 -05:00
a4c09dedd5 Optimize db queries limit use of transactions 2019-11-21 02:14:12 -05:00
176514b5f1 Added support for multi-root queries 2019-11-19 00:47:55 -05:00
396f4bcfd8 Fix issues with JWT auth 2019-11-15 01:35:19 -05:00
51c5f037f4 Fix bug with migration filename generation 2019-11-10 01:41:58 -05:00
c576f1ae16 Fix bug with migration file name 2019-11-10 01:39:24 -05:00
0a6995eaca Fix bug with migration template name 2019-11-10 01:35:06 -05:00
06ed823a51 Fix bug with creating new migrations 2019-11-10 01:33:06 -05:00
3438c3efb9 Fix macro syntax bug in app templates 2019-11-09 21:53:26 -05:00
605ec63466 Fix bugs and add new production mode 2019-11-07 02:37:24 -05:00
c7e63a6200 Update docs and website with new features 2019-11-05 00:43:32 -05:00
89bc93e159 Add nested where clause to filter based on related tables 2019-11-04 23:44:42 -05:00
77a51924a7 Block unauthorized requests when 'anon' role is not defined 2019-11-02 17:13:17 -04:00
0deb3596c5 Change config key inherit to inherits 2019-10-31 01:14:51 -04:00
b87ba1fcd0 Add config driven presets for insert, update and upsert 2019-10-30 03:27:11 -04:00
8ae7210e70 Add config driven presets for insert, update and upserta 2019-10-30 02:30:31 -04:00
f8aac8d4d7 Allow config files to inherit from other config files 2019-10-28 01:48:04 -04:00
34867a2733 Add RBAC option to disable functions eg. count 2019-10-27 01:52:48 -04:00
4a8af69dd0 Add fuzz testing to 'serv' for the GQL hash parser 2019-10-26 15:43:40 -04:00
c74226208d Add fuzz testing to 'jsn' and 'qcode' 2019-10-26 03:02:05 -04:00
6d2f334011 Add ability to block queries and mutations by role 2019-10-26 01:34:29 -04:00
ff13f651d6 Merge pull request #11 from dosco/rbac: Role based access control and other fixes 2019-10-25 01:49:37 -04:00
cabd2d81ae Preserve allow.list ordering on save 2019-10-25 01:39:59 -04:00
4edc15eb98 Optimize prepared statement flow for RBAC 2019-10-25 00:01:22 -04:00
6bc66d28bc Get RBAC working for queries and mutations 2019-10-24 02:07:42 -04:00
c797deb4d0 Add built in 'anon' and 'user' roles 2019-10-15 02:30:19 -04:00
deb5b93c81 Add role based access control 2019-10-14 02:51:36 -04:00
85a74ed30c Update filters section in guide 2019-10-10 01:35:35 -04:00
102 changed files with 8263 additions and 4605 deletions
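The headline commit (6029c5e05c) wires Postgres 11's websearch_to_tsquery into the full-text search support. A minimal SQL sketch of what that function buys over the older to_tsquery; the products table and tsv column here are illustrative, not taken from this diff:

-- to_tsquery requires tsquery operator syntax and raises an error on
-- malformed input such as stray quotes or operators:
SELECT id, name FROM products
WHERE tsv @@ to_tsquery('english', 'craft & beer & !lager');

-- websearch_to_tsquery (PostgreSQL 11+) accepts plain web-search style input,
-- with quoted phrases, "or", and "-" for negation, and never raises a syntax
-- error, which makes it safer to feed a user's search string into directly:
SELECT id, name FROM products
WHERE tsv @@ websearch_to_tsquery('english', '"craft beer" -lager');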

.chglog/CHANGELOG.tpl.md (new executable file, 49 lines)

@@ -0,0 +1,49 @@
{{ if .Versions -}}
<a name="unreleased"></a>
## [Unreleased]
{{ if .Unreleased.CommitGroups -}}
{{ range .Unreleased.CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}
{{ range .Versions }}
<a name="{{ .Tag.Name }}"></a>
## {{ if .Tag.Previous }}[{{ .Tag.Name }}]{{ else }}{{ .Tag.Name }}{{ end }} - {{ datetime "2006-01-02" .Tag.Date }}
{{ range .CommitGroups -}}
### {{ .Title }}
{{ range .Commits -}}
- {{ if .Scope }}**{{ .Scope }}:** {{ end }}{{ .Subject }}
{{ end }}
{{ end -}}
{{- if .MergeCommits -}}
### Pull Requests
{{ range .MergeCommits -}}
- {{ .Header }}
{{ end }}
{{ end -}}
{{- if .NoteGroups -}}
{{ range .NoteGroups -}}
### {{ .Title }}
{{ range .Notes }}
{{ .Body }}
{{ end }}
{{ end -}}
{{ end -}}
{{ end -}}
{{- if .Versions }}
[Unreleased]: {{ .Info.RepositoryURL }}/compare/{{ $latest := index .Versions 0 }}{{ $latest.Tag.Name }}...HEAD
{{ range .Versions -}}
{{ if .Tag.Previous -}}
[{{ .Tag.Name }}]: {{ $.Info.RepositoryURL }}/compare/{{ .Tag.Previous.Name }}...{{ .Tag.Name }}
{{ end -}}
{{ end -}}
{{ end -}}

.chglog/config.yml (new executable file, 27 lines)

@@ -0,0 +1,27 @@
style: github
template: CHANGELOG.tpl.md
info:
  title: CHANGELOG
  repository_url: https://github.com/dosco/super-graph
options:
  commits:
    # filters:
    #   Type:
    #     - feat
    #     - fix
    #     - perf
    #     - refactor
  commit_groups:
    # title_maps:
    #   feat: Features
    #   fix: Bug Fixes
    #   perf: Performance Improvements
    #   refactor: Code Refactoring
  header:
    pattern: "^((\\w+)\\s.*)$"
    pattern_maps:
      - Subject
      - Type
  notes:
    keywords:
      - BREAKING CHANGE

.github/ISSUE_TEMPLATE/bug_report.md (new file, 24 lines)

@@ -0,0 +1,24 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: dosco
---
<!-- If you suspect this could be a bug, follow the template. -->
### What version of Super Graph are you using? `super-graph version`
### Have you tried reproducing the issue with the latest release?
### What is the hardware spec (RAM, OS)?
### Steps to reproduce the issue (config used to run Super Graph).
### Expected behaviour and actual result.

.github/ISSUE_TEMPLATE/documentation.md (new file, 12 lines)

@@ -0,0 +1,12 @@
---
name: Documentation
about: Suggest how we can improve documentation
title: ''
labels: ''
assignees: dosco
---
<!-- If you think the Super Graph documentation (https://supergraph.dev/guide.html) falls short, please suggest ways we can improve it. -->
<!-- explain it here. -->

.github/ISSUE_TEMPLATE/feature_request.md (new file, 20 lines)

@@ -0,0 +1,20 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: dosco
---
## Experience Report
Note: Feature requests are judged based on user experience; this is similar to the [Go Experience Reports](https://github.com/golang/go/wiki/ExperienceReports). These reports should focus on the problems; they need not propose solutions.
### What you wanted to do
### What you actually did
### Why that wasn't great, with examples
### Any external references to support your case

.gitignore (7 additions)

@@ -27,4 +27,11 @@
 main
 .DS_Store
 .swp
+.release
 main
+super-graph
+supergraph
+*-fuzz.zip
+crashers
+suppressions
+release

.wtc.yaml (new file, 13 lines)

@@ -0,0 +1,13 @@
no_trace: false
debounce: 300 # if rule has no debounce, this will be used instead
ignore: \.git/
trig: [start, run] # will run on start
rules:
  - name: start
  - name: run
    match: \.go$
    ignore: web|examples|docs|_test\.go$
    command: go run main.go serv
  - name: test
    match: _test\.go$
    command: go test -cover {PKG}

CHANGELOG.md (new file, 384 lines)

@@ -0,0 +1,384 @@
<a name="unreleased"></a>
## [Unreleased]
<a name="v0.12.4"></a>
## [v0.12.4] - 2019-11-28
### Move
- Move license from MIT to Apache 2.0. Add Makefile
<a name="v0.12.3"></a>
## [v0.12.3] - 2019-11-26
### Added
- Added support for query names to the allow.list
<a name="v0.12.2"></a>
## [v0.12.2] - 2019-11-25
### Fix
- Fix bug with compiling anon queries
<a name="v0.12.1"></a>
## [v0.12.1] - 2019-11-22
### Move
- Move sql query logging from info to debug
<a name="v0.12.0"></a>
## [v0.12.0] - 2019-11-22
### Use
- Use logger error instead of panic in goja handlers
<a name="v0.11.9"></a>
## [v0.11.9] - 2019-11-22
### Add
- Add a db:reset command only for dev mode
<a name="v0.11.8"></a>
## [v0.11.8] - 2019-11-21
### Optimize
- Optimize db queries limit use of transactions
<a name="v0.11.7"></a>
## [v0.11.7] - 2019-11-19
### Added
- Added support for multi-root queries
<a name="v0.11.6"></a>
## [v0.11.6] - 2019-11-15
### Fix
- Fix issues with JWT auth
- Fix bug with migration filename generation
- Fix bug with migration file name
<a name="v0.11.5"></a>
## [v0.11.5] - 2019-11-10
### Fix
- Fix bug with migration template name
<a name="v0.11.4"></a>
## [v0.11.4] - 2019-11-10
### Fix
- Fix bug with creating new migrations
<a name="v0.11.3"></a>
## [v0.11.3] - 2019-11-09
### Fix
- Fix macro syntax bug in app templates
<a name="v0.11.2"></a>
## [v0.11.2] - 2019-11-07
### Fix
- Fix bugs and add new production mode
<a name="v0.11.1"></a>
## [v0.11.1] - 2019-11-05
### Add
- Add nested where clause to filter based on related tables
### Block
- Block unauthorized requests when 'anon' role is not defined
### Update
- Update docs and website with new features
<a name="v0.11"></a>
## [v0.11] - 2019-11-01
### Add
- Add config driven presets for insert, update and upsert
- Add config driven presets for insert, update and upserta
- Add RBAC option to disable functions eg. count
- Add fuzz testing to 'serv' for the GQL hash parser
- Add fuzz testing to 'jsn' and 'qcode'
- Add ability to block queries and mutations by role
- Add built in 'anon' and 'user' roles
- Add role based access control
### Allow
- Allow config files to inherit from other config files
### Change
- Change config key inherit to inherits
### Get
- Get RBAC working for queries and mutations
### Optimize
- Optimize prepared statement flow for RBAC
### Preserve
- Preserve allow.list ordering on save
### Update
- Update filters section in guide
### Pull Requests
- Merge pull request [#11](https://github.com/dosco/super-graph/issues/11) from dosco/rbac
<a name="v0.10.1"></a>
## [v0.10.1] - 2019-10-06
### Add
- Add ability to set filters per operation / action
- Add upsert mutation
### Pull Requests
- Merge pull request [#10](https://github.com/dosco/super-graph/issues/10) from FourSigma/sm-examples-folder
<a name="v0.10"></a>
## [v0.10] - 2019-10-04
### Fix
- Fix return values for bulk mutations and delete
- Fix issues with mutation SQL
- Fix broken demo app
- Fix typo in 'across'
### Remove
- Remove extra link from README
### Update
- Update docs, getting started guide and mutations
### Pull Requests
- Merge pull request [#6](https://github.com/dosco/super-graph/issues/6) from muesli/typo-fixes
<a name="v0.9"></a>
## [v0.9] - 2019-10-01
### Fix
- Fix demo rails app broken build
<a name="v0.8"></a>
## [v0.8] - 2019-09-30
### Fix
- Fix invalid import bug
### Update
- Update documentation site
<a name="v0.7"></a>
## [v0.7] - 2019-09-29
### Failure
- Failure to prepare statements should be a warning
### Fix
- Fix duplicate column bug
<a name="v0.6"></a>
## [v0.6] - 2019-09-29
### Add
- Add database setup commands
- Add binary compression back to Dockerfile
- Add initialization command to setup new apps
- Add migrate command
- Add database seeding capability
- Add session variable for user id
- Add delete mutation
- Add update mutation
- Add insert mutation with bulk insert
- Add GoTO Aug, 19 presentation
- Add support for prepared statements
- Add end-to-end benchmarking
- Add object pooling for parser expressions
- Add request / response debugging for remote joins
- Add a presentation about GraphQL
- Add validation for remote JSON
- Add tracing for API stitching
- Add REST API stitching
- Add SQL query cacheing
- Add support for GraphQL variables
- Add fuzz testing to qcode
- Add test for Rails Redis cookie store integration
- Add an install guide
### Change
- Change fuzz test name to qcode
- Change logo from PNG to SVG
### Enable
- Enable reload on config change
### Fix
- Fix missing config name bug
- Fix new app templates
- Fix help message for migrate
- Fix session variable bug
- Fix test failures in `psql` and `serv`
- Fix demo docker services startup order
- Fix wrong value for false token bug. Reported by [@ThisIsMissEm](https://github.com/ThisIsMissEm)
- Fix allow.list file discovery bug
- Fix bug with allow list path
- Fix wrong value for use_allow_list in dev config
- Fix startup bug in demo script
- Fix url bug in allow list
- Fix bug [#676](https://github.com/dosco/super-graph/issues/676) found by fuzzer
- Fix race-condition in remote joins
- Fix cookie passing in web ui
- Fix bug with passing cookies in web ui
- Fix null pointer with invalid argument values
- Fix infinite loop bug in lexer
- Fix null pointer issue found by fuzz test
- Fix issue with fuzzbuzz config
- Fix demo to run as memory only
- Fix auth documentation
- Fix issue with web ui sizing
- Fix issue preventing docker-compose deploy
- Fix try demo documentation
### Further
- Further reduce allocations across hot paths
- Further reduce allocations on the compiler hot path
- Further optimize json parsing and editing performance
### Highlight
- Highlight top features better on the site
### Improve
- Improve readability of json parser code
- Improve the motivation section in the readme
- Improve the demo experience
### Make
- Make remote joins use parallel http requests
### Merge
- Merge branch 'master' into optimize-psql
### New
- New low allocation fast json parsing and editing library
### Optimize
- Optimize lexer and fix bugs
- Optimize the sql generator hot path
### Reduce
- Reduce allocations done by the stack
- Reduce steps to run the demo
- Reduce allocations and improve perf over 50%
### Remove
- Remove unused packages
- Remove the 'hello' test app folder
- Remove other allocations in psql
### Use
- Use hash's as ids for table relationships
### Watch
- Watch and reload on config changes
<a name="v0.5"></a>
## [v0.5] - 2019-04-10
### Add
- Add support for new Rails 5.2 aes-256-gcm cookies
- Add query support for ts_rank and ts_headline
- Add full text search support using TSV indexes
- Add missing assets folder
- Add fetch by ID feature
- Add documentation
### Cleanup
- Cleanup and redesign config files
### Fix
- Fix bug with auth config parsing
### Redesign
- Redesign config file architecture
### Reduce
- Reduce realloc of maps and slices
### Update
- Update docs with full-text search information
<a name="v0.4"></a>
## [v0.4] - 2019-04-01
<a name="v0.3"></a>
## [v0.3] - 2019-04-01
### Add
- Add SQL execution timing and tracing
- Add support for HAVING with aggregate queries
- Add aggregate functions to GQL queries
- Add Auth0 JWT support
- Add React UI building to the docker build flow
- Add compiler profiling
- Add benchmarks for GQL to SQL compile
- Add tests for gql to sql compile
### Cleanup
- Cleanup Dockerfile
### Fix
- Fix recurring packer issue docker hub builds
- Fix issue with asset packer breaking Docker builds
- Fix missing git package in Dockerfile
- Fix docker ignore values
- Fix image build failure on docker hub
- Fix build issue in Dockerfile
- Fix bugs and document the 'where' clause
- Fix perf issue with inflections
### Optimize
- Optimize docker image
### Pack
- Pack web UI with app into a single binary
### Upgrade
- Upgrade web UI packages
<a name="0.3"></a>
## 0.3 - 2019-03-24
### First
- First commit
### Fix
- Fix license to MIT
[Unreleased]: https://github.com/dosco/super-graph/compare/v0.12.4...HEAD
[v0.12.4]: https://github.com/dosco/super-graph/compare/v0.12.3...v0.12.4
[v0.12.3]: https://github.com/dosco/super-graph/compare/v0.12.2...v0.12.3
[v0.12.2]: https://github.com/dosco/super-graph/compare/v0.12.1...v0.12.2
[v0.12.1]: https://github.com/dosco/super-graph/compare/v0.12.0...v0.12.1
[v0.12.0]: https://github.com/dosco/super-graph/compare/v0.11.9...v0.12.0
[v0.11.9]: https://github.com/dosco/super-graph/compare/v0.11.8...v0.11.9
[v0.11.8]: https://github.com/dosco/super-graph/compare/v0.11.7...v0.11.8
[v0.11.7]: https://github.com/dosco/super-graph/compare/v0.11.6...v0.11.7
[v0.11.6]: https://github.com/dosco/super-graph/compare/v0.11.5...v0.11.6
[v0.11.5]: https://github.com/dosco/super-graph/compare/v0.11.4...v0.11.5
[v0.11.4]: https://github.com/dosco/super-graph/compare/v0.11.3...v0.11.4
[v0.11.3]: https://github.com/dosco/super-graph/compare/v0.11.2...v0.11.3
[v0.11.2]: https://github.com/dosco/super-graph/compare/v0.11.1...v0.11.2
[v0.11.1]: https://github.com/dosco/super-graph/compare/v0.11...v0.11.1
[v0.11]: https://github.com/dosco/super-graph/compare/v0.10.1...v0.11
[v0.10.1]: https://github.com/dosco/super-graph/compare/v0.10...v0.10.1
[v0.10]: https://github.com/dosco/super-graph/compare/v0.9...v0.10
[v0.9]: https://github.com/dosco/super-graph/compare/v0.8...v0.9
[v0.8]: https://github.com/dosco/super-graph/compare/v0.7...v0.8
[v0.7]: https://github.com/dosco/super-graph/compare/v0.6...v0.7
[v0.6]: https://github.com/dosco/super-graph/compare/v0.5...v0.6
[v0.5]: https://github.com/dosco/super-graph/compare/v0.4...v0.5
[v0.4]: https://github.com/dosco/super-graph/compare/v0.3...v0.4
[v0.3]: https://github.com/dosco/super-graph/compare/0.3...v0.3

CONTRIBUTING.md (new file, 79 lines)

@@ -0,0 +1,79 @@
# Contributing to Super Graph
Super Graph is a very approachable codebase and a project that is easy for almost
anyone with basic Go knowledge to start contributing to. It is also a young project,
so a lot of high-value work is there for the taking.

Even the GraphQL to SQL compiler that sits at the heart of Super Graph is essentially a textbook compiler with clean and easy-to-read code. The data structures used by the lexer, parser and SQL generator are easy to understand and modify.

Finally, we do have a lot of tests for the critical parts of the codebase, which makes it easy for you to modify with confidence. I'm always available for questions or any sort of guidance, so feel free to reach out over Twitter or Discord.
* [Getting Started](#get-started)
* [Setting Up the Development Environment](#get-setup)
* [Prerequisites](#prerequisites)
* [Get the Super Graph source](#get-source)
* [Start the development environment](#start-dev)
* [Testing](#testing)
* [Contributing](#contributing)
* [Guidelines](#guidelines)
* [Code style](#code-style)
## Getting Started {#get-started}
- Read the [Getting Started Guide](https://supergraph.dev/guide.html#get-started)
## Setting Up the Development Environment {#get-setup}
### Prerequisites
- Install [Git](https://git-scm.com/) (may be already installed on your system, or available through your OS package manager)
- Install [Go 1.13 or above](https://golang.org/doc/install)
- Install [Docker](https://docs.docker.com/v17.09/engine/installation/)
### Get the Super Graph source {#get-source}
The entire build flow uses the `Makefile`; there is a whole list of sub-commands you
can use to build, test, install, lint, etc.
```bash
git clone https://github.com/dosco/super-graph
cd ./super-graph
make help
```
### Start the development environment {#start-dev}

The entire development flow is packaged into a `docker-compose` workflow. The `up` command below will launch a Postgres database, an example e-commerce app in Rails, and Super Graph in development mode. The `db:seed` Rails task will insert sample data into Postgres.
```bash
docker-compose -f demo.yml run rails_app rake db:create db:migrate db:seed
docker-compose up
```
### Learn how the code works
[Super Graph codebase explained](https://supergraph.dev/internals.html)
### Testing and Linting {#testing}
```
make lint test
```
## Contributing
- **Pull requests are welcome**, as long as you're willing to put in the effort to meet the guidelines.
- Aim for clear, well written, maintainable code.
- Simple and minimal approach to features, like Go.
- Refactoring existing code now for better performance, better readability or better testability wins over adding a new feature.
- Don't add a function to a module that you don't use right now, or doesn't clearly enable a planned functionality.
- Don't ship a half done feature, which would require significant alterations to work fully.
- Avoid [Technical debt](https://en.wikipedia.org/wiki/Technical_debt) like cancer.
- Leave the code cleaner than when you began.
### Code style
- We're following [Go Code Review](https://github.com/golang/go/wiki/CodeReviewComments).
- Use `go fmt` to format your code before committing.
- If you see *any code* which clearly violates the style guide, please fix it and send a pull request. No need to ask for permission.
- Avoid unnecessary vertical spaces. Use your judgment or follow the code review comments.
- Wrap your code and comments to 100 characters, unless doing so makes the code less legible.

Dockerfile

@@ -6,14 +6,13 @@ RUN yarn
 RUN yarn build

 # stage: 2
-FROM golang:1.13beta1-alpine as go-build
+FROM golang:1.13.4-alpine as go-build
 RUN apk update && \
+    apk add --no-cache make && \
     apk add --no-cache git && \
     apk add --no-cache upx=3.95-r2

-RUN go get -u github.com/shanzi/wu && \
-    go install github.com/shanzi/wu && \
-    go get github.com/GeertJohan/go.rice/rice
+RUN GO111MODULE=off go get -u github.com/rafaelsq/wtc

 WORKDIR /app
 COPY . /app

@@ -21,11 +20,9 @@ COPY . /app
 RUN mkdir -p /app/web/build
 COPY --from=react-build /web/build/ ./web/build/

-ENV GO111MODULE=on
 RUN go mod vendor
-RUN go generate ./... && \
-    CGO_ENABLED=0 GOOS=linux go build -ldflags="-s -w" -o super-graph && \
-    echo "Compressing binary, will take a bit of time..." && \
+RUN make build
+RUN echo "Compressing binary, will take a bit of time..." && \
     upx --ultra-brute -qq super-graph && \
     upx -t super-graph

LICENSE (MIT license replaced with Apache License 2.0)

@@ -1,21 +1,176 @@
-The MIT License (MIT)
-
-Copyright (c) 2019-present Vikram Rangnekar. twitter.com/dosco
-
-Permission is hereby granted, free of charge, to any person obtaining a copy
-of this software and associated documentation files (the "Software"), to deal
-in the Software without restriction, including without limitation the rights
-to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-copies of the Software, and to permit persons to whom the Software is
-furnished to do so, subject to the following conditions:
-
-The above copyright notice and this permission notice shall be included in
-all copies or substantial portions of the Software.
-
-THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-THE SOFTWARE.

Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/

TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION

1. Definitions.

"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.

"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.

"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS

Makefile (new file, 103 lines)

@@ -0,0 +1,103 @@
BUILD ?= $(shell git rev-parse --short HEAD)
BUILD_DATE ?= $(shell git log -1 --format=%ci)
BUILD_BRANCH ?= $(shell git rev-parse --abbrev-ref HEAD)
BUILD_VERSION ?= $(shell git describe --always --tags)

GOPATH ?= $(shell go env GOPATH)

ifndef GOPATH
override GOPATH = $(HOME)/go
endif

export GO111MODULE := on

# Build-time Go variables
version        = github.com/dosco/super-graph/serv.version
gitBranch      = github.com/dosco/super-graph/serv.gitBranch
lastCommitSHA  = github.com/dosco/super-graph/serv.lastCommitSHA
lastCommitTime = github.com/dosco/super-graph/serv.lastCommitTime

BUILD_FLAGS ?= -ldflags '-s -w -X ${lastCommitSHA}=${BUILD} -X "${lastCommitTime}=${BUILD_DATE}" -X "${version}=${BUILD_VERSION}" -X ${gitBranch}=${BUILD_BRANCH}'

.PHONY: all build gen clean test run lint changelog release version help $(PLATFORMS)

test:
	@go test -v ./...

BIN_DIR := $(GOPATH)/bin
GORICE := $(BIN_DIR)/github.com/GeertJohan/go.rice
GOLANGCILINT := $(BIN_DIR)/golangci-lint
GITCHGLOG := $(BIN_DIR)/git-chglog

$(GORICE):
	@GO111MODULE=off go get -u github.com/GeertJohan/go.rice/rice

$(GITCHGLOG):
	@GO111MODULE=off go get -u github.com/git-chglog/git-chglog/cmd/git-chglog

changelog: $(GITCHGLOG)
	@git-chglog $(ARGS)

$(GOLANGCILINT):
	@GO111MODULE=off curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b $(GOPATH)/bin v1.21.0

lint: $(GOLANGCILINT)
	@golangci-lint run ./... --skip-dirs-use-default

BINARY := super-graph
LDFLAGS := -s -w
PLATFORMS := windows linux darwin
os = $(word 1, $@)

$(PLATFORMS): lint test gen
	@mkdir -p release
	@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64

release: windows linux darwin

all: lint test $(BINARY)

build: $(BINARY)

gen: $(GORICE)
	@go generate ./...

$(BINARY): clean gen
	@go build $(BUILD_FLAGS) -o $(BINARY)

clean:
	@rm -f $(BINARY)

run: clean
	@go run $(BUILD_FLAGS) main.go $(ARGS)

install: gen
	@echo $(GOPATH)
	@echo "Commit Hash: `git rev-parse HEAD`"
	@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
	@go install $(BUILD_FLAGS)
	@echo "New Hash:" `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`

uninstall: clean
	@go clean -i -x

version:
	@echo Super Graph ${BUILD_VERSION}
	@echo Build: ${BUILD}
	@echo Build date: ${BUILD_DATE}
	@echo Branch: ${BUILD_BRANCH}
	@echo Go version: $(shell go version)

help:
	@echo
	@echo Build commands:
	@echo " make build      - Build supergraph binary"
	@echo " make install    - Install supergraph binary"
	@echo " make uninstall  - Uninstall supergraph binary"
	@echo " make [platform] - Build for platform [linux|darwin|windows]"
	@echo " make release    - Build all platforms"
	@echo " make run        - Run supergraph (eg. make run ARGS=\"help\")"
	@echo " make test       - Run all tests"
	@echo " make changelog  - Generate changelog (eg. make changelog ARGS=\"help\")"
	@echo " make help       - This help"
	@echo

NOTICE (new file, 13 lines)

@@ -0,0 +1,13 @@
Copyright 2019 Vikram Rangnekar
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

README.md

@@ -1,16 +1,24 @@
-<a href="https://supergraph.dev"><img src="https://supergraph.dev/hologram.svg" width="100" height="100" align="right" /></a>
+<!-- <a href="https://supergraph.dev"><img src="https://supergraph.dev/hologram.svg" width="100" height="100" align="right" /></a> -->

-# Super Graph - Build web products faster. Instant GraphQL APIs for your apps
+<img src="docs/.vuepress/public/super-graph.png" width="250" />

-![MIT license](https://img.shields.io/github/license/dosco/super-graph.svg)
+### Build web products faster. Secure high performance GraphQL

+![Apache Public License 2.0](https://img.shields.io/github/license/dosco/super-graph.svg)
 ![Docker build](https://img.shields.io/docker/cloud/build/dosco/super-graph.svg)
 ![Cloud native](https://img.shields.io/badge/cloud--native-enabled-blue.svg)
 [![Discord Chat](https://img.shields.io/discord/628796009539043348.svg)](https://discord.gg/6pSWCTZ)

-Get an instant high performance GraphQL API for Postgres. No code needed. GraphQL is automatically transformed into efficient database queries.
+## What is Super Graph

+Super Graph is a micro-service that instantly and without code gives you a high performance and secure GraphQL API. Your GraphQL queries are auto translated into a single fast SQL query. No more writing API code as you develop your web frontend; just make the query you need and Super Graph will do the rest.
+
+Super Graph has a rich feature set like integrating with your existing Ruby on Rails apps, joining your DB with data from remote APIs, Role and Attribute based access control, support for JWT tokens, built-in DB mutations and seeding, and a lot more.

 ![GraphQL](docs/.vuepress/public/graphql.png?raw=true "")

 ## The story of Super Graph?

 After working on several products through my career I find that we spend way too much time on building API backends. Most APIs also require constant updating, this costs real time and money.

@@ -25,20 +33,31 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun
 ## Features

-- Works with Rails database schemas
-- Automatically learns schemas and relationships
-- Belongs-To, One-To-Many and Many-To-Many table relationships
-- Full text search and Aggregations
-- Rails Auth supported (Redis, Memcache, Cookie)
+- Role and Attribute based access control
+- Works with existing Ruby-On-Rails apps
+- Automatically learns database schemas and relationships
+- Full text search and aggregations
+- Rails authentication supported (Redis, Memcache, Cookie)
 - JWT tokens supported (Auth0, etc)
-- Join with remote REST APIs
+- Join database with remote REST APIs
 - Highly optimized and fast Postgres SQL queries
-- Support GraphQL queries and mutations
-- Configure with a simple config file
+- GraphQL queries and mutations
+- A simple config file
 - High performance GO codebase
 - Tiny docker image and low memory requirements
+- Fuzz tested for security
 - Database migrations tool
-- Write database seeding scripts in Javascript
+- Database seeding tool
+
+## Get started
+
+```
+git clone https://github.com/dosco/super-graph
+cd ./super-graph
+make install
+super-graph new <app_name>
+```

 ## Documentation

@@ -46,13 +65,16 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun

 ## Contact me

+I'm happy to help you deploy Super Graph so feel free to reach out over
+Twitter or Discord.
+
 [twitter/dosco](https://twitter.com/dosco)

 [chat/super-graph](https://discord.gg/6pSWCTZ)

 ## License

-[MIT](http://opensource.org/licenses/MIT)
+[Apache Public License 2.0](https://opensource.org/licenses/Apache-2.0)

 Copyright (c) 2019-present Vikram Rangnekar
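The "single fast SQL query" claim above is the core of the project: the GraphQL selection is compiled into one statement that also assembles the JSON response inside Postgres. A loose illustration of that shape for a products { id name } selection, assuming a products table; this is not Super Graph's actual generated SQL:

-- One round trip: select the rows and aggregate them into the JSON
-- body of the GraphQL response within the same statement.
SELECT json_agg(json_build_object('id', p.id, 'name', p.name)) AS products
FROM (SELECT id, name FROM products LIMIT 20) AS p;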

config/allow.list

@@ -1,5 +1,27 @@
 # http://localhost:8080/

+variables {
+  "data": [
+    {
+      "name": "Protect Ya Neck",
+      "created_at": "now",
+      "updated_at": "now"
+    },
+    {
+      "name": "Enter the Wu-Tang",
+      "created_at": "now",
+      "updated_at": "now"
+    }
+  ]
+}
+
+mutation {
+  products(insert: $data) {
+    id
+    name
+  }
+}
+
 variables {
   "update": {
     "name": "Wu-Tang",

@@ -16,16 +38,16 @@ mutation {
   }
 }

-variables {
-  "data": {
-    "product_id": 5
-  }
-}
-
-mutation {
-  products(id: $product_id, delete: true) {
+query {
+  users {
     id
-    name
+    email
+    picture: avatar
+    products(limit: 2, where: {price: {gt: 10}}) {
+      id
+      name
+      description
+    }
   }
 }

@@ -51,73 +73,37 @@ mutation {
   }
 }

-query {
-  products {
-    id
-    name
-    user {
-      email
-    }
-  }
-}
-
 variables {
-  "data": [
-    {
-      "name": "Gumbo1",
-      "created_at": "now",
-      "updated_at": "now"
-    },
-    {
-      "name": "Gumbo2",
-      "created_at": "now",
-      "updated_at": "now"
-    }
-  ]
+  "data": {
+    "product_id": 5
+  }
 }

+mutation {
+  products(id: $product_id, delete: true) {
+    id
+    name
+  }
+}
+
 query {
   products {
     id
     name
+    price
+    users {
+      email
+    }
   }
 }
-
-variables {
-  "update": {
-    "name": "Helloo",
-    "description": "World \u003c\u003e"
-  },
-  "user": 123
-}
-
-mutation {
-  products(id: 5, update: $update) {
-    id
-    name
-    description
-  }
-}
-
-variables {
-  "data": [
-    {
-      "name": "Gumbo1",
-      "created_at": "now",
-      "updated_at": "now"
-    },
-    {
-      "name": "Gumbo2",
-      "created_at": "now",
-      "updated_at": "now"
-    }
-  ]
-}
-
-query {
-  product {
-    id
-    name
-  }
-}
-
-query {
-  me {
-    id
-    email
-    full_name
-  }
-}

@@ -136,32 +122,27 @@ mutation {
   }
 }

-query {
-  users {
-    id
-    email
-    picture: avatar
-    products(limit: 2, where: {price: {gt: 10}}) {
-      id
-      name
-      description
-    }
-  }
-}
+variables {
+  "update": {
+    "name": "Helloo",
+    "description": "World \u003c\u003e"
+  },
+  "user": 123
+}
+
+mutation {
+  products(id: 5, update: $update) {
+    id
+    name
+    description
+  }
+}

 variables {
-  "data": [
-    {
-      "name": "Protect Ya Neck",
-      "created_at": "now",
-      "updated_at": "now"
-    },
-    {
-      "name": "Enter the Wu-Tang",
-      "created_at": "now",
-      "updated_at": "now"
-    }
-  ]
+  "data": {
+    "name": "WOOO",
+    "price": 50.5
+  }
 }

 mutation {

@@ -171,3 +152,34 @@ mutation {
   }
 }

+query getProducts {
+  products {
+    id
+    name
+    price
+    description
+  }
+}
+
+query {
+  deals {
+    id
+    name
+    price
+  }
+}
+
+variables {
+  "beer": "smoke"
+}
+
+query beerSearch {
+  products(search: $beer) {
+    id
+    name
+    search_rank
+    search_headline_description
+  }
+}
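The beerSearch query above returns search_rank and search_headline_description fields. Going by the v0.5 changelog entries ("Add query support for ts_rank and ts_headline"), these map onto Postgres full-text primitives; a hedged sketch of the underlying SQL shape, with the tsv column name assumed rather than taken from this diff:

-- ts_rank scores each match and ts_headline highlights the matched
-- terms in the description; q is the parsed search query.
SELECT id, name,
       ts_rank(tsv, q) AS search_rank,
       ts_headline('english', description, q) AS search_headline_description
FROM products, websearch_to_tsquery('english', 'smoke') AS q
WHERE tsv @@ q;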

config/dev.yml

@@ -5,15 +5,14 @@ web_ui: true
 # debug, info, warn, error, fatal, panic
 log_level: "debug"

-# Disable this in development to get a list of
-# queries used. When enabled super graph
-# will only allow queries from this list
-# List saved to ./config/allow.list
-use_allow_list: false
+# When production mode is 'true' only queries
+# from the allow list are permitted.
+# When it's 'false' all queries are saved to the
+# the allow list in ./config/allow.list
+production: false

 # Throw a 401 on auth failure for queries that need auth
-# valid values: always, per_query, never
-auth_fail_block: never
+auth_fail_block: false

 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the

@@ -22,7 +21,7 @@ enable_tracing: true

 # Watch the config folder and reload Super Graph
 # with the new configs when a change is detected
-reload_on_config_change: false
+reload_on_config_change: true

 # File that points to the database seeding script
 # seed_file: seed.js

@@ -52,8 +51,9 @@ auth:
   cookie: _app_session

   # Comment this out if you want to disable setting
-  # the user_id via a header. Good for testing
-  header: X-User-ID
+  # the user_id via a header for testing.
+  # Disable in production
+  creds_in_header: true

 rails:
   # Rails version this is used for reading the

@@ -93,44 +93,25 @@ database:
   #max_retries: 0
   #log_level: "debug"

-  # Define variables here that you want to use in filters
-  # sub-queries must be wrapped in ()
+  # Set session variable "user.id" to the user id
+  # Enable this if you need the user id in triggers, etc
+  set_user_id: false
+
+  # Define additional variables here to be used with filters
   variables:
-    account_id: "(select account_id from users where id = $user_id)"
+    admin_account_id: "5"

-  # Define defaults to for the field key and values below
-  defaults:
-    # filter: ["{ user_id: { eq: $user_id } }"]
-
   # Field and table names that you wish to block
   blocklist:
     - ar_internal_metadata
     - schema_migrations
     - secret
     - password
     - encrypted
     - token

 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-    # filter_query: ["{ id: { eq: $user_id } }"]
-    filter_update: ["{ id: { eq: $user_id } }"]
-    filter_delete: ["{ id: { eq: $user_id } }"]
-
-  # - name: products
-  #   # Multiple filters are AND'd together
-  #   filter: [
-  #     "{ price: { gt: 0 } }",
-  #     "{ price: { lt: 8 } }"
-  #   ]
-
   - name: customers
-    # No filter is used for this field not
-    # even defaults.filter
-    filter: none
     remotes:
       - name: payments
         id: stripe_id

@@ -149,7 +130,71 @@ tables:
     # real db table backing them
     name: me
     table: users
-    filter: ["{ id: { eq: $user_id } }"]

-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
+  - name: deals
+    table: products
+
+roles_query: "SELECT * FROM users WHERE id = $user_id"
+
+roles:
+  - name: anon
+    tables:
+      - name: products
+        query:
+          limit: 10
+          columns: ["id", "name", "description"]
+          aggregation: false
+        insert:
+          block: false
+        update:
+          block: false
+        delete:
+          block: false
+      - name: deals
+        query:
+          limit: 3
+          columns: ["name", "description"]
+          aggregation: false
+
+  - name: user
+    tables:
+      - name: users
+        query:
+          filters: ["{ id: { _eq: $user_id } }"]
+      - name: products
+        query:
+          limit: 50
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description", "search_rank", "search_headline_description"]
+          disable_functions: false
+        insert:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description"]
+          presets:
+            - user_id: "$user_id"
+            - created_at: "now"
+            - updated_at: "now"
+        update:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns:
+            - id
+            - name
+          presets:
+            - updated_at: "now"
+        delete:
+          block: true
+
+  - name: admin
+    match: id = 1000
+    tables:
+      - name: users
+        filters: []
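The roles_query and each role's match clause control how a request is mapped to a role. A hypothetical illustration of the semantics the config above implies (the SQL Super Graph actually generates is not shown in this diff): run roles_query for the authenticated user, then test each role's match expression against the returned row:

-- $user_id comes from the request's auth context; with match: "id = 1000"
-- only the user whose id is 1000 resolves to the admin role.
SELECT (u.id = 1000) AS is_admin
FROM (SELECT * FROM users WHERE id = $user_id) AS u;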

config/prod.yml

@@ -1,3 +1,7 @@
+# Inherit config from this other config file
+# so I only need to overwrite some values
+inherits: dev
+
 app_name: "Super Graph Production"
 host_port: 0.0.0.0:8080
 web_ui: false

@@ -5,15 +9,14 @@ web_ui: false
 # debug, info, warn, error, fatal, panic, disable
 log_level: "info"

-# Disable this in development to get a list of
-# queries used. When enabled super graph
-# will only allow queries from this list
-# List saved to ./config/allow.list
-use_allow_list: true
+# When production mode is 'true' only queries
+# from the allow list are permitted.
+# When it's 'false' all queries are saved to the
+# the allow list in ./config/allow.list
+production: true

 # Throw a 401 on auth failure for queries that need auth
-# valid values: always, per_query, never
-auth_fail_block: always
+auth_fail_block: true

 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the

@@ -38,110 +41,17 @@ enable_tracing: true
 # SG_AUTH_RAILS_REDIS_PASSWORD
 # SG_AUTH_JWT_PUBLIC_KEY_FILE

-# inflections:
-#   person: people
-#   sheep: sheep
-
-auth:
-  # Can be 'rails' or 'jwt'
-  type: rails
-  cookie: _app_session
-
-  # Comment this out if you want to disable setting
-  # the user_id via a header. Good for testing
-  header: X-User-ID
-
-  rails:
-    # Rails version this is used for reading the
-    # various cookies formats.
-    version: 5.2
-
-    # Found in 'Rails.application.config.secret_key_base'
-    secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566
-
-    # Remote cookie store. (memcache or redis)
-    # url: redis://127.0.0.1:6379
-    # password: test
-    # max_idle: 80,
-    # max_active: 12000,
-
-    # In most cases you don't need these
-    # salt: "encrypted cookie"
-    # sign_salt: "signed encrypted cookie"
-    # auth_salt: "authenticated encrypted cookie"
-
-  # jwt:
-  #   provider: auth0
-  #   secret: abc335bfcfdb04e50db5bb0a4d67ab9
-  #   public_key_file: /secrets/public_key.pem
-  #   public_key_type: ecdsa #rsa
-
 database:
   type: postgres
   host: db
   port: 5432
-  dbname: {{app_name_slug}}_development
+  dbname: app_production
   user: postgres
   password: ''
   #pool_size: 10
   #max_retries: 0
   #log_level: "debug"

-  # Define variables here that you want to use in filters
-  # sub-queries must be wrapped in ()
-  variables:
-    account_id: "(select account_id from users where id = $user_id)"
-
-  # Define defaults to for the field key and values below
-  defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
-
-  # Field and table names that you wish to block
-  blocklist:
-    - ar_internal_metadata
-    - schema_migrations
-    - secret
-    - password
-    - encrypted
-    - token
-
-tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-    # filter_query: ["{ id: { eq: $user_id } }"]
-    filter_update: ["{ id: { eq: $user_id } }"]
-    filter_delete: ["{ id: { eq: $user_id } }"]
-
-  - name: products
-    # Multiple filters are AND'd together
-    filter: [
-      "{ price: { gt: 0 } }",
-      "{ price: { lt: 8 } }"
-    ]
-
-  - name: customers
-    # No filter is used for this field not
-    # even defaults.filter
-    filter: none
-
-  # remotes:
-  #   - name: payments
-  #     id: stripe_id
-  #     url: http://rails_app:3000/stripe/$id
-  #     path: data
-  #     # pass_headers:
-  #     #   - cookie
-  #     #   - host
-  #     set_headers:
-  #       - name: Authorization
-  #         value: Bearer <stripe_api_key>
-
-  - # You can create new fields that have a
-    # real db table backing them
-    name: me
-    table: users
-    filter: ["{ id: { eq: $user_id } }"]
-
-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
+  # Set session variable "user.id" to the user id
+  # Enable this if you need the user id in triggers, etc
+  set_user_id: false

config/seed.js

@@ -46,10 +46,10 @@ for (i = 0; i < product_count; i++) {
   var data = {
     name: fake.beer_name(),
     description: desc,
-    price: fake.price(),
-    user_id: user.id,
-    created_at: "now",
-    updated_at: "now"
+    price: fake.price()
+    //user_id: user.id,
+    //created_at: "now",
+    //updated_at: "now"
   }

   var res = graphql(" \

@@ -57,7 +57,9 @@ for (i = 0; i < product_count; i++) {
     product(insert: $data) { \
       id \
     } \
-  }", { data: data })
+  }", { data: data }, {
+    user_id: 5
+  })

   products.push(res.product)
 }

@@ -34,7 +34,7 @@ services:
     volumes:
       - .:/app
     working_dir: /app
-    command: wu -pattern="*.go" go run main.go serv
+    command: wtc
     depends_on:
       - db
      - rails_app

Card.vue (new file, 19 lines)

@@ -0,0 +1,19 @@
<template>
  <div class="shadow bg-white p-4 flex items-start" :class="className">
    <slot name="image"></slot>
    <div class="pl-4">
      <h2 class="p-0">
        <slot name="title"></slot>
      </h2>
      <p>
        <slot name="body"></slot>
      </p>
    </div>
  </div>
</template>

<script>
export default {
  props: ["className"]
}
</script>

View File

@ -8,21 +8,24 @@
<div class="bg-bottom bg-no-repeat bg-cover"> <div class="bg-bottom bg-no-repeat bg-cover">
<div class="text-center md:text-left pt-24"> <div class="text-center md:text-left pt-24">
<h1 v-if="data.heroText !== null" class="text-5xl font-bold text-black pb-0 uppercase"> <h1 v-if="data.heroText !== null" class="text-5xl font-bold text-black pb-0 uppercase">
{{ data.heroText || $title || 'Hello' }} <img src="/super-graph.png" width="250" />
</h1> </h1>
<p class="text-2xl text-gray-700 leading-tight pb-0"> <p class="text-4xl text-gray-800 leading-tight mt-1">
{{ data.tagline || $description || 'Welcome to your VuePress site' }} Build web products faster. Secure high performance GraphQL
</p>
<p class="text-lg text-gray-600 leading-tight">
{{ data.longTagline }}
</p> </p>
<NavLink <NavLink
class="inline-block px-4 py-3 my-8 bg-blue-600 text-blue-100 font-bold rounded" class="inline-block px-4 py-3 my-8 bg-blue-600 text-blue-100 font-bold rounded"
:item="actionLink" :item="actionLink"
/> />
<a
class="px-4 py-3 my-8 border-2 border-gray-500 text-gray-600 font-bold rounded"
href="https://github.com/dosco/super-graph"
target="_blank"
>Github</a>
</div> </div>
</div> </div>
@ -51,17 +54,26 @@
</div> </div>
</div> </div>
<div class="bg-gray-100 mt-10"> <div class="bg-gray-100 mt-10">
<div class="container mx-auto px-10 md:px-0 py-32"> <div class="container mx-auto px-10 md:px-0 py-32">
<div class="pb-8 hidden md:block ">
<img src="arch-basic.svg">
</div>
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4"> <h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
What is {{ data.heroText }}? What is {{ data.heroText }}?
</h1> </h1>
<div class="text-2xl md:text-3xl"> <div class="text-2xl md:text-3xl">
Super Graph can automatically learn a Postgres database and instantly serve it as a fast and secured GraphQL API. It comes with tools to create a new app and manage it's database. You get it all, a very productive developer and a highly scalable app backend. It's designed to work well on serverless platforms by Google, AWS, Microsoft, etc. The goal is to save you a ton of time and money so you can focus on you're apps core value. Super Graph can automatically learn a Postgres database and instantly serve it as a fast and secured GraphQL API. It comes with tools to create a new app and manage it's database. You get it all, a very productive developer and a highly scalable app backend. It's designed to work well on serverless platforms by Google, AWS, Microsoft, etc. The goal is to save you a ton of time and money so you can focus on you're apps core value.
</div> </div>
</div> </div>
</div> </div>
<div class="flex flex-wrap"> <div class="flex flex-wrap">
<div class="md:w-2/4"> <div class="md:w-2/4">
<img src="/graphql.png"> <img src="/graphql.png">
@ -112,7 +124,7 @@
</div> </div>
</div> </div>
<div class="overflow-hidden bg-indigo-900" style="height: 730px"> <div class="overflow-hidden bg-indigo-900">
<div class="container mx-auto py-20"> <div class="container mx-auto py-20">
<img src="/super-graph-web-ui.png"> <img src="/super-graph-web-ui.png">
</div> </div>
@ -143,10 +155,73 @@
</div> </div>
<div class="border-t py-10"> <div class="border-t py-10">
<div class="container mx-auto"> <div class="block md:hidden w-100">
<style>.embed-container { position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden; max-width: 100%; } .embed-container iframe, .embed-container object, .embed-container embed { position: absolute; top: 0; left: 0; width: 100%; height: 100%; }</style> <iframe src='https://www.youtube.com/embed/MfPL2A-DAJk' frameborder='0' allowfullscreen style="width: 100%; height: 250px;">
<div class="embed-container shadow"> </iframe>
<iframe src='https://www.youtube.com/embed/MfPL2A-DAJk' frameborder='0' allowfullscreen></iframe> </div>
<div class="container mx-auto flex flex-col md:flex-row items-center">
<div class="w-100 md:w-1/2 p-8">
<h1 class="text-2xl font-bold">GraphQL the future of APIs</h1>
<p class="text-xl text-gray-600">Keeping a tight and fast development loop helps you iterate quickly. Leveraging technology like Super Graph focuses your team on building the core product and not reinventing wheels. GraphQL eliminate the dependency on the backend engineering and keeps the things moving fast</p>
</div>
<div class="hidden md:block md:w-1/2">
<style>.embed-container { position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden; max-width: 100%; } .embed-container iframe, .embed-container object, .embed-container embed { position: absolute; top: 0; left: 0; width: 100%; height: 100%; }</style>
<div class="embed-container shadow">
<iframe src='https://www.youtube.com/embed/MfPL2A-DAJk' frameborder='0' allowfullscreen >
</iframe>
</div>
</div>
</div>
</div>
<div class="bg-gray-200 mt-10">
<div class="container mx-auto px-10 md:px-0 py-32">
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
Build Secure Apps
</h1>
<div class="flex flex-col text-2xl md:text-3xl">
<card className="mb-1 p-8">
<template #image><font-awesome-icon icon="portrait" class="text-red-500" /></template>
<template #title>Role Based Access Control</template>
<template #body>Dynamically assign roles like admin, manager or anon to specific users. Generate role specific queries at runtime. For example, admins can get all users while others can only fetch their own user.</template>
</card>
<card className="mb-1 p-8">
<template #image><font-awesome-icon icon="shield-alt" class="text-blue-500" /></template>
<template #title>Prepared Statements</template>
<template #body>An additional layer of protection from a variety of security issues like SQL injection. In production mode all queries are precompiled into prepared statements so only those can be executed. This also significantly speeds up all queries.</template>
</card>
<card className="p-8">
<template #image><font-awesome-icon icon="lock" class="text-green-500"/></template>
<template #title>Fuzz Tested Code</template>
<template #body>Fuzzing is done by complex software that generates massive amounts of random input to detect if code is free of security bugs. Google uses fuzzing to protect everything from their cloud infrastructure to the Chrome browser.</template>
</card>
</div>
</div>
</div>
<div class="">
<div class="container mx-auto px-10 md:px-0 py-32">
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
More Features
</h1>
<div class="flex flex-col md:flex-row text-2xl md:text-3xl">
<card className="mr-0 md:mr-1 mb-1 flex-col w-100 md:w-1/3 items-center">
<template #image><img src="/arch-remote-join.svg" class="h-64"></template>
<template #title>Remote Joins</template>
<template #body>A powerful feature that allows you to query your database and remote REST APIs at the same time. For example, fetch a user from the DB, his tweets from Twitter and his payments from Stripe with a single GraphQL query.</template>
</card>
<card className="mr-0 md:mr-1 mb-1 flex-col w-100 md:w-1/3">
<template #image><img src="/arch-search.svg" class="h-64"></template>
<template #title>Full Text Search</template>
<template #body>Postgres has excellent full-text search built-in. You don't need another expensive service. Super Graph makes it super easy to use with keyword ranking and highlighting also supported.</template>
</card>
<card className="mb-1 flex-col w-100 md:w-1/3">
<template #image><img src="/arch-bulk.svg" class="h-64"></template>
<template #title>Bulk Inserts</template>
<template #body>Efficiently insert, update and delete multiple items with a single query. Upserts are also supported.</template>
</card>
</div> </div>
</div> </div>
</div> </div>
@ -164,9 +239,17 @@
<script> <script>
import NavLink from '@theme/components/NavLink.vue' import NavLink from '@theme/components/NavLink.vue'
import Navbar from '@theme/components/Navbar.vue' import Navbar from '@theme/components/Navbar.vue'
import Card from './Card.vue'
import { library } from '@fortawesome/fontawesome-svg-core'
import { faPortrait, faShieldAlt, faLock } from '@fortawesome/free-solid-svg-icons'
import { FontAwesomeIcon } from '@fortawesome/vue-fontawesome'
library.add(faPortrait, faShieldAlt, faLock)
export default { export default {
components: { NavLink, Navbar }, components: { NavLink, Navbar, FontAwesomeIcon, Card },
computed: { computed: {
data () { data () {
@ -1,20 +1,41 @@
let ogprefix = 'og: http://ogp.me/ns#'
let title = 'Super Graph'
let description = 'An instant GraphQL API for your app. No code needed.'
let color = '#f42525'
module.exports = { module.exports = {
title: 'Super Graph', title: title,
description: 'Get an instant GraphQL API for your Rails apps.', description: description,
themeConfig: { themeConfig: {
logo: '/hologram.svg', logo: '/hologram.svg',
nav: [ nav: [
{ text: 'Docs', link: '/guide' }, { text: 'Docs', link: '/guide' },
{ text: 'Deploy', link: '/deploy' }, { text: 'Deploy', link: '/deploy' },
{ text: 'Internals', link: '/internals' },
{ text: 'Github', link: 'https://github.com/dosco/super-graph' }, { text: 'Github', link: 'https://github.com/dosco/super-graph' },
{ text: 'Docker', link: 'https://hub.docker.com/r/dosco/super-graph/builds' }, { text: 'Docker', link: 'https://hub.docker.com/r/dosco/super-graph/builds' },
{ text: 'Join Chat', link: 'https://discord.gg/NKdXBc' }, { text: 'Join Chat', link: 'https://discord.gg/NKdXBc' },
], ],
serviceWorker: { serviceWorker: {
updatePopup: true updatePopup: true
}, },
head: [
//['link', { rel: 'icon', href: `/assets/favicon.ico` }],
['meta', { prefix: ogprefix, property: 'og:title', content: title }],
['meta', { prefix: ogprefix, property: 'twitter:title', content: title }],
['meta', { prefix: ogprefix, property: 'og:type', content: 'website' }],
['meta', { prefix: ogprefix, property: 'og:url', content: 'https://supergraph.dev' }],
['meta', { prefix: ogprefix, property: 'og:description', content: description }],
//['meta', { prefix: ogprefix, property: 'og:image', content: 'https://wireupyourfrontend.com/assets/logo.png' }],
// ['meta', { name: 'apple-mobile-web-app-capable', content: 'yes' }],
// ['meta', { name: 'apple-mobile-web-app-status-bar-style', content: 'black' }],
// ['link', { rel: 'apple-touch-icon', href: `/assets/apple-touch-icon.png` }],
// ['link', { rel: 'mask-icon', href: '/assets/safari-pinned-tab.svg', color: color }],
// ['meta', { name: 'msapplication-TileImage', content: '/assets/mstile-150x150.png' }],
// ['meta', { name: 'msapplication-TileColor', content: color }],
],
}, },
postcss: { postcss: {
@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="411px" height="240px" viewBox="-0.5 -0.5 411 240"><defs/><g><g transform="translate(1.5,6.5)"><switch><foreignObject style="overflow:visible;" pointer-events="none" width="379" height="232" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.2; vertical-align: top; white-space: nowrap; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display:inline-block;text-align:inherit;text-decoration:inherit;"><div><div style="font-size: 19px"><font color="#3b3b3b">[{ name: "beer1", description: "99 bottles of"},</font></div></div><div style="font-size: 19px"><div style="font-size: 12px"><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer2", description: "98 bottles of"},</font></div><div style="font-size: 19px"><div style="font-size: 12px"><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer3", description: "97 bottles of"},</font></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer4", description: "96 bottles of"},</font></div></div></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer5", description: "95 bottles of"},</font></div></div></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer6", description: "94 bottles of"},</font></div></div></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer7", description: "93 bottles of"},</font></div></div></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer8", description: "92 bottles of"}]</font></div></div></div><div><font color="#3b3b3b"><br /></font></div></div></div></div><div><font color="#3b3b3b"> </font></div></div><div style="font-size: 19px"><font color="#3b3b3b"> </font></div></div></div></foreignObject><text x="190" y="122" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g></g></svg>
@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="426px" height="204px" viewBox="-0.5 -0.5 426 204"><defs/><g><g transform="translate(1.5,6.5)"><switch><foreignObject style="overflow:visible;" pointer-events="none" width="411" height="196" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.2; vertical-align: top; white-space: nowrap; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display:inline-block;text-align:inherit;text-decoration:inherit;"><div><div style="font-size: 19px"><font color="#3b3b3b">query {</font></div><div><span style="font-size: 19px">  <font color="#cc0000">products</font><font color="#3b3b3b">(</font></span><span style="font-size: 19px"><font color="#3b3b3b">search: "</font><font color="#00994d">ale</font><font color="#3b3b3b">", where: { price: { gt: 3 }) {</font></span></div><div><span style="font-size: 19px">  <font color="#0066cc">  id</font></span></div><div><span style="font-size: 19px"><font color="#0066cc">    name</font></span></div><div><span style="font-size: 19px"><font color="#0066cc">    search_rank</font></span></div><div><span style="font-size: 19px"><font color="#0066cc">    search_headline_description</font><br /> <font color="#3b3b3b"> }<br />}</font></span></div></div><div style="font-size: 19px"><br /></div></div></div></foreignObject><text x="206" y="104" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g></g></svg>
@ -17,14 +17,14 @@ features:
details: Easy config file, quick to deploy, No code needed. It just works. details: Easy config file, quick to deploy, No code needed. It just works.
- title: High Performance - title: High Performance
details: Compiles your GraphQL into a fast SQL query in realtime. details: Compiles your GraphQL into a fast SQL query in realtime.
- title: Built in GO
details: Built in Go is a language created at Google to build fast and secure web services.
- title: Ruby-on-Rails - title: Ruby-on-Rails
details: Can read Rails cookies and supports rails database conventions. details: Can read Rails cookies and supports rails database conventions.
- title: Serverless - title: Serverless
details: Instant startup for scale to zero environments like Google Cloud Run, App Engine, AWS Lambda details: Instant startup for scale to zero environments like Google Cloud Run, App Engine, AWS Lambda
- title: Go Lang
details: Go is a language created at Google to build fast and secure web services.
- title: Free and Open Source - title: Free and Open Source
details: Not a VC funded startup. Not even a startup, just good old open source code
footer: MIT Licensed | Copyright © 2018-present Vikram Rangnekar footer: Apache Public License 2.0 | Copyright © 2018-present Vikram Rangnekar
--- ---
@ -4,25 +4,29 @@ sidebar: auto
# Guide to Super Graph # Guide to Super Graph
Get an instant high performance GraphQL API for Postgres. No code needed. GraphQL is automatically transformed into efficient database queries. Also designed to integrate with your Rails apps. Super Graph is a micro-service that instantly and without code gives you a high performance and secure GraphQL API. Your GraphQL queries are auto translated into a single fast SQL query. No more writing API code as you develop your web frontend; just make the query you need and Super Graph will do the rest.
Super Graph has a rich feature set like integrating with your existing Ruby on Rails apps, joining your DB with data from remote APIs, Role and Attribute based access control, support for JWT tokens, DB migrations, seeding and a lot more.
## Features ## Features
- Works with Rails database schemas - Role and Attribute based access control
- Automatically learns schemas and relationships - Works with existing Ruby-On-Rails apps
- Belongs-To, One-To-Many and Many-To-Many table relationships - Automatically learns database schemas and relationships
- Full text search and Aggregations - Full text search and aggregations
- Rails Auth supported (Redis, Memcache, Cookie) - Rails authentication supported (Redis, Memcache, Cookie)
- JWT tokens supported (Auth0, etc) - JWT tokens supported (Auth0, etc)
- Join with remote REST APIs - Join database with remote REST APIs
- Highly optimized and fast Postgres SQL queries - Highly optimized and fast Postgres SQL queries
- Support GraphQL queries and mutations - GraphQL queries and mutations
- Configure with a simple config file - A simple config file
- High performance GO codebase - High performance GO codebase
- Tiny docker image and low memory requirements - Tiny docker image and low memory requirements
- Fuzz tested for security
- Database migrations tool - Database migrations tool
- Write database seeding scripts in Javascript - Database seeding tool
## Try the demo app ## Try the demo app
@ -146,9 +150,13 @@ Super Graph can generate your initial app for you. The generated app will have c
You can then add your database schema to the migrations, maybe create some seed data using the seed script and launch Super Graph. You're now good to go and can start working on your UI frontend in React, Vue or whatever.
```bash ```bash
# use the below command to download and install Super Graph. You will need Go 1.13 or above # Download and install Super Graph. You will need Go 1.13 or above
GO111MODULE=on go get -u github.com/dosco/super-graph git clone https://github.com/dosco/super-graph && cd super-graph && make install
```
And then create and launch your new app
```bash
# create a new app and change to it's directory # create a new app and change to it's directory
super-graph new blog; cd blog super-graph new blog; cd blog
@ -275,7 +283,7 @@ transmission_gear_type
// Text // Text
word word
sentence sentence
paragrph paragraph
question question
quote quote
@ -475,6 +483,21 @@ query {
} }
``` ```
Multiple tables can also be fetched using a single GraphQL query. This is very fast since the entire query is converted into a single SQL query which the database can efficiently run.
```graphql
query {
user {
full_name
email
}
products {
name
description
}
}
```
### Fetching data ### Fetching data
To fetch a specific `product` by its ID you can use the `id` argument. The real id field name will be resolved automatically, so this query will work even if your id column is named something like `product_id`.
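A minimal sketch of such a query (`name` and `price` are stand-ins for your real columns):

```graphql
query {
  products(id: 3) {
    name
    price
  }
}
```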
@ -499,7 +522,36 @@ query {
### Advanced queries ### Advanced queries
Super Graph supports complex queries where you can add filters, ordering, offsets and limits on the query. For example, the query below will list all products where the price is greater than 10 and the id is not 5.
```graphql
query {
products(where: {
and: {
price: { gt: 10 },
not: { id: { eq: 5 } }
}
}) {
name
price
}
}
```
#### Nested where clause targeting related tables
Sometimes you need to query a table based on a condition that applies to a related table. For example, say you need to list all users who belong to an account. The query below will fetch the id and email of all users who belong to the account with id 3.
```graphql
query {
users(where: {
accounts: { id: { eq: 3 } }
}) {
id
email
}
}
```
#### Logical Operators #### Logical Operators
@ -613,7 +665,7 @@ mutation {
} }
``` ```
### Bulk insert #### Bulk insert
```json ```json
{ {
@ -659,7 +711,7 @@ mutation {
} }
``` ```
### Bulk update #### Bulk update
```json ```json
{ {
@ -702,7 +754,7 @@ mutation {
} }
``` ```
### Bulk delete #### Bulk delete
```json ```json
{ {
@ -743,7 +795,7 @@ mutation {
} }
``` ```
### Bulk upsert #### Bulk upsert
```json ```json
{ {
@ -878,82 +930,39 @@ class AddSearchColumn < ActiveRecord::Migration[5.1]
end end
``` ```
## Remote Joins ## GraphQL with React
It often happens that after fetching some data from the DB we need to call another API to fetch some more data and have all this combined into a single JSON response. For example along with a list of users you need their last 5 payments from Stripe. This requires you to query your DB for the users and Stripe for the payments. Super Graph handles all this for you; also, only the fields you requested from the Stripe API are returned. This is a quick, simple example using `graphql.js` [https://github.com/f/graphql.js/](https://github.com/f/graphql.js/)
::: tip Is this fast? ```js
Super Graph is able to fetch remote data and merge it with the DB response in an efficient manner. Several optimizations such as parallel HTTP requests and a zero-allocation JSON merge algorithm make this very fast. All of this without you having to write a line of code. import React, { useState, useEffect } from 'react'
::: import graphql from 'graphql.js'
For example you need to list the last 3 payments made by a user. You will first need to look up the user in the database and then call the Stripe API to fetch his last 3 payments. For this to work your user table in the db has a `customer_id` column that contains his Stripe customer ID. // Create a GraphQL client pointing to Super Graph
var graph = graphql("http://localhost:3000/api/v1/graphql", { asJSON: true })
Similarly you could also fetch the user's last tweet, lead info from Salesforce or whatever else you need. It's fine to mix up several different `remote joins` into a single GraphQL query. const App = () => {
const [user, setUser] = useState(null)
### Stripe API example
useEffect(() => {
The configuration is self explanatory. A `payments` field has been added under the `customers` table. This field is added to the `remotes` subsection that defines fields associated with `customers` that are remote and not real database columns. async function action() {
// Use the GraphQL client to execute a graphQL query
The `id` parameter maps a column from the `customers` table to the `$id` variable. In this case it maps `$id` to the `customer_id` column. // The second argument to the client is for the variables you need to pass
const result = await graph(`{ user { id first_name last_name picture_url } }`)()
```yaml setUser(result)
tables:
- name: customers
remotes:
- name: payments
id: stripe_id
url: http://rails_app:3000/stripe/$id
path: data
# debug: true
# pass_headers:
# - cookie
# - host
set_headers:
- name: Authorization
value: Bearer <stripe_api_key>
```
#### How do I make use of this?
Just include `payments` like you would any other GraphQL selector under the `customers` selector. Super Graph will call the configured API for you and stitch (merge) the JSON the API sends back with the JSON generated from the database query. GraphQL features like aliases and fields all work.
```graphql
query {
customers {
id
email
payments {
customer_id
amount
billing_details
} }
} action()
}, []);
return (
<div className="App">
<h1>{ JSON.stringify(user) }</h1>
</div>
);
} }
``` ```
And voilà, here is the result. You get all of this advanced and honestly complex querying capability without writing a single line of code. export default App;
```json
"data": {
"customers": [
{
"id": 1,
"email": "linseymertz@reilly.co",
"payments": [
{
"customer_id": "cus_YCj3ndB5Mz",
"amount": 100,
"billing_details": {
"address": "1 Infinity Drive",
"zipcode": "94024"
}
},
...
```
Even tracing data is available in the Super Graph web UI if tracing is enabled in the config. By default it is enabled in development. Additionally there you can set `debug: true` to enable http request / response dumping to help with debugging.
![Query Tracing](/tracing.png "Super Graph Web UI Query Tracing")
## Authentication ## Authentication
@ -1033,36 +1042,174 @@ We can get the JWT token either from the `authorization` header where we expect
For validation, a `secret` or a public key (ecdsa or rsa) is required. When using public keys they have to be in a PEM format file.
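As an illustration, a JWT auth block might look like the sketch below; the key names are assumptions based on the config file conventions shown later in this guide, so check your generated config for the real ones.

```yaml
auth:
  # assumption: 'jwt' as the auth type with an Auth0-style provider key
  type: jwt

  jwt:
    provider: auth0
    secret: your-hmac-secret
    # for ecdsa or rsa validation point to a PEM format file instead
    # public_key_file: /secrets/public_key.pem
```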
## Easy to setup ## Role based Access Control
It's a common use case for APIs to control what information they return or insert based on the role of the user. For example, when fetching a list of users, a normal user can only fetch his own entry while a manager can fetch all the users within a company and an admin user can fetch everyone. Or when creating a new user, an admin user can set a user's role while the user himself cannot set or change it. This is called role based access control or RBAC.
Super Graph allows you to set access control rules based on dynamically defined roles. You can create as many roles as you wish. The only two default (built-in) roles are `user` for authenticated requests and `anon` for unauthenticated. An authenticated request is one where Super Graph can extract a `user_id` based on the configured authentication method (jwt, rails cookies, etc).
### Configure RBAC
```yaml
roles_query: "SELECT * FROM users WHERE users.id = $user_id"
roles:
- name: user
tables:
- name: users
query:
filters: ["{ id: { _eq: $user_id } }"]
insert:
filters: ["{ user_id: { eq: $user_id } }"]
columns: ["id", "name", "description" ]
presets:
- created_at: "now"
update:
filters: ["{ user_id: { eq: $user_id } }"]
columns:
- id
- name
presets:
- updated_at: "now"
delete:
block: true
- name: admin
match: users.id = 1
tables:
- name: users
query:
filters: []
```
This configuration is relatively simple to follow. The `roles_query` parameter is the query that
must be run to help figure out a user's role. This query can be as complex as you like and include joins with other tables.
The individual roles are defined under the `roles` parameter and this includes each table the role has a custom setting for. The role is dynamically matched using the `match` parameter. For example, in the above case `users.id = 1` means that when the `roles_query` is executed, a user with the id `1` will be assigned the admin role; those that don't match get the `user` role if authenticated successfully, or the `anon` role otherwise.
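For example, with the configuration above the same query returns only the current user's own row under the `user` role (because of its `id` filter) but every row under the `admin` role (whose filters are empty):

```graphql
query {
  users {
    id
    email
  }
}
```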
## Remote Joins
It often happens that after fetching some data from the DB we need to call another API to fetch some more data and have all this combined into a single JSON response. For example along with a list of users you need their last 5 payments from Stripe. This requires you to query your DB for the users and Stripe for the payments. Super Graph handles all this for you; also, only the fields you requested from the Stripe API are returned.
::: tip Is this fast?
Super Graph is able to fetch remote data and merge it with the DB response in an efficient manner. Several optimizations such as parallel HTTP requests and a zero-allocation JSON merge algorithm make this very fast. All of this without you having to write a line of code.
:::
For example, you need to list the last 3 payments made by a user. You will first need to look up the user in the database and then call the Stripe API to fetch his last 3 payments. For this to work, your user table in the db needs a `customer_id` column that contains his Stripe customer ID.
Similarly you could also fetch the user's last tweet, lead info from Salesforce or whatever else you need. It's fine to mix up several different `remote joins` into a single GraphQL query.
### Stripe API example
The configuration is self-explanatory. A `payments` field has been added under the `customers` table. This field is added to the `remotes` subsection that defines fields associated with `customers` that are remote and not real database columns.
The `id` parameter maps a column from the `customers` table to the `$id` variable. In this case it maps `$id` to the `customer_id` column.
```yaml
tables:
- name: customers
remotes:
- name: payments
id: stripe_id
url: http://rails_app:3000/stripe/$id
path: data
# debug: true
# pass_headers:
# - cookie
# - host
set_headers:
- name: Authorization
value: Bearer <stripe_api_key>
```
#### How do I make use of this?
Just include `payments` like you would any other GraphQL selector under the `customers` selector. Super Graph will call the configured API for you and stitch (merge) the JSON the API sends back with the JSON generated from the database query. GraphQL features like aliases and fields all work.
```graphql
query {
customers {
id
email
payments {
customer_id
amount
billing_details
}
}
}
```
And voilà, here is the result. You get all of this advanced and honestly complex querying capability without writing a single line of code.
```json
"data": {
"customers": [
{
"id": 1,
"email": "linseymertz@reilly.co",
"payments": [
{
"customer_id": "cus_YCj3ndB5Mz",
"amount": 100,
"billing_details": {
"address": "1 Infinity Drive",
"zipcode": "94024"
}
},
...
```
Even tracing data is available in the Super Graph web UI if tracing is enabled in the config. By default it is enabled in development. Additionally there you can set `debug: true` to enable http request / response dumping to help with debugging.
![Query Tracing](/tracing.png "Super Graph Web UI Query Tracing")
## Configuration files
Configuration files can be in either YAML or JSON; their names are derived from the `GO_ENV` variable. For example, `GO_ENV=prod` will cause the `prod.yaml` config file to be used, while `GO_ENV=dev` will use `dev.yaml`. A path to look for the config files in can be specified using the `-path <folder>` command line argument.
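For example (a sketch; `serv` is assumed to be the command that starts the service, so confirm against `super-graph help`):

```bash
# picks up ./config/prod.yaml and runs with the production settings
GO_ENV=prod super-graph serv -path ./config
```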
We've tried to ensure that the config file is self-documenting and easy to work with.
```yaml ```yaml
# Inherit config from this other config file
# so I only need to overwrite some values
inherits: base
app_name: "Super Graph Development" app_name: "Super Graph Development"
host_port: 0.0.0.0:8080 host_port: 0.0.0.0:8080
web_ui: true web_ui: true
debug_level: 1
# debug, info, warn, error, fatal, panic, disable # debug, info, warn, error, fatal, panic
log_level: "info" log_level: "debug"
# Disable this in development to get a list of # When production mode is 'true' only queries
# queries used. When enabled super graph # from the allow list are permitted.
# will only allow queries from this list # When it's 'false' all queries are saved to the
# List saved to ./config/allow.list # the allow list in ./config/allow.list
use_allow_list: true production: false
# Throw a 401 on auth failure for queries that need auth # Throw a 401 on auth failure for queries that need auth
# valid values: always, per_query, never auth_fail_block: false
auth_fail_block: always
# Latency tracing for database queries and remote joins # Latency tracing for database queries and remote joins
# the resulting latency information is returned with the # the resulting latency information is returned with the
# response # response
enable_tracing: true enable_tracing: true
# Watch the config folder and reload Super Graph
# with the new configs when a change is detected
reload_on_config_change: true
# File that points to the database seeding script
# seed_file: seed.js
# Path pointing to where the migrations can be found
migrations_path: ./config/migrations
# Postgres related environment Variables # Postgres related environment Variables
# SG_DATABASE_HOST # SG_DATABASE_HOST
# SG_DATABASE_PORT # SG_DATABASE_PORT
@ -1085,8 +1232,9 @@ auth:
cookie: _app_session cookie: _app_session
# Comment this out if you want to disable setting # Comment this out if you want to disable setting
# the user_id via a header. Good for testing # the user_id via a header for testing.
header: X-User-ID # Disable in production
creds_in_header: true
rails: rails:
# Rails version this is used for reading the # Rails version this is used for reading the
@ -1097,10 +1245,10 @@ auth:
secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566 secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566
# Remote cookie store. (memcache or redis) # Remote cookie store. (memcache or redis)
# url: redis://127.0.0.1:6379 # url: redis://redis:6379
# password: test # password: ""
# max_idle: 80, # max_idle: 80
# max_active: 12000, # max_active: 12000
# In most cases you don't need these # In most cases you don't need these
# salt: "encrypted cookie" # salt: "encrypted cookie"
@ -1120,64 +1268,107 @@ database:
dbname: app_development dbname: app_development
user: postgres user: postgres
password: '' password: ''
# pool_size: 10
# max_retries: 0
# log_level: "debug"
# Define variables here that you want to use in filters #schema: "public"
#pool_size: 10
#max_retries: 0
#log_level: "debug"
# Set session variable "user.id" to the user id
# Enable this if you need the user id in triggers, etc
set_user_id: false
# Define additional variables here to be used with filters
variables: variables:
account_id: "select account_id from users where id = $user_id" admin_account_id: "5"
# Define defaults to for the field key and values below # Field and table names that you wish to block
defaults: blocklist:
filter: ["{ user_id: { eq: $user_id } }"] - ar_internal_metadata
- schema_migrations
- secret
- password
- encrypted
- token
# Field and table names that you wish to block tables:
blacklist: - name: customers
- ar_internal_metadata remotes:
- schema_migrations - name: payments
- secret id: stripe_id
- password url: http://rails_app:3000/stripe/$id
- encrypted path: data
- token # debug: true
pass_headers:
- cookie
set_headers:
- name: Host
value: 0.0.0.0
# - name: Authorization
# value: Bearer <stripe_api_key>
tables: - # You can create new fields that have a
- name: users # real db table backing them
# This filter will overwrite defaults.filter name: me
filter: ["{ id: { eq: $user_id } }"] table: users
- name: products roles_query: "SELECT * FROM users WHERE id = $user_id"
# Multiple filters are AND'd together
filter: [
"{ price: { gt: 0 } }",
"{ price: { lt: 8 } }"
]
- name: customers roles:
# No filter is used for this field not - name: anon
# even defaults.filter tables:
filter: none - name: products
limit: 10
remotes: query:
- name: payments columns: ["id", "name", "description" ]
id: stripe_id aggregation: false
url: http://rails_app:3000/stripe/$id
path: data
# pass_headers:
# - cookie
# - host
set_headers:
- name: Authorization
value: Bearer <stripe_api_key>
- # You can create new fields that have a insert:
# real db table backing them allow: false
name: me
table: users update:
filter: ["{ id: { eq: $user_id } }"] allow: false
delete:
allow: false
- name: user
tables:
- name: users
query:
filters: ["{ id: { _eq: $user_id } }"]
- name: products
query:
limit: 50
filters: ["{ user_id: { eq: $user_id } }"]
columns: ["id", "name", "description" ]
disable_functions: false
insert:
filters: ["{ user_id: { eq: $user_id } }"]
columns: ["id", "name", "description" ]
set:
- created_at: "now"
update:
filters: ["{ user_id: { eq: $user_id } }"]
columns:
- id
- name
set:
- updated_at: "now"
delete:
block: true
- name: admin
match: id = 1000
tables:
- name: users
filters: []
# - name: posts
# filter: ["{ account_id: { _eq: $account_id } }"]
``` ```
If deploying into environments like Kubernetes it's useful to be able to configure things like secrets and hosts through environment variables, therefore we expose the below environment variables. This is especially useful for secrets since they are usually injected via a secrets management framework, i.e. Kubernetes Secrets.
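For example, a Kubernetes deployment might inject them like the sketch below; the names follow the `SG_DATABASE_*` pattern listed in the config above, and `SG_DATABASE_PASSWORD` plus the secret name are assumptions for illustration:

```yaml
env:
  - name: SG_DATABASE_HOST
    value: postgres.default.svc.cluster.local
  - name: SG_DATABASE_PORT
    value: "5432"
  - name: SG_DATABASE_PASSWORD      # assumed variable name
    valueFrom:
      secretKeyRef:
        name: super-graph-secrets   # hypothetical secret name
        key: database-password
```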
@ -1212,9 +1403,6 @@ brew install yarn
# yarn install dependencies and build the web ui # yarn install dependencies and build the web ui
(cd web && yarn install && yarn build) (cd web && yarn install && yarn build)
# generate some stuff the go code needs
go generate ./...
# do this only the first time, to set up the database
docker-compose run rails_app rake db:create db:migrate db:seed docker-compose run rails_app rake db:create db:migrate db:seed
@ -1223,6 +1411,10 @@ docker-compose up
``` ```
## MIT License ## Learn how the code works
MIT Licensed | Copyright © 2018-present Vikram Rangnekar [Super Graph codebase explained](https://supergraph.dev/internals.html)
## Apache License 2.0
Apache Public License 2.0 | Copyright © 2018-present Vikram Rangnekar
docs/internals.md (new file, 241 lines)
@ -0,0 +1,241 @@
---
sidebar: auto
---
# Super Graph Codebase Explained
Super Graph code is made up of a number of packages. We have done our best to keep each package small and focused. Let us begin by looking at some of these packages.
1. qcode - GraphQL lexer and parser.
2. psql - SQL generator
3. serv - HTTP Endpoint, Configs, CLI, etc
4. rails - Rails cookie and session store decoders
## QCODE
This package contains the core of the GraphQL compiler. It handles the lexing and parsing of the GraphQL query, transforming it into an internal representation called
`QCode`.
This is the first step of the compiling process. The `func NewCompiler(c Config)` function creates a new instance of this compiler, which has its own config.
Keep in mind QCode has no knowledge of the database structure; it is designed to be a fast GraphQL parser. Care is taken to keep memory allocations to a minimum.
```go
const (
opQuery
opMutate
...
)
type QCode struct {
Type QType
Selects []Select
...
}
type Select struct {
ID int32
ParentID int32
Args map[string]*Node
Name string
FieldName string
Cols []Column
Where *Exp
OrderBy []*OrderBy
DistinctOn []string
Paging Paging
Children []int32
Functions bool
Allowed map[string]struct{}
PresetMap map[string]string
PresetList []string
}
```
But before the incoming GraphQL query can be turned into QCode it must first be tokenized by the lexer `lex.go`. As the tokenizer walks the bytes of the query it generates tokens, `item` structs, which are then consumed by the next step, the parser `parse.go`.
```go
type item struct {
typ itemType
pos Pos
end Pos
}
```
For example, a simple query like `query getUser { user { id } }` will be converted into several tokens like below.
```go
item{itemQuery, 0, 4} // query
item{itemName, 6, 12} // getUser
item{itemObjOpen, 16, 20} // {
...
```
These tokens are then fed into the parser `parse.go`, which does the work of generating an abstract syntax tree (AST) from the tokens. This AST is an internal representation (data structure) and is not exposed outside the package. Since the AST is a tree, a stack `stack.go` is used to walk the tree and generate the QCode AST. The QCode data structure is also a tree (represented as an array). This is then returned to the caller of the compile function.
```go
type Operation struct {
Type parserType
Name string
Args []Arg
Fields []Field
}
type Field struct {
ID int32
ParentID int32
Name string
Alias string
Args []Arg
Children []int32
}
```
## PSQL
This package is responsible for generating Postgres SQL from the QCode AST. There are various GraphQL query types (Query, Mutation, etc.) and several more sub-types like single-root or multi-root queries, and various types of mutations (insert, update, delete, bulk insert, etc.). This package is designed to be able to generate SQL for all of those types.
In addition to QCode, variable data is also passed to the compile function within this package. Variables are decoded to derive what is being inserted and what kind of insert it is, single or bulk. This information is not available in the GraphQL query; it's passed in separately via variables. This package is able to put all this together and generate the right SQL code.
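For example, a single object in the variables implies a single insert while an array implies a bulk insert. A sketch (the `data` variable name is an assumption based on the mutation examples in the guide):

```json
{
  "data": [
    { "name": "Art of Computer Programming", "price": 30.5 },
    { "name": "The Go Programming Language", "price": 25.0 }
  ]
}
```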
The entry point of this package is in `query.go`. The database schema must be passed in the config object when creating a new compiler instance with `NewCompiler`. The functions to extract this schema from the database are also part of this package (`tables.go`). The `GetTables` function fetches all the tables from the database and `GetColumns` fetches columns and relationship information.
```go
func NewCompiler(conf Config) *Compiler {
return &Compiler{conf.Schema, conf.Vars}
}
func (co *Compiler) Compile(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
switch qc.Type {
case qcode.QTQuery:
return co.compileQuery(qc, w)
case qcode.QTInsert, qcode.QTUpdate, qcode.QTDelete, qcode.QTUpsert:
return co.compileMutation(qc, w, vars)
}
return 0, fmt.Errorf("Unknown operation type %d", qc.Type)
}
```
GraphQL input is first converted to QCode.
```graphql
query {
user {
id
}
posts {
title
}
}
```
SQL: in reality the generated SQL is far more complex since it has to be very efficient, leverage the power of Postgres, and support RBAC (role based access control), all in a single SQL query.
```sql
SELECT users.id, posts.title FROM users, posts;
```
## SERV
The `serv` package contains most of the code that turns the above compiler into an HTTP service. It also includes authentication middleware, remote join resolvers, config parsing, and database migration and seeding commands.
Another big feature that this package handles is the `allow.list` management code. In production mode this means parsing the allow list file and registering prepared statements; in development mode it means adding GraphQL queries to this file.
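The on-disk format isn't reproduced here, but conceptually each entry is a named GraphQL query; a hypothetical entry might look like:

```graphql
# query getUser

query getUser {
  user {
    id
    email
  }
}
```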
Currently the following global variables are referenced across the package. In the future I'd prefer to move these into a context struct and pass that around instead.
```go
var (
logger zerolog.Logger // logger for everything but errors
errlog zerolog.Logger // logger for errors includes line numbers
conf *config // parsed config
confPath string // path to the config file
db *pgxpool.Pool // database connection pool
schema *psql.DBSchema // database tables, columns and relationships
qcompile *qcode.Compiler // qcode compiler
pcompile *psql.Compiler // postgres sql compiler
)
```
## Testing
There are several unit tests and benchmark tests (e.g. `parse_test.go`) included. There are also scripts included for memory (`pprof_mem.sh`) and cpu (`pprof_cpu.sh`) profiling.
```go
// Test to ensure synthetic tables generate the correct SQL
func syntheticTables(t *testing.T) {
gql := `query {
me {
email
}
}`
sql := `SELECT json_object_agg('me', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT ) AS "json_row_0")) AS "json_0" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
```
You can run tests within each package or across the entire app. It is usually fastest to first write a test and then build the feature to satisfy it.
```bash
go test -v ./...
```
Memory profiling can help find where allocations are happening within the package code.
```bash
$ cd ./psql
$ ./pprof_mem.sh
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/psql
BenchmarkCompile-8 52567 19401 ns/op 3918 B/op 61 allocs/op
BenchmarkCompileParallel-8 219548 5684 ns/op 3938 B/op 61 allocs/op
PASS
ok github.com/dosco/super-graph/psql 2.582s
Type: alloc_space
Time: Nov 29, 2019 at 11:59pm (EST)
Entering interactive mode (type "help" for commands, "o" for options)
(pprof) top
Showing nodes accounting for 880.59MB, 80.63% of 1092.14MB total
Dropped 33 nodes (cum <= 5.46MB)
Showing top 10 nodes out of 35
flat flat% sum% cum cum%
22MB 2.01% 2.01% 903.57MB 82.73% github.com/dosco/super-graph/qcode.(*Compiler).Compile
0 0% 2.01% 862.98MB 79.02% github.com/dosco/super-graph/psql.BenchmarkCompileParallel.func1
0 0% 2.01% 862.98MB 79.02% testing.(*B).RunParallel.func1
461.95MB 42.30% 44.31% 760.53MB 69.64% github.com/dosco/super-graph/qcode.(*Compiler).compileQuery
396.63MB 36.32% 80.63% 396.63MB 36.32% github.com/dosco/super-graph/util.NewStack
0 0% 80.63% 252.07MB 23.08% github.com/dosco/super-graph/qcode.(*Compiler).compileArgs
0 0% 80.63% 228.15MB 20.89% testing.(*B).runN
0 0% 80.63% 227.63MB 20.84% github.com/dosco/super-graph/psql.BenchmarkCompile
0 0% 80.63% 227.63MB 20.84% testing.(*B).launch
0 0% 80.63% 187.04MB 17.13% github.com/dosco/super-graph/psql.(*Compiler).Compile
```
## Benchmarking
Most packages contain benchmark tests to ensure new features don't introduce a significant regression to performance.
```bash
$ cd ./psql
$ go test -v -run=xx -bench=.
goos: darwin
goarch: amd64
pkg: github.com/dosco/super-graph/psql
BenchmarkCompile-8 60775 19076 ns/op 3919 B/op 61 allocs/op
BenchmarkCompileParallel-8 207847 5172 ns/op 3937 B/op 61 allocs/op
PASS
ok github.com/dosco/super-graph/psql 2.530s
```
## Reach out
If you'd like me to explain other parts of the code please reach out over Twitter or Discord. I'll keep adding to this doc as I get time.
@ -10,5 +10,10 @@
"tailwindcss": "^1.0.6", "tailwindcss": "^1.0.6",
"vuepress": "^1.0.0", "vuepress": "^1.0.0",
"webpack-dev-middleware": "3.6.0" "webpack-dev-middleware": "3.6.0"
},
"dependencies": {
"@fortawesome/fontawesome-svg-core": "^1.2.25",
"@fortawesome/free-solid-svg-icons": "^5.11.2",
"@fortawesome/vue-fontawesome": "^0.1.7"
} }
} }
@ -10,17 +10,17 @@
"@babel/highlight" "^7.0.0" "@babel/highlight" "^7.0.0"
"@babel/core@^7.0.0": "@babel/core@^7.0.0":
version "7.6.2" version "7.6.4"
resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.6.2.tgz#069a776e8d5e9eefff76236bc8845566bd31dd91" resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.6.4.tgz#6ebd9fe00925f6c3e177bb726a188b5f578088ff"
integrity sha512-l8zto/fuoZIbncm+01p8zPSDZu/VuuJhAfA7d/AbzM09WR7iVhavvfNDYCNpo1VvLk6E6xgAoP9P+/EMJHuRkQ== integrity sha512-Rm0HGw101GY8FTzpWSyRbki/jzq+/PkNQJ+nSulrdY6gFGOsNseCqD6KHRYe2E+EdzuBdr2pxCp6s4Uk6eJ+XQ==
dependencies: dependencies:
"@babel/code-frame" "^7.5.5" "@babel/code-frame" "^7.5.5"
"@babel/generator" "^7.6.2" "@babel/generator" "^7.6.4"
"@babel/helpers" "^7.6.2" "@babel/helpers" "^7.6.2"
"@babel/parser" "^7.6.2" "@babel/parser" "^7.6.4"
"@babel/template" "^7.6.0" "@babel/template" "^7.6.0"
"@babel/traverse" "^7.6.2" "@babel/traverse" "^7.6.3"
"@babel/types" "^7.6.0" "@babel/types" "^7.6.3"
convert-source-map "^1.1.0" convert-source-map "^1.1.0"
debug "^4.1.0" debug "^4.1.0"
json5 "^2.1.0" json5 "^2.1.0"
@ -29,12 +29,12 @@
semver "^5.4.1" semver "^5.4.1"
source-map "^0.5.0" source-map "^0.5.0"
"@babel/generator@^7.6.2": "@babel/generator@^7.6.3", "@babel/generator@^7.6.4":
version "7.6.2" version "7.6.4"
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.6.2.tgz#dac8a3c2df118334c2a29ff3446da1636a8f8c03" resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.6.4.tgz#a4f8437287bf9671b07f483b76e3bb731bc97671"
integrity sha512-j8iHaIW4gGPnViaIHI7e9t/Hl8qLjERI6DcV9kEpAIDJsAOrcnXqRS7t+QbhL76pwbtqP+QCQLL0z1CyVmtjjQ== integrity sha512-jsBuXkFoZxk0yWLyGI9llT9oiQ2FeTASmRFE32U+aaDTfoE92t78eroO7PTpU/OrYq38hlcDM6vbfLDaOLy+7w==
dependencies: dependencies:
"@babel/types" "^7.6.0" "@babel/types" "^7.6.3"
jsesc "^2.5.1" jsesc "^2.5.1"
lodash "^4.17.13" lodash "^4.17.13"
source-map "^0.5.0" source-map "^0.5.0"
@ -224,10 +224,10 @@
esutils "^2.0.2" esutils "^2.0.2"
js-tokens "^4.0.0" js-tokens "^4.0.0"
"@babel/parser@^7.6.0", "@babel/parser@^7.6.2": "@babel/parser@^7.6.0", "@babel/parser@^7.6.3", "@babel/parser@^7.6.4":
version "7.6.2" version "7.6.4"
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.6.2.tgz#205e9c95e16ba3b8b96090677a67c9d6075b70a1" resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.6.4.tgz#cb9b36a7482110282d5cb6dd424ec9262b473d81"
integrity sha512-mdFqWrSPCmikBoaBYMuBulzTIKuXVPtEISFbRRVNwMWpCms/hmE2kRq0bblUHaNRKrjRlmVbx1sDHmjmRgD2Xg== integrity sha512-D8RHPW5qd0Vbyo3qb+YjO5nvUVRTXFLQ/FsDxJU2Nqz4uB5EnUN0ZQSEYpvTIbRuttig1XbHWU5oMeQwQSAA+A==
"@babel/plugin-proposal-async-generator-functions@^7.2.0": "@babel/plugin-proposal-async-generator-functions@^7.2.0":
version "7.2.0" version "7.2.0"
@ -361,9 +361,9 @@
"@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0"
"@babel/plugin-transform-block-scoping@^7.3.4": "@babel/plugin-transform-block-scoping@^7.3.4":
version "7.6.2" version "7.6.3"
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.6.2.tgz#96c33ab97a9ae500cc6f5b19e04a7e6553360a79" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.6.3.tgz#6e854e51fbbaa84351b15d4ddafe342f3a5d542a"
integrity sha512-zZT8ivau9LOQQaOGC7bQLQOT4XPkPXgN2ERfUgk1X8ql+mVkLc4E8eKk+FO3o0154kxzqenWCorfmEXpEZcrSQ== integrity sha512-7hvrg75dubcO3ZI2rjYTzUrEuh1E9IyDEhhB6qfcooxhDA33xx2MasuLVgdxzcP6R/lipAC6n9ub9maNW6RKdw==
dependencies: dependencies:
"@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0"
lodash "^4.17.13" lodash "^4.17.13"
@ -479,9 +479,9 @@
"@babel/helper-plugin-utils" "^7.0.0" "@babel/helper-plugin-utils" "^7.0.0"
"@babel/plugin-transform-named-capturing-groups-regex@^7.3.0": "@babel/plugin-transform-named-capturing-groups-regex@^7.3.0":
version "7.6.2" version "7.6.3"
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.6.2.tgz#c1ca0bb84b94f385ca302c3932e870b0fb0e522b" resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.6.3.tgz#aaa6e409dd4fb2e50b6e2a91f7e3a3149dbce0cf"
integrity sha512-xBdB+XOs+lgbZc2/4F5BVDVcDNS4tcSKQc96KmlqLEAwz6tpYPEvPdmDfvVG0Ssn8lAhronaRs6Z6KSexIpK5g== integrity sha512-jTkk7/uE6H2s5w6VlMHeWuH+Pcy2lmdwFoeWCVnvIrDUnB5gQqTVI8WfmEAhF2CDEarGrknZcmSFg1+bkfCoSw==
dependencies: dependencies:
regexpu-core "^4.6.0" regexpu-core "^4.6.0"
@ -622,17 +622,17 @@
semver "^5.3.0" semver "^5.3.0"
"@babel/runtime-corejs2@^7.2.0": "@babel/runtime-corejs2@^7.2.0":
version "7.6.2" version "7.6.3"
resolved "https://registry.yarnpkg.com/@babel/runtime-corejs2/-/runtime-corejs2-7.6.2.tgz#062f8e31f3df30fc1a3dea68aa1bd854e06e9ba6" resolved "https://registry.yarnpkg.com/@babel/runtime-corejs2/-/runtime-corejs2-7.6.3.tgz#de3f446b3fb688b98cbd220474d1a7cad909bcb8"
integrity sha512-wdyVKnTv9Be4YlwF/7pByYNfcl23qC21aAQ0aIaZOo2ZOvhFEyJdBLJClYZ9i+Pmrz7sUQgg/MwbJa2RZTkygg== integrity sha512-nuA2o+rgX2+PrNTZ063ehncVcg7sn+tU71BB81SaWRVUbGwCOlb0+yQA1e0QqmzOfRSYOxfvf8cosYqFbJEiwQ==
dependencies: dependencies:
core-js "^2.6.5" core-js "^2.6.5"
regenerator-runtime "^0.13.2" regenerator-runtime "^0.13.2"
"@babel/runtime@^7.0.0": "@babel/runtime@^7.0.0":
version "7.6.2" version "7.6.3"
resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.6.2.tgz#c3d6e41b304ef10dcf13777a33e7694ec4a9a6dd" resolved "https://registry.yarnpkg.com/@babel/runtime/-/runtime-7.6.3.tgz#935122c74c73d2240cafd32ddb5fc2a6cd35cf1f"
integrity sha512-EXxN64agfUqqIGeEjI5dL5z0Sw0ZwWo1mLTi4mQowCZ42O59b7DRpZAnTC6OqdF28wMBMFKNb/4uFGrVaigSpg== integrity sha512-kq6anf9JGjW8Nt5rYfEuGRaEAaH1mkv3Bbu6rYvLOpPh/RusSJXuKPEAoZ7L7gybZkchE8+NV5g9vKF4AGAtsA==
dependencies: dependencies:
regenerator-runtime "^0.13.2" regenerator-runtime "^0.13.2"
@ -645,31 +645,55 @@
"@babel/parser" "^7.6.0" "@babel/parser" "^7.6.0"
"@babel/types" "^7.6.0" "@babel/types" "^7.6.0"
"@babel/traverse@^7.1.0", "@babel/traverse@^7.4.4", "@babel/traverse@^7.5.5", "@babel/traverse@^7.6.2": "@babel/traverse@^7.1.0", "@babel/traverse@^7.4.4", "@babel/traverse@^7.5.5", "@babel/traverse@^7.6.2", "@babel/traverse@^7.6.3":
version "7.6.2" version "7.6.3"
resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.6.2.tgz#b0e2bfd401d339ce0e6c05690206d1e11502ce2c" resolved "https://registry.yarnpkg.com/@babel/traverse/-/traverse-7.6.3.tgz#66d7dba146b086703c0fb10dd588b7364cec47f9"
integrity sha512-8fRE76xNwNttVEF2TwxJDGBLWthUkHWSldmfuBzVRmEDWOtu4XdINTgN7TDWzuLg4bbeIMLvfMFD9we5YcWkRQ== integrity sha512-unn7P4LGsijIxaAJo/wpoU11zN+2IaClkQAxcJWBNCMS6cmVh802IyLHNkAjQ0iYnRS3nnxk5O3fuXW28IMxTw==
dependencies: dependencies:
"@babel/code-frame" "^7.5.5" "@babel/code-frame" "^7.5.5"
"@babel/generator" "^7.6.2" "@babel/generator" "^7.6.3"
"@babel/helper-function-name" "^7.1.0" "@babel/helper-function-name" "^7.1.0"
"@babel/helper-split-export-declaration" "^7.4.4" "@babel/helper-split-export-declaration" "^7.4.4"
"@babel/parser" "^7.6.2" "@babel/parser" "^7.6.3"
"@babel/types" "^7.6.0" "@babel/types" "^7.6.3"
debug "^4.1.0" debug "^4.1.0"
globals "^11.1.0" globals "^11.1.0"
lodash "^4.17.13" lodash "^4.17.13"
"@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.5", "@babel/types@^7.6.0": "@babel/types@^7.0.0", "@babel/types@^7.2.0", "@babel/types@^7.4.4", "@babel/types@^7.5.5", "@babel/types@^7.6.0", "@babel/types@^7.6.3":
version "7.6.1" version "7.6.3"
resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.6.1.tgz#53abf3308add3ac2a2884d539151c57c4b3ac648" resolved "https://registry.yarnpkg.com/@babel/types/-/types-7.6.3.tgz#3f07d96f854f98e2fbd45c64b0cb942d11e8ba09"
integrity sha512-X7gdiuaCmA0uRjCmRtYJNAVCc/q+5xSgsfKJHqMN4iNLILX39677fJE1O40arPMh0TTtS9ItH67yre6c7k6t0g== integrity sha512-CqbcpTxMcpuQTMhjI37ZHVgjBkysg5icREQIEZ0eG1yCNwg3oy+5AaLiOKmjsCj6nqOsa6Hf0ObjRVwokb7srA==
dependencies: dependencies:
esutils "^2.0.2" esutils "^2.0.2"
lodash "^4.17.13" lodash "^4.17.13"
to-fast-properties "^2.0.0" to-fast-properties "^2.0.0"
"@fullhuman/postcss-purgecss@^1.1.0": "@fortawesome/fontawesome-common-types@^0.2.25":
version "0.2.25"
resolved "https://registry.yarnpkg.com/@fortawesome/fontawesome-common-types/-/fontawesome-common-types-0.2.25.tgz#6df015905081f2762e5cfddeb7a20d2e9b16c786"
integrity sha512-3RuZPDuuPELd7RXtUqTCfed14fcny9UiPOkdr2i+cYxBoTOfQgxcDoq77fHiiHcgWuo1LoBUpvGxFF1H/y7s3Q==
"@fortawesome/fontawesome-svg-core@^1.2.25":
version "1.2.25"
resolved "https://registry.yarnpkg.com/@fortawesome/fontawesome-svg-core/-/fontawesome-svg-core-1.2.25.tgz#24b03391d14f0c6171e8cad7057c687b74049790"
integrity sha512-MotKnn53JKqbkLQiwcZSBJVYtTgIKFbh7B8+kd05TSnfKYPFmjKKI59o2fpz5t0Hzl35vVGU6+N4twoOpZUrqA==
dependencies:
"@fortawesome/fontawesome-common-types" "^0.2.25"
"@fortawesome/free-solid-svg-icons@^5.11.2":
version "5.11.2"
resolved "https://registry.yarnpkg.com/@fortawesome/free-solid-svg-icons/-/free-solid-svg-icons-5.11.2.tgz#2f2f1459743a27902b76655a0d0bc5ec4d945631"
integrity sha512-zBue4i0PAZJUXOmLBBvM7L0O7wmsDC8dFv9IhpW5QL4kT9xhhVUsYg/LX1+5KaukWq4/cbDcKT+RT1aRe543sg==
dependencies:
"@fortawesome/fontawesome-common-types" "^0.2.25"
"@fortawesome/vue-fontawesome@^0.1.7":
version "0.1.7"
resolved "https://registry.yarnpkg.com/@fortawesome/vue-fontawesome/-/vue-fontawesome-0.1.7.tgz#121867297cafd141af78c67d92ab9f1ad4b7328b"
integrity sha512-YCw2Q2m4fxzyFsPOH3uDYMoJztTD+pT+AAyse4LFpbdrBg+r8ueaVT8BFnXEjrGwMDJJeXrwJ5AOC6q/JWBI4w==
"@fullhuman/postcss-purgecss@^1.3.0":
version "1.3.0" version "1.3.0"
resolved "https://registry.yarnpkg.com/@fullhuman/postcss-purgecss/-/postcss-purgecss-1.3.0.tgz#d632900d818f4fcf4678e7326923fb838c3e03a7" resolved "https://registry.yarnpkg.com/@fullhuman/postcss-purgecss/-/postcss-purgecss-1.3.0.tgz#d632900d818f4fcf4678e7326923fb838c3e03a7"
integrity sha512-zvfS3dPKD2FAtMcXapMJXGbDgEp9E++mLR6lTgSruv6y37uvV5xJ1crVktuC1gvnmMwsa7Zh1m05FeEiz4VnIQ== integrity sha512-zvfS3dPKD2FAtMcXapMJXGbDgEp9E++mLR6lTgSruv6y37uvV5xJ1crVktuC1gvnmMwsa7Zh1m05FeEiz4VnIQ==
@ -691,13 +715,13 @@
integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw== integrity sha512-shAmDyaQC4H92APFoIaVDHCx5bStIocgvbwQyxPRrbUY20V1EYTbSDchWbuwlMG3V17cprZhA6+78JfB+3DTPw==
"@silvanite/vuepress-plugin-tailwind@^1.1.0": "@silvanite/vuepress-plugin-tailwind@^1.1.0":
version "1.1.0" version "1.2.0"
resolved "https://registry.yarnpkg.com/@silvanite/vuepress-plugin-tailwind/-/vuepress-plugin-tailwind-1.1.0.tgz#e5d0c8b0b1127201509196eca359ac9070517202" resolved "https://registry.yarnpkg.com/@silvanite/vuepress-plugin-tailwind/-/vuepress-plugin-tailwind-1.2.0.tgz#2e1e6d10e441b49c7446bbb99e7fe8ec80aee273"
integrity sha512-pQVMz0knDMMfIuXhEwYSRK2gPW9ds+C9YEX8IF0sdCqiPRlJSNG5oUTIAoKoe2JYHlr7zdQZX8wBQN7FQEVO4Q== integrity sha512-1LWlIa+g1vV6HkwwEmADAhnuZ33oAGidnbrCs7qnK53ApbtWa9K/+0cHCLhDeQ3zqoyotqK7YQR2sm5nCOTYqA==
dependencies: dependencies:
"@fullhuman/postcss-purgecss" "^1.1.0" "@fullhuman/postcss-purgecss" "^1.3.0"
lodash "^4.17.11" lodash "^4.17.15"
tailwindcss "^0.7.4" tailwindcss "^1.1.2"
"@types/events@*": "@types/events@*":
version "3.0.0" version "3.0.0"
@ -719,9 +743,9 @@
integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA== integrity sha512-tHq6qdbT9U1IRSGf14CL0pUlULksvY9OZ+5eEgl1N7t+OA3tGvNpxJCzuKQlsNgCVwbAs670L1vcVQi8j9HjnA==
"@types/node@*": "@types/node@*":
version "12.7.8" version "12.12.3"
resolved "https://registry.yarnpkg.com/@types/node/-/node-12.7.8.tgz#cb1bf6800238898bc2ff6ffa5702c3cadd350708" resolved "https://registry.yarnpkg.com/@types/node/-/node-12.12.3.tgz#ebfe83507ac506bc3486314a8aa395be66af8d23"
integrity sha512-FMdVn84tJJdV+xe+53sYiZS4R5yn1mAIxfj+DVoNiQjTYz1+OYmjwEZr1ev9nU0axXwda0QDbYl06QHanRVH3A== integrity sha512-opgSsy+cEF9N8MgaVPnWVtdJ3o4mV2aMHvDq7thkQUFt0EuOHJon4rQpJfhjmNHB+ikl0Cd6WhWIErOyQ+f7tw==
"@types/q@^1.5.1": "@types/q@^1.5.1":
version "1.5.2" version "1.5.2"
@ -746,9 +770,9 @@
svg-tags "^1.0.0" svg-tags "^1.0.0"
"@vue/babel-preset-app@^3.1.1": "@vue/babel-preset-app@^3.1.1":
version "3.11.0" version "3.12.1"
resolved "https://registry.yarnpkg.com/@vue/babel-preset-app/-/babel-preset-app-3.11.0.tgz#52bf79c15560a304a13f4770e3e5530e01dd6173" resolved "https://registry.yarnpkg.com/@vue/babel-preset-app/-/babel-preset-app-3.12.1.tgz#24c477052f078f30fdb7735103b14dd1fa2cbfe1"
integrity sha512-fcCq9nuGGx1WGnyaKHvIC8RnWjISXGf1rJH4mN9+bymDfosgDbwnfV4TYvTZlyK1/aTHEEpIoO3XimTXBo7QBw== integrity sha512-Zjy5jQaikV1Pz+ri0YgXFS7q4/5wCxB5tRkDOEIt5+4105u0Feb/pvH20nVL6nx9GyXrECFfcm7Yxr/z++OaPQ==
dependencies: dependencies:
"@babel/helper-module-imports" "^7.0.0" "@babel/helper-module-imports" "^7.0.0"
"@babel/plugin-proposal-class-properties" "^7.0.0" "@babel/plugin-proposal-class-properties" "^7.0.0"
@ -765,15 +789,15 @@
core-js "^2.6.5" core-js "^2.6.5"
"@vue/babel-preset-jsx@^1.0.0": "@vue/babel-preset-jsx@^1.0.0":
version "1.1.0" version "1.1.1"
resolved "https://registry.yarnpkg.com/@vue/babel-preset-jsx/-/babel-preset-jsx-1.1.0.tgz#c8001329f5b372297a3111a251eb4f9e956c1266" resolved "https://registry.yarnpkg.com/@vue/babel-preset-jsx/-/babel-preset-jsx-1.1.1.tgz#3a74642ca0ecea10aae13649df5ff70f9d24a6f5"
integrity sha512-EeZ9gwEmu79B4A6LMLAw5cPCVYIcbKWgJgJafWtLzh1S+SgERUmTkVQ9Vx4k8zYBiCuxHK3XziZ3VJIMau7THA== integrity sha512-SeyndwQZc8MAOkhbJaC34ocTwcKekKkwrwnTMC3YF8VmGp5IQWW5gPIU66bqO9WFBXFA3J3ANsUbP2pj8q8KdQ==
dependencies: dependencies:
"@vue/babel-helper-vue-jsx-merge-props" "^1.0.0" "@vue/babel-helper-vue-jsx-merge-props" "^1.0.0"
"@vue/babel-plugin-transform-vue-jsx" "^1.0.0" "@vue/babel-plugin-transform-vue-jsx" "^1.0.0"
"@vue/babel-sugar-functional-vue" "^1.0.0" "@vue/babel-sugar-functional-vue" "^1.0.0"
"@vue/babel-sugar-inject-h" "^1.0.0" "@vue/babel-sugar-inject-h" "^1.0.0"
"@vue/babel-sugar-v-model" "^1.0.0" "@vue/babel-sugar-v-model" "^1.1.1"
"@vue/babel-sugar-v-on" "^1.1.0" "@vue/babel-sugar-v-on" "^1.1.0"
"@vue/babel-sugar-functional-vue@^1.0.0": "@vue/babel-sugar-functional-vue@^1.0.0":
@@ -790,10 +814,10 @@
   dependencies:
     "@babel/plugin-syntax-jsx" "^7.2.0"
-"@vue/babel-sugar-v-model@^1.0.0":
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/@vue/babel-sugar-v-model/-/babel-sugar-v-model-1.0.0.tgz#f4da56aa67f65a349bd2c269a95e72e601af4613"
-  integrity sha512-Pfg2Al0io66P1eO6zUbRIgpyKCU2qTnumiE0lao/wA/uNdb7Dx5Tfd1W6tO5SsByETPnEs8i8+gawRIXX40rFw==
+"@vue/babel-sugar-v-model@^1.1.1":
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/@vue/babel-sugar-v-model/-/babel-sugar-v-model-1.1.1.tgz#a0f0750fcee20769805a20178299eebd4babf25a"
+  integrity sha512-qiPbdUTiqNQdhXzvWQMVfrYGHCiMmscY7j/cudLxdxWZ8AFhgPRVlniVgaWIT7A1iOjs92e8U6qVyqkf0d4ZrA==
   dependencies:
     "@babel/plugin-syntax-jsx" "^7.2.0"
     "@vue/babel-helper-vue-jsx-merge-props" "^1.0.0"
@@ -826,18 +850,18 @@
     source-map "~0.6.1"
     vue-template-es2015-compiler "^1.9.0"
-"@vuepress/core@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/core/-/core-1.1.0.tgz#32fd2b65a4613085cbd2b812bf67afe3a037dc65"
-  integrity sha512-qC+R9kdTpui9QjQGUXUsmfAbToWOnoYjP2AJqMT/RsKUhQsXAIMe2Z0L/Vw2Z3bmlTUq26v+B1zlFgYzGuyIEQ==
+"@vuepress/core@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/core/-/core-1.2.0.tgz#8e0c636b7f8676202fdd1ecfbe31bfe245dab2a8"
+  integrity sha512-ZIsUkQIF+h4Yk6q4okoRnRwRhcYePu/kNiL0WWPDGycjai8cFqFjLDP/tJjfTKXmn9A62j2ETjSwaiMxCtDkyw==
   dependencies:
     "@babel/core" "^7.0.0"
     "@vue/babel-preset-app" "^3.1.1"
-    "@vuepress/markdown" "^1.1.0"
-    "@vuepress/markdown-loader" "^1.1.0"
-    "@vuepress/plugin-last-updated" "^1.1.0"
-    "@vuepress/plugin-register-components" "^1.1.0"
-    "@vuepress/shared-utils" "^1.1.0"
+    "@vuepress/markdown" "^1.2.0"
+    "@vuepress/markdown-loader" "^1.2.0"
+    "@vuepress/plugin-last-updated" "^1.2.0"
+    "@vuepress/plugin-register-components" "^1.2.0"
+    "@vuepress/shared-utils" "^1.2.0"
     autoprefixer "^9.5.1"
     babel-loader "^8.0.4"
     cache-loader "^3.0.0"
@@ -856,34 +880,34 @@
     postcss-safe-parser "^4.0.1"
     toml "^3.0.0"
     url-loader "^1.0.1"
-    vue "^2.5.16"
-    vue-loader "^15.2.4"
-    vue-router "^3.0.2"
-    vue-server-renderer "^2.5.16"
-    vue-template-compiler "^2.5.16"
+    vue "^2.6.10"
+    vue-loader "^15.7.1"
+    vue-router "^3.1.3"
+    vue-server-renderer "^2.6.10"
+    vue-template-compiler "^2.6.10"
     vuepress-html-webpack-plugin "^3.2.0"
-    vuepress-plugin-container "^2.0.0"
+    vuepress-plugin-container "^2.0.2"
     webpack "^4.8.1"
     webpack-chain "^4.6.0"
     webpack-dev-server "^3.5.1"
     webpack-merge "^4.1.2"
     webpackbar "3.2.0"
-"@vuepress/markdown-loader@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/markdown-loader/-/markdown-loader-1.1.0.tgz#ab8ac2d286c255f9fa39ecb2f4542053314825ac"
-  integrity sha512-X4+E9kbFt3OSXKxtQbNxeuzxbXdSMhXz8tliUW+/+1zx7RGn1ApcR0x7Y6/irESUgZ+GxOT3jyiCDZA4usHhLA==
+"@vuepress/markdown-loader@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/markdown-loader/-/markdown-loader-1.2.0.tgz#f8972014616b4ab46a99c9aaac2dd414d437411c"
+  integrity sha512-gOZzoHjfp/W6t+qKBRdbHS/9TwRnNuhY7V+yFzxofNONFHQULofIN/arG+ptYc2SuqJ541jqudNQW+ldHNMC2w==
   dependencies:
-    "@vuepress/markdown" "^1.1.0"
+    "@vuepress/markdown" "^1.2.0"
     loader-utils "^1.1.0"
     lru-cache "^5.1.1"
-"@vuepress/markdown@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/markdown/-/markdown-1.1.0.tgz#f9095c91019d21dbc3daedfd3773c6d5c29117ec"
-  integrity sha512-O2ivsIkUrSUPDx+9N43XKSOGtprV4G1k6/4o3wZjjCn6GXYRsRE906cFDlbryHxQ49Z7Yfz3gyZIGMnThxLo/w==
+"@vuepress/markdown@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/markdown/-/markdown-1.2.0.tgz#7c457e0fab52ef8ac4dd1898ae450bc3aec30746"
+  integrity sha512-RLRQmTu5wJbCO4Qv+J0K53o5Ew7nAGItLwWyzCbIUB6pRsya3kqSCViWQVlKlS53zFTmRHuAC9tJMRdzly3mCA==
   dependencies:
-    "@vuepress/shared-utils" "^1.1.0"
+    "@vuepress/shared-utils" "^1.2.0"
     markdown-it "^8.4.1"
     markdown-it-anchor "^5.0.2"
     markdown-it-chain "^1.3.0"
@@ -891,43 +915,43 @@
     markdown-it-table-of-contents "^0.4.0"
     prismjs "^1.13.0"
-"@vuepress/plugin-active-header-links@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/plugin-active-header-links/-/plugin-active-header-links-1.1.0.tgz#cd62c1712040676035f34fed16a088e1c08811d8"
-  integrity sha512-sa5ySYl/kTyr1AMakeW375wWs1aQ6psiJiSFclxkGvxcuGZ89F27ELvd43DKaETAlH90LcoE/j7TXMA895qXmw==
+"@vuepress/plugin-active-header-links@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/plugin-active-header-links/-/plugin-active-header-links-1.2.0.tgz#46495c89e51a95e57139be007dffbcae4b229260"
+  integrity sha512-vdi7l96pElJvEmcx6t9DWJNH25TIurS8acjN3+b7o4NzdaszFn5j6klN6WtI4Z+5BVDrxHP5W1F3Ebw8SZyupA==
   dependencies:
-    lodash.throttle "^4.1.1"
+    lodash.debounce "^4.0.8"
-"@vuepress/plugin-last-updated@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/plugin-last-updated/-/plugin-last-updated-1.1.0.tgz#65f2de734f3744026297b4667f3b5276ef99fd06"
-  integrity sha512-x2SaAKWk26RK9O0slnZ55eSlBFYdYjFgqkRIfaOf4f2biWqTa9nzaIbvjzvcx3AZKlOWMl81KRwybhDL8E9OsA==
+"@vuepress/plugin-last-updated@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/plugin-last-updated/-/plugin-last-updated-1.2.0.tgz#7b34065b793848b0482a222b7a6f1b7df3668cdc"
+  integrity sha512-j4uZb/MXDyG+v9QCG3T/rkiaOhC/ib7NKCt1cjn3GOwvWTDmB5UZm9EBhUpbDNrBgxW+SaHOe3kMVNO8bGOTGw==
   dependencies:
     cross-spawn "^6.0.5"
-"@vuepress/plugin-nprogress@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/plugin-nprogress/-/plugin-nprogress-1.1.0.tgz#ca7106adc7016ed0d90a22555066c11da597ef59"
-  integrity sha512-XhUyAO+mzYFOFupX/pNlPbv0bT596Lk000Q2PhWfRliwUzpUd0/u5Z6B6fasIVj01Yqih/gAGOZpr2ZwSCNJYw==
+"@vuepress/plugin-nprogress@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/plugin-nprogress/-/plugin-nprogress-1.2.0.tgz#ff6166946a0b118a39a562acb57983529afce4d2"
+  integrity sha512-0apt3Dp6XVCOkLViX6seKSEJgARihi+pX3/r8j8ndFp9Y+vmgLFZnQnGE5iKNi1ty+A6PZOK0RQcBjwTAU4pAw==
   dependencies:
     nprogress "^0.2.0"
-"@vuepress/plugin-register-components@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/plugin-register-components/-/plugin-register-components-1.1.0.tgz#42ea75bcad3fb562fbb86c424136f86e13641162"
-  integrity sha512-HXGdcmBdGHLhI8KHr09GnnZEzgCuaIQx1WBqDNfbigSVKEx910L56ej+Whl6VFd7D0uOLUlW4kb9ELM0sjJpKg==
+"@vuepress/plugin-register-components@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/plugin-register-components/-/plugin-register-components-1.2.0.tgz#95aa0e0af94b2758b26ab98814c43b0f7bcd502b"
+  integrity sha512-C32b8sbGtDEX8I3SUUKS/w2rThiRFiKxmzNcJD996me7VY/4rgmZ8CxGtb6G9wByVoK0UdG1SOkrgOPdSCm80A==
   dependencies:
-    "@vuepress/shared-utils" "^1.1.0"
+    "@vuepress/shared-utils" "^1.2.0"
-"@vuepress/plugin-search@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/plugin-search/-/plugin-search-1.1.0.tgz#3b7a344a7df1bab27f10a46e6b57680c8f5d4c7e"
-  integrity sha512-GoxvcM65ZAZycnsoZJ/wx9F3hXKzzJQdS7lNnAuHrvCheT5tVO1wwMumVP/unZU/59zCQ1PiyReYntLSp5bXVg==
+"@vuepress/plugin-search@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/plugin-search/-/plugin-search-1.2.0.tgz#0b27c467b7fd42bd4d9e32de0fe2fb81a24bd311"
+  integrity sha512-QU3JfnMfImDAArbJOVH1q1iCDE5QrT99GLpNGo6KQYZWqY1TWAbgyf8C2hQdaI03co1lkU2Wn/iqzHJ5WHlueg==
-"@vuepress/shared-utils@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/shared-utils/-/shared-utils-1.1.0.tgz#9d220ffe54f2d698c56ca5348ba2cb9dd72800da"
-  integrity sha512-zvYfejRRl7y3oavLvAe7dHfCu4XewKnhsyUQ7to6tfxVNoEqzhrl5HcCBwcLlphj792tvTAth5QkVegTgGfsaw==
+"@vuepress/shared-utils@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/shared-utils/-/shared-utils-1.2.0.tgz#8d9ab40c24f75f027ef32c2ad0169f0f08e949fa"
+  integrity sha512-wo5Ng2/xzsmIYCzvWxgLFlDBp7FkmJp2shAkbSurLNAh1vixhs0+LyDxsk01+m34ktJSp9rTUUsm6khw/Fvo0w==
   dependencies:
     chalk "^2.3.2"
     diacritics "^1.3.0"
@@ -939,19 +963,20 @@
     semver "^6.0.0"
     upath "^1.1.0"
-"@vuepress/theme-default@^1.1.0":
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/@vuepress/theme-default/-/theme-default-1.1.0.tgz#915c97bb69985d6fccd815f829532d67d828e10a"
-  integrity sha512-U+kFHakSBEXFAdfItyeCbP//q2hm9R8+vnTFjbMMVgRZ2SHPnDUC/7WWGoEUzfEpFHHPrG1OzC9iI/o5v8p5AQ==
+"@vuepress/theme-default@^1.2.0":
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/@vuepress/theme-default/-/theme-default-1.2.0.tgz#3303af21a00031a3482ed1c494508234f545cbf1"
+  integrity sha512-mJxAMYQQv4OrGFsArMlONu8RpCzPUVx81dumkyTT4ay5PXAWTj+WDeFQLOT3j0g9QrDJGnHhbiw2aS+R/0WUyQ==
   dependencies:
-    "@vuepress/plugin-active-header-links" "^1.1.0"
-    "@vuepress/plugin-nprogress" "^1.1.0"
-    "@vuepress/plugin-search" "^1.1.0"
+    "@vuepress/plugin-active-header-links" "^1.2.0"
+    "@vuepress/plugin-nprogress" "^1.2.0"
+    "@vuepress/plugin-search" "^1.2.0"
     docsearch.js "^2.5.2"
     lodash "^4.17.15"
     stylus "^0.54.5"
     stylus-loader "^3.0.2"
-    vuepress-plugin-container "^2.0.0"
+    vuepress-plugin-container "^2.0.2"
+    vuepress-plugin-smooth-scroll "^0.0.3"
 "@webassemblyjs/ast@1.8.5":
   version "1.8.5"
@@ -1153,9 +1178,9 @@ ajv@^6.1.0, ajv@^6.10.2, ajv@^6.5.5:
     uri-js "^4.2.2"
 algoliasearch@^3.24.5:
-  version "3.35.0"
-  resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-3.35.0.tgz#03f2900698c7c547fce9fb8fb8d0b9a56c8da405"
-  integrity sha512-Om4aLzkGbUi+Rc3sa8s48CRj2Qe7u5TXS7lK7Z681x2EiAa5Qx5uB/kbp8A6qY6dFDX7vstYRIYZ7t9XgdJ1dw==
+  version "3.35.1"
+  resolved "https://registry.yarnpkg.com/algoliasearch/-/algoliasearch-3.35.1.tgz#297d15f534a3507cab2f5dfb996019cac7568f0c"
+  integrity sha512-K4yKVhaHkXfJ/xcUnil04xiSrB8B8yHZoFEhWNpXg23eiCnqvTZw1tn/SqvdsANlYHLJlKl0qi3I/Q2Sqo7LwQ==
   dependencies:
     agentkeepalive "^2.2.0"
     debug "^2.6.9"
@@ -1336,10 +1361,12 @@ async-limiter@~1.0.0:
   resolved "https://registry.yarnpkg.com/async-limiter/-/async-limiter-1.0.1.tgz#dd379e94f0db8310b08291f9d64c3209766617fd"
   integrity sha512-csOlWGAcRFJaI6m+F2WKdnMKr4HhdhFVBk0H/QbJFMCr+uO2kwohwXQPxw/9OCxp05r5ghVBFSyioixx3gfkNQ==
-async@^1.5.2:
-  version "1.5.2"
-  resolved "https://registry.yarnpkg.com/async/-/async-1.5.2.tgz#ec6a61ae56480c0c3cb241c95618e20892f9672a"
-  integrity sha1-7GphrlZIDAw8skHJVhjiCJL5Zyo=
+async@^2.6.2:
+  version "2.6.3"
+  resolved "https://registry.yarnpkg.com/async/-/async-2.6.3.tgz#d72625e2344a3656e3a3ad4fa749fa83299d82ff"
+  integrity sha512-zflvls11DCy+dQWzTW2dzuilv8Z5X/pjfmZOWba6TNIVDm+2UDaJmXSOXlasHKfNBs8oo3M0aT50fDEWfKZjXg==
+  dependencies:
+    lodash "^4.17.14"
 asynckit@^0.4.0:
   version "0.4.0"
@@ -1359,17 +1386,17 @@ autocomplete.js@0.36.0:
     immediate "^3.2.3"
 autoprefixer@^9.4.5, autoprefixer@^9.5.1, autoprefixer@^9.6.1:
-  version "9.6.1"
-  resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.6.1.tgz#51967a02d2d2300bb01866c1611ec8348d355a47"
-  integrity sha512-aVo5WxR3VyvyJxcJC3h4FKfwCQvQWb1tSI5VHNibddCVWrcD1NvlxEweg3TSgiPztMnWfjpy2FURKA2kvDE+Tw==
+  version "9.7.0"
+  resolved "https://registry.yarnpkg.com/autoprefixer/-/autoprefixer-9.7.0.tgz#905ec19e50f04545fe9ff131182cc9ab25246901"
+  integrity sha512-j2IRvaCfrUxIiZun9ba4mhJ2omhw4OY88/yVzLO+lHhGBumAAK72PgM6gkbSN8iregPOn1ZlxGkmZh2CQ7X4AQ==
   dependencies:
-    browserslist "^4.6.3"
-    caniuse-lite "^1.0.30000980"
+    browserslist "^4.7.2"
+    caniuse-lite "^1.0.30001004"
     chalk "^2.4.2"
     normalize-range "^0.1.2"
     num2fraction "^1.2.2"
-    postcss "^7.0.17"
-    postcss-value-parser "^4.0.0"
+    postcss "^7.0.19"
+    postcss-value-parser "^4.0.2"
 aws-sign2@~0.7.0:
   version "0.7.0"
@@ -1381,13 +1408,6 @@ aws4@^1.8.0:
   resolved "https://registry.yarnpkg.com/aws4/-/aws4-1.8.0.tgz#f0e003d9ca9e7f59c7a508945d7b2ef9a04a542f"
   integrity sha512-ReZxvNHIOv88FlT7rxcXIIC0fPt4KZqZbOlivyWtXLt8ESx84zd3kMC6iK5jVeS2qt+g7ftS7ye4fi06X5rtRQ==
-babel-extract-comments@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/babel-extract-comments/-/babel-extract-comments-1.0.0.tgz#0a2aedf81417ed391b85e18b4614e693a0351a21"
-  integrity sha512-qWWzi4TlddohA91bFwgt6zO/J0X+io7Qp184Fw0m2JYRSTZnJbFR8+07KmzudHCZgOiKRCrjhylwv9Xd8gfhVQ==
-  dependencies:
-    babylon "^6.18.0"
 babel-loader@^8.0.4:
   version "8.0.6"
   resolved "https://registry.yarnpkg.com/babel-loader/-/babel-loader-8.0.6.tgz#e33bdb6f362b03f4bb141a0c21ab87c501b70dfb"
@@ -1416,32 +1436,6 @@ babel-plugin-module-resolver@3.2.0:
     reselect "^3.0.1"
     resolve "^1.4.0"
-babel-plugin-syntax-object-rest-spread@^6.8.0:
-  version "6.13.0"
-  resolved "https://registry.yarnpkg.com/babel-plugin-syntax-object-rest-spread/-/babel-plugin-syntax-object-rest-spread-6.13.0.tgz#fd6536f2bce13836ffa3a5458c4903a597bb3bf5"
-  integrity sha1-/WU28rzhODb/o6VFjEkDpZe7O/U=
-babel-plugin-transform-object-rest-spread@^6.26.0:
-  version "6.26.0"
-  resolved "https://registry.yarnpkg.com/babel-plugin-transform-object-rest-spread/-/babel-plugin-transform-object-rest-spread-6.26.0.tgz#0f36692d50fef6b7e2d4b3ac1478137a963b7b06"
-  integrity sha1-DzZpLVD+9rfi1LOsFHgTepY7ewY=
-  dependencies:
-    babel-plugin-syntax-object-rest-spread "^6.8.0"
-    babel-runtime "^6.26.0"
-babel-runtime@^6.26.0:
-  version "6.26.0"
-  resolved "https://registry.yarnpkg.com/babel-runtime/-/babel-runtime-6.26.0.tgz#965c7058668e82b55d7bfe04ff2337bc8b5647fe"
-  integrity sha1-llxwWGaOgrVde/4E/yM3vItWR/4=
-  dependencies:
-    core-js "^2.4.0"
-    regenerator-runtime "^0.11.0"
-babylon@^6.18.0:
-  version "6.18.0"
-  resolved "https://registry.yarnpkg.com/babylon/-/babylon-6.18.0.tgz#af2f3b88fa6f5c1e4c634d1a0f8eac4f55b395e3"
-  integrity sha512-q/UEjfGJ2Cm3oKV71DJz9d25TPnq5rhBVL2Q4fA5wcC3jcrdn7+SssEybFIxwAvvP+YCsCYNKughoF33GxgycQ==
 balanced-match@^1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/balanced-match/-/balanced-match-1.0.0.tgz#89b4d199ab2bee49de164ea02b89ce462d71b767"
@@ -1493,9 +1487,9 @@ binary-extensions@^1.0.0:
   integrity sha512-Un7MIEDdUC5gNpcGDV97op1Ywk748MpHcFTHoYs6qnj1Z3j7I53VG3nwZhKzoBZmbdRNnb6WRdFlwl7tSDuZGw==
 bluebird@^3.1.1, bluebird@^3.5.5:
-  version "3.5.5"
-  resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.5.5.tgz#a8d0afd73251effbbd5fe384a77d73003c17a71f"
-  integrity sha512-5am6HnnfN+urzt4yfg7IgTbotDjIT/u8AJpEt0sIU9FtXfVeezXAPKswrG+xKUCOYAINpSdgZVDU6QFh+cuH3w==
+  version "3.7.1"
+  resolved "https://registry.yarnpkg.com/bluebird/-/bluebird-3.7.1.tgz#df70e302b471d7473489acf26a93d63b53f874de"
+  integrity sha512-DdmyoGCleJnkbp3nkbxTLJ18rjDsE4yCggEwKNXkeV123sPNfOCYeDoeuOY+F2FrSjO1YXcTU+dsy96KMy+gcg==
 bn.js@^4.0.0, bn.js@^4.1.0, bn.js@^4.1.1, bn.js@^4.4.0:
   version "4.11.8"
@@ -1623,14 +1617,14 @@ browserify-zlib@^0.2.0:
   dependencies:
     pako "~1.0.5"
-browserslist@^4.0.0, browserslist@^4.3.4, browserslist@^4.6.3:
-  version "4.7.0"
-  resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.7.0.tgz#9ee89225ffc07db03409f2fee524dc8227458a17"
-  integrity sha512-9rGNDtnj+HaahxiVV38Gn8n8Lr8REKsel68v1sPFfIGEK6uSXTY3h9acgiT1dZVtOOUtifo/Dn8daDQ5dUgVsA==
+browserslist@^4.0.0, browserslist@^4.3.4, browserslist@^4.7.2:
+  version "4.7.2"
+  resolved "https://registry.yarnpkg.com/browserslist/-/browserslist-4.7.2.tgz#1bb984531a476b5d389cedecb195b2cd69fb1348"
+  integrity sha512-uZavT/gZXJd2UTi9Ov7/Z340WOSQ3+m1iBVRUknf+okKxonL9P83S3ctiBDtuRmRu8PiCHjqyueqQ9HYlJhxiw==
   dependencies:
-    caniuse-lite "^1.0.30000989"
-    electron-to-chromium "^1.3.247"
-    node-releases "^1.1.29"
+    caniuse-lite "^1.0.30001004"
+    electron-to-chromium "^1.3.295"
+    node-releases "^1.1.38"
 buffer-from@^1.0.0:
   version "1.1.1"
@@ -1801,10 +1795,10 @@ caniuse-api@^3.0.0:
     lodash.memoize "^4.1.2"
     lodash.uniq "^4.5.0"
-caniuse-lite@^1.0.0, caniuse-lite@^1.0.30000980, caniuse-lite@^1.0.30000989:
-  version "1.0.30000997"
-  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30000997.tgz#ba44a606804f8680894b7042612c2c7f65685b7e"
-  integrity sha512-BQLFPIdj2ntgBNWp9Q64LGUIEmvhKkzzHhUHR3CD5A9Lb7ZKF20/+sgadhFap69lk5XmK1fTUleDclaRFvgVUA==
+caniuse-lite@^1.0.0, caniuse-lite@^1.0.30001004:
+  version "1.0.30001006"
+  resolved "https://registry.yarnpkg.com/caniuse-lite/-/caniuse-lite-1.0.30001006.tgz#5b6e8288792cfa275f007b2819a00ccad7112655"
+  integrity sha512-MXnUVX27aGs/QINz+QG1sWSLDr3P1A3Hq5EUWoIt0T7K24DuvMxZEnh3Y5aHlJW6Bz2aApJdSewdYLd8zQnUuw==
 caseless@~0.12.0:
   version "0.12.0"
@@ -1987,20 +1981,15 @@ commander@2.17.x:
   integrity sha512-wPMUt6FnH2yzG95SA6mzjQOEKUU3aLaDEmzs1ti+1E9h+CsrZghRlqEM/EJ4KscsQVG8uNN4uVreUeT8+drlgg==
 commander@^2.20.0:
-  version "2.20.1"
-  resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.1.tgz#3863ce3ca92d0831dcf2a102f5fb4b5926afd0f9"
-  integrity sha512-cCuLsMhJeWQ/ZpsFTbE765kvVfoeSddc4nU3up4fV+fDBcfUXnbITJ+JzhkdjzOqhURjZgujxaioam4RM9yGUg==
+  version "2.20.3"
+  resolved "https://registry.yarnpkg.com/commander/-/commander-2.20.3.tgz#fd485e84c03eb4881c20722ba48035e8531aeb33"
+  integrity sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==
 commander@~2.19.0:
   version "2.19.0"
   resolved "https://registry.yarnpkg.com/commander/-/commander-2.19.0.tgz#f6198aa84e5b83c46054b94ddedbfed5ee9ff12a"
   integrity sha512-6tvAOO+D6OENvRAh524Dh9jcfKTYDQAqvqezbCW82xj5X0pSrcpxtvRKHLG0yBY6SD7PSDrJaj+0AiOcKVd1Xg==
-comment-regex@^1.0.0:
-  version "1.0.1"
-  resolved "https://registry.yarnpkg.com/comment-regex/-/comment-regex-1.0.1.tgz#e070d2c4db33231955d0979d27c918fcb6f93565"
-  integrity sha512-IWlN//Yfby92tOIje7J18HkNmWRR7JESA/BK8W7wqY/akITpU5B0JQWnbTjCfdChSrDNb0DrdA9jfAxiiBXyiQ==
 commondir@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/commondir/-/commondir-1.0.1.tgz#ddd800da0c66127393cca5950ea968a3aaf1253b"
@@ -2057,11 +2046,9 @@ consola@^2.6.0:
   integrity sha512-4sxpH6SGFYLADfUip4vuY65f/gEogrzJoniVhNUYkJHtng0l8ZjnDCqxxrSVRHOHwKxsy8Vm5ONZh1wOR3/l/w==
 console-browserify@^1.1.0:
-  version "1.1.0"
-  resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.1.0.tgz#f0241c45730a9fc6323b206dbf38edc741d0bb10"
-  integrity sha1-8CQcRXMKn8YyOyBtvzjtx0HQuxA=
-  dependencies:
-    date-now "^0.1.4"
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/console-browserify/-/console-browserify-1.2.0.tgz#67063cef57ceb6cf4993a2ab3a55840ae8c49336"
+  integrity sha512-ZMkYO/LkF17QvCPqM0gxw8yUzigAOZOSWSHg91FH6orS7vcEj5dVZTidN2fQ14yBSdg97RqhSNwLUXInd52OTA==
 console-control-strings@^1.0.0, console-control-strings@~1.1.0:
   version "1.1.0"
@@ -2144,10 +2131,10 @@ copy-webpack-plugin@^5.0.2:
     serialize-javascript "^1.7.0"
     webpack-log "^2.0.0"
-core-js@^2.4.0, core-js@^2.6.5:
-  version "2.6.9"
-  resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.9.tgz#6b4b214620c834152e179323727fc19741b084f2"
-  integrity sha512-HOpZf6eXmnl7la+cUdMnLvUxKNqLUzJvgIziQ0DiF3JwSImNphIqdGqzj6hIKyX04MmV0poclQ7+wjWvxQyR2A==
+core-js@^2.6.5:
+  version "2.6.10"
+  resolved "https://registry.yarnpkg.com/core-js/-/core-js-2.6.10.tgz#8a5b8391f8cc7013da703411ce5b585706300d7f"
+  integrity sha512-I39t74+4t+zau64EN1fE5v2W31Adtc/REhzWN+gWRRXg6WH5qAsZm62DHpQ1+Yhe4047T55jvzz7MUqF/dBBlA==
 core-util-is@1.0.2, core-util-is@~1.0.0:
   version "1.0.2"
@@ -2285,21 +2272,13 @@ css-select@^2.0.0:
     domutils "^1.7.0"
     nth-check "^1.0.2"
-css-tree@1.0.0-alpha.29:
-  version "1.0.0-alpha.29"
-  resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.29.tgz#3fa9d4ef3142cbd1c301e7664c1f352bd82f5a39"
-  integrity sha512-sRNb1XydwkW9IOci6iB2xmy8IGCj6r/fr+JWitvJ2JxQRPzN3T4AGGVWCMlVmVwM1gtgALJRmGIlWv5ppnGGkg==
-  dependencies:
-    mdn-data "~1.1.0"
-    source-map "^0.5.3"
-css-tree@1.0.0-alpha.33:
-  version "1.0.0-alpha.33"
-  resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.33.tgz#970e20e5a91f7a378ddd0fc58d0b6c8d4f3be93e"
-  integrity sha512-SPt57bh5nQnpsTBsx/IXbO14sRc9xXu5MtMAVuo0BaQQmyf0NupNPPSoMaqiAF5tDFafYsTkfeH4Q/HCKXkg4w==
+css-tree@1.0.0-alpha.37:
+  version "1.0.0-alpha.37"
+  resolved "https://registry.yarnpkg.com/css-tree/-/css-tree-1.0.0-alpha.37.tgz#98bebd62c4c1d9f960ec340cf9f7522e30709a22"
+  integrity sha512-DMxWJg0rnz7UgxKT0Q1HU/L9BeJI0M6ksor0OgqOnF+aRCDWg/N2641HmVyU9KVIu0OVVWOb2IpC9A+BJRnejg==
   dependencies:
     mdn-data "2.0.4"
-    source-map "^0.5.3"
+    source-map "^0.6.1"
 css-unit-converter@^1.1.1:
   version "1.1.1"
@@ -2311,11 +2290,6 @@ css-what@2.1, css-what@^2.1.2:
   resolved "https://registry.yarnpkg.com/css-what/-/css-what-2.1.3.tgz#a6d7604573365fe74686c3f311c56513d88285f2"
   integrity sha512-a+EPoD+uZiNfh+5fxw2nO9QwFa6nJe2Or35fGY6Ipw1R3R4AGz1d1TEZrCegvw2YTmZ0jXirGYlzxxpYSHwpEg==
-css.escape@^1.5.1:
-  version "1.5.1"
-  resolved "https://registry.yarnpkg.com/css.escape/-/css.escape-1.5.1.tgz#42e27d4fa04ae32f931a4b4d4191fa9cddee97cb"
-  integrity sha1-QuJ9T6BK4y+TGktNQZH6nN3ul8s=
 css@^2.0.0:
   version "2.2.4"
   resolved "https://registry.yarnpkg.com/css/-/css-2.2.4.tgz#c646755c73971f2bba6a601e2cf2fd71b1298929"
@@ -2404,12 +2378,12 @@ cssnano@^4.1.10:
     is-resolvable "^1.0.0"
     postcss "^7.0.0"
-csso@^3.5.1:
-  version "3.5.1"
-  resolved "https://registry.yarnpkg.com/csso/-/csso-3.5.1.tgz#7b9eb8be61628973c1b261e169d2f024008e758b"
-  integrity sha512-vrqULLffYU1Q2tLdJvaCYbONStnfkfimRxXNaGjxMldI0C7JPBC4rB1RyjhfdZ4m1frm8pM9uRPKH3d2knZ8gg==
+csso@^4.0.2:
+  version "4.0.2"
+  resolved "https://registry.yarnpkg.com/csso/-/csso-4.0.2.tgz#e5f81ab3a56b8eefb7f0092ce7279329f454de3d"
+  integrity sha512-kS7/oeNVXkHWxby5tHVxlhjizRCSv8QdU7hB2FpdAibDU8FjTAolhNjKNTiLzXtUrKT6HwClE81yXwEk1309wg==
   dependencies:
-    css-tree "1.0.0-alpha.29"
+    css-tree "1.0.0-alpha.37"
 cyclist@^1.0.1:
   version "1.0.1"
@@ -2423,11 +2397,6 @@ dashdash@^1.12.0:
   dependencies:
     assert-plus "^1.0.0"
-date-now@^0.1.4:
-  version "0.1.4"
-  resolved "https://registry.yarnpkg.com/date-now/-/date-now-0.1.4.tgz#eaf439fd4d4848ad74e5cc7dbef200672b9e345b"
-  integrity sha1-6vQ5/U1ISK105cx9vvIAZyueNFs=
 de-indent@^1.0.2:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/de-indent/-/de-indent-1.0.2.tgz#b2038e846dc33baa5796128d0804b455b8c1e21d"
@@ -2440,7 +2409,7 @@ debug@2.6.9, debug@^2.2.0, debug@^2.3.3, debug@^2.6.9:
   dependencies:
     ms "2.0.0"
-debug@^3.0.0, debug@^3.2.5, debug@^3.2.6:
+debug@^3.0.0, debug@^3.1.1, debug@^3.2.5, debug@^3.2.6:
   version "3.2.6"
   resolved "https://registry.yarnpkg.com/debug/-/debug-3.2.6.tgz#e83d17de16d8a7efb7717edbe5fb10135eee629b"
   integrity sha512-mel+jf7nrtEl5Pn1Qx46zARXKDpBbvzezse7p7LqINmdoIk8PYP5SySaxEmYv6TZ0JyEKA1hsCId6DIhgITtWQ==
@@ -2530,11 +2499,6 @@ define-property@^2.0.2:
     is-descriptor "^1.0.2"
     isobject "^3.0.1"
-defined@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/defined/-/defined-1.0.0.tgz#c98d9bcef75674188e110969151199e39b1fa693"
-  integrity sha1-yY2bzvdWdBiOEQlpFRGZ45sfppM=
 del@^4.1.1:
   version "4.1.1"
   resolved "https://registry.yarnpkg.com/del/-/del-4.1.1.tgz#9e8f117222ea44a31ff3a156c049b99052a9f0b4"
@@ -2733,10 +2697,10 @@ ee-first@1.1.1:
   resolved "https://registry.yarnpkg.com/ee-first/-/ee-first-1.1.1.tgz#590c61156b0ae2f4f0255732a158b266bc56b21d"
   integrity sha1-WQxhFWsK4vTwJVcyoViyZrxWsh0=
-electron-to-chromium@^1.3.247:
-  version "1.3.268"
-  resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.268.tgz#d18f10b064dd0fce39098704896309a8249cb62f"
-  integrity sha512-QkPEya233zGh+1erw/N/GNgLjs+t65wkGX4Yw0X/ZuO75r+4Ropk7toXSUqP3TQ7EIwBDotTks3rbNZ1Kwz8hA==
+electron-to-chromium@^1.3.295:
+  version "1.3.296"
+  resolved "https://registry.yarnpkg.com/electron-to-chromium/-/electron-to-chromium-1.3.296.tgz#a1d4322d742317945285d3ba88966561b67f3ac8"
+  integrity sha512-s5hv+TSJSVRsxH190De66YHb50pBGTweT9XGWYu/LMR20KX6TsjFzObo36CjVAzM+PUeeKSBRtm/mISlCzeojQ==
 elliptic@^6.0.0:
   version "6.5.1"
@@ -2774,12 +2738,12 @@ end-of-stream@^1.0.0, end-of-stream@^1.1.0:
     once "^1.4.0"
 enhanced-resolve@^4.1.0:
-  version "4.1.0"
-  resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.0.tgz#41c7e0bfdfe74ac1ffe1e57ad6a5c6c9f3742a7f"
-  integrity sha512-F/7vkyTtyc/llOIn8oWclcB25KdRaiPBpZYDgJHgh/UHtpgT2p2eldQgtQnLtUvfMKPKxbRaQM/hHkvLHt1Vng==
+  version "4.1.1"
+  resolved "https://registry.yarnpkg.com/enhanced-resolve/-/enhanced-resolve-4.1.1.tgz#2937e2b8066cd0fe7ce0990a98f0d71a35189f66"
+  integrity sha512-98p2zE+rL7/g/DzMHMTF4zZlCgeVdJ7yr6xzEpJRYwFYrGi9ANdn5DnJURg6RpBkyk60XYDnWIv51VfIhfNGuA==
   dependencies:
     graceful-fs "^4.1.2"
-    memory-fs "^0.4.0"
+    memory-fs "^0.5.0"
     tapable "^1.0.0"
 entities@^1.1.1, entities@~1.1.1:
@@ -2820,9 +2784,9 @@ error-ex@^1.3.1:
     is-arrayish "^0.2.1"
 es-abstract@^1.12.0, es-abstract@^1.5.1:
-  version "1.14.2"
-  resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.14.2.tgz#7ce108fad83068c8783c3cdf62e504e084d8c497"
-  integrity sha512-DgoQmbpFNOofkjJtKwr87Ma5EW4Dc8fWhD0R+ndq7Oc456ivUfGOOP6oAZTTKl5/CcNMP+EN+e3/iUzgE0veZg==
+  version "1.16.0"
+  resolved "https://registry.yarnpkg.com/es-abstract/-/es-abstract-1.16.0.tgz#d3a26dc9c3283ac9750dca569586e976d9dcc06d"
+  integrity sha512-xdQnfykZ9JMEiasTAJZJdMWCQ1Vm00NBw79/AWi7ELfZuuPCSOMDZbT9mkOfSctVtfhb+sAAzrm+j//GjjLHLg==
   dependencies:
     es-to-primitive "^1.2.0"
     function-bind "^1.1.1"
@@ -2832,8 +2796,8 @@ es-abstract@^1.12.0, es-abstract@^1.5.1:
     is-regex "^1.0.4"
     object-inspect "^1.6.0"
     object-keys "^1.1.1"
-    string.prototype.trimleft "^2.0.0"
-    string.prototype.trimright "^2.0.0"
+    string.prototype.trimleft "^2.1.0"
+    string.prototype.trimright "^2.1.0"
 es-to-primitive@^1.2.0:
   version "1.2.0"
@@ -3072,9 +3036,9 @@ figgy-pudding@^3.5.1:
   integrity sha512-vNKxJHTEKNThjfrdJwHc7brvM6eVevuO5nTj6ez8ZQ1qbXTvGthucRF7S4vf2cr71QVnT70V34v0S1DyQsti0w==
 figures@^3.0.0:
-  version "3.0.0"
-  resolved "https://registry.yarnpkg.com/figures/-/figures-3.0.0.tgz#756275c964646163cc6f9197c7a0295dbfd04de9"
-  integrity sha512-HKri+WoWoUgr83pehn/SIgLOMZ9nAWC6dcGj26RY2R4F50u4+RTUz0RCrUlOV3nKRAICW1UGzyb+kcX2qK1S/g==
+  version "3.1.0"
+  resolved "https://registry.yarnpkg.com/figures/-/figures-3.1.0.tgz#4b198dd07d8d71530642864af2d45dd9e459c4ec"
+  integrity sha512-ravh8VRXqHuMvZt/d8GblBeqDMkdJMBdv/2KntFH+ra5MXkO7nxNKpzQ3n6QD/2da1kH0aWmNISdvhM7gl2gVg==
   dependencies:
     escape-string-regexp "^1.0.5"
@@ -3204,15 +3168,6 @@ from2@^2.1.0:
     inherits "^2.0.1"
     readable-stream "^2.0.0"
-fs-extra@^4.0.2:
-  version "4.0.3"
-  resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-4.0.3.tgz#0d852122e5bc5beb453fb028e9c0c9bf36340c94"
-  integrity sha512-q6rbdDd1o2mAnQreO7YADIxf/Whx4AHBiRf6d+/cVT8h44ss+lHgxf1FemcqDnQt9X3ct4McHr+JMGlYSsK7Cg==
-  dependencies:
-    graceful-fs "^4.1.2"
-    jsonfile "^4.0.0"
-    universalify "^0.1.0"
 fs-extra@^7.0.1:
   version "7.0.1"
   resolved "https://registry.yarnpkg.com/fs-extra/-/fs-extra-7.0.1.tgz#4f189c44aa123b895f722804f55ea23eadc348e9"
@@ -3266,11 +3221,6 @@ function-bind@^1.1.1:
   resolved "https://registry.yarnpkg.com/function-bind/-/function-bind-1.1.1.tgz#a56899d3ea3c9bab874bb9773b7c5ede92f4895d"
   integrity sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==
-gather-stream@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/gather-stream/-/gather-stream-1.0.0.tgz#b33994af457a8115700d410f317733cbe7a0904b"
-  integrity sha1-szmUr0V6gRVwDUEPMXczy+egkEs=
 gauge@~2.7.3:
   version "2.7.4"
   resolved "https://registry.yarnpkg.com/gauge/-/gauge-2.7.4.tgz#2c03405c7538c39d7eb37b317022e325fb018bf7"
@@ -3328,9 +3278,9 @@ glob-to-regexp@^0.3.0:
   integrity sha1-jFoUlNIGbFcMw7/kSWF1rMTVAqs=
 glob@^7.0.3, glob@^7.1.2, glob@^7.1.3, glob@^7.1.4:
-  version "7.1.4"
-  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.4.tgz#aa608a2f6c577ad357e1ae5a5c26d9a8d1969255"
-  integrity sha512-hkLPepehmnKk41pUGm3sYxoFs/umurYfYJCerbXEyFIWcAzvpipAgVkBqqT9RBKMGjnq6kMuyYwha6csxbiM1A==
+  version "7.1.5"
+  resolved "https://registry.yarnpkg.com/glob/-/glob-7.1.5.tgz#6714c69bee20f3c3e64c4dd905553e532b40cdc0"
+  integrity sha512-J9dlskqUXK1OeTOYBEn5s8aMukWMwWfs+rPTn/jn50Ux4MNXVhubL1wu/j2t+H4NVI+cXEcCaYellqaPVGXNqQ==
   dependencies:
     fs.realpath "^1.0.0"
     inflight "^1.0.4"
@@ -3397,9 +3347,9 @@ good-listener@^1.2.2:
     delegate "^3.1.2"
 graceful-fs@^4.1.11, graceful-fs@^4.1.15, graceful-fs@^4.1.2, graceful-fs@^4.1.6, graceful-fs@^4.2.0:
-  version "4.2.2"
-  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.2.tgz#6f0952605d0140c1cfdb138ed005775b92d67b02"
-  integrity sha512-IItsdsea19BoLC7ELy13q1iJFNmd7ofZH5+X/pJr90/nRoPEX0DJo1dHDbgtYWOhJhcCgMDTOw84RZ72q6lB+Q==
+  version "4.2.3"
+  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.3.tgz#4a12ff1b60376ef09862c2093edd908328be8423"
+  integrity sha512-a30VEBm4PEdx1dRB7MFK7BejejvCvBronbLjht+sHuGYj8PHs7M/5Z+rt5lw551vZ7yfTCj4Vuyy3mSJytDWRQ==
 gray-matter@^4.0.1:
   version "4.0.2"
@@ -3436,11 +3386,6 @@ has-ansi@^2.0.0:
   dependencies:
     ansi-regex "^2.0.0"
-has-flag@^1.0.0:
-  version "1.0.0"
-  resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-1.0.0.tgz#9d9e793165ce017a00f00418c43f942a7b1d11fa"
-  integrity sha1-nZ55MWXOAXoA8AQYxD+UKnsdEfo=
 has-flag@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-3.0.0.tgz#b5d454dc2199ae225699f3467e5a07f3b955bafd"
@@ -3644,7 +3589,7 @@ http-errors@~1.7.2:
   resolved "https://registry.yarnpkg.com/http-parser-js/-/http-parser-js-0.4.10.tgz#92c9c1374c35085f75db359ec56cc257cbb93fa4"
   integrity sha1-ksnBN0w1CF912zWexWzCV8u5P6Q=
-http-proxy-middleware@^0.19.1:
+http-proxy-middleware@0.19.1:
   version "0.19.1"
   resolved "https://registry.yarnpkg.com/http-proxy-middleware/-/http-proxy-middleware-0.19.1.tgz#183c7dc4aa1479150306498c210cdaf96080a43a"
   integrity sha512-yHYTgWMQO8VvwNS22eLLloAkvungsKdKTLO8AJlftYIKNfJr3GK3zK0ZCfzDDGUBttdGc8xFy1mCitvNKQtC3Q==
@@ -3707,9 +3652,9 @@ iferr@^0.1.5:
   integrity sha1-xg7taebY/bazEEofy8ocGS3FtQE=
 ignore-walk@^3.0.1:
-  version "3.0.2"
-  resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.2.tgz#99d83a246c196ea5c93ef9315ad7b0819c35069b"
-  integrity sha512-EXyErtpHbn75ZTsOADsfx6J/FPo6/5cjev46PXrcTpd8z3BoRkXgYu9/JVqrI7tusjmwCZutGeRJeU0Wo1e4Cw==
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/ignore-walk/-/ignore-walk-3.0.3.tgz#017e2447184bfeade7c238e4aefdd1e8f95b1e37"
+  integrity sha512-m7o6xuOaT1aqheYHKf8W6J5pYH85ZI9w077erOzLje3JsB1gkafkAhHHY19dqjulgIZHFm32Cp5uNZgcQqdJKw==
   dependencies:
     minimatch "^3.0.4"
@@ -3846,7 +3791,7 @@ is-absolute-url@^2.0.0:
   resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-2.1.0.tgz#50530dfb84fcc9aa7dbe7852e83a37b93b9f2aa6"
   integrity sha1-UFMN+4T8yap9vnhS6Do3uTufKqY=
-is-absolute-url@^3.0.2:
+is-absolute-url@^3.0.3:
   version "3.0.3"
   resolved "https://registry.yarnpkg.com/is-absolute-url/-/is-absolute-url-3.0.3.tgz#96c6a22b6a23929b11ea0afb1836c36ad4a5d698"
   integrity sha512-opmNIX7uFnS96NtPmhWQgQx6/NYFgsUXYMllcfzwWKUMwfo8kku1TvE6hkNcH+Q1ts5cMVrsY7j0bxXQDciu9Q==
@@ -4120,11 +4065,6 @@ javascript-stringify@^1.6.0:
   resolved "https://registry.yarnpkg.com/javascript-stringify/-/javascript-stringify-1.6.0.tgz#142d111f3a6e3dae8f4a9afd77d45855b5a9cce3"
   integrity sha1-FC0RHzpuPa6PSpr9d9RYVbWpzOM=
-js-base64@^2.1.9:
-  version "2.5.1"
-  resolved "https://registry.yarnpkg.com/js-base64/-/js-base64-2.5.1.tgz#1efa39ef2c5f7980bb1784ade4a8af2de3291121"
-  integrity sha512-M7kLczedRMYX4L8Mdh4MzyAMM9O5osx+4FcOQuTvr3A9F2D9S5JXheN0ewNbrvK2UatkTRhL5ejGmGSjNMiZuw==
 js-levenshtein@^1.1.3:
   version "1.1.6"
   resolved "https://registry.yarnpkg.com/js-levenshtein/-/js-levenshtein-1.1.6.tgz#c6cee58eb3550372df8deb85fad5ce66ce01d59d"
@@ -4196,9 +4136,9 @@ json5@^1.0.1:
     minimist "^1.2.0"
 json5@^2.1.0:
-  version "2.1.0"
-  resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.0.tgz#e7a0c62c48285c628d20a10b85c89bb807c32850"
-  integrity sha512-8Mh9h6xViijj36g7Dxi+Y4S6hNGV96vcJZr/SrlHh1LR/pEn/8j/+qIBbs44YKl69Lrfctp4QD+AdWLTMqEZAQ==
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/json5/-/json5-2.1.1.tgz#81b6cb04e9ba496f1c7005d07b4368a2638f90b6"
+  integrity sha512-l+3HXD0GEI3huGq1njuqtzYK8OYJyXMkOLtQ53pjWh89tvWS2h6l+1zMkYWqlb57+SiQodKZyvMEFb2X+KrFhQ==
   dependencies:
     minimist "^1.2.0"
@@ -4325,6 +4265,11 @@ lodash.clonedeep@^4.5.0:
   resolved "https://registry.yarnpkg.com/lodash.clonedeep/-/lodash.clonedeep-4.5.0.tgz#e23f3f9c4f8fbdde872529c1071857a086e5ccef"
   integrity sha1-4j8/nE+Pvd6HJSnBBxhXoIblzO8=
+lodash.debounce@^4.0.8:
+  version "4.0.8"
+  resolved "https://registry.yarnpkg.com/lodash.debounce/-/lodash.debounce-4.0.8.tgz#82d79bff30a67c4005ffd5e2515300ad9ca4d7af"
+  integrity sha1-gteb/zCmfEAF/9XiUVMArZyk168=
 lodash.kebabcase@^4.1.1:
   version "4.1.1"
   resolved "https://registry.yarnpkg.com/lodash.kebabcase/-/lodash.kebabcase-4.1.1.tgz#8489b1cb0d29ff88195cceca448ff6d6cc295c36"
@@ -4350,11 +4295,6 @@ lodash.templatesettings@^4.0.0:
   dependencies:
     lodash._reinterpolate "^3.0.0"
-lodash.throttle@^4.1.1:
-  version "4.1.1"
-  resolved "https://registry.yarnpkg.com/lodash.throttle/-/lodash.throttle-4.1.1.tgz#c23e91b710242ac70c37f1e1cda9274cc39bf2f4"
-  integrity sha1-wj6RtxAkKscMN/HhzaknTMOb8vQ=
 lodash.toarray@^4.4.0:
   version "4.4.0"
   resolved "https://registry.yarnpkg.com/lodash.toarray/-/lodash.toarray-4.4.0.tgz#24c4bfcd6b2fba38bfd0594db1179d8e9b656561"
@@ -4365,7 +4305,7 @@ lodash.uniq@^4.5.0:
   resolved "https://registry.yarnpkg.com/lodash.uniq/-/lodash.uniq-4.5.0.tgz#d0225373aeb652adc1bc82e4945339a842754773"
   integrity sha1-0CJTc662Uq3BvILklFM5qEJ1R3M=
-lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.15, lodash@^4.17.3, lodash@^4.17.5:
+lodash@^4.17.11, lodash@^4.17.13, lodash@^4.17.14, lodash@^4.17.15, lodash@^4.17.3, lodash@^4.17.5:
   version "4.17.15"
   resolved "https://registry.yarnpkg.com/lodash/-/lodash-4.17.15.tgz#b447f6670a0455bbfeedd11392eff330ea097548"
   integrity sha512-8xOcRHvCjnocdS5cpwXQXVzmmh5e5+saE2QGoeQmbKmRS6J3VQppPOIt0MnmE+4xlZoumy0GPG0D0MVIQbNA1A==
@@ -4435,9 +4375,9 @@ map-visit@^1.0.0:
     object-visit "^1.0.0"
 markdown-it-anchor@^5.0.2:
-  version "5.2.4"
-  resolved "https://registry.yarnpkg.com/markdown-it-anchor/-/markdown-it-anchor-5.2.4.tgz#d39306fe4c199705b4479d3036842cf34dcba24f"
-  integrity sha512-n8zCGjxA3T+Mx1pG8HEgbJbkB8JFUuRkeTZQuIM8iPY6oQ8sWOPRZJDFC9a/pNg2QkHEjjGkhBEl/RSyzaDZ3A==
+  version "5.2.5"
+  resolved "https://registry.yarnpkg.com/markdown-it-anchor/-/markdown-it-anchor-5.2.5.tgz#dbf13cfcdbffd16a510984f1263e1d479a47d27a"
+  integrity sha512-xLIjLQmtym3QpoY9llBgApknl7pxAcN3WDRc2d3rwpl+/YvDZHPmKscGs+L6E05xf2KrCXPBvosWt7MZukwSpQ==
 markdown-it-chain@^1.3.0:
   version "1.3.0"
@@ -4486,11 +4426,6 @@ mdn-data@2.0.4:
   resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-2.0.4.tgz#699b3c38ac6f1d728091a64650b65d388502fd5b"
   integrity sha512-iV3XNKw06j5Q7mi6h+9vbx23Tv7JkjEVgKHW4pimwyDGWm0OIQntJJ+u1C6mg6mK1EaTv42XQ7w76yuzH7M2cA==
-mdn-data@~1.1.0:
-  version "1.1.4"
-  resolved "https://registry.yarnpkg.com/mdn-data/-/mdn-data-1.1.4.tgz#50b5d4ffc4575276573c4eedb8780812a8419f01"
-  integrity sha512-FSYbp3lyKjyj3E7fMl6rYvUdX0FBXaluGqlFoYESWQlyUTq8R+wp0rkFxoYFqZlHCvsUXGjyJmLQSnXToYhOSA==
 mdurl@^1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/mdurl/-/mdurl-1.0.1.tgz#fe85b2ec75a59037f2adfec100fd6c601761152e"
@@ -4510,7 +4445,7 @@ mem@^4.0.0:
     mimic-fn "^2.0.0"
     p-is-promise "^2.0.0"
-memory-fs@^0.4.0, memory-fs@^0.4.1:
+memory-fs@^0.4.1:
   version "0.4.1"
   resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.4.1.tgz#3a9a20b8462523e447cfbc7e8bb80ed667bfc552"
   integrity sha1-OpoguEYlI+RHz7x+i7gO1me/xVI=
@@ -4518,6 +4453,14 @@ memory-fs@^0.4.0, memory-fs@^0.4.1:
     errno "^0.1.3"
     readable-stream "^2.0.1"
+memory-fs@^0.5.0:
+  version "0.5.0"
+  resolved "https://registry.yarnpkg.com/memory-fs/-/memory-fs-0.5.0.tgz#324c01288b88652966d161db77838720845a8e3c"
+  integrity sha512-jA0rdU5KoQMC0e6ppoNRtpp6vjFq6+NY7r8hywnC7V+1Xj/MtHwGIbB1QaK/dunyjWteJzmkpd7ooeWg10T7GA==
+  dependencies:
+    errno "^0.1.3"
+    readable-stream "^2.0.1"
 merge-descriptors@1.0.1:
   version "1.0.1"
   resolved "https://registry.yarnpkg.com/merge-descriptors/-/merge-descriptors-1.0.1.tgz#b00aaa556dd8b44568150ec9d1b953f3f90cbb61"
@@ -4643,15 +4586,7 @@ minimist@^1.2.0:
   resolved "https://registry.yarnpkg.com/minimist/-/minimist-1.2.0.tgz#a35008b20f41383eec1fb914f4cd5df79a264284"
   integrity sha1-o1AIsg9BOD7sH7kU9M1d95omQoQ=
-minipass@^2.6.0, minipass@^2.8.6:
-  version "2.8.6"
-  resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.8.6.tgz#620d889ace26356391d010ecb9458749df9b6db5"
-  integrity sha512-lFG7d6g3+/UaFDCOtqPiKAC9zngWWsQZl1g5q6gaONqrjq61SX2xFqXMleQiFVyDpYwa018E9hmlAFY22PCb+A==
-  dependencies:
-    safe-buffer "^5.1.2"
-    yallist "^3.0.0"
-minipass@^2.9.0:
+minipass@^2.6.0, minipass@^2.8.6, minipass@^2.9.0:
   version "2.9.0"
   resolved "https://registry.yarnpkg.com/minipass/-/minipass-2.9.0.tgz#e713762e7d3e32fed803115cf93e04bca9fcc9a6"
   integrity sha512-wxfUjg9WebH+CUDX/CdbRlh5SmfZiy/hpkxaRI16Y9W56Pa75sWgd/rvFilSgrauD9NyFymP/+JFV3KwzIsJeg==
@@ -4660,9 +4595,9 @@ minipass@^2.9.0:
     yallist "^3.0.0"
 minizlib@^1.2.1:
-  version "1.3.2"
-  resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.2.tgz#5d24764998f98112586f7e566bd4c0999769dad4"
-  integrity sha512-lsNFqSHdJ21EwKzCp12HHJGxSMtHkCW1EMA9cceG3MkMNARjuWotZnMe3NKNshAvFXpm4loZqmYsCmRwhS2JMw==
+  version "1.3.3"
+  resolved "https://registry.yarnpkg.com/minizlib/-/minizlib-1.3.3.tgz#2290de96818a34c29551c8a8d301216bd65a861d"
+  integrity sha512-6ZYMOEnmVsdCeTJVE0W9ZD+pVnE8h9Hma/iOwwRDsdQoePpoX56/8B6z3P9VNwppJuBKNRuFDRNRqRWexT9G9Q==
   dependencies:
     minipass "^2.9.0"
@@ -4695,7 +4630,7 @@ mkdirp@0.3.0:
   resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.3.0.tgz#1bbf5ab1ba827af23575143490426455f481fe1e"
   integrity sha1-G79asbqCevI1dRQ0kEJkVfSB/h4=
-mkdirp@0.5.x, mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.1, mkdirp@~0.5.x:
+mkdirp@^0.5.0, mkdirp@^0.5.1, mkdirp@~0.5.1, mkdirp@~0.5.x:
   version "0.5.1"
   resolved "https://registry.yarnpkg.com/mkdirp/-/mkdirp-0.5.1.tgz#30057438eac6cf7f8c4767f38648d6697d75c903"
   integrity sha1-MAV0OOrGz3+MR2fzhkjWaX11yQM=
@@ -4802,10 +4737,10 @@ node-emoji@^1.8.1:
   dependencies:
     lodash.toarray "^4.4.0"
-node-forge@0.8.2:
-  version "0.8.2"
-  resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.8.2.tgz#b4bcc59fb12ce77a8825fc6a783dfe3182499c5a"
-  integrity sha512-mXQ9GBq1N3uDCyV1pdSzgIguwgtVpM7f5/5J4ipz12PKWElmPpVWLDuWl8iXmhysr21+WmX/OJ5UKx82wjomgg==
+node-forge@0.9.0:
+  version "0.9.0"
+  resolved "https://registry.yarnpkg.com/node-forge/-/node-forge-0.9.0.tgz#d624050edbb44874adca12bb9a52ec63cb782579"
+  integrity sha512-7ASaDa3pD+lJ3WvXFsxekJQelBKRpne+GOVbLbtHYdd7pFspyeuJHnWfLplGf3SwKGbfs/aYl5V/JCIaHVUKKQ==
 node-libs-browser@^2.2.1:
   version "2.2.1"
@@ -4852,12 +4787,12 @@ node-pre-gyp@^0.12.0:
     semver "^5.3.0"
     tar "^4"
-node-releases@^1.1.29:
-  version "1.1.32"
-  resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.32.tgz#485b35c1bf9b4d8baa105d782f8ca731e518276e"
-  integrity sha512-VhVknkitq8dqtWoluagsGPn3dxTvN9fwgR59fV3D7sLBHe0JfDramsMI8n8mY//ccq/Kkrf8ZRHRpsyVZ3qw1A==
+node-releases@^1.1.38:
+  version "1.1.39"
+  resolved "https://registry.yarnpkg.com/node-releases/-/node-releases-1.1.39.tgz#c1011f30343aff5b633153b10ff691d278d08e8d"
+  integrity sha512-8MRC/ErwNCHOlAFycy9OPca46fQYUjbJRDcZTHVWIGXIjYLM73k70vv3WkYutVnM4cCo4hE0MqBVVZjP6vjISA==
   dependencies:
-    semver "^5.3.0"
+    semver "^6.3.0"
 nopt@1.0.10:
   version "1.0.10"
@@ -4916,9 +4851,9 @@ npm-bundled@^1.0.1:
   integrity sha512-8/JCaftHwbd//k6y2rEWp6k1wxVfpFzB6t1p825+cUb7Ym2XQfhwIC5KwhrvzZRJu+LtDE585zVaS32+CGtf0g==
 npm-packlist@^1.1.6:
-  version "1.4.4"
-  resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.4.tgz#866224233850ac534b63d1a6e76050092b5d2f44"
-  integrity sha512-zTLo8UcVYtDU3gdeaFu2Xu0n0EvelfHDGuqtNIn5RO7yQj4H1TqNdBc/yZjxnWA0PVB8D3Woyp0i5B43JwQ6Vw==
+  version "1.4.6"
+  resolved "https://registry.yarnpkg.com/npm-packlist/-/npm-packlist-1.4.6.tgz#53ba3ed11f8523079f1457376dd379ee4ea42ff4"
+  integrity sha512-u65uQdb+qwtGvEJh/DgQgW1Xg7sqeNbmxYyrvlNznaVTjV3E5P6F/EFjM+BVHXl7JJlsdG8A64M0XI8FI/IOlg==
   dependencies:
     ignore-walk "^3.0.1"
     npm-bundled "^1.0.1"
@@ -5295,22 +5230,6 @@ pbkdf2@^3.0.3:
     safe-buffer "^5.0.1"
     sha.js "^2.4.8"
-perfectionist@^2.4.0:
-  version "2.4.0"
-  resolved "https://registry.yarnpkg.com/perfectionist/-/perfectionist-2.4.0.tgz#c147ad3714e126467f1764129ee72df861d47ea0"
-  integrity sha1-wUetNxThJkZ/F2QSnuct+GHUfqA=
-  dependencies:
-    comment-regex "^1.0.0"
-    defined "^1.0.0"
-    minimist "^1.2.0"
-    postcss "^5.0.8"
-    postcss-scss "^0.3.0"
-    postcss-value-parser "^3.3.0"
-    read-file-stdin "^0.2.0"
-    string.prototype.repeat "^0.2.0"
-    vendors "^1.0.0"
-    write-file-stdout "0.0.2"
 performance-now@^2.1.0:
   version "2.1.0"
   resolved "https://registry.yarnpkg.com/performance-now/-/performance-now-2.1.0.tgz#6309f4e0e5fa913ec1c69307ae364b4b377c9e7b"
@@ -5357,14 +5276,14 @@ pkg-up@^2.0.0:
   dependencies:
     find-up "^2.1.0"
-portfinder@^1.0.13, portfinder@^1.0.24:
-  version "1.0.24"
-  resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.24.tgz#11efbc6865f12f37624b6531ead1d809ed965cfa"
-  integrity sha512-ekRl7zD2qxYndYflwiryJwMioBI7LI7rVXg3EnLK3sjkouT5eOuhS3gS255XxBksa30VG8UPZYZCdgfGOfkSUg==
+portfinder@^1.0.13, portfinder@^1.0.25:
+  version "1.0.25"
+  resolved "https://registry.yarnpkg.com/portfinder/-/portfinder-1.0.25.tgz#254fd337ffba869f4b9d37edc298059cb4d35eca"
+  integrity sha512-6ElJnHBbxVA1XSLgBp7G1FiCkQdlqGzuF7DswL5tcea+E8UpuvPU7beVAjjRwCioTS9ZluNbu+ZyRvgTsmqEBg==
   dependencies:
-    async "^1.5.2"
-    debug "^2.2.0"
-    mkdirp "0.5.x"
+    async "^2.6.2"
+    debug "^3.1.1"
+    mkdirp "^0.5.1"
 posix-character-classes@^0.1.0:
   version "0.1.1"
@@ -5693,13 +5612,6 @@ postcss-safe-parser@^4.0.1:
   dependencies:
     postcss "^7.0.0"
-postcss-scss@^0.3.0:
-  version "0.3.1"
-  resolved "https://registry.yarnpkg.com/postcss-scss/-/postcss-scss-0.3.1.tgz#65c610d8e2a7ee0e62b1835b71b8870734816e4b"
-  integrity sha1-ZcYQ2OKn7g5isYNbcbiHBzSBbks=
-  dependencies:
-    postcss "^5.2.4"
 postcss-selector-parser@^3.0.0:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/postcss-selector-parser/-/postcss-selector-parser-3.1.1.tgz#4f875f4afb0c96573d5cf4d74011aee250a7e865"
@@ -5751,21 +5663,11 @@ postcss-value-parser@^3.0.0, postcss-value-parser@^3.2.3, postcss-value-parser@^
   resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-3.3.1.tgz#9ff822547e2893213cf1c30efa51ac5fd1ba8281"
   integrity sha512-pISE66AbVkp4fDQ7VHBwRNXzAAKJjw4Vw7nWI/+Q3vuly7SNfgYXvm6i5IgFylHGK5sP/xHAbB7N49OS4gWNyQ==
-postcss-value-parser@^4.0.0:
+postcss-value-parser@^4.0.2:
   version "4.0.2"
   resolved "https://registry.yarnpkg.com/postcss-value-parser/-/postcss-value-parser-4.0.2.tgz#482282c09a42706d1fc9a069b73f44ec08391dc9"
   integrity sha512-LmeoohTpp/K4UiyQCwuGWlONxXamGzCMtFxLq4W1nZVGIQLYvMCJx3yAF9qyyuFpflABI9yVdtJAqbihOsCsJQ==
-postcss@^5.0.8, postcss@^5.2.4:
-  version "5.2.18"
-  resolved "https://registry.yarnpkg.com/postcss/-/postcss-5.2.18.tgz#badfa1497d46244f6390f58b319830d9107853c5"
-  integrity sha512-zrUjRRe1bpXKsX1qAJNJjqZViErVuyEkMTRrwu4ud4sbTtIBRmtaYDrHmcGgmrbsW3MHfmtIf+vJumgQn+PrXg==
-  dependencies:
-    chalk "^1.1.3"
-    js-base64 "^2.1.9"
-    source-map "^0.5.6"
-    supports-color "^3.2.3"
 postcss@^6.0.9:
   version "6.0.23"
   resolved "https://registry.yarnpkg.com/postcss/-/postcss-6.0.23.tgz#61c82cc328ac60e677645f979054eb98bc0e3324"
   integrity sha512-soOk1h6J3VMTZtVeVpv15/Hpdl2cBLX3CAw4TAbkpTJiNPk9YP/zWcD1ND+xEtvyuuvKzbxliTOIyvkSeSJ6ag==
     source-map "^0.6.1"
     supports-color "^5.4.0"
-postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.17, postcss@^7.0.18, postcss@^7.0.5, postcss@^7.0.6:
-  version "7.0.18"
-  resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.18.tgz#4b9cda95ae6c069c67a4d933029eddd4838ac233"
-  integrity sha512-/7g1QXXgegpF+9GJj4iN7ChGF40sYuGYJ8WZu8DZWnmhQ/G36hfdk3q9LBJmoK+lZ+yzZ5KYpOoxq7LF1BxE8g==
+postcss@^7.0.0, postcss@^7.0.1, postcss@^7.0.11, postcss@^7.0.14, postcss@^7.0.18, postcss@^7.0.19, postcss@^7.0.5, postcss@^7.0.6:
+  version "7.0.21"
+  resolved "https://registry.yarnpkg.com/postcss/-/postcss-7.0.21.tgz#06bb07824c19c2021c5d056d5b10c35b989f7e17"
+  integrity sha512-uIFtJElxJo29QC753JzhidoAhvp/e/Exezkdhfmt8AymWT6/5B7W1WmponYWkHk2eg6sONyTch0A3nkMPun3SQ==
   dependencies:
     chalk "^2.4.2"
     source-map "^0.6.1"
@@ -5915,9 +5817,9 @@ punycode@^2.1.0:
   integrity sha512-XRsRjdf+j5ml+y/6GKHPZbrF/8p2Yga0JPtdqTIY2Xe5ohJPD9saDJJLPvp9+NSBprVvevdXZybnj2cv8OEd0A==
 purgecss@^1.4.0:
-  version "1.4.0"
-  resolved "https://registry.yarnpkg.com/purgecss/-/purgecss-1.4.0.tgz#79905624ec1c6c8e1f03044bca92dd8a598ba429"
-  integrity sha512-or7/16i7O6DH+NpXqY8NCcWCc940O6PxOgjWAcMTElzgccKOJua1/n6JVtM8UYqoMMWoCyKk+CbLpo4+4mY3BQ==
+  version "1.4.1"
+  resolved "https://registry.yarnpkg.com/purgecss/-/purgecss-1.4.1.tgz#d362e63eb1ed9dd1fa1554b9fd7accb8d54e56dc"
+  integrity sha512-5jONV/D/3nfa+lC425+LA+OWe5/LDn4a79cac+TnzJq3VczwnWlpIDdW275hHsGhkzIlqATQsYFLW7or0cSwNQ==
   dependencies:
     glob "^7.1.3"
     postcss "^7.0.14"
@@ -6010,13 +5912,6 @@ read-cache@^1.0.0:
   dependencies:
     pify "^2.3.0"
-read-file-stdin@^0.2.0:
-  version "0.2.1"
-  resolved "https://registry.yarnpkg.com/read-file-stdin/-/read-file-stdin-0.2.1.tgz#25eccff3a153b6809afacb23ee15387db9e0ee61"
-  integrity sha1-JezP86FTtoCa+ssj7hU4fbng7mE=
-  dependencies:
-    gather-stream "^1.0.0"
 "readable-stream@1 || 2", readable-stream@^2.0.0, readable-stream@^2.0.1, readable-stream@^2.0.2, readable-stream@^2.0.6, readable-stream@^2.1.5, readable-stream@^2.2.2, readable-stream@^2.3.3, readable-stream@^2.3.6, readable-stream@~2.3.6:
   version "2.3.6"
   resolved "https://registry.yarnpkg.com/readable-stream/-/readable-stream-2.3.6.tgz#b11c27d88b8ff1fbe070643cf94b0c79ae1b0aaf"
@@ -6049,9 +5944,9 @@ readdirp@^2.2.1:
     readable-stream "^2.0.2"
 reduce-css-calc@^2.1.6:
-  version "2.1.6"
-  resolved "https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-2.1.6.tgz#050fe6ee7d98a1d70775d2e93ce0b713cee394d2"
-  integrity sha512-+l5/qlQmdsbM9h6JerJ/y5vR5Ci0k93aszLNpCmbadC3nBcbRGmIBm0s9Nj59i22LvCjTGftWzdQRwdknayxhw==
+  version "2.1.7"
+  resolved "https://registry.yarnpkg.com/reduce-css-calc/-/reduce-css-calc-2.1.7.tgz#1ace2e02c286d78abcd01fd92bfe8097ab0602c2"
+  integrity sha512-fDnlZ+AybAS3C7Q9xDq5y8A2z+lT63zLbynew/lur/IR24OQF5x98tfNwf79mzEdfywZ0a2wpM860FhFfMxZlA==
   dependencies:
     css-unit-converter "^1.1.1"
     postcss-value-parser "^3.3.0"
@ -6075,11 +5970,6 @@ regenerate@^1.4.0:
resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11" resolved "https://registry.yarnpkg.com/regenerate/-/regenerate-1.4.0.tgz#4a856ec4b56e4077c557589cae85e7a4c8869a11"
integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg== integrity sha512-1G6jJVDWrt0rK99kBjvEtziZNCICAuvIPkSiUFIQxVP06RCVpq3dmDo2oi6ABpYaDYaTRr67BEhL8r1wgEZZKg==
regenerator-runtime@^0.11.0:
version "0.11.1"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.11.1.tgz#be05ad7f9bf7d22e056f9726cee5017fbf19e2e9"
integrity sha512-MguG95oij0fC3QV3URf4V2SDYGJhJnJGqvIIgdECeODCT98wSWDAJ94SSuVpYQUoTcGUIL6L4yNB7j1DFFHSBg==
regenerator-runtime@^0.13.2: regenerator-runtime@^0.13.2:
version "0.13.3" version "0.13.3"
resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5" resolved "https://registry.yarnpkg.com/regenerator-runtime/-/regenerator-runtime-0.13.3.tgz#7cf6a77d8f5c6f60eb73c5fc1955b2ceb01e6bf5"
@ -6120,9 +6010,9 @@ regexpu-core@^4.6.0:
unicode-match-property-value-ecmascript "^1.1.0" unicode-match-property-value-ecmascript "^1.1.0"
regjsgen@^0.5.0: regjsgen@^0.5.0:
version "0.5.0" version "0.5.1"
resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.0.tgz#a7634dc08f89209c2049adda3525711fb97265dd" resolved "https://registry.yarnpkg.com/regjsgen/-/regjsgen-0.5.1.tgz#48f0bf1a5ea205196929c0d9798b42d1ed98443c"
integrity sha512-RnIrLhrXCX5ow/E5/Mh2O4e/oa1/jW0eaBKTSy3LaCj+M3Bqvm97GWDp2yUtzIs4LEn65zR2yiYGFqb2ApnzDA== integrity sha512-5qxzGZjDs9w4tzT3TPhCJqWdCc3RLYwy9J2NB0nm5Lz+S273lvWcpjaTGHsT1dc6Hhfq41uSEOw8wBmxrKOuyg==
regjsparser@^0.6.0: regjsparser@^0.6.0:
version "0.6.0" version "0.6.0"
@ -6333,12 +6223,12 @@ select@^1.1.2:
resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d" resolved "https://registry.yarnpkg.com/select/-/select-1.1.2.tgz#0e7350acdec80b1108528786ec1d4418d11b396d"
integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0= integrity sha1-DnNQrN7ICxEIUoeG7B1EGNEbOW0=
selfsigned@^1.10.6: selfsigned@^1.10.7:
version "1.10.6" version "1.10.7"
resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.6.tgz#7b3cd37ed9c2034261a173af1a1aae27d8169b67" resolved "https://registry.yarnpkg.com/selfsigned/-/selfsigned-1.10.7.tgz#da5819fd049d5574f28e88a9bcc6dbc6e6f3906b"
integrity sha512-i3+CeqxL7DpAazgVpAGdKMwHuL63B5nhJMh9NQ7xmChGkA3jNFflq6Jyo1LLJYcr3idWiNOPWHCrm4zMayLG4w== integrity sha512-8M3wBCzeWIJnQfl43IKwOmC4H/RAp50S8DF60znzjW5GVqTcSe2vWclt7hmYVPkKPlHWOu5EaWOMZ2Y6W8ZXTA==
dependencies: dependencies:
node-forge "0.8.2" node-forge "0.9.0"
semver@^5.1.0, semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0: semver@^5.1.0, semver@^5.3.0, semver@^5.4.1, semver@^5.5.0, semver@^5.5.1, semver@^5.6.0:
version "5.7.1" version "5.7.1"
@ -6469,6 +6359,11 @@ slash@^2.0.0:
resolved "https://registry.yarnpkg.com/slash/-/slash-2.0.0.tgz#de552851a1759df3a8f206535442f5ec4ddeab44" resolved "https://registry.yarnpkg.com/slash/-/slash-2.0.0.tgz#de552851a1759df3a8f206535442f5ec4ddeab44"
integrity sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A== integrity sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==
smoothscroll-polyfill@^0.4.3:
version "0.4.4"
resolved "https://registry.yarnpkg.com/smoothscroll-polyfill/-/smoothscroll-polyfill-0.4.4.tgz#3a259131dc6930e6ca80003e1cb03b603b69abf8"
integrity sha512-TK5ZA9U5RqCwMpfoMq/l1mrH0JAR7y7KRvOBx0n2869aLxch+gT9GhN3yUfjiw+d/DiF1mKo14+hd62JyMmoBg==
snapdragon-node@^2.0.1: snapdragon-node@^2.0.1:
version "2.1.1" version "2.1.1"
resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b" resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b"
@ -6543,9 +6438,9 @@ source-map-resolve@^0.5.0, source-map-resolve@^0.5.2:
urix "^0.1.0" urix "^0.1.0"
source-map-support@~0.5.12: source-map-support@~0.5.12:
version "0.5.13" version "0.5.16"
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932" resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042"
integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w== integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==
dependencies: dependencies:
buffer-from "^1.0.0" buffer-from "^1.0.0"
source-map "^0.6.0" source-map "^0.6.0"
@ -6560,7 +6455,7 @@ source-map@0.5.6:
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412"
integrity sha1-dc449SvwczxafwwRjYEzSiu19BI= integrity sha1-dc449SvwczxafwwRjYEzSiu19BI=
source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6: source-map@^0.5.0, source-map@^0.5.6:
version "0.5.7" version "0.5.7"
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc" resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w= integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=
@ -6725,12 +6620,7 @@ string-width@^3.0.0, string-width@^3.1.0:
is-fullwidth-code-point "^2.0.0" is-fullwidth-code-point "^2.0.0"
strip-ansi "^5.1.0" strip-ansi "^5.1.0"
string.prototype.repeat@^0.2.0: string.prototype.trimleft@^2.1.0:
version "0.2.0"
resolved "https://registry.yarnpkg.com/string.prototype.repeat/-/string.prototype.repeat-0.2.0.tgz#aba36de08dcee6a5a337d49b2ea1da1b28fc0ecf"
integrity sha1-q6Nt4I3O5qWjN9SbLqHaGyj8Ds8=
string.prototype.trimleft@^2.0.0:
version "2.1.0" version "2.1.0"
resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634" resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634"
integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw== integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw==
@ -6738,7 +6628,7 @@ string.prototype.trimleft@^2.0.0:
define-properties "^1.1.3" define-properties "^1.1.3"
function-bind "^1.1.1" function-bind "^1.1.1"
string.prototype.trimright@^2.0.0: string.prototype.trimright@^2.1.0:
version "2.1.0" version "2.1.0"
resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58" resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58"
integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg== integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg==
@ -6786,14 +6676,6 @@ strip-bom-string@^1.0.0:
resolved "https://registry.yarnpkg.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92" resolved "https://registry.yarnpkg.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92"
integrity sha1-5SEekiQ2n7uB1jOi8ABE3IztrZI= integrity sha1-5SEekiQ2n7uB1jOi8ABE3IztrZI=
strip-comments@^1.0.2:
version "1.0.2"
resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-1.0.2.tgz#82b9c45e7f05873bee53f37168af930aa368679d"
integrity sha512-kL97alc47hoyIQSV165tTt9rG5dn4w1dNnBhOQ3bOU1Nc1hel09jnXANaHJ7vzHLd4Ju8kseDGzlev96pghLFw==
dependencies:
babel-extract-comments "^1.0.0"
babel-plugin-transform-object-rest-spread "^6.26.0"
strip-eof@^1.0.0: strip-eof@^1.0.0:
version "1.0.0" version "1.0.0"
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf" resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
@ -6841,13 +6723,6 @@ supports-color@^2.0.0:
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc= integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=
supports-color@^3.2.3:
version "3.2.3"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6"
integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=
dependencies:
has-flag "^1.0.0"
supports-color@^5.3.0, supports-color@^5.4.0: supports-color@^5.3.0, supports-color@^5.4.0:
version "5.5.0" version "5.5.0"
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f" resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
@ -6868,16 +6743,16 @@ svg-tags@^1.0.0:
integrity sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q= integrity sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q=
svgo@^1.0.0: svgo@^1.0.0:
version "1.3.0" version "1.3.2"
resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.0.tgz#bae51ba95ded9a33a36b7c46ce9c359ae9154313" resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167"
integrity sha512-MLfUA6O+qauLDbym+mMZgtXCGRfIxyQoeH6IKVcFslyODEe/ElJNwr0FohQ3xG4C6HK6bk3KYPPXwHVJk3V5NQ== integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==
dependencies: dependencies:
chalk "^2.4.1" chalk "^2.4.1"
coa "^2.0.2" coa "^2.0.2"
css-select "^2.0.0" css-select "^2.0.0"
css-select-base-adapter "^0.1.1" css-select-base-adapter "^0.1.1"
css-tree "1.0.0-alpha.33" css-tree "1.0.0-alpha.37"
csso "^3.5.1" csso "^4.0.2"
js-yaml "^3.13.1" js-yaml "^3.13.1"
mkdirp "~0.5.1" mkdirp "~0.5.1"
object.values "^1.1.0" object.values "^1.1.0"
@ -6886,31 +6761,10 @@ svgo@^1.0.0:
unquote "~1.1.1" unquote "~1.1.1"
util.promisify "~1.0.0" util.promisify "~1.0.0"
tailwindcss@^0.7.4: tailwindcss@^1.0.6, tailwindcss@^1.1.2:
version "0.7.4" version "1.1.3"
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-0.7.4.tgz#fb7926821d42eacdc12e6621a49d21f37a3ff9e9" resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-1.1.3.tgz#ad154f78e1e44060e32e3ed44b27287c2be126a6"
integrity sha512-+GeQjHRJ2VmeLkrNwMCbPDfm2cc5P8eoc7n+DtZfI8oQdlo5eSHqsIlPEuZOtoqQlIALsd2jAggWrUUBFGP2ow== integrity sha512-8sa/QO+blnu3WXUylsgvYZlUbBpVH36QeGuZxgSGqp1dF3g4AGe1azt8FsO8i8Hfe9Oyvwhx3iSjRDak3nngeQ==
dependencies:
autoprefixer "^9.4.5"
bytes "^3.0.0"
chalk "^2.4.1"
css.escape "^1.5.1"
fs-extra "^4.0.2"
lodash "^4.17.5"
node-emoji "^1.8.1"
perfectionist "^2.4.0"
postcss "^7.0.11"
postcss-functions "^3.0.0"
postcss-js "^2.0.0"
postcss-nested "^4.1.1"
postcss-selector-parser "^5.0.0"
pretty-hrtime "^1.0.3"
strip-comments "^1.0.2"
tailwindcss@^1.0.6:
version "1.1.2"
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-1.1.2.tgz#0107dc092c3edee6132b105d896b109c0f66afd6"
integrity sha512-mcTzZHXMipnQY9haB17baNJmBTkYYcC8ljfMdB9/97FfhKJIzlglJcyGythuQTOu7r/QIbLfZYYWZhAvaGj95A==
dependencies: dependencies:
autoprefixer "^9.4.5" autoprefixer "^9.4.5"
bytes "^3.0.0" bytes "^3.0.0"
@ -6961,9 +6815,9 @@ terser-webpack-plugin@^1.4.1:
worker-farm "^1.7.0" worker-farm "^1.7.0"
terser@^4.1.2: terser@^4.1.2:
version "4.3.4" version "4.3.9"
resolved "https://registry.yarnpkg.com/terser/-/terser-4.3.4.tgz#ad91bade95619e3434685d69efa621a5af5f877d" resolved "https://registry.yarnpkg.com/terser/-/terser-4.3.9.tgz#e4be37f80553d02645668727777687dad26bbca8"
integrity sha512-Kcrn3RiW8NtHBP0ssOAzwa2MsIRQ8lJWiBG/K7JgqPlomA3mtb2DEmp4/hrUA+Jujx+WZ02zqd7GYD+QRBB/2Q== integrity sha512-NFGMpHjlzmyOtPL+fDw3G7+6Ueh/sz4mkaUYa4lJCxOPTNzd0Uj0aZJOmsDYoSQyfuVoWDMSWTPU3huyOm2zdA==
dependencies: dependencies:
commander "^2.20.0" commander "^2.20.0"
source-map "~0.6.1" source-map "~0.6.1"
@ -6988,9 +6842,9 @@ through@~2.3.4:
integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU= integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=
thunky@^1.0.2: thunky@^1.0.2:
version "1.0.3" version "1.1.0"
resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.0.3.tgz#f5df732453407b09191dae73e2a8cc73f381a826" resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d"
integrity sha512-YwT8pjmNcAXBZqrubu22P4FYsh2D4dxRmnWBOL8Jk8bUcRUtc5326kx32tuTmFDAZtLOGEVNl8POAR8j896Iow== integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==
timers-browserify@^2.0.4: timers-browserify@^2.0.4:
version "2.0.11" version "2.0.11"
@ -7328,7 +7182,7 @@ vue-hot-reload-api@^2.3.0:
resolved "https://registry.yarnpkg.com/vue-hot-reload-api/-/vue-hot-reload-api-2.3.4.tgz#532955cc1eb208a3d990b3a9f9a70574657e08f2" resolved "https://registry.yarnpkg.com/vue-hot-reload-api/-/vue-hot-reload-api-2.3.4.tgz#532955cc1eb208a3d990b3a9f9a70574657e08f2"
integrity sha512-BXq3jwIagosjgNVae6tkHzzIk6a8MHFtzAdwhnV5VlvPTFxDCvIttgSiHWjdGoTJvXtmRu5HacExfdarRcFhog== integrity sha512-BXq3jwIagosjgNVae6tkHzzIk6a8MHFtzAdwhnV5VlvPTFxDCvIttgSiHWjdGoTJvXtmRu5HacExfdarRcFhog==
vue-loader@^15.2.4: vue-loader@^15.7.1:
version "15.7.1" version "15.7.1"
resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.7.1.tgz#6ccacd4122aa80f69baaac08ff295a62e3aefcfd" resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.7.1.tgz#6ccacd4122aa80f69baaac08ff295a62e3aefcfd"
integrity sha512-fwIKtA23Pl/rqfYP5TSGK7gkEuLhoTvRYW+TU7ER3q9GpNLt/PjG5NLv3XHRDiTg7OPM1JcckBgds+VnAc+HbA== integrity sha512-fwIKtA23Pl/rqfYP5TSGK7gkEuLhoTvRYW+TU7ER3q9GpNLt/PjG5NLv3XHRDiTg7OPM1JcckBgds+VnAc+HbA==
@ -7339,12 +7193,12 @@ vue-loader@^15.2.4:
vue-hot-reload-api "^2.3.0" vue-hot-reload-api "^2.3.0"
vue-style-loader "^4.1.0" vue-style-loader "^4.1.0"
vue-router@^3.0.2: vue-router@^3.1.3:
version "3.1.3" version "3.1.3"
resolved "https://registry.yarnpkg.com/vue-router/-/vue-router-3.1.3.tgz#e6b14fabc0c0ee9fda0e2cbbda74b350e28e412b" resolved "https://registry.yarnpkg.com/vue-router/-/vue-router-3.1.3.tgz#e6b14fabc0c0ee9fda0e2cbbda74b350e28e412b"
integrity sha512-8iSa4mGNXBjyuSZFCCO4fiKfvzqk+mhL0lnKuGcQtO1eoj8nq3CmbEG8FwK5QqoqwDgsjsf1GDuisDX4cdb/aQ== integrity sha512-8iSa4mGNXBjyuSZFCCO4fiKfvzqk+mhL0lnKuGcQtO1eoj8nq3CmbEG8FwK5QqoqwDgsjsf1GDuisDX4cdb/aQ==
vue-server-renderer@^2.5.16: vue-server-renderer@^2.6.10:
version "2.6.10" version "2.6.10"
resolved "https://registry.yarnpkg.com/vue-server-renderer/-/vue-server-renderer-2.6.10.tgz#cb2558842ead360ae2ec1f3719b75564a805b375" resolved "https://registry.yarnpkg.com/vue-server-renderer/-/vue-server-renderer-2.6.10.tgz#cb2558842ead360ae2ec1f3719b75564a805b375"
integrity sha512-UYoCEutBpKzL2fKCwx8zlRtRtwxbPZXKTqbl2iIF4yRZUNO/ovrHyDAJDljft0kd+K0tZhN53XRHkgvCZoIhug== integrity sha512-UYoCEutBpKzL2fKCwx8zlRtRtwxbPZXKTqbl2iIF4yRZUNO/ovrHyDAJDljft0kd+K0tZhN53XRHkgvCZoIhug==
@ -7366,7 +7220,7 @@ vue-style-loader@^4.1.0:
hash-sum "^1.0.2" hash-sum "^1.0.2"
loader-utils "^1.0.2" loader-utils "^1.0.2"
vue-template-compiler@^2.5.16: vue-template-compiler@^2.6.10:
version "2.6.10" version "2.6.10"
resolved "https://registry.yarnpkg.com/vue-template-compiler/-/vue-template-compiler-2.6.10.tgz#323b4f3495f04faa3503337a82f5d6507799c9cc" resolved "https://registry.yarnpkg.com/vue-template-compiler/-/vue-template-compiler-2.6.10.tgz#323b4f3495f04faa3503337a82f5d6507799c9cc"
integrity sha512-jVZkw4/I/HT5ZMvRnhv78okGusqe0+qH2A0Em0Cp8aq78+NK9TII263CDVz2QXZsIT+yyV/gZc/j/vlwa+Epyg== integrity sha512-jVZkw4/I/HT5ZMvRnhv78okGusqe0+qH2A0Em0Cp8aq78+NK9TII263CDVz2QXZsIT+yyV/gZc/j/vlwa+Epyg==
@ -7379,7 +7233,7 @@ vue-template-es2015-compiler@^1.9.0:
resolved "https://registry.yarnpkg.com/vue-template-es2015-compiler/-/vue-template-es2015-compiler-1.9.1.tgz#1ee3bc9a16ecbf5118be334bb15f9c46f82f5825" resolved "https://registry.yarnpkg.com/vue-template-es2015-compiler/-/vue-template-es2015-compiler-1.9.1.tgz#1ee3bc9a16ecbf5118be334bb15f9c46f82f5825"
integrity sha512-4gDntzrifFnCEvyoO8PqyJDmguXgVPxKiIxrBKjIowvL9l+N66196+72XVYR8BBf1Uv1Fgt3bGevJ+sEmxfZzw== integrity sha512-4gDntzrifFnCEvyoO8PqyJDmguXgVPxKiIxrBKjIowvL9l+N66196+72XVYR8BBf1Uv1Fgt3bGevJ+sEmxfZzw==
vue@^2.5.16: vue@^2.6.10:
version "2.6.10" version "2.6.10"
resolved "https://registry.yarnpkg.com/vue/-/vue-2.6.10.tgz#a72b1a42a4d82a721ea438d1b6bf55e66195c637" resolved "https://registry.yarnpkg.com/vue/-/vue-2.6.10.tgz#a72b1a42a4d82a721ea438d1b6bf55e66195c637"
integrity sha512-ImThpeNU9HbdZL3utgMCq0oiMzAkt1mcgy3/E6zWC/G6AaQoeuFdsl9nDhTDU3X1R6FK7nsIUuRACVcjI+A2GQ== integrity sha512-ImThpeNU9HbdZL3utgMCq0oiMzAkt1mcgy3/E6zWC/G6AaQoeuFdsl9nDhTDU3X1R6FK7nsIUuRACVcjI+A2GQ==
@ -7397,20 +7251,27 @@ vuepress-html-webpack-plugin@^3.2.0:
toposort "^1.0.0" toposort "^1.0.0"
util.promisify "1.0.0" util.promisify "1.0.0"
vuepress-plugin-container@^2.0.0: vuepress-plugin-container@^2.0.2:
version "2.0.2" version "2.1.0"
resolved "https://registry.yarnpkg.com/vuepress-plugin-container/-/vuepress-plugin-container-2.0.2.tgz#3489cc732c7a210b31f202556e1346125dffeb73" resolved "https://registry.yarnpkg.com/vuepress-plugin-container/-/vuepress-plugin-container-2.1.0.tgz#eb2ba3e01cdac419bd678d40e05c934caffe6db0"
integrity sha512-SrGYYT7lkie7xlIlAVhn+9sDW42MytNCoxWL/2uDr+q9wZA4h1uYlQvfc2DVjy+FsM9PPPSslkeo/zCpYVY82g== integrity sha512-i4p7S1cqYUrg/3pt+xSghZtKSHVI3VXMQNept8ILxA+lMK1XJkdRkjNovZzwpXlrErQssvrUOTWBV0hdBv7eXQ==
dependencies: dependencies:
markdown-it-container "^2.0.0" markdown-it-container "^2.0.0"
vuepress@^1.0.0: vuepress-plugin-smooth-scroll@^0.0.3:
version "1.1.0" version "0.0.3"
resolved "https://registry.yarnpkg.com/vuepress/-/vuepress-1.1.0.tgz#ca0d787d93188b2fd05820a650d7e3643c9e7675" resolved "https://registry.yarnpkg.com/vuepress-plugin-smooth-scroll/-/vuepress-plugin-smooth-scroll-0.0.3.tgz#6eff2d4c186cca917cc9f7df2b0af7de7c8c6438"
integrity sha512-LAgS9nXsmvjTuCc/LHPWnIsPOuVuZtxh1MjVZf/xJ3Yy5kXoPhqbGUptlQdQt3izjIlns9zin5K6MNBY3u5l5g== integrity sha512-qsQkDftLVFLe8BiviIHaLV0Ea38YLZKKonDGsNQy1IE0wllFpFIEldWD8frWZtDFdx6b/O3KDMgVQ0qp5NjJCg==
dependencies: dependencies:
"@vuepress/core" "^1.1.0" smoothscroll-polyfill "^0.4.3"
"@vuepress/theme-default" "^1.1.0"
vuepress@^1.0.0:
version "1.2.0"
resolved "https://registry.yarnpkg.com/vuepress/-/vuepress-1.2.0.tgz#2f2cdf337ad40a3e4866dfd33e97b840db386af7"
integrity sha512-EfHo8Cc73qo+1Pm18hM0qOGynmDr8q5fu2664obynsdCJ1zpvoShVnA0Msraw4SI2xDc0iAoIb3dTwxUIM8DAw==
dependencies:
"@vuepress/core" "^1.2.0"
"@vuepress/theme-default" "^1.2.0"
cac "^6.3.9" cac "^6.3.9"
envinfo "^7.2.0" envinfo "^7.2.0"
opencollective-postinstall "^2.0.2" opencollective-postinstall "^2.0.2"
@ -7449,7 +7310,7 @@ webpack-dev-middleware@3.6.0:
range-parser "^1.0.3" range-parser "^1.0.3"
webpack-log "^2.0.0" webpack-log "^2.0.0"
webpack-dev-middleware@^3.7.1: webpack-dev-middleware@^3.7.2:
version "3.7.2" version "3.7.2"
resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3" resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3"
integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw== integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw==
@ -7461,9 +7322,9 @@ webpack-dev-middleware@^3.7.1:
webpack-log "^2.0.0" webpack-log "^2.0.0"
webpack-dev-server@^3.5.1: webpack-dev-server@^3.5.1:
version "3.8.1" version "3.9.0"
resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.8.1.tgz#485b64c4aadc23f601e72114b40c1b1fea31d9f1" resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c"
integrity sha512-9F5DnfFA9bsrhpUCAfQic/AXBVHvq+3gQS+x6Zj0yc1fVVE0erKh2MV4IV12TBewuTrYeeTIRwCH9qLMvdNvTw== integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw==
dependencies: dependencies:
ansi-html "0.0.7" ansi-html "0.0.7"
bonjour "^3.5.0" bonjour "^3.5.0"
@ -7474,18 +7335,18 @@ webpack-dev-server@^3.5.1:
del "^4.1.1" del "^4.1.1"
express "^4.17.1" express "^4.17.1"
html-entities "^1.2.1" html-entities "^1.2.1"
http-proxy-middleware "^0.19.1" http-proxy-middleware "0.19.1"
import-local "^2.0.0" import-local "^2.0.0"
internal-ip "^4.3.0" internal-ip "^4.3.0"
ip "^1.1.5" ip "^1.1.5"
is-absolute-url "^3.0.2" is-absolute-url "^3.0.3"
killable "^1.0.1" killable "^1.0.1"
loglevel "^1.6.4" loglevel "^1.6.4"
opn "^5.5.0" opn "^5.5.0"
p-retry "^3.0.1" p-retry "^3.0.1"
portfinder "^1.0.24" portfinder "^1.0.25"
schema-utils "^1.0.0" schema-utils "^1.0.0"
selfsigned "^1.10.6" selfsigned "^1.10.7"
semver "^6.3.0" semver "^6.3.0"
serve-index "^1.9.1" serve-index "^1.9.1"
sockjs "0.3.19" sockjs "0.3.19"
@ -7494,7 +7355,7 @@ webpack-dev-server@^3.5.1:
strip-ansi "^3.0.1" strip-ansi "^3.0.1"
supports-color "^6.1.0" supports-color "^6.1.0"
url "^0.11.0" url "^0.11.0"
webpack-dev-middleware "^3.7.1" webpack-dev-middleware "^3.7.2"
webpack-log "^2.0.0" webpack-log "^2.0.0"
ws "^6.2.1" ws "^6.2.1"
yargs "12.0.5" yargs "12.0.5"
@ -7523,9 +7384,9 @@ webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1:
source-map "~0.6.1" source-map "~0.6.1"
webpack@^4.8.1: webpack@^4.8.1:
version "4.41.0" version "4.41.2"
resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.0.tgz#db6a254bde671769f7c14e90a1a55e73602fc70b" resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e"
integrity sha512-yNV98U4r7wX1VJAj5kyMsu36T8RPPQntcb5fJLOsMz/pt/WrKC0Vp1bAlqPLkA1LegSwQwf6P+kAbyhRKVQ72g== integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A==
dependencies: dependencies:
"@webassemblyjs/ast" "1.8.5" "@webassemblyjs/ast" "1.8.5"
"@webassemblyjs/helper-module-context" "1.8.5" "@webassemblyjs/helper-module-context" "1.8.5"
@ -7632,11 +7493,6 @@ wrappy@1:
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f" resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8= integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
write-file-stdout@0.0.2:
version "0.0.2"
resolved "https://registry.yarnpkg.com/write-file-stdout/-/write-file-stdout-0.0.2.tgz#c252d7c7c5b1b402897630e3453c7bfe690d9ca1"
integrity sha1-wlLXx8WxtAKJdjDjRTx7/mkNnKE=
ws@^6.2.1: ws@^6.2.1:
version "6.2.1" version "6.2.1"
resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb" resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb"
@ -7660,9 +7516,9 @@ yallist@^2.1.2:
integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI= integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=
yallist@^3.0.0, yallist@^3.0.2, yallist@^3.0.3: yallist@^3.0.0, yallist@^3.0.2, yallist@^3.0.3:
version "3.1.0" version "3.1.1"
resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.0.tgz#906cc2100972dc2625ae78f566a2577230a1d6f7" resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
integrity sha512-6gpP93MR+VOOehKbCPchro3wFZNSNmek8A2kbkOAZLIZAYx1KP/zAqwO0sOHi3xJEb+UBz8NaYt/17UNit1Q9w== integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
yargs-parser@^11.1.1: yargs-parser@^11.1.1:
version "11.1.1" version "11.1.1"
@ -7672,10 +7528,10 @@ yargs-parser@^11.1.1:
camelcase "^5.0.0" camelcase "^5.0.0"
decamelize "^1.2.0" decamelize "^1.2.0"
yargs-parser@^13.1.1: yargs-parser@^15.0.0:
version "13.1.1" version "15.0.0"
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0" resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-15.0.0.tgz#cdd7a97490ec836195f59f3f4dbe5ea9e8f75f08"
integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ== integrity sha512-xLTUnCMc4JhxrPEPUYD5IBR1mWCK/aT6+RJ/K29JY2y1vD+FhtgKK0AXRWvI262q3QSffAQuTouFIKUuHX89wQ==
dependencies: dependencies:
camelcase "^5.0.0" camelcase "^5.0.0"
decamelize "^1.2.0" decamelize "^1.2.0"
@ -7699,9 +7555,9 @@ yargs@12.0.5:
yargs-parser "^11.1.1" yargs-parser "^11.1.1"
yargs@^14.0.0: yargs@^14.0.0:
version "14.0.0" version "14.2.0"
resolved "https://registry.yarnpkg.com/yargs/-/yargs-14.0.0.tgz#ba4cacc802b3c0b3e36a9e791723763d57a85066" resolved "https://registry.yarnpkg.com/yargs/-/yargs-14.2.0.tgz#f116a9242c4ed8668790b40759b4906c276e76c3"
integrity sha512-ssa5JuRjMeZEUjg7bEL99AwpitxU/zWGAGpdj0di41pOEmJti8NR6kyUIJBkR78DTYNPZOU08luUo0GTHuB+ow== integrity sha512-/is78VKbKs70bVZH7w4YaZea6xcJWOAwkhbR0CFuZBmYtfTYF0xjGJF43AYd8g2Uii1yJwmS5GR2vBmrc32sbg==
dependencies: dependencies:
cliui "^5.0.0" cliui "^5.0.0"
decamelize "^1.2.0" decamelize "^1.2.0"
@ -7713,7 +7569,7 @@ yargs@^14.0.0:
string-width "^3.0.0" string-width "^3.0.0"
which-module "^2.0.0" which-module "^2.0.0"
y18n "^4.0.0" y18n "^4.0.0"
yargs-parser "^13.1.1" yargs-parser "^15.0.0"
zepto@^1.2.0: zepto@^1.2.0:
version "1.2.0" version "1.2.0"

Gemfile.lock

@@ -0,0 +1,273 @@
GIT
  remote: https://github.com/stympy/faker.git
  revision: 4e9144825fcc9ba5c83cc0fd037779ab82f3120b
  branch: master
  specs:
    faker (2.6.0)
      i18n (>= 1.6, < 1.8)

GEM
  remote: https://rubygems.org/
  specs:
    actioncable (6.0.0)
      actionpack (= 6.0.0)
      nio4r (~> 2.0)
      websocket-driver (>= 0.6.1)
    actionmailbox (6.0.0)
      actionpack (= 6.0.0)
      activejob (= 6.0.0)
      activerecord (= 6.0.0)
      activestorage (= 6.0.0)
      activesupport (= 6.0.0)
      mail (>= 2.7.1)
    actionmailer (6.0.0)
      actionpack (= 6.0.0)
      actionview (= 6.0.0)
      activejob (= 6.0.0)
      mail (~> 2.5, >= 2.5.4)
      rails-dom-testing (~> 2.0)
    actionpack (6.0.0)
      actionview (= 6.0.0)
      activesupport (= 6.0.0)
      rack (~> 2.0)
      rack-test (>= 0.6.3)
      rails-dom-testing (~> 2.0)
      rails-html-sanitizer (~> 1.0, >= 1.2.0)
    actiontext (6.0.0)
      actionpack (= 6.0.0)
      activerecord (= 6.0.0)
      activestorage (= 6.0.0)
      activesupport (= 6.0.0)
      nokogiri (>= 1.8.5)
    actionview (6.0.0)
      activesupport (= 6.0.0)
      builder (~> 3.1)
      erubi (~> 1.4)
      rails-dom-testing (~> 2.0)
      rails-html-sanitizer (~> 1.1, >= 1.2.0)
    activejob (6.0.0)
      activesupport (= 6.0.0)
      globalid (>= 0.3.6)
    activemodel (6.0.0)
      activesupport (= 6.0.0)
    activerecord (6.0.0)
      activemodel (= 6.0.0)
      activesupport (= 6.0.0)
    activestorage (6.0.0)
      actionpack (= 6.0.0)
      activejob (= 6.0.0)
      activerecord (= 6.0.0)
      marcel (~> 0.3.1)
    activesupport (6.0.0)
      concurrent-ruby (~> 1.0, >= 1.0.2)
      i18n (>= 0.7, < 2)
      minitest (~> 5.1)
      tzinfo (~> 1.1)
      zeitwerk (~> 2.1, >= 2.1.8)
    addressable (2.7.0)
      public_suffix (>= 2.0.2, < 5.0)
    archive-zip (0.12.0)
      io-like (~> 0.3.0)
    bcrypt (3.1.13)
    bindex (0.8.1)
    bootsnap (1.4.5)
      msgpack (~> 1.0)
    builder (3.2.3)
    byebug (11.0.1)
    capybara (3.29.0)
      addressable
      mini_mime (>= 0.1.3)
      nokogiri (~> 1.8)
      rack (>= 1.6.0)
      rack-test (>= 0.6.3)
      regexp_parser (~> 1.5)
      xpath (~> 3.2)
    childprocess (3.0.0)
    chromedriver-helper (2.1.1)
      archive-zip (~> 0.10)
      nokogiri (~> 1.8)
    coffee-rails (4.2.2)
      coffee-script (>= 2.2.0)
      railties (>= 4.0.0)
    coffee-script (2.4.1)
      coffee-script-source
      execjs
    coffee-script-source (1.12.2)
    concurrent-ruby (1.1.5)
    crass (1.0.4)
    devise (4.7.1)
      bcrypt (~> 3.0)
      orm_adapter (~> 0.1)
      railties (>= 4.1.0)
      responders
      warden (~> 1.2.3)
    erubi (1.9.0)
    execjs (2.7.0)
    ffi (1.11.1)
    globalid (0.4.2)
      activesupport (>= 4.2.0)
    i18n (1.7.0)
      concurrent-ruby (~> 1.0)
    io-like (0.3.0)
    jbuilder (2.9.1)
      activesupport (>= 4.2.0)
    listen (3.1.5)
      rb-fsevent (~> 0.9, >= 0.9.4)
      rb-inotify (~> 0.9, >= 0.9.7)
      ruby_dep (~> 1.2)
    loofah (2.3.0)
      crass (~> 1.0.2)
      nokogiri (>= 1.5.9)
    mail (2.7.1)
      mini_mime (>= 0.1.1)
    marcel (0.3.3)
      mimemagic (~> 0.3.2)
    method_source (0.9.2)
    mimemagic (0.3.3)
    mini_mime (1.0.2)
    mini_portile2 (2.4.0)
    minitest (5.12.2)
    msgpack (1.3.1)
    nio4r (2.5.2)
    nokogiri (1.10.4)
      mini_portile2 (~> 2.4.0)
    orm_adapter (0.5.0)
    pg (1.1.4)
    public_suffix (4.0.1)
    puma (3.12.1)
    rack (2.0.7)
    rack-test (1.1.0)
      rack (>= 1.0, < 3)
    rails (6.0.0)
      actioncable (= 6.0.0)
      actionmailbox (= 6.0.0)
      actionmailer (= 6.0.0)
      actionpack (= 6.0.0)
      actiontext (= 6.0.0)
      actionview (= 6.0.0)
      activejob (= 6.0.0)
      activemodel (= 6.0.0)
      activerecord (= 6.0.0)
      activestorage (= 6.0.0)
      activesupport (= 6.0.0)
      bundler (>= 1.3.0)
      railties (= 6.0.0)
      sprockets-rails (>= 2.0.0)
    rails-dom-testing (2.0.3)
      activesupport (>= 4.2.0)
      nokogiri (>= 1.6)
    rails-html-sanitizer (1.3.0)
      loofah (~> 2.3)
    railties (6.0.0)
      actionpack (= 6.0.0)
      activesupport (= 6.0.0)
      method_source
      rake (>= 0.8.7)
      thor (>= 0.20.3, < 2.0)
    rake (13.0.0)
    rb-fsevent (0.10.3)
    rb-inotify (0.10.0)
      ffi (~> 1.0)
    redis (4.1.3)
    redis-actionpack (5.1.0)
      actionpack (>= 4.0, < 7)
      redis-rack (>= 1, < 3)
      redis-store (>= 1.1.0, < 2)
    redis-activesupport (5.2.0)
      activesupport (>= 3, < 7)
      redis-store (>= 1.3, < 2)
    redis-rack (2.0.6)
      rack (>= 1.5, < 3)
      redis-store (>= 1.2, < 2)
    redis-rails (5.0.2)
      redis-actionpack (>= 5.0, < 6)
      redis-activesupport (>= 5.0, < 6)
      redis-store (>= 1.2, < 2)
    redis-store (1.8.0)
      redis (>= 4, < 5)
    regexp_parser (1.6.0)
    responders (3.0.0)
      actionpack (>= 5.0)
      railties (>= 5.0)
    ruby_dep (1.5.0)
    rubyzip (2.0.0)
    sass (3.7.4)
      sass-listen (~> 4.0.0)
    sass-listen (4.0.0)
      rb-fsevent (~> 0.9, >= 0.9.4)
      rb-inotify (~> 0.9, >= 0.9.7)
    sass-rails (5.1.0)
      railties (>= 5.2.0)
      sass (~> 3.1)
      sprockets (>= 2.8, < 4.0)
      sprockets-rails (>= 2.0, < 4.0)
      tilt (>= 1.1, < 3)
    selenium-webdriver (3.142.6)
      childprocess (>= 0.5, < 4.0)
      rubyzip (>= 1.2.2)
    spring (2.1.0)
    spring-watcher-listen (2.0.1)
      listen (>= 2.7, < 4.0)
      spring (>= 1.2, < 3.0)
    sprockets (3.7.2)
      concurrent-ruby (~> 1.0)
      rack (> 1, < 3)
    sprockets-rails (3.2.1)
      actionpack (>= 4.0)
      activesupport (>= 4.0)
      sprockets (>= 3.0.0)
    thor (0.20.3)
    thread_safe (0.3.6)
    tilt (2.0.10)
    turbolinks (5.2.1)
      turbolinks-source (~> 5.2)
    turbolinks-source (5.2.0)
    tzinfo (1.2.5)
      thread_safe (~> 0.1)
    uglifier (4.2.0)
      execjs (>= 0.3.0, < 3)
    warden (1.2.8)
      rack (>= 2.0.6)
    web-console (4.0.1)
      actionview (>= 6.0.0)
      activemodel (>= 6.0.0)
      bindex (>= 0.4.0)
      railties (>= 6.0.0)
    websocket-driver (0.7.1)
      websocket-extensions (>= 0.1.0)
    websocket-extensions (0.1.4)
    xpath (3.2.0)
      nokogiri (~> 1.8)
    zeitwerk (2.2.0)

PLATFORMS
  ruby

DEPENDENCIES
  bootsnap (>= 1.1.0)
  byebug
  capybara (>= 2.15)
  chromedriver-helper
  coffee-rails (~> 4.2)
  devise
  faker!
  jbuilder (~> 2.5)
  listen (>= 3.0.5, < 3.2)
  pg (>= 0.18, < 2.0)
  puma (~> 3.11)
  rails (~> 6.0.0.rc1)
  redis-rails
  sass-rails (~> 5.0)
  selenium-webdriver
  spring
  spring-watcher-listen (~> 2.0.0)
  turbolinks (~> 5)
  tzinfo-data
  uglifier (>= 1.3.0)
  web-console (>= 3.3.0)

RUBY VERSION
   ruby 2.5.7p206

BUNDLED WITH
   1.17.3

11
go.mod

@@ -3,7 +3,6 @@ module github.com/dosco/super-graph
 require (
 	github.com/GeertJohan/go.rice v1.0.0
 	github.com/Masterminds/semver v1.5.0
-	github.com/OneOfOne/xxhash v1.2.5 // indirect
 	github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
 	github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
 	github.com/brianvoe/gofakeit v3.18.0+incompatible
@@ -16,27 +15,21 @@ require (
 	github.com/garyburd/redigo v1.6.0
 	github.com/go-sourcemap/sourcemap v2.1.2+incompatible // indirect
 	github.com/gobuffalo/flect v0.1.6
-	github.com/gorilla/websocket v1.4.1
-	github.com/jackc/fake v0.0.0-20150926172116-812a484cc733 // indirect
 	github.com/jackc/pgconn v1.0.1
 	github.com/jackc/pgtype v1.0.1
-	github.com/jackc/pgx v3.6.0+incompatible
 	github.com/jackc/pgx/v4 v4.0.1
-	github.com/jackc/tern v1.8.2
 	github.com/magiconair/properties v1.8.1 // indirect
 	github.com/pelletier/go-toml v1.4.0 // indirect
 	github.com/pkg/errors v0.8.1
 	github.com/rs/zerolog v1.15.0
-	github.com/spaolacci/murmur3 v1.1.0 // indirect
 	github.com/spf13/afero v1.2.2 // indirect
 	github.com/spf13/cobra v0.0.5
-	github.com/spf13/jwalterweatherman v1.1.0 // indirect
 	github.com/spf13/pflag v1.0.5 // indirect
 	github.com/spf13/viper v1.4.0
 	github.com/valyala/fasttemplate v1.0.1
 	golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad
-	golang.org/x/sys v0.0.0-20190927073244-c990c680b611 // indirect
-	gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127
+	golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9 // indirect
+	gopkg.in/yaml.v2 v2.2.7 // indirect
 )

 go 1.13

43
go.sum

@@ -1,34 +1,28 @@
 cloud.google.com/go v0.26.0/go.mod h1:aQUYkXzVsufM+DwF1aE+0xfcU+56JwCaLick0ClmMTw=
 github.com/BurntSushi/toml v0.3.1 h1:WXkYYl6Yr3qBf1K79EBnL4mak0OimBfB0XUf9Vl28OQ=
 github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU=
+github.com/GeertJohan/go.incremental v1.0.0 h1:7AH+pY1XUgQE4Y1HcXYaMqAI0m9yrFqo/jt0CW30vsg=
 github.com/GeertJohan/go.incremental v1.0.0/go.mod h1:6fAjUhbVuX1KcMD3c8TEgVUqmo4seqhv0i0kdATSkM0=
 github.com/GeertJohan/go.rice v1.0.0 h1:KkI6O9uMaQU3VEKaj01ulavtF7o1fWT7+pk/4voiMLQ=
 github.com/GeertJohan/go.rice v1.0.0/go.mod h1:eH6gbSOAUv07dQuZVnBmoDP8mgsM1rtixis4Tib9if0=
-github.com/Masterminds/semver v1.4.2 h1:WBLTQ37jOCzSLtXNdoo8bNM8876KhNqOKvrlGITgsTc=
-github.com/Masterminds/semver v1.4.2/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
 github.com/Masterminds/semver v1.5.0 h1:H65muMkzWKEuNDnfl9d70GUjFniHKHRbFPGBuZ3QEww=
 github.com/Masterminds/semver v1.5.0/go.mod h1:MB6lktGJrhw8PrUyiEoblNEGEQ+RzHPF078ddwwvV3Y=
 github.com/OneOfOne/xxhash v1.2.2/go.mod h1:HSdplMjZKSmBqAxg5vPj2TmRDmfkzw+cTzAElWljhcU=
-github.com/OneOfOne/xxhash v1.2.5 h1:zl/OfRA6nftbBK9qTohYBJ5xvw6C/oNKizR7cZGl3cI=
-github.com/OneOfOne/xxhash v1.2.5/go.mod h1:eZbhyaAYD41SGSSsnmcpxVoRiQ/MPUTjUdIIOT9Um7Q=
 github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3 h1:+qz9Ga6l6lKw6fgvk5RMV5HQznSLvI8Zxajwdj4FhFg=
 github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3/go.mod h1:FlkD11RtgMTYjVuBnb7cxoHmQGqvPpCsr2atC88nl/M=
+github.com/akavel/rsrc v0.8.0 h1:zjWn7ukO9Kc5Q62DOJCcxGpXC18RawVtYAGdz2aLlfw=
 github.com/akavel/rsrc v0.8.0/go.mod h1:uLoCtb9J+EyAqh+26kdrTgmzRBFPGOolLWKpdxkKq+c=
 github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
 github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
 github.com/armon/consul-api v0.0.0-20180202201655-eb2c6b5be1b6/go.mod h1:grANhF5doyWs3UAsr3K4I6qtAmlQcZDesFNEHPZAzj8=
 github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24TaqPxmxbtue+5NUziq4I4S80YR8gNf3Q=
 github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
-github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737 h1:rRISKWyXfVxvoa702s91Zl5oREZTrR3yv+tXrrX7G/g=
-github.com/bradfitz/gomemcache v0.0.0-20180710155616-bc664df96737/go.mod h1:PmM6Mmwb0LSuEubjR8N7PtNe1KxZLtOUHtbeikc5h60=
 github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pOvUGt1D1lA5KjYhPBAN/3iWdP7xeFS9F0=
 github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
 github.com/brianvoe/gofakeit v3.18.0+incompatible h1:wDOmHc9DLG4nRjUVVaxA+CEglKOW72Y5+4WNxUIkjM8=
 github.com/brianvoe/gofakeit v3.18.0+incompatible/go.mod h1:kfwdRA90vvNhPutZWfH7WPaDzUjz+CZFqG+rPkOjGOc=
 github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
 github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
-github.com/cespare/xxhash/v2 v2.0.0 h1:Eb1IiuHmi3FhT12NKfqCQXSXRqc4NTMvgJoREemrSt4=
-github.com/cespare/xxhash/v2 v2.0.0/go.mod h1:MaMeaVDXZNmTpkOyhVs3/WfjgobkbQgfrVnrr3DyZL0=
 github.com/cespare/xxhash/v2 v2.1.0 h1:yTUvW7Vhb89inJ+8irsUqiWjh8iT6sQPZiQzI6ReGkA=
 github.com/cespare/xxhash/v2 v2.1.0/go.mod h1:dgIUBU3pDso/gPgZ1osOZ0iQf77oPR28Tjxl5dIMyVM=
 github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
@@ -41,6 +35,7 @@ github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3Ee
 github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
 github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
 github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
+github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
 github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
 github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
 github.com/daaku/go.zipexe v1.0.0 h1:VSOgZtH418pH9L16hC/JrgSNJbbAL26pj7lmD1+CGdY=
@@ -68,8 +63,6 @@ github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V
 github.com/go-sourcemap/sourcemap v2.1.2+incompatible h1:0b/xya7BKGhXuqFESKM4oIiRo9WOt2ebz7KxfreD6ug=
 github.com/go-sourcemap/sourcemap v2.1.2+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
 github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
-github.com/gobuffalo/flect v0.1.1 h1:GTZJjJufv9FxgRs1+0Soo3wj+Md3kTUmTER/YE4uINA=
-github.com/gobuffalo/flect v0.1.1/go.mod h1:8JCgGVbRjJhVgD6399mQr4fx5rRfGKVzFjbj6RE/9UI=
 github.com/gobuffalo/flect v0.1.6 h1:D7KWNRFiCknJKA495/e1BO7oxqf8tbieaLv/ehoZ/+g=
 github.com/gobuffalo/flect v0.1.6/go.mod h1:W3K3X9ksuZfir8f/LrfVtWmCDQFfayuylOJ7sz/Fj80=
 github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE=
@@ -85,8 +78,6 @@ github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ
 github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
 github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
 github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
-github.com/gorilla/websocket v1.4.1 h1:q7AeDBpnBk8AogcD4DSag/Ukw/KV+YhzLj2bP5HvKCM=
-github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
 github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
 github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
 github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
@@ -98,8 +89,6 @@ github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZb
 github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
 github.com/jackc/chunkreader/v2 v2.0.0 h1:DUwgMQuuPnS0rhMXenUtZpqZqrR/30NWY+qQvTpSvEs=
 github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
-github.com/jackc/fake v0.0.0-20150926172116-812a484cc733 h1:vr3AYkKovP8uR8AvSGGUK1IDqRa5lAAvEkZG1LKaCRc=
-github.com/jackc/fake v0.0.0-20150926172116-812a484cc733/go.mod h1:WrMFNQdiFJ80sQsxDoMokWK1W5TQtxBFNpzWTD84ibQ=
 github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA=
 github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE=
 github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s=
@@ -124,9 +113,6 @@ github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCM
 github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
 github.com/jackc/pgtype v1.0.1 h1:7GWB9n3DdnO3TIbj59wMAE9QcHPL4cy/Bbtk5P1Noow=
 github.com/jackc/pgtype v1.0.1/go.mod h1:5m2OfMh1wTK7x+Fk952IDmI4nw3nPrvtQdM0ZT4WpC0=
-github.com/jackc/pgx v0.0.0-20180217033919-55ca9db5d578/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
-github.com/jackc/pgx v3.6.0+incompatible h1:bJeo4JdVbDAW8KB2m8XkFeo8CPipREoG37BwEoKGz+Q=
-github.com/jackc/pgx v3.6.0+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
 github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
 github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
 github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
@@ -136,8 +122,7 @@ github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0f
 github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
 github.com/jackc/puddle v1.0.0 h1:rbjAshlgKscNa7j0jAM0uNQflis5o2XUogPMVAwtcsM=
 github.com/jackc/puddle v1.0.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
-github.com/jackc/tern v1.8.2 h1:+d9eK83fRS0dbf6nt+2tjILYF4FKG1O5xTFB8Lzc66U=
-github.com/jackc/tern v1.8.2/go.mod h1:AMppp2oyCT6rYnJHLLMmPWwahfFvdIVi6mr9gH81Nxs=
+github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
 github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
 github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
 github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
@@ -161,23 +146,25 @@ github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czP
 github.com/magiconair/properties v1.8.1 h1:ZC2Vc7/ZFkGmsVC9KvOjumD+G5lXy2RtTKyzRKO2BQ4=
 github.com/magiconair/properties v1.8.1/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
 github.com/mattn/go-colorable v0.1.1/go.mod h1:FuOcm+DKB9mbwrcAfNl7/TZVBZ6rcnceauSikq3lYCQ=
+github.com/mattn/go-colorable v0.1.2 h1:/bC9yWikZXAL9uJdulbSfyVNIR3n3trXl+v8+1sx8mU=
 github.com/mattn/go-colorable v0.1.2/go.mod h1:U0ppj6V5qS13XJ6of8GYAs25YV2eR4EVcfRqFIhoBtE=
 github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
 github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
 github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
+github.com/mattn/go-isatty v0.0.9 h1:d5US/mDsogSGW37IV293h//ZFaeajb69h+EHFsv2xGg=
 github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
 github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
 github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
 github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
 github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
 github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
+github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229 h1:E2B8qYyeSgv5MXpmzZXRNp8IAQ4vjxIjhpAf5hv/tAg=
 github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
 github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
 github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
 github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
 github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
 github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
-github.com/pkg/errors v0.0.0-20171018195549-f15c970de5b7/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
 github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
 github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
@@ -197,6 +184,7 @@ github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
 github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
 github.com/rs/zerolog v1.15.0 h1:uPRuwkWF4J6fGsJ2R0Gn2jB1EQiav9k3S6CSdygQJXY=
 github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
+github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
 github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
 github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
 github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE=
@@ -206,22 +194,16 @@ github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMB
 github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
 github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
 github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
-github.com/spaolacci/murmur3 v1.1.0 h1:7c1g84S4BPRrfL5Xrdp6fOJ206sU9y293DDHaoy0bLI=
-github.com/spaolacci/murmur3 v1.1.0/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
 github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=
 github.com/spf13/afero v1.1.2/go.mod h1:j4pytiNVoe2o6bmDsKpLACNPDBIoEAkihy7loJ1B0CQ=
 github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=
 github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
 github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
 github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
-github.com/spf13/cobra v0.0.0-20160114030619-9c9300901990/go.mod h1:1l0Ry5zgKvJasoi3XT1TypsSe7PqH0Sj9dhYf7v3XqQ=
 github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s=
 github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
 github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
 github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
-github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
-github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
-github.com/spf13/pflag v0.0.0-20151218134703-7f60f83a2c81/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
 github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
 github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
@@ -245,8 +227,6 @@ github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6Kllzaw
 github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
 github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8=
 github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
-github.com/vaughan0/go-ini v0.0.0-20130923145212-a98ad7ee00ec h1:DGmKwyZwEB8dI7tbLt/I/gQuP559o/0FrAkHKlQM/Ks=
-github.com/vaughan0/go-ini v0.0.0-20130923145212-a98ad7ee00ec/go.mod h1:owBmyHYMLkxyrugmfwE/DLJyW8Ro9mkphwuVErQ0iUw=
 github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
 github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
 github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
@@ -256,7 +236,6 @@ go.uber.org/atomic v1.4.0/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
 go.uber.org/multierr v1.1.0/go.mod h1:wR5kodmAFQ0UK8QlbwjlSNy0Z68gJhDJUG5sjR94q/0=
 go.uber.org/zap v1.9.1/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
 go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
-golang.org/x/crypto v0.0.0-20151201002508-7b85b097bf75/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
 golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9 h1:mKdxBk7AujPs8kU4m80U72y/zjbZ3UcXC7dClwKbUI0=
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4= golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
@ -301,8 +280,8 @@ golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7w
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg= golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg=
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/sys v0.0.0-20190927073244-c990c680b611 h1:q9u40nxWT5zRClI/uU9dHCiYGottAg6Nzz4YUQyHxdA= golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9 h1:ZBzSG/7F4eNKz2L3GE9o300RX0Az1Bw5HF7PDraD+qU=
golang.org/x/sys v0.0.0-20190927073244-c990c680b611/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs= golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg= golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs= golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
@ -333,4 +312,6 @@ gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bl
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw= gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4= honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=

jsn/filter.go

@ -97,8 +97,7 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
	case state == expectNumClose &&
		((b[i] < '0' || b[i] > '9') &&
			(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
-		i--
-		e = i
+		e = i - 1

	case state == expectValue &&
		(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):
@ -140,7 +139,7 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
	}

	if sk > 0 && sk < len(cb) {
-		_, err = w.Write(cb[sk:len(cb)])
+		_, err = w.Write(cb[sk:])
	} else {
		_, err = w.Write(cb)
	}
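This scanner fix recurs in jsn/get.go, jsn/keys.go, jsn/replace.go and jsn/strip.go below: the end of a number token is now recorded directly as e = i - 1 instead of decrementing the loop counter and then reading it back (i--; e = i), so the counter is never rewound mid-scan. A minimal standalone sketch of the new form (illustrative only, not repo code):

	package main

	import "fmt"

	func main() {
		b := []byte(`123,`) // a number terminated by a non-numeric byte
		e := 0
		for i := 0; i < len(b); i++ {
			if b[i] < '0' || b[i] > '9' {
				e = i - 1 // index of the last digit; previously written as i--; e = i
				break
			}
		}
		fmt.Println(e) // prints 2
	}

The cb[sk:len(cb)] to cb[sk:] change in the second hunk is purely cosmetic: the two slice expressions are equivalent, and the short form is the idiomatic spelling.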

jsn/fuzz.go

@ -1,35 +1,11 @@
+// +build gofuzz
+
package jsn

-import "bytes"
-
-// FuzzerEntrypoint for Fuzzbuzz
-func FuzzerEntryPoint(data []byte) int {
-	err1 := Validate(string(data))
-
-	var b1 bytes.Buffer
-	err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})
-
-	path1 := [][]byte{[]byte("data"), []byte("users")}
-	Strip(data, path1)
-
-	from := []Field{
-		{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
-		{[]byte("__twitter_id"), []byte(`"ABC123"`)},
-	}
-
-	to := []Field{
-		{[]byte("__twitter_id"), []byte(`"1234567890"`)},
-		{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
-	}
-
-	var b2 bytes.Buffer
-	err3 := Replace(&b2, data, from, to)
-
-	Keys(data)
-
-	if err1 != nil || err2 != nil || err3 != nil {
-		return -1
-	}
-
-	return 0
+func Fuzz(data []byte) int {
+	if err := unifiedTest(data); err != nil {
+		return 0
+	}
+
+	return 1
}
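The rewritten harness follows the dvyukov/go-fuzz conventions: the gofuzz build tag keeps Fuzz out of regular builds, and the return value steers the fuzzer, 1 marking an input that parsed cleanly and is worth mutating further, 0 anything else. The shared logic now lives in unifiedTest (see jsn/test.go below), so the fuzz entry point and the TestFuzzCrashers regression test exercise exactly the same code path. A typical invocation, assuming the standard go-fuzz tooling (exact commands and flags may vary by version):

	go get -u github.com/dvyukov/go-fuzz/go-fuzz github.com/dvyukov/go-fuzz/go-fuzz-build
	cd jsn && go-fuzz-build && go-fuzz -bin=jsn-fuzz.zip -workdir=fuzz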

jsn/fuzz_test.go Normal file

@ -0,0 +1,60 @@
package jsn
import (
"testing"
)
func TestFuzzCrashers(t *testing.T) {
var crashers = []string{
"00\"0000\"0{",
"6\",\n\t\t\t\"something\": " +
"null\n\t\t},\n\t\t{\n\t\t\t\"id" +
"\": 12,\n\t\t\t\"full_name" +
"\": \"Brenton Bauch Ph" +
"D\",\n\t\t\t\"email\": \"ren" +
"ee@miller.co\",\n\t\t\t\"_" +
"_twitter_id\": 1\n\t\t}," +
"\n\t\t{\n\t\t\t\"id\": 13,\n\t\t" +
"\t\"full_name\": \"Daine" +
" Gleichner\",\n\t\t\t\"ema" +
"il\": \"andrea@gmail.c" +
"om\",\n\t\t\t\"__twitter_i" +
"d\": \"\",\n\t\t\t\"id__twit" +
"ter_id\": \"NOOO\",\n\t\t\t" +
"\"work_email\": \"andre" +
"a@nienow.co\"\n\t\t}\n\t]}" +
"\n\t}",
"0000\"0000\"0{",
"0000\"\"{",
"0000\"000\"{",
"0\"\"{",
"\"0\"{",
"000\"0\"{",
"0\"0000\"0{",
"000\"\"{",
"0\"00\"{",
"000\"0000\"0{",
"000\"00\"{",
"\"\"{",
"0\"0000\"{",
"\"000\"00{",
"0000\"00\"{",
"00\"0\"{",
"0\"0\"{",
"000\"0000\"{",
"00\"0000\"{",
"0000\"0000\"{",
"\"000\"{",
"00\"00\"{",
"00\"0000\"00{",
"0\"0000\"00{",
"00\"\"{",
"0000\"0\"{",
"000\"000\"{",
"\"00000000\"{",
}
for _, f := range crashers {
_ = unifiedTest([]byte(f))
}
}

jsn/get.go

@ -105,8 +105,7 @@ func Get(b []byte, keys [][]byte) []Field {
	case state == expectNumClose &&
		((b[i] < '0' || b[i] > '9') &&
			(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
-		i--
-		e = i
+		e = i - 1

	case state == expectValue &&
		(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):

jsn/get_test.go

@ -191,11 +191,11 @@ func TestGet(t *testing.T) {
	}

	for i := range expected {
-		if bytes.Equal(values[i].Key, expected[i].Key) == false {
+		if !bytes.Equal(values[i].Key, expected[i].Key) {
			t.Error(string(values[i].Key), " != ", string(expected[i].Key))
		}
-		if bytes.Equal(values[i].Value, expected[i].Value) == false {
+		if !bytes.Equal(values[i].Value, expected[i].Value) {
			t.Error(string(values[i].Value), " != ", string(expected[i].Value))
		}
	}
@ -225,7 +225,10 @@ func TestValue(t *testing.T) {

func TestFilter1(t *testing.T) {
	var b bytes.Buffer
-	Filter(&b, []byte(input2), []string{"id", "full_name", "embed"})
+	err := Filter(&b, []byte(input2), []string{"id", "full_name", "embed"})
+	if err != nil {
+		t.Error(err)
+	}

	expected := `[{"id": 1,"full_name": "Sidney Stroman","embed": {"id": 8,"full_name": "Caroll Orn Sr.","email": "joannarau@hegmann.io","__twitter_id": "ABC123"}},{"id": 2,"full_name": "Jerry Dickinson"}]`
@ -238,7 +241,10 @@ func TestFilter2(t *testing.T) {
	value := `[{"id":1,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":100,"amount_refunded":0,"date":"01/01/2019","application":null,"billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}, {"id":2,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":0,"date":"02/18/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}},{"id":3,"customer_id":"cus_2TbMGf3cl0","object":"charge","amount":150,"amount_refunded":50,"date":"03/21/2019","billing_details":{"address":"1 Infinity Drive","zipcode":"94024"}}]`

	var b bytes.Buffer
-	Filter(&b, []byte(value), []string{"id"})
+	err := Filter(&b, []byte(value), []string{"id"})
+	if err != nil {
+		t.Error(err)
+	}

	expected := `[{"id":1},{"id":2},{"id":3}]`
@ -253,7 +259,7 @@ func TestStrip(t *testing.T) {
	expected := []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)

-	if bytes.Equal(value1, expected) == false {
+	if !bytes.Equal(value1, expected) {
		t.Log(value1)
		t.Error("[Valid path] Does not match expected json")
	}
@ -261,7 +267,7 @@ func TestStrip(t *testing.T) {
	path2 := [][]byte{[]byte("boo"), []byte("hoo")}
	value2 := Strip([]byte(input3), path2)

-	if bytes.Equal(value2, []byte(input3)) == false {
+	if !bytes.Equal(value2, []byte(input3)) {
		t.Log(value2)
		t.Error("[Invalid path] Does not match expected json")
	}
@ -374,10 +380,6 @@ func TestKeys2(t *testing.T) {
		"id", "posts", "title", "description", "full_name", "email", "books", "name", "description",
	}

-	// for i := range fields {
-	// 	fmt.Println("-->", string(fields[i]))
-	// }
-
	if len(exp) != len(fields) {
		t.Errorf("Expected %d fields %d", len(exp), len(fields))
	}

jsn/keys.go

@ -12,6 +12,7 @@ func Keys(b []byte) [][]byte {
	ae := 0

	for i := 0; i < len(b); i++ {
+		if state == expectObjClose || state == expectListClose {
		switch b[i] {
		case '{', '[':
@ -88,8 +89,7 @@ func Keys(b []byte) [][]byte {
	case state == expectNumClose &&
		((b[i] < '0' || b[i] > '9') &&
			(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
-		i--
-		e = i
+		e = i - 1

	case state == expectValue &&
		(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):

jsn/replace.go

@ -16,8 +16,12 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
	tmap := make(map[uint64]int, len(from))

	for i, f := range from {
-		h.Write(f.Key)
-		h.Write(f.Value)
+		if _, err := h.Write(f.Key); err != nil {
+			return err
+		}
+		if _, err := h.Write(f.Value); err != nil {
+			return err
+		}

		tmap[h.Sum64()] = i
		h.Reset()
@ -50,7 +54,9 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
	case state == expectKeyClose && b[i] == '"':
		state = expectColon
-		h.Write(b[(s + 1):i])
+		if _, err := h.Write(b[(s + 1):i]); err != nil {
+			return err
+		}
		we = s

	case state == expectColon && b[i] == ':':
@ -86,8 +92,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
	case state == expectNumClose &&
		((b[i] < '0' || b[i] > '9') &&
			(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
-		i--
-		e = i
+		e = i - 1

	case state == expectValue &&
		(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):
@ -107,7 +112,9 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
	if e != 0 {
		e++

-		h.Write(b[s:e])
+		if _, err := h.Write(b[s:e]); err != nil {
+			return err
+		}

		n, ok := tmap[h.Sum64()]
		h.Reset()
@ -156,7 +163,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
	if ws == -1 || (ws == 0 && we == len(b)) {
		w.Write(b)
-	} else {
+	} else if ws < we {
		w.Write(b[ws:we])
	}

jsn/strip.go

@ -70,8 +70,7 @@ func Strip(b []byte, path [][]byte) []byte {
	case state == expectNumClose &&
		((b[i] < '0' || b[i] > '9') &&
			(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
-		i--
-		e = i
+		e = i - 1

	case state == expectValue &&
		(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):

jsn/test.go Normal file

@ -0,0 +1,37 @@
package jsn
import (
"bytes"
"errors"
)
func unifiedTest(data []byte) error {
err1 := Validate(string(data))
var b1 bytes.Buffer
err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})
path1 := [][]byte{[]byte("data"), []byte("users")}
Strip(data, path1)
from := []Field{
{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
{[]byte("__twitter_id"), []byte(`"ABC123"`)},
}
to := []Field{
{[]byte("__twitter_id"), []byte(`"1234567890"`)},
{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
}
var b2 bytes.Buffer
err3 := Replace(&b2, data, from, to)
Keys(data)
if err1 != nil || err2 != nil || err3 != nil {
return errors.New("there was an error")
}
return nil
}

jsn/json.go

@ -2,7 +2,6 @@ package jsn

import (
	"fmt"
-	"reflect"
	"strconv"
	"strings"
	"unsafe"
@ -333,15 +332,6 @@ func b2s(b []byte) string {
	return *(*string)(unsafe.Pointer(&b))
}

-func s2b(s string) []byte {
-	strh := (*reflect.StringHeader)(unsafe.Pointer(&s))
-
-	var sh reflect.SliceHeader
-	sh.Data = strh.Data
-	sh.Len = strh.Len
-	sh.Cap = strh.Len
-	return *(*[]byte)(unsafe.Pointer(&sh))
-}
-
const maxStartEndStringLen = 80

func startEndString(s string) string {

migrate/migrate.go

@ -16,7 +16,7 @@ import (
	"github.com/pkg/errors"
)

-var migrationPattern = regexp.MustCompile(`\A(\d+)_.+\.sql\z`)
+var migrationPattern = regexp.MustCompile(`\A(\d+)_[^\.]+\.sql\z`)

var ErrNoFwMigration = errors.Errorf("no sql in forward migration step")
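The tightened pattern no longer accepts a dot inside the name portion of a migration filename, so only a single .sql suffix can match. A standalone sketch of the behavioral difference (not repo code; the filenames are made up):

	package main

	import (
		"fmt"
		"regexp"
	)

	func main() {
		oldPat := regexp.MustCompile(`\A(\d+)_.+\.sql\z`)
		newPat := regexp.MustCompile(`\A(\d+)_[^\.]+\.sql\z`)

		for _, name := range []string{
			"001_create_users.sql", // old: true, new: true
			"002_add.index.sql",    // old: true, new: false (dot in the name)
			"003_seed.sql.bak",     // old: false, new: false (missing .sql suffix)
		} {
			fmt.Println(name, oldPat.MatchString(name), newPat.MatchString(name))
		}
	}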
@ -127,7 +127,7 @@ func FindMigrationsEx(path string, fs MigratorFS) ([]string, error) {
		return nil, err
	}

-	mcount := len(paths) + 100
+	mcount := len(paths)

	if n < int64(mcount) {
		return nil, fmt.Errorf("Duplicate migration %d", n)
@ -244,7 +244,6 @@ func (m *Migrator) AppendMigration(name, upSQL, downSQL string) {
		UpSQL:   upSQL,
		DownSQL: downSQL,
	})
-	return
}

// Migrate runs pending migrations
@ -315,7 +314,7 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
	if err != nil {
		return err
	}
-	defer tx.Rollback(ctx)
+	defer tx.Rollback(ctx) //nolint: errcheck

	// Fire on start callback
	if m.OnStart != nil {
@ -332,7 +331,9 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
	}

	// Reset all database connection settings. Important to do before updating version as search_path may have been changed.
-	tx.Exec(ctx, "reset all")
+	if _, err := tx.Exec(ctx, "reset all"); err != nil {
+		return err
+	}

	// Add one to the version
	_, err = tx.Exec(ctx, "update "+m.versionTable+" set version=$1", sequence)
@ -352,16 +353,14 @@ func (m *Migrator) MigrateTo(targetVersion int32) (err error) {
}

func (m *Migrator) GetCurrentVersion() (v int32, err error) {
-	ctx := context.Background()
-	err = m.conn.QueryRow(ctx, "select version from "+m.versionTable).Scan(&v)
+	err = m.conn.QueryRow(context.Background(),
+		"select version from "+m.versionTable).Scan(&v)

	return v, err
}

func (m *Migrator) ensureSchemaVersionTableExists() (err error) {
-	ctx := context.Background()
-	_, err = m.conn.Exec(ctx, fmt.Sprintf(`
+	_, err = m.conn.Exec(context.Background(), fmt.Sprintf(`
    create table if not exists %s(version int4 not null);

    insert into %s(version)

psql/insert.go (deleted)

@ -1,249 +0,0 @@
package psql
import (
"bytes"
"errors"
"fmt"
"io"
"github.com/dosco/super-graph/jsn"
"github.com/dosco/super-graph/qcode"
)
var zeroPaging = qcode.Paging{}
func (co *Compiler) compileMutation(qc *qcode.QCode, w *bytes.Buffer, vars Variables) (uint32, error) {
if len(qc.Selects) == 0 {
return 0, errors.New("empty query")
}
c := &compilerContext{w, qc.Selects, co}
root := &qc.Selects[0]
ti, err := c.schema.GetTable(root.Table)
if err != nil {
return 0, err
}
c.w.WriteString(`WITH `)
quoted(c.w, ti.Name)
c.w.WriteString(` AS `)
switch root.Action {
case qcode.ActionInsert:
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
return 0, err
}
case qcode.ActionUpdate:
if _, err := c.renderUpdate(qc, w, vars, ti); err != nil {
return 0, err
}
case qcode.ActionUpsert:
if _, err := c.renderUpsert(qc, w, vars, ti); err != nil {
return 0, err
}
case qcode.ActionDelete:
if _, err := c.renderDelete(qc, w, vars, ti); err != nil {
return 0, err
}
default:
return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
}
io.WriteString(c.w, ` RETURNING *) `)
root.Paging = zeroPaging
root.DistinctOn = root.DistinctOn[:]
root.OrderBy = root.OrderBy[:]
root.Where = nil
root.Args = nil
return c.compileQuery(qc, w)
}
func (c *compilerContext) renderInsert(qc *qcode.QCode, w *bytes.Buffer,
vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0]
insert, ok := vars[root.ActionVar]
if !ok {
return 0, fmt.Errorf("Variable '%s' not defined", root.ActionVar)
}
jt, array, err := jsn.Tree(insert)
if err != nil {
return 0, err
}
c.w.WriteString(`(WITH "input" AS (SELECT {{`)
c.w.WriteString(root.ActionVar)
c.w.WriteString(`}}::json AS j) INSERT INTO `)
quoted(c.w, ti.Name)
io.WriteString(c.w, ` (`)
c.renderInsertUpdateColumns(qc, w, jt, ti)
io.WriteString(c.w, `)`)
c.w.WriteString(` SELECT `)
c.renderInsertUpdateColumns(qc, w, jt, ti)
c.w.WriteString(` FROM input i, `)
if array {
c.w.WriteString(`json_populate_recordset`)
} else {
c.w.WriteString(`json_populate_record`)
}
c.w.WriteString(`(NULL::`)
c.w.WriteString(ti.Name)
c.w.WriteString(`, i.j) t`)
return 0, nil
}
func (c *compilerContext) renderInsertUpdateColumns(qc *qcode.QCode, w *bytes.Buffer,
jt map[string]interface{}, ti *DBTableInfo) (uint32, error) {
i := 0
for _, cn := range ti.ColumnNames {
if _, ok := jt[cn]; !ok {
continue
}
if i != 0 {
io.WriteString(c.w, `, `)
}
c.w.WriteString(cn)
i++
}
return 0, nil
}
func (c *compilerContext) renderUpdate(qc *qcode.QCode, w *bytes.Buffer,
vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0]
update, ok := vars[root.ActionVar]
if !ok {
return 0, fmt.Errorf("Variable '%s' not defined", root.ActionVar)
}
jt, array, err := jsn.Tree(update)
if err != nil {
return 0, err
}
c.w.WriteString(`(WITH "input" AS (SELECT {{`)
c.w.WriteString(root.ActionVar)
c.w.WriteString(`}}::json AS j) UPDATE `)
quoted(c.w, ti.Name)
io.WriteString(c.w, ` SET (`)
c.renderInsertUpdateColumns(qc, w, jt, ti)
c.w.WriteString(`) = (SELECT `)
c.renderInsertUpdateColumns(qc, w, jt, ti)
c.w.WriteString(` FROM input i, `)
if array {
c.w.WriteString(`json_populate_recordset`)
} else {
c.w.WriteString(`json_populate_record`)
}
c.w.WriteString(`(NULL::`)
c.w.WriteString(ti.Name)
c.w.WriteString(`, i.j) t)`)
io.WriteString(c.w, ` WHERE `)
if err := c.renderWhere(root, ti); err != nil {
return 0, err
}
return 0, nil
}
func (c *compilerContext) renderDelete(qc *qcode.QCode, w *bytes.Buffer,
vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0]
c.w.WriteString(`(DELETE FROM `)
quoted(c.w, ti.Name)
io.WriteString(c.w, ` WHERE `)
if err := c.renderWhere(root, ti); err != nil {
return 0, err
}
return 0, nil
}
func (c *compilerContext) renderUpsert(qc *qcode.QCode, w *bytes.Buffer,
vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0]
upsert, ok := vars[root.ActionVar]
if !ok {
return 0, fmt.Errorf("Variable '%s' not defined", root.ActionVar)
}
jt, _, err := jsn.Tree(upsert)
if err != nil {
return 0, err
}
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
return 0, err
}
c.w.WriteString(` ON CONFLICT DO (`)
i := 0
for _, cn := range ti.ColumnNames {
if _, ok := jt[cn]; !ok {
continue
}
if col, ok := ti.Columns[cn]; !ok || !(col.UniqueKey || col.PrimaryKey) {
continue
}
if i != 0 {
io.WriteString(c.w, `, `)
}
c.w.WriteString(cn)
i++
}
if i == 0 {
c.w.WriteString(ti.PrimaryCol)
}
c.w.WriteString(`) DO `)
c.w.WriteString(`UPDATE `)
io.WriteString(c.w, ` SET `)
i = 0
for _, cn := range ti.ColumnNames {
if _, ok := jt[cn]; !ok {
continue
}
if i != 0 {
io.WriteString(c.w, `, `)
}
c.w.WriteString(cn)
io.WriteString(c.w, ` = EXCLUDED.`)
c.w.WriteString(cn)
i++
}
return 0, nil
}
func quoted(w *bytes.Buffer, identifier string) {
w.WriteString(`"`)
w.WriteString(identifier)
w.WriteString(`"`)
}

psql/insert_test.go (deleted)

@ -1,184 +0,0 @@
package psql
import (
"encoding/json"
"testing"
)
func simpleInsert(t *testing.T) {
gql := `mutation {
user(insert: $data) {
id
}
}`
sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "users" (full_name, email) SELECT full_name, email FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id" FROM "users") AS "users_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func singleInsert(t *testing.T) {
gql := `mutation {
product(id: 15, insert: $insert) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description, user_id) SELECT name, description, user_id FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc", "user_id": 5 }`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func bulkInsert(t *testing.T) {
gql := `mutation {
product(id: 15, insert: $insert) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"insert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func singleUpsert(t *testing.T) {
gql := `mutation {
product(id: 15, upsert: $upsert) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"upsert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func bulkUpsert(t *testing.T) {
gql := `mutation {
product(id: 15, upsert: $upsert) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"upsert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func singleUpdate(t *testing.T) {
gql := `mutation {
product(id: 15, update: $update, where: { id: { eq: 1 } }) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func delete(t *testing.T) {
gql := `mutation {
product(delete: true, where: { id: { eq: 1 } }) {
id
name
}
}`
sql := `WITH "products" AS (DELETE FROM "products" WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
vars := map[string]json.RawMessage{
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
}
resSQL, err := compileGQLToPSQL(gql, vars)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func TestCompileInsert(t *testing.T) {
t.Run("simpleInsert", simpleInsert)
t.Run("singleInsert", singleInsert)
t.Run("bulkInsert", bulkInsert)
t.Run("singleUpdate", singleUpdate)
t.Run("singleUpsert", singleUpsert)
t.Run("bulkUpsert", bulkUpsert)
t.Run("delete", delete)
}
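The two deleted files above are superseded by psql/mutate.go and psql/mutate_test.go below. Besides moving from *bytes.Buffer to io.Writer and adding role-aware column filtering and config-driven presets, the rewrite corrects the upsert clause: the old compiler emitted ON CONFLICT DO ("id") DO UPDATE, which PostgreSQL rejects as a syntax error, while the new one produces the valid form seen in the singleUpsert expectation, for example:

	INSERT INTO "products" (...) SELECT ... ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description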

psql/mutate.go Normal file

@ -0,0 +1,295 @@
//nolint:errcheck
package psql
import (
"errors"
"fmt"
"io"
"github.com/dosco/super-graph/jsn"
"github.com/dosco/super-graph/qcode"
)
var noLimit = qcode.Paging{NoLimit: true}
func (co *Compiler) compileMutation(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
if len(qc.Selects) == 0 {
return 0, errors.New("empty query")
}
c := &compilerContext{w, qc.Selects, co}
root := &qc.Selects[0]
ti, err := c.schema.GetTable(root.Name)
if err != nil {
return 0, err
}
io.WriteString(c.w, `WITH `)
quoted(c.w, ti.Name)
io.WriteString(c.w, ` AS `)
switch qc.Type {
case qcode.QTInsert:
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
return 0, err
}
case qcode.QTUpdate:
if _, err := c.renderUpdate(qc, w, vars, ti); err != nil {
return 0, err
}
case qcode.QTUpsert:
if _, err := c.renderUpsert(qc, w, vars, ti); err != nil {
return 0, err
}
case qcode.QTDelete:
if _, err := c.renderDelete(qc, w, vars, ti); err != nil {
return 0, err
}
default:
return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
}
io.WriteString(c.w, ` RETURNING *) `)
root.Paging = noLimit
root.DistinctOn = root.DistinctOn[:]
root.OrderBy = root.OrderBy[:]
root.Where = nil
root.Args = nil
return c.compileQuery(qc, w)
}
func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
vars Variables, ti *DBTableInfo) (uint32, error) {
insert, ok := vars[qc.ActionVar]
if !ok {
return 0, fmt.Errorf("Variable '%s' not defined", qc.ActionVar)
}
jt, array, err := jsn.Tree(insert)
if err != nil {
return 0, err
}
io.WriteString(c.w, `(WITH "input" AS (SELECT '{{`)
io.WriteString(c.w, qc.ActionVar)
io.WriteString(c.w, `}}' :: json AS j) INSERT INTO `)
quoted(c.w, ti.Name)
io.WriteString(c.w, ` (`)
c.renderInsertUpdateColumns(qc, w, jt, ti, false)
io.WriteString(c.w, `)`)
io.WriteString(c.w, ` SELECT `)
c.renderInsertUpdateColumns(qc, w, jt, ti, true)
io.WriteString(c.w, ` FROM input i, `)
if array {
io.WriteString(c.w, `json_populate_recordset`)
} else {
io.WriteString(c.w, `json_populate_record`)
}
io.WriteString(c.w, `(NULL::`)
io.WriteString(c.w, ti.Name)
io.WriteString(c.w, `, i.j) t`)
if w := qc.Selects[0].Where; w != nil && w.Op == qcode.OpFalse {
io.WriteString(c.w, ` WHERE false`)
}
return 0, nil
}
func (c *compilerContext) renderInsertUpdateColumns(qc *qcode.QCode, w io.Writer,
jt map[string]interface{}, ti *DBTableInfo, values bool) (uint32, error) {
root := &qc.Selects[0]
i := 0
for _, cn := range ti.ColumnNames {
if _, ok := jt[cn]; !ok {
continue
}
if _, ok := root.PresetMap[cn]; ok {
continue
}
if len(root.Allowed) != 0 {
if _, ok := root.Allowed[cn]; !ok {
continue
}
}
if i != 0 {
io.WriteString(c.w, `, `)
}
io.WriteString(c.w, `"`)
io.WriteString(c.w, cn)
io.WriteString(c.w, `"`)
i++
}
if i != 0 && len(root.PresetList) != 0 {
io.WriteString(c.w, `, `)
}
for i := range root.PresetList {
cn := root.PresetList[i]
col, ok := ti.Columns[cn]
if !ok {
continue
}
if i != 0 {
io.WriteString(c.w, `, `)
}
if values {
io.WriteString(c.w, `'`)
io.WriteString(c.w, root.PresetMap[cn])
io.WriteString(c.w, `' :: `)
io.WriteString(c.w, col.Type)
} else {
io.WriteString(c.w, `"`)
io.WriteString(c.w, cn)
io.WriteString(c.w, `"`)
}
}
return 0, nil
}
func (c *compilerContext) renderUpdate(qc *qcode.QCode, w io.Writer,
vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0]
update, ok := vars[qc.ActionVar]
if !ok {
return 0, fmt.Errorf("Variable '%s' not defined", qc.ActionVar)
}
jt, array, err := jsn.Tree(update)
if err != nil {
return 0, err
}
io.WriteString(c.w, `(WITH "input" AS (SELECT '{{`)
io.WriteString(c.w, qc.ActionVar)
io.WriteString(c.w, `}}' :: json AS j) UPDATE `)
quoted(c.w, ti.Name)
io.WriteString(c.w, ` SET (`)
c.renderInsertUpdateColumns(qc, w, jt, ti, false)
io.WriteString(c.w, `) = (SELECT `)
c.renderInsertUpdateColumns(qc, w, jt, ti, true)
io.WriteString(c.w, ` FROM input i, `)
if array {
io.WriteString(c.w, `json_populate_recordset`)
} else {
io.WriteString(c.w, `json_populate_record`)
}
io.WriteString(c.w, `(NULL::`)
io.WriteString(c.w, ti.Name)
io.WriteString(c.w, `, i.j) t)`)
io.WriteString(c.w, ` WHERE `)
if err := c.renderWhere(root, ti); err != nil {
return 0, err
}
return 0, nil
}
func (c *compilerContext) renderDelete(qc *qcode.QCode, w io.Writer,
vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0]
io.WriteString(c.w, `(DELETE FROM `)
quoted(c.w, ti.Name)
io.WriteString(c.w, ` WHERE `)
if err := c.renderWhere(root, ti); err != nil {
return 0, err
}
return 0, nil
}
func (c *compilerContext) renderUpsert(qc *qcode.QCode, w io.Writer,
vars Variables, ti *DBTableInfo) (uint32, error) {
root := &qc.Selects[0]
upsert, ok := vars[qc.ActionVar]
if !ok {
return 0, fmt.Errorf("Variable '%s' not defined", qc.ActionVar)
}
jt, _, err := jsn.Tree(upsert)
if err != nil {
return 0, err
}
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
return 0, err
}
io.WriteString(c.w, ` ON CONFLICT (`)
i := 0
for _, cn := range ti.ColumnNames {
if _, ok := jt[cn]; !ok {
continue
}
if col, ok := ti.Columns[cn]; !ok || !(col.UniqueKey || col.PrimaryKey) {
continue
}
if i != 0 {
io.WriteString(c.w, `, `)
}
io.WriteString(c.w, cn)
i++
}
if i == 0 {
io.WriteString(c.w, ti.PrimaryCol)
}
io.WriteString(c.w, `)`)
if root.Where != nil {
io.WriteString(c.w, ` WHERE `)
if err := c.renderWhere(root, ti); err != nil {
return 0, err
}
}
io.WriteString(c.w, ` DO UPDATE SET `)
i = 0
for _, cn := range ti.ColumnNames {
if _, ok := jt[cn]; !ok {
continue
}
if i != 0 {
io.WriteString(c.w, `, `)
}
io.WriteString(c.w, cn)
io.WriteString(c.w, ` = EXCLUDED.`)
io.WriteString(c.w, cn)
i++
}
return 0, nil
}
func quoted(w io.Writer, identifier string) {
io.WriteString(w, `"`)
io.WriteString(w, identifier)
io.WriteString(w, `"`)
}
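Note that the compiler never interpolates variable values into the SQL it emits: the '{{name}}' markers above are placeholders, which a later stage is expected to rewrite into bind parameters (or inline values) before the statement runs; that rewriting code is not part of this diff. A minimal standalone sketch of the placeholder convention (illustrative only, not repo code):

	package main

	import (
		"fmt"
		"regexp"
	)

	func main() {
		sql := `WITH "input" AS (SELECT '{{data}}' :: json AS j) SELECT 1`
		re := regexp.MustCompile(`'\{\{(\w+)\}\}'`)

		n := 0
		out := re.ReplaceAllStringFunc(sql, func(string) string {
			n++
			return fmt.Sprintf("$%d", n)
		})
		fmt.Println(out) // WITH "input" AS (SELECT $1 :: json AS j) SELECT 1
	}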

psql/mutate_test.go Normal file

@ -0,0 +1,306 @@
package psql
import (
"encoding/json"
"testing"
)
func simpleInsert(t *testing.T) {
gql := `mutation {
user(insert: $data) {
id
}
}`
sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func singleInsert(t *testing.T) {
gql := `mutation {
product(id: 15, insert: $insert) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT '{{insert}}' :: json AS j) INSERT INTO "products" ("name", "description", "user_id") SELECT "name", "description", "user_id" FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc", "user_id": 5 }`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "anon")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func bulkInsert(t *testing.T) {
gql := `mutation {
product(name: "test", id: 15, insert: $insert) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT '{{insert}}' :: json AS j) INSERT INTO "products" ("name", "description") SELECT "name", "description" FROM input i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"insert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "anon")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func singleUpsert(t *testing.T) {
gql := `mutation {
product(upsert: $upsert) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT '{{upsert}}' :: json AS j) INSERT INTO "products" ("name", "description") SELECT "name", "description" FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"upsert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func singleUpsertWhere(t *testing.T) {
gql := `mutation {
product(upsert: $upsert, where: { price : { gt: 3 } }) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT '{{upsert}}' :: json AS j) INSERT INTO "products" ("name", "description") SELECT "name", "description" FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT (id) WHERE (("products"."price") > 3) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"upsert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func bulkUpsert(t *testing.T) {
gql := `mutation {
product(upsert: $upsert) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT '{{upsert}}' :: json AS j) INSERT INTO "products" ("name", "description") SELECT "name", "description" FROM input i, json_populate_recordset(NULL::products, i.j) t ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"upsert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func singleUpdate(t *testing.T) {
gql := `mutation {
product(id: 15, update: $update, where: { id: { eq: 1 } }) {
id
name
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT '{{update}}' :: json AS j) UPDATE "products" SET ("name", "description") = (SELECT "name", "description" FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "anon")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func delete(t *testing.T) {
gql := `mutation {
product(delete: true, where: { id: { eq: 1 } }) {
id
name
}
}`
sql := `WITH "products" AS (DELETE FROM "products" WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func blockedInsert(t *testing.T) {
gql := `mutation {
user(insert: $data) {
id
}
}`
sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "bad_dude")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func blockedUpdate(t *testing.T) {
gql := `mutation {
user(where: { id: { lt: 5 } }, update: $data) {
id
email
}
}`
sql := `WITH "users" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) UPDATE "users" SET ("full_name", "email") = (SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t) WHERE false RETURNING *) SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "bad_dude")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func simpleInsertWithPresets(t *testing.T) {
gql := `mutation {
product(insert: $data) {
id
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "name", "price", 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"data": json.RawMessage(`{"name": "Tomato", "price": 5.76}`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func simpleUpdateWithPresets(t *testing.T) {
gql := `mutation {
product(update: $data) {
id
}
}`
sql := `WITH "products" AS (WITH "input" AS (SELECT '{{data}}' :: json AS j) UPDATE "products" SET ("name", "price", "updated_at") = (SELECT "name", "price", 'now' :: timestamp without time zone FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING *) SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
vars := map[string]json.RawMessage{
"data": json.RawMessage(`{"name": "Apple", "price": 1.25}`),
}
resSQL, err := compileGQLToPSQL(gql, vars, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func TestCompileMutate(t *testing.T) {
t.Run("simpleInsert", simpleInsert)
t.Run("singleInsert", singleInsert)
t.Run("bulkInsert", bulkInsert)
t.Run("singleUpdate", singleUpdate)
t.Run("singleUpsert", singleUpsert)
t.Run("singleUpsertWhere", singleUpsertWhere)
t.Run("bulkUpsert", bulkUpsert)
t.Run("delete", delete)
t.Run("blockedInsert", blockedInsert)
t.Run("blockedUpdate", blockedUpdate)
t.Run("simpleInsertWithPresets", simpleInsertWithPresets)
t.Run("simpleUpdateWithPresets", simpleUpdateWithPresets)
}

psql/psql_test.go Normal file

@ -0,0 +1,223 @@
package psql
import (
"log"
"os"
"testing"
"github.com/dosco/super-graph/qcode"
)
const (
errNotExpected = "Generated SQL did not match what was expected"
)
var (
qcompile *qcode.Compiler
pcompile *Compiler
)
func TestMain(m *testing.M) {
var err error
qcompile, err = qcode.NewCompiler(qcode.Config{
Blocklist: []string{
"secret",
"password",
"token",
},
})
if err != nil {
log.Fatal(err)
}
err = qcompile.AddRole("user", "product", qcode.TRConfig{
Query: qcode.QueryConfig{
Columns: []string{"id", "name", "price", "users", "customers"},
Filters: []string{
"{ price: { gt: 0 } }",
"{ price: { lt: 8 } }",
},
},
Insert: qcode.InsertConfig{
Presets: map[string]string{
"user_id": "$user_id",
"created_at": "now",
"updated_at": "now",
},
},
Update: qcode.UpdateConfig{
Filters: []string{"{ user_id: { eq: $user_id } }"},
Presets: map[string]string{"updated_at": "now"},
},
Delete: qcode.DeleteConfig{
Filters: []string{
"{ price: { gt: 0 } }",
"{ price: { lt: 8 } }",
},
},
})
if err != nil {
log.Fatal(err)
}
err = qcompile.AddRole("anon", "product", qcode.TRConfig{
Query: qcode.QueryConfig{
Columns: []string{"id", "name"},
},
})
if err != nil {
log.Fatal(err)
}
err = qcompile.AddRole("anon1", "product", qcode.TRConfig{
Query: qcode.QueryConfig{
Columns: []string{"id", "name", "price"},
DisableFunctions: true,
},
})
if err != nil {
log.Fatal(err)
}
err = qcompile.AddRole("user", "users", qcode.TRConfig{
Query: qcode.QueryConfig{
Columns: []string{"id", "full_name", "avatar", "email", "products"},
},
})
if err != nil {
log.Fatal(err)
}
err = qcompile.AddRole("bad_dude", "users", qcode.TRConfig{
Query: qcode.QueryConfig{
Filters: []string{"false"},
DisableFunctions: true,
},
Insert: qcode.InsertConfig{
Filters: []string{"false"},
},
Update: qcode.UpdateConfig{
Filters: []string{"false"},
},
})
if err != nil {
log.Fatal(err)
}
err = qcompile.AddRole("user", "mes", qcode.TRConfig{
Query: qcode.QueryConfig{
Columns: []string{"id", "full_name", "avatar"},
Filters: []string{
"{ id: { eq: $user_id } }",
},
},
})
if err != nil {
log.Fatal(err)
}
err = qcompile.AddRole("user", "customers", qcode.TRConfig{
Query: qcode.QueryConfig{
Columns: []string{"id", "email", "full_name", "products"},
},
})
if err != nil {
log.Fatal(err)
}
tables := []*DBTable{
&DBTable{Name: "customers", Type: "table"},
&DBTable{Name: "users", Type: "table"},
&DBTable{Name: "products", Type: "table"},
&DBTable{Name: "purchases", Type: "table"},
}
columns := [][]*DBColumn{
[]*DBColumn{
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 4, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 5, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 6, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 7, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 8, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 9, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 10, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
[]*DBColumn{
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 4, Name: "avatar", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 5, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 6, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 7, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 8, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 9, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 10, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 11, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
[]*DBColumn{
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 2, Name: "name", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 3, Name: "description", Type: "text", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 4, Name: "price", Type: "numeric(7,2)", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 5, Name: "user_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "users", FKeyColID: []int16{1}},
&DBColumn{ID: 6, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 7, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 8, Name: "tsv", Type: "tsvector", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
[]*DBColumn{
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 2, Name: "customer_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "customers", FKeyColID: []int16{1}},
&DBColumn{ID: 3, Name: "product_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "products", FKeyColID: []int16{1}},
&DBColumn{ID: 4, Name: "sale_type", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 5, Name: "quantity", Type: "integer", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 6, Name: "due_date", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 7, Name: "returned", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
}
schema := &DBSchema{
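// ver 110000 marks PostgreSQL 11, which switches the compiler onto its
// websearch_to_tsquery code paths below.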
ver: 110000,
t: make(map[string]*DBTableInfo),
rm: make(map[string]map[string]*DBRel),
al: make(map[string]struct{}),
}
aliases := map[string][]string{
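// "mes" registers a synthetic alias on users; it presumably backs the
// me { ... } selector exercised in the tests below.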
"users": []string{"mes"},
}
for i, t := range tables {
if err := schema.updateSchema(t, columns[i], aliases); err != nil {
log.Fatal(err)
}
}
vars := NewVariables(map[string]string{
"admin_account_id": "5",
})
pcompile = NewCompiler(Config{
Schema: schema,
Vars: vars,
})
os.Exit(m.Run())
}
func compileGQLToPSQL(gql string, vars Variables, role string) ([]byte, error) {
qc, err := qcompile.Compile([]byte(gql), role)
if err != nil {
return nil, err
}
_, sqlStmt, err := pcompile.CompileEx(qc, vars)
if err != nil {
return nil, err
}
//fmt.Println(string(sqlStmt))
return sqlStmt, nil
}

psql/query.go Normal file
@@ -0,0 +1,1234 @@
//nolint:errcheck
package psql
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"strings"
"github.com/dosco/super-graph/qcode"
"github.com/dosco/super-graph/util"
)
const (
empty = ""
closeBlock = 500
)
type Variables map[string]json.RawMessage
type Config struct {
Schema *DBSchema
Vars map[string]string
}
type Compiler struct {
schema *DBSchema
vars map[string]string
}
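// NewCompiler returns a Compiler bound to the schema and variable presets in conf.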
func NewCompiler(conf Config) *Compiler {
return &Compiler{conf.Schema, conf.Vars}
}
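// AddRelationship registers an additional relationship between child and parent tables in the schema.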
func (c *Compiler) AddRelationship(child, parent string, rel *DBRel) error {
return c.schema.SetRel(child, parent, rel)
}
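// IDColumn returns the primary-key column name of the given table.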
func (c *Compiler) IDColumn(table string) (string, error) {
t, err := c.schema.GetTable(table)
if err != nil {
return empty, err
}
return t.PrimaryCol, nil
}
type compilerContext struct {
w io.Writer
s []qcode.Select
*Compiler
}
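// CompileEx is Compile with an in-memory buffer; it returns the generated SQL as bytes.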
func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (uint32, []byte, error) {
w := &bytes.Buffer{}
skipped, err := co.Compile(qc, w, vars)
return skipped, w.Bytes(), err
}
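// Compile writes the SQL for qc into w, dispatching queries and the mutation
// variants to their own renderers. The returned bitset marks child selects
// that were skipped and must be resolved elsewhere.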
func (co *Compiler) Compile(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
switch qc.Type {
case qcode.QTQuery:
return co.compileQuery(qc, w)
case qcode.QTInsert, qcode.QTUpdate, qcode.QTDelete, qcode.QTUpsert:
return co.compileMutation(qc, w, vars)
}
return 0, fmt.Errorf("Unknown operation type %d", qc.Type)
}
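// compileQuery renders a read query. Multiple roots are stitched together under
// a row_to_json("json_root") wrapper; a single root uses json_object_agg.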
func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer) (uint32, error) {
if len(qc.Selects) == 0 {
return 0, errors.New("empty query")
}
c := &compilerContext{w, qc.Selects, co}
multiRoot := (len(qc.Roots) > 1)
st := NewStack()
if multiRoot {
io.WriteString(c.w, `SELECT row_to_json("json_root") FROM (SELECT `)
for i, id := range qc.Roots {
root := qc.Selects[id]
st.Push(root.ID + closeBlock)
st.Push(root.ID)
if i != 0 {
io.WriteString(c.w, `, `)
}
io.WriteString(c.w, `"sel_`)
int2string(c.w, root.ID)
io.WriteString(c.w, `"."json_`)
int2string(c.w, root.ID)
io.WriteString(c.w, `"`)
alias(c.w, root.FieldName)
}
io.WriteString(c.w, ` FROM `)
} else {
root := qc.Selects[0]
io.WriteString(c.w, `SELECT json_object_agg(`)
io.WriteString(c.w, `'`)
io.WriteString(c.w, root.FieldName)
io.WriteString(c.w, `', `)
io.WriteString(c.w, `json_`)
int2string(c.w, root.ID)
st.Push(root.ID + closeBlock)
st.Push(root.ID)
io.WriteString(c.w, `) FROM `)
}
var ignored uint32
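// Walk the select tree iteratively: every select was pushed twice, once with
// its plain ID (open the block) and once offset by closeBlock (close it),
// so children render between their parent's open and close.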
for {
if st.Len() == 0 {
break
}
id := st.Pop()
if id < closeBlock {
sel := &c.s[id]
if sel.ParentID == -1 {
io.WriteString(c.w, `(`)
}
ti, err := c.schema.GetTable(sel.Name)
if err != nil {
return 0, err
}
if sel.ParentID != -1 {
if err = c.renderLateralJoin(sel); err != nil {
return 0, err
}
}
skipped, err := c.renderSelect(sel, ti)
if err != nil {
return 0, err
}
ignored |= skipped
for _, cid := range sel.Children {
if hasBit(skipped, uint32(cid)) {
continue
}
child := &c.s[cid]
st.Push(child.ID + closeBlock)
st.Push(child.ID)
}
} else {
sel := &c.s[(id - closeBlock)]
ti, err := c.schema.GetTable(sel.Name)
if err != nil {
return 0, err
}
err = c.renderSelectClose(sel, ti)
if err != nil {
return 0, err
}
if sel.ParentID != -1 {
if err = c.renderLateralJoinClose(sel); err != nil {
return 0, err
}
} else {
io.WriteString(c.w, `)`)
aliasWithID(c.w, `sel`, sel.ID)
if st.Len() != 0 {
io.WriteString(c.w, `, `)
}
}
if len(sel.Args) != 0 {
for _, v := range sel.Args {
qcode.FreeNode(v)
}
}
}
}
if multiRoot {
io.WriteString(c.w, `) AS "json_root"`)
}
return ignored, nil
}
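// processChildren collects the parent-side columns each child select needs for
// its join and returns a bitset of children to skip (remote or unknown relationships).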
func (c *compilerContext) processChildren(sel *qcode.Select, ti *DBTableInfo) (uint32, []*qcode.Column) {
var skipped uint32
cols := make([]*qcode.Column, 0, len(sel.Cols))
colmap := make(map[string]struct{}, len(sel.Cols))
for i := range sel.Cols {
colmap[sel.Cols[i].Name] = struct{}{}
}
for _, id := range sel.Children {
child := &c.s[id]
rel, err := c.schema.GetRel(child.Name, ti.Name)
if err != nil {
skipped |= (1 << uint(id))
continue
}
switch rel.Type {
case RelOneToMany:
fallthrough
case RelBelongTo:
if _, ok := colmap[rel.Col2]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Col2, FieldName: rel.Col2})
}
case RelOneToManyThrough:
if _, ok := colmap[rel.Col1]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Col1, FieldName: rel.Col1})
}
case RelRemote:
if _, ok := colmap[rel.Col1]; !ok {
cols = append(cols, &qcode.Column{Table: ti.Name, Name: rel.Col1, FieldName: rel.Col2})
}
skipped |= (1 << uint(id))
default:
skipped |= (1 << uint(id))
}
}
return skipped, cols
}
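// renderSelect opens one select block: a coalesce(json_agg(...)) wrapper for
// plural tables, then the row_to_json projection over columns, remote-relation
// keys and joined child JSON.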
func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo) (uint32, error) {
skipped, childCols := c.processChildren(sel, ti)
hasOrder := len(sel.OrderBy) != 0
// SELECT
if !ti.Singular {
//fmt.Fprintf(w, `SELECT coalesce(json_agg("%s"`, c.sel.Name)
io.WriteString(c.w, `SELECT coalesce(json_agg("`)
io.WriteString(c.w, "json_")
int2string(c.w, sel.ID)
io.WriteString(c.w, `"`)
if hasOrder {
err := c.renderOrderBy(sel, ti)
if err != nil {
return skipped, err
}
}
//fmt.Fprintf(w, `), '[]') AS "%s" FROM (`, c.sel.Name)
io.WriteString(c.w, `), '[]')`)
aliasWithID(c.w, "json", sel.ID)
io.WriteString(c.w, ` FROM (`)
}
// ROW-TO-JSON
io.WriteString(c.w, `SELECT `)
if len(sel.DistinctOn) != 0 {
c.renderDistinctOn(sel, ti)
}
io.WriteString(c.w, `row_to_json((`)
//fmt.Fprintf(w, `SELECT "%d" FROM (SELECT `, c.sel.ID)
io.WriteString(c.w, `SELECT "json_row_`)
int2string(c.w, sel.ID)
io.WriteString(c.w, `" FROM (SELECT `)
// Combined column names
c.renderColumns(sel, ti)
c.renderRemoteRelColumns(sel, ti)
err := c.renderJoinedColumns(sel, ti, skipped)
if err != nil {
return skipped, err
}
//fmt.Fprintf(w, `) AS "%d"`, c.sel.ID)
io.WriteString(c.w, `)`)
aliasWithID(c.w, "json_row", sel.ID)
//fmt.Fprintf(w, `)) AS "%s"`, c.sel.Name)
io.WriteString(c.w, `))`)
aliasWithID(c.w, "json", sel.ID)
// END-ROW-TO-JSON
if hasOrder {
c.renderOrderByColumns(sel, ti)
}
// END-SELECT
// FROM (SELECT .... )
err = c.renderBaseSelect(sel, ti, childCols, skipped)
if err != nil {
return skipped, err
}
// END-FROM
return skipped, nil
}
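// renderSelectClose finishes a select block with ORDER BY, LIMIT and OFFSET;
// singular tables are pinned to LIMIT 1 and plural ones default to LIMIT 20.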
func (c *compilerContext) renderSelectClose(sel *qcode.Select, ti *DBTableInfo) error {
hasOrder := len(sel.OrderBy) != 0
if hasOrder {
err := c.renderOrderBy(sel, ti)
if err != nil {
return err
}
}
switch {
case ti.Singular:
io.WriteString(c.w, ` LIMIT ('1') :: integer`)
case len(sel.Paging.Limit) != 0:
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
io.WriteString(c.w, ` LIMIT ('`)
io.WriteString(c.w, sel.Paging.Limit)
io.WriteString(c.w, `') :: integer`)
case sel.Paging.NoLimit:
break
default:
io.WriteString(c.w, ` LIMIT ('20') :: integer`)
}
if len(sel.Paging.Offset) != 0 {
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
io.WriteString(c.w, ` OFFSET ('`)
io.WriteString(c.w, sel.Paging.Offset)
io.WriteString(c.w, `') :: integer`)
}
if !ti.Singular {
//fmt.Fprintf(w, `) AS "json_agg_%d"`, c.sel.ID)
io.WriteString(c.w, `)`)
aliasWithID(c.w, "json_agg", sel.ID)
}
return nil
}
func (c *compilerContext) renderLateralJoin(sel *qcode.Select) error {
io.WriteString(c.w, ` LEFT OUTER JOIN LATERAL (`)
return nil
}
func (c *compilerContext) renderLateralJoinClose(sel *qcode.Select) error {
//fmt.Fprintf(w, `) AS "%s_%d_join" ON ('true')`, c.sel.Name, c.sel.ID)
io.WriteString(c.w, `)`)
aliasWithIDSuffix(c.w, sel.Name, sel.ID, "_join")
io.WriteString(c.w, ` ON ('true')`)
return nil
}
func (c *compilerContext) renderJoin(sel *qcode.Select, ti *DBTableInfo) error {
parent := &c.s[sel.ParentID]
return c.renderJoinByName(ti.Name, parent.Name, parent.ID)
}
func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
rel, err := c.schema.GetRel(table, parent)
if err != nil {
return err
}
// This join is only required for through (join-table) relations,
// since the join table itself needs to be pulled in.
if rel.Type != RelOneToManyThrough {
return nil
}
pt, err := c.schema.GetTable(parent)
if err != nil {
return err
}
//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
//rel.Through, rel.Through, rel.ColT, c.parent.Name, c.parent.ID, rel.Col1)
io.WriteString(c.w, ` LEFT OUTER JOIN "`)
io.WriteString(c.w, rel.Through)
io.WriteString(c.w, `" ON ((`)
colWithTable(c.w, rel.Through, rel.ColT)
io.WriteString(c.w, `) = (`)
if id != -1 {
colWithTableID(c.w, pt.Name, id, rel.Col1)
} else {
colWithTable(c.w, pt.Name, rel.Col1)
}
io.WriteString(c.w, `))`)
return nil
}
func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo) {
i := 0
for _, col := range sel.Cols {
n := funcPrefixLen(col.Name)
if n != 0 {
if !sel.Functions {
continue
}
if len(sel.Allowed) != 0 {
if _, ok := sel.Allowed[col.Name[n:]]; !ok {
continue
}
}
} else {
if len(sel.Allowed) != 0 {
if _, ok := sel.Allowed[col.Name]; !ok {
continue
}
}
}
if i != 0 {
io.WriteString(c.w, ", ")
}
//fmt.Fprintf(w, `"%s_%d"."%s" AS "%s"`,
//c.sel.Name, c.sel.ID, col.Name, col.FieldName)
colWithTableIDAlias(c.w, ti.Name, sel.ID, col.Name, col.FieldName)
i++
}
}
func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableInfo) {
i := 0
for _, id := range sel.Children {
child := &c.s[id]
rel, err := c.schema.GetRel(child.Name, sel.Name)
if err != nil || rel.Type != RelRemote {
continue
}
if i != 0 || len(sel.Cols) != 0 {
io.WriteString(c.w, ", ")
}
//fmt.Fprintf(w, `"%s_%d"."%s" AS "%s"`,
//c.sel.Name, c.sel.ID, rel.Col1, rel.Col2)
colWithTableID(c.w, ti.Name, sel.ID, rel.Col1)
alias(c.w, rel.Col2)
i++
}
}
func (c *compilerContext) renderJoinedColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32) error {
colsRendered := len(sel.Cols) != 0
for _, id := range sel.Children {
skipThis := hasBit(skipped, uint32(id))
if colsRendered && !skipThis {
io.WriteString(c.w, ", ")
}
if skipThis {
continue
}
childSel := &c.s[id]
//fmt.Fprintf(w, `"%s_%d_join"."%s" AS "%s"`,
//s.Name, s.ID, s.Name, s.FieldName)
//if cti.Singular {
io.WriteString(c.w, `"`)
io.WriteString(c.w, childSel.Name)
io.WriteString(c.w, `_`)
int2string(c.w, childSel.ID)
io.WriteString(c.w, `_join"."json_`)
int2string(c.w, childSel.ID)
io.WriteString(c.w, `" AS "`)
io.WriteString(c.w, childSel.FieldName)
io.WriteString(c.w, `"`)
}
return nil
}
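// renderBaseSelect writes the innermost FROM (SELECT ...) block: real columns,
// ts_rank/ts_headline search expressions, aggregate functions and join columns,
// then the WHERE clause, GROUP BY (only when an aggregate was rendered) and paging.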
func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo,
childCols []*qcode.Column, skipped uint32) error {
var groupBy []int
isRoot := sel.ParentID == -1
isFil := (sel.Where != nil && sel.Where.Op != qcode.OpNop)
isSearch := sel.Args["search"] != nil
isAgg := false
io.WriteString(c.w, ` FROM (SELECT `)
i := 0
for n, col := range sel.Cols {
cn := col.Name
_, isRealCol := ti.Columns[cn]
if !isRealCol {
if isSearch {
switch {
case cn == "search_rank":
if len(sel.Allowed) != 0 {
if _, ok := sel.Allowed[cn]; !ok {
continue
}
}
cn = ti.TSVCol
arg := sel.Args["search"]
if i != 0 {
io.WriteString(c.w, `, `)
}
//fmt.Fprintf(w, `ts_rank("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
//c.sel.Name, cn, arg.Val, col.Name)
io.WriteString(c.w, `ts_rank(`)
colWithTable(c.w, ti.Name, cn)
if c.schema.ver >= 110000 {
io.WriteString(c.w, `, websearch_to_tsquery('`)
} else {
io.WriteString(c.w, `, to_tsquery('`)
}
io.WriteString(c.w, arg.Val)
io.WriteString(c.w, `'))`)
alias(c.w, col.Name)
i++
case strings.HasPrefix(cn, "search_headline_"):
cn1 := cn[16:]
if len(sel.Allowed) != 0 {
if _, ok := sel.Allowed[cn1]; !ok {
continue
}
}
arg := sel.Args["search"]
if i != 0 {
io.WriteString(c.w, `, `)
}
//fmt.Fprintf(w, `ts_headline("%s"."%s", websearch_to_tsquery('%s')) AS %s`,
//c.sel.Name, cn, arg.Val, col.Name)
io.WriteString(c.w, `ts_headline(`)
colWithTable(c.w, ti.Name, cn1)
if c.schema.ver >= 110000 {
io.WriteString(c.w, `, websearch_to_tsquery('`)
} else {
io.WriteString(c.w, `, to_tsquery('`)
}
io.WriteString(c.w, arg.Val)
io.WriteString(c.w, `'))`)
alias(c.w, col.Name)
i++
}
} else {
pl := funcPrefixLen(cn)
if pl == 0 {
if i != 0 {
io.WriteString(c.w, `, `)
}
//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
io.WriteString(c.w, `'`)
io.WriteString(c.w, cn)
io.WriteString(c.w, ` not defined'`)
alias(c.w, col.Name)
i++
} else if sel.Functions {
cn1 := cn[pl:]
if len(sel.Allowed) != 0 {
if _, ok := sel.Allowed[cn1]; !ok {
continue
}
}
if i != 0 {
io.WriteString(c.w, `, `)
}
fn := cn[0 : pl-1]
isAgg = true
//fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Name, cn, col.Name)
io.WriteString(c.w, fn)
io.WriteString(c.w, `(`)
colWithTable(c.w, ti.Name, cn1)
io.WriteString(c.w, `)`)
alias(c.w, col.Name)
i++
}
}
} else {
groupBy = append(groupBy, n)
//fmt.Fprintf(w, `"%s"."%s"`, c.sel.Name, cn)
if i != 0 {
io.WriteString(c.w, `, `)
}
colWithTable(c.w, ti.Name, cn)
i++
}
}
for _, col := range childCols {
if i != 0 {
io.WriteString(c.w, `, `)
}
//fmt.Fprintf(w, `"%s"."%s"`, col.Table, col.Name)
colWithTable(c.w, col.Table, col.Name)
i++
}
io.WriteString(c.w, ` FROM `)
//fmt.Fprintf(w, ` FROM "%s"`, c.sel.Name)
io.WriteString(c.w, `"`)
io.WriteString(c.w, ti.Name)
io.WriteString(c.w, `"`)
// if tn, ok := c.tmap[sel.Name]; ok {
// //fmt.Fprintf(w, ` FROM "%s" AS "%s"`, tn, c.sel.Name)
// tableWithAlias(c.w, ti.Name, sel.Name)
// } else {
// //fmt.Fprintf(w, ` FROM "%s"`, c.sel.Name)
// io.WriteString(c.w, `"`)
// io.WriteString(c.w, sel.Name)
// io.WriteString(c.w, `"`)
// }
if isRoot && isFil {
io.WriteString(c.w, ` WHERE (`)
if err := c.renderWhere(sel, ti); err != nil {
return err
}
io.WriteString(c.w, `)`)
}
if !isRoot {
if err := c.renderJoin(sel, ti); err != nil {
return err
}
io.WriteString(c.w, ` WHERE (`)
if err := c.renderRelationship(sel, ti); err != nil {
return err
}
if isFil {
io.WriteString(c.w, ` AND `)
if err := c.renderWhere(sel, ti); err != nil {
return err
}
}
io.WriteString(c.w, `)`)
}
if isAgg {
if len(groupBy) != 0 {
io.WriteString(c.w, ` GROUP BY `)
for i, id := range groupBy {
if i != 0 {
io.WriteString(c.w, `, `)
}
//fmt.Fprintf(w, `"%s"."%s"`, c.sel.Name, c.sel.Cols[id].Name)
colWithTable(c.w, ti.Name, sel.Cols[id].Name)
}
}
}
switch {
case ti.Singular:
io.WriteString(c.w, ` LIMIT ('1') :: integer`)
case len(sel.Paging.Limit) != 0:
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
io.WriteString(c.w, ` LIMIT ('`)
io.WriteString(c.w, sel.Paging.Limit)
io.WriteString(c.w, `') :: integer`)
case sel.Paging.NoLimit:
break
default:
io.WriteString(c.w, ` LIMIT ('20') :: integer`)
}
if len(sel.Paging.Offset) != 0 {
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
io.WriteString(c.w, ` OFFSET ('`)
io.WriteString(c.w, sel.Paging.Offset)
io.WriteString(c.w, `') :: integer`)
}
//fmt.Fprintf(w, `) AS "%s_%d"`, c.sel.Name, c.sel.ID)
io.WriteString(c.w, `)`)
aliasWithID(c.w, ti.Name, sel.ID)
return nil
}
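// renderOrderByColumns projects each ORDER BY column under a *_ob alias so the
// enclosing json_agg can sort on it (see renderOrderBy).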
func (c *compilerContext) renderOrderByColumns(sel *qcode.Select, ti *DBTableInfo) {
colsRendered := len(sel.Cols) != 0
for i := range sel.OrderBy {
if colsRendered {
//io.WriteString(w, ", ")
io.WriteString(c.w, `, `)
}
col := sel.OrderBy[i].Col
//fmt.Fprintf(w, `"%s_%d"."%s" AS "%s_%d_%s_ob"`,
//c.sel.Name, c.sel.ID, c,
//c.sel.Name, c.sel.ID, c)
colWithTableID(c.w, ti.Name, sel.ID, col)
io.WriteString(c.w, ` AS `)
tableIDColSuffix(c.w, sel.Name, sel.ID, col, "_ob")
}
}
func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
parent := c.s[sel.ParentID]
return c.renderRelationshipByName(ti.Name, parent.Name, parent.ID)
}
func (c *compilerContext) renderRelationshipByName(table, parent string, id int32) error {
rel, err := c.schema.GetRel(table, parent)
if err != nil {
return err
}
switch rel.Type {
case RelBelongTo:
//fmt.Fprintf(w, `(("%s"."%s") = ("%s_%d"."%s"))`,
//c.sel.Name, rel.Col1, c.parent.Name, c.parent.ID, rel.Col2)
io.WriteString(c.w, `((`)
colWithTable(c.w, table, rel.Col1)
io.WriteString(c.w, `) = (`)
if id != -1 {
colWithTableID(c.w, parent, id, rel.Col2)
} else {
colWithTable(c.w, parent, rel.Col2)
}
io.WriteString(c.w, `))`)
case RelOneToMany:
//fmt.Fprintf(w, `(("%s"."%s") = ("%s_%d"."%s"))`,
//c.sel.Name, rel.Col1, c.parent.Name, c.parent.ID, rel.Col2)
io.WriteString(c.w, `((`)
colWithTable(c.w, table, rel.Col1)
io.WriteString(c.w, `) = (`)
if id != -1 {
colWithTableID(c.w, parent, id, rel.Col2)
} else {
colWithTable(c.w, parent, rel.Col2)
}
io.WriteString(c.w, `))`)
case RelOneToManyThrough:
// This requires the through table to be joined onto this select
//fmt.Fprintf(w, `(("%s"."%s") = ("%s"."%s"))`,
//c.sel.Name, rel.Col1, rel.Through, rel.Col2)
io.WriteString(c.w, `((`)
colWithTable(c.w, table, rel.Col1)
io.WriteString(c.w, `) = (`)
colWithTable(c.w, rel.Through, rel.Col2)
io.WriteString(c.w, `))`)
}
return nil
}
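// renderWhere walks the filter expression tree with an explicit stack,
// interleaving AND/OR/NOT tokens between child expressions so no recursion
// is needed.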
func (c *compilerContext) renderWhere(sel *qcode.Select, ti *DBTableInfo) error {
st := util.NewStack()
if sel.Where != nil {
st.Push(sel.Where)
}
for {
if st.Len() == 0 {
break
}
intf := st.Pop()
switch val := intf.(type) {
case qcode.ExpOp:
switch val {
case qcode.OpAnd:
io.WriteString(c.w, ` AND `)
case qcode.OpOr:
io.WriteString(c.w, ` OR `)
case qcode.OpNot:
io.WriteString(c.w, `NOT `)
case qcode.OpFalse:
io.WriteString(c.w, `false`)
default:
return fmt.Errorf("11: unexpected value %v (%t)", intf, intf)
}
case *qcode.Exp:
switch val.Op {
case qcode.OpFalse:
st.Push(val.Op)
qcode.FreeExp(val)
case qcode.OpAnd, qcode.OpOr:
for i := len(val.Children) - 1; i >= 0; i-- {
st.Push(val.Children[i])
if i > 0 {
st.Push(val.Op)
}
}
qcode.FreeExp(val)
case qcode.OpNot:
st.Push(val.Children[0])
st.Push(qcode.OpNot)
qcode.FreeExp(val)
default:
if len(val.NestedCols) != 0 {
io.WriteString(c.w, `EXISTS `)
if err := c.renderNestedWhere(val, sel, ti); err != nil {
return err
}
} else {
//fmt.Fprintf(w, `(("%s"."%s") `, c.sel.Name, val.Col)
if err := c.renderOp(val, sel, ti); err != nil {
return err
}
qcode.FreeExp(val)
}
}
default:
return fmt.Errorf("12: unexpected value %v (%t)", intf, intf)
}
}
return nil
}
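// renderNestedWhere emits the correlated (SELECT 1 FROM ... WHERE ...) chain
// behind a relationship filter; the EXISTS keyword itself is written by the caller.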
func (c *compilerContext) renderNestedWhere(ex *qcode.Exp, sel *qcode.Select, ti *DBTableInfo) error {
for i := 0; i < len(ex.NestedCols)-1; i++ {
cti, err := c.schema.GetTable(ex.NestedCols[i])
if err != nil {
return err
}
if i != 0 {
io.WriteString(c.w, ` AND `)
}
io.WriteString(c.w, `(SELECT 1 FROM `)
io.WriteString(c.w, cti.Name)
if err := c.renderJoinByName(cti.Name, ti.Name, -1); err != nil {
return err
}
io.WriteString(c.w, ` WHERE `)
if err := c.renderRelationshipByName(cti.Name, ti.Name, -1); err != nil {
return err
}
}
for i := 0; i < len(ex.NestedCols)-1; i++ {
io.WriteString(c.w, `)`)
}
return nil
}
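// renderOp writes one comparison: column, SQL operator and value. OpEqID and
// OpTsQuery resolve the table's primary-key and tsvector columns respectively.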
func (c *compilerContext) renderOp(ex *qcode.Exp, sel *qcode.Select, ti *DBTableInfo) error {
var col *DBColumn
var ok bool
if ex.Op == qcode.OpNop {
return nil
}
if len(ex.Col) != 0 {
if col, ok = ti.Columns[ex.Col]; !ok {
return fmt.Errorf("no column '%s' found ", ex.Col)
}
io.WriteString(c.w, `((`)
colWithTable(c.w, ti.Name, ex.Col)
io.WriteString(c.w, `) `)
}
switch ex.Op {
case qcode.OpEquals:
io.WriteString(c.w, `=`)
case qcode.OpNotEquals:
io.WriteString(c.w, `!=`)
case qcode.OpGreaterOrEquals:
io.WriteString(c.w, `>=`)
case qcode.OpLesserOrEquals:
io.WriteString(c.w, `<=`)
case qcode.OpGreaterThan:
io.WriteString(c.w, `>`)
case qcode.OpLesserThan:
io.WriteString(c.w, `<`)
case qcode.OpIn:
io.WriteString(c.w, `IN`)
case qcode.OpNotIn:
io.WriteString(c.w, `NOT IN`)
case qcode.OpLike:
io.WriteString(c.w, `LIKE`)
case qcode.OpNotLike:
io.WriteString(c.w, `NOT LIKE`)
case qcode.OpILike:
io.WriteString(c.w, `ILIKE`)
case qcode.OpNotILike:
io.WriteString(c.w, `NOT ILIKE`)
case qcode.OpSimilar:
io.WriteString(c.w, `SIMILAR TO`)
case qcode.OpNotSimilar:
io.WriteString(c.w, `NOT SIMILAR TO`)
case qcode.OpContains:
io.WriteString(c.w, `@>`)
case qcode.OpContainedIn:
io.WriteString(c.w, `<@`)
case qcode.OpHasKey:
io.WriteString(c.w, `?`)
case qcode.OpHasKeyAny:
io.WriteString(c.w, `?|`)
case qcode.OpHasKeyAll:
io.WriteString(c.w, `?&`)
case qcode.OpIsNull:
if strings.EqualFold(ex.Val, "true") {
io.WriteString(c.w, `IS NULL)`)
} else {
io.WriteString(c.w, `IS NOT NULL)`)
}
return nil
case qcode.OpEqID:
if len(ti.PrimaryCol) == 0 {
return fmt.Errorf("no primary key column defined for %s", ti.Name)
}
if col, ok = ti.Columns[ti.PrimaryCol]; !ok {
return fmt.Errorf("no primary key column '%s' found ", ti.PrimaryCol)
}
//fmt.Fprintf(w, `(("%s") =`, c.ti.PrimaryCol)
io.WriteString(c.w, `((`)
colWithTable(c.w, ti.Name, ti.PrimaryCol)
//io.WriteString(c.w, ti.PrimaryCol)
io.WriteString(c.w, `) =`)
case qcode.OpTsQuery:
if len(ti.TSVCol) == 0 {
return fmt.Errorf("no tsv column defined for %s", ti.Name)
}
if _, ok = ti.Columns[ti.TSVCol]; !ok {
return fmt.Errorf("no tsv column '%s' found ", ti.TSVCol)
}
//fmt.Fprintf(w, `(("%s") @@ websearch_to_tsquery('%s'))`, c.ti.TSVCol, val.Val)
io.WriteString(c.w, `((`)
colWithTable(c.w, ti.Name, ti.TSVCol)
if c.schema.ver >= 110000 {
io.WriteString(c.w, `) @@ websearch_to_tsquery('`)
} else {
io.WriteString(c.w, `) @@ to_tsquery('`)
}
io.WriteString(c.w, ex.Val)
io.WriteString(c.w, `'))`)
return nil
default:
return fmt.Errorf("[Where] unexpected op code %d", ex.Op)
}
if ex.Type == qcode.ValList {
c.renderList(ex)
} else {
c.renderVal(ex, c.vars, col)
}
io.WriteString(c.w, `)`)
return nil
}
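// renderOrderBy emits the ORDER BY clause over the *_ob aliases projected by
// renderOrderByColumns.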
func (c *compilerContext) renderOrderBy(sel *qcode.Select, ti *DBTableInfo) error {
io.WriteString(c.w, ` ORDER BY `)
for i := range sel.OrderBy {
if i != 0 {
io.WriteString(c.w, `, `)
}
ob := sel.OrderBy[i]
switch ob.Order {
case qcode.OrderAsc:
//fmt.Fprintf(w, `"%s_%d.ob.%s" ASC`, sel.Name, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
io.WriteString(c.w, ` ASC`)
case qcode.OrderDesc:
//fmt.Fprintf(w, `"%s_%d.ob.%s" DESC`, sel.Name, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
io.WriteString(c.w, ` DESC`)
case qcode.OrderAscNullsFirst:
//fmt.Fprintf(w, `"%s_%d.ob.%s" ASC NULLS FIRST`, sel.Name, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
io.WriteString(c.w, ` ASC NULLS FIRST`)
case qcode.OrderDescNullsFirst:
//fmt.Fprintf(w, `%s_%d.ob.%s DESC NULLS FIRST`, sel.Name, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
io.WriteString(c.w, ` DESC NULLS FIRST`)
case qcode.OrderAscNullsLast:
//fmt.Fprintf(w, `"%s_%d.ob.%s ASC NULLS LAST`, sel.Name, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
io.WriteString(c.w, ` ASC NULLS LAST`)
case qcode.OrderDescNullsLast:
//fmt.Fprintf(w, `%s_%d.ob.%s DESC NULLS LAST`, sel.Name, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
io.WriteString(c.w, ` DESC NULLS LAST`)
default:
return fmt.Errorf("13: unexpected value %v", ob.Order)
}
}
return nil
}
func (c *compilerContext) renderDistinctOn(sel *qcode.Select, ti *DBTableInfo) {
io.WriteString(c.w, `DISTINCT ON (`)
for i := range sel.DistinctOn {
if i != 0 {
io.WriteString(c.w, `, `)
}
//fmt.Fprintf(w, `"%s_%d.ob.%s"`, c.sel.Name, c.sel.ID, c.sel.DistinctOn[i])
tableIDColSuffix(c.w, ti.Name, sel.ID, sel.DistinctOn[i], "_ob")
}
io.WriteString(c.w, `) `)
}
func (c *compilerContext) renderList(ex *qcode.Exp) {
io.WriteString(c.w, ` (`)
for i := range ex.ListVal {
if i != 0 {
io.WriteString(c.w, `, `)
}
switch ex.ListType {
case qcode.ValBool, qcode.ValInt, qcode.ValFloat:
io.WriteString(c.w, ex.ListVal[i])
case qcode.ValStr:
io.WriteString(c.w, `'`)
io.WriteString(c.w, ex.ListVal[i])
io.WriteString(c.w, `'`)
}
}
io.WriteString(c.w, `)`)
}
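// renderVal writes a literal value; variables are resolved from vars or left
// as a '{{name}}' placeholder, quoted and cast to the column's type.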
func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string, col *DBColumn) {
io.WriteString(c.w, ` `)
switch ex.Type {
case qcode.ValBool, qcode.ValInt, qcode.ValFloat:
if len(ex.Val) != 0 {
io.WriteString(c.w, ex.Val)
} else {
io.WriteString(c.w, `''`)
}
case qcode.ValStr:
io.WriteString(c.w, `'`)
io.WriteString(c.w, ex.Val)
io.WriteString(c.w, `'`)
case qcode.ValVar:
io.WriteString(c.w, `'`)
if val, ok := vars[ex.Val]; ok {
io.WriteString(c.w, val)
} else {
//fmt.Fprintf(w, `'{{%s}}'`, ex.Val)
io.WriteString(c.w, `{{`)
io.WriteString(c.w, ex.Val)
io.WriteString(c.w, `}}`)
}
io.WriteString(c.w, `' :: `)
io.WriteString(c.w, col.Type)
}
//io.WriteString(c.w, `)`)
}
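// funcPrefixLen returns the length of a recognized aggregate prefix (avg_,
// count_, max_, ...) in fn, or 0 when fn is not a function column.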
func funcPrefixLen(fn string) int {
switch {
case strings.HasPrefix(fn, "avg_"):
return 4
case strings.HasPrefix(fn, "count_"):
return 6
case strings.HasPrefix(fn, "max_"):
return 4
case strings.HasPrefix(fn, "min_"):
return 4
case strings.HasPrefix(fn, "sum_"):
return 4
case strings.HasPrefix(fn, "stddev_"):
return 7
case strings.HasPrefix(fn, "stddev_pop_"):
return 11
case strings.HasPrefix(fn, "stddev_samp_"):
return 12
case strings.HasPrefix(fn, "variance_"):
return 9
case strings.HasPrefix(fn, "var_pop_"):
return 8
case strings.HasPrefix(fn, "var_samp_"):
return 9
}
return 0
}
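// hasBit reports whether bit pos of n is set.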
func hasBit(n uint32, pos uint32) bool {
val := n & (1 << pos)
return (val > 0)
}
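// The helpers below emit quoted SQL identifier fragments ("table"."col",
// aliases, _ob suffixes) without going through fmt.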
func alias(w io.Writer, alias string) {
io.WriteString(w, ` AS "`)
io.WriteString(w, alias)
io.WriteString(w, `"`)
}
func aliasWithID(w io.Writer, alias string, id int32) {
io.WriteString(w, ` AS "`)
io.WriteString(w, alias)
io.WriteString(w, `_`)
int2string(w, id)
io.WriteString(w, `"`)
}
func aliasWithIDSuffix(w io.Writer, alias string, id int32, suffix string) {
io.WriteString(w, ` AS "`)
io.WriteString(w, alias)
io.WriteString(w, `_`)
int2string(w, id)
io.WriteString(w, suffix)
io.WriteString(w, `"`)
}
func colWithTable(w io.Writer, table, col string) {
io.WriteString(w, `"`)
io.WriteString(w, table)
io.WriteString(w, `"."`)
io.WriteString(w, col)
io.WriteString(w, `"`)
}
func colWithTableID(w io.Writer, table string, id int32, col string) {
io.WriteString(w, `"`)
io.WriteString(w, table)
io.WriteString(w, `_`)
int2string(w, id)
io.WriteString(w, `"."`)
io.WriteString(w, col)
io.WriteString(w, `"`)
}
func colWithTableIDAlias(w io.Writer, table string, id int32, col, alias string) {
io.WriteString(w, `"`)
io.WriteString(w, table)
io.WriteString(w, `_`)
int2string(w, id)
io.WriteString(w, `"."`)
io.WriteString(w, col)
io.WriteString(w, `" AS "`)
io.WriteString(w, alias)
io.WriteString(w, `"`)
}
func tableIDColSuffix(w io.Writer, table string, id int32, col, suffix string) {
io.WriteString(w, `"`)
io.WriteString(w, table)
io.WriteString(w, `_`)
int2string(w, id)
io.WriteString(w, `_`)
io.WriteString(w, col)
io.WriteString(w, suffix)
io.WriteString(w, `"`)
}
const charset = "0123456789"
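// int2string writes a non-negative int32 as decimal digits without going
// through strconv.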
func int2string(w io.Writer, val int32) {
if val < 10 {
w.Write([]byte{charset[val]})
return
}
temp := int32(0)
digits := 0
val2 := val
for val2 > 0 {
temp = temp*10 + val2%10
digits++
val2 /= 10
}
// Emit exactly as many digits as were consumed so trailing zeros
// (10, 200, ...) survive the reversal instead of being dropped.
for ; digits > 0; digits-- {
d := temp % 10
temp /= 10
w.Write([]byte{charset[d]})
}
}
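Worth noting against the deleted version further below: compilerContext and Compile now take an io.Writer instead of a concrete *bytes.Buffer, so SQL can be streamed into any sink. A minimal sketch (assuming qc is a *qcode.QCode and pcompile is configured as in the test harness above; strings, log and fmt imports implied):

var sb strings.Builder
// Compile streams the SQL straight into the builder; CompileEx is just
// this pattern with an internal bytes.Buffer.
skipped, err := pcompile.Compile(qc, &sb, nil)
if err != nil {
log.Fatal(err)
}
_ = skipped // bitset of child selects skipped (e.g. remote relationships)
fmt.Println(sb.String())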

psql/query_test.go Normal file
@@ -0,0 +1,563 @@
package psql
import (
"bytes"
"testing"
)
func withComplexArgs(t *testing.T) {
gql := `query {
proDUcts(
# returns only 30 items
limit: 30,
# starts from item 10, commented out for now
# offset: 10,
# orders the response items by highest price
order_by: { price: desc },
# no duplicate prices returned
distinct: [ price ]
# only items with an id >= 20 and < 28 are returned
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
id
NAME
price
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0" ORDER BY "products_0_price_ob" DESC), '[]') AS "json_0" FROM (SELECT DISTINCT ON ("products_0_price_ob") row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "json_row_0")) AS "json_0", "products_0"."price" AS "products_0_price_ob" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") < 28) AND (("products"."id") >= 20)) LIMIT ('30') :: integer) AS "products_0" ORDER BY "products_0_price_ob" DESC LIMIT ('30') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func withWhereMultiOr(t *testing.T) {
gql := `query {
products(
where: {
or: {
not: { id: { is_null: true } },
price: { gt: 10 },
price: { lt: 20 }
} }
) {
id
name
price
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") < 20) OR (("products"."price") > 10) OR NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func withWhereIsNull(t *testing.T) {
gql := `query {
products(
where: {
and: {
not: { id: { is_null: true } },
price: { gt: 10 }
}}) {
id
name
price
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") > 10) AND NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func withWhereAndList(t *testing.T) {
gql := `query {
products(
where: {
and: [
{ not: { id: { is_null: true } } },
{ price: { gt: 10 } },
] } ) {
id
name
price
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") > 10) AND NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func fetchByID(t *testing.T) {
gql := `query {
product(id: 15) {
id
name
}
}`
sql := `SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 15)) LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func searchQuery(t *testing.T) {
gql := `query {
products(search: "ale") {
id
name
search_rank
search_headline_description
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('ale')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('ale')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('ale'))) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "admin")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func oneToMany(t *testing.T) {
gql := `query {
users {
email
products {
name
price
}
}
}`
sql := `SELECT json_object_agg('users', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."email" AS "email", "products_1_join"."json_1" AS "products") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("json_1"), '[]') AS "json_1" FROM (SELECT row_to_json((SELECT "json_row_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "json_row_1")) AS "json_1" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND (("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func belongsTo(t *testing.T) {
gql := `query {
products {
name
price
users {
email
}
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "users_1_join"."json_1" AS "users") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("json_1"), '[]') AS "json_1" FROM (SELECT row_to_json((SELECT "json_row_1" FROM (SELECT "users_1"."email" AS "email") AS "json_row_1")) AS "json_1" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1" LIMIT ('20') :: integer) AS "json_agg_1") AS "users_1_join" ON ('true') LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func manyToMany(t *testing.T) {
gql := `query {
products {
name
customers {
email
full_name
}
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."name" AS "name", "customers_1_join"."json_1" AS "customers") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("json_1"), '[]') AS "json_1" FROM (SELECT row_to_json((SELECT "json_row_1" FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name") AS "json_row_1")) AS "json_1" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1" LIMIT ('20') :: integer) AS "json_agg_1") AS "customers_1_join" ON ('true') LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func manyToManyReverse(t *testing.T) {
gql := `query {
customers {
email
full_name
products {
name
}
}
}`
sql := `SELECT json_object_agg('customers', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "products_1_join"."json_1" AS "products") AS "json_row_0")) AS "json_0" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("json_1"), '[]') AS "json_1" FROM (SELECT row_to_json((SELECT "json_row_1" FROM (SELECT "products_1"."name" AS "name") AS "json_row_1")) AS "json_1" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND (("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func aggFunction(t *testing.T) {
gql := `query {
products {
name
count_price
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func aggFunctionBlockedByCol(t *testing.T) {
gql := `query {
products {
name
count_price
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "anon")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func aggFunctionDisabled(t *testing.T) {
gql := `query {
products {
name
count_price
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "anon1")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func aggFunctionWithFilter(t *testing.T) {
gql := `query {
products(where: { id: { gt: 10 } }) {
id
max_price
}
}`
sql := `SELECT json_object_agg('products', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") > 10)) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func syntheticTables(t *testing.T) {
gql := `query {
me {
email
}
}`
sql := `SELECT json_object_agg('me', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT ) AS "json_row_0")) AS "json_0" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func queryWithVariables(t *testing.T) {
gql := `query {
product(id: $PRODUCT_ID, where: { price: { eq: $PRODUCT_PRICE } }) {
id
name
}
}`
sql := `SELECT json_object_agg('product', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "json_row_0")) AS "json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint)) LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func withWhereOnRelations(t *testing.T) {
gql := `query {
users(where: {
not: {
products: {
price: { gt: 3 }
}
}
}) {
id
email
}
}`
sql := `SELECT json_object_agg('users', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")))) LIMIT ('20') :: integer) AS "users_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func multiRoot(t *testing.T) {
gql := `query {
product {
id
name
customer {
email
}
customers {
email
}
}
user {
id
email
}
customer {
id
}
}`
sql := `SELECT row_to_json("json_root") FROM (SELECT "sel_0"."json_0" AS "customer", "sel_1"."json_1" AS "user", "sel_2"."json_2" AS "product" FROM (SELECT row_to_json((SELECT "json_row_2" FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "customers_3_join"."json_3" AS "customers", "customer_4_join"."json_4" AS "customer") AS "json_row_2")) AS "json_2" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT row_to_json((SELECT "json_row_4" FROM (SELECT "customers_4"."email" AS "email") AS "json_row_4")) AS "json_4" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4" LIMIT ('1') :: integer) AS "customer_4_join" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("json_3"), '[]') AS "json_3" FROM (SELECT row_to_json((SELECT "json_row_3" FROM (SELECT "customers_3"."email" AS "email") AS "json_row_3")) AS "json_3" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3" LIMIT ('20') :: integer) AS "json_agg_3") AS "customers_3_join" ON ('true') LIMIT ('1') :: integer) AS "sel_2", (SELECT row_to_json((SELECT "json_row_1" FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email") AS "json_row_1")) AS "json_1" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1" LIMIT ('1') :: integer) AS "sel_1", (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "customers_0"."id" AS "id") AS "json_row_0")) AS "json_0" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0" LIMIT ('1') :: integer) AS "sel_0") AS "json_root"`
resSQL, err := compileGQLToPSQL(gql, nil, "user")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func blockedQuery(t *testing.T) {
gql := `query {
user(id: 5, where: { id: { gt: 3 } }) {
id
full_name
email
}
}`
sql := `SELECT json_object_agg('user', json_0) FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "bad_dude")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func blockedFunctions(t *testing.T) {
gql := `query {
users {
count_id
email
}
}`
sql := `SELECT json_object_agg('users', json_0) FROM (SELECT coalesce(json_agg("json_0"), '[]') AS "json_0" FROM (SELECT row_to_json((SELECT "json_row_0" FROM (SELECT "users_0"."email" AS "email") AS "json_row_0")) AS "json_0" FROM (SELECT "users"."email" FROM "users" WHERE (false) LIMIT ('20') :: integer) AS "users_0" LIMIT ('20') :: integer) AS "json_agg_0") AS "sel_0"`
resSQL, err := compileGQLToPSQL(gql, nil, "bad_dude")
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func TestCompileQuery(t *testing.T) {
t.Run("withComplexArgs", withComplexArgs)
t.Run("withWhereAndList", withWhereAndList)
t.Run("withWhereIsNull", withWhereIsNull)
t.Run("withWhereMultiOr", withWhereMultiOr)
t.Run("fetchByID", fetchByID)
t.Run("searchQuery", searchQuery)
t.Run("belongsTo", belongsTo)
t.Run("oneToMany", oneToMany)
t.Run("manyToMany", manyToMany)
t.Run("manyToManyReverse", manyToManyReverse)
t.Run("aggFunction", aggFunction)
t.Run("aggFunctionBlockedByCol", aggFunctionBlockedByCol)
t.Run("aggFunctionDisabled", aggFunctionDisabled)
t.Run("aggFunctionWithFilter", aggFunctionWithFilter)
t.Run("syntheticTables", syntheticTables)
t.Run("queryWithVariables", queryWithVariables)
t.Run("withWhereOnRelations", withWhereOnRelations)
t.Run("multiRoot", multiRoot)
t.Run("blockedQuery", blockedQuery)
t.Run("blockedFunctions", blockedFunctions)
}
var benchGQL = []byte(`query {
proDUcts(
# returns only 30 items
limit: 30,
# starts from item 10, commented out for now
# offset: 10,
# orders the response items by highest price
order_by: { price: desc },
# only items with an id >= 20 and < 28 are returned
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
id
NAME
price
user {
full_name
picture : avatar
}
}
}`)
func BenchmarkCompile(b *testing.B) {
w := &bytes.Buffer{}
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
w.Reset()
qc, err := qcompile.Compile(benchGQL, "user")
if err != nil {
b.Fatal(err)
}
_, err = pcompile.Compile(qc, w, nil)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkCompileParallel(b *testing.B) {
b.ReportAllocs()
b.RunParallel(func(pb *testing.PB) {
w := &bytes.Buffer{}
for pb.Next() {
w.Reset()
qc, err := qcompile.Compile(benchGQL, "user")
if err != nil {
b.Fatal(err)
}
_, err = pcompile.Compile(qc, w, nil)
if err != nil {
b.Fatal(err)
}
}
})
}
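Both benchmarks run with the standard tooling, e.g. go test -bench=Compile ./psql; b.ReportAllocs above already folds allocation counts into the output, so -benchmem is optional.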

@@ -1,1066 +0,0 @@
package psql
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"math"
"strings"
"github.com/cespare/xxhash/v2"
"github.com/dosco/super-graph/qcode"
"github.com/dosco/super-graph/util"
)
const (
empty = ""
closeBlock = 500
)
type Variables map[string]json.RawMessage
type Config struct {
Schema *DBSchema
Vars map[string]string
}
type Compiler struct {
schema *DBSchema
vars map[string]string
}
func NewCompiler(conf Config) *Compiler {
return &Compiler{conf.Schema, conf.Vars}
}
func (c *Compiler) AddRelationship(child, parent string, rel *DBRel) error {
return c.schema.SetRel(child, parent, rel)
}
func (c *Compiler) IDColumn(table string) (string, error) {
t, err := c.schema.GetTable(table)
if err != nil {
return empty, err
}
return t.PrimaryCol, nil
}
type compilerContext struct {
w *bytes.Buffer
s []qcode.Select
*Compiler
}
func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (uint32, []byte, error) {
w := &bytes.Buffer{}
skipped, err := co.Compile(qc, w, vars)
return skipped, w.Bytes(), err
}
func (co *Compiler) Compile(qc *qcode.QCode, w *bytes.Buffer, vars Variables) (uint32, error) {
switch qc.Type {
case qcode.QTQuery:
return co.compileQuery(qc, w)
case qcode.QTMutation:
return co.compileMutation(qc, w, vars)
}
return 0, errors.New("unknown operation")
}
func (co *Compiler) compileQuery(qc *qcode.QCode, w *bytes.Buffer) (uint32, error) {
if len(qc.Selects) == 0 {
return 0, errors.New("empty query")
}
c := &compilerContext{w, qc.Selects, co}
root := &qc.Selects[0]
ti, err := c.schema.GetTable(root.Table)
if err != nil {
return 0, err
}
st := NewStack()
st.Push(root.ID + closeBlock)
st.Push(root.ID)
//fmt.Fprintf(w, `SELECT json_object_agg('%s', %s) FROM (`,
//root.FieldName, root.Table)
c.w.WriteString(`SELECT json_object_agg('`)
c.w.WriteString(root.FieldName)
c.w.WriteString(`', `)
if ti.Singular == false {
c.w.WriteString(root.Table)
} else {
c.w.WriteString("sel_json_")
int2string(c.w, root.ID)
}
c.w.WriteString(`) FROM (`)
var ignored uint32
for {
if st.Len() == 0 {
break
}
id := st.Pop()
if id < closeBlock {
sel := &c.s[id]
ti, err := c.schema.GetTable(sel.Table)
if err != nil {
return 0, err
}
if sel.ID != 0 {
if err = c.renderJoin(sel); err != nil {
return 0, err
}
}
skipped, err := c.renderSelect(sel, ti)
if err != nil {
return 0, err
}
ignored |= skipped
for _, cid := range sel.Children {
if hasBit(skipped, uint32(cid)) {
continue
}
child := &c.s[cid]
st.Push(child.ID + closeBlock)
st.Push(child.ID)
}
} else {
sel := &c.s[(id - closeBlock)]
ti, err := c.schema.GetTable(sel.Table)
if err != nil {
return 0, err
}
err = c.renderSelectClose(sel, ti)
if err != nil {
return 0, err
}
if sel.ID != 0 {
if err = c.renderJoinClose(sel); err != nil {
return 0, err
}
}
}
}
c.w.WriteString(`)`)
alias(c.w, `done_1337`)
c.w.WriteString(`;`)
return ignored, nil
}
func (c *compilerContext) processChildren(sel *qcode.Select, ti *DBTableInfo) (uint32, []*qcode.Column) {
var skipped uint32
cols := make([]*qcode.Column, 0, len(sel.Cols))
colmap := make(map[string]struct{}, len(sel.Cols))
for i := range sel.Cols {
colmap[sel.Cols[i].Name] = struct{}{}
}
for _, id := range sel.Children {
child := &c.s[id]
rel, err := c.schema.GetRel(child.Table, ti.Name)
if err != nil {
skipped |= (1 << uint(id))
continue
}
switch rel.Type {
case RelOneToMany:
fallthrough
case RelBelongTo:
if _, ok := colmap[rel.Col2]; !ok {
cols = append(cols, &qcode.Column{ti.Name, rel.Col2, rel.Col2})
}
case RelOneToManyThrough:
if _, ok := colmap[rel.Col1]; !ok {
cols = append(cols, &qcode.Column{ti.Name, rel.Col1, rel.Col1})
}
case RelRemote:
if _, ok := colmap[rel.Col1]; !ok {
cols = append(cols, &qcode.Column{ti.Name, rel.Col1, rel.Col2})
}
skipped |= (1 << uint(id))
default:
skipped |= (1 << uint(id))
}
}
return skipped, cols
}
func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo) (uint32, error) {
skipped, childCols := c.processChildren(sel, ti)
hasOrder := len(sel.OrderBy) != 0
// SELECT
if ti.Singular == false {
//fmt.Fprintf(w, `SELECT coalesce(json_agg("%s"`, c.sel.Table)
c.w.WriteString(`SELECT coalesce(json_agg("`)
c.w.WriteString("sel_json_")
int2string(c.w, sel.ID)
c.w.WriteString(`"`)
if hasOrder {
err := c.renderOrderBy(sel, ti)
if err != nil {
return skipped, err
}
}
//fmt.Fprintf(w, `), '[]') AS "%s" FROM (`, c.sel.Table)
c.w.WriteString(`), '[]')`)
alias(c.w, sel.Table)
c.w.WriteString(` FROM (`)
}
// ROW-TO-JSON
c.w.WriteString(`SELECT `)
if len(sel.DistinctOn) != 0 {
c.renderDistinctOn(sel, ti)
}
c.w.WriteString(`row_to_json((`)
//fmt.Fprintf(w, `SELECT "sel_%d" FROM (SELECT `, c.sel.ID)
c.w.WriteString(`SELECT "sel_`)
int2string(c.w, sel.ID)
c.w.WriteString(`" FROM (SELECT `)
// Combined column names
c.renderColumns(sel, ti)
c.renderRemoteRelColumns(sel, ti)
err := c.renderJoinedColumns(sel, ti, skipped)
if err != nil {
return skipped, err
}
//fmt.Fprintf(w, `) AS "sel_%d"`, c.sel.ID)
c.w.WriteString(`)`)
aliasWithID(c.w, "sel", sel.ID)
//fmt.Fprintf(w, `)) AS "%s"`, c.sel.Table)
c.w.WriteString(`))`)
aliasWithID(c.w, "sel_json", sel.ID)
// END-ROW-TO-JSON
if hasOrder {
c.renderOrderByColumns(sel, ti)
}
// END-SELECT
// FROM (SELECT .... )
err = c.renderBaseSelect(sel, ti, childCols, skipped)
if err != nil {
return skipped, err
}
// END-FROM
return skipped, nil
}
func (c *compilerContext) renderSelectClose(sel *qcode.Select, ti *DBTableInfo) error {
hasOrder := len(sel.OrderBy) != 0
if hasOrder {
err := c.renderOrderBy(sel, ti)
if err != nil {
return err
}
}
if sel.Action == 0 {
if len(sel.Paging.Limit) != 0 {
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
c.w.WriteString(` LIMIT ('`)
c.w.WriteString(sel.Paging.Limit)
c.w.WriteString(`') :: integer`)
} else if ti.Singular {
c.w.WriteString(` LIMIT ('1') :: integer`)
} else {
c.w.WriteString(` LIMIT ('20') :: integer`)
}
}
if len(sel.Paging.Offset) != 0 {
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
c.w.WriteString(`OFFSET ('`)
c.w.WriteString(sel.Paging.Offset)
c.w.WriteString(`') :: integer`)
}
if ti.Singular == false {
//fmt.Fprintf(w, `) AS "sel_json_agg_%d"`, c.sel.ID)
c.w.WriteString(`)`)
aliasWithID(c.w, "sel_json_agg", sel.ID)
}
return nil
}
func (c *compilerContext) renderJoin(sel *qcode.Select) error {
c.w.WriteString(` LEFT OUTER JOIN LATERAL (`)
return nil
}
func (c *compilerContext) renderJoinClose(sel *qcode.Select) error {
//fmt.Fprintf(w, `) AS "%s_%d_join" ON ('true')`, c.sel.Table, c.sel.ID)
c.w.WriteString(`)`)
aliasWithIDSuffix(c.w, sel.Table, sel.ID, "_join")
c.w.WriteString(` ON ('true')`)
return nil
}
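// renderJoinTable adds the join table for many-to-many relationships. For
// example, nesting customers under products via purchases renders roughly:
//
//   LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id"))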
func (c *compilerContext) renderJoinTable(sel *qcode.Select) error {
parent := &c.s[sel.ParentID]
rel, err := c.schema.GetRel(sel.Table, parent.Table)
if err != nil {
return err
}
if rel.Type != RelOneToManyThrough {
return nil
}
pt, err := c.schema.GetTable(parent.Table)
if err != nil {
return err
}
//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
//rel.Through, rel.Through, rel.ColT, c.parent.Table, c.parent.ID, rel.Col1)
c.w.WriteString(` LEFT OUTER JOIN "`)
c.w.WriteString(rel.Through)
c.w.WriteString(`" ON ((`)
colWithTable(c.w, rel.Through, rel.ColT)
c.w.WriteString(`) = (`)
colWithTableID(c.w, pt.Name, parent.ID, rel.Col1)
c.w.WriteString(`))`)
return nil
}
func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo) {
for i, col := range sel.Cols {
if i != 0 {
io.WriteString(c.w, ", ")
}
//fmt.Fprintf(w, `"%s_%d"."%s" AS "%s"`,
//c.sel.Table, c.sel.ID, col.Name, col.FieldName)
colWithTableIDAlias(c.w, ti.Name, sel.ID, col.Name, col.FieldName)
}
}
func (c *compilerContext) renderRemoteRelColumns(sel *qcode.Select, ti *DBTableInfo) {
i := 0
for _, id := range sel.Children {
child := &c.s[id]
rel, err := c.schema.GetRel(child.Table, sel.Table)
if err != nil || rel.Type != RelRemote {
continue
}
if i != 0 || len(sel.Cols) != 0 {
io.WriteString(c.w, ", ")
}
//fmt.Fprintf(w, `"%s_%d"."%s" AS "%s"`,
//c.sel.Table, c.sel.ID, rel.Col1, rel.Col2)
colWithTableID(c.w, ti.Name, sel.ID, rel.Col1)
alias(c.w, rel.Col2)
i++
}
}
func (c *compilerContext) renderJoinedColumns(sel *qcode.Select, ti *DBTableInfo, skipped uint32) error {
colsRendered := len(sel.Cols) != 0
for _, id := range sel.Children {
skipThis := hasBit(skipped, uint32(id))
if colsRendered && !skipThis {
io.WriteString(c.w, ", ")
}
if skipThis {
continue
}
childSel := &c.s[id]
//fmt.Fprintf(w, `"%s_%d_join"."%s" AS "%s"`,
//s.Table, s.ID, s.Table, s.FieldName)
colWithTableIDSuffixAlias(c.w, childSel.Table, childSel.ID,
"_join", childSel.Table, childSel.FieldName)
}
return nil
}
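// renderBaseSelect writes the innermost SELECT that reads from the actual
// table. Real columns are selected directly; search_rank and
// search_headline_* become ts_rank/ts_headline calls; names with a known
// function prefix such as max_price become aggregates (max("products"."price"))
// and force a GROUP BY on the remaining real columns.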
func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo,
childCols []*qcode.Column, skipped uint32) error {
var groupBy []int
isRoot := sel.ID == 0
isFil := sel.Where != nil
isSearch := sel.Args["search"] != nil
isAgg := false
c.w.WriteString(` FROM (SELECT `)
for i, col := range sel.Cols {
cn := col.Name
_, isRealCol := ti.Columns[cn]
if !isRealCol {
if isSearch {
switch {
case cn == "search_rank":
cn = ti.TSVCol
arg := sel.Args["search"]
//fmt.Fprintf(w, `ts_rank("%s"."%s", to_tsquery('%s')) AS %s`,
//c.sel.Table, cn, arg.Val, col.Name)
c.w.WriteString(`ts_rank(`)
colWithTable(c.w, ti.Name, cn)
c.w.WriteString(`, to_tsquery('`)
c.w.WriteString(arg.Val)
c.w.WriteString(`'))`)
alias(c.w, col.Name)
case strings.HasPrefix(cn, "search_headline_"):
cn = cn[16:]
arg := sel.Args["search"]
//fmt.Fprintf(w, `ts_headline("%s"."%s", to_tsquery('%s')) AS %s`,
//c.sel.Table, cn, arg.Val, col.Name)
c.w.WriteString(`ts_headline(`)
colWithTable(c.w, ti.Name, cn)
c.w.WriteString(`, to_tsquery('`)
c.w.WriteString(arg.Val)
c.w.WriteString(`'))`)
alias(c.w, col.Name)
}
} else {
pl := funcPrefixLen(cn)
if pl == 0 {
//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
c.w.WriteString(`'`)
c.w.WriteString(cn)
c.w.WriteString(` not defined'`)
alias(c.w, col.Name)
} else {
isAgg = true
fn := cn[0 : pl-1]
cn := cn[pl:]
//fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Table, cn, col.Name)
c.w.WriteString(fn)
c.w.WriteString(`(`)
colWithTable(c.w, ti.Name, cn)
c.w.WriteString(`)`)
alias(c.w, col.Name)
}
}
} else {
groupBy = append(groupBy, i)
//fmt.Fprintf(w, `"%s"."%s"`, c.sel.Table, cn)
colWithTable(c.w, ti.Name, cn)
}
if i < len(sel.Cols)-1 || len(childCols) != 0 {
//io.WriteString(w, ", ")
c.w.WriteString(`, `)
}
}
for i, col := range childCols {
if i != 0 {
//io.WriteString(w, ", ")
c.w.WriteString(`, `)
}
//fmt.Fprintf(w, `"%s"."%s"`, col.Table, col.Name)
colWithTable(c.w, col.Table, col.Name)
}
c.w.WriteString(` FROM `)
//fmt.Fprintf(w, ` FROM "%s"`, c.sel.Table)
c.w.WriteString(`"`)
c.w.WriteString(ti.Name)
c.w.WriteString(`"`)
// if tn, ok := c.tmap[sel.Table]; ok {
// //fmt.Fprintf(w, ` FROM "%s" AS "%s"`, tn, c.sel.Table)
// tableWithAlias(c.w, ti.Name, sel.Table)
// } else {
// //fmt.Fprintf(w, ` FROM "%s"`, c.sel.Table)
// c.w.WriteString(`"`)
// c.w.WriteString(sel.Table)
// c.w.WriteString(`"`)
// }
if isRoot && isFil {
c.w.WriteString(` WHERE (`)
if err := c.renderWhere(sel, ti); err != nil {
return err
}
c.w.WriteString(`)`)
}
if !isRoot {
if err := c.renderJoinTable(sel); err != nil {
return err
}
c.w.WriteString(` WHERE (`)
if err := c.renderRelationship(sel, ti); err != nil {
return err
}
if isFil {
c.w.WriteString(` AND `)
if err := c.renderWhere(sel, ti); err != nil {
return err
}
}
c.w.WriteString(`)`)
}
if isAgg {
if len(groupBy) != 0 {
c.w.WriteString(` GROUP BY `)
for i, id := range groupBy {
if i != 0 {
c.w.WriteString(`, `)
}
//fmt.Fprintf(w, `"%s"."%s"`, c.sel.Table, c.sel.Cols[id].Name)
colWithTable(c.w, ti.Name, sel.Cols[id].Name)
}
}
}
if sel.Action == 0 {
if len(sel.Paging.Limit) != 0 {
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
c.w.WriteString(` LIMIT ('`)
c.w.WriteString(sel.Paging.Limit)
c.w.WriteString(`') :: integer`)
} else if ti.Singular {
c.w.WriteString(` LIMIT ('1') :: integer`)
} else {
c.w.WriteString(` LIMIT ('20') :: integer`)
}
}
if len(sel.Paging.Offset) != 0 {
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
c.w.WriteString(` OFFSET ('`)
c.w.WriteString(sel.Paging.Offset)
c.w.WriteString(`') :: integer`)
}
//fmt.Fprintf(w, `) AS "%s_%d"`, c.sel.Table, c.sel.ID)
c.w.WriteString(`)`)
aliasWithID(c.w, ti.Name, sel.ID)
return nil
}
func (c *compilerContext) renderOrderByColumns(sel *qcode.Select, ti *DBTableInfo) {
colsRendered := len(sel.Cols) != 0
for i := range sel.OrderBy {
if colsRendered {
//io.WriteString(w, ", ")
c.w.WriteString(`, `)
}
col := sel.OrderBy[i].Col
//fmt.Fprintf(w, `"%s_%d"."%s" AS "%s_%d_%s_ob"`,
//c.sel.Table, c.sel.ID, col,
//c.sel.Table, c.sel.ID, col)
colWithTableID(c.w, ti.Name, sel.ID, col)
c.w.WriteString(` AS `)
tableIDColSuffix(c.w, ti.Name, sel.ID, col, "_ob")
}
}
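// renderRelationship emits the condition that ties a child select to its
// parent. Belongs-to and one-to-many relations compare the child column with
// the aliased parent column, e.g. (("users"."id") = ("products_0"."user_id"));
// one-to-many-through relations compare against the join table instead.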
func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
parent := c.s[sel.ParentID]
rel, err := c.schema.GetRel(sel.Table, parent.Table)
if err != nil {
return err
}
switch rel.Type {
case RelBelongTo:
//fmt.Fprintf(w, `(("%s"."%s") = ("%s_%d"."%s"))`,
//c.sel.Table, rel.Col1, c.parent.Table, c.parent.ID, rel.Col2)
c.w.WriteString(`((`)
colWithTable(c.w, ti.Name, rel.Col1)
c.w.WriteString(`) = (`)
colWithTableID(c.w, parent.Table, parent.ID, rel.Col2)
c.w.WriteString(`))`)
case RelOneToMany:
//fmt.Fprintf(w, `(("%s"."%s") = ("%s_%d"."%s"))`,
//c.sel.Table, rel.Col1, c.parent.Table, c.parent.ID, rel.Col2)
c.w.WriteString(`((`)
colWithTable(c.w, ti.Name, rel.Col1)
c.w.WriteString(`) = (`)
colWithTableID(c.w, parent.Table, parent.ID, rel.Col2)
c.w.WriteString(`))`)
case RelOneToManyThrough:
//fmt.Fprintf(w, `(("%s"."%s") = ("%s"."%s"))`,
//c.sel.Table, rel.Col1, rel.Through, rel.Col2)
c.w.WriteString(`((`)
colWithTable(c.w, ti.Name, rel.Col1)
c.w.WriteString(`) = (`)
colWithTable(c.w, rel.Through, rel.Col2)
c.w.WriteString(`))`)
}
return nil
}
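// renderWhere walks the filter expression tree iteratively with a stack:
// and/or/not nodes interleave their operator between their children while
// leaf nodes render as ((table.col) op value). For example
// { and: { price: { gt: 10 }, not: { id: { is_null: true } } } } compiles
// to ((("products"."price") > 10) AND NOT (("products"."id") IS NULL)).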
func (c *compilerContext) renderWhere(sel *qcode.Select, ti *DBTableInfo) error {
st := util.NewStack()
if sel.Where != nil {
st.Push(sel.Where)
}
for {
if st.Len() == 0 {
break
}
intf := st.Pop()
switch val := intf.(type) {
case qcode.ExpOp:
switch val {
case qcode.OpAnd:
c.w.WriteString(` AND `)
case qcode.OpOr:
c.w.WriteString(` OR `)
case qcode.OpNot:
c.w.WriteString(`NOT `)
default:
return fmt.Errorf("11: unexpected value %v (%t)", intf, intf)
}
case *qcode.Exp:
switch val.Op {
case qcode.OpAnd, qcode.OpOr:
for i := len(val.Children) - 1; i >= 0; i-- {
st.Push(val.Children[i])
if i > 0 {
st.Push(val.Op)
}
}
qcode.FreeExp(val)
case qcode.OpNot:
st.Push(val.Children[0])
st.Push(qcode.OpNot)
qcode.FreeExp(val)
default:
if val.NestedCol {
//fmt.Fprintf(w, `(("%s") `, val.Col)
c.w.WriteString(`(("`)
c.w.WriteString(val.Col)
c.w.WriteString(`") `)
} else if len(val.Col) != 0 {
//fmt.Fprintf(w, `(("%s"."%s") `, c.sel.Table, val.Col)
c.w.WriteString(`((`)
colWithTable(c.w, ti.Name, val.Col)
c.w.WriteString(`) `)
}
valExists := true
switch val.Op {
case qcode.OpEquals:
c.w.WriteString(`=`)
case qcode.OpNotEquals:
c.w.WriteString(`!=`)
case qcode.OpGreaterOrEquals:
c.w.WriteString(`>=`)
case qcode.OpLesserOrEquals:
c.w.WriteString(`<=`)
case qcode.OpGreaterThan:
c.w.WriteString(`>`)
case qcode.OpLesserThan:
c.w.WriteString(`<`)
case qcode.OpIn:
c.w.WriteString(`IN`)
case qcode.OpNotIn:
c.w.WriteString(`NOT IN`)
case qcode.OpLike:
c.w.WriteString(`LIKE`)
case qcode.OpNotLike:
c.w.WriteString(`NOT LIKE`)
case qcode.OpILike:
c.w.WriteString(`ILIKE`)
case qcode.OpNotILike:
c.w.WriteString(`NOT ILIKE`)
case qcode.OpSimilar:
c.w.WriteString(`SIMILAR TO`)
case qcode.OpNotSimilar:
c.w.WriteString(`NOT SIMILAR TO`)
case qcode.OpContains:
c.w.WriteString(`@>`)
case qcode.OpContainedIn:
c.w.WriteString(`<@`)
case qcode.OpHasKey:
c.w.WriteString(`?`)
case qcode.OpHasKeyAny:
c.w.WriteString(`?|`)
case qcode.OpHasKeyAll:
c.w.WriteString(`?&`)
case qcode.OpIsNull:
if strings.EqualFold(val.Val, "true") {
c.w.WriteString(`IS NULL)`)
} else {
c.w.WriteString(`IS NOT NULL)`)
}
valExists = false
case qcode.OpEqID:
if len(ti.PrimaryCol) == 0 {
return fmt.Errorf("no primary key column defined for %s", ti.Name)
}
//fmt.Fprintf(w, `(("%s") =`, c.ti.PrimaryCol)
c.w.WriteString(`((`)
colWithTable(c.w, ti.Name, ti.PrimaryCol)
//c.w.WriteString(ti.PrimaryCol)
c.w.WriteString(`) =`)
case qcode.OpTsQuery:
if len(ti.TSVCol) == 0 {
return fmt.Errorf("no tsv column defined for %s", ti.Name)
}
//fmt.Fprintf(w, `(("%s") @@ to_tsquery('%s'))`, c.ti.TSVCol, val.Val)
c.w.WriteString(`(("`)
c.w.WriteString(ti.TSVCol)
c.w.WriteString(`") @@ to_tsquery('`)
c.w.WriteString(val.Val)
c.w.WriteString(`'))`)
valExists = false
default:
return fmt.Errorf("[Where] unexpected op code %d", val.Op)
}
if valExists {
if val.Type == qcode.ValList {
c.renderList(val)
} else {
c.renderVal(val, c.vars)
}
c.w.WriteString(`)`)
}
qcode.FreeExp(val)
}
default:
return fmt.Errorf("12: unexpected value %v (%t)", intf, intf)
}
}
return nil
}
func (c *compilerContext) renderOrderBy(sel *qcode.Select, ti *DBTableInfo) error {
c.w.WriteString(` ORDER BY `)
for i := range sel.OrderBy {
if i != 0 {
c.w.WriteString(`, `)
}
ob := sel.OrderBy[i]
switch ob.Order {
case qcode.OrderAsc:
//fmt.Fprintf(w, `"%s_%d.ob.%s" ASC`, sel.Table, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
c.w.WriteString(` ASC`)
case qcode.OrderDesc:
//fmt.Fprintf(w, `"%s_%d.ob.%s" DESC`, sel.Table, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
c.w.WriteString(` DESC`)
case qcode.OrderAscNullsFirst:
//fmt.Fprintf(w, `"%s_%d.ob.%s" ASC NULLS FIRST`, sel.Table, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
c.w.WriteString(` ASC NULLS FIRST`)
case qcode.OrderDescNullsFirst:
//fmt.Fprintf(w, `%s_%d.ob.%s DESC NULLS FIRST`, sel.Table, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
c.w.WriteString(` DESC NULLS FIRST`)
case qcode.OrderAscNullsLast:
//fmt.Fprintf(w, `"%s_%d.ob.%s ASC NULLS LAST`, sel.Table, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
c.w.WriteString(` ASC NULLS LAST`)
case qcode.OrderDescNullsLast:
//fmt.Fprintf(w, `%s_%d.ob.%s DESC NULLS LAST`, sel.Table, sel.ID, ob.Col)
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
c.w.WriteString(` DESC NULLS LAST`)
default:
return fmt.Errorf("13: unexpected value %v", ob.Order)
}
}
return nil
}
func (c *compilerContext) renderDistinctOn(sel *qcode.Select, ti *DBTableInfo) {
io.WriteString(c.w, `DISTINCT ON (`)
for i := range sel.DistinctOn {
if i != 0 {
c.w.WriteString(`, `)
}
//fmt.Fprintf(w, `"%s_%d.ob.%s"`, c.sel.Table, c.sel.ID, c.sel.DistinctOn[i])
tableIDColSuffix(c.w, ti.Name, sel.ID, sel.DistinctOn[i], "_ob")
}
c.w.WriteString(`) `)
}
func (c *compilerContext) renderList(ex *qcode.Exp) {
io.WriteString(c.w, ` (`)
for i := range ex.ListVal {
if i != 0 {
c.w.WriteString(`, `)
}
switch ex.ListType {
case qcode.ValBool, qcode.ValInt, qcode.ValFloat:
c.w.WriteString(ex.ListVal[i])
case qcode.ValStr:
c.w.WriteString(`'`)
c.w.WriteString(ex.ListVal[i])
c.w.WriteString(`'`)
}
}
c.w.WriteString(`)`)
}
func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string) {
io.WriteString(c.w, ` `)
switch ex.Type {
case qcode.ValBool, qcode.ValInt, qcode.ValFloat:
if len(ex.Val) != 0 {
c.w.WriteString(ex.Val)
} else {
c.w.WriteString(`''`)
}
case qcode.ValStr:
c.w.WriteString(`'`)
c.w.WriteString(ex.Val)
c.w.WriteString(`'`)
case qcode.ValVar:
if val, ok := vars[ex.Val]; ok {
c.w.WriteString(val)
} else {
//fmt.Fprintf(w, `'{{%s}}'`, ex.Val)
c.w.WriteString(`{{`)
c.w.WriteString(ex.Val)
c.w.WriteString(`}}`)
}
}
//c.w.WriteString(`)`)
}
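// funcPrefixLen returns the length of a recognized aggregate function prefix,
// including the trailing underscore, or 0 if there is none. For example
// funcPrefixLen("max_price") is 4, splitting the synthetic column into the
// function "max" and the real column "price".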
func funcPrefixLen(fn string) int {
switch {
case strings.HasPrefix(fn, "avg_"):
return 4
case strings.HasPrefix(fn, "count_"):
return 6
case strings.HasPrefix(fn, "max_"):
return 4
case strings.HasPrefix(fn, "min_"):
return 4
case strings.HasPrefix(fn, "sum_"):
return 4
case strings.HasPrefix(fn, "stddev_"):
return 7
case strings.HasPrefix(fn, "stddev_pop_"):
return 11
case strings.HasPrefix(fn, "stddev_samp_"):
return 12
case strings.HasPrefix(fn, "variance_"):
return 9
case strings.HasPrefix(fn, "var_pop_"):
return 8
case strings.HasPrefix(fn, "var_samp_"):
return 9
}
return 0
}
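// hasBit reports whether the bit at pos is set in n. It is used with the
// skipped bitmask in which bit i marks child select i as skipped, either
// because it is resolved remotely or because no relationship was found.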
func hasBit(n uint32, pos uint32) bool {
val := n & (1 << pos)
return (val > 0)
}
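// The helpers below write quoted identifiers and aliases directly into the
// buffer to avoid fmt.Sprintf allocations; for example
// aliasWithID(w, "sel_json", 0) writes ` AS "sel_json_0"`.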
func alias(w *bytes.Buffer, alias string) {
w.WriteString(` AS "`)
w.WriteString(alias)
w.WriteString(`"`)
}
func aliasWithID(w *bytes.Buffer, alias string, id int32) {
w.WriteString(` AS "`)
w.WriteString(alias)
w.WriteString(`_`)
int2string(w, id)
w.WriteString(`"`)
}
func aliasWithIDSuffix(w *bytes.Buffer, alias string, id int32, suffix string) {
w.WriteString(` AS "`)
w.WriteString(alias)
w.WriteString(`_`)
int2string(w, id)
w.WriteString(suffix)
w.WriteString(`"`)
}
func colWithAlias(w *bytes.Buffer, col, alias string) {
w.WriteString(`"`)
w.WriteString(col)
w.WriteString(`" AS "`)
w.WriteString(alias)
w.WriteString(`"`)
}
func tableWithAlias(w *bytes.Buffer, table, alias string) {
w.WriteString(`"`)
w.WriteString(table)
w.WriteString(`" AS "`)
w.WriteString(alias)
w.WriteString(`"`)
}
func colWithTable(w *bytes.Buffer, table, col string) {
w.WriteString(`"`)
w.WriteString(table)
w.WriteString(`"."`)
w.WriteString(col)
w.WriteString(`"`)
}
func colWithTableID(w *bytes.Buffer, table string, id int32, col string) {
w.WriteString(`"`)
w.WriteString(table)
w.WriteString(`_`)
int2string(w, id)
w.WriteString(`"."`)
w.WriteString(col)
w.WriteString(`"`)
}
func colWithTableIDAlias(w *bytes.Buffer, table string, id int32, col, alias string) {
w.WriteString(`"`)
w.WriteString(table)
w.WriteString(`_`)
int2string(w, id)
w.WriteString(`"."`)
w.WriteString(col)
w.WriteString(`" AS "`)
w.WriteString(alias)
w.WriteString(`"`)
}
func colWithTableIDSuffixAlias(w *bytes.Buffer, table string, id int32,
suffix, col, alias string) {
w.WriteString(`"`)
w.WriteString(table)
w.WriteString(`_`)
int2string(w, id)
w.WriteString(suffix)
w.WriteString(`"."`)
w.WriteString(col)
w.WriteString(`" AS "`)
w.WriteString(alias)
w.WriteString(`"`)
}
func tableIDColSuffix(w *bytes.Buffer, table string, id int32, col, suffix string) {
w.WriteString(`"`)
w.WriteString(table)
w.WriteString(`_`)
int2string(w, id)
w.WriteString(`_`)
w.WriteString(col)
w.WriteString(suffix)
w.WriteString(`"`)
}
const charset = "0123456789"
func int2string(w *bytes.Buffer, val int32) {
if val < 10 {
w.WriteByte(charset[val])
return
}
// Write the decimal digits into a small buffer back to front; a
// reverse-and-print loop would drop trailing zeros (turning 20 into "2").
var buf [10]byte
i := len(buf)
for val > 0 {
i--
buf[i] = charset[val%10]
val /= 10
}
w.Write(buf[i:])
}
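// relID hashes a (child, parent) table pair into a single lookup key and
// resets the digest for reuse. The concatenation makes the key order
// dependent, so relID(h, "a", "b") and relID(h, "b", "a") differ.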
func relID(h *xxhash.Digest, child, parent string) uint64 {
h.WriteString(child)
h.WriteString(parent)
v := h.Sum64()
h.Reset()
return v
}


@ -1,561 +0,0 @@
package psql
import (
"bytes"
"log"
"os"
"testing"
"github.com/dosco/super-graph/qcode"
)
const (
errNotExpected = "Generated SQL did not match what was expected"
)
var (
qcompile *qcode.Compiler
pcompile *Compiler
)
func TestMain(m *testing.M) {
var err error
qcompile, err = qcode.NewCompiler(qcode.Config{
DefaultFilter: []string{
`{ user_id: { _eq: $user_id } }`,
},
FilterMap: qcode.Filters{
All: map[string][]string{
"users": []string{
"{ id: { eq: $user_id } }",
},
"products": []string{
"{ price: { gt: 0 } }",
"{ price: { lt: 8 } }",
},
"customers": []string{},
"mes": []string{
"{ id: { eq: $user_id } }",
},
},
Query: map[string][]string{
"users": []string{},
},
Update: map[string][]string{
"products": []string{
"{ user_id: { eq: $user_id } }",
},
},
},
Blocklist: []string{
"secret",
"password",
"token",
},
})
if err != nil {
log.Fatal(err)
}
tables := []*DBTable{
&DBTable{Name: "customers", Type: "table"},
&DBTable{Name: "users", Type: "table"},
&DBTable{Name: "products", Type: "table"},
&DBTable{Name: "purchases", Type: "table"},
}
columns := [][]*DBColumn{
[]*DBColumn{
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 4, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 5, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 6, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 7, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 8, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 9, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 10, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
[]*DBColumn{
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 4, Name: "avatar", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 5, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 6, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 7, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 8, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 9, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 10, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 11, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
[]*DBColumn{
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 2, Name: "name", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 3, Name: "description", Type: "text", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 4, Name: "price", Type: "numeric(7,2)", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 5, Name: "user_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "users", FKeyColID: []int16{1}},
&DBColumn{ID: 6, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 7, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 8, Name: "tsv", Type: "tsvector", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
[]*DBColumn{
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 2, Name: "customer_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "customers", FKeyColID: []int16{1}},
&DBColumn{ID: 3, Name: "product_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "products", FKeyColID: []int16{1}},
&DBColumn{ID: 4, Name: "sale_type", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 5, Name: "quantity", Type: "integer", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 6, Name: "due_date", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
&DBColumn{ID: 7, Name: "returned", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
}
schema := &DBSchema{
t: make(map[string]*DBTableInfo),
rm: make(map[string]map[string]*DBRel),
al: make(map[string]struct{}),
}
aliases := map[string][]string{
"users": []string{"mes"},
}
for i, t := range tables {
schema.updateSchema(t, columns[i], aliases)
}
vars := NewVariables(map[string]string{
"account_id": "select account_id from users where id = $user_id",
})
pcompile = NewCompiler(Config{
Schema: schema,
Vars: vars,
})
os.Exit(m.Run())
}
func compileGQLToPSQL(gql string, vars Variables) ([]byte, error) {
qc, err := qcompile.Compile([]byte(gql))
if err != nil {
return nil, err
}
_, sqlStmt, err := pcompile.CompileEx(qc, vars)
if err != nil {
return nil, err
}
return sqlStmt, nil
}
func withComplexArgs(t *testing.T) {
gql := `query {
proDUcts(
# returns only 30 items
limit: 30,
# starts from item 10, commented out for now
# offset: 10,
# orders the response items by highest price
order_by: { price: desc },
# no duplicate prices returned
distinct: [ price ]
# only items with an id >= 20 and < 28 are returned
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
id
NAME
price
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0" ORDER BY "products_0_price_ob" DESC), '[]') AS "products" FROM (SELECT DISTINCT ON ("products_0_price_ob") row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0", "products_0"."price" AS "products_0_price_ob" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") < 28) AND (("products"."id") >= 20)) LIMIT ('30') :: integer) AS "products_0" ORDER BY "products_0_price_ob" DESC LIMIT ('30') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func withWhereMultiOr(t *testing.T) {
gql := `query {
products(
where: {
or: {
not: { id: { is_null: true } },
price: { gt: 10 },
price: { lt: 20 }
} }
) {
id
name
price
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") < 20) OR (("products"."price") > 10) OR NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func withWhereIsNull(t *testing.T) {
gql := `query {
products(
where: {
and: {
not: { id: { is_null: true } },
price: { gt: 10 }
}}) {
id
name
price
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") > 10) AND NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func withWhereAndList(t *testing.T) {
gql := `query {
products(
where: {
and: [
{ not: { id: { is_null: true } } },
{ price: { gt: 10 } },
] } ) {
id
name
price
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") > 10) AND NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func fetchByID(t *testing.T) {
gql := `query {
product(id: 15) {
id
name
}
}`
sql := `SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 15)) LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func searchQuery(t *testing.T) {
gql := `query {
products(search: "Imperial") {
id
name
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("tsv") @@ to_tsquery('Imperial'))) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func oneToMany(t *testing.T) {
gql := `query {
users {
email
products {
name
price
}
}
}`
sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func belongsTo(t *testing.T) {
gql := `query {
products {
name
price
users {
email
}
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "users_1_join"."users" AS "users") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "users_1"."email" AS "email") AS "sel_1")) AS "sel_json_1" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "users_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func manyToMany(t *testing.T) {
gql := `query {
products {
name
customers {
email
full_name
}
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "customers_1_join"."customers" AS "customers") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "customers" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name") AS "sel_1")) AS "sel_json_1" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "customers_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func manyToManyReverse(t *testing.T) {
gql := `query {
customers {
email
full_name
products {
name
}
}
}`
sql := `SELECT json_object_agg('customers', customers) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "customers" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func aggFunction(t *testing.T) {
gql := `query {
products {
name
count_price
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func aggFunctionWithFilter(t *testing.T) {
gql := `query {
products(where: { id: { gt: 10 } }) {
id
max_price
}
}`
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") > 10)) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func queryWithVariables(t *testing.T) {
gql := `query {
product(id: $PRODUCT_ID, where: { price: { eq: $PRODUCT_PRICE } }) {
id
name
}
}`
sql := `SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") = {{product_price}}) AND (("products"."id") = {{product_id}})) LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func syntheticTables(t *testing.T) {
gql := `query {
me {
email
}
}`
sql := `SELECT json_object_agg('me', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = {{user_id}})) LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "done_1337";`
resSQL, err := compileGQLToPSQL(gql, nil)
if err != nil {
t.Fatal(err)
}
if string(resSQL) != sql {
t.Fatal(errNotExpected)
}
}
func TestCompileSelect(t *testing.T) {
t.Run("withComplexArgs", withComplexArgs)
t.Run("withWhereAndList", withWhereAndList)
t.Run("withWhereIsNull", withWhereIsNull)
t.Run("withWhereMultiOr", withWhereMultiOr)
t.Run("fetchByID", fetchByID)
t.Run("searchQuery", searchQuery)
t.Run("belongsTo", belongsTo)
t.Run("oneToMany", oneToMany)
t.Run("manyToMany", manyToMany)
t.Run("manyToManyReverse", manyToManyReverse)
t.Run("aggFunction", aggFunction)
t.Run("aggFunctionWithFilter", aggFunctionWithFilter)
t.Run("syntheticTables", syntheticTables)
t.Run("queryWithVariables", queryWithVariables)
}
var benchGQL = []byte(`query {
proDUcts(
# returns only 30 items
limit: 30,
# starts from item 10, commented out for now
# offset: 10,
# orders the response items by highest price
order_by: { price: desc },
# only items with an id >= 20 and < 28 are returned
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
id
NAME
price
user {
full_name
picture : avatar
}
}
}`)
func BenchmarkCompile(b *testing.B) {
w := &bytes.Buffer{}
b.ResetTimer()
b.ReportAllocs()
for n := 0; n < b.N; n++ {
w.Reset()
qc, err := qcompile.Compile(benchGQL)
if err != nil {
b.Fatal(err)
}
_, err = pcompile.Compile(qc, w, nil)
if err != nil {
b.Fatal(err)
}
}
}
func BenchmarkCompileParallel(b *testing.B) {
b.ReportAllocs()
b.RunParallel(func(pb *testing.PB) {
w := &bytes.Buffer{}
for pb.Next() {
w.Reset()
qc, err := qcompile.Compile(benchGQL)
if err != nil {
b.Fatal(err)
}
_, err = pcompile.Compile(qc, w, nil)
if err != nil {
b.Fatal(err)
}
}
})
}


@ -3,6 +3,7 @@ package psql
import ( import (
"context" "context"
"fmt" "fmt"
"strconv"
"strings" "strings"
"github.com/gobuffalo/flect" "github.com/gobuffalo/flect"
@ -144,9 +145,10 @@ ORDER BY id;`
} }
type DBSchema struct { type DBSchema struct {
t map[string]*DBTableInfo ver int
rm map[string]map[string]*DBRel t map[string]*DBTableInfo
al map[string]struct{} rm map[string]map[string]*DBRel
al map[string]struct{}
} }
type DBTableInfo struct { type DBTableInfo struct {
@ -184,10 +186,22 @@ func NewDBSchema(db *pgxpool.Pool, aliases map[string][]string) (*DBSchema, erro
dbc, err := db.Acquire(context.Background()) dbc, err := db.Acquire(context.Background())
if err != nil { if err != nil {
return nil, fmt.Errorf("error acquiring connection from pool") return nil, fmt.Errorf("error acquiring connection from pool: %w", err)
} }
defer dbc.Release() defer dbc.Release()
var version string
err = dbc.QueryRow(context.Background(), `SHOW server_version_num`).Scan(&version)
if err != nil {
return nil, fmt.Errorf("error fetching version: %w", err)
}
schema.ver, err = strconv.Atoi(version)
if err != nil {
return nil, err
}
tables, err := GetTables(dbc) tables, err := GetTables(dbc)
if err != nil { if err != nil {
return nil, err return nil, err
@ -199,7 +213,9 @@ func NewDBSchema(db *pgxpool.Pool, aliases map[string][]string) (*DBSchema, erro
return nil, err return nil, err
} }
schema.updateSchema(t, cols, aliases) if err := schema.updateSchema(t, cols, aliases); err != nil {
return nil, err
}
} }
return schema, nil return schema, nil
@ -208,7 +224,7 @@ func NewDBSchema(db *pgxpool.Pool, aliases map[string][]string) (*DBSchema, erro
func (s *DBSchema) updateSchema( func (s *DBSchema) updateSchema(
t *DBTable, t *DBTable,
cols []*DBColumn, cols []*DBColumn,
aliases map[string][]string) { aliases map[string][]string) error {
// Foreign key columns in current table // Foreign key columns in current table
colByID := make(map[int16]*DBColumn) colByID := make(map[int16]*DBColumn)
@ -281,12 +297,16 @@ func (s *DBSchema) updateSchema(
// Belongs-to relation between current table and the // Belongs-to relation between current table and the
// table in the foreign key // table in the foreign key
rel1 := &DBRel{RelBelongTo, "", "", c.Name, fc.Name} rel1 := &DBRel{RelBelongTo, "", "", c.Name, fc.Name}
s.SetRel(ct, ft, rel1) if err := s.SetRel(ct, ft, rel1); err != nil {
return err
}
// One-to-many relation between the foreign key table and the // One-to-many relation between the foreign key table and the
// the current table // the current table
rel2 := &DBRel{RelOneToMany, "", "", fc.Name, c.Name} rel2 := &DBRel{RelOneToMany, "", "", fc.Name, c.Name}
s.SetRel(ft, ct, rel2) if err := s.SetRel(ft, ct, rel2); err != nil {
return err
}
jcols = append(jcols, c) jcols = append(jcols, c)
} }
@ -301,42 +321,54 @@ func (s *DBSchema) updateSchema(
if len(jcols) > 1 { if len(jcols) > 1 {
for i := range jcols { for i := range jcols {
for n := range jcols { for n := range jcols {
if n != i { if n == i {
s.updateSchemaOTMT(ct, jcols[i], jcols[n], colByID) continue
}
err := s.updateSchemaOTMT(ct, jcols[i], jcols[n], colByID)
if err != nil {
return err
} }
} }
} }
} }
return nil
} }
func (s *DBSchema) updateSchemaOTMT( func (s *DBSchema) updateSchemaOTMT(
ct string, ct string,
col1, col2 *DBColumn, col1, col2 *DBColumn,
colByID map[int16]*DBColumn) { colByID map[int16]*DBColumn) error {
t1 := strings.ToLower(col1.FKeyTable) t1 := strings.ToLower(col1.FKeyTable)
t2 := strings.ToLower(col2.FKeyTable) t2 := strings.ToLower(col2.FKeyTable)
fc1, ok := colByID[col1.FKeyColID[0]] fc1, ok := colByID[col1.FKeyColID[0]]
if !ok { if !ok {
return return fmt.Errorf("expected column id '%d' not found", col1.FKeyColID[0])
} }
fc2, ok := colByID[col2.FKeyColID[0]] fc2, ok := colByID[col2.FKeyColID[0]]
if !ok { if !ok {
return return fmt.Errorf("expected column id '%d' not found", col2.FKeyColID[0])
} }
// One-to-many-through relation between 1st foreign key table and the // One-to-many-through relation between 1st foreign key table and the
// 2nd foreign key table // 2nd foreign key table
//rel1 := &DBRel{RelOneToManyThrough, ct, fc1.Name, col1.Name} //rel1 := &DBRel{RelOneToManyThrough, ct, fc1.Name, col1.Name}
rel1 := &DBRel{RelOneToManyThrough, ct, col2.Name, fc2.Name, col1.Name} rel1 := &DBRel{RelOneToManyThrough, ct, col2.Name, fc2.Name, col1.Name}
s.SetRel(t1, t2, rel1) if err := s.SetRel(t1, t2, rel1); err != nil {
return err
}
// One-to-many-through relation between 2nd foreign key table and the // One-to-many-through relation between 2nd foreign key table and the
// 1st foreign key table // 1st foreign key table
//rel2 := &DBRel{RelOneToManyThrough, ct, col2.Name, fc2.Name} //rel2 := &DBRel{RelOneToManyThrough, ct, col2.Name, fc2.Name}
rel2 := &DBRel{RelOneToManyThrough, ct, col1.Name, fc1.Name, col2.Name} rel2 := &DBRel{RelOneToManyThrough, ct, col1.Name, fc1.Name, col2.Name}
s.SetRel(t2, t1, rel2) if err := s.SetRel(t2, t1, rel2); err != nil {
return err
}
return nil
} }
func (s *DBSchema) GetTable(table string) (*DBTableInfo, error) { func (s *DBSchema) GetTable(table string) (*DBTableInfo, error) {

2
qcode/cleanup.sh Executable file

@ -0,0 +1,2 @@
#!/bin/sh
cd corpus && rm -rf $(find . -type f ! -name '00?.gql')

132
qcode/config.go Normal file

@ -0,0 +1,132 @@
package qcode
import (
"regexp"
"sort"
"strings"
)
type Config struct {
Blocklist []string
}
type QueryConfig struct {
Limit int
Filters []string
Columns []string
DisableFunctions bool
}
type InsertConfig struct {
Filters []string
Columns []string
Presets map[string]string
}
type UpdateConfig struct {
Filters []string
Columns []string
Presets map[string]string
}
type DeleteConfig struct {
Filters []string
Columns []string
}
type TRConfig struct {
Query QueryConfig
Insert InsertConfig
Update UpdateConfig
Delete DeleteConfig
}
type trval struct {
query struct {
limit string
fil *Exp
cols map[string]struct{}
disable struct {
funcs bool
}
}
insert struct {
fil *Exp
cols map[string]struct{}
psmap map[string]string
pslist []string
}
update struct {
fil *Exp
cols map[string]struct{}
psmap map[string]string
pslist []string
}
delete struct {
fil *Exp
cols map[string]struct{}
}
}
func (trv *trval) allowedColumns(qt QType) map[string]struct{} {
switch qt {
case QTQuery:
return trv.query.cols
case QTInsert:
return trv.insert.cols
case QTUpdate:
return trv.update.cols
case QTDelete:
return trv.delete.cols
case QTUpsert:
return trv.insert.cols
}
return nil
}
func (trv *trval) filter(qt QType) *Exp {
switch qt {
case QTQuery:
return trv.query.fil
case QTInsert:
return trv.insert.fil
case QTUpdate:
return trv.update.fil
case QTDelete:
return trv.delete.fil
case QTUpsert:
return trv.insert.fil
}
return nil
}
func listToMap(list []string) map[string]struct{} {
m := make(map[string]struct{}, len(list))
for i := range list {
m[strings.ToLower(list[i])] = struct{}{}
}
return m
}
func mapToList(m map[string]string) []string {
list := []string{}
for k := range m {
list = append(list, strings.ToLower(k))
}
sort.Strings(list)
return list
}
var varRe = regexp.MustCompile(`\$([a-zA-Z0-9_]+)`)
func parsePresets(m map[string]string) map[string]string {
for k, v := range m {
m[k] = varRe.ReplaceAllString(v, `{{$1}}`)
}
return m
}
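// For example parsePresets rewrites {"user_id": "$user_id", "created_at": "now"}
// in place to {"user_id": "{{user_id}}", "created_at": "now"}.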

21
qcode/corpus/001.gql Normal file

@ -0,0 +1,21 @@
query {
products(
# returns only 30 items
limit: 30,
# starts from item 10, commented out for now
# offset: 10,
# orders the response items by highest price
order_by: { price: desc },
# no duplicate prices returned
distinct: [ price ]
# only items with an id >= 20 and < 28 are returned
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
}
}

14
qcode/corpus/002.gql Normal file

@ -0,0 +1,14 @@
query {
products(
where: {
or: {
not: { id: { is_null: true } },
price: { gt: 10 },
price: { lt: 20 }
} }
) {
id
name
price
}
}

12
qcode/corpus/003.gql Normal file

@ -0,0 +1,12 @@
query {
products(
where: {
and: {
not: { id: { is_null: true } },
price: { gt: 10 }
}}) {
id
name
price
}
}


@ -1,14 +1,16 @@
// +build gofuzz
package qcode package qcode
// FuzzerEntrypoint for Fuzzbuzz // FuzzerEntrypoint for Fuzzbuzz
func FuzzerEntrypoint(data []byte) int { func Fuzz(data []byte) int {
//testData := string(data) GetQType(string(data))
qcompile, _ := NewCompiler(Config{}) qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile(data) _, err := qcompile.Compile(data, "user")
if err != nil { if err != nil {
return -1 return 0
} }
return 0 return 1
} }


@ -461,7 +461,7 @@ func (i *item) String() string {
case itemStringVal: case itemStringVal:
v = "string" v = "string"
} }
return fmt.Sprintf("%s", v) return v
} }
/* /*


@ -18,19 +18,21 @@ type parserType int32
const ( const (
maxFields = 100 maxFields = 100
maxArgs = 10 maxArgs = 10
)
const (
parserError parserType = iota parserError parserType = iota
parserEOF parserEOF
opQuery opQuery
opMutate opMutate
opSub opSub
nodeStr NodeStr
nodeInt NodeInt
nodeFloat NodeFloat
nodeBool NodeBool
nodeObj NodeObj
nodeList NodeList
nodeVar NodeVar
) )
type Operation struct { type Operation struct {
@ -83,7 +85,6 @@ type Parser struct {
input []byte // the string being scanned input []byte // the string being scanned
pos int pos int
items []item items []item
depth int
err error err error
} }
@ -146,24 +147,13 @@ func parseSelectionSet(gql []byte) (*Operation, error) {
if p.peek(itemObjOpen) { if p.peek(itemObjOpen) {
p.ignore() p.ignore()
} op, err = p.parseQueryOp()
if p.peek(itemName) {
op = opPool.Get().(*Operation)
op.Reset()
op.Type = opQuery
op.Name = ""
op.Fields = op.fieldsA[:0]
op.Args = op.argsA[:0]
op.Fields, err = p.parseFields(op.Fields)
} else { } else {
op, err = p.parseOp() op, err = p.parseOp()
}
if err != nil { if err != nil {
return nil, err return nil, err
}
} }
lexPool.Put(l) lexPool.Put(l)
@ -194,15 +184,6 @@ func (p *Parser) ignore() {
p.pos = n p.pos = n
} }
func (p *Parser) current() item {
return p.items[p.pos]
}
func (p *Parser) eof() bool {
n := p.pos + 1
return p.items[n].typ == itemEOF
}
func (p *Parser) peek(types ...itemType) bool { func (p *Parser) peek(types ...itemType) bool {
n := p.pos + 1 n := p.pos + 1
if p.items[n].typ == itemEOF { if p.items[n].typ == itemEOF {
@ -257,6 +238,37 @@ func (p *Parser) parseOp() (*Operation, error) {
if p.peek(itemObjOpen) { if p.peek(itemObjOpen) {
p.ignore() p.ignore()
for n := 0; n < 10; n++ {
if !p.peek(itemName) {
break
}
op.Fields, err = p.parseFields(op.Fields)
if err != nil {
return nil, err
}
}
}
return op, nil
}
func (p *Parser) parseQueryOp() (*Operation, error) {
op := opPool.Get().(*Operation)
op.Reset()
op.Type = opQuery
op.Fields = op.fieldsA[:0]
op.Args = op.argsA[:0]
var err error
for n := 0; n < 10; n++ {
if !p.peek(itemName) {
break
}
op.Fields, err = p.parseFields(op.Fields) op.Fields, err = p.parseFields(op.Fields)
if err != nil { if err != nil {
return nil, err return nil, err
@ -284,7 +296,7 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
continue continue
} }
if p.peek(itemName) == false { if !p.peek(itemName) {
return nil, errors.New("expecting an alias or field name") return nil, errors.New("expecting an alias or field name")
} }
@ -298,16 +310,12 @@ func (p *Parser) parseFields(fields []Field) ([]Field, error) {
return nil, err return nil, err
} }
if f.ID != 0 { intf := st.Peek()
intf := st.Peek() if pid, ok := intf.(int32); ok {
pid, ok := intf.(int32)
if !ok {
return nil, fmt.Errorf("14: unexpected value %v (%t)", intf, intf)
}
f.ParentID = pid f.ParentID = pid
fields[pid].Children = append(fields[pid].Children, f.ID) fields[pid].Children = append(fields[pid].Children, f.ID)
} else {
f.ParentID = -1
} }
if p.peek(itemObjOpen) { if p.peek(itemObjOpen) {
@ -356,13 +364,13 @@ func (p *Parser) parseArgs(args []Arg) ([]Arg, error) {
p.ignore() p.ignore()
break break
} }
if p.peek(itemName) == false { if !p.peek(itemName) {
return nil, errors.New("expecting an argument name") return nil, errors.New("expecting an argument name")
} }
args = append(args, Arg{Name: p.val(p.next())}) args = append(args, Arg{Name: p.val(p.next())})
arg := &args[(len(args) - 1)] arg := &args[(len(args) - 1)]
if p.peek(itemColon) == false { if !p.peek(itemColon) {
return nil, errors.New("missing ':' after argument name") return nil, errors.New("missing ':' after argument name")
} }
p.ignore() p.ignore()
@ -405,7 +413,7 @@ func (p *Parser) parseList() (*Node, error) {
return nil, errors.New("List cannot be empty") return nil, errors.New("List cannot be empty")
} }
parent.Type = nodeList parent.Type = NodeList
parent.Children = nodes parent.Children = nodes
return parent, nil return parent, nil
@ -423,12 +431,12 @@ func (p *Parser) parseObj() (*Node, error) {
break break
} }
if p.peek(itemName) == false { if !p.peek(itemName) {
return nil, errors.New("expecting an argument name") return nil, errors.New("expecting an argument name")
} }
nodeName := p.val(p.next()) nodeName := p.val(p.next())
if p.peek(itemColon) == false { if !p.peek(itemColon) {
return nil, errors.New("missing ':' after Field argument name") return nil, errors.New("missing ':' after Field argument name")
} }
p.ignore() p.ignore()
@ -442,7 +450,7 @@ func (p *Parser) parseObj() (*Node, error) {
nodes = append(nodes, node) nodes = append(nodes, node)
} }
parent.Type = nodeObj parent.Type = NodeObj
parent.Children = nodes parent.Children = nodes
return parent, nil return parent, nil
@ -465,17 +473,17 @@ func (p *Parser) parseValue() (*Node, error) {
switch item.typ { switch item.typ {
case itemIntVal: case itemIntVal:
node.Type = nodeInt node.Type = NodeInt
case itemFloatVal: case itemFloatVal:
node.Type = nodeFloat node.Type = NodeFloat
case itemStringVal: case itemStringVal:
node.Type = nodeStr node.Type = NodeStr
case itemBoolVal: case itemBoolVal:
node.Type = nodeBool node.Type = NodeBool
case itemName: case itemName:
node.Type = nodeStr node.Type = NodeStr
case itemVariable: case itemVariable:
node.Type = nodeVar node.Type = NodeVar
default: default:
return nil, fmt.Errorf("expecting a number, string, object, list or variable as an argument value (not %s)", p.val(p.next())) return nil, fmt.Errorf("expecting a number, string, object, list or variable as an argument value (not %s)", p.val(p.next()))
} }
@ -506,19 +514,19 @@ func (t parserType) String() string {
v = "mutation" v = "mutation"
case opSub: case opSub:
v = "subscription" v = "subscription"
case nodeStr: case NodeStr:
v = "node-string" v = "node-string"
case nodeInt: case NodeInt:
v = "node-int" v = "node-int"
case nodeFloat: case NodeFloat:
v = "node-float" v = "node-float"
case nodeBool: case NodeBool:
v = "node-bool" v = "node-bool"
case nodeVar: case NodeVar:
v = "node-var" v = "node-var"
case nodeObj: case NodeObj:
v = "node-obj" v = "node-obj"
case nodeList: case NodeList:
v = "node-list" v = "node-list"
} }
return fmt.Sprintf("<%s>", v) return fmt.Sprintf("<%s>", v)


@ -5,54 +5,22 @@ import (
"testing" "testing"
) )
/*
func compareOp(op1, op2 Operation) error {
if op1.Type != op2.Type {
return errors.New("operator type mismatch")
}
if op1.Name != op2.Name {
return errors.New("operator name mismatch")
}
if len(op1.Args) != len(op2.Args) {
return errors.New("operator args length mismatch")
}
for i := range op1.Args {
if !reflect.DeepEqual(op1.Args[i], op2.Args[i]) {
return fmt.Errorf("operator args: %v != %v", op1.Args[i], op2.Args[i])
}
}
if len(op1.Fields) != len(op2.Fields) {
return errors.New("operator field length mismatch")
}
for i := range op1.Fields {
if !reflect.DeepEqual(op1.Fields[i].Args, op2.Fields[i].Args) {
return fmt.Errorf("operator field args: %v != %v", op1.Fields[i].Args, op2.Fields[i].Args)
}
}
for i := range op1.Fields {
if !reflect.DeepEqual(op1.Fields[i].Children, op2.Fields[i].Children) {
return fmt.Errorf("operator field fields: %v != %v", op1.Fields[i].Children, op2.Fields[i].Children)
}
}
return nil
}
*/
func TestCompile1(t *testing.T) { func TestCompile1(t *testing.T) {
qcompile, _ := NewCompiler(Config{}) qc, _ := NewCompiler(Config{})
err := qc.AddRole("user", "product", TRConfig{
Query: QueryConfig{
Columns: []string{"id", "Name"},
},
})
if err != nil {
t.Error(err)
}
_, err := qcompile.Compile([]byte(` _, err = qc.Compile([]byte(`
product(id: 15) { { product(id: 15) {
id id
name name
}`)) } }`), "user")
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -60,13 +28,21 @@ func TestCompile1(t *testing.T) {
} }
func TestCompile2(t *testing.T) { func TestCompile2(t *testing.T) {
qcompile, _ := NewCompiler(Config{}) qc, _ := NewCompiler(Config{})
err := qc.AddRole("user", "product", TRConfig{
Query: QueryConfig{
Columns: []string{"ID"},
},
})
if err != nil {
t.Error(err)
}
_, err := qcompile.Compile([]byte(` _, err = qc.Compile([]byte(`
query { product(id: 15) { query { product(id: 15) {
id id
name name
} }`)) } }`), "user")
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -74,15 +50,23 @@ func TestCompile2(t *testing.T) {
} }
func TestCompile3(t *testing.T) { func TestCompile3(t *testing.T) {
qcompile, _ := NewCompiler(Config{}) qc, _ := NewCompiler(Config{})
err := qc.AddRole("user", "product", TRConfig{
Query: QueryConfig{
Columns: []string{"ID"},
},
})
if err != nil {
t.Error(err)
}
_, err := qcompile.Compile([]byte(` _, err = qc.Compile([]byte(`
mutation { mutation {
product(id: 15, name: "Test") { product(id: 15, name: "Test") {
id id
name name
} }
}`)) }`), "user")
if err != nil { if err != nil {
t.Fatal(err) t.Fatal(err)
@ -91,7 +75,7 @@ func TestCompile3(t *testing.T) {
func TestInvalidCompile1(t *testing.T) { func TestInvalidCompile1(t *testing.T) {
qcompile, _ := NewCompiler(Config{}) qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(`#`)) _, err := qcompile.Compile([]byte(`#`), "user")
if err == nil { if err == nil {
t.Fatal(errors.New("expecting an error")) t.Fatal(errors.New("expecting an error"))
@ -100,7 +84,7 @@ func TestInvalidCompile1(t *testing.T) {
func TestInvalidCompile2(t *testing.T) { func TestInvalidCompile2(t *testing.T) {
qcompile, _ := NewCompiler(Config{}) qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(`{u(where:{not:0})}`)) _, err := qcompile.Compile([]byte(`{u(where:{not:0})}`), "user")
if err == nil { if err == nil {
t.Fatal(errors.New("expecting an error")) t.Fatal(errors.New("expecting an error"))
@ -109,7 +93,7 @@ func TestInvalidCompile2(t *testing.T) {
func TestEmptyCompile(t *testing.T) { func TestEmptyCompile(t *testing.T) {
qcompile, _ := NewCompiler(Config{}) qcompile, _ := NewCompiler(Config{})
_, err := qcompile.Compile([]byte(``)) _, err := qcompile.Compile([]byte(``), "user")
if err == nil { if err == nil {
t.Fatal(errors.New("expecting an error")) t.Fatal(errors.New("expecting an error"))
@ -144,7 +128,7 @@ func BenchmarkQCompile(b *testing.B) {
b.ReportAllocs() b.ReportAllocs()
for n := 0; n < b.N; n++ { for n := 0; n < b.N; n++ {
_, err := qcompile.Compile(gql) _, err := qcompile.Compile(gql, "user")
if err != nil { if err != nil {
b.Fatal(err) b.Fatal(err)
@ -160,7 +144,7 @@ func BenchmarkQCompileP(b *testing.B) {
b.RunParallel(func(pb *testing.PB) { b.RunParallel(func(pb *testing.PB) {
for pb.Next() { for pb.Next() {
_, err := qcompile.Compile(gql) _, err := qcompile.Compile(gql, "user")
if err != nil { if err != nil {
b.Fatal(err) b.Fatal(err)
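
The tests above capture the new role-aware compiler API: build one Compiler, register per-role, per-table rules with AddRole, and pass the role into every Compile call. A minimal end-to-end sketch, assuming only the exported qcode names visible in this diff:

package main

import (
	"fmt"

	"github.com/dosco/super-graph/qcode"
)

func main() {
	qc, _ := qcode.NewCompiler(qcode.Config{})

	// what the "user" role may query on the product table
	err := qc.AddRole("user", "product", qcode.TRConfig{
		Query: qcode.QueryConfig{
			Columns: []string{"id", "name"},
			Limit:   10,
		},
	})
	if err != nil {
		panic(err)
	}

	// the role is now a required argument to Compile
	q, err := qc.Compile([]byte(`{ product(id: 15) { id name } }`), "user")
	if err != nil {
		panic(err)
	}
	fmt.Println(q.Type == qcode.QTQuery) // true
}
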


@ -3,6 +3,7 @@ package qcode
import ( import (
"errors" "errors"
"fmt" "fmt"
"strconv"
"strings" "strings"
"sync" "sync"
@ -15,19 +16,41 @@ type Action int
const ( const (
maxSelectors = 30 maxSelectors = 30
)
const (
QTQuery QType = iota + 1 QTQuery QType = iota + 1
QTMutation QTMutation
QTInsert
ActionInsert Action = iota + 1 QTUpdate
ActionUpdate QTDelete
ActionDelete QTUpsert
ActionUpsert
) )
type QCode struct { type QCode struct {
Type QType Type QType
Selects []Select ActionVar string
Selects []Select
Roots []int32
rootsA [5]int32
}
type Select struct {
ID int32
ParentID int32
Args map[string]*Node
Name string
FieldName string
Cols []Column
Where *Exp
OrderBy []*OrderBy
DistinctOn []string
Paging Paging
Children []int32
Functions bool
Allowed map[string]struct{}
PresetMap map[string]string
PresetList []string
} }
type Column struct { type Column struct {
@ -36,33 +59,17 @@ type Column struct {
FieldName string FieldName string
} }
type Select struct {
ID int32
ParentID int32
Args map[string]*Node
Table string
FieldName string
Cols []Column
Where *Exp
OrderBy []*OrderBy
DistinctOn []string
Paging Paging
Action Action
ActionVar string
Children []int32
}
type Exp struct { type Exp struct {
Op ExpOp Op ExpOp
Col string Col string
NestedCol bool NestedCols []string
Type ValType Type ValType
Val string Val string
ListType ValType ListType ValType
ListVal []string ListVal []string
Children []*Exp Children []*Exp
childrenA [5]*Exp childrenA [5]*Exp
doFree bool doFree bool
} }
var zeroExp = Exp{doFree: true} var zeroExp = Exp{doFree: true}
@ -77,8 +84,9 @@ type OrderBy struct {
} }
type Paging struct { type Paging struct {
Limit string Limit string
Offset string Offset string
NoLimit bool
} }
type ExpOp int type ExpOp int
@ -110,6 +118,7 @@ const (
OpIsNull OpIsNull
OpEqID OpEqID
OpTsQuery OpTsQuery
OpFalse
) )
type ValType int type ValType int
@ -145,37 +154,9 @@ const (
OrderDescNullsLast OrderDescNullsLast
) )
type Filters struct {
All map[string][]string
Query map[string][]string
Insert map[string][]string
Update map[string][]string
Delete map[string][]string
}
type Config struct {
DefaultFilter []string
FilterMap Filters
Blocklist []string
KeepArgs bool
}
type Compiler struct { type Compiler struct {
df *Exp tr map[string]map[string]*trval
fm struct {
all map[string]*Exp
query map[string]*Exp
insert map[string]*Exp
update map[string]*Exp
delete map[string]*Exp
}
bl map[string]struct{} bl map[string]struct{}
ka bool
}
var opMap = map[parserType]QType{
opQuery: QTQuery,
opMutate: QTMutation,
} }
var expPool = sync.Pool{ var expPool = sync.Pool{
@ -183,43 +164,12 @@ var expPool = sync.Pool{
} }
func NewCompiler(c Config) (*Compiler, error) { func NewCompiler(c Config) (*Compiler, error) {
var err error co := &Compiler{}
co := &Compiler{ka: c.KeepArgs} co.tr = make(map[string]map[string]*trval)
co.bl = make(map[string]struct{}, len(c.Blocklist)) co.bl = make(map[string]struct{}, len(c.Blocklist))
for i := range c.Blocklist { for i := range c.Blocklist {
co.bl[c.Blocklist[i]] = struct{}{} co.bl[strings.ToLower(c.Blocklist[i])] = struct{}{}
}
co.df, err = compileFilter(c.DefaultFilter)
if err != nil {
return nil, err
}
co.fm.all, err = buildFilters(c.FilterMap.All)
if err != nil {
return nil, err
}
co.fm.query, err = buildFilters(c.FilterMap.Query)
if err != nil {
return nil, err
}
co.fm.insert, err = buildFilters(c.FilterMap.Insert)
if err != nil {
return nil, err
}
co.fm.update, err = buildFilters(c.FilterMap.Update)
if err != nil {
return nil, err
}
co.fm.delete, err = buildFilters(c.FilterMap.Delete)
if err != nil {
return nil, err
} }
seedExp := [100]Exp{} seedExp := [100]Exp{}
@ -232,60 +182,102 @@ func NewCompiler(c Config) (*Compiler, error) {
return co, nil return co, nil
} }
func buildFilters(filMap map[string][]string) (map[string]*Exp, error) { func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
fm := make(map[string]*Exp, len(filMap)) var err error
trv := &trval{}
for k, v := range filMap { // query config
fil, err := compileFilter(v) trv.query.fil, err = compileFilter(trc.Query.Filters)
if err != nil { if err != nil {
return nil, err return err
} }
singular := flect.Singularize(k) if trc.Query.Limit > 0 {
plural := flect.Pluralize(k) trv.query.limit = strconv.Itoa(trc.Query.Limit)
}
trv.query.cols = listToMap(trc.Query.Columns)
trv.query.disable.funcs = trc.Query.DisableFunctions
fm[singular] = fil // insert config
fm[plural] = fil if trv.insert.fil, err = compileFilter(trc.Insert.Filters); err != nil {
return err
}
trv.insert.cols = listToMap(trc.Insert.Columns)
trv.insert.psmap = parsePresets(trc.Insert.Presets)
trv.insert.pslist = mapToList(trv.insert.psmap)
// update config
if trv.update.fil, err = compileFilter(trc.Update.Filters); err != nil {
return err
}
trv.update.cols = listToMap(trc.Update.Columns)
trv.update.psmap = parsePresets(trc.Update.Presets)
trv.update.pslist = mapToList(trv.update.psmap)
// delete config
if trv.delete.fil, err = compileFilter(trc.Delete.Filters); err != nil {
return err
}
trv.delete.cols = listToMap(trc.Delete.Columns)
singular := flect.Singularize(table)
plural := flect.Pluralize(table)
if _, ok := com.tr[role]; !ok {
com.tr[role] = make(map[string]*trval)
} }
return fm, nil com.tr[role][singular] = trv
com.tr[role][plural] = trv
return nil
} }
func (com *Compiler) Compile(query []byte) (*QCode, error) { func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
var qc QCode
var err error var err error
qc := QCode{Type: QTQuery}
qc.Roots = qc.rootsA[:0]
op, err := Parse(query) op, err := Parse(query)
if err != nil { if err != nil {
return nil, err return nil, err
} }
qc.Selects, err = com.compileQuery(op) if err = com.compileQuery(&qc, op, role); err != nil {
if err != nil {
return nil, err return nil, err
} }
if t, ok := opMap[op.Type]; ok {
qc.Type = t
} else {
return nil, fmt.Errorf("Unknown operation type %d", op.Type)
}
opPool.Put(op) opPool.Put(op)
return &qc, nil return &qc, nil
} }
func (com *Compiler) compileQuery(op *Operation) ([]Select, error) { func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
id := int32(0) id := int32(0)
parentID := int32(0) parentID := int32(-1)
if len(op.Fields) == 0 {
return errors.New("invalid graphql no query found")
}
if op.Type == opMutate {
if err := com.setMutationType(qc, op.Fields[0].Args); err != nil {
return err
}
}
selects := make([]Select, 0, 5) selects := make([]Select, 0, 5)
st := NewStack() st := NewStack()
action := qc.Type
if len(op.Fields) == 0 { if len(op.Fields) == 0 {
return nil, errors.New("empty query") return errors.New("empty query")
}
for i := range op.Fields {
if op.Fields[i].ParentID == -1 {
st.Push(op.Fields[i].ID)
}
} }
st.Push(op.Fields[0].ID)
for { for {
if st.Len() == 0 { if st.Len() == 0 {
@ -293,7 +285,7 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
} }
if id >= maxSelectors { if id >= maxSelectors {
return nil, fmt.Errorf("selector limit reached (%d)", maxSelectors) return fmt.Errorf("selector limit reached (%d)", maxSelectors)
} }
fid := st.Pop() fid := st.Pop()
@ -303,31 +295,55 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
continue continue
} }
trv := com.getRole(role, field.Name)
selects = append(selects, Select{ selects = append(selects, Select{
ID: id, ID: id,
ParentID: parentID, ParentID: parentID,
Table: field.Name, Name: field.Name,
Children: make([]int32, 0, 5), Children: make([]int32, 0, 5),
Allowed: trv.allowedColumns(action),
Functions: true,
}) })
s := &selects[(len(selects) - 1)] s := &selects[(len(selects) - 1)]
if s.ID != 0 { switch action {
p := &selects[s.ParentID] case QTQuery:
p.Children = append(p.Children, s.ID) s.Functions = !trv.query.disable.funcs
s.Paging.Limit = trv.query.limit
case QTInsert:
s.PresetMap = trv.insert.psmap
s.PresetList = trv.insert.pslist
case QTUpdate:
s.PresetMap = trv.update.psmap
s.PresetList = trv.update.pslist
} }
if len(field.Alias) != 0 { if len(field.Alias) != 0 {
s.FieldName = field.Alias s.FieldName = field.Alias
} else { } else {
s.FieldName = s.Table s.FieldName = s.Name
} }
err := com.compileArgs(s, field.Args) err := com.compileArgs(qc, s, field.Args)
if err != nil { if err != nil {
return nil, err return err
}
// Order is important: addFilters must come after compileArgs
com.addFilters(qc, s, role)
if s.ParentID == -1 {
qc.Roots = append(qc.Roots, s.ID)
} else {
p := &selects[s.ParentID]
p.Children = append(p.Children, s.ID)
} }
s.Cols = make([]Column, 0, len(field.Children)) s.Cols = make([]Column, 0, len(field.Children))
action = QTQuery
for _, cid := range field.Children { for _, cid := range field.Children {
f := op.Fields[cid] f := op.Fields[cid]
@ -356,105 +372,122 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
} }
if id == 0 { if id == 0 {
return nil, errors.New("invalid query") return errors.New("invalid query")
} }
var fil *Exp qc.Selects = selects[:id]
return nil
root := &selects[0]
switch op.Type {
case opQuery:
fil, _ = com.fm.query[root.Table]
case opMutate:
switch root.Action {
case ActionInsert:
fil, _ = com.fm.insert[root.Table]
case ActionUpdate:
fil, _ = com.fm.update[root.Table]
case ActionDelete:
fil, _ = com.fm.delete[root.Table]
case ActionUpsert:
fil, _ = com.fm.insert[root.Table]
}
}
if fil == nil {
fil, _ = com.fm.all[root.Table]
}
if fil == nil {
fil = com.df
}
if fil != nil && fil.Op != OpNop {
if root.Where != nil {
ow := root.Where
root.Where = expPool.Get().(*Exp)
root.Where.Reset()
root.Where.Op = OpAnd
root.Where.Children = root.Where.childrenA[:2]
root.Where.Children[0] = fil
root.Where.Children[1] = ow
} else {
root.Where = fil
}
}
return selects[:id], nil
} }
func (com *Compiler) compileArgs(sel *Select, args []Arg) error { func (com *Compiler) addFilters(qc *QCode, sel *Select, role string) {
var err error var fil *Exp
if com.ka { if trv, ok := com.tr[role][sel.Name]; ok {
sel.Args = make(map[string]*Node, len(args)) fil = trv.filter(qc.Type)
} else {
return
} }
if fil == nil {
return
}
switch fil.Op {
case OpNop:
case OpFalse:
sel.Where = fil
default:
if sel.Where != nil {
ow := sel.Where
sel.Where = expPool.Get().(*Exp)
sel.Where.Reset()
sel.Where.Op = OpAnd
sel.Where.Children = sel.Where.childrenA[:2]
sel.Where.Children[0] = fil
sel.Where.Children[1] = ow
} else {
sel.Where = fil
}
}
}
func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg) error {
var err error
var ka bool
for i := range args { for i := range args {
arg := &args[i] arg := &args[i]
switch arg.Name { switch arg.Name {
case "id": case "id":
if sel.ID == 0 { err, ka = com.compileArgID(sel, arg)
err = com.compileArgID(sel, arg)
}
case "search": case "search":
err = com.compileArgSearch(sel, arg) err, ka = com.compileArgSearch(sel, arg)
case "where": case "where":
err = com.compileArgWhere(sel, arg) err, ka = com.compileArgWhere(sel, arg)
case "orderby", "order_by", "order": case "orderby", "order_by", "order":
err = com.compileArgOrderBy(sel, arg) err, ka = com.compileArgOrderBy(sel, arg)
case "distinct_on", "distinct": case "distinct_on", "distinct":
err = com.compileArgDistinctOn(sel, arg) err, ka = com.compileArgDistinctOn(sel, arg)
case "limit": case "limit":
err = com.compileArgLimit(sel, arg) err, ka = com.compileArgLimit(sel, arg)
case "offset": case "offset":
err = com.compileArgOffset(sel, arg) err, ka = com.compileArgOffset(sel, arg)
case "insert": }
sel.Action = ActionInsert
err = com.compileArgAction(sel, arg) if !ka {
case "update": nodePool.Put(arg.Val)
sel.Action = ActionUpdate
err = com.compileArgAction(sel, arg)
case "upsert":
sel.Action = ActionUpsert
err = com.compileArgAction(sel, arg)
case "delete":
sel.Action = ActionDelete
err = com.compileArgAction(sel, arg)
} }
if err != nil { if err != nil {
return err return err
} }
}
if sel.Args != nil { return nil
sel.Args[arg.Name] = arg.Val }
} else {
nodePool.Put(arg.Val) func (com *Compiler) setMutationType(qc *QCode, args []Arg) error {
setActionVar := func(arg *Arg) error {
if arg.Val.Type != NodeVar {
return fmt.Errorf("value for argument '%s' must be a variable", arg.Name)
}
qc.ActionVar = arg.Val.Val
return nil
}
for i := range args {
arg := &args[i]
switch arg.Name {
case "insert":
qc.Type = QTInsert
return setActionVar(arg)
case "update":
qc.Type = QTUpdate
return setActionVar(arg)
case "upsert":
qc.Type = QTUpsert
return setActionVar(arg)
case "delete":
qc.Type = QTDelete
if arg.Val.Type != NodeBool {
return fmt.Errorf("value for argument '%s' must be a boolean", arg.Name)
}
if arg.Val.Val == "false" {
qc.Type = QTQuery
}
return nil
} }
} }
@ -462,7 +495,7 @@ func (com *Compiler) compileArgs(sel *Select, args []Arg) error {
} }
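
With setMutationType, the mutation kind is decided by the argument name on the root field rather than by per-select Action flags: insert, update and upsert require a variable value, whose name is stored in QCode.ActionVar, while delete takes a boolean and `delete: false` quietly degrades the request to a plain query. A sketch of the mapping, reusing the compiler from the tests above:

// insert/update/upsert must be given a variable; Compile records it in ActionVar
q, err := qc.Compile([]byte(`mutation { product(insert: $data) { id } }`), "user")
if err == nil {
	fmt.Println(q.Type == QTInsert) // true; q.ActionVar names the $data variable
}

// delete takes a boolean, and `delete: false` compiles as a plain QTQuery
q, _ = qc.Compile([]byte(`mutation { product(id: 15, delete: true) { id } }`), "user")
fmt.Println(q.Type == QTDelete) // true
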
func (com *Compiler) compileArgObj(st *util.Stack, arg *Arg) (*Exp, error) { func (com *Compiler) compileArgObj(st *util.Stack, arg *Arg) (*Exp, error) {
if arg.Val.Type != nodeObj { if arg.Val.Type != NodeObj {
return nil, fmt.Errorf("expecting an object") return nil, fmt.Errorf("expecting an object")
} }
@ -514,11 +547,6 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
} else { } else {
node.exp.Children = append(node.exp.Children, ex) node.exp.Children = append(node.exp.Children, ex)
} }
}
if com.ka {
return root, nil
} }
pushChild(st, nil, node) pushChild(st, nil, node)
@ -539,9 +567,13 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
return root, nil return root, nil
} }
func (com *Compiler) compileArgID(sel *Select, arg *Arg) error { func (com *Compiler) compileArgID(sel *Select, arg *Arg) (error, bool) {
if sel.ID != 0 {
return nil, false
}
if sel.Where != nil && sel.Where.Op == OpEqID { if sel.Where != nil && sel.Where.Op == OpEqID {
return nil return nil, false
} }
ex := expPool.Get().(*Exp) ex := expPool.Get().(*Exp)
@ -551,30 +583,41 @@ func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
ex.Val = arg.Val.Val ex.Val = arg.Val.Val
switch arg.Val.Type { switch arg.Val.Type {
case nodeStr: case NodeStr:
ex.Type = ValStr ex.Type = ValStr
case nodeInt: case NodeInt:
ex.Type = ValInt ex.Type = ValInt
case nodeFloat: case NodeFloat:
ex.Type = ValFloat ex.Type = ValFloat
case nodeVar: case NodeVar:
ex.Type = ValVar ex.Type = ValVar
default: default:
fmt.Errorf("expecting a string, int, float or variable") return fmt.Errorf("expecting a string, int, float or variable"), false
} }
sel.Where = ex sel.Where = ex
return nil return nil, false
} }
func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error { func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) (error, bool) {
ex := expPool.Get().(*Exp) ex := expPool.Get().(*Exp)
ex.Reset() ex.Reset()
ex.Op = OpTsQuery ex.Op = OpTsQuery
ex.Type = ValStr
ex.Val = arg.Val.Val ex.Val = arg.Val.Val
if arg.Val.Type == NodeVar {
ex.Type = ValVar
} else {
ex.Type = ValStr
}
if sel.Args == nil {
sel.Args = make(map[string]*Node)
}
sel.Args[arg.Name] = arg.Val
if sel.Where != nil { if sel.Where != nil {
ow := sel.Where ow := sel.Where
@ -587,16 +630,16 @@ func (com *Compiler) compileArgSearch(sel *Select, arg *Arg) error {
} else { } else {
sel.Where = ex sel.Where = ex
} }
return nil return nil, true
} }
func (com *Compiler) compileArgWhere(sel *Select, arg *Arg) error { func (com *Compiler) compileArgWhere(sel *Select, arg *Arg) (error, bool) {
st := util.NewStack() st := util.NewStack()
var err error var err error
ex, err := com.compileArgObj(st, arg) ex, err := com.compileArgObj(st, arg)
if err != nil { if err != nil {
return err return err, false
} }
if sel.Where != nil { if sel.Where != nil {
@ -612,12 +655,12 @@ func (com *Compiler) compileArgWhere(sel *Select, arg *Arg) error {
sel.Where = ex sel.Where = ex
} }
return nil return nil, false
} }
func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error { func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) (error, bool) {
if arg.Val.Type != nodeObj { if arg.Val.Type != NodeObj {
return fmt.Errorf("expecting an object") return fmt.Errorf("expecting an object"), false
} }
st := util.NewStack() st := util.NewStack()
@ -635,23 +678,19 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
node, ok := intf.(*Node) node, ok := intf.(*Node)
if !ok || node == nil { if !ok || node == nil {
return fmt.Errorf("17: unexpected value %v (%t)", intf, intf) return fmt.Errorf("17: unexpected value %v (%t)", intf, intf), false
} }
if _, ok := com.bl[node.Name]; ok { if _, ok := com.bl[node.Name]; ok {
if !com.ka { nodePool.Put(node)
nodePool.Put(node)
}
continue continue
} }
if node.Type == nodeObj { if node.Type == NodeObj {
for i := range node.Children { for i := range node.Children {
st.Push(node.Children[i]) st.Push(node.Children[i])
} }
if !com.ka { nodePool.Put(node)
nodePool.Put(node)
}
continue continue
} }
@ -671,85 +710,70 @@ func (com *Compiler) compileArgOrderBy(sel *Select, arg *Arg) error {
case "desc_nulls_last": case "desc_nulls_last":
ob.Order = OrderDescNullsLast ob.Order = OrderDescNullsLast
default: default:
return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first") return fmt.Errorf("valid values include asc, desc, asc_nulls_first and desc_nulls_first"), false
} }
setOrderByColName(ob, node) setOrderByColName(ob, node)
sel.OrderBy = append(sel.OrderBy, ob) sel.OrderBy = append(sel.OrderBy, ob)
nodePool.Put(node)
if !com.ka {
nodePool.Put(node)
}
} }
return nil return nil, false
} }
func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) error { func (com *Compiler) compileArgDistinctOn(sel *Select, arg *Arg) (error, bool) {
node := arg.Val node := arg.Val
if _, ok := com.bl[node.Name]; ok { if _, ok := com.bl[node.Name]; ok {
return nil return nil, false
} }
if node.Type != nodeList && node.Type != nodeStr { if node.Type != NodeList && node.Type != NodeStr {
return fmt.Errorf("expecting a list of strings or just a string") return fmt.Errorf("expecting a list of strings or just a string"), false
} }
if node.Type == nodeStr { if node.Type == NodeStr {
sel.DistinctOn = append(sel.DistinctOn, node.Val) sel.DistinctOn = append(sel.DistinctOn, node.Val)
} }
for i := range node.Children { for i := range node.Children {
sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val) sel.DistinctOn = append(sel.DistinctOn, node.Children[i].Val)
if !com.ka { nodePool.Put(node.Children[i])
nodePool.Put(node.Children[i])
}
} }
return nil return nil, false
} }
func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) error { func (com *Compiler) compileArgLimit(sel *Select, arg *Arg) (error, bool) {
node := arg.Val node := arg.Val
if node.Type != nodeInt { if node.Type != NodeInt {
return fmt.Errorf("expecting an integer") return fmt.Errorf("expecting an integer"), false
} }
sel.Paging.Limit = node.Val sel.Paging.Limit = node.Val
return nil return nil, false
} }
func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error { func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) (error, bool) {
node := arg.Val node := arg.Val
if node.Type != nodeInt { if node.Type != NodeInt {
return fmt.Errorf("expecting an integer") return fmt.Errorf("expecting an integer"), false
} }
sel.Paging.Offset = node.Val sel.Paging.Offset = node.Val
return nil return nil, false
} }
func (com *Compiler) compileArgAction(sel *Select, arg *Arg) error { var zeroTrv = &trval{}
switch sel.Action {
case ActionDelete:
if arg.Val.Type != nodeBool {
return fmt.Errorf("value for argument '%s' must be a boolean", arg.Name)
}
if arg.Val.Val == "false" {
sel.Action = 0
}
default: func (com *Compiler) getRole(role, field string) *trval {
if arg.Val.Type != nodeVar { if trv, ok := com.tr[role][field]; ok {
return fmt.Errorf("value for argument '%s' must be a variable", arg.Name) return trv
} } else {
sel.ActionVar = arg.Val.Val return zeroTrv
} }
return nil
} }
func newExp(st *util.Stack, node *Node, usePool bool) (*Exp, error) { func newExp(st *util.Stack, node *Node, usePool bool) (*Exp, error) {
@ -854,17 +878,17 @@ func newExp(st *util.Stack, node *Node, usePool bool) (*Exp, error) {
if ex.Op != OpAnd && ex.Op != OpOr && ex.Op != OpNot { if ex.Op != OpAnd && ex.Op != OpOr && ex.Op != OpNot {
switch node.Type { switch node.Type {
case nodeStr: case NodeStr:
ex.Type = ValStr ex.Type = ValStr
case nodeInt: case NodeInt:
ex.Type = ValInt ex.Type = ValInt
case nodeBool: case NodeBool:
ex.Type = ValBool ex.Type = ValBool
case nodeFloat: case NodeFloat:
ex.Type = ValFloat ex.Type = ValFloat
case nodeList: case NodeList:
ex.Type = ValList ex.Type = ValList
case nodeVar: case NodeVar:
ex.Type = ValVar ex.Type = ValVar
default: default:
return nil, fmt.Errorf("[Where] valid values include string, int, float, boolean and list: %s", node.Type) return nil, fmt.Errorf("[Where] valid values include string, int, float, boolean and list: %s", node.Type)
@ -878,13 +902,13 @@ func newExp(st *util.Stack, node *Node, usePool bool) (*Exp, error) {
func setListVal(ex *Exp, node *Node) { func setListVal(ex *Exp, node *Node) {
if len(node.Children) != 0 { if len(node.Children) != 0 {
switch node.Children[0].Type { switch node.Children[0].Type {
case nodeStr: case NodeStr:
ex.ListType = ValStr ex.ListType = ValStr
case nodeInt: case NodeInt:
ex.ListType = ValInt ex.ListType = ValInt
case nodeBool: case NodeBool:
ex.ListType = ValBool ex.ListType = ValBool
case nodeFloat: case NodeFloat:
ex.ListType = ValFloat ex.ListType = ValFloat
} }
} }
@ -897,7 +921,7 @@ func setWhereColName(ex *Exp, node *Node) {
var list []string var list []string
for n := node.Parent; n != nil; n = n.Parent { for n := node.Parent; n != nil; n = n.Parent {
if n.Type != nodeObj { if n.Type != NodeObj {
continue continue
} }
if len(n.Name) != 0 { if len(n.Name) != 0 {
@ -911,10 +935,8 @@ func setWhereColName(ex *Exp, node *Node) {
} }
if len(list) == 1 { if len(list) == 1 {
ex.Col = list[0] ex.Col = list[0]
} else if len(list) > 1 {
} else if len(list) > 2 { ex.NestedCols = list
ex.Col = buildPath(list)
ex.NestedCol = true
} }
} }
@ -954,6 +976,10 @@ func compileFilter(filter []string) (*Exp, error) {
} }
for i := range filter { for i := range filter {
if filter[i] == "false" {
return &Exp{Op: OpFalse, doFree: false}, nil
}
node, err := ParseArgValue(filter[i]) node, err := ParseArgValue(filter[i])
if err != nil { if err != nil {
return nil, err return nil, err
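
Note the special case at the top of compileFilter's loop: the literal filter string "false" is not parsed at all but compiled straight to an Exp with Op: OpFalse, and addFilters above assigns such an expression directly to the select's Where clause. A role filter of ["false"] is therefore a deny-all rule. A hedged sketch of configuring it through AddRole:

// deny the anon role every row of the users table (sketch; the SQL
// compiler is assumed to render OpFalse as an always-false predicate)
err := qc.AddRole("anon", "users", TRConfig{
	Query: QueryConfig{
		Filters: []string{"false"},
	},
})
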

qcode/utils.go Normal file

@ -0,0 +1,23 @@
package qcode
func GetQType(gql string) QType {
for i := range gql {
b := gql[i]
if b == '{' {
return QTQuery
}
if al(b) {
switch b {
case 'm', 'M':
return QTMutation
case 'q', 'Q':
return QTQuery
}
}
}
return -1
}
func al(b byte) bool {
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
}
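
GetQType is a cheap pre-parse classifier: it walks the raw bytes and decides on the first '{' or the first alphanumeric character it sees, so a request can be routed as a query or mutation without invoking the full parser. A quick sketch of the expected results (QType prints as its numeric value):

fmt.Println(GetQType(`query { users { id } }`))   // QTQuery
fmt.Println(GetQType(`{ users { id } }`))         // QTQuery: '{' is seen first
fmt.Println(GetQType(`mutation { user { id } }`)) // QTMutation
fmt.Println(GetQType(``))                         // -1: nothing to classify
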


@ -18,6 +18,8 @@ const (
) )
type allowItem struct { type allowItem struct {
name string
hash string
uri string uri string
gql string gql string
vars json.RawMessage vars json.RawMessage
@ -26,7 +28,8 @@ type allowItem struct {
var _allowList allowList var _allowList allowList
type allowList struct { type allowList struct {
list map[string]*allowItem list []*allowItem
index map[string]int
filepath string filepath string
saveChan chan *allowItem saveChan chan *allowItem
active bool active bool
@ -34,7 +37,7 @@ type allowList struct {
func initAllowList(cpath string) { func initAllowList(cpath string) {
_allowList = allowList{ _allowList = allowList{
list: make(map[string]*allowItem), index: make(map[string]int),
saveChan: make(chan *allowItem), saveChan: make(chan *allowItem),
active: true, active: true,
} }
@ -45,7 +48,7 @@ func initAllowList(cpath string) {
if _, err := os.Stat(fp); err == nil { if _, err := os.Stat(fp); err == nil {
_allowList.filepath = fp _allowList.filepath = fp
} else if !os.IsNotExist(err) { } else if !os.IsNotExist(err) {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
} }
@ -55,7 +58,7 @@ func initAllowList(cpath string) {
if _, err := os.Stat(fp); err == nil { if _, err := os.Stat(fp); err == nil {
_allowList.filepath = fp _allowList.filepath = fp
} else if !os.IsNotExist(err) { } else if !os.IsNotExist(err) {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
} }
@ -65,13 +68,13 @@ func initAllowList(cpath string) {
if _, err := os.Stat(fp); err == nil { if _, err := os.Stat(fp); err == nil {
_allowList.filepath = fp _allowList.filepath = fp
} else if !os.IsNotExist(err) { } else if !os.IsNotExist(err) {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
} }
if len(_allowList.filepath) == 0 { if len(_allowList.filepath) == 0 {
if conf.UseAllowList { if conf.Production {
logger.Fatal().Msg("allow.list not found") errlog.Fatal().Msg("allow.list not found")
} }
if len(cpath) == 0 { if len(cpath) == 0 {
@ -93,7 +96,7 @@ func initAllowList(cpath string) {
} }
func (al *allowList) add(req *gqlReq) { func (al *allowList) add(req *gqlReq) {
if al.active == false || len(req.ref) == 0 || len(req.Query) == 0 { if len(req.ref) == 0 || len(req.Query) == 0 {
return return
} }
@ -118,11 +121,39 @@ func (al *allowList) add(req *gqlReq) {
} }
} }
func (al *allowList) load() { func (al *allowList) upsert(query, vars []byte, uri string) {
if al.active == false { q := string(query)
return hash := gqlHash(q, vars, "")
name := gqlName(q)
var key string
if len(name) == 0 {
key = hash
} else {
key = name
} }
if i, ok := al.index[key]; !ok {
al.list = append(al.list, &allowItem{
name: name,
hash: hash,
uri: uri,
gql: q,
vars: vars,
})
al.index[key] = len(al.list) - 1
} else {
item := al.list[i]
item.name = name
item.hash = hash
item.gql = q
item.vars = vars
}
}
func (al *allowList) load() {
b, err := ioutil.ReadFile(al.filepath) b, err := ioutil.ReadFile(al.filepath)
if err != nil { if err != nil {
log.Fatal(err) log.Fatal(err)
@ -171,18 +202,7 @@ func (al *allowList) load() {
if c == 0 { if c == 0 {
if ty == AL_QUERY { if ty == AL_QUERY {
q := string(b[s:(e + 1)]) al.upsert(b[s:(e+1)], varBytes, uri)
item := &allowItem{
uri: uri,
gql: q,
}
if len(varBytes) != 0 {
item.vars = varBytes
}
al.list[gqlHash(q, varBytes)] = item
varBytes = nil varBytes = nil
} else if ty == AL_VARS { } else if ty == AL_VARS {
@ -200,10 +220,34 @@ func (al *allowList) load() {
} }
func (al *allowList) save(item *allowItem) { func (al *allowList) save(item *allowItem) {
if al.active == false { var err error
return
item.hash = gqlHash(item.gql, item.vars, "")
item.name = gqlName(item.gql)
if len(item.name) == 0 {
key := item.hash
if _, ok := al.index[key]; ok {
return
}
al.list = append(al.list, item)
al.index[key] = len(al.list) - 1
} else {
key := item.name
if i, ok := al.index[key]; ok {
if al.list[i].hash == item.hash {
return
}
al.list[i] = item
} else {
al.list = append(al.list, item)
al.index[key] = len(al.list) - 1
}
} }
al.list[gqlHash(item.gql, item.vars)] = item
f, err := os.Create(al.filepath) f, err := os.Create(al.filepath)
if err != nil { if err != nil {
@ -229,22 +273,35 @@ func (al *allowList) save(item *allowItem) {
k := keys[i] k := keys[i]
v := urlMap[k] v := urlMap[k]
f.WriteString(fmt.Sprintf("# %s\n\n", k)) if _, err := f.WriteString(fmt.Sprintf("# %s\n\n", k)); err != nil {
logger.Error().Err(err).Send()
return
}
for i := range v { for i := range v {
if len(v[i].vars) != 0 && bytes.Equal(v[i].vars, []byte("{}")) == false { if len(v[i].vars) != 0 && !bytes.Equal(v[i].vars, []byte("{}")) {
vj, err := json.MarshalIndent(v[i].vars, "", "\t") vj, err := json.MarshalIndent(v[i].vars, "", "\t")
if err != nil { if err != nil {
logger.Warn().Err(err).Msg("Failed to write allow list 'vars' to file") logger.Warn().Err(err).Msg("Failed to write allow list 'vars' to file")
continue continue
} }
f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
_, err = f.WriteString(fmt.Sprintf("variables %s\n\n", vj))
if err != nil {
logger.Error().Err(err).Send()
return
}
} }
if v[i].gql[0] == '{' { if v[i].gql[0] == '{' {
f.WriteString(fmt.Sprintf("query %s\n\n", v[i].gql)) _, err = f.WriteString(fmt.Sprintf("query %s\n\n", v[i].gql))
} else { } else {
f.WriteString(fmt.Sprintf("%s\n\n", v[i].gql)) _, err = f.WriteString(fmt.Sprintf("%s\n\n", v[i].gql))
}
if err != nil {
logger.Error().Err(err).Send()
return
} }
} }
} }
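
upsert and save share one keying rule, which is what keeps allow-list entries stable: an item is indexed by its operation name when gqlName finds one, and by its gqlHash otherwise. A short sketch of the consequence (the URL is illustrative):

// named query: keyed as "getUsers", so a later upsert with a changed body
// replaces the entry in place and its position in allow.list survives a save
al.upsert([]byte(`query getUsers { users { id } }`), nil, "http://localhost:8080/app")

// anonymous query: no name, so it is keyed by its content hash instead
al.upsert([]byte(`{ users { id email } }`), nil, "http://localhost:8080/app")
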

serv/args.go Normal file

@ -0,0 +1,94 @@
package serv
import (
"bytes"
"context"
"errors"
"fmt"
"io"
"github.com/dosco/super-graph/jsn"
)
func argMap(ctx context.Context, vars []byte) func(w io.Writer, tag string) (int, error) {
return func(w io.Writer, tag string) (int, error) {
switch tag {
case "user_id_provider":
if v := ctx.Value(userIDProviderKey); v != nil {
return io.WriteString(w, v.(string))
}
return 0, errors.New("query requires variable $user_id_provider")
case "user_id":
if v := ctx.Value(userIDKey); v != nil {
return io.WriteString(w, v.(string))
}
return 0, errors.New("query requires variable $user_id")
case "user_role":
if v := ctx.Value(userRoleKey); v != nil {
return io.WriteString(w, v.(string))
}
return 0, errors.New("query requires variable $user_role")
}
fields := jsn.Get(vars, [][]byte{[]byte(tag)})
if len(fields) == 0 {
return 0, nil
}
return w.Write(fields[0].Value)
}
}
func argList(ctx *coreContext, args [][]byte) ([]interface{}, error) {
vars := make([]interface{}, len(args))
var fields map[string]interface{}
var err error
if len(ctx.req.Vars) != 0 {
fields, _, err = jsn.Tree(ctx.req.Vars)
if err != nil {
return nil, err
}
}
for i := range args {
av := args[i]
switch {
case bytes.Equal(av, []byte("user_id")):
if v := ctx.Value(userIDKey); v != nil {
vars[i] = v.(string)
} else {
return nil, errors.New("query requires variable $user_id")
}
case bytes.Equal(av, []byte("user_id_provider")):
if v := ctx.Value(userIDProviderKey); v != nil {
vars[i] = v.(string)
} else {
return nil, errors.New("query requires variable $user_id_provider")
}
case bytes.Equal(av, []byte("user_role")):
if v := ctx.Value(userRoleKey); v != nil {
vars[i] = v.(string)
} else {
return nil, errors.New("query requires variable $user_role")
}
default:
if v, ok := fields[string(av)]; ok {
vars[i] = v
} else {
return nil, fmt.Errorf("query requires variable $%s", string(av))
}
}
}
return vars, nil
}
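
argMap returns a function with the fasttemplate.TagFunc shape, so a compiled SQL statement can carry placeholder tags that get resolved per request: the three well-known tags come off the request context, and any other tag is looked up in the request's JSON variables via jsn.Get. A minimal sketch of the intended wiring, mirroring its use in the seed command further down (openVar and closeVar are the tag delimiters defined elsewhere in this package):

buf := &bytes.Buffer{}
t := fasttemplate.New(st.sql, openVar, closeVar)
if _, err := t.ExecuteFunc(buf, argMap(ctx, req.Vars)); err != nil {
	return err
}
finalSQL := buf.String() // per-request SQL, ready to hand to the database
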


@ -7,28 +7,42 @@ import (
) )
var ( var (
userIDProviderKey = struct{}{} userIDProviderKey = "user_id_provider"
userIDKey = struct{}{} userIDKey = "user_id"
userRoleKey = "user_role"
) )
func headerAuth(r *http.Request, c *config) *http.Request { func headerAuth(next http.HandlerFunc) http.HandlerFunc {
if len(c.Auth.Header) == 0 { return func(w http.ResponseWriter, r *http.Request) {
return nil ctx := r.Context()
}
userID := r.Header.Get(c.Auth.Header) userIDProvider := r.Header.Get("X-User-ID-Provider")
if len(userID) != 0 { if len(userIDProvider) != 0 {
ctx := context.WithValue(r.Context(), userIDKey, userID) ctx = context.WithValue(ctx, userIDProviderKey, userIDProvider)
return r.WithContext(ctx) }
}
return nil userID := r.Header.Get("X-User-ID")
if len(userID) != 0 {
ctx = context.WithValue(ctx, userIDKey, userID)
}
userRole := r.Header.Get("X-User-Role")
if len(userRole) != 0 {
ctx = context.WithValue(ctx, userRoleKey, userRole)
}
next.ServeHTTP(w, r.WithContext(ctx))
}
} }
func withAuth(next http.HandlerFunc) http.HandlerFunc { func withAuth(next http.HandlerFunc) http.HandlerFunc {
at := conf.Auth.Type at := conf.Auth.Type
ru := conf.Auth.Rails.URL ru := conf.Auth.Rails.URL
if conf.Auth.CredsInHeader {
next = headerAuth(next)
}
switch at { switch at {
case "rails": case "rails":
if strings.HasPrefix(ru, "memcache:") { if strings.HasPrefix(ru, "memcache:") {
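
headerAuth is now an ordinary middleware that lifts three trusted headers into the request context; withAuth chains it in front of the configured auth handler whenever the CredsInHeader flag is set. A hedged sketch of a request that exercises it (the endpoint path is illustrative; the header names are exactly those read above):

req, _ := http.NewRequest("POST", "/api/v1/graphql", body)
req.Header.Set("X-User-ID-Provider", "auth0")
req.Header.Set("X-User-ID", "42")
req.Header.Set("X-User-Role", "admin")
// downstream code can now read userIDKey, userIDProviderKey and
// userRoleKey from the request context
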


@ -11,8 +11,7 @@ import (
const ( const (
authHeader = "Authorization" authHeader = "Authorization"
jwtBase int = iota jwtAuth0 int = iota + 1
jwtAuth0
) )
func jwtHandler(next http.HandlerFunc) http.HandlerFunc { func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
@ -35,7 +34,7 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
case len(publicKeyFile) != 0: case len(publicKeyFile) != 0:
kd, err := ioutil.ReadFile(publicKeyFile) kd, err := ioutil.ReadFile(publicKeyFile)
if err != nil { if err != nil {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
switch conf.Auth.JWT.PubKeyType { switch conf.Auth.JWT.PubKeyType {
@ -51,18 +50,13 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
} }
if err != nil { if err != nil {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
} }
return func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) {
var tok string var tok string
if rn := headerAuth(r, conf); rn != nil {
next.ServeHTTP(w, rn)
return
}
if len(cookie) != 0 { if len(cookie) != 0 {
ck, err := r.Cookie(cookie) ck, err := r.Cookie(cookie)
if err != nil { if err != nil {
@ -100,7 +94,9 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
} else { } else {
ctx = context.WithValue(ctx, userIDKey, claims.Subject) ctx = context.WithValue(ctx, userIDKey, claims.Subject)
} }
next.ServeHTTP(w, r.WithContext(ctx)) next.ServeHTTP(w, r.WithContext(ctx))
return
} }
next.ServeHTTP(w, r) next.ServeHTTP(w, r)


@ -15,11 +15,11 @@ import (
func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc { func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
cookie := conf.Auth.Cookie cookie := conf.Auth.Cookie
if len(cookie) == 0 { if len(cookie) == 0 {
logger.Fatal().Msg("no auth.cookie defined") errlog.Fatal().Msg("no auth.cookie defined")
} }
if len(conf.Auth.Rails.URL) == 0 { if len(conf.Auth.Rails.URL) == 0 {
logger.Fatal().Msg("no auth.rails.url defined") errlog.Fatal().Msg("no auth.rails.url defined")
} }
rp := &redis.Pool{ rp := &redis.Pool{
@ -28,13 +28,13 @@ func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
Dial: func() (redis.Conn, error) { Dial: func() (redis.Conn, error) {
c, err := redis.DialURL(conf.Auth.Rails.URL) c, err := redis.DialURL(conf.Auth.Rails.URL)
if err != nil { if err != nil {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
pwd := conf.Auth.Rails.Password pwd := conf.Auth.Rails.Password
if len(pwd) != 0 { if len(pwd) != 0 {
if _, err := c.Do("AUTH", pwd); err != nil { if _, err := c.Do("AUTH", pwd); err != nil {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
} }
return c, err return c, err
@ -42,11 +42,6 @@ func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
} }
return func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) {
if rn := headerAuth(r, conf); rn != nil {
next.ServeHTTP(w, rn)
return
}
ck, err := r.Cookie(cookie) ck, err := r.Cookie(cookie)
if err != nil { if err != nil {
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
@ -74,26 +69,21 @@ func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
func railsMemcacheHandler(next http.HandlerFunc) http.HandlerFunc { func railsMemcacheHandler(next http.HandlerFunc) http.HandlerFunc {
cookie := conf.Auth.Cookie cookie := conf.Auth.Cookie
if len(cookie) == 0 { if len(cookie) == 0 {
logger.Fatal().Msg("no auth.cookie defined") errlog.Fatal().Msg("no auth.cookie defined")
} }
if len(conf.Auth.Rails.URL) == 0 { if len(conf.Auth.Rails.URL) == 0 {
logger.Fatal().Msg("no auth.rails.url defined") errlog.Fatal().Msg("no auth.rails.url defined")
} }
rURL, err := url.Parse(conf.Auth.Rails.URL) rURL, err := url.Parse(conf.Auth.Rails.URL)
if err != nil { if err != nil {
logger.Fatal().Err(err) errlog.Fatal().Err(err).Send()
} }
mc := memcache.New(rURL.Host) mc := memcache.New(rURL.Host)
return func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) {
if rn := headerAuth(r, conf); rn != nil {
next.ServeHTTP(w, rn)
return
}
ck, err := r.Cookie(cookie) ck, err := r.Cookie(cookie)
if err != nil { if err != nil {
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
@ -121,30 +111,25 @@ func railsMemcacheHandler(next http.HandlerFunc) http.HandlerFunc {
func railsCookieHandler(next http.HandlerFunc) http.HandlerFunc { func railsCookieHandler(next http.HandlerFunc) http.HandlerFunc {
cookie := conf.Auth.Cookie cookie := conf.Auth.Cookie
if len(cookie) == 0 { if len(cookie) == 0 {
logger.Fatal().Msg("no auth.cookie defined") errlog.Fatal().Msg("no auth.cookie defined")
} }
ra, err := railsAuth(conf) ra, err := railsAuth(conf)
if err != nil { if err != nil {
logger.Fatal().Err(err) errlog.Fatal().Err(err).Send()
} }
return func(w http.ResponseWriter, r *http.Request) { return func(w http.ResponseWriter, r *http.Request) {
if rn := headerAuth(r, conf); rn != nil {
next.ServeHTTP(w, rn)
return
}
ck, err := r.Cookie(cookie) ck, err := r.Cookie(cookie)
if err != nil { if err != nil || len(ck.Value) == 0 {
logger.Error().Err(err) logger.Warn().Err(err).Msg("rails cookie missing")
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
return return
} }
userID, err := ra.ParseCookie(ck.Value) userID, err := ra.ParseCookie(ck.Value)
if err != nil { if err != nil {
logger.Error().Err(err) logger.Warn().Err(err).Msg("failed to parse rails cookie")
next.ServeHTTP(w, r) next.ServeHTTP(w, r)
return return
} }


@ -4,13 +4,12 @@ import (
"context" "context"
"fmt" "fmt"
"os" "os"
"runtime"
"strings" "strings"
"github.com/dosco/super-graph/psql" "github.com/dosco/super-graph/psql"
"github.com/dosco/super-graph/qcode" "github.com/dosco/super-graph/qcode"
"github.com/gobuffalo/flect"
"github.com/jackc/pgx/v4" "github.com/jackc/pgx/v4"
"github.com/jackc/pgx/v4/log/zerologadapter"
"github.com/jackc/pgx/v4/pgxpool" "github.com/jackc/pgx/v4/pgxpool"
"github.com/rs/zerolog" "github.com/rs/zerolog"
"github.com/spf13/cobra" "github.com/spf13/cobra"
@ -21,28 +20,33 @@ import (
const ( const (
serverName = "Super Graph" serverName = "Super Graph"
authFailBlockAlways = iota + 1
authFailBlockPerQuery
authFailBlockNever
) )
var ( var (
logger *zerolog.Logger // These variables are set using -ldflags
conf *config version string
confPath string gitBranch string
db *pgxpool.Pool lastCommitSHA string
qcompile *qcode.Compiler lastCommitTime string
pcompile *psql.Compiler )
authFailBlock int
var (
logger zerolog.Logger // logger for everything but errors
errlog zerolog.Logger // logger for errors includes line numbers
conf *config // parsed config
confPath string // path to the config file
db *pgxpool.Pool // database connection pool
schema *psql.DBSchema // database tables, columns and relationships
qcompile *qcode.Compiler // qcode compiler
pcompile *psql.Compiler // postgres sql compiler
) )
func Init() { func Init() {
logger = initLog() initLog()
rootCmd := &cobra.Command{ rootCmd := &cobra.Command{
Use: "super-graph", Use: "super-graph",
Short: "An instant high-performance GraphQL API. No code needed. https://supergraph.dev", Short: BuildDetails(),
} }
rootCmd.AddCommand(&cobra.Command{ rootCmd.AddCommand(&cobra.Command{
@ -117,6 +121,13 @@ e.g. db:migrate -+1
Run: cmdDBSetup, Run: cmdDBSetup,
}) })
rootCmd.AddCommand(&cobra.Command{
Use: "db:reset",
Short: "Reset database",
Long: "This command will drop, create, migrate and seed the database (won't run in production)",
Run: cmdDBReset,
})
rootCmd.AddCommand(&cobra.Command{ rootCmd.AddCommand(&cobra.Command{
Use: "new APP-NAME", Use: "new APP-NAME",
Short: "Create a new application", Short: "Create a new application",
@ -131,61 +142,66 @@ e.g. db:migrate -+1
Run: cmdConfDump, Run: cmdConfDump,
}) })
rootCmd.AddCommand(&cobra.Command{
Use: "version",
Short: "Super Graph binary version information",
Run: cmdVersion,
})
rootCmd.Flags().StringVar(&confPath, rootCmd.Flags().StringVar(&confPath,
"path", "./config", "path to config files") "path", "./config", "path to config files")
if err := rootCmd.Execute(); err != nil { if err := rootCmd.Execute(); err != nil {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
} }
func initLog() *zerolog.Logger { func initLog() {
out := zerolog.ConsoleWriter{Out: os.Stderr} out := zerolog.ConsoleWriter{Out: os.Stderr}
logger := zerolog.New(out). logger = zerolog.New(out).With().Timestamp().Logger()
With(). errlog = logger.With().Caller().Logger()
Timestamp().
Caller().
Logger()
return &logger
} }
func initConf() (*config, error) { func initConf() (*config, error) {
vi := newConfig() vi := newConfig(getConfigName())
if err := vi.ReadInConfig(); err != nil { if err := vi.ReadInConfig(); err != nil {
return nil, err return nil, err
} }
inherits := vi.GetString("inherits")
if len(inherits) != 0 {
vi = newConfig(inherits)
if err := vi.ReadInConfig(); err != nil {
return nil, err
}
if vi.IsSet("inherits") {
errlog.Fatal().Msgf("inherited config (%s) cannot itself inherit (%s)",
inherits,
vi.GetString("inherits"))
}
vi.SetConfigName(getConfigName())
if err := vi.MergeInConfig(); err != nil {
return nil, err
}
}
c := &config{} c := &config{}
if err := vi.Unmarshal(c); err != nil { if err := c.Init(vi); err != nil {
return nil, fmt.Errorf("unable to decode config, %v", err) return nil, fmt.Errorf("unable to decode config, %v", err)
} }
if len(c.Tables) == 0 {
c.Tables = c.DB.Tables
}
for k, v := range c.Inflections {
flect.AddPlural(k, v)
}
for i := range c.Tables {
t := c.Tables[i]
t.Name = flect.Pluralize(strings.ToLower(t.Name))
}
authFailBlock = getAuthFailBlock(c)
logLevel, err := zerolog.ParseLevel(c.LogLevel) logLevel, err := zerolog.ParseLevel(c.LogLevel)
if err != nil { if err != nil {
logger.Error().Err(err).Msg("error setting log_level") errlog.Error().Err(err).Msg("error setting log_level")
} }
zerolog.SetGlobalLevel(logLevel) zerolog.SetGlobalLevel(logLevel)
//fmt.Printf("%#v", c)
return c, nil return c, nil
} }
@ -217,7 +233,7 @@ func initDB(c *config, useDB bool) (*pgx.Conn, error) {
config.LogLevel = pgx.LogLevelNone config.LogLevel = pgx.LogLevelNone
} }
config.Logger = zerologadapter.NewLogger(*logger) config.Logger = NewSQLLogger(logger)
db, err := pgx.ConnectConfig(context.Background(), config) db, err := pgx.ConnectConfig(context.Background(), config)
if err != nil { if err != nil {
@ -252,7 +268,7 @@ func initDBPool(c *config) (*pgxpool.Pool, error) {
config.ConnConfig.LogLevel = pgx.LogLevelNone config.ConnConfig.LogLevel = pgx.LogLevelNone
} }
config.ConnConfig.Logger = zerologadapter.NewLogger(*logger) config.ConnConfig.Logger = NewSQLLogger(logger)
// if c.DB.MaxRetries != 0 { // if c.DB.MaxRetries != 0 {
// opt.MaxRetries = c.DB.MaxRetries // opt.MaxRetries = c.DB.MaxRetries
@ -275,10 +291,44 @@ func initCompiler() {
qcompile, pcompile, err = initCompilers(conf) qcompile, pcompile, err = initCompilers(conf)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to initialize compilers") errlog.Fatal().Err(err).Msg("failed to initialize compilers")
} }
if err := initResolvers(); err != nil { if err := initResolvers(); err != nil {
logger.Fatal().Err(err).Msg("failed to initialized resolvers") errlog.Fatal().Err(err).Msg("failed to initialized resolvers")
} }
} }
func initConfOnce() {
var err error
if conf == nil {
if conf, err = initConf(); err != nil {
errlog.Fatal().Err(err).Msg("failed to read config")
}
}
}
func cmdVersion(cmd *cobra.Command, args []string) {
fmt.Printf("%s\n", BuildDetails())
}
func BuildDetails() string {
return fmt.Sprintf(`
Super Graph %v
For documentation, visit https://supergraph.dev
Commit SHA-1 : %v
Commit timestamp : %v
Branch : %v
Go version : %v
Licensed under the Apache License 2.0
Copyright 2015-2019 Vikram Rangnekar.
`,
version,
lastCommitSHA,
lastCommitTime,
gitBranch,
runtime.Version())
}
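
Since version, gitBranch, lastCommitSHA and lastCommitTime are only set via -ldflags, a release build has to inject them with one -X flag per variable, along the lines of go build -ldflags "-X github.com/dosco/super-graph/serv.version=v0.12". The exact package path is an assumption based on the imports above; the Makefile added in this changeset presumably wires these flags up.
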


@ -9,20 +9,19 @@ import (
func cmdConfDump(cmd *cobra.Command, args []string) { func cmdConfDump(cmd *cobra.Command, args []string) {
if len(args) != 1 { if len(args) != 1 {
cmd.Help() cmd.Help() //nolint: errcheck
os.Exit(1) os.Exit(1)
} }
fname := fmt.Sprintf("%s.%s", getConfigName(), args[0]) fname := fmt.Sprintf("%s.%s", getConfigName(), args[0])
vi := newConfig() conf, err := initConf()
if err != nil {
if err := vi.ReadInConfig(); err != nil { errlog.Fatal().Err(err).Msg("failed to read config")
logger.Fatal().Err(err).Send()
} }
if err := vi.WriteConfigAs(fname); err != nil { if err := conf.Viper.WriteConfigAs(fname); err != nil {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
logger.Info().Msgf("config dumped to ./%s", fname) logger.Info().Msgf("config dumped to ./%s", fname)


@ -14,19 +14,6 @@ import (
"github.com/spf13/cobra" "github.com/spf13/cobra"
) )
var sampleMigration = `-- This is a sample migration.
create table users(
id serial primary key,
fullname varchar not null,
email varchar not null
);
---- create above / drop below ----
drop table users;
`
var newMigrationText = `-- Write your migrate up statements here var newMigrationText = `-- Write your migrate up statements here
---- create above / drop below ---- ---- create above / drop below ----
@ -36,6 +23,7 @@ var newMigrationText = `-- Write your migrate up statements here
` `
func cmdDBSetup(cmd *cobra.Command, args []string) { func cmdDBSetup(cmd *cobra.Command, args []string) {
initConfOnce()
cmdDBCreate(cmd, []string{}) cmdDBCreate(cmd, []string{})
cmdDBMigrate(cmd, []string{"up"}) cmdDBMigrate(cmd, []string{"up"})
@ -47,25 +35,31 @@ func cmdDBSetup(cmd *cobra.Command, args []string) {
return return
} }
if os.IsNotExist(err) == false { if !os.IsNotExist(err) {
logger.Fatal().Err(err).Msgf("unable to check if '%s' exists", sfile) errlog.Fatal().Err(err).Msgf("unable to check if '%s' exists", sfile)
} }
logger.Warn().Msgf("failed to read seed file '%s'", sfile) logger.Warn().Msgf("failed to read seed file '%s'", sfile)
} }
func cmdDBCreate(cmd *cobra.Command, args []string) { func cmdDBReset(cmd *cobra.Command, args []string) {
var err error initConfOnce()
if conf, err = initConf(); err != nil { if conf.Production {
logger.Fatal().Err(err).Msg("failed to read config") errlog.Fatal().Msg("db:reset does not work in production")
return
} }
cmdDBDrop(cmd, []string{})
cmdDBSetup(cmd, []string{})
}
func cmdDBCreate(cmd *cobra.Command, args []string) {
initConfOnce()
ctx := context.Background() ctx := context.Background()
conn, err := initDB(conf, false) conn, err := initDB(conf, false)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to connect to database") errlog.Fatal().Err(err).Msg("failed to connect to database")
} }
defer conn.Close(ctx) defer conn.Close(ctx)
@ -73,24 +67,19 @@ func cmdDBCreate(cmd *cobra.Command, args []string) {
_, err = conn.Exec(ctx, sql) _, err = conn.Exec(ctx, sql)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to create database") errlog.Fatal().Err(err).Msg("failed to create database")
} }
logger.Info().Msgf("created database '%s'", conf.DB.DBName) logger.Info().Msgf("created database '%s'", conf.DB.DBName)
} }
func cmdDBDrop(cmd *cobra.Command, args []string) { func cmdDBDrop(cmd *cobra.Command, args []string) {
var err error initConfOnce()
if conf, err = initConf(); err != nil {
logger.Fatal().Err(err).Msg("failed to read config")
}
ctx := context.Background() ctx := context.Background()
conn, err := initDB(conf, false) conn, err := initDB(conf, false)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to connect to database") errlog.Fatal().Err(err).Msg("failed to connect to database")
} }
defer conn.Close(ctx) defer conn.Close(ctx)
@ -98,7 +87,7 @@ func cmdDBDrop(cmd *cobra.Command, args []string) {
_, err = conn.Exec(ctx, sql) _, err = conn.Exec(ctx, sql)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to create database") errlog.Fatal().Err(err).Msg("failed to create database")
} }
logger.Info().Msgf("dropped database '%s'", conf.DB.DBName) logger.Info().Msgf("dropped database '%s'", conf.DB.DBName)
@ -106,16 +95,11 @@ func cmdDBDrop(cmd *cobra.Command, args []string) {
func cmdDBNew(cmd *cobra.Command, args []string) { func cmdDBNew(cmd *cobra.Command, args []string) {
if len(args) != 1 { if len(args) != 1 {
cmd.Help() cmd.Help() //nolint: errcheck
os.Exit(1) os.Exit(1)
} }
var err error initConfOnce()
if conf, err = initConf(); err != nil {
logger.Fatal().Err(err).Msg("failed to read config")
}
name := args[0] name := args[0]
m, err := migrate.FindMigrations(conf.MigrationsPath) m, err := migrate.FindMigrations(conf.MigrationsPath)
@ -124,7 +108,7 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
os.Exit(1) os.Exit(1)
} }
mname := fmt.Sprintf("%03d_%s.sql", len(m)+100, name) mname := fmt.Sprintf("%d_%s.sql", len(m), name)
// Write new migration // Write new migration
mpath := filepath.Join(conf.MigrationsPath, mname) mpath := filepath.Join(conf.MigrationsPath, mname)
@ -144,39 +128,34 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
} }
func cmdDBMigrate(cmd *cobra.Command, args []string) { func cmdDBMigrate(cmd *cobra.Command, args []string) {
var err error
if len(args) == 0 { if len(args) == 0 {
cmd.Help() cmd.Help() //nolint: errcheck
os.Exit(1) os.Exit(1)
} }
initConfOnce()
dest := args[0] dest := args[0]
if conf, err = initConf(); err != nil {
logger.Fatal().Err(err).Msg("failed to read config")
}
conn, err := initDB(conf, true) conn, err := initDB(conf, true)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to connect to database") errlog.Fatal().Err(err).Msg("failed to connect to database")
} }
defer conn.Close(context.Background()) defer conn.Close(context.Background())
m, err := migrate.NewMigrator(conn, "schema_version") m, err := migrate.NewMigrator(conn, "schema_version")
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to initializing migrator") errlog.Fatal().Err(err).Msg("failed to initializing migrator")
} }
m.Data = getMigrationVars() m.Data = getMigrationVars()
err = m.LoadMigrations(conf.MigrationsPath) err = m.LoadMigrations(conf.MigrationsPath)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to load migrations") errlog.Fatal().Err(err).Msg("failed to load migrations")
} }
if len(m.Migrations) == 0 { if len(m.Migrations) == 0 {
logger.Fatal().Msg("No migrations found") errlog.Fatal().Msg("No migrations found")
} }
m.OnStart = func(sequence int32, name, direction, sql string) { m.OnStart = func(sequence int32, name, direction, sql string) {
@ -195,7 +174,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
var n int64 var n int64
n, err = strconv.ParseInt(d, 10, 32) n, err = strconv.ParseInt(d, 10, 32)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("invalid destination") errlog.Fatal().Err(err).Msg("invalid destination")
} }
return int32(n) return int32(n)
} }
@ -219,24 +198,22 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
err = m.MigrateTo(currentVersion + mustParseDestination(dest[1:])) err = m.MigrateTo(currentVersion + mustParseDestination(dest[1:]))
} else { } else {
cmd.Help() cmd.Help() //nolint: errcheck
os.Exit(1) os.Exit(1)
} }
if err != nil { if err != nil {
logger.Info().Err(err).Send() logger.Info().Err(err).Send()
// logger.Info().Err(err).Send()
// if err, ok := err.(m.MigrationPgError); ok { // if err, ok := err.(m.MigrationPgError); ok {
// if err.Detail != "" { // if err.Detail != "" {
// logger.Info().Err(err).Msg(err.Detail) // info.Err(err).Msg(err.Detail)
// } // }
// if err.Position != 0 { // if err.Position != 0 {
// ele, err := ExtractErrorLine(err.Sql, int(err.Position)) // ele, err := ExtractErrorLine(err.Sql, int(err.Position))
// if err != nil { // if err != nil {
// logger.Fatal().Err(err).Send() // errlog.Fatal().Err(err).Send()
// } // }
// prefix := fmt.Sprintf() // prefix := fmt.Sprintf()
@ -251,37 +228,33 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
} }
func cmdDBStatus(cmd *cobra.Command, args []string) { func cmdDBStatus(cmd *cobra.Command, args []string) {
var err error initConfOnce()
if conf, err = initConf(); err != nil {
logger.Fatal().Err(err).Msg("failed to read config")
}
conn, err := initDB(conf, true) conn, err := initDB(conf, true)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to connect to database") errlog.Fatal().Err(err).Msg("failed to connect to database")
} }
defer conn.Close(context.Background()) defer conn.Close(context.Background())
m, err := migrate.NewMigrator(conn, "schema_version") m, err := migrate.NewMigrator(conn, "schema_version")
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to initialize migrator") errlog.Fatal().Err(err).Msg("failed to initialize migrator")
} }
m.Data = getMigrationVars() m.Data = getMigrationVars()
err = m.LoadMigrations(conf.MigrationsPath) err = m.LoadMigrations(conf.MigrationsPath)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to load migrations") errlog.Fatal().Err(err).Msg("failed to load migrations")
} }
if len(m.Migrations) == 0 { if len(m.Migrations) == 0 {
logger.Fatal().Msg("no migrations found") errlog.Fatal().Msg("no migrations found")
} }
mver, err := m.GetCurrentVersion() mver, err := m.GetCurrentVersion()
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to retrieve migration") errlog.Fatal().Err(err).Msg("failed to retrieve migration")
} }
var status string var status string


@ -16,7 +16,7 @@ import (
func cmdNew(cmd *cobra.Command, args []string) { func cmdNew(cmd *cobra.Command, args []string) {
if len(args) != 1 { if len(args) != 1 {
cmd.Help() cmd.Help() //nolint: errcheck
os.Exit(1) os.Exit(1)
} }
@ -115,13 +115,17 @@ func (t *Templ) get(name string) ([]byte, error) {
b := bytes.Buffer{} b := bytes.Buffer{}
tmpl := fasttemplate.New(v, "{%", "%}") tmpl := fasttemplate.New(v, "{%", "%}")
tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) { _, err := tmpl.ExecuteFunc(&b, func(w io.Writer, tag string) (int, error) {
if val, ok := t.data[strings.TrimSpace(tag)]; ok { if val, ok := t.data[strings.TrimSpace(tag)]; ok {
return w.Write([]byte(val)) return w.Write([]byte(val))
} }
return 0, fmt.Errorf("unknown template variable '%s'", tag) return 0, fmt.Errorf("unknown template variable '%s'", tag)
}) })
if err != nil {
return nil, err
}
return b.Bytes(), nil return b.Bytes(), nil
} }
@ -133,13 +137,13 @@ func ifNotExists(filePath string, doFn func(string) error) {
return return
} }
if os.IsNotExist(err) == false { if !os.IsNotExist(err) {
logger.Fatal().Err(err).Msgf("unable to check if '%s' exists", filePath) errlog.Fatal().Err(err).Msgf("unable to check if '%s' exists", filePath)
} }
err = doFn(filePath) err = doFn(filePath)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msgf("unable to create '%s'", filePath) errlog.Fatal().Err(err).Msgf("unable to create '%s'", filePath)
} }
logger.Info().Msgf("created '%s'", filePath) logger.Info().Msgf("created '%s'", filePath)


@ -1,6 +1,7 @@
package serv package serv
import ( import (
"bytes"
"context" "context"
"encoding/json" "encoding/json"
"fmt" "fmt"
@ -12,20 +13,21 @@ import (
"github.com/brianvoe/gofakeit" "github.com/brianvoe/gofakeit"
"github.com/dop251/goja" "github.com/dop251/goja"
"github.com/spf13/cobra" "github.com/spf13/cobra"
"github.com/valyala/fasttemplate"
) )
func cmdDBSeed(cmd *cobra.Command, args []string) { func cmdDBSeed(cmd *cobra.Command, args []string) {
var err error var err error
if conf, err = initConf(); err != nil { if conf, err = initConf(); err != nil {
logger.Fatal().Err(err).Msg("failed to read config") errlog.Fatal().Err(err).Msg("failed to read config")
} }
conf.UseAllowList = false conf.Production = false
db, err = initDBPool(conf) db, err = initDBPool(conf)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to connect to database") errlog.Fatal().Err(err).Msg("failed to connect to database")
} }
initCompiler() initCompiler()
@ -34,14 +36,14 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
b, err := ioutil.ReadFile(path.Join(confPath, conf.SeedFile)) b, err := ioutil.ReadFile(path.Join(confPath, conf.SeedFile))
if err != nil { if err != nil {
logger.Fatal().Err(err).Msgf("failed to read seed file '%s'", sfile) errlog.Fatal().Err(err).Msgf("failed to read seed file '%s'", sfile)
} }
vm := goja.New() vm := goja.New()
vm.Set("graphql", graphQLFunc) vm.Set("graphql", graphQLFunc)
console := vm.NewObject() console := vm.NewObject()
console.Set("log", logFunc) console.Set("log", logFunc) //nolint: errcheck
vm.Set("console", console) vm.Set("console", console)
fake := vm.NewObject() fake := vm.NewObject()
@ -50,38 +52,83 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
_, err = vm.RunScript("seed.js", string(b)) _, err = vm.RunScript("seed.js", string(b))
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to execute script") errlog.Fatal().Err(err).Msg("failed to execute script")
} }
logger.Info().Msg("seed script done") logger.Info().Msg("seed script done")
} }
//func runFunc(call goja.FunctionCall) { //func runFunc(call goja.FunctionCall) {
func graphQLFunc(query string, data interface{}) map[string]interface{} { func graphQLFunc(query string, data interface{}, opt map[string]string) map[string]interface{} {
b, err := json.Marshal(data) vars, err := json.Marshal(data)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to json serialize") errlog.Fatal().Err(err).Send()
} }
c := &coreContext{Context: context.Background()} c := context.Background()
c.req.Query = query
c.req.Vars = b
res, err := c.execQuery() if v, ok := opt["user_id"]; ok && len(v) != 0 {
c = context.WithValue(c, userIDKey, v)
}
var role string
if v, ok := opt["role"]; ok && len(v) != 0 {
role = v
} else {
role = "user"
}
stmts, err := buildRoleStmt([]byte(query), vars, role)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("graphql query failed") errlog.Fatal().Err(err).Msg("graphql query failed")
}
st := stmts[0]
buf := &bytes.Buffer{}
t := fasttemplate.New(st.sql, openVar, closeVar)
_, err = t.ExecuteFunc(buf, argMap(c, vars))
if err != nil {
errlog.Fatal().Err(err).Send()
}
finalSQL := buf.String()
tx, err := db.Begin(c)
if err != nil {
errlog.Fatal().Err(err).Send()
}
defer tx.Rollback(c) //nolint: errcheck
if conf.DB.SetUserID {
if err := setLocalUserID(c, tx); err != nil {
errlog.Fatal().Err(err).Send()
}
}
var root []byte
if err = tx.QueryRow(context.Background(), finalSQL).Scan(&root); err != nil {
errlog.Fatal().Err(err).Msg("sql query failed")
}
if err := tx.Commit(c); err != nil {
errlog.Fatal().Err(err).Send()
} }
val := make(map[string]interface{}) val := make(map[string]interface{})
err = json.Unmarshal(res, &val) err = json.Unmarshal(root, &val)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to deserialize json") errlog.Fatal().Err(err).Send()
} }
return val return val
} }
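As a usage sketch, a seed.js call such as graphql(query, data, opts) lands in the Go function above roughly like this (the mutation, variables and options are hypothetical):

// runs inside package serv once the config, db pool and compilers are initialized
res := graphQLFunc(
    `mutation { users(insert: { email: $email }) { id } }`, // hypothetical mutation
    map[string]interface{}{"email": "user1@example.com"},   // marshaled into the JSON vars
    map[string]string{"user_id": "1", "role": "admin"},     // optional user_id and role
)
fmt.Println(res) // decoded JSON result of the executed SQL

The role falls back to "user" when the opts map carries none, and any error is fatal by design since this path only runs from the seed command.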
//nolint: errcheck
func logFunc(args ...interface{}) { func logFunc(args ...interface{}) {
for _, arg := range args { for _, arg := range args {
if _, ok := arg.(map[string]interface{}); ok { if _, ok := arg.(map[string]interface{}); ok {
@ -98,6 +145,7 @@ func logFunc(args ...interface{}) {
} }
} }
//nolint: errcheck
func setFakeFuncs(f *goja.Object) { func setFakeFuncs(f *goja.Object) {
gofakeit.Seed(0) gofakeit.Seed(0)
@ -155,10 +203,9 @@ func setFakeFuncs(f *goja.Object) {
f.Set("transmission_gear_type", gofakeit.TransmissionGearType) f.Set("transmission_gear_type", gofakeit.TransmissionGearType)
// Text // Text
f.Set("word", gofakeit.Word) f.Set("word", gofakeit.Word)
f.Set("sentence", gofakeit.Sentence) f.Set("sentence", gofakeit.Sentence)
f.Set("paragrph", gofakeit.Paragraph) f.Set("paragraph", gofakeit.Paragraph)
f.Set("question", gofakeit.Question) f.Set("question", gofakeit.Question)
f.Set("quote", gofakeit.Quote) f.Set("quote", gofakeit.Quote)

View File

@ -8,12 +8,12 @@ func cmdServ(cmd *cobra.Command, args []string) {
var err error var err error
if conf, err = initConf(); err != nil { if conf, err = initConf(); err != nil {
logger.Fatal().Err(err).Msg("failed to read config") errlog.Fatal().Err(err).Msg("failed to read config")
} }
db, err = initDBPool(conf) db, err = initDBPool(conf)
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("failed to connect to database") errlog.Fatal().Err(err).Msg("failed to connect to database")
} }
initCompiler() initCompiler()

View File

@ -1,12 +1,19 @@
package serv package serv
import ( import (
"fmt"
"os"
"regexp"
"strings" "strings"
"unicode"
"github.com/gobuffalo/flect"
"github.com/spf13/viper" "github.com/spf13/viper"
) )
type config struct { type config struct {
*viper.Viper
AppName string `mapstructure:"app_name"` AppName string `mapstructure:"app_name"`
Env string Env string
HostPort string `mapstructure:"host_port"` HostPort string `mapstructure:"host_port"`
@ -16,17 +23,18 @@ type config struct {
LogLevel string `mapstructure:"log_level"` LogLevel string `mapstructure:"log_level"`
EnableTracing bool `mapstructure:"enable_tracing"` EnableTracing bool `mapstructure:"enable_tracing"`
UseAllowList bool `mapstructure:"use_allow_list"` UseAllowList bool `mapstructure:"use_allow_list"`
Production bool
WatchAndReload bool `mapstructure:"reload_on_config_change"` WatchAndReload bool `mapstructure:"reload_on_config_change"`
AuthFailBlock string `mapstructure:"auth_fail_block"` AuthFailBlock bool `mapstructure:"auth_fail_block"`
SeedFile string `mapstructure:"seed_file"` SeedFile string `mapstructure:"seed_file"`
MigrationsPath string `mapstructure:"migrations_path"` MigrationsPath string `mapstructure:"migrations_path"`
Inflections map[string]string Inflections map[string]string
Auth struct { Auth struct {
Type string Type string
Cookie string Cookie string
Header string CredsInHeader bool `mapstructure:"creds_in_header"`
Rails struct { Rails struct {
Version string Version string
@ -56,33 +64,28 @@ type config struct {
User string User string
Password string Password string
Schema string Schema string
PoolSize int32 `mapstructure:"pool_size"` PoolSize int32 `mapstructure:"pool_size"`
MaxRetries int `mapstructure:"max_retries"` MaxRetries int `mapstructure:"max_retries"`
LogLevel string `mapstructure:"log_level"` SetUserID bool `mapstructure:"set_user_id"`
vars map[string][]byte `mapstructure:"variables"` Vars map[string]string `mapstructure:"variables"`
Blocklist []string
Defaults struct {
Filter []string
Blocklist []string
}
Tables []configTable Tables []configTable
} `mapstructure:"database"` } `mapstructure:"database"`
Tables []configTable Tables []configTable
RolesQuery string `mapstructure:"roles_query"`
Roles []configRole
roles map[string]*configRole
} }
type configTable struct { type configTable struct {
Name string Name string
Filter []string Table string
FilterQuery []string `mapstructure:"filter_query"` Blocklist []string
FilterInsert []string `mapstructure:"filter_insert"` Remotes []configRemote
FilterUpdate []string `mapstructure:"filter_update"`
FilterDelete []string `mapstructure:"filter_delete"`
Table string
Blocklist []string
Remotes []configRemote
} }
type configRemote struct { type configRemote struct {
@ -98,16 +101,64 @@ type configRemote struct {
} `mapstructure:"set_headers"` } `mapstructure:"set_headers"`
} }
func newConfig() *viper.Viper { type configQuery struct {
Limit int
Filters []string
Columns []string
DisableFunctions bool `mapstructure:"disable_functions"`
Block bool
}
type configInsert struct {
Filters []string
Columns []string
Presets map[string]string
Block bool
}
type configUpdate struct {
Filters []string
Columns []string
Presets map[string]string
Block bool
}
type configDelete struct {
Filters []string
Columns []string
Block bool
}
type configRoleTable struct {
Name string
Query configQuery
Insert configInsert
Update configUpdate
Delete configDelete
}
type configRole struct {
Name string
Match string
Tables []configRoleTable
tablesMap map[string]*configRoleTable
}
func newConfig(name string) *viper.Viper {
vi := viper.New() vi := viper.New()
vi.SetEnvPrefix("SG") vi.SetEnvPrefix("SG")
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_")) vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
vi.AutomaticEnv() vi.AutomaticEnv()
vi.SetConfigName(name)
vi.AddConfigPath(confPath) vi.AddConfigPath(confPath)
vi.AddConfigPath("./config") vi.AddConfigPath("./config")
vi.SetConfigName(getConfigName())
if dir, _ := os.Getwd(); strings.HasSuffix(dir, "/serv") {
vi.AddConfigPath("../config")
}
vi.SetDefault("host_port", "0.0.0.0:8080") vi.SetDefault("host_port", "0.0.0.0:8080")
vi.SetDefault("web_ui", false) vi.SetDefault("web_ui", false)
@ -122,9 +173,10 @@ func newConfig() *viper.Viper {
vi.SetDefault("database.schema", "public") vi.SetDefault("database.schema", "public")
vi.SetDefault("env", "development") vi.SetDefault("env", "development")
vi.BindEnv("env", "GO_ENV")
vi.BindEnv("HOST", "HOST") vi.BindEnv("env", "GO_ENV") //nolint: errcheck
vi.BindEnv("PORT", "PORT") vi.BindEnv("HOST", "HOST") //nolint: errcheck
vi.BindEnv("PORT", "PORT") //nolint: errcheck
vi.SetDefault("auth.rails.max_idle", 80) vi.SetDefault("auth.rails.max_idle", 80)
vi.SetDefault("auth.rails.max_active", 12000) vi.SetDefault("auth.rails.max_active", 12000)
@ -132,24 +184,101 @@ func newConfig() *viper.Viper {
return vi return vi
} }
func (c *config) getVariables() map[string]string { func (c *config) Init(vi *viper.Viper) error {
vars := make(map[string]string, len(c.DB.vars)) if err := vi.Unmarshal(c); err != nil {
return fmt.Errorf("unable to decode config, %v", err)
for k, v := range c.DB.vars { }
isVar := false c.Viper = vi
for i := range v { if len(c.Tables) == 0 {
if v[i] == '$' { c.Tables = c.DB.Tables
isVar = true }
} else if v[i] == ' ' {
isVar = false if c.UseAllowList {
} else if isVar && v[i] >= 'a' && v[i] <= 'z' { c.Production = true
v[i] = 'A' + (v[i] - 'a') }
}
} for k, v := range c.Inflections {
vars[k] = string(v) flect.AddPlural(k, v)
}
for i := range c.Tables {
t := c.Tables[i]
t.Name = flect.Pluralize(strings.ToLower(t.Name))
t.Table = flect.Pluralize(strings.ToLower(t.Table))
}
for i := range c.Roles {
r := c.Roles[i]
r.Name = strings.ToLower(r.Name)
}
for k, v := range c.DB.Vars {
c.DB.Vars[k] = sanitize(v)
}
c.RolesQuery = sanitize(c.RolesQuery)
c.roles = make(map[string]*configRole)
for i := range c.Roles {
role := &c.Roles[i]
if _, ok := c.roles[role.Name]; ok {
errlog.Fatal().Msgf("duplicate role '%s' found", role.Name)
}
role.Name = strings.ToLower(role.Name)
role.Match = sanitize(role.Match)
role.tablesMap = make(map[string]*configRoleTable)
for n, table := range role.Tables {
role.tablesMap[table.Name] = &role.Tables[n]
}
c.roles[role.Name] = role
}
if _, ok := c.roles["user"]; !ok {
u := configRole{Name: "user"}
c.Roles = append(c.Roles, u)
c.roles["user"] = &u
}
if _, ok := c.roles["anon"]; !ok {
logger.Warn().Msg("unauthenticated requests will be blocked. no role 'anon' defined")
c.AuthFailBlock = true
}
c.validate()
return nil
}
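Init leans on gobuffalo/flect for the table-name pluralization above, seeded with any custom rules from the config's inflections map; a small illustration (the 'sheep' rule is a hypothetical config entry):

package main

import (
    "fmt"

    "github.com/gobuffalo/flect"
)

func main() {
    // custom rule, as it would arrive via the 'inflections:' config map
    flect.AddPlural("sheep", "sheep")

    fmt.Println(flect.Pluralize("user"))  // users
    fmt.Println(flect.Pluralize("sheep")) // sheep
}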
func (c *config) validate() {
rm := make(map[string]struct{})
for i := range c.Roles {
name := c.Roles[i].Name
if _, ok := rm[name]; ok {
errlog.Fatal().Msgf("duplicate config for role '%s'", c.Roles[i].Name)
}
rm[name] = struct{}{}
}
tm := make(map[string]struct{})
for i := range c.Tables {
name := c.Tables[i].Name
if _, ok := tm[name]; ok {
errlog.Fatal().Msgf("duplicate config for table '%s'", c.Tables[i].Name)
}
tm[name] = struct{}{}
}
if len(c.RolesQuery) == 0 {
logger.Warn().Msgf("no 'roles_query' defined.")
} }
return vars
} }
func (c *config) getAliasMap() map[string][]string { func (c *config) getAliasMap() map[string][]string {
@ -162,8 +291,25 @@ func (c *config) getAliasMap() map[string][]string {
continue continue
} }
k := strings.ToLower(t.Table) m[t.Table] = append(m[t.Table], t.Name)
m[k] = append(m[k], strings.ToLower(t.Name))
} }
return m return m
} }
var varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
var varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
func sanitize(s string) string {
s0 := varRe1.ReplaceAllString(s, `{{$1}}`)
s1 := strings.Map(func(r rune) rune {
if unicode.IsSpace(r) {
return ' '
}
return r
}, s0)
return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
return strings.ToLower(m)
})
}
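By way of example, sanitize maps every whitespace rune to a single space and rewrites $Var references into lower-cased {{var}} template tags (the input is illustrative):

// runnable inside package serv
s := sanitize("select id from users\n\twhere id = $User_ID")
// s == "select id from users  where id = {{user_id}}"
// note: whitespace runes are replaced one-for-one, not collapsed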

13
serv/config_test.go Normal file
View File

@ -0,0 +1,13 @@
package serv
import (
"testing"
)
func TestInitConf(t *testing.T) {
_, err := initConf()
if err != nil {
t.Fatal(err.Error())
}
}

View File

@ -8,20 +8,14 @@ import (
"fmt" "fmt"
"io" "io"
"net/http" "net/http"
"sync"
"time" "time"
"github.com/cespare/xxhash/v2" "github.com/cespare/xxhash/v2"
"github.com/dosco/super-graph/jsn"
"github.com/dosco/super-graph/psql"
"github.com/dosco/super-graph/qcode" "github.com/dosco/super-graph/qcode"
"github.com/jackc/pgx/v4"
"github.com/valyala/fasttemplate" "github.com/valyala/fasttemplate"
) )
const (
empty = ""
)
type coreContext struct { type coreContext struct {
req gqlReq req gqlReq
res gqlResp res gqlResp
@ -32,6 +26,16 @@ func (c *coreContext) handleReq(w io.Writer, req *http.Request) error {
c.req.ref = req.Referer() c.req.ref = req.Referer()
c.req.hdr = req.Header c.req.hdr = req.Header
if len(c.req.Vars) == 2 {
c.req.Vars = nil
}
if authCheck(c) {
c.req.role = "user"
} else {
c.req.role = "anon"
}
b, err := c.execQuery() b, err := c.execQuery()
if err != nil { if err != nil {
return err return err
@ -41,343 +45,231 @@ func (c *coreContext) handleReq(w io.Writer, req *http.Request) error {
} }
func (c *coreContext) execQuery() ([]byte, error) { func (c *coreContext) execQuery() ([]byte, error) {
var err error
var skipped uint32
var qc *qcode.QCode
var data []byte var data []byte
var st *stmt
var err error
if conf.UseAllowList { if conf.Production {
var ps *preparedItem data, st, err = c.resolvePreparedSQL()
data, ps, err = c.resolvePreparedSQL(c.req.Query)
if err != nil { if err != nil {
return nil, err logger.Error().
} Err(err).
Str("default_role", c.req.role).
Msg(c.req.Query)
skipped = ps.skipped return nil, errors.New("query failed. check logs for error")
qc = ps.qc
} else {
qc, err = qcompile.Compile([]byte(c.req.Query))
if err != nil {
return nil, err
}
data, skipped, err = c.resolveSQL(qc)
if err != nil {
return nil, err
}
}
if len(data) == 0 || skipped == 0 {
return data, nil
}
sel := qc.Selects
h := xxhash.New()
// fetch the field names used within the db response json
// that mark the insertion points, and the mapping between
// those field names and their select objects
fids, sfmap := parentFieldIds(h, sel, skipped)
// fetch the field values of the marked insertion points
// these values contain the id to be used with fetching remote data
from := jsn.Get(data, fids)
var to []jsn.Field
switch {
case len(from) == 1:
to, err = c.resolveRemote(c.req.hdr, h, from[0], sel, sfmap)
case len(from) > 1:
to, err = c.resolveRemotes(c.req.hdr, h, from, sel, sfmap)
default:
return nil, errors.New("something wrong no remote ids found in db response")
}
if err != nil {
return nil, err
}
var ob bytes.Buffer
err = jsn.Replace(&ob, data, from, to)
if err != nil {
return nil, err
}
return ob.Bytes(), nil
}
func (c *coreContext) resolveRemote(
hdr http.Header,
h *xxhash.Digest,
field jsn.Field,
sel []qcode.Select,
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
// replacement data for the marked insertion points
// key and value will be replaced by whats below
toA := [1]jsn.Field{}
to := toA[:1]
// use the json key to find the related Select object
k1 := xxhash.Sum64(field.Key)
s, ok := sfmap[k1]
if !ok {
return nil, nil
}
p := sel[s.ParentID]
// then use the Table name in the Select and its parent
// to find the resolver to use for this relationship
k2 := mkkey(h, s.Table, p.Table)
r, ok := rmap[k2]
if !ok {
return nil, nil
}
id := jsn.Value(field.Value)
if len(id) == 0 {
return nil, nil
}
st := time.Now()
b, err := r.Fn(hdr, id)
if err != nil {
return nil, err
}
if conf.EnableTracing {
c.addTrace(sel, s.ID, st)
}
if len(r.Path) != 0 {
b = jsn.Strip(b, r.Path)
}
var ob bytes.Buffer
if len(s.Cols) != 0 {
err = jsn.Filter(&ob, b, colsToList(s.Cols))
if err != nil {
return nil, err
} }
} else { } else {
ob.WriteString("null") if data, st, err = c.resolveSQL(); err != nil {
return nil, err
}
} }
to[0] = jsn.Field{[]byte(s.FieldName), ob.Bytes()} return execRemoteJoin(st, data, c.req.hdr)
return to, nil
} }
func (c *coreContext) resolveRemotes( func (c *coreContext) resolvePreparedSQL() ([]byte, *stmt, error) {
hdr http.Header, var tx pgx.Tx
h *xxhash.Digest, var err error
from []jsn.Field,
sel []qcode.Select,
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
// replacement data for the marked insertion points qt := qcode.GetQType(c.req.Query)
// key and value will be replaced by what's below mutation := (qt == qcode.QTMutation)
to := make([]jsn.Field, len(from)) anonQuery := (qt == qcode.QTQuery && c.req.role == "anon")
var wg sync.WaitGroup useRoleQuery := len(conf.RolesQuery) != 0 && mutation
wg.Add(len(from)) useTx := useRoleQuery || conf.DB.SetUserID
var cerr error if useTx {
if tx, err = db.Begin(context.Background()); err != nil {
for i, id := range from { return nil, nil, err
// use the json key to find the related Select object
k1 := xxhash.Sum64(id.Key)
s, ok := sfmap[k1]
if !ok {
return nil, nil
} }
p := sel[s.ParentID] defer tx.Rollback(c) //nolint: errcheck
// then use the Table name in the Select and its parent
// to find the resolver to use for this relationship
k2 := mkkey(h, s.Table, p.Table)
r, ok := rmap[k2]
if !ok {
return nil, nil
}
id := jsn.Value(id.Value)
if len(id) == 0 {
return nil, nil
}
go func(n int, id []byte, s *qcode.Select) {
defer wg.Done()
st := time.Now()
b, err := r.Fn(hdr, id)
if err != nil {
cerr = fmt.Errorf("%s: %s", s.Table, err)
return
}
if conf.EnableTracing {
c.addTrace(sel, s.ID, st)
}
if len(r.Path) != 0 {
b = jsn.Strip(b, r.Path)
}
var ob bytes.Buffer
if len(s.Cols) != 0 {
err = jsn.Filter(&ob, b, colsToList(s.Cols))
if err != nil {
cerr = fmt.Errorf("%s: %s", s.Table, err)
return
}
} else {
ob.WriteString("null")
}
to[n] = jsn.Field{[]byte(s.FieldName), ob.Bytes()}
}(i, id, s)
} }
wg.Wait()
return to, cerr if conf.DB.SetUserID {
} if err := setLocalUserID(c, tx); err != nil {
return nil, nil, err
}
}
func (c *coreContext) resolvePreparedSQL(gql string) ([]byte, *preparedItem, error) { var role string
ps, ok := _preparedList[gqlHash(gql, c.req.Vars)]
if useRoleQuery {
if role, err = c.executeRoleQuery(tx); err != nil {
return nil, nil, err
}
} else if v := c.Value(userRoleKey); v != nil {
role = v.(string)
} else {
role = c.req.role
}
ps, ok := _preparedList[gqlHash(c.req.Query, c.req.Vars, role)]
if !ok { if !ok {
return nil, nil, errUnauthorized return nil, nil, errUnauthorized
} }
var root []byte var root []byte
vars := varList(c, ps.args) var row pgx.Row
tx, err := db.Begin(c) vars, err := argList(c, ps.args)
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
defer tx.Rollback(c)
if v := c.Value(userIDKey); v != nil { if useTx {
_, err = tx.Exec(c, fmt.Sprintf(`SET LOCAL "user.id" = %s;`, v)) row = tx.QueryRow(context.Background(), ps.sd.SQL, vars...)
} else {
row = db.QueryRow(context.Background(), ps.sd.SQL, vars...)
}
if err != nil { if mutation || anonQuery {
err = row.Scan(&root)
} else {
err = row.Scan(&role, &root)
}
if len(role) == 0 {
logger.Debug().Str("default_role", c.req.role).Msg(c.req.Query)
} else {
logger.Debug().Str("default_role", c.req.role).Str("role", role).Msg(c.req.Query)
}
if err != nil {
return nil, nil, err
}
c.req.role = role
if useTx {
if err := tx.Commit(context.Background()); err != nil {
return nil, nil, err return nil, nil, err
} }
} }
err = tx.QueryRow(c, ps.stmt.SQL, vars...).Scan(&root) return root, ps.st, nil
if err != nil {
return nil, nil, err
}
if err := tx.Commit(c); err != nil {
return nil, nil, err
}
fmt.Printf("PRE: %v\n", ps.stmt)
return root, ps, nil
} }
func (c *coreContext) resolveSQL(qc *qcode.QCode) ([]byte, uint32, error) { func (c *coreContext) resolveSQL() ([]byte, *stmt, error) {
var vars map[string]json.RawMessage var tx pgx.Tx
stmt := &bytes.Buffer{} var err error
if len(c.req.Vars) != 0 { qt := qcode.GetQType(c.req.Query)
if err := json.Unmarshal(c.req.Vars, &vars); err != nil { mutation := (qt == qcode.QTMutation)
return nil, 0, err //anonQuery := (qt == qcode.QTQuery && c.req.role == "anon")
useRoleQuery := len(conf.RolesQuery) != 0 && mutation
useTx := useRoleQuery || conf.DB.SetUserID
if useTx {
if tx, err = db.Begin(context.Background()); err != nil {
return nil, nil, err
}
defer tx.Rollback(context.Background()) //nolint: errcheck
}
if conf.DB.SetUserID {
if err := setLocalUserID(c, tx); err != nil {
return nil, nil, err
} }
} }
skipped, err := pcompile.Compile(qc, stmt, psql.Variables(vars)) if useRoleQuery {
if c.req.role, err = c.executeRoleQuery(tx); err != nil {
return nil, nil, err
}
} else if v := c.Value(userRoleKey); v != nil {
c.req.role = v.(string)
}
stmts, err := buildStmt(qt, []byte(c.req.Query), c.req.Vars, c.req.role)
if err != nil { if err != nil {
return nil, 0, err return nil, nil, err
} }
st := &stmts[0]
t := fasttemplate.New(stmt.String(), openVar, closeVar) t := fasttemplate.New(st.sql, openVar, closeVar)
buf := &bytes.Buffer{}
stmt.Reset()
_, err = t.ExecuteFunc(stmt, varMap(c))
if err == errNoUserID &&
authFailBlock == authFailBlockPerQuery &&
authCheck(c) == false {
return nil, 0, errUnauthorized
}
_, err = t.ExecuteFunc(buf, argMap(c, c.req.Vars))
if err != nil { if err != nil {
return nil, 0, err return nil, nil, err
} }
finalSQL := buf.String()
finalSQL := stmt.String() var stime time.Time
// if conf.LogLevel == "debug" {
// os.Stdout.WriteString(finalSQL)
// os.Stdout.WriteString("\n\n")
// }
var st time.Time
if conf.EnableTracing { if conf.EnableTracing {
st = time.Now() stime = time.Now()
}
var root []byte
var role string
var row pgx.Row
defaultRole := c.req.role
if useTx {
row = tx.QueryRow(context.Background(), finalSQL)
} else {
row = db.QueryRow(context.Background(), finalSQL)
}
if len(stmts) == 1 {
err = row.Scan(&root)
} else {
err = row.Scan(&role, &root)
}
if len(role) == 0 {
logger.Debug().Str("default_role", defaultRole).Msg(c.req.Query)
} else {
logger.Debug().Str("default_role", defaultRole).Str("role", role).Msg(c.req.Query)
} }
tx, err := db.Begin(c)
if err != nil { if err != nil {
return nil, 0, err return nil, nil, err
} }
defer tx.Rollback(c)
if v := c.Value(userIDKey); v != nil { if useTx {
_, err = tx.Exec(c, fmt.Sprintf(`SET LOCAL "user.id" = %s;`, v)) if err := tx.Commit(context.Background()); err != nil {
return nil, nil, err
if err != nil {
return nil, 0, err
} }
} }
//fmt.Printf("\nRAW: %#v\n", finalSQL) if !conf.Production {
var root []byte
err = tx.QueryRow(c, finalSQL).Scan(&root)
if err != nil {
return nil, 0, err
}
if err := tx.Commit(c); err != nil {
return nil, 0, err
}
if conf.EnableTracing && len(qc.Selects) != 0 {
c.addTrace(
qc.Selects,
qc.Selects[0].ID,
st)
}
if conf.UseAllowList == false {
_allowList.add(&c.req) _allowList.add(&c.req)
} }
return root, skipped, nil if len(stmts) > 1 {
if st = findStmt(role, stmts); st == nil {
return nil, nil, fmt.Errorf("invalid role '%s' returned", role)
}
}
if conf.EnableTracing {
for _, id := range st.qc.Roots {
c.addTrace(st.qc.Selects, id, stime)
}
}
return root, st, nil
}
func (c *coreContext) executeRoleQuery(tx pgx.Tx) (string, error) {
var role string
row := tx.QueryRow(context.Background(), "_sg_get_role", c.req.role, 1)
if err := row.Scan(&role); err != nil {
return "", err
}
return role, nil
} }
func (c *coreContext) render(w io.Writer, data []byte) error { func (c *coreContext) render(w io.Writer, data []byte) error {
@ -401,15 +293,15 @@ func (c *coreContext) addTrace(sel []qcode.Select, id int32, st time.Time) {
c.res.Extensions.Tracing.Duration = du c.res.Extensions.Tracing.Duration = du
n := 1 n := 1
for i := id; i != 0; i = sel[i].ParentID { for i := id; i != -1; i = sel[i].ParentID {
n++ n++
} }
path := make([]string, n) path := make([]string, n)
n-- n--
for i := id; ; i = sel[i].ParentID { for i := id; ; i = sel[i].ParentID {
path[n] = sel[i].Table path[n] = sel[i].Name
if sel[i].ID == 0 { if sel[i].ParentID == -1 {
break break
} }
n-- n--
@ -418,7 +310,7 @@ func (c *coreContext) addTrace(sel []qcode.Select, id int32, st time.Time) {
tr := resolver{ tr := resolver{
Path: path, Path: path,
ParentType: "Query", ParentType: "Query",
FieldName: sel[id].Table, FieldName: sel[id].Name,
ReturnType: "object", ReturnType: "object",
StartOffset: 1, StartOffset: 1,
Duration: du, Duration: du,
@ -428,6 +320,15 @@ func (c *coreContext) addTrace(sel []qcode.Select, id int32, st time.Time) {
append(c.res.Extensions.Tracing.Execution.Resolvers, tr) append(c.res.Extensions.Tracing.Execution.Resolvers, tr)
} }
func setLocalUserID(c context.Context, tx pgx.Tx) error {
var err error
if v := c.Value(userIDKey); v != nil {
_, err = tx.Exec(context.Background(), fmt.Sprintf(`SET LOCAL "user.id" = %s;`, v))
}
return err
}
func parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) ( func parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
[][]byte, [][]byte,
map[uint64]*qcode.Select) { map[uint64]*qcode.Select) {
@ -452,12 +353,12 @@ func parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
for i := range sel { for i := range sel {
s := &sel[i] s := &sel[i]
if isSkipped(skipped, uint32(s.ID)) == false { if !isSkipped(skipped, uint32(s.ID)) {
continue continue
} }
p := sel[s.ParentID] p := sel[s.ParentID]
k := mkkey(h, s.Table, p.Table) k := mkkey(h, s.Name, p.Name)
if r, ok := rmap[k]; ok { if r, ok := rmap[k]; ok {
fm[n] = r.IDField fm[n] = r.IDField

185
serv/core_build.go Normal file
View File

@ -0,0 +1,185 @@
package serv
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io"
"github.com/dosco/super-graph/psql"
"github.com/dosco/super-graph/qcode"
)
type stmt struct {
role *configRole
qc *qcode.QCode
skipped uint32
sql string
}
func buildStmt(qt qcode.QType, gql, vars []byte, role string) ([]stmt, error) {
switch qt {
case qcode.QTMutation:
return buildRoleStmt(gql, vars, role)
case qcode.QTQuery:
switch {
case role == "anon":
return buildRoleStmt(gql, vars, role)
default:
return buildMultiStmt(gql, vars)
}
default:
return nil, fmt.Errorf("unknown query type '%d'", qt)
}
}
func buildRoleStmt(gql, vars []byte, role string) ([]stmt, error) {
ro, ok := conf.roles[role]
if !ok {
return nil, fmt.Errorf(`roles '%s' not defined in config`, role)
}
var vm map[string]json.RawMessage
var err error
if len(vars) != 0 {
if err := json.Unmarshal(vars, &vm); err != nil {
return nil, err
}
}
qc, err := qcompile.Compile(gql, ro.Name)
if err != nil {
return nil, err
}
// For the 'anon' role in production only compile
// queries for tables defined in the config file.
if conf.Production && ro.Name == "anon" && !hasTablesWithConfig(qc, ro) {
return nil, errors.New("query contains tables with no 'anon' role config")
}
stmts := []stmt{stmt{role: ro, qc: qc}}
w := &bytes.Buffer{}
skipped, err := pcompile.Compile(qc, w, psql.Variables(vm))
if err != nil {
return nil, err
}
stmts[0].skipped = skipped
stmts[0].sql = w.String()
return stmts, nil
}
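A usage sketch of buildRoleStmt, mirroring how the seed command and the prepared-statement builder call it (the query and role are illustrative):

stmts, err := buildRoleStmt([]byte(`query { products { id name } }`), nil, "anon")
if err != nil {
    // e.g. the role is missing from the config, or in production the
    // 'anon' role has no config entry for one of the queried tables
    errlog.Fatal().Err(err).Send()
}
sql := stmts[0].sql // compiled SQL for the 'anon' role, {{var}} tags still intact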
func buildMultiStmt(gql, vars []byte) ([]stmt, error) {
var vm map[string]json.RawMessage
var err error
if len(vars) != 0 {
if err := json.Unmarshal(vars, &vm); err != nil {
return nil, err
}
}
if len(conf.RolesQuery) == 0 {
return buildRoleStmt(gql, vars, "user")
}
stmts := make([]stmt, 0, len(conf.Roles))
w := &bytes.Buffer{}
for i := 0; i < len(conf.Roles); i++ {
role := &conf.Roles[i]
if role.Name == "anon" {
continue
}
qc, err := qcompile.Compile(gql, role.Name)
if err != nil {
return nil, err
}
stmts = append(stmts, stmt{role: role, qc: qc})
skipped, err := pcompile.Compile(qc, w, psql.Variables(vm))
if err != nil {
return nil, err
}
s := &stmts[len(stmts)-1]
s.skipped = skipped
s.sql = w.String()
w.Reset()
}
sql, err := renderUserQuery(stmts, vm)
if err != nil {
return nil, err
}
stmts[0].sql = sql
return stmts, nil
}
//nolint: errcheck
func renderUserQuery(
stmts []stmt, vars map[string]json.RawMessage) (string, error) {
w := &bytes.Buffer{}
io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)
for _, s := range stmts {
if len(s.role.Match) == 0 &&
s.role.Name != "user" && s.role.Name != "anon" {
continue
}
io.WriteString(w, `WHEN '`)
io.WriteString(w, s.role.Name)
io.WriteString(w, `' THEN (`)
io.WriteString(w, s.sql)
io.WriteString(w, `) `)
}
io.WriteString(w, `END) FROM (SELECT (CASE WHEN EXISTS (`)
io.WriteString(w, conf.RolesQuery)
io.WriteString(w, `) THEN `)
io.WriteString(w, `(SELECT (CASE`)
for _, s := range stmts {
if len(s.role.Match) == 0 {
continue
}
io.WriteString(w, ` WHEN `)
io.WriteString(w, s.role.Match)
io.WriteString(w, ` THEN '`)
io.WriteString(w, s.role.Name)
io.WriteString(w, `'`)
}
io.WriteString(w, ` ELSE 'user' END) FROM (`)
io.WriteString(w, conf.RolesQuery)
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) `)
io.WriteString(w, `ELSE 'anon' END) FROM (VALUES (1)) AS "_sg_auth_filler") AS "_sg_auth_info"(role) LIMIT 1; `)
return w.String(), nil
}
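For a config with one matched role plus the built-in ones, the rendered wrapper comes out shaped roughly like this (role names, the match expression and roles_query are hypothetical; whitespace added for readability):

const renderedShape = `
SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role"
  WHEN 'admin' THEN ( /* compiled admin query */ )
  WHEN 'user'  THEN ( /* compiled user query  */ )
END)
FROM (
  SELECT (CASE WHEN EXISTS ( /* roles_query */ ) THEN (
    SELECT (CASE WHEN users.admin = true THEN 'admin' ELSE 'user' END)
    FROM ( /* roles_query */ ) AS "_sg_auth_roles_query" LIMIT 1)
  ELSE 'anon' END)
  FROM (VALUES (1)) AS "_sg_auth_filler"
) AS "_sg_auth_info"(role) LIMIT 1;`

The outer CASE picks the per-role statement, while the inner one resolves the request's role: 'anon' when roles_query finds no row, otherwise the first matching role or 'user'.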
func hasTablesWithConfig(qc *qcode.QCode, role *configRole) bool {
for _, id := range qc.Roots {
t, err := schema.GetTable(qc.Selects[id].Name)
if err != nil {
return false
}
if _, ok := role.tablesMap[t.Name]; !ok {
return false
}
}
return true
}

197
serv/core_remote.go Normal file
View File

@ -0,0 +1,197 @@
package serv
import (
"bytes"
"errors"
"fmt"
"net/http"
"sync"
"github.com/cespare/xxhash/v2"
"github.com/dosco/super-graph/jsn"
"github.com/dosco/super-graph/qcode"
)
func execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]byte, error) {
var err error
if len(data) == 0 || st.skipped == 0 {
return data, nil
}
sel := st.qc.Selects
h := xxhash.New()
// fetch the field names used within the db response json
// that mark the insertion points, and the mapping between
// those field names and their select objects
fids, sfmap := parentFieldIds(h, sel, st.skipped)
// fetch the field values of the marked insertion points
// these values contain the ids used to fetch the remote data
from := jsn.Get(data, fids)
var to []jsn.Field
switch {
case len(from) == 1:
to, err = resolveRemote(hdr, h, from[0], sel, sfmap)
case len(from) > 1:
to, err = resolveRemotes(hdr, h, from, sel, sfmap)
default:
return nil, errors.New("something wrong no remote ids found in db response")
}
if err != nil {
return nil, err
}
var ob bytes.Buffer
err = jsn.Replace(&ob, data, from, to)
if err != nil {
return nil, err
}
return ob.Bytes(), nil
}
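The splice itself is a byte-level find-and-replace over the response JSON via the jsn package; a reduced sketch of the mechanism (payload and field names are made up):

data := []byte(`{"customer": {"id": 1, "stripe_id": "cus_123"}}`)

// locate the insertion-point field by its key
from := jsn.Get(data, [][]byte{[]byte("stripe_id")})

// value fetched by the remote resolver (hypothetical)
to := []jsn.Field{{Key: []byte("stripe"), Value: []byte(`{"balance": 42}`)}}

var ob bytes.Buffer
if err := jsn.Replace(&ob, data, from, to); err != nil {
    return nil, err
}
// ob now holds the original JSON with the remote payload spliced in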
func resolveRemote(
hdr http.Header,
h *xxhash.Digest,
field jsn.Field,
sel []qcode.Select,
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
// replacement data for the marked insertion points
// key and value will be replaced by what's below
toA := [1]jsn.Field{}
to := toA[:1]
// use the json key to find the related Select object
k1 := xxhash.Sum64(field.Key)
s, ok := sfmap[k1]
if !ok {
return nil, nil
}
p := sel[s.ParentID]
// then use the Table name in the Select and its parent
// to find the resolver to use for this relationship
k2 := mkkey(h, s.Name, p.Name)
r, ok := rmap[k2]
if !ok {
return nil, nil
}
id := jsn.Value(field.Value)
if len(id) == 0 {
return nil, nil
}
//st := time.Now()
b, err := r.Fn(hdr, id)
if err != nil {
return nil, err
}
if len(r.Path) != 0 {
b = jsn.Strip(b, r.Path)
}
var ob bytes.Buffer
if len(s.Cols) != 0 {
err = jsn.Filter(&ob, b, colsToList(s.Cols))
if err != nil {
return nil, err
}
} else {
ob.WriteString("null")
}
to[0] = jsn.Field{Key: []byte(s.FieldName), Value: ob.Bytes()}
return to, nil
}
func resolveRemotes(
hdr http.Header,
h *xxhash.Digest,
from []jsn.Field,
sel []qcode.Select,
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
// replacement data for the marked insertion points
// key and value will be replaced by what's below
to := make([]jsn.Field, len(from))
var wg sync.WaitGroup
wg.Add(len(from))
var cerr error
for i, id := range from {
// use the json key to find the related Select object
k1 := xxhash.Sum64(id.Key)
s, ok := sfmap[k1]
if !ok {
return nil, nil
}
p := sel[s.ParentID]
// then use the Table name in the Select and its parent
// to find the resolver to use for this relationship
k2 := mkkey(h, s.Name, p.Name)
r, ok := rmap[k2]
if !ok {
return nil, nil
}
id := jsn.Value(id.Value)
if len(id) == 0 {
return nil, nil
}
go func(n int, id []byte, s *qcode.Select) {
defer wg.Done()
//st := time.Now()
b, err := r.Fn(hdr, id)
if err != nil {
cerr = fmt.Errorf("%s: %s", s.Name, err)
return
}
if len(r.Path) != 0 {
b = jsn.Strip(b, r.Path)
}
var ob bytes.Buffer
if len(s.Cols) != 0 {
err = jsn.Filter(&ob, b, colsToList(s.Cols))
if err != nil {
cerr = fmt.Errorf("%s: %s", s.Name, err)
return
}
} else {
ob.WriteString("null")
}
to[n] = jsn.Field{Key: []byte(s.FieldName), Value: ob.Bytes()}
}(i, id, s)
}
wg.Wait()
return to, cerr
}

21
serv/corpus/001.gql Normal file
View File

@ -0,0 +1,21 @@
query {
products(
# returns only 30 items
limit: 30,
# starts from item 10, commented out for now
# offset: 10,
# orders the response items by highest price
order_by: { price: desc },
# no duplicate prices returned
distinct: [ price ]
# only items with an id >= 20 and < 28 are returned
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
id
name
price
}
}

11
serv/fuzz.go Normal file
View File

@ -0,0 +1,11 @@
// +build gofuzz
package serv
func Fuzz(data []byte) int {
gql := string(data)
gqlName(gql)
gqlHash(gql, nil, "")
return 1
}

16
serv/fuzz_test.go Normal file
View File

@ -0,0 +1,16 @@
package serv
import "testing"
func TestFuzzCrashers(t *testing.T) {
var crashers = []string{
"query",
"q",
"que",
}
for _, f := range crashers {
_ = gqlName(f)
gqlHash(f, nil, "")
}
}

View File

@ -8,8 +8,6 @@ import (
"net/http" "net/http"
"strings" "strings"
"time" "time"
"github.com/gorilla/websocket"
) )
const ( const (
@ -20,8 +18,6 @@ const (
) )
var ( var (
upgrader = websocket.Upgrader{}
errNoUserID = errors.New("no user_id available")
errUnauthorized = errors.New("not authorized") errUnauthorized = errors.New("not authorized")
) )
@ -30,14 +26,13 @@ type gqlReq struct {
Query string `json:"query"` Query string `json:"query"`
Vars json.RawMessage `json:"variables"` Vars json.RawMessage `json:"variables"`
ref string ref string
role string
hdr http.Header hdr http.Header
} }
type variables map[string]json.RawMessage
type gqlResp struct { type gqlResp struct {
Error string `json:"error,omitempty"` Error string `json:"message,omitempty"`
Data json.RawMessage `json:"data"` Data json.RawMessage `json:"data,omitempty"`
Extensions *extensions `json:"extensions,omitempty"` Extensions *extensions `json:"extensions,omitempty"`
} }
@ -69,85 +64,50 @@ type resolver struct {
func apiv1Http(w http.ResponseWriter, r *http.Request) { func apiv1Http(w http.ResponseWriter, r *http.Request) {
ctx := &coreContext{Context: r.Context()} ctx := &coreContext{Context: r.Context()}
if authFailBlock == authFailBlockAlways && authCheck(ctx) == false { //nolint: errcheck
err := "Not authorized" if conf.AuthFailBlock && !authCheck(ctx) {
logger.Debug().Msg(err) w.WriteHeader(http.StatusUnauthorized)
http.Error(w, err, 401) json.NewEncoder(w).Encode(gqlResp{Error: errUnauthorized.Error()})
return return
} }
b, err := ioutil.ReadAll(io.LimitReader(r.Body, maxReadBytes)) b, err := ioutil.ReadAll(io.LimitReader(r.Body, maxReadBytes))
defer r.Body.Close()
if err != nil { if err != nil {
logger.Err(err).Msg("failed to read request body") errlog.Error().Err(err).Msg("failed to read request body")
errorResp(w, err) errorResp(w, err)
return return
} }
defer r.Body.Close()
err = json.Unmarshal(b, &ctx.req) err = json.Unmarshal(b, &ctx.req)
if err != nil { if err != nil {
logger.Err(err).Msg("failed to decode json request body") errlog.Error().Err(err).Msg("failed to decode json request body")
errorResp(w, err) errorResp(w, err)
return return
} }
if strings.EqualFold(ctx.req.OpName, introspectionQuery) { if strings.EqualFold(ctx.req.OpName, introspectionQuery) {
// dat, err := ioutil.ReadFile("test.schema") introspect(w)
// if err != nil {
// http.Error(w, err.Error(), http.StatusInternalServerError)
// return
// }
//w.Write(dat)
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{
"data": {
"__schema": {
"queryType": {
"name": "Query"
},
"mutationType": null,
"subscriptionType": null
}
},
"extensions":{
"tracing":{
"version":1,
"startTime":"2019-06-04T19:53:31.093Z",
"endTime":"2019-06-04T19:53:31.108Z",
"duration":15219720,
"execution": {
"resolvers": [{
"path": ["__schema"],
"parentType": "Query",
"fieldName": "__schema",
"returnType": "__Schema!",
"startOffset": 50950,
"duration": 17187
}]
}
}
}
}`))
return return
} }
err = ctx.handleReq(w, r) err = ctx.handleReq(w, r)
//nolint: errcheck
if err == errUnauthorized { if err == errUnauthorized {
err := "Not authorized" w.WriteHeader(http.StatusUnauthorized)
logger.Debug().Msg(err) json.NewEncoder(w).Encode(gqlResp{Error: err.Error()})
http.Error(w, err, 401) return
} }
if err != nil { if err != nil {
logger.Err(err).Msg("Failed to handle request") errlog.Error().Err(err).Msg("failed to handle request")
errorResp(w, err) errorResp(w, err)
return
} }
} }
//nolint: errcheck
func errorResp(w http.ResponseWriter, err error) { func errorResp(w http.ResponseWriter, err error) {
w.WriteHeader(http.StatusBadRequest)
json.NewEncoder(w).Encode(gqlResp{Error: err.Error()}) json.NewEncoder(w).Encode(gqlResp{Error: err.Error()})
} }

37
serv/introsp.go Normal file
View File

@ -0,0 +1,37 @@
package serv
import "net/http"
//nolint: errcheck
func introspect(w http.ResponseWriter) {
w.Header().Set("Content-Type", "application/json")
w.Write([]byte(`{
"data": {
"__schema": {
"queryType": {
"name": "Query"
},
"mutationType": null,
"subscriptionType": null
}
},
"extensions":{
"tracing":{
"version":1,
"startTime":"2019-06-04T19:53:31.093Z",
"endTime":"2019-06-04T19:53:31.108Z",
"duration":15219720,
"execution": {
"resolvers": [{
"path": ["__schema"],
"parentType": "Query",
"fieldName": "__schema",
"returnType": "__Schema!",
"startOffset": 50950,
"duration": 17187
}]
}
}
}
}`))
}

View File

@ -3,21 +3,19 @@ package serv
import ( import (
"bytes" "bytes"
"context" "context"
"encoding/json"
"fmt" "fmt"
"io" "io"
"github.com/dosco/super-graph/psql"
"github.com/dosco/super-graph/qcode" "github.com/dosco/super-graph/qcode"
"github.com/jackc/pgconn" "github.com/jackc/pgconn"
"github.com/jackc/pgx/v4"
"github.com/valyala/fasttemplate" "github.com/valyala/fasttemplate"
) )
type preparedItem struct { type preparedItem struct {
stmt *pgconn.StatementDescription sd *pgconn.StatementDescription
args [][]byte args [][]byte
skipped uint32 st *stmt
qc *qcode.QCode
} }
var ( var (
@ -27,78 +25,181 @@ var (
func initPreparedList() { func initPreparedList() {
_preparedList = make(map[string]*preparedItem) _preparedList = make(map[string]*preparedItem)
for k, v := range _allowList.list { tx, err := db.Begin(context.Background())
err := prepareStmt(k, v.gql, v.vars) if err != nil {
if err != nil { errlog.Fatal().Err(err).Send()
logger.Warn().Err(err).Send() }
defer tx.Rollback(context.Background()) //nolint: errcheck
err = prepareRoleStmt(tx)
if err != nil {
errlog.Fatal().Err(err).Msg("failed to prepare get role statement")
}
if err := tx.Commit(context.Background()); err != nil {
errlog.Fatal().Err(err).Send()
}
success := 0
for _, v := range _allowList.list {
if len(v.gql) == 0 {
continue
}
err := prepareStmt(v.gql, v.vars)
if err == nil {
success++
continue
}
if len(v.vars) == 0 {
logger.Warn().Err(err).Msg(v.gql)
} else {
logger.Warn().Err(err).Msgf("%s %s", v.vars, v.gql)
} }
} }
logger.Info().
Msgf("Registered %d of %d queries from allow.list as prepared statements",
success, len(_allowList.list))
} }
func prepareStmt(key, gql string, varBytes json.RawMessage) error { func prepareStmt(gql string, vars []byte) error {
if len(gql) == 0 || len(key) == 0 { qt := qcode.GetQType(gql)
return nil q := []byte(gql)
}
qc, err := qcompile.Compile([]byte(gql)) tx, err := db.Begin(context.Background())
if err != nil { if err != nil {
return err return err
} }
defer tx.Rollback(context.Background()) //nolint: errcheck
var vars map[string]json.RawMessage switch qt {
case qcode.QTQuery:
if len(varBytes) != 0 { stmts1, err := buildMultiStmt(q, vars)
vars = make(map[string]json.RawMessage) if err != nil {
if err := json.Unmarshal(varBytes, &vars); err != nil {
return err return err
} }
err = prepare(tx, &stmts1[0], gqlHash(gql, vars, "user"))
if err != nil {
return err
}
stmts2, err := buildRoleStmt(q, vars, "anon")
if err != nil {
return err
}
err = prepare(tx, &stmts2[0], gqlHash(gql, vars, "anon"))
if err != nil {
return err
}
case qcode.QTMutation:
for _, role := range conf.Roles {
stmts, err := buildRoleStmt(q, vars, role.Name)
if err != nil {
return err
}
err = prepare(tx, &stmts[0], gqlHash(gql, vars, role.Name))
if err != nil {
return err
}
}
} }
buf := &bytes.Buffer{} if len(vars) == 0 {
logger.Debug().Msgf("Building prepared statement for:\n %s", gql)
skipped, err := pcompile.Compile(qc, buf, psql.Variables(vars)) } else {
if err != nil { logger.Debug().Msgf("Building prepared statement:\n %s\n%s", vars, gql)
return err
} }
t := fasttemplate.New(buf.String(), `{{`, `}}`) if err := tx.Commit(context.Background()); err != nil {
am := make([][]byte, 0, 5)
i := 0
finalSQL := t.ExecuteFuncString(func(w io.Writer, tag string) (int, error) {
am = append(am, []byte(tag))
i++
return w.Write([]byte(fmt.Sprintf("$%d", i)))
})
if err != nil {
return err
}
ctx := context.Background()
tx, err := db.Begin(ctx)
if err != nil {
return err
}
defer tx.Rollback(ctx)
pstmt, err := tx.Prepare(ctx, "", finalSQL)
if err != nil {
return err
}
_preparedList[key] = &preparedItem{
stmt: pstmt,
args: am,
skipped: skipped,
qc: qc,
}
if err := tx.Commit(ctx); err != nil {
return err return err
} }
return nil return nil
} }
func prepare(tx pgx.Tx, st *stmt, key string) error {
finalSQL, am := processTemplate(st.sql)
sd, err := tx.Prepare(context.Background(), "", finalSQL)
if err != nil {
return err
}
_preparedList[key] = &preparedItem{
sd: sd,
args: am,
st: st,
}
return nil
}
// nolint: errcheck
func prepareRoleStmt(tx pgx.Tx) error {
if len(conf.RolesQuery) == 0 {
return nil
}
w := &bytes.Buffer{}
io.WriteString(w, `SELECT (CASE`)
for _, role := range conf.Roles {
if len(role.Match) == 0 {
continue
}
io.WriteString(w, ` WHEN `)
io.WriteString(w, role.Match)
io.WriteString(w, ` THEN '`)
io.WriteString(w, role.Name)
io.WriteString(w, `'`)
}
io.WriteString(w, ` ELSE {{role}} END) FROM (`)
io.WriteString(w, conf.RolesQuery)
io.WriteString(w, `) AS "_sg_auth_roles_query"`)
roleSQL, _ := processTemplate(w.String())
_, err := tx.Prepare(context.Background(), "_sg_get_role", roleSQL)
if err != nil {
return err
}
return nil
}
func processTemplate(tmpl string) (string, [][]byte) {
st := struct {
vmap map[string]int
am [][]byte
i int
}{
vmap: make(map[string]int),
am: make([][]byte, 0, 5),
i: 0,
}
execFunc := func(w io.Writer, tag string) (int, error) {
if n, ok := st.vmap[tag]; ok {
return w.Write([]byte(fmt.Sprintf("$%d", n)))
}
st.am = append(st.am, []byte(tag))
st.i++
st.vmap[tag] = st.i
return w.Write([]byte(fmt.Sprintf("$%d", st.i)))
}
t1 := fasttemplate.New(tmpl, `'{{`, `}}'`)
ts1 := t1.ExecuteFuncString(execFunc)
t2 := fasttemplate.New(ts1, `{{`, `}}`)
ts2 := t2.ExecuteFuncString(execFunc)
return ts2, st.am
}
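Tracing processTemplate by hand: quoted '{{tag}}' forms are rewritten first, repeated tags collapse to a single positional argument via the vmap, and the ordered tag list is returned for argList (the SQL is illustrative):

sql, args := processTemplate(
    `SELECT * FROM users WHERE id = '{{user_id}}' OR created_by = '{{user_id}}'`)
// sql  == `SELECT * FROM users WHERE id = $1 OR created_by = $1`
// args == [][]byte{[]byte("user_id")}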

View File

@ -107,6 +107,13 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
case event := <-watcher.Events: case event := <-watcher.Events:
// Ensure that we use the correct events, as they are not uniform across // Ensure that we use the correct events, as they are not uniform across
// platforms. See https://github.com/fsnotify/fsnotify/issues/74 // platforms. See https://github.com/fsnotify/fsnotify/issues/74
if !conf.Production && strings.HasSuffix(event.Name, "/allow.list") {
continue
}
logger.Info().Msgf("Reloading, file changed detected '%s'", event)
var trigger bool var trigger bool
switch runtime.GOOS { switch runtime.GOOS {
case "darwin", "freebsd", "openbsd", "netbsd", "dragonfly": case "darwin", "freebsd", "openbsd", "netbsd", "dragonfly":
@ -161,7 +168,7 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
func ReExec() { func ReExec() {
err := syscall.Exec(binSelf, append([]string{binSelf}, os.Args[1:]...), os.Environ()) err := syscall.Exec(binSelf, append([]string{binSelf}, os.Args[1:]...), os.Environ())
if err != nil { if err != nil {
logger.Fatal().Err(err).Msg("cannot restart") errlog.Fatal().Err(err).Msg("cannot restart")
} }
} }

View File

@ -117,7 +117,7 @@ func buildFn(r configRemote) func(http.Header, []byte) ([]byte, error) {
res, err := client.Do(req) res, err := client.Do(req)
if err != nil { if err != nil {
logger.Error().Err(err).Msgf("Failed to connect to: %s", uri) errlog.Error().Err(err).Msgf("Failed to connect to: %s", uri)
return nil, err return nil, err
} }
defer res.Body.Close() defer res.Body.Close()

346
serv/rice-box.go Normal file

File diff suppressed because one or more lines are too long

View File

@ -12,52 +12,18 @@ import (
rice "github.com/GeertJohan/go.rice" rice "github.com/GeertJohan/go.rice"
"github.com/dosco/super-graph/psql" "github.com/dosco/super-graph/psql"
"github.com/dosco/super-graph/qcode" "github.com/dosco/super-graph/qcode"
"github.com/gobuffalo/flect"
) )
func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) { func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
schema, err := psql.NewDBSchema(db, c.getAliasMap()) var err error
schema, err = psql.NewDBSchema(db, c.getAliasMap())
if err != nil { if err != nil {
return nil, nil, err return nil, nil, err
} }
conf := qcode.Config{ conf := qcode.Config{
DefaultFilter: c.DB.Defaults.Filter, Blocklist: c.DB.Blocklist,
FilterMap: qcode.Filters{
All: make(map[string][]string, len(c.Tables)),
Query: make(map[string][]string, len(c.Tables)),
Insert: make(map[string][]string, len(c.Tables)),
Update: make(map[string][]string, len(c.Tables)),
Delete: make(map[string][]string, len(c.Tables)),
},
Blocklist: c.DB.Defaults.Blocklist,
KeepArgs: false,
}
for i := range c.Tables {
t := c.Tables[i]
singular := flect.Singularize(t.Name)
plural := flect.Pluralize(t.Name)
setFilter := func(fm map[string][]string, fil []string) {
switch {
case len(fil) == 0:
return
case fil[0] == "none" || len(fil[0]) == 0:
fm[singular] = []string{}
fm[plural] = []string{}
default:
fm[singular] = t.Filter
fm[plural] = t.Filter
}
}
setFilter(conf.FilterMap.All, t.Filter)
setFilter(conf.FilterMap.Query, t.FilterQuery)
setFilter(conf.FilterMap.Insert, t.FilterInsert)
setFilter(conf.FilterMap.Update, t.FilterUpdate)
setFilter(conf.FilterMap.Delete, t.FilterDelete)
} }
qc, err := qcode.NewCompiler(conf) qc, err := qcode.NewCompiler(conf)
@ -65,16 +31,73 @@ func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
return nil, nil, err return nil, nil, err
} }
blockFilter := []string{"false"}
for _, r := range c.Roles {
for _, t := range r.Tables {
query := qcode.QueryConfig{
Limit: t.Query.Limit,
Filters: t.Query.Filters,
Columns: t.Query.Columns,
DisableFunctions: t.Query.DisableFunctions,
}
if t.Query.Block {
query.Filters = blockFilter
}
insert := qcode.InsertConfig{
Filters: t.Insert.Filters,
Columns: t.Insert.Columns,
Presets: t.Insert.Presets,
}
if t.Query.Block {
insert.Filters = blockFilter
}
update := qcode.UpdateConfig{
Filters: t.Insert.Filters,
Columns: t.Insert.Columns,
Presets: t.Insert.Presets,
}
if t.Query.Block {
update.Filters = blockFilter
}
delete := qcode.DeleteConfig{
Filters: t.Insert.Filters,
Columns: t.Insert.Columns,
}
if t.Query.Block {
delete.Filters = blockFilter
}
err := qc.AddRole(r.Name, t.Name, qcode.TRConfig{
Query: query,
Insert: insert,
Update: update,
Delete: delete,
})
if err != nil {
return nil, nil, err
}
}
}
pc := psql.NewCompiler(psql.Config{ pc := psql.NewCompiler(psql.Config{
Schema: schema, Schema: schema,
Vars: c.getVariables(), Vars: c.DB.Vars,
}) })
return qc, pc, nil return qc, pc, nil
} }
func initWatcher(cpath string) { func initWatcher(cpath string) {
if conf.WatchAndReload == false { if !conf.WatchAndReload {
return return
} }
@ -88,7 +111,7 @@ func initWatcher(cpath string) {
go func() { go func() {
err := Do(logger.Printf, d) err := Do(logger.Printf, d)
if err != nil { if err != nil {
logger.Fatal().Err(err).Send() errlog.Fatal().Err(err).Send()
} }
}() }()
} }
@ -121,7 +144,7 @@ func startHTTP() {
<-sigint <-sigint
if err := srv.Shutdown(context.Background()); err != nil { if err := srv.Shutdown(context.Background()); err != nil {
logger.Error().Err(err).Msg("shutdown signal received") errlog.Error().Err(err).Msg("shutdown signal received")
} }
close(idleConnsClosed) close(idleConnsClosed)
}() }()
@ -130,10 +153,16 @@ func startHTTP() {
db.Close() db.Close()
}) })
fmt.Printf("%s listening on %s (%s)\n", serverName, hostPort, conf.Env) logger.Info().
Str("version", version).
Str("git_branch", gitBranch).
Str("host_post", hostPort).
Str("app_name", conf.AppName).
Str("env", conf.Env).
Msgf("%s listening", serverName)
if err := srv.ListenAndServe(); err != http.ErrServerClosed { if err := srv.ListenAndServe(); err != http.ErrServerClosed {
logger.Error().Err(err).Msg("server closed") errlog.Error().Err(err).Msg("server closed")
} }
<-idleConnsClosed <-idleConnsClosed
@ -143,6 +172,7 @@ func routeHandler() http.Handler {
mux := http.NewServeMux() mux := http.NewServeMux()
mux.Handle("/api/v1/graphql", withAuth(apiv1Http)) mux.Handle("/api/v1/graphql", withAuth(apiv1Http))
if conf.WebUI { if conf.WebUI {
mux.Handle("/", http.FileServer(rice.MustFindBox("../web/build").HTTPBox())) mux.Handle("/", http.FileServer(rice.MustFindBox("../web/build").HTTPBox()))
} }
@ -178,16 +208,3 @@ func getConfigName() string {
return ge return ge
} }
func getAuthFailBlock(c *config) int {
switch c.AuthFailBlock {
case "always":
return authFailBlockAlways
case "per_query", "perquery", "query":
return authFailBlockPerQuery
case "never", "false":
return authFailBlockNever
}
return authFailBlockAlways
}

43
serv/sqllog.go Normal file
View File

@ -0,0 +1,43 @@
package serv
import (
"context"
"github.com/jackc/pgx/v4"
"github.com/rs/zerolog"
)
type Logger struct {
logger zerolog.Logger
}
// NewSQLLogger accepts a zerolog.Logger as input and returns a new custom pgx
// logging facade as output.
func NewSQLLogger(logger zerolog.Logger) *Logger {
return &Logger{
logger: logger.With().Logger(),
}
}
func (pl *Logger) Log(ctx context.Context, level pgx.LogLevel, msg string, data map[string]interface{}) {
var zlevel zerolog.Level
switch level {
case pgx.LogLevelNone:
zlevel = zerolog.NoLevel
case pgx.LogLevelError:
zlevel = zerolog.ErrorLevel
case pgx.LogLevelWarn:
zlevel = zerolog.WarnLevel
case pgx.LogLevelDebug, pgx.LogLevelInfo:
zlevel = zerolog.DebugLevel
default:
zlevel = zerolog.DebugLevel
}
if sql, ok := data["sql"]; ok {
delete(data, "sql")
pl.logger.WithLevel(zlevel).Fields(data).Msg(sql.(string))
} else {
pl.logger.WithLevel(zlevel).Fields(data).Msg(msg)
}
}
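Wiring the facade into pgx might look like this (a sketch assuming pgx v4's ConnConfig logging hooks and an existing zerolog logger named zlog):

config, err := pgxpool.ParseConfig("postgres://localhost:5432/app") // hypothetical DSN
if err != nil {
    errlog.Fatal().Err(err).Send()
}
config.ConnConfig.Logger = NewSQLLogger(zlog)
config.ConnConfig.LogLevel = pgx.LogLevelDebug // matches the debug-level sql logging

pool, err := pgxpool.ConnectConfig(context.Background(), config)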

View File

@ -12,6 +12,7 @@ import (
"github.com/dosco/super-graph/jsn" "github.com/dosco/super-graph/jsn"
) )
// nolint: errcheck
func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 { func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
h.WriteString(k1) h.WriteString(k1)
h.WriteString(k2) h.WriteString(k2)
@ -21,16 +22,37 @@ func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
return v return v
} }
func gqlHash(b string, vars []byte) string { // nolint: errcheck
func gqlHash(b string, vars []byte, role string) string {
b = strings.TrimSpace(b) b = strings.TrimSpace(b)
h := sha1.New() h := sha1.New()
query := "query"
s, e := 0, 0 s, e := 0, 0
space := []byte{' '} space := []byte{' '}
starting := true
var b0, b1 byte var b0, b1 byte
if len(b) == 0 {
return ""
}
for { for {
if starting && b[e] == 'q' {
n := 0
se := e
for e < len(b) && n < len(query) && b[e] == query[n] {
n++
e++
}
if n != len(query) {
io.WriteString(h, strings.ToLower(b[se:e]))
}
}
if e >= len(b) {
break
}
if ws(b[e]) { if ws(b[e]) {
for e < len(b) && ws(b[e]) { for e < len(b) && ws(b[e]) {
e++ e++
@ -42,8 +64,9 @@ func gqlHash(b string, vars []byte) string {
h.Write(space) h.Write(space)
} }
} else { } else {
starting = false
s = e s = e
for e < len(b) && ws(b[e]) == false { for e < len(b) && !ws(b[e]) {
e++ e++
} }
if e != 0 { if e != 0 {
@ -56,7 +79,11 @@ func gqlHash(b string, vars []byte) string {
} }
} }
if vars == nil || len(vars) == 0 { if len(role) != 0 {
io.WriteString(h, role)
}
if len(vars) == 0 {
return hex.EncodeToString(h.Sum(nil)) return hex.EncodeToString(h.Sum(nil))
} }
@ -80,3 +107,37 @@ func ws(b byte) bool {
func al(b byte) bool { func al(b byte) bool {
return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9') return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
} }
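The practical effect of gqlHash is that formatting and a leading 'query' keyword don't change the fingerprint, while the role does (compare TestGQLHash4 below):

a := gqlHash("query { users { id } }", nil, "user")
b := gqlHash("{  users  {  id  }  }", nil, "user")
c := gqlHash("{ users { id } }", nil, "anon")
// a == b, but a != c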
func gqlName(b string) string {
state, s := 0, 0
for i := 0; i < len(b); i++ {
switch {
case state == 2 && b[i] == '{':
return b[s:i]
case state == 2 && b[i] == ' ':
return b[s:i]
case state == 1 && b[i] == '{':
return ""
case state == 1 && b[i] != ' ':
s = i
state = 2
case state == 1 && b[i] == ' ':
continue
case i != 0 && b[i] == ' ' && (b[i-1] == 'n' || b[i-1] == 'y'):
state = 1
}
}
return ""
}
func findStmt(role string, stmts []stmt) *stmt {
for i := range stmts {
if stmts[i].role.Name != role {
continue
}
return &stmts[i]
}
return nil
}

View File

@ -5,7 +5,7 @@ import (
"testing" "testing"
) )
func TestRelaxHash1(t *testing.T) { func TestGQLHash1(t *testing.T) {
var v1 = ` var v1 = `
products( products(
limit: 30, limit: 30,
@ -24,15 +24,15 @@ func TestRelaxHash1(t *testing.T) {
price price
} ` } `
h1 := gqlHash(v1, nil) h1 := gqlHash(v1, nil, "")
h2 := gqlHash(v2, nil) h2 := gqlHash(v2, nil, "")
if strings.Compare(h1, h2) != 0 { if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should") t.Fatal("Hashes don't match they should")
} }
} }
func TestRelaxHash2(t *testing.T) { func TestGQLHash2(t *testing.T) {
var v1 = ` var v1 = `
{ {
products( products(
@ -53,15 +53,15 @@ func TestRelaxHash2(t *testing.T) {
var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } ` var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } `
h1 := gqlHash(v1, nil) h1 := gqlHash(v1, nil, "")
h2 := gqlHash(v2, nil) h2 := gqlHash(v2, nil, "")
if strings.Compare(h1, h2) != 0 { if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should") t.Fatal("Hashes don't match they should")
} }
} }
func TestRelaxHash3(t *testing.T) { func TestGQLHash3(t *testing.T) {
var v1 = `users { var v1 = `users {
id id
email email
@ -86,15 +86,44 @@ func TestRelaxHash3(t *testing.T) {
} }
` `
h1 := gqlHash(v1, nil) h1 := gqlHash(v1, nil, "")
h2 := gqlHash(v2, nil) h2 := gqlHash(v2, nil, "")
if strings.Compare(h1, h2) != 0 { if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should") t.Fatal("Hashes don't match they should")
} }
} }
func TestRelaxHashWithVars1(t *testing.T) { func TestGQLHash4(t *testing.T) {
var v1 = `
query {
products(
limit: 30
order_by: { price: desc }
distinct: [price]
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }
) {
id
name
price
user {
id
email
}
}
}`
var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } `
h1 := gqlHash(v1, nil, "")
h2 := gqlHash(v2, nil, "")
if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should")
}
}
func TestGQLHashWithVars1(t *testing.T) {
var q1 = ` var q1 = `
products( products(
limit: 30, limit: 30,
@ -136,15 +165,15 @@ func TestRelaxHashWithVars1(t *testing.T) {
"user": 123 "user": 123
}` }`
h1 := gqlHash(q1, []byte(v1)) h1 := gqlHash(q1, []byte(v1), "user")
h2 := gqlHash(q2, []byte(v2)) h2 := gqlHash(q2, []byte(v2), "user")
if strings.Compare(h1, h2) != 0 { if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should") t.Fatal("Hashes don't match they should")
} }
} }
func TestRelaxHashWithVars2(t *testing.T) { func TestGQLHashWithVars2(t *testing.T) {
var q1 = ` var q1 = `
products( products(
limit: 30, limit: 30,
@ -193,10 +222,87 @@ func TestRelaxHashWithVars2(t *testing.T) {
"user": 123 "user": 123
}` }`
h1 := gqlHash(q1, []byte(v1)) h1 := gqlHash(q1, []byte(v1), "user")
h2 := gqlHash(q2, []byte(v2)) h2 := gqlHash(q2, []byte(v2), "user")
if strings.Compare(h1, h2) != 0 { if strings.Compare(h1, h2) != 0 {
t.Fatal("Hashes don't match they should") t.Fatal("Hashes don't match they should")
} }
} }
+
+func TestGQLName1(t *testing.T) {
+	var q = `
+	query {
+		products(
+			distinct: [price]
+			where: { id: { and: { greater_or_equals: 20, lt: 28 } } }
+		) { id name } }`
+
+	name := gqlName(q)
+
+	if len(name) != 0 {
+		t.Fatal("Name should be empty, not ", name)
+	}
+}
+
+func TestGQLName2(t *testing.T) {
+	var q = `
+	query hakuna_matata {
+		products(
+			distinct: [price]
+			where: { id: { and: { greater_or_equals: 20, lt: 28 } } }
+		) {
+			id
+			name
+		}
+	}`
+
+	name := gqlName(q)
+
+	if name != "hakuna_matata" {
+		t.Fatal("Name should be 'hakuna_matata', not ", name)
+	}
+}
+
+func TestGQLName3(t *testing.T) {
+	var q = `
+	mutation means{ users { id } }`
+
+	// var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } `
+
+	name := gqlName(q)
+
+	if name != "means" {
+		t.Fatal("Name should be 'means', not ", name)
+	}
+}
+
+func TestGQLName4(t *testing.T) {
+	var q = `
+	query no_worries
+	users {
+		id
+	}
+}`
+
+	name := gqlName(q)
+
+	if name != "no_worries" {
+		t.Fatal("Name should be 'no_worries', not ", name)
+	}
+}
+
+func TestGQLName5(t *testing.T) {
+	var q = `
+	{
+		users {
+			id
+		}
+	}`
+
+	name := gqlName(q)
+
+	if len(name) != 0 {
+		t.Fatal("Name should be empty, not ", name)
+	}
+}
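Taken together, the renamed and added tests pin down the new contract: gqlHash now takes a third role argument (so one saved query can hash differently per role), treats commas and extra whitespace as insignificant, ignores a leading operation keyword, and folds request variables in regardless of their key order; gqlName returns the operation name, or an empty string for anonymous operations. Below is a minimal sketch of functions with that observable behavior, assuming SHA-256; super-graph's actual implementation is lexer-based and will differ in detail.

package qhash

import (
	"crypto/sha256"
	"encoding/hex"
	"encoding/json"
	"strings"
)

// gqlHashSketch hashes a query so that formatting differences vanish:
// commas are insignificant (as in GraphQL), all whitespace is dropped,
// and a leading operation keyword such as `query` is stripped so that
// `query { ... }` and `{ ... }` hash alike. Variables are unmarshalled
// and re-marshalled (Go writes map keys in sorted order) so key order
// is irrelevant, and the role is mixed in so each role gets its own
// allow-list entry.
func gqlHashSketch(query string, vars []byte, role string) string {
	h := sha256.New()
	h.Write([]byte(normalize(query)))
	h.Write([]byte(role))

	if len(vars) != 0 {
		var v interface{}
		if err := json.Unmarshal(vars, &v); err == nil {
			b, _ := json.Marshal(v)
			h.Write(b)
		}
	}
	return hex.EncodeToString(h.Sum(nil))
}

func normalize(q string) string {
	q = strings.ToLower(strings.ReplaceAll(q, ",", " "))
	fields := strings.Fields(q)

	// Drop `query` / `mutation` / `subscription` plus an optional
	// operation name, so the named and anonymous forms collide on
	// purpose.
	if len(fields) != 0 {
		switch fields[0] {
		case "query", "mutation", "subscription":
			fields = fields[1:]
			if len(fields) != 0 && !strings.Contains(fields[0], "{") {
				fields = fields[1:]
			}
		}
	}

	// Crude but sufficient for the tests above; a real implementation
	// tokenizes instead of deleting every space.
	return strings.Join(fields, "")
}

// gqlNameSketch scans the tokens before the first '{' and returns the
// second one, which is the operation name when present.
func gqlNameSketch(q string) string {
	head := q
	if i := strings.IndexByte(q, '{'); i >= 0 {
		head = q[:i]
	}
	fields := strings.Fields(head)
	if len(fields) >= 2 {
		return fields[1]
	}
	return ""
}

The role argument is what lets a single saved query carry distinct allow-list entries, and therefore distinct filters, per role.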

===== next file =====

@@ -1,107 +0,0 @@
-package serv
-
-import (
-	"bytes"
-	"fmt"
-	"io"
-
-	"github.com/dosco/super-graph/jsn"
-)
-
-func varMap(ctx *coreContext) func(w io.Writer, tag string) (int, error) {
-	return func(w io.Writer, tag string) (int, error) {
-		switch tag {
-		case "user_id":
-			if v := ctx.Value(userIDKey); v != nil {
-				return stringVar(w, v.(string))
-			}
-			return 0, errNoUserID
-
-		case "user_id_provider":
-			if v := ctx.Value(userIDProviderKey); v != nil {
-				return stringVar(w, v.(string))
-			}
-			return 0, errNoUserID
-		}
-
-		fields := jsn.Get(ctx.req.Vars, [][]byte{[]byte(tag)})
-		if len(fields) == 0 {
-			return 0, fmt.Errorf("variable '%s' not found", tag)
-		}
-
-		is := false
-
-		for i := range fields[0].Value {
-			c := fields[0].Value[i]
-			if c != ' ' {
-				is = (c == '"') || (c == '{') || (c == '[')
-				break
-			}
-		}
-
-		if is {
-			return stringVarB(w, fields[0].Value)
-		}
-
-		w.Write(fields[0].Value)
-		return 0, nil
-	}
-}
-
-func varList(ctx *coreContext, args [][]byte) []interface{} {
-	vars := make([]interface{}, len(args))
-
-	var fields map[string]interface{}
-	var err error
-
-	if len(ctx.req.Vars) != 0 {
-		fields, _, err = jsn.Tree(ctx.req.Vars)
-		if err != nil {
-			logger.Warn().Err(err).Msg("Failed to parse variables")
-		}
-	}
-
-	for i := range args {
-		av := args[i]
-
-		switch {
-		case bytes.Equal(av, []byte("user_id")):
-			if v := ctx.Value(userIDKey); v != nil {
-				vars[i] = v.(string)
-			}
-
-		case bytes.Equal(av, []byte("user_id_provider")):
-			if v := ctx.Value(userIDProviderKey); v != nil {
-				vars[i] = v.(string)
-			}
-
-		default:
-			if v, ok := fields[string(av)]; ok {
-				vars[i] = v
-			}
-		}
-	}
-
-	return vars
-}
-
-func stringVar(w io.Writer, v string) (int, error) {
-	if n, err := w.Write([]byte(`'`)); err != nil {
-		return n, err
-	}
-
-	if n, err := w.Write([]byte(v)); err != nil {
-		return n, err
-	}
-
-	return w.Write([]byte(`'`))
-}
-
-func stringVarB(w io.Writer, v []byte) (int, error) {
-	if n, err := w.Write([]byte(`'`)); err != nil {
-		return n, err
-	}
-
-	if n, err := w.Write(v); err != nil {
-		return n, err
-	}
-
-	return w.Write([]byte(`'`))
-}
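This file goes away entirely: its helpers wrote request variables straight into the SQL text as single-quoted literals (stringVar / stringVarB). Given the prepared-statement commits in this range, values presumably travel as bind parameters now. A minimal sketch of that pattern follows; the query and function name are hypothetical, not super-graph's API.

package demo

import (
	"context"
	"database/sql"
)

// Hypothetical replacement for the varList/stringVar pattern: values
// are collected in argument order and handed to the driver as bind
// parameters, so nothing is ever spliced into the SQL text as a quoted
// string and the escaping concerns stringVar carried simply disappear.
func productsForUser(ctx context.Context, db *sql.DB, userID string) (*sql.Rows, error) {
	const q = `SELECT id, name, price FROM products WHERE user_id = $1 LIMIT 50`
	return db.QueryContext(ctx, q, userID)
}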

===== next file =====

@@ -77,10 +77,10 @@ SQL Output
 database:
   variables:
-    account_id: "select account_id from users where id = $user_id"
+    admin_account_id: "5"
 
   defaults:
-    filter: ["{ user_id: { eq: $user_id } }"]
+    Filters: ["{ user_id: { eq: $user_id } }"]
 
   blacklist:
     - password

@@ -88,14 +88,14 @@ SQL Output
   fields:
     - name: users
-      filter: ["{ id: { eq: $user_id } }"]
+      Filters: ["{ id: { eq: $user_id } }"]
 
     - name: products
-      filter: [
+      Filters: [
         "{ price: { gt: 0 } }",
         "{ price: { lt: 8 } }"
       ]
 
     - name: me
       table: users
-      filter: ["{ id: { eq: $user_id } }"]
+      Filters: ["{ id: { eq: $user_id } }"]

===== next file =====

@@ -3,17 +3,16 @@ host_port: 0.0.0.0:8080
 web_ui: true
 
 # debug, info, warn, error, fatal, panic
-log_level: "debug"
+log_level: "info"
 
-# Disable this in development to get a list of
-# queries used. When enabled super graph
-# will only allow queries from this list
-# List saved to ./config/allow.list
-use_allow_list: false
+# When production mode is 'true' only queries
+# from the allow list are permitted.
+# When it's 'false' all queries are saved to
+# the allow list in ./config/allow.list
+production: false
 
 # Throw a 401 on auth failure for queries that need auth
-# valid values: always, per_query, never
-auth_fail_block: never
+auth_fail_block: false
 
 # Latency tracing for database queries and remote joins
 # the resulting latency information is returned with the

@@ -49,11 +48,12 @@ migrations_path: ./config/migrations
 auth:
   # Can be 'rails' or 'jwt'
   type: rails
-  cookie: _app_session
+  cookie: _{% app_name_slug %}_session
 
   # Comment this out if you want to disable setting
-  # the user_id via a header. Good for testing
-  header: X-User-ID
+  # the user_id via a header for testing.
+  # Disable in production
+  creds_in_header: true
 
 rails:
   # Rails version this is used for reading the

@@ -93,63 +93,97 @@ database:
   #max_retries: 0
   #log_level: "debug"
 
-  # Define variables here that you want to use in filters
-  # sub-queries must be wrapped in ()
+  # Set session variable "user.id" to the user id
+  # Enable this if you need the user id in triggers, etc
+  set_user_id: false
+
+  # Define additional variables here to be used with filters
   variables:
-    account_id: "(select account_id from users where id = $user_id)"
+    admin_account_id: "5"
 
-  # Define defaults to for the field key and values below
-  defaults:
-    # filter: ["{ user_id: { eq: $user_id } }"]
-
   # Field and table names that you wish to block
   blocklist:
   - ar_internal_metadata
   - schema_migrations
   - secret
   - password
   - encrypted
   - token
 
 tables:
-  - name: users
-    # This filter will overwrite defaults.filter
-    # filter: ["{ id: { eq: $user_id } }"]
-    # filter_query: ["{ id: { eq: $user_id } }"]
-    filter_update: ["{ id: { eq: $user_id } }"]
-    filter_delete: ["{ id: { eq: $user_id } }"]
-
-  # - name: products
-  #   # Multiple filters are AND'd together
-  #   filter: [
-  #     "{ price: { gt: 0 } }",
-  #     "{ price: { lt: 8 } }"
-  #   ]
-
   - name: customers
-    # No filter is used for this field not
-    # even defaults.filter
-    filter: none
-
     remotes:
       - name: payments
         id: stripe_id
         url: http://rails_app:3000/stripe/$id
         path: data
         # debug: true
         pass_headers:
           - cookie
         set_headers:
           - name: Host
             value: 0.0.0.0
           # - name: Authorization
           #   value: Bearer <stripe_api_key>
 
   - # You can create new fields that have a
     # real db table backing them
     name: me
     table: users
-    filter: ["{ id: { eq: $user_id } }"]
-
-  # - name: posts
-  #   filter: ["{ account_id: { _eq: $account_id } }"]
+
+roles_query: "SELECT * FROM users WHERE id = $user_id"
+
+roles:
+  - name: anon
+    tables:
+      - name: products
+        limit: 10
+
+        query:
+          columns: ["id", "name", "description" ]
+          aggregation: false
+
+        insert:
+          block: false
+
+        update:
+          block: false
+
+        delete:
+          block: false
+
+  - name: user
+    tables:
+      - name: users
+        query:
+          filters: ["{ id: { _eq: $user_id } }"]
+
+      - name: products
+        query:
+          limit: 50
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          disable_functions: false
+
+        insert:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns: ["id", "name", "description" ]
+          presets:
+            - created_at: "now"
+
+        update:
+          filters: ["{ user_id: { eq: $user_id } }"]
+          columns:
+            - id
+            - name
+          presets:
+            - updated_at: "now"
+
+        delete:
+          block: true
+
+  - name: admin
+    match: id = 1000
+    tables:
+      - name: users
+        filters: []
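The new roles block implies a three-tier selection: anon when no user is present, user for any authenticated request, and a named role such as admin when the row returned by roles_query satisfies its match expression (id = 1000 above). A sketch of that selection logic under those assumptions; the types and names here are hypothetical, and the evaluator only illustrates the simple equality form shown in the config.

package demo

import (
	"context"
	"database/sql"
)

// Role mirrors a config entry: a name plus an optional match predicate
// evaluated against the row returned by roles_query.
type Role struct {
	Name  string
	Match func(user map[string]interface{}) bool
}

// resolveRole picks "anon" for unauthenticated requests, otherwise
// runs roles_query once and returns the first role whose match accepts
// the row, falling back to the built-in "user" role.
func resolveRole(ctx context.Context, db *sql.DB, userID *string, roles []Role) (string, error) {
	if userID == nil {
		return "anon", nil
	}

	// roles_query: "SELECT * FROM users WHERE id = $user_id"
	// (narrowed to one column here so the sketch can Scan it)
	row := db.QueryRowContext(ctx, `SELECT id FROM users WHERE id = $1`, *userID)

	var id int64
	if err := row.Scan(&id); err != nil {
		return "", err
	}

	user := map[string]interface{}{"id": id}
	for _, r := range roles {
		if r.Match != nil && r.Match(user) {
			return r.Name, nil
		}
	}
	return "user", nil
}

// The config's `match: id = 1000` would translate to something like:
// Role{Name: "admin", Match: func(u map[string]interface{}) bool {
// 	return u["id"] == int64(1000)
// }}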

Some files were not shown because too many files have changed in this diff.