Compare commits
17 Commits
Author | SHA1 | Date | |
---|---|---|---|
6293d37e73 | |||
7a3fe5a1df | |||
2a32c179ba | |||
0a02bde219 | |||
966aa9ce8c | |||
6f18d56ca0 | |||
c400461835 | |||
a6691de1b7 | |||
e6934cda02 | |||
4cf7956ff5 | |||
5356455904 | |||
074aded5c0 | |||
c7557f761f | |||
09d6460a13 | |||
40c99e9ef3 | |||
75ff5510d4 | |||
1370d24985 |
1
.gitignore
vendored
1
.gitignore
vendored
@ -35,3 +35,4 @@ suppressions
|
|||||||
release
|
release
|
||||||
.gofuzz
|
.gofuzz
|
||||||
*-fuzz.zip
|
*-fuzz.zip
|
||||||
|
|
||||||
|
@ -7,7 +7,7 @@ rules:
|
|||||||
- name: run
|
- name: run
|
||||||
match: \.go$
|
match: \.go$
|
||||||
ignore: web|examples|docs|_test\.go$
|
ignore: web|examples|docs|_test\.go$
|
||||||
command: go run cmd/main.go serv
|
command: go run main.go serv
|
||||||
- name: test
|
- name: test
|
||||||
match: _test\.go$
|
match: _test\.go$
|
||||||
command: go test -cover {PKG}
|
command: go test -cover {PKG}
|
@ -1,7 +1,7 @@
|
|||||||
# stage: 1
|
# stage: 1
|
||||||
FROM node:10 as react-build
|
FROM node:10 as react-build
|
||||||
WORKDIR /web
|
WORKDIR /web
|
||||||
COPY /cmd/internal/serv/web/ ./
|
COPY /internal/serv/web/ ./
|
||||||
RUN yarn
|
RUN yarn
|
||||||
RUN yarn build
|
RUN yarn build
|
||||||
|
|
||||||
@ -24,8 +24,8 @@ RUN chmod 755 /usr/local/bin/sops
|
|||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
COPY . /app
|
COPY . /app
|
||||||
|
|
||||||
RUN mkdir -p /app/cmd/internal/serv/web/build
|
RUN mkdir -p /app/internal/serv/web/build
|
||||||
COPY --from=react-build /web/build/ ./cmd/internal/serv/web/build
|
COPY --from=react-build /web/build/ ./internal/serv/web/build
|
||||||
|
|
||||||
RUN go mod vendor
|
RUN go mod vendor
|
||||||
RUN make build
|
RUN make build
|
||||||
@ -45,7 +45,7 @@ RUN mkdir -p /config
|
|||||||
COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
|
COPY --from=go-build /etc/ssl/certs/ca-certificates.crt /etc/ssl/certs/
|
||||||
COPY --from=go-build /app/config/* /config/
|
COPY --from=go-build /app/config/* /config/
|
||||||
COPY --from=go-build /app/super-graph .
|
COPY --from=go-build /app/super-graph .
|
||||||
COPY --from=go-build /app/cmd/scripts/start.sh .
|
COPY --from=go-build /app/internal/scripts/start.sh .
|
||||||
COPY --from=go-build /usr/local/bin/sops .
|
COPY --from=go-build /usr/local/bin/sops .
|
||||||
|
|
||||||
RUN chmod +x /super-graph
|
RUN chmod +x /super-graph
|
||||||
|
23
Makefile
23
Makefile
@ -12,10 +12,10 @@ endif
|
|||||||
export GO111MODULE := on
|
export GO111MODULE := on
|
||||||
|
|
||||||
# Build-time Go variables
|
# Build-time Go variables
|
||||||
version = github.com/dosco/super-graph/serv.version
|
version = github.com/dosco/super-graph/internal/serv.version
|
||||||
gitBranch = github.com/dosco/super-graph/serv.gitBranch
|
gitBranch = github.com/dosco/super-graph/internal/serv.gitBranch
|
||||||
lastCommitSHA = github.com/dosco/super-graph/serv.lastCommitSHA
|
lastCommitSHA = github.com/dosco/super-graph/internal/serv.lastCommitSHA
|
||||||
lastCommitTime = github.com/dosco/super-graph/serv.lastCommitTime
|
lastCommitTime = github.com/dosco/super-graph/internal/serv.lastCommitTime
|
||||||
|
|
||||||
BUILD_FLAGS ?= -ldflags '-s -w -X ${lastCommitSHA}=${BUILD} -X "${lastCommitTime}=${BUILD_DATE}" -X "${version}=${BUILD_VERSION}" -X ${gitBranch}=${BUILD_BRANCH}'
|
BUILD_FLAGS ?= -ldflags '-s -w -X ${lastCommitSHA}=${BUILD} -X "${lastCommitTime}=${BUILD_DATE}" -X "${version}=${BUILD_VERSION}" -X ${gitBranch}=${BUILD_BRANCH}'
|
||||||
|
|
||||||
@ -28,18 +28,18 @@ BIN_DIR := $(GOPATH)/bin
|
|||||||
GORICE := $(BIN_DIR)/rice
|
GORICE := $(BIN_DIR)/rice
|
||||||
GOLANGCILINT := $(BIN_DIR)/golangci-lint
|
GOLANGCILINT := $(BIN_DIR)/golangci-lint
|
||||||
GITCHGLOG := $(BIN_DIR)/git-chglog
|
GITCHGLOG := $(BIN_DIR)/git-chglog
|
||||||
WEB_BUILD_DIR := ./cmd/internal/serv/web/build/manifest.json
|
WEB_BUILD_DIR := ./internal/serv/web/build/manifest.json
|
||||||
|
|
||||||
$(GORICE):
|
$(GORICE):
|
||||||
@GO111MODULE=off go get -u github.com/GeertJohan/go.rice/rice
|
@GO111MODULE=off go get -u github.com/GeertJohan/go.rice/rice
|
||||||
|
|
||||||
$(WEB_BUILD_DIR):
|
$(WEB_BUILD_DIR):
|
||||||
@echo "First install Yarn and create a build of the web UI then re-run make install"
|
@echo "First install Yarn and create a build of the web UI then re-run make install"
|
||||||
@echo "Run this command: yarn --cwd cmd/internal/serv/web/ build"
|
@echo "Run this command: yarn --cwd internal/serv/web/ build"
|
||||||
@exit 1
|
@exit 1
|
||||||
|
|
||||||
$(GITCHGLOG):
|
$(GITCHGLOG):
|
||||||
@GO111MODULE=off go get -u github.com/git-chglog/git-chglog/cmd/git-chglog
|
@GO111MODULE=off go get -u github.com/git-chglog/git-chglog/git-chglog
|
||||||
|
|
||||||
changelog: $(GITCHGLOG)
|
changelog: $(GITCHGLOG)
|
||||||
@git-chglog $(ARGS)
|
@git-chglog $(ARGS)
|
||||||
@ -57,7 +57,7 @@ os = $(word 1, $@)
|
|||||||
|
|
||||||
$(PLATFORMS): lint test
|
$(PLATFORMS): lint test
|
||||||
@mkdir -p release
|
@mkdir -p release
|
||||||
@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64 cmd/main.go
|
@GOOS=$(os) GOARCH=amd64 go build $(BUILD_FLAGS) -o release/$(BINARY)-$(BUILD_VERSION)-$(os)-amd64 main.go
|
||||||
|
|
||||||
release: windows linux darwin
|
release: windows linux darwin
|
||||||
|
|
||||||
@ -69,7 +69,7 @@ gen: $(GORICE) $(WEB_BUILD_DIR)
|
|||||||
@go generate ./...
|
@go generate ./...
|
||||||
|
|
||||||
$(BINARY): clean
|
$(BINARY): clean
|
||||||
@go build $(BUILD_FLAGS) -o $(BINARY) cmd/main.go
|
@go build $(BUILD_FLAGS) -o $(BINARY) main.go
|
||||||
|
|
||||||
clean:
|
clean:
|
||||||
@rm -f $(BINARY)
|
@rm -f $(BINARY)
|
||||||
@ -77,11 +77,10 @@ clean:
|
|||||||
run: clean
|
run: clean
|
||||||
@go run $(BUILD_FLAGS) main.go $(ARGS)
|
@go run $(BUILD_FLAGS) main.go $(ARGS)
|
||||||
|
|
||||||
install:
|
install: clean build
|
||||||
@echo $(GOPATH)
|
|
||||||
@echo "Commit Hash: `git rev-parse HEAD`"
|
@echo "Commit Hash: `git rev-parse HEAD`"
|
||||||
@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
|
@echo "Old Hash: `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`"
|
||||||
@go install $(BUILD_FLAGS) cmd
|
@mv $(BINARY) $(GOPATH)/bin/$(BINARY)
|
||||||
@echo "New Hash:" `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`
|
@echo "New Hash:" `shasum $(GOPATH)/bin/$(BINARY) 2>/dev/null | cut -c -32`
|
||||||
|
|
||||||
uninstall: clean
|
uninstall: clean
|
||||||
|
@ -15,10 +15,7 @@ Designed to 100x your developer productivity. Super Graph will instantly and wit
|
|||||||
## Using it as a service
|
## Using it as a service
|
||||||
|
|
||||||
```console
|
```console
|
||||||
git clone https://github.com/dosco/super-graph
|
get get https://github.com/dosco/super-graph
|
||||||
cd ./super-graph
|
|
||||||
make install
|
|
||||||
|
|
||||||
super-graph new <app_name>
|
super-graph new <app_name>
|
||||||
```
|
```
|
||||||
|
|
||||||
|
@ -1,30 +0,0 @@
|
|||||||
{
|
|
||||||
"files": {
|
|
||||||
"main.css": "/static/css/main.c6b5c55c.chunk.css",
|
|
||||||
"main.js": "/static/js/main.04d74040.chunk.js",
|
|
||||||
"main.js.map": "/static/js/main.04d74040.chunk.js.map",
|
|
||||||
"runtime-main.js": "/static/js/runtime-main.4aea9da3.js",
|
|
||||||
"runtime-main.js.map": "/static/js/runtime-main.4aea9da3.js.map",
|
|
||||||
"static/js/2.03370bd3.chunk.js": "/static/js/2.03370bd3.chunk.js",
|
|
||||||
"static/js/2.03370bd3.chunk.js.map": "/static/js/2.03370bd3.chunk.js.map",
|
|
||||||
"index.html": "/index.html",
|
|
||||||
"precache-manifest.e33bc3c7c6774d7032c490820c96901d.js": "/precache-manifest.e33bc3c7c6774d7032c490820c96901d.js",
|
|
||||||
"service-worker.js": "/service-worker.js",
|
|
||||||
"static/css/main.c6b5c55c.chunk.css.map": "/static/css/main.c6b5c55c.chunk.css.map",
|
|
||||||
"static/media/GraphQLLanguageService.js.flow": "/static/media/GraphQLLanguageService.js.5ab204b9.flow",
|
|
||||||
"static/media/autocompleteUtils.js.flow": "/static/media/autocompleteUtils.js.4ce7ba19.flow",
|
|
||||||
"static/media/getAutocompleteSuggestions.js.flow": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow",
|
|
||||||
"static/media/getDefinition.js.flow": "/static/media/getDefinition.js.4dbec62f.flow",
|
|
||||||
"static/media/getDiagnostics.js.flow": "/static/media/getDiagnostics.js.65b0979a.flow",
|
|
||||||
"static/media/getHoverInformation.js.flow": "/static/media/getHoverInformation.js.d9411837.flow",
|
|
||||||
"static/media/getOutline.js.flow": "/static/media/getOutline.js.c04e3998.flow",
|
|
||||||
"static/media/index.js.flow": "/static/media/index.js.02c24280.flow",
|
|
||||||
"static/media/logo.png": "/static/media/logo.57ee3b60.png"
|
|
||||||
},
|
|
||||||
"entrypoints": [
|
|
||||||
"static/js/runtime-main.4aea9da3.js",
|
|
||||||
"static/js/2.03370bd3.chunk.js",
|
|
||||||
"static/css/main.c6b5c55c.chunk.css",
|
|
||||||
"static/js/main.04d74040.chunk.js"
|
|
||||||
]
|
|
||||||
}
|
|
@ -1 +0,0 @@
|
|||||||
<!doctype html><html lang="en"><head><meta charset="utf-8"/><link rel="shortcut icon" href="/favicon.ico"/><meta name="viewport" content="width=device-width,initial-scale=1,shrink-to-fit=no"/><meta name="theme-color" content="#000000"/><link rel="manifest" href="/manifest.json"/><link href="https://fonts.googleapis.com/css?family=Open+Sans:300,400,600,700|Source+Code+Pro:400,700" rel="stylesheet"><title>Super Graph - GraphQL API for Rails</title><link href="/static/css/main.c6b5c55c.chunk.css" rel="stylesheet"></head><body><noscript>You need to enable JavaScript to run this app.</noscript><div id="root"></div><script>!function(i){function e(e){for(var r,t,n=e[0],o=e[1],u=e[2],l=0,f=[];l<n.length;l++)t=n[l],Object.prototype.hasOwnProperty.call(p,t)&&p[t]&&f.push(p[t][0]),p[t]=0;for(r in o)Object.prototype.hasOwnProperty.call(o,r)&&(i[r]=o[r]);for(s&&s(e);f.length;)f.shift()();return c.push.apply(c,u||[]),a()}function a(){for(var e,r=0;r<c.length;r++){for(var t=c[r],n=!0,o=1;o<t.length;o++){var u=t[o];0!==p[u]&&(n=!1)}n&&(c.splice(r--,1),e=l(l.s=t[0]))}return e}var t={},p={1:0},c=[];function l(e){if(t[e])return t[e].exports;var r=t[e]={i:e,l:!1,exports:{}};return i[e].call(r.exports,r,r.exports,l),r.l=!0,r.exports}l.m=i,l.c=t,l.d=function(e,r,t){l.o(e,r)||Object.defineProperty(e,r,{enumerable:!0,get:t})},l.r=function(e){"undefined"!=typeof Symbol&&Symbol.toStringTag&&Object.defineProperty(e,Symbol.toStringTag,{value:"Module"}),Object.defineProperty(e,"__esModule",{value:!0})},l.t=function(r,e){if(1&e&&(r=l(r)),8&e)return r;if(4&e&&"object"==typeof r&&r&&r.__esModule)return r;var t=Object.create(null);if(l.r(t),Object.defineProperty(t,"default",{enumerable:!0,value:r}),2&e&&"string"!=typeof r)for(var n in r)l.d(t,n,function(e){return r[e]}.bind(null,n));return t},l.n=function(e){var r=e&&e.__esModule?function(){return e.default}:function(){return e};return l.d(r,"a",r),r},l.o=function(e,r){return Object.prototype.hasOwnProperty.call(e,r)},l.p="/";var 
r=this.webpackJsonpweb=this.webpackJsonpweb||[],n=r.push.bind(r);r.push=e,r=r.slice();for(var o=0;o<r.length;o++)e(r[o]);var s=n;a()}([])</script><script src="/static/js/2.03370bd3.chunk.js"></script><script src="/static/js/main.04d74040.chunk.js"></script></body></html>
|
|
@ -1,58 +0,0 @@
|
|||||||
self.__precacheManifest = (self.__precacheManifest || []).concat([
|
|
||||||
{
|
|
||||||
"revision": "ecdae64182d05c64e7f7f200ed03a4ed",
|
|
||||||
"url": "/index.html"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "6e9467dc213a3e2b84ea",
|
|
||||||
"url": "/static/css/main.c6b5c55c.chunk.css"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "c156a125990ddf5dcc51",
|
|
||||||
"url": "/static/js/2.03370bd3.chunk.js"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "6e9467dc213a3e2b84ea",
|
|
||||||
"url": "/static/js/main.04d74040.chunk.js"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "427262b6771d3f49a7c5",
|
|
||||||
"url": "/static/js/runtime-main.4aea9da3.js"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "5ab204b9b95c06640dbefae9a65b1db2",
|
|
||||||
"url": "/static/media/GraphQLLanguageService.js.5ab204b9.flow"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "4ce7ba191f7ebee4426768f246b2f0e0",
|
|
||||||
"url": "/static/media/autocompleteUtils.js.4ce7ba19.flow"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "7f98f032085704c8943ec2d1925c7c84",
|
|
||||||
"url": "/static/media/getAutocompleteSuggestions.js.7f98f032.flow"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "4dbec62f1d8e8417afb9cbd19f1268c3",
|
|
||||||
"url": "/static/media/getDefinition.js.4dbec62f.flow"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "65b0979ac23feca49e4411883fd8eaab",
|
|
||||||
"url": "/static/media/getDiagnostics.js.65b0979a.flow"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "d94118379d362fc161aa1246bcc14d43",
|
|
||||||
"url": "/static/media/getHoverInformation.js.d9411837.flow"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "c04e3998712b37a96f0bfd283fa06b52",
|
|
||||||
"url": "/static/media/getOutline.js.c04e3998.flow"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "02c24280c5e4a7eb3c6cfcb079a8f1e3",
|
|
||||||
"url": "/static/media/index.js.02c24280.flow"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"revision": "57ee3b6084cb9d3c754cc12d25a98035",
|
|
||||||
"url": "/static/media/logo.57ee3b60.png"
|
|
||||||
}
|
|
||||||
]);
|
|
@ -1,2 +0,0 @@
|
|||||||
body{margin:0;padding:0;font-family:-apple-system,BlinkMacSystemFont,Segoe UI,Roboto,Oxygen,Ubuntu,Cantarell,Fira Sans,Droid Sans,Helvetica Neue,sans-serif;-webkit-font-smoothing:antialiased;-moz-osx-font-smoothing:grayscale;background-color:#0f202d}code{font-family:source-code-pro,Menlo,Monaco,Consolas,Courier New,monospace}.playground>div:nth-child(2){height:calc(100vh - 131px)}
|
|
||||||
/*# sourceMappingURL=main.c6b5c55c.chunk.css.map */
|
|
File diff suppressed because one or more lines are too long
@ -1,2 +0,0 @@
|
|||||||
(this.webpackJsonpweb=this.webpackJsonpweb||[]).push([[0],{163:function(e,t,n){var r={".":61,"./":61,"./GraphQLLanguageService":117,"./GraphQLLanguageService.js":117,"./GraphQLLanguageService.js.flow":315,"./autocompleteUtils":91,"./autocompleteUtils.js":91,"./autocompleteUtils.js.flow":316,"./getAutocompleteSuggestions":77,"./getAutocompleteSuggestions.js":77,"./getAutocompleteSuggestions.js.flow":317,"./getDefinition":92,"./getDefinition.js":92,"./getDefinition.js.flow":318,"./getDiagnostics":94,"./getDiagnostics.js":94,"./getDiagnostics.js.flow":319,"./getHoverInformation":95,"./getHoverInformation.js":95,"./getHoverInformation.js.flow":320,"./getOutline":116,"./getOutline.js":116,"./getOutline.js.flow":321,"./index":61,"./index.js":61,"./index.js.flow":322};function o(e){var t=a(e);return n(t)}function a(e){if(!n.o(r,e)){var t=new Error("Cannot find module '"+e+"'");throw t.code="MODULE_NOT_FOUND",t}return r[e]}o.keys=function(){return Object.keys(r)},o.resolve=a,e.exports=o,o.id=163},190:function(e,t,n){"use strict";(function(e){var r=n(100),o=n(101),a=n(201),i=n(191),s=n(202),l=n(5),c=n.n(l),u=n(20),g=n(130),f=(n(441),window.fetch);window.fetch=function(){return arguments[1].credentials="include",Promise.resolve(f.apply(e,arguments))};var p=function(e){function t(){return Object(r.a)(this,t),Object(a.a)(this,Object(i.a)(t).apply(this,arguments))}return Object(s.a)(t,e),Object(o.a)(t,[{key:"render",value:function(){return c.a.createElement("div",null,c.a.createElement("header",{style:{background:"#09141b",color:"#03a9f4",letterSpacing:"0.15rem",height:"65px",display:"flex",alignItems:"center"}},c.a.createElement("h3",{style:{textDecoration:"none",margin:"0px",fontSize:"18px"}},c.a.createElement("span",{style:{textTransform:"uppercase",marginLeft:"20px",paddingRight:"10px",borderRight:"1px solid #fff"}},"Super Graph"),c.a.createElement("span",{style:{fontSize:"16px",marginLeft:"10px",color:"#fff"}},"Instant 
GraphQL"))),c.a.createElement(u.Provider,{store:g.store},c.a.createElement(g.Playground,{endpoint:"/api/v1/graphql",settings:"{ 'schema.polling.enable': false, 'request.credentials': 'include', 'general.betaUpdates': true, 'editor.reuseHeaders': true, 'editor.theme': 'dark' }"})))}}]),t}(l.Component);t.a=p}).call(this,n(32))},205:function(e,t,n){e.exports=n(206)},206:function(e,t,n){"use strict";n.r(t);var r=n(5),o=n.n(r),a=n(52),i=n.n(a),s=n(190);i.a.render(o.a.createElement(s.a,null),document.getElementById("root"))},441:function(e,t,n){}},[[205,1,2]]]);
|
|
||||||
//# sourceMappingURL=main.04d74040.chunk.js.map
|
|
File diff suppressed because one or more lines are too long
@ -1,328 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type {
|
|
||||||
DocumentNode,
|
|
||||||
FragmentSpreadNode,
|
|
||||||
FragmentDefinitionNode,
|
|
||||||
OperationDefinitionNode,
|
|
||||||
TypeDefinitionNode,
|
|
||||||
NamedTypeNode,
|
|
||||||
} from 'graphql';
|
|
||||||
import type {
|
|
||||||
CompletionItem,
|
|
||||||
DefinitionQueryResult,
|
|
||||||
Diagnostic,
|
|
||||||
GraphQLCache,
|
|
||||||
GraphQLConfig,
|
|
||||||
GraphQLProjectConfig,
|
|
||||||
Uri,
|
|
||||||
} from 'graphql-language-service-types';
|
|
||||||
import type {Position} from 'graphql-language-service-utils';
|
|
||||||
import type {Hover} from 'vscode-languageserver-types';
|
|
||||||
|
|
||||||
import {Kind, parse, print} from 'graphql';
|
|
||||||
import {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
|
|
||||||
import {getHoverInformation} from './getHoverInformation';
|
|
||||||
import {validateQuery, getRange, SEVERITY} from './getDiagnostics';
|
|
||||||
import {
|
|
||||||
getDefinitionQueryResultForFragmentSpread,
|
|
||||||
getDefinitionQueryResultForDefinitionNode,
|
|
||||||
getDefinitionQueryResultForNamedType,
|
|
||||||
} from './getDefinition';
|
|
||||||
import {getASTNodeAtPosition} from 'graphql-language-service-utils';
|
|
||||||
|
|
||||||
const {
|
|
||||||
FRAGMENT_DEFINITION,
|
|
||||||
OBJECT_TYPE_DEFINITION,
|
|
||||||
INTERFACE_TYPE_DEFINITION,
|
|
||||||
ENUM_TYPE_DEFINITION,
|
|
||||||
UNION_TYPE_DEFINITION,
|
|
||||||
SCALAR_TYPE_DEFINITION,
|
|
||||||
INPUT_OBJECT_TYPE_DEFINITION,
|
|
||||||
SCALAR_TYPE_EXTENSION,
|
|
||||||
OBJECT_TYPE_EXTENSION,
|
|
||||||
INTERFACE_TYPE_EXTENSION,
|
|
||||||
UNION_TYPE_EXTENSION,
|
|
||||||
ENUM_TYPE_EXTENSION,
|
|
||||||
INPUT_OBJECT_TYPE_EXTENSION,
|
|
||||||
DIRECTIVE_DEFINITION,
|
|
||||||
FRAGMENT_SPREAD,
|
|
||||||
OPERATION_DEFINITION,
|
|
||||||
NAMED_TYPE,
|
|
||||||
} = Kind;
|
|
||||||
|
|
||||||
export class GraphQLLanguageService {
|
|
||||||
_graphQLCache: GraphQLCache;
|
|
||||||
_graphQLConfig: GraphQLConfig;
|
|
||||||
|
|
||||||
constructor(cache: GraphQLCache) {
|
|
||||||
this._graphQLCache = cache;
|
|
||||||
this._graphQLConfig = cache.getGraphQLConfig();
|
|
||||||
}
|
|
||||||
|
|
||||||
async getDiagnostics(
|
|
||||||
query: string,
|
|
||||||
uri: Uri,
|
|
||||||
isRelayCompatMode?: boolean,
|
|
||||||
): Promise<Array<Diagnostic>> {
|
|
||||||
// Perform syntax diagnostics first, as this doesn't require
|
|
||||||
// schema/fragment definitions, even the project configuration.
|
|
||||||
let queryHasExtensions = false;
|
|
||||||
const projectConfig = this._graphQLConfig.getConfigForFile(uri);
|
|
||||||
const schemaPath = projectConfig.schemaPath;
|
|
||||||
try {
|
|
||||||
const queryAST = parse(query);
|
|
||||||
if (!schemaPath || uri !== schemaPath) {
|
|
||||||
queryHasExtensions = queryAST.definitions.some(definition => {
|
|
||||||
switch (definition.kind) {
|
|
||||||
case OBJECT_TYPE_DEFINITION:
|
|
||||||
case INTERFACE_TYPE_DEFINITION:
|
|
||||||
case ENUM_TYPE_DEFINITION:
|
|
||||||
case UNION_TYPE_DEFINITION:
|
|
||||||
case SCALAR_TYPE_DEFINITION:
|
|
||||||
case INPUT_OBJECT_TYPE_DEFINITION:
|
|
||||||
case SCALAR_TYPE_EXTENSION:
|
|
||||||
case OBJECT_TYPE_EXTENSION:
|
|
||||||
case INTERFACE_TYPE_EXTENSION:
|
|
||||||
case UNION_TYPE_EXTENSION:
|
|
||||||
case ENUM_TYPE_EXTENSION:
|
|
||||||
case INPUT_OBJECT_TYPE_EXTENSION:
|
|
||||||
case DIRECTIVE_DEFINITION:
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
});
|
|
||||||
}
|
|
||||||
} catch (error) {
|
|
||||||
const range = getRange(error.locations[0], query);
|
|
||||||
return [
|
|
||||||
{
|
|
||||||
severity: SEVERITY.ERROR,
|
|
||||||
message: error.message,
|
|
||||||
source: 'GraphQL: Syntax',
|
|
||||||
range,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
}
|
|
||||||
|
|
||||||
// If there's a matching config, proceed to prepare to run validation
|
|
||||||
let source = query;
|
|
||||||
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
|
|
||||||
projectConfig,
|
|
||||||
);
|
|
||||||
const fragmentDependencies = await this._graphQLCache.getFragmentDependencies(
|
|
||||||
query,
|
|
||||||
fragmentDefinitions,
|
|
||||||
);
|
|
||||||
const dependenciesSource = fragmentDependencies.reduce(
|
|
||||||
(prev, cur) => `${prev} ${print(cur.definition)}`,
|
|
||||||
'',
|
|
||||||
);
|
|
||||||
|
|
||||||
source = `${source} ${dependenciesSource}`;
|
|
||||||
|
|
||||||
let validationAst = null;
|
|
||||||
try {
|
|
||||||
validationAst = parse(source);
|
|
||||||
} catch (error) {
|
|
||||||
// the query string is already checked to be parsed properly - errors
|
|
||||||
// from this parse must be from corrupted fragment dependencies.
|
|
||||||
// For IDEs we don't care for errors outside of the currently edited
|
|
||||||
// query, so we return an empty array here.
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Check if there are custom validation rules to be used
|
|
||||||
let customRules;
|
|
||||||
const customRulesModulePath =
|
|
||||||
projectConfig.extensions.customValidationRules;
|
|
||||||
if (customRulesModulePath) {
|
|
||||||
/* eslint-disable no-implicit-coercion */
|
|
||||||
const rulesPath = require.resolve(`${customRulesModulePath}`);
|
|
||||||
if (rulesPath) {
|
|
||||||
customRules = require(`${rulesPath}`)(this._graphQLConfig);
|
|
||||||
}
|
|
||||||
/* eslint-enable no-implicit-coercion */
|
|
||||||
}
|
|
||||||
|
|
||||||
const schema = await this._graphQLCache
|
|
||||||
.getSchema(projectConfig.projectName, queryHasExtensions)
|
|
||||||
.catch(() => null);
|
|
||||||
|
|
||||||
if (!schema) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
return validateQuery(validationAst, schema, customRules, isRelayCompatMode);
|
|
||||||
}
|
|
||||||
|
|
||||||
async getAutocompleteSuggestions(
|
|
||||||
query: string,
|
|
||||||
position: Position,
|
|
||||||
filePath: Uri,
|
|
||||||
): Promise<Array<CompletionItem>> {
|
|
||||||
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
|
|
||||||
const schema = await this._graphQLCache
|
|
||||||
.getSchema(projectConfig.projectName)
|
|
||||||
.catch(() => null);
|
|
||||||
|
|
||||||
if (schema) {
|
|
||||||
return getAutocompleteSuggestions(schema, query, position);
|
|
||||||
}
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
async getHoverInformation(
|
|
||||||
query: string,
|
|
||||||
position: Position,
|
|
||||||
filePath: Uri,
|
|
||||||
): Promise<Hover.contents> {
|
|
||||||
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
|
|
||||||
const schema = await this._graphQLCache
|
|
||||||
.getSchema(projectConfig.projectName)
|
|
||||||
.catch(() => null);
|
|
||||||
|
|
||||||
if (schema) {
|
|
||||||
return getHoverInformation(schema, query, position);
|
|
||||||
}
|
|
||||||
return '';
|
|
||||||
}
|
|
||||||
|
|
||||||
async getDefinition(
|
|
||||||
query: string,
|
|
||||||
position: Position,
|
|
||||||
filePath: Uri,
|
|
||||||
): Promise<?DefinitionQueryResult> {
|
|
||||||
const projectConfig = this._graphQLConfig.getConfigForFile(filePath);
|
|
||||||
|
|
||||||
let ast;
|
|
||||||
try {
|
|
||||||
ast = parse(query);
|
|
||||||
} catch (error) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const node = getASTNodeAtPosition(query, ast, position);
|
|
||||||
if (node) {
|
|
||||||
switch (node.kind) {
|
|
||||||
case FRAGMENT_SPREAD:
|
|
||||||
return this._getDefinitionForFragmentSpread(
|
|
||||||
query,
|
|
||||||
ast,
|
|
||||||
node,
|
|
||||||
filePath,
|
|
||||||
projectConfig,
|
|
||||||
);
|
|
||||||
case FRAGMENT_DEFINITION:
|
|
||||||
case OPERATION_DEFINITION:
|
|
||||||
return getDefinitionQueryResultForDefinitionNode(
|
|
||||||
filePath,
|
|
||||||
query,
|
|
||||||
(node: FragmentDefinitionNode | OperationDefinitionNode),
|
|
||||||
);
|
|
||||||
case NAMED_TYPE:
|
|
||||||
return this._getDefinitionForNamedType(
|
|
||||||
query,
|
|
||||||
ast,
|
|
||||||
node,
|
|
||||||
filePath,
|
|
||||||
projectConfig,
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
async _getDefinitionForNamedType(
|
|
||||||
query: string,
|
|
||||||
ast: DocumentNode,
|
|
||||||
node: NamedTypeNode,
|
|
||||||
filePath: Uri,
|
|
||||||
projectConfig: GraphQLProjectConfig,
|
|
||||||
): Promise<?DefinitionQueryResult> {
|
|
||||||
const objectTypeDefinitions = await this._graphQLCache.getObjectTypeDefinitions(
|
|
||||||
projectConfig,
|
|
||||||
);
|
|
||||||
|
|
||||||
const dependencies = await this._graphQLCache.getObjectTypeDependenciesForAST(
|
|
||||||
ast,
|
|
||||||
objectTypeDefinitions,
|
|
||||||
);
|
|
||||||
|
|
||||||
const localObjectTypeDefinitions = ast.definitions.filter(
|
|
||||||
definition =>
|
|
||||||
definition.kind === OBJECT_TYPE_DEFINITION ||
|
|
||||||
definition.kind === INPUT_OBJECT_TYPE_DEFINITION ||
|
|
||||||
definition.kind === ENUM_TYPE_DEFINITION,
|
|
||||||
);
|
|
||||||
|
|
||||||
const typeCastedDefs = ((localObjectTypeDefinitions: any): Array<
|
|
||||||
TypeDefinitionNode,
|
|
||||||
>);
|
|
||||||
|
|
||||||
const localOperationDefinationInfos = typeCastedDefs.map(
|
|
||||||
(definition: TypeDefinitionNode) => ({
|
|
||||||
filePath,
|
|
||||||
content: query,
|
|
||||||
definition,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
const result = await getDefinitionQueryResultForNamedType(
|
|
||||||
query,
|
|
||||||
node,
|
|
||||||
dependencies.concat(localOperationDefinationInfos),
|
|
||||||
);
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
|
|
||||||
async _getDefinitionForFragmentSpread(
|
|
||||||
query: string,
|
|
||||||
ast: DocumentNode,
|
|
||||||
node: FragmentSpreadNode,
|
|
||||||
filePath: Uri,
|
|
||||||
projectConfig: GraphQLProjectConfig,
|
|
||||||
): Promise<?DefinitionQueryResult> {
|
|
||||||
const fragmentDefinitions = await this._graphQLCache.getFragmentDefinitions(
|
|
||||||
projectConfig,
|
|
||||||
);
|
|
||||||
|
|
||||||
const dependencies = await this._graphQLCache.getFragmentDependenciesForAST(
|
|
||||||
ast,
|
|
||||||
fragmentDefinitions,
|
|
||||||
);
|
|
||||||
|
|
||||||
const localFragDefinitions = ast.definitions.filter(
|
|
||||||
definition => definition.kind === FRAGMENT_DEFINITION,
|
|
||||||
);
|
|
||||||
|
|
||||||
const typeCastedDefs = ((localFragDefinitions: any): Array<
|
|
||||||
FragmentDefinitionNode,
|
|
||||||
>);
|
|
||||||
|
|
||||||
const localFragInfos = typeCastedDefs.map(
|
|
||||||
(definition: FragmentDefinitionNode) => ({
|
|
||||||
filePath,
|
|
||||||
content: query,
|
|
||||||
definition,
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
|
|
||||||
const result = await getDefinitionQueryResultForFragmentSpread(
|
|
||||||
query,
|
|
||||||
node,
|
|
||||||
dependencies.concat(localFragInfos),
|
|
||||||
);
|
|
||||||
|
|
||||||
return result;
|
|
||||||
}
|
|
||||||
}
|
|
@ -1,204 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type {GraphQLField, GraphQLSchema, GraphQLType} from 'graphql';
|
|
||||||
import {isCompositeType} from 'graphql';
|
|
||||||
import {
|
|
||||||
SchemaMetaFieldDef,
|
|
||||||
TypeMetaFieldDef,
|
|
||||||
TypeNameMetaFieldDef,
|
|
||||||
} from 'graphql/type/introspection';
|
|
||||||
import type {
|
|
||||||
CompletionItem,
|
|
||||||
ContextToken,
|
|
||||||
State,
|
|
||||||
TypeInfo,
|
|
||||||
} from 'graphql-language-service-types';
|
|
||||||
|
|
||||||
// Utility for returning the state representing the Definition this token state
|
|
||||||
// is within, if any.
|
|
||||||
export function getDefinitionState(tokenState: State): ?State {
|
|
||||||
let definitionState;
|
|
||||||
|
|
||||||
forEachState(tokenState, state => {
|
|
||||||
switch (state.kind) {
|
|
||||||
case 'Query':
|
|
||||||
case 'ShortQuery':
|
|
||||||
case 'Mutation':
|
|
||||||
case 'Subscription':
|
|
||||||
case 'FragmentDefinition':
|
|
||||||
definitionState = state;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return definitionState;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Gets the field definition given a type and field name
|
|
||||||
export function getFieldDef(
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
type: GraphQLType,
|
|
||||||
fieldName: string,
|
|
||||||
): ?GraphQLField<*, *> {
|
|
||||||
if (fieldName === SchemaMetaFieldDef.name && schema.getQueryType() === type) {
|
|
||||||
return SchemaMetaFieldDef;
|
|
||||||
}
|
|
||||||
if (fieldName === TypeMetaFieldDef.name && schema.getQueryType() === type) {
|
|
||||||
return TypeMetaFieldDef;
|
|
||||||
}
|
|
||||||
if (fieldName === TypeNameMetaFieldDef.name && isCompositeType(type)) {
|
|
||||||
return TypeNameMetaFieldDef;
|
|
||||||
}
|
|
||||||
if (type.getFields && typeof type.getFields === 'function') {
|
|
||||||
return (type.getFields()[fieldName]: any);
|
|
||||||
}
|
|
||||||
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Utility for iterating through a CodeMirror parse state stack bottom-up.
|
|
||||||
export function forEachState(
|
|
||||||
stack: State,
|
|
||||||
fn: (state: State) => ?TypeInfo,
|
|
||||||
): void {
|
|
||||||
const reverseStateStack = [];
|
|
||||||
let state = stack;
|
|
||||||
while (state && state.kind) {
|
|
||||||
reverseStateStack.push(state);
|
|
||||||
state = state.prevState;
|
|
||||||
}
|
|
||||||
for (let i = reverseStateStack.length - 1; i >= 0; i--) {
|
|
||||||
fn(reverseStateStack[i]);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export function objectValues(object: Object): Array<any> {
|
|
||||||
const keys = Object.keys(object);
|
|
||||||
const len = keys.length;
|
|
||||||
const values = new Array(len);
|
|
||||||
for (let i = 0; i < len; ++i) {
|
|
||||||
values[i] = object[keys[i]];
|
|
||||||
}
|
|
||||||
return values;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Create the expected hint response given a possible list and a token
|
|
||||||
export function hintList(
|
|
||||||
token: ContextToken,
|
|
||||||
list: Array<CompletionItem>,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
return filterAndSortList(list, normalizeText(token.string));
|
|
||||||
}
|
|
||||||
|
|
||||||
// Given a list of hint entries and currently typed text, sort and filter to
|
|
||||||
// provide a concise list.
|
|
||||||
function filterAndSortList(
|
|
||||||
list: Array<CompletionItem>,
|
|
||||||
text: string,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
if (!text) {
|
|
||||||
return filterNonEmpty(list, entry => !entry.isDeprecated);
|
|
||||||
}
|
|
||||||
|
|
||||||
const byProximity = list.map(entry => ({
|
|
||||||
proximity: getProximity(normalizeText(entry.label), text),
|
|
||||||
entry,
|
|
||||||
}));
|
|
||||||
|
|
||||||
const conciseMatches = filterNonEmpty(
|
|
||||||
filterNonEmpty(byProximity, pair => pair.proximity <= 2),
|
|
||||||
pair => !pair.entry.isDeprecated,
|
|
||||||
);
|
|
||||||
|
|
||||||
const sortedMatches = conciseMatches.sort(
|
|
||||||
(a, b) =>
|
|
||||||
(a.entry.isDeprecated ? 1 : 0) - (b.entry.isDeprecated ? 1 : 0) ||
|
|
||||||
a.proximity - b.proximity ||
|
|
||||||
a.entry.label.length - b.entry.label.length,
|
|
||||||
);
|
|
||||||
|
|
||||||
return sortedMatches.map(pair => pair.entry);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Filters the array by the predicate, unless it results in an empty array,
|
|
||||||
// in which case return the original array.
|
|
||||||
function filterNonEmpty(
|
|
||||||
array: Array<Object>,
|
|
||||||
predicate: (entry: Object) => boolean,
|
|
||||||
): Array<Object> {
|
|
||||||
const filtered = array.filter(predicate);
|
|
||||||
return filtered.length === 0 ? array : filtered;
|
|
||||||
}
|
|
||||||
|
|
||||||
function normalizeText(text: string): string {
|
|
||||||
return text.toLowerCase().replace(/\W/g, '');
|
|
||||||
}
|
|
||||||
|
|
||||||
// Determine a numeric proximity for a suggestion based on current text.
|
|
||||||
function getProximity(suggestion: string, text: string): number {
|
|
||||||
// start with lexical distance
|
|
||||||
let proximity = lexicalDistance(text, suggestion);
|
|
||||||
if (suggestion.length > text.length) {
|
|
||||||
// do not penalize long suggestions.
|
|
||||||
proximity -= suggestion.length - text.length - 1;
|
|
||||||
// penalize suggestions not starting with this phrase
|
|
||||||
proximity += suggestion.indexOf(text) === 0 ? 0 : 0.5;
|
|
||||||
}
|
|
||||||
return proximity;
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Computes the lexical distance between strings A and B.
|
|
||||||
*
|
|
||||||
* The "distance" between two strings is given by counting the minimum number
|
|
||||||
* of edits needed to transform string A into string B. An edit can be an
|
|
||||||
* insertion, deletion, or substitution of a single character, or a swap of two
|
|
||||||
* adjacent characters.
|
|
||||||
*
|
|
||||||
* This distance can be useful for detecting typos in input or sorting
|
|
||||||
*
|
|
||||||
* @param {string} a
|
|
||||||
* @param {string} b
|
|
||||||
* @return {int} distance in number of edits
|
|
||||||
*/
|
|
||||||
function lexicalDistance(a: string, b: string): number {
|
|
||||||
let i;
|
|
||||||
let j;
|
|
||||||
const d = [];
|
|
||||||
const aLength = a.length;
|
|
||||||
const bLength = b.length;
|
|
||||||
|
|
||||||
for (i = 0; i <= aLength; i++) {
|
|
||||||
d[i] = [i];
|
|
||||||
}
|
|
||||||
|
|
||||||
for (j = 1; j <= bLength; j++) {
|
|
||||||
d[0][j] = j;
|
|
||||||
}
|
|
||||||
|
|
||||||
for (i = 1; i <= aLength; i++) {
|
|
||||||
for (j = 1; j <= bLength; j++) {
|
|
||||||
const cost = a[i - 1] === b[j - 1] ? 0 : 1;
|
|
||||||
|
|
||||||
d[i][j] = Math.min(
|
|
||||||
d[i - 1][j] + 1,
|
|
||||||
d[i][j - 1] + 1,
|
|
||||||
d[i - 1][j - 1] + cost,
|
|
||||||
);
|
|
||||||
|
|
||||||
if (i > 1 && j > 1 && a[i - 1] === b[j - 2] && a[i - 2] === b[j - 1]) {
|
|
||||||
d[i][j] = Math.min(d[i][j], d[i - 2][j - 2] + cost);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return d[aLength][bLength];
|
|
||||||
}
|
|
@ -1,665 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type {
|
|
||||||
FragmentDefinitionNode,
|
|
||||||
GraphQLDirective,
|
|
||||||
GraphQLSchema,
|
|
||||||
} from 'graphql';
|
|
||||||
import type {
|
|
||||||
CompletionItem,
|
|
||||||
ContextToken,
|
|
||||||
State,
|
|
||||||
TypeInfo,
|
|
||||||
} from 'graphql-language-service-types';
|
|
||||||
import type {Position} from 'graphql-language-service-utils';
|
|
||||||
|
|
||||||
import {
|
|
||||||
GraphQLBoolean,
|
|
||||||
GraphQLEnumType,
|
|
||||||
GraphQLInputObjectType,
|
|
||||||
GraphQLList,
|
|
||||||
SchemaMetaFieldDef,
|
|
||||||
TypeMetaFieldDef,
|
|
||||||
TypeNameMetaFieldDef,
|
|
||||||
assertAbstractType,
|
|
||||||
doTypesOverlap,
|
|
||||||
getNamedType,
|
|
||||||
getNullableType,
|
|
||||||
isAbstractType,
|
|
||||||
isCompositeType,
|
|
||||||
isInputType,
|
|
||||||
} from 'graphql';
|
|
||||||
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
|
|
||||||
import {
|
|
||||||
forEachState,
|
|
||||||
getDefinitionState,
|
|
||||||
getFieldDef,
|
|
||||||
hintList,
|
|
||||||
objectValues,
|
|
||||||
} from './autocompleteUtils';
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Given GraphQLSchema, queryText, and context of the current position within
|
|
||||||
* the source text, provide a list of typeahead entries.
|
|
||||||
*/
|
|
||||||
export function getAutocompleteSuggestions(
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
queryText: string,
|
|
||||||
cursor: Position,
|
|
||||||
contextToken?: ContextToken,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
const token = contextToken || getTokenAtPosition(queryText, cursor);
|
|
||||||
|
|
||||||
const state =
|
|
||||||
token.state.kind === 'Invalid' ? token.state.prevState : token.state;
|
|
||||||
|
|
||||||
// relieve flow errors by checking if `state` exists
|
|
||||||
if (!state) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
const kind = state.kind;
|
|
||||||
const step = state.step;
|
|
||||||
const typeInfo = getTypeInfo(schema, token.state);
|
|
||||||
|
|
||||||
// Definition kinds
|
|
||||||
if (kind === 'Document') {
|
|
||||||
return hintList(token, [
|
|
||||||
{label: 'query'},
|
|
||||||
{label: 'mutation'},
|
|
||||||
{label: 'subscription'},
|
|
||||||
{label: 'fragment'},
|
|
||||||
{label: '{'},
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Field names
|
|
||||||
if (kind === 'SelectionSet' || kind === 'Field' || kind === 'AliasedField') {
|
|
||||||
return getSuggestionsForFieldNames(token, typeInfo, schema);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Argument names
|
|
||||||
if (kind === 'Arguments' || (kind === 'Argument' && step === 0)) {
|
|
||||||
const argDefs = typeInfo.argDefs;
|
|
||||||
if (argDefs) {
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
argDefs.map(argDef => ({
|
|
||||||
label: argDef.name,
|
|
||||||
detail: String(argDef.type),
|
|
||||||
documentation: argDef.description,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Input Object fields
|
|
||||||
if (kind === 'ObjectValue' || (kind === 'ObjectField' && step === 0)) {
|
|
||||||
if (typeInfo.objectFieldDefs) {
|
|
||||||
const objectFields = objectValues(typeInfo.objectFieldDefs);
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
objectFields.map(field => ({
|
|
||||||
label: field.name,
|
|
||||||
detail: String(field.type),
|
|
||||||
documentation: field.description,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Input values: Enum and Boolean
|
|
||||||
if (
|
|
||||||
kind === 'EnumValue' ||
|
|
||||||
(kind === 'ListValue' && step === 1) ||
|
|
||||||
(kind === 'ObjectField' && step === 2) ||
|
|
||||||
(kind === 'Argument' && step === 2)
|
|
||||||
) {
|
|
||||||
return getSuggestionsForInputValues(token, typeInfo);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fragment type conditions
|
|
||||||
if (
|
|
||||||
(kind === 'TypeCondition' && step === 1) ||
|
|
||||||
(kind === 'NamedType' &&
|
|
||||||
state.prevState != null &&
|
|
||||||
state.prevState.kind === 'TypeCondition')
|
|
||||||
) {
|
|
||||||
return getSuggestionsForFragmentTypeConditions(token, typeInfo, schema);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Fragment spread names
|
|
||||||
if (kind === 'FragmentSpread' && step === 1) {
|
|
||||||
return getSuggestionsForFragmentSpread(token, typeInfo, schema, queryText);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Variable definition types
|
|
||||||
if (
|
|
||||||
(kind === 'VariableDefinition' && step === 2) ||
|
|
||||||
(kind === 'ListType' && step === 1) ||
|
|
||||||
(kind === 'NamedType' &&
|
|
||||||
state.prevState &&
|
|
||||||
(state.prevState.kind === 'VariableDefinition' ||
|
|
||||||
state.prevState.kind === 'ListType'))
|
|
||||||
) {
|
|
||||||
return getSuggestionsForVariableDefinition(token, schema);
|
|
||||||
}
|
|
||||||
|
|
||||||
// Directive names
|
|
||||||
if (kind === 'Directive') {
|
|
||||||
return getSuggestionsForDirective(token, state, schema);
|
|
||||||
}
|
|
||||||
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
// Helper functions to get suggestions for each kinds
|
|
||||||
function getSuggestionsForFieldNames(
|
|
||||||
token: ContextToken,
|
|
||||||
typeInfo: TypeInfo,
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
if (typeInfo.parentType) {
|
|
||||||
const parentType = typeInfo.parentType;
|
|
||||||
const fields =
|
|
||||||
parentType.getFields instanceof Function
|
|
||||||
? objectValues(parentType.getFields())
|
|
||||||
: [];
|
|
||||||
if (isAbstractType(parentType)) {
|
|
||||||
fields.push(TypeNameMetaFieldDef);
|
|
||||||
}
|
|
||||||
if (parentType === schema.getQueryType()) {
|
|
||||||
fields.push(SchemaMetaFieldDef, TypeMetaFieldDef);
|
|
||||||
}
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
fields.map(field => ({
|
|
||||||
label: field.name,
|
|
||||||
detail: String(field.type),
|
|
||||||
documentation: field.description,
|
|
||||||
isDeprecated: field.isDeprecated,
|
|
||||||
deprecationReason: field.deprecationReason,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSuggestionsForInputValues(
|
|
||||||
token: ContextToken,
|
|
||||||
typeInfo: TypeInfo,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
const namedInputType = getNamedType(typeInfo.inputType);
|
|
||||||
if (namedInputType instanceof GraphQLEnumType) {
|
|
||||||
const values = namedInputType.getValues();
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
values.map(value => ({
|
|
||||||
label: value.name,
|
|
||||||
detail: String(namedInputType),
|
|
||||||
documentation: value.description,
|
|
||||||
isDeprecated: value.isDeprecated,
|
|
||||||
deprecationReason: value.deprecationReason,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
} else if (namedInputType === GraphQLBoolean) {
|
|
||||||
return hintList(token, [
|
|
||||||
{
|
|
||||||
label: 'true',
|
|
||||||
detail: String(GraphQLBoolean),
|
|
||||||
documentation: 'Not false.',
|
|
||||||
},
|
|
||||||
{
|
|
||||||
label: 'false',
|
|
||||||
detail: String(GraphQLBoolean),
|
|
||||||
documentation: 'Not true.',
|
|
||||||
},
|
|
||||||
]);
|
|
||||||
}
|
|
||||||
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSuggestionsForFragmentTypeConditions(
|
|
||||||
token: ContextToken,
|
|
||||||
typeInfo: TypeInfo,
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
let possibleTypes;
|
|
||||||
if (typeInfo.parentType) {
|
|
||||||
if (isAbstractType(typeInfo.parentType)) {
|
|
||||||
const abstractType = assertAbstractType(typeInfo.parentType);
|
|
||||||
// Collect both the possible Object types as well as the interfaces
|
|
||||||
// they implement.
|
|
||||||
const possibleObjTypes = schema.getPossibleTypes(abstractType);
|
|
||||||
const possibleIfaceMap = Object.create(null);
|
|
||||||
possibleObjTypes.forEach(type => {
|
|
||||||
type.getInterfaces().forEach(iface => {
|
|
||||||
possibleIfaceMap[iface.name] = iface;
|
|
||||||
});
|
|
||||||
});
|
|
||||||
possibleTypes = possibleObjTypes.concat(objectValues(possibleIfaceMap));
|
|
||||||
} else {
|
|
||||||
// The parent type is a non-abstract Object type, so the only possible
|
|
||||||
// type that can be used is that same type.
|
|
||||||
possibleTypes = [typeInfo.parentType];
|
|
||||||
}
|
|
||||||
} else {
|
|
||||||
const typeMap = schema.getTypeMap();
|
|
||||||
possibleTypes = objectValues(typeMap).filter(isCompositeType);
|
|
||||||
}
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
possibleTypes.map(type => {
|
|
||||||
const namedType = getNamedType(type);
|
|
||||||
return {
|
|
||||||
label: String(type),
|
|
||||||
documentation: (namedType && namedType.description) || '',
|
|
||||||
};
|
|
||||||
}),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSuggestionsForFragmentSpread(
|
|
||||||
token: ContextToken,
|
|
||||||
typeInfo: TypeInfo,
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
queryText: string,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
const typeMap = schema.getTypeMap();
|
|
||||||
const defState = getDefinitionState(token.state);
|
|
||||||
const fragments = getFragmentDefinitions(queryText);
|
|
||||||
|
|
||||||
// Filter down to only the fragments which may exist here.
|
|
||||||
const relevantFrags = fragments.filter(
|
|
||||||
frag =>
|
|
||||||
// Only include fragments with known types.
|
|
||||||
typeMap[frag.typeCondition.name.value] &&
|
|
||||||
// Only include fragments which are not cyclic.
|
|
||||||
!(
|
|
||||||
defState &&
|
|
||||||
defState.kind === 'FragmentDefinition' &&
|
|
||||||
defState.name === frag.name.value
|
|
||||||
) &&
|
|
||||||
// Only include fragments which could possibly be spread here.
|
|
||||||
isCompositeType(typeInfo.parentType) &&
|
|
||||||
isCompositeType(typeMap[frag.typeCondition.name.value]) &&
|
|
||||||
doTypesOverlap(
|
|
||||||
schema,
|
|
||||||
typeInfo.parentType,
|
|
||||||
typeMap[frag.typeCondition.name.value],
|
|
||||||
),
|
|
||||||
);
|
|
||||||
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
relevantFrags.map(frag => ({
|
|
||||||
label: frag.name.value,
|
|
||||||
detail: String(typeMap[frag.typeCondition.name.value]),
|
|
||||||
documentation: `fragment ${frag.name.value} on ${
|
|
||||||
frag.typeCondition.name.value
|
|
||||||
}`,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function getFragmentDefinitions(
|
|
||||||
queryText: string,
|
|
||||||
): Array<FragmentDefinitionNode> {
|
|
||||||
const fragmentDefs = [];
|
|
||||||
runOnlineParser(queryText, (_, state) => {
|
|
||||||
if (state.kind === 'FragmentDefinition' && state.name && state.type) {
|
|
||||||
fragmentDefs.push({
|
|
||||||
kind: 'FragmentDefinition',
|
|
||||||
name: {
|
|
||||||
kind: 'Name',
|
|
||||||
value: state.name,
|
|
||||||
},
|
|
||||||
selectionSet: {
|
|
||||||
kind: 'SelectionSet',
|
|
||||||
selections: [],
|
|
||||||
},
|
|
||||||
typeCondition: {
|
|
||||||
kind: 'NamedType',
|
|
||||||
name: {
|
|
||||||
kind: 'Name',
|
|
||||||
value: state.type,
|
|
||||||
},
|
|
||||||
},
|
|
||||||
});
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return fragmentDefs;
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSuggestionsForVariableDefinition(
|
|
||||||
token: ContextToken,
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
const inputTypeMap = schema.getTypeMap();
|
|
||||||
const inputTypes = objectValues(inputTypeMap).filter(isInputType);
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
inputTypes.map(type => ({
|
|
||||||
label: type.name,
|
|
||||||
documentation: type.description,
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
|
|
||||||
function getSuggestionsForDirective(
|
|
||||||
token: ContextToken,
|
|
||||||
state: State,
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
): Array<CompletionItem> {
|
|
||||||
if (state.prevState && state.prevState.kind) {
|
|
||||||
const directives = schema
|
|
||||||
.getDirectives()
|
|
||||||
.filter(directive => canUseDirective(state.prevState, directive));
|
|
||||||
return hintList(
|
|
||||||
token,
|
|
||||||
directives.map(directive => ({
|
|
||||||
label: directive.name,
|
|
||||||
documentation: directive.description || '',
|
|
||||||
})),
|
|
||||||
);
|
|
||||||
}
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getTokenAtPosition(
|
|
||||||
queryText: string,
|
|
||||||
cursor: Position,
|
|
||||||
): ContextToken {
|
|
||||||
let styleAtCursor = null;
|
|
||||||
let stateAtCursor = null;
|
|
||||||
let stringAtCursor = null;
|
|
||||||
const token = runOnlineParser(queryText, (stream, state, style, index) => {
|
|
||||||
if (index === cursor.line) {
|
|
||||||
if (stream.getCurrentPosition() >= cursor.character) {
|
|
||||||
styleAtCursor = style;
|
|
||||||
stateAtCursor = {...state};
|
|
||||||
stringAtCursor = stream.current();
|
|
||||||
return 'BREAK';
|
|
||||||
}
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
// Return the state/style of parsed token in case those at cursor aren't
|
|
||||||
// available.
|
|
||||||
return {
|
|
||||||
start: token.start,
|
|
||||||
end: token.end,
|
|
||||||
string: stringAtCursor || token.string,
|
|
||||||
state: stateAtCursor || token.state,
|
|
||||||
style: styleAtCursor || token.style,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Provides an utility function to parse a given query text and construct a
|
|
||||||
* `token` context object.
|
|
||||||
* A token context provides useful information about the token/style that
|
|
||||||
* CharacterStream currently possesses, as well as the end state and style
|
|
||||||
* of the token.
|
|
||||||
*/
|
|
||||||
type callbackFnType = (
|
|
||||||
stream: CharacterStream,
|
|
||||||
state: State,
|
|
||||||
style: string,
|
|
||||||
index: number,
|
|
||||||
) => void | 'BREAK';
|
|
||||||
|
|
||||||
function runOnlineParser(
|
|
||||||
queryText: string,
|
|
||||||
callback: callbackFnType,
|
|
||||||
): ContextToken {
|
|
||||||
const lines = queryText.split('\n');
|
|
||||||
const parser = onlineParser();
|
|
||||||
let state = parser.startState();
|
|
||||||
let style = '';
|
|
||||||
|
|
||||||
let stream: CharacterStream = new CharacterStream('');
|
|
||||||
|
|
||||||
for (let i = 0; i < lines.length; i++) {
|
|
||||||
stream = new CharacterStream(lines[i]);
|
|
||||||
while (!stream.eol()) {
|
|
||||||
style = parser.token(stream, state);
|
|
||||||
const code = callback(stream, state, style, i);
|
|
||||||
if (code === 'BREAK') {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
// Above while loop won't run if there is an empty line.
|
|
||||||
// Run the callback one more time to catch this.
|
|
||||||
callback(stream, state, style, i);
|
|
||||||
|
|
||||||
if (!state.kind) {
|
|
||||||
state = parser.startState();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
start: stream.getStartOfToken(),
|
|
||||||
end: stream.getCurrentPosition(),
|
|
||||||
string: stream.current(),
|
|
||||||
state,
|
|
||||||
style,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function canUseDirective(
|
|
||||||
state: $PropertyType<State, 'prevState'>,
|
|
||||||
directive: GraphQLDirective,
|
|
||||||
): boolean {
|
|
||||||
if (!state || !state.kind) {
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
const kind = state.kind;
|
|
||||||
const locations = directive.locations;
|
|
||||||
switch (kind) {
|
|
||||||
case 'Query':
|
|
||||||
return locations.indexOf('QUERY') !== -1;
|
|
||||||
case 'Mutation':
|
|
||||||
return locations.indexOf('MUTATION') !== -1;
|
|
||||||
case 'Subscription':
|
|
||||||
return locations.indexOf('SUBSCRIPTION') !== -1;
|
|
||||||
case 'Field':
|
|
||||||
case 'AliasedField':
|
|
||||||
return locations.indexOf('FIELD') !== -1;
|
|
||||||
case 'FragmentDefinition':
|
|
||||||
return locations.indexOf('FRAGMENT_DEFINITION') !== -1;
|
|
||||||
case 'FragmentSpread':
|
|
||||||
return locations.indexOf('FRAGMENT_SPREAD') !== -1;
|
|
||||||
case 'InlineFragment':
|
|
||||||
return locations.indexOf('INLINE_FRAGMENT') !== -1;
|
|
||||||
|
|
||||||
// Schema Definitions
|
|
||||||
case 'SchemaDef':
|
|
||||||
return locations.indexOf('SCHEMA') !== -1;
|
|
||||||
case 'ScalarDef':
|
|
||||||
return locations.indexOf('SCALAR') !== -1;
|
|
||||||
case 'ObjectTypeDef':
|
|
||||||
return locations.indexOf('OBJECT') !== -1;
|
|
||||||
case 'FieldDef':
|
|
||||||
return locations.indexOf('FIELD_DEFINITION') !== -1;
|
|
||||||
case 'InterfaceDef':
|
|
||||||
return locations.indexOf('INTERFACE') !== -1;
|
|
||||||
case 'UnionDef':
|
|
||||||
return locations.indexOf('UNION') !== -1;
|
|
||||||
case 'EnumDef':
|
|
||||||
return locations.indexOf('ENUM') !== -1;
|
|
||||||
case 'EnumValue':
|
|
||||||
return locations.indexOf('ENUM_VALUE') !== -1;
|
|
||||||
case 'InputDef':
|
|
||||||
return locations.indexOf('INPUT_OBJECT') !== -1;
|
|
||||||
case 'InputValueDef':
|
|
||||||
const prevStateKind = state.prevState && state.prevState.kind;
|
|
||||||
switch (prevStateKind) {
|
|
||||||
case 'ArgumentsDef':
|
|
||||||
return locations.indexOf('ARGUMENT_DEFINITION') !== -1;
|
|
||||||
case 'InputDef':
|
|
||||||
return locations.indexOf('INPUT_FIELD_DEFINITION') !== -1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
|
|
||||||
// Utility for collecting rich type information given any token's state
|
|
||||||
// from the graphql-mode parser.
|
|
||||||
export function getTypeInfo(
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
tokenState: State,
|
|
||||||
): TypeInfo {
|
|
||||||
let argDef;
|
|
||||||
let argDefs;
|
|
||||||
let directiveDef;
|
|
||||||
let enumValue;
|
|
||||||
let fieldDef;
|
|
||||||
let inputType;
|
|
||||||
let objectFieldDefs;
|
|
||||||
let parentType;
|
|
||||||
let type;
|
|
||||||
|
|
||||||
forEachState(tokenState, state => {
|
|
||||||
switch (state.kind) {
|
|
||||||
case 'Query':
|
|
||||||
case 'ShortQuery':
|
|
||||||
type = schema.getQueryType();
|
|
||||||
break;
|
|
||||||
case 'Mutation':
|
|
||||||
type = schema.getMutationType();
|
|
||||||
break;
|
|
||||||
case 'Subscription':
|
|
||||||
type = schema.getSubscriptionType();
|
|
||||||
break;
|
|
||||||
case 'InlineFragment':
|
|
||||||
case 'FragmentDefinition':
|
|
||||||
if (state.type) {
|
|
||||||
type = schema.getType(state.type);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 'Field':
|
|
||||||
case 'AliasedField':
|
|
||||||
if (!type || !state.name) {
|
|
||||||
fieldDef = null;
|
|
||||||
} else {
|
|
||||||
fieldDef = parentType
|
|
||||||
? getFieldDef(schema, parentType, state.name)
|
|
||||||
: null;
|
|
||||||
type = fieldDef ? fieldDef.type : null;
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 'SelectionSet':
|
|
||||||
parentType = getNamedType(type);
|
|
||||||
break;
|
|
||||||
case 'Directive':
|
|
||||||
directiveDef = state.name ? schema.getDirective(state.name) : null;
|
|
||||||
break;
|
|
||||||
case 'Arguments':
|
|
||||||
if (!state.prevState) {
|
|
||||||
argDefs = null;
|
|
||||||
} else {
|
|
||||||
switch (state.prevState.kind) {
|
|
||||||
case 'Field':
|
|
||||||
argDefs = fieldDef && fieldDef.args;
|
|
||||||
break;
|
|
||||||
case 'Directive':
|
|
||||||
argDefs = directiveDef && directiveDef.args;
|
|
||||||
break;
|
|
||||||
case 'AliasedField':
|
|
||||||
const name = state.prevState && state.prevState.name;
|
|
||||||
if (!name) {
|
|
||||||
argDefs = null;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
const field = parentType
|
|
||||||
? getFieldDef(schema, parentType, name)
|
|
||||||
: null;
|
|
||||||
if (!field) {
|
|
||||||
argDefs = null;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
argDefs = field.args;
|
|
||||||
break;
|
|
||||||
default:
|
|
||||||
argDefs = null;
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
case 'Argument':
|
|
||||||
if (argDefs) {
|
|
||||||
for (let i = 0; i < argDefs.length; i++) {
|
|
||||||
if (argDefs[i].name === state.name) {
|
|
||||||
argDef = argDefs[i];
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
inputType = argDef && argDef.type;
|
|
||||||
break;
|
|
||||||
case 'EnumValue':
|
|
||||||
const enumType = getNamedType(inputType);
|
|
||||||
enumValue =
|
|
||||||
enumType instanceof GraphQLEnumType
|
|
||||||
? find(enumType.getValues(), val => val.value === state.name)
|
|
||||||
: null;
|
|
||||||
break;
|
|
||||||
case 'ListValue':
|
|
||||||
const nullableType = getNullableType(inputType);
|
|
||||||
inputType =
|
|
||||||
nullableType instanceof GraphQLList ? nullableType.ofType : null;
|
|
||||||
break;
|
|
||||||
case 'ObjectValue':
|
|
||||||
const objectType = getNamedType(inputType);
|
|
||||||
objectFieldDefs =
|
|
||||||
objectType instanceof GraphQLInputObjectType
|
|
||||||
? objectType.getFields()
|
|
||||||
: null;
|
|
||||||
break;
|
|
||||||
case 'ObjectField':
|
|
||||||
const objectField =
|
|
||||||
state.name && objectFieldDefs ? objectFieldDefs[state.name] : null;
|
|
||||||
inputType = objectField && objectField.type;
|
|
||||||
break;
|
|
||||||
case 'NamedType':
|
|
||||||
if (state.name) {
|
|
||||||
type = schema.getType(state.name);
|
|
||||||
}
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
argDef,
|
|
||||||
argDefs,
|
|
||||||
directiveDef,
|
|
||||||
enumValue,
|
|
||||||
fieldDef,
|
|
||||||
inputType,
|
|
||||||
objectFieldDefs,
|
|
||||||
parentType,
|
|
||||||
type,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
// Returns the first item in the array which causes predicate to return truthy.
|
|
||||||
function find(array, predicate) {
|
|
||||||
for (let i = 0; i < array.length; i++) {
|
|
||||||
if (predicate(array[i])) {
|
|
||||||
return array[i];
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
}
|
|
@ -1,136 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type {
|
|
||||||
ASTNode,
|
|
||||||
FragmentSpreadNode,
|
|
||||||
FragmentDefinitionNode,
|
|
||||||
OperationDefinitionNode,
|
|
||||||
NamedTypeNode,
|
|
||||||
TypeDefinitionNode,
|
|
||||||
} from 'graphql';
|
|
||||||
import type {
|
|
||||||
Definition,
|
|
||||||
DefinitionQueryResult,
|
|
||||||
FragmentInfo,
|
|
||||||
Position,
|
|
||||||
Range,
|
|
||||||
Uri,
|
|
||||||
ObjectTypeInfo,
|
|
||||||
} from 'graphql-language-service-types';
|
|
||||||
import {locToRange, offsetToPosition} from 'graphql-language-service-utils';
|
|
||||||
import invariant from 'assert';
|
|
||||||
|
|
||||||
export const LANGUAGE = 'GraphQL';
|
|
||||||
|
|
||||||
function getRange(text: string, node: ASTNode): Range {
|
|
||||||
const location = node.loc;
|
|
||||||
invariant(location, 'Expected ASTNode to have a location.');
|
|
||||||
return locToRange(text, location);
|
|
||||||
}
|
|
||||||
|
|
||||||
function getPosition(text: string, node: ASTNode): Position {
|
|
||||||
const location = node.loc;
|
|
||||||
invariant(location, 'Expected ASTNode to have a location.');
|
|
||||||
return offsetToPosition(text, location.start);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getDefinitionQueryResultForNamedType(
|
|
||||||
text: string,
|
|
||||||
node: NamedTypeNode,
|
|
||||||
dependencies: Array<ObjectTypeInfo>,
|
|
||||||
): Promise<DefinitionQueryResult> {
|
|
||||||
const name = node.name.value;
|
|
||||||
const defNodes = dependencies.filter(
|
|
||||||
({definition}) => definition.name && definition.name.value === name,
|
|
||||||
);
|
|
||||||
if (defNodes.length === 0) {
|
|
||||||
process.stderr.write(`Definition not found for GraphQL type ${name}`);
|
|
||||||
return {queryRange: [], definitions: []};
|
|
||||||
}
|
|
||||||
const definitions: Array<Definition> = defNodes.map(
|
|
||||||
({filePath, content, definition}) =>
|
|
||||||
getDefinitionForNodeDefinition(filePath || '', content, definition),
|
|
||||||
);
|
|
||||||
return {
|
|
||||||
definitions,
|
|
||||||
queryRange: definitions.map(_ => getRange(text, node)),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getDefinitionQueryResultForFragmentSpread(
|
|
||||||
text: string,
|
|
||||||
fragment: FragmentSpreadNode,
|
|
||||||
dependencies: Array<FragmentInfo>,
|
|
||||||
): Promise<DefinitionQueryResult> {
|
|
||||||
const name = fragment.name.value;
|
|
||||||
const defNodes = dependencies.filter(
|
|
||||||
({definition}) => definition.name.value === name,
|
|
||||||
);
|
|
||||||
if (defNodes.length === 0) {
|
|
||||||
process.stderr.write(`Definition not found for GraphQL fragment ${name}`);
|
|
||||||
return {queryRange: [], definitions: []};
|
|
||||||
}
|
|
||||||
const definitions: Array<Definition> = defNodes.map(
|
|
||||||
({filePath, content, definition}) =>
|
|
||||||
getDefinitionForFragmentDefinition(filePath || '', content, definition),
|
|
||||||
);
|
|
||||||
return {
|
|
||||||
definitions,
|
|
||||||
queryRange: definitions.map(_ => getRange(text, fragment)),
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getDefinitionQueryResultForDefinitionNode(
|
|
||||||
path: Uri,
|
|
||||||
text: string,
|
|
||||||
definition: FragmentDefinitionNode | OperationDefinitionNode,
|
|
||||||
): DefinitionQueryResult {
|
|
||||||
return {
|
|
||||||
definitions: [getDefinitionForFragmentDefinition(path, text, definition)],
|
|
||||||
queryRange: definition.name ? [getRange(text, definition.name)] : [],
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function getDefinitionForFragmentDefinition(
|
|
||||||
path: Uri,
|
|
||||||
text: string,
|
|
||||||
definition: FragmentDefinitionNode | OperationDefinitionNode,
|
|
||||||
): Definition {
|
|
||||||
const name = definition.name;
|
|
||||||
invariant(name, 'Expected ASTNode to have a Name.');
|
|
||||||
return {
|
|
||||||
path,
|
|
||||||
position: getPosition(text, definition),
|
|
||||||
range: getRange(text, definition),
|
|
||||||
name: name.value || '',
|
|
||||||
language: LANGUAGE,
|
|
||||||
// This is a file inside the project root, good enough for now
|
|
||||||
projectRoot: path,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function getDefinitionForNodeDefinition(
|
|
||||||
path: Uri,
|
|
||||||
text: string,
|
|
||||||
definition: TypeDefinitionNode,
|
|
||||||
): Definition {
|
|
||||||
const name = definition.name;
|
|
||||||
invariant(name, 'Expected ASTNode to have a Name.');
|
|
||||||
return {
|
|
||||||
path,
|
|
||||||
position: getPosition(text, definition),
|
|
||||||
range: getRange(text, definition),
|
|
||||||
name: name.value || '',
|
|
||||||
language: LANGUAGE,
|
|
||||||
// This is a file inside the project root, good enough for now
|
|
||||||
projectRoot: path,
|
|
||||||
};
|
|
||||||
}
|
|
@ -1,172 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type {
|
|
||||||
ASTNode,
|
|
||||||
DocumentNode,
|
|
||||||
GraphQLError,
|
|
||||||
GraphQLSchema,
|
|
||||||
Location,
|
|
||||||
SourceLocation,
|
|
||||||
} from 'graphql';
|
|
||||||
import type {
|
|
||||||
Diagnostic,
|
|
||||||
CustomValidationRule,
|
|
||||||
} from 'graphql-language-service-types';
|
|
||||||
|
|
||||||
import invariant from 'assert';
|
|
||||||
import {findDeprecatedUsages, parse} from 'graphql';
|
|
||||||
import {CharacterStream, onlineParser} from 'graphql-language-service-parser';
|
|
||||||
import {
|
|
||||||
Position,
|
|
||||||
Range,
|
|
||||||
validateWithCustomRules,
|
|
||||||
} from 'graphql-language-service-utils';
|
|
||||||
|
|
||||||
export const SEVERITY = {
|
|
||||||
ERROR: 1,
|
|
||||||
WARNING: 2,
|
|
||||||
INFORMATION: 3,
|
|
||||||
HINT: 4,
|
|
||||||
};
|
|
||||||
|
|
||||||
export function getDiagnostics(
|
|
||||||
query: string,
|
|
||||||
schema: ?GraphQLSchema = null,
|
|
||||||
customRules?: Array<CustomValidationRule>,
|
|
||||||
isRelayCompatMode?: boolean,
|
|
||||||
): Array<Diagnostic> {
|
|
||||||
let ast = null;
|
|
||||||
try {
|
|
||||||
ast = parse(query);
|
|
||||||
} catch (error) {
|
|
||||||
const range = getRange(error.locations[0], query);
|
|
||||||
return [
|
|
||||||
{
|
|
||||||
severity: SEVERITY.ERROR,
|
|
||||||
message: error.message,
|
|
||||||
source: 'GraphQL: Syntax',
|
|
||||||
range,
|
|
||||||
},
|
|
||||||
];
|
|
||||||
}
|
|
||||||
|
|
||||||
return validateQuery(ast, schema, customRules, isRelayCompatMode);
|
|
||||||
}
|
|
||||||
|
|
||||||
export function validateQuery(
|
|
||||||
ast: DocumentNode,
|
|
||||||
schema: ?GraphQLSchema = null,
|
|
||||||
customRules?: Array<CustomValidationRule>,
|
|
||||||
isRelayCompatMode?: boolean,
|
|
||||||
): Array<Diagnostic> {
|
|
||||||
// We cannot validate the query unless a schema is provided.
|
|
||||||
if (!schema) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
const validationErrorAnnotations = mapCat(
|
|
||||||
validateWithCustomRules(schema, ast, customRules, isRelayCompatMode),
|
|
||||||
error => annotations(error, SEVERITY.ERROR, 'Validation'),
|
|
||||||
);
|
|
||||||
// Note: findDeprecatedUsages was added in graphql@0.9.0, but we want to
|
|
||||||
// support older versions of graphql-js.
|
|
||||||
const deprecationWarningAnnotations = !findDeprecatedUsages
|
|
||||||
? []
|
|
||||||
: mapCat(findDeprecatedUsages(schema, ast), error =>
|
|
||||||
annotations(error, SEVERITY.WARNING, 'Deprecation'),
|
|
||||||
);
|
|
||||||
return validationErrorAnnotations.concat(deprecationWarningAnnotations);
|
|
||||||
}
|
|
||||||
|
|
||||||
// General utility for map-cating (aka flat-mapping).
|
|
||||||
function mapCat<T>(
|
|
||||||
array: Array<T>,
|
|
||||||
mapper: (item: T) => Array<any>,
|
|
||||||
): Array<any> {
|
|
||||||
return Array.prototype.concat.apply([], array.map(mapper));
|
|
||||||
}
|
|
||||||
|
|
||||||
function annotations(
|
|
||||||
error: GraphQLError,
|
|
||||||
severity: number,
|
|
||||||
type: string,
|
|
||||||
): Array<Diagnostic> {
|
|
||||||
if (!error.nodes) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
return error.nodes.map(node => {
|
|
||||||
const highlightNode =
|
|
||||||
node.kind !== 'Variable' && node.name
|
|
||||||
? node.name
|
|
||||||
: node.variable
|
|
||||||
? node.variable
|
|
||||||
: node;
|
|
||||||
|
|
||||||
invariant(error.locations, 'GraphQL validation error requires locations.');
|
|
||||||
const loc = error.locations[0];
|
|
||||||
const highlightLoc = getLocation(highlightNode);
|
|
||||||
const end = loc.column + (highlightLoc.end - highlightLoc.start);
|
|
||||||
return {
|
|
||||||
source: `GraphQL: ${type}`,
|
|
||||||
message: error.message,
|
|
||||||
severity,
|
|
||||||
range: new Range(
|
|
||||||
new Position(loc.line - 1, loc.column - 1),
|
|
||||||
new Position(loc.line - 1, end),
|
|
||||||
),
|
|
||||||
};
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
export function getRange(location: SourceLocation, queryText: string) {
|
|
||||||
const parser = onlineParser();
|
|
||||||
const state = parser.startState();
|
|
||||||
const lines = queryText.split('\n');
|
|
||||||
|
|
||||||
invariant(
|
|
||||||
lines.length >= location.line,
|
|
||||||
'Query text must have more lines than where the error happened',
|
|
||||||
);
|
|
||||||
|
|
||||||
let stream = null;
|
|
||||||
|
|
||||||
for (let i = 0; i < location.line; i++) {
|
|
||||||
stream = new CharacterStream(lines[i]);
|
|
||||||
while (!stream.eol()) {
|
|
||||||
const style = parser.token(stream, state);
|
|
||||||
if (style === 'invalidchar') {
|
|
||||||
break;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
invariant(stream, 'Expected Parser stream to be available.');
|
|
||||||
|
|
||||||
const line = location.line - 1;
|
|
||||||
const start = stream.getStartOfToken();
|
|
||||||
const end = stream.getCurrentPosition();
|
|
||||||
|
|
||||||
return new Range(new Position(line, start), new Position(line, end));
|
|
||||||
}
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Get location info from a node in a type-safe way.
|
|
||||||
*
|
|
||||||
* The only way a node could not have a location is if we initialized the parser
|
|
||||||
* (and therefore the lexer) with the `noLocation` option, but we always
|
|
||||||
* call `parse` without options above.
|
|
||||||
*/
|
|
||||||
function getLocation(node: any): Location {
|
|
||||||
const typeCastedNode = (node: ASTNode);
|
|
||||||
const location = typeCastedNode.loc;
|
|
||||||
invariant(location, 'Expected ASTNode to have a location.');
|
|
||||||
return location;
|
|
||||||
}
|
|
@ -1,186 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
/**
|
|
||||||
* Ported from codemirror-graphql
|
|
||||||
* https://github.com/graphql/codemirror-graphql/blob/master/src/info.js
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type {GraphQLSchema} from 'graphql';
|
|
||||||
import type {ContextToken} from 'graphql-language-service-types';
|
|
||||||
import type {Hover} from 'vscode-languageserver-types';
|
|
||||||
import type {Position} from 'graphql-language-service-utils';
|
|
||||||
import {getTokenAtPosition, getTypeInfo} from './getAutocompleteSuggestions';
|
|
||||||
import {GraphQLNonNull, GraphQLList} from 'graphql';
|
|
||||||
|
|
||||||
export function getHoverInformation(
|
|
||||||
schema: GraphQLSchema,
|
|
||||||
queryText: string,
|
|
||||||
cursor: Position,
|
|
||||||
contextToken?: ContextToken,
|
|
||||||
): Hover.contents {
|
|
||||||
const token = contextToken || getTokenAtPosition(queryText, cursor);
|
|
||||||
|
|
||||||
if (!schema || !token || !token.state) {
|
|
||||||
return [];
|
|
||||||
}
|
|
||||||
|
|
||||||
const state = token.state;
|
|
||||||
const kind = state.kind;
|
|
||||||
const step = state.step;
|
|
||||||
const typeInfo = getTypeInfo(schema, token.state);
|
|
||||||
const options = {schema};
|
|
||||||
|
|
||||||
// Given a Schema and a Token, produce the contents of an info tooltip.
|
|
||||||
// To do this, create a div element that we will render "into" and then pass
|
|
||||||
// it to various rendering functions.
|
|
||||||
if (
|
|
||||||
(kind === 'Field' && step === 0 && typeInfo.fieldDef) ||
|
|
||||||
(kind === 'AliasedField' && step === 2 && typeInfo.fieldDef)
|
|
||||||
) {
|
|
||||||
const into = [];
|
|
||||||
renderField(into, typeInfo, options);
|
|
||||||
renderDescription(into, options, typeInfo.fieldDef);
|
|
||||||
return into.join('').trim();
|
|
||||||
} else if (kind === 'Directive' && step === 1 && typeInfo.directiveDef) {
|
|
||||||
const into = [];
|
|
||||||
renderDirective(into, typeInfo, options);
|
|
||||||
renderDescription(into, options, typeInfo.directiveDef);
|
|
||||||
return into.join('').trim();
|
|
||||||
} else if (kind === 'Argument' && step === 0 && typeInfo.argDef) {
|
|
||||||
const into = [];
|
|
||||||
renderArg(into, typeInfo, options);
|
|
||||||
renderDescription(into, options, typeInfo.argDef);
|
|
||||||
return into.join('').trim();
|
|
||||||
} else if (
|
|
||||||
kind === 'EnumValue' &&
|
|
||||||
typeInfo.enumValue &&
|
|
||||||
typeInfo.enumValue.description
|
|
||||||
) {
|
|
||||||
const into = [];
|
|
||||||
renderEnumValue(into, typeInfo, options);
|
|
||||||
renderDescription(into, options, typeInfo.enumValue);
|
|
||||||
return into.join('').trim();
|
|
||||||
} else if (
|
|
||||||
kind === 'NamedType' &&
|
|
||||||
typeInfo.type &&
|
|
||||||
typeInfo.type.description
|
|
||||||
) {
|
|
||||||
const into = [];
|
|
||||||
renderType(into, typeInfo, options, typeInfo.type);
|
|
||||||
renderDescription(into, options, typeInfo.type);
|
|
||||||
return into.join('').trim();
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderField(into, typeInfo, options) {
|
|
||||||
renderQualifiedField(into, typeInfo, options);
|
|
||||||
renderTypeAnnotation(into, typeInfo, options, typeInfo.type);
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderQualifiedField(into, typeInfo, options) {
|
|
||||||
if (!typeInfo.fieldDef) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const fieldName = (typeInfo.fieldDef.name: string);
|
|
||||||
if (fieldName.slice(0, 2) !== '__') {
|
|
||||||
renderType(into, typeInfo, options, typeInfo.parentType);
|
|
||||||
text(into, '.');
|
|
||||||
}
|
|
||||||
text(into, fieldName);
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderDirective(into, typeInfo, options) {
|
|
||||||
if (!typeInfo.directiveDef) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const name = '@' + typeInfo.directiveDef.name;
|
|
||||||
text(into, name);
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderArg(into, typeInfo, options) {
|
|
||||||
if (typeInfo.directiveDef) {
|
|
||||||
renderDirective(into, typeInfo, options);
|
|
||||||
} else if (typeInfo.fieldDef) {
|
|
||||||
renderQualifiedField(into, typeInfo, options);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!typeInfo.argDef) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const name = typeInfo.argDef.name;
|
|
||||||
text(into, '(');
|
|
||||||
text(into, name);
|
|
||||||
renderTypeAnnotation(into, typeInfo, options, typeInfo.inputType);
|
|
||||||
text(into, ')');
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderTypeAnnotation(into, typeInfo, options, t) {
|
|
||||||
text(into, ': ');
|
|
||||||
renderType(into, typeInfo, options, t);
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderEnumValue(into, typeInfo, options) {
|
|
||||||
if (!typeInfo.enumValue) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const name = typeInfo.enumValue.name;
|
|
||||||
renderType(into, typeInfo, options, typeInfo.inputType);
|
|
||||||
text(into, '.');
|
|
||||||
text(into, name);
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderType(into, typeInfo, options, t) {
|
|
||||||
if (!t) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
if (t instanceof GraphQLNonNull) {
|
|
||||||
renderType(into, typeInfo, options, t.ofType);
|
|
||||||
text(into, '!');
|
|
||||||
} else if (t instanceof GraphQLList) {
|
|
||||||
text(into, '[');
|
|
||||||
renderType(into, typeInfo, options, t.ofType);
|
|
||||||
text(into, ']');
|
|
||||||
} else {
|
|
||||||
text(into, t.name);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderDescription(into, options, def) {
|
|
||||||
if (!def) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const description =
|
|
||||||
typeof def.description === 'string' ? def.description : null;
|
|
||||||
if (description) {
|
|
||||||
text(into, '\n\n');
|
|
||||||
text(into, description);
|
|
||||||
}
|
|
||||||
renderDeprecation(into, options, def);
|
|
||||||
}
|
|
||||||
|
|
||||||
function renderDeprecation(into, options, def) {
|
|
||||||
if (!def) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
const reason =
|
|
||||||
typeof def.deprecationReason === 'string' ? def.deprecationReason : null;
|
|
||||||
if (!reason) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
text(into, '\n\n');
|
|
||||||
text(into, 'Deprecated: ');
|
|
||||||
text(into, reason);
|
|
||||||
}
|
|
||||||
|
|
||||||
function text(into: string[], content: string) {
|
|
||||||
into.push(content);
|
|
||||||
}
|
|
@ -1,121 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
import type {
|
|
||||||
Outline,
|
|
||||||
TextToken,
|
|
||||||
TokenKind,
|
|
||||||
} from 'graphql-language-service-types';
|
|
||||||
|
|
||||||
import {Kind, parse, visit} from 'graphql';
|
|
||||||
import {offsetToPosition} from 'graphql-language-service-utils';
|
|
||||||
|
|
||||||
const {INLINE_FRAGMENT} = Kind;
|
|
||||||
|
|
||||||
const OUTLINEABLE_KINDS = {
|
|
||||||
Field: true,
|
|
||||||
OperationDefinition: true,
|
|
||||||
Document: true,
|
|
||||||
SelectionSet: true,
|
|
||||||
Name: true,
|
|
||||||
FragmentDefinition: true,
|
|
||||||
FragmentSpread: true,
|
|
||||||
InlineFragment: true,
|
|
||||||
};
|
|
||||||
|
|
||||||
type OutlineTreeConverterType = {[name: string]: Function};
|
|
||||||
|
|
||||||
export function getOutline(queryText: string): ?Outline {
|
|
||||||
let ast;
|
|
||||||
try {
|
|
||||||
ast = parse(queryText);
|
|
||||||
} catch (error) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const visitorFns = outlineTreeConverter(queryText);
|
|
||||||
const outlineTrees = visit(ast, {
|
|
||||||
leave(node) {
|
|
||||||
if (
|
|
||||||
OUTLINEABLE_KINDS.hasOwnProperty(node.kind) &&
|
|
||||||
visitorFns[node.kind]
|
|
||||||
) {
|
|
||||||
return visitorFns[node.kind](node);
|
|
||||||
}
|
|
||||||
return null;
|
|
||||||
},
|
|
||||||
});
|
|
||||||
return {outlineTrees};
|
|
||||||
}
|
|
||||||
|
|
||||||
function outlineTreeConverter(docText: string): OutlineTreeConverterType {
|
|
||||||
const meta = node => ({
|
|
||||||
representativeName: node.name,
|
|
||||||
startPosition: offsetToPosition(docText, node.loc.start),
|
|
||||||
endPosition: offsetToPosition(docText, node.loc.end),
|
|
||||||
children: node.selectionSet || [],
|
|
||||||
});
|
|
||||||
return {
|
|
||||||
Field: node => {
|
|
||||||
const tokenizedText = node.alias
|
|
||||||
? [buildToken('plain', node.alias), buildToken('plain', ': ')]
|
|
||||||
: [];
|
|
||||||
tokenizedText.push(buildToken('plain', node.name));
|
|
||||||
return {tokenizedText, ...meta(node)};
|
|
||||||
},
|
|
||||||
OperationDefinition: node => ({
|
|
||||||
tokenizedText: [
|
|
||||||
buildToken('keyword', node.operation),
|
|
||||||
buildToken('whitespace', ' '),
|
|
||||||
buildToken('class-name', node.name),
|
|
||||||
],
|
|
||||||
...meta(node),
|
|
||||||
}),
|
|
||||||
Document: node => node.definitions,
|
|
||||||
SelectionSet: node =>
|
|
||||||
concatMap(node.selections, child => {
|
|
||||||
return child.kind === INLINE_FRAGMENT ? child.selectionSet : child;
|
|
||||||
}),
|
|
||||||
Name: node => node.value,
|
|
||||||
FragmentDefinition: node => ({
|
|
||||||
tokenizedText: [
|
|
||||||
buildToken('keyword', 'fragment'),
|
|
||||||
buildToken('whitespace', ' '),
|
|
||||||
buildToken('class-name', node.name),
|
|
||||||
],
|
|
||||||
...meta(node),
|
|
||||||
}),
|
|
||||||
FragmentSpread: node => ({
|
|
||||||
tokenizedText: [
|
|
||||||
buildToken('plain', '...'),
|
|
||||||
buildToken('class-name', node.name),
|
|
||||||
],
|
|
||||||
...meta(node),
|
|
||||||
}),
|
|
||||||
InlineFragment: node => node.selectionSet,
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function buildToken(kind: TokenKind, value: string): TextToken {
|
|
||||||
return {kind, value};
|
|
||||||
}
|
|
||||||
|
|
||||||
function concatMap(arr: Array<any>, fn: Function): Array<any> {
|
|
||||||
const res = [];
|
|
||||||
for (let i = 0; i < arr.length; i++) {
|
|
||||||
const x = fn(arr[i], i);
|
|
||||||
if (Array.isArray(x)) {
|
|
||||||
res.push(...x);
|
|
||||||
} else {
|
|
||||||
res.push(x);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
return res;
|
|
||||||
}
|
|
@ -1,31 +0,0 @@
|
|||||||
/**
|
|
||||||
* Copyright (c) Facebook, Inc.
|
|
||||||
* All rights reserved.
|
|
||||||
*
|
|
||||||
* This source code is licensed under the license found in the
|
|
||||||
* LICENSE file in the root directory of this source tree.
|
|
||||||
*
|
|
||||||
* @flow
|
|
||||||
*/
|
|
||||||
|
|
||||||
export {
|
|
||||||
getDefinitionState,
|
|
||||||
getFieldDef,
|
|
||||||
forEachState,
|
|
||||||
objectValues,
|
|
||||||
hintList,
|
|
||||||
} from './autocompleteUtils';
|
|
||||||
|
|
||||||
export {getAutocompleteSuggestions} from './getAutocompleteSuggestions';
|
|
||||||
|
|
||||||
export {
|
|
||||||
LANGUAGE,
|
|
||||||
getDefinitionQueryResultForFragmentSpread,
|
|
||||||
getDefinitionQueryResultForDefinitionNode,
|
|
||||||
} from './getDefinition';
|
|
||||||
|
|
||||||
export {getDiagnostics, validateQuery} from './getDiagnostics';
|
|
||||||
export {getOutline} from './getOutline';
|
|
||||||
export {getHoverInformation} from './getHoverInformation';
|
|
||||||
|
|
||||||
export {GraphQLLanguageService} from './GraphQLLanguageService';
|
|
@ -1,7 +0,0 @@
|
|||||||
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 841.9 595.3">
|
|
||||||
<g fill="#61DAFB">
|
|
||||||
<path d="M666.3 296.5c0-32.5-40.7-63.3-103.1-82.4 14.4-63.6 8-114.2-20.2-130.4-6.5-3.8-14.1-5.6-22.4-5.6v22.3c4.6 0 8.3.9 11.4 2.6 13.6 7.8 19.5 37.5 14.9 75.7-1.1 9.4-2.9 19.3-5.1 29.4-19.6-4.8-41-8.5-63.5-10.9-13.5-18.5-27.5-35.3-41.6-50 32.6-30.3 63.2-46.9 84-46.9V78c-27.5 0-63.5 19.6-99.9 53.6-36.4-33.8-72.4-53.2-99.9-53.2v22.3c20.7 0 51.4 16.5 84 46.6-14 14.7-28 31.4-41.3 49.9-22.6 2.4-44 6.1-63.6 11-2.3-10-4-19.7-5.2-29-4.7-38.2 1.1-67.9 14.6-75.8 3-1.8 6.9-2.6 11.5-2.6V78.5c-8.4 0-16 1.8-22.6 5.6-28.1 16.2-34.4 66.7-19.9 130.1-62.2 19.2-102.7 49.9-102.7 82.3 0 32.5 40.7 63.3 103.1 82.4-14.4 63.6-8 114.2 20.2 130.4 6.5 3.8 14.1 5.6 22.5 5.6 27.5 0 63.5-19.6 99.9-53.6 36.4 33.8 72.4 53.2 99.9 53.2 8.4 0 16-1.8 22.6-5.6 28.1-16.2 34.4-66.7 19.9-130.1 62-19.1 102.5-49.9 102.5-82.3zm-130.2-66.7c-3.7 12.9-8.3 26.2-13.5 39.5-4.1-8-8.4-16-13.1-24-4.6-8-9.5-15.8-14.4-23.4 14.2 2.1 27.9 4.7 41 7.9zm-45.8 106.5c-7.8 13.5-15.8 26.3-24.1 38.2-14.9 1.3-30 2-45.2 2-15.1 0-30.2-.7-45-1.9-8.3-11.9-16.4-24.6-24.2-38-7.6-13.1-14.5-26.4-20.8-39.8 6.2-13.4 13.2-26.8 20.7-39.9 7.8-13.5 15.8-26.3 24.1-38.2 14.9-1.3 30-2 45.2-2 15.1 0 30.2.7 45 1.9 8.3 11.9 16.4 24.6 24.2 38 7.6 13.1 14.5 26.4 20.8 39.8-6.3 13.4-13.2 26.8-20.7 39.9zm32.3-13c5.4 13.4 10 26.8 13.8 39.8-13.1 3.2-26.9 5.9-41.2 8 4.9-7.7 9.8-15.6 14.4-23.7 4.6-8 8.9-16.1 13-24.1zM421.2 430c-9.3-9.6-18.6-20.3-27.8-32 9 .4 18.2.7 27.5.7 9.4 0 18.7-.2 27.8-.7-9 11.7-18.3 22.4-27.5 32zm-74.4-58.9c-14.2-2.1-27.9-4.7-41-7.9 3.7-12.9 8.3-26.2 13.5-39.5 4.1 8 8.4 16 13.1 24 4.7 8 9.5 15.8 14.4 23.4zM420.7 163c9.3 9.6 18.6 20.3 27.8 32-9-.4-18.2-.7-27.5-.7-9.4 0-18.7.2-27.8.7 9-11.7 18.3-22.4 27.5-32zm-74 58.9c-4.9 7.7-9.8 15.6-14.4 23.7-4.6 8-8.9 16-13 24-5.4-13.4-10-26.8-13.8-39.8 13.1-3.1 26.9-5.8 41.2-7.9zm-90.5 125.2c-35.4-15.1-58.3-34.9-58.3-50.6 0-15.7 22.9-35.6 58.3-50.6 8.6-3.7 18-7 27.7-10.1 5.7 19.6 13.2 40 22.5 60.9-9.2 20.8-16.6 41.1-22.2 60.6-9.9-3.1-19.3-6.5-28-10.2zM310 490c-13.6-7.8-19.5-37.5-14.9-75.7 1.1-9.4 
2.9-19.3 5.1-29.4 19.6 4.8 41 8.5 63.5 10.9 13.5 18.5 27.5 35.3 41.6 50-32.6 30.3-63.2 46.9-84 46.9-4.5-.1-8.3-1-11.3-2.7zm237.2-76.2c4.7 38.2-1.1 67.9-14.6 75.8-3 1.8-6.9 2.6-11.5 2.6-20.7 0-51.4-16.5-84-46.6 14-14.7 28-31.4 41.3-49.9 22.6-2.4 44-6.1 63.6-11 2.3 10.1 4.1 19.8 5.2 29.1zm38.5-66.7c-8.6 3.7-18 7-27.7 10.1-5.7-19.6-13.2-40-22.5-60.9 9.2-20.8 16.6-41.1 22.2-60.6 9.9 3.1 19.3 6.5 28.1 10.2 35.4 15.1 58.3 34.9 58.3 50.6-.1 15.7-23 35.6-58.4 50.6zM320.8 78.4z"/>
|
|
||||||
<circle cx="420.9" cy="296.5" r="45.7"/>
|
|
||||||
<path d="M520.5 78.1z"/>
|
|
||||||
</g>
|
|
||||||
</svg>
|
|
Before Width: | Height: | Size: 2.6 KiB |
@ -30,7 +30,7 @@ reload_on_config_change: true
|
|||||||
# seed_file: seed.js
|
# seed_file: seed.js
|
||||||
|
|
||||||
# Path pointing to where the migrations can be found
|
# Path pointing to where the migrations can be found
|
||||||
migrations_path: ./config/migrations
|
migrations_path: ./migrations
|
||||||
|
|
||||||
# Secret key for general encryption operations like
|
# Secret key for general encryption operations like
|
||||||
# encrypting the cursor data
|
# encrypting the cursor data
|
||||||
@ -116,18 +116,18 @@ database:
|
|||||||
# database ping timeout is used for db health checking
|
# database ping timeout is used for db health checking
|
||||||
ping_timeout: 1m
|
ping_timeout: 1m
|
||||||
|
|
||||||
# Define additional variables here to be used with filters
|
# Define additional variables here to be used with filters
|
||||||
variables:
|
variables:
|
||||||
admin_account_id: "5"
|
admin_account_id: "5"
|
||||||
|
|
||||||
# Field and table names that you wish to block
|
# Field and table names that you wish to block
|
||||||
blocklist:
|
blocklist:
|
||||||
- ar_internal_metadata
|
- ar_internal_metadata
|
||||||
- schema_migrations
|
- schema_migrations
|
||||||
- secret
|
- secret
|
||||||
- password
|
- password
|
||||||
- encrypted
|
- encrypted
|
||||||
- token
|
- token
|
||||||
|
|
||||||
tables:
|
tables:
|
||||||
- name: customers
|
- name: customers
|
||||||
|
@ -30,7 +30,7 @@ enable_tracing: true
|
|||||||
# seed_file: seed.js
|
# seed_file: seed.js
|
||||||
|
|
||||||
# Path pointing to where the migrations can be found
|
# Path pointing to where the migrations can be found
|
||||||
# migrations_path: migrations
|
# migrations_path: ./migrations
|
||||||
|
|
||||||
# Secret key for general encryption operations like
|
# Secret key for general encryption operations like
|
||||||
# encrypting the cursor data
|
# encrypting the cursor data
|
||||||
|
41
core/api.go
41
core/api.go
@ -55,6 +55,7 @@ import (
|
|||||||
_log "log"
|
_log "log"
|
||||||
"os"
|
"os"
|
||||||
|
|
||||||
|
"github.com/chirino/graphql"
|
||||||
"github.com/dosco/super-graph/core/internal/allow"
|
"github.com/dosco/super-graph/core/internal/allow"
|
||||||
"github.com/dosco/super-graph/core/internal/crypto"
|
"github.com/dosco/super-graph/core/internal/crypto"
|
||||||
"github.com/dosco/super-graph/core/internal/psql"
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
@ -87,6 +88,7 @@ type SuperGraph struct {
|
|||||||
prepared map[string]*preparedItem
|
prepared map[string]*preparedItem
|
||||||
roles map[string]*Role
|
roles map[string]*Role
|
||||||
getRole *sql.Stmt
|
getRole *sql.Stmt
|
||||||
|
rmap map[uint64]*resolvFn
|
||||||
abacEnabled bool
|
abacEnabled bool
|
||||||
anonExists bool
|
anonExists bool
|
||||||
qc *qcode.Compiler
|
qc *qcode.Compiler
|
||||||
@ -118,6 +120,10 @@ func NewSuperGraph(conf *Config, db *sql.DB) (*SuperGraph, error) {
|
|||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if err := sg.initResolvers(); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
if len(conf.SecretKey) != 0 {
|
if len(conf.SecretKey) != 0 {
|
||||||
sk := sha256.Sum256([]byte(conf.SecretKey))
|
sk := sha256.Sum256([]byte(conf.SecretKey))
|
||||||
conf.SecretKey = ""
|
conf.SecretKey = ""
|
||||||
@ -149,7 +155,29 @@ type Result struct {
|
|||||||
// In developer mode all names queries are saved into a file `allow.list` and in production mode only
|
// In developer mode all names queries are saved into a file `allow.list` and in production mode only
|
||||||
// queries from this file can be run.
|
// queries from this file can be run.
|
||||||
func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMessage) (*Result, error) {
|
func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMessage) (*Result, error) {
|
||||||
ct := scontext{Context: c, sg: sg, query: query, vars: vars}
|
var res Result
|
||||||
|
|
||||||
|
res.op = qcode.GetQType(query)
|
||||||
|
res.name = allow.QueryName(query)
|
||||||
|
|
||||||
|
// use the chirino/graphql library for introspection queries
|
||||||
|
// disabled when allow list is enforced
|
||||||
|
if !sg.conf.UseAllowList && res.name == "IntrospectionQuery" {
|
||||||
|
engine, err := sg.createGraphQLEgine()
|
||||||
|
if err != nil {
|
||||||
|
res.Error = err.Error()
|
||||||
|
return &res, err
|
||||||
|
}
|
||||||
|
|
||||||
|
r := engine.ExecuteOne(&graphql.EngineRequest{Query: query})
|
||||||
|
res.Data = r.Data
|
||||||
|
if r.Error() != nil {
|
||||||
|
res.Error = r.Error().Error()
|
||||||
|
}
|
||||||
|
return &res, r.Error()
|
||||||
|
}
|
||||||
|
|
||||||
|
ct := scontext{Context: c, sg: sg, query: query, vars: vars, res: res}
|
||||||
|
|
||||||
if len(vars) <= 2 {
|
if len(vars) <= 2 {
|
||||||
ct.vars = nil
|
ct.vars = nil
|
||||||
@ -161,9 +189,6 @@ func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMess
|
|||||||
ct.role = "anon"
|
ct.role = "anon"
|
||||||
}
|
}
|
||||||
|
|
||||||
ct.res.op = qcode.GetQType(query)
|
|
||||||
ct.res.name = allow.QueryName(query)
|
|
||||||
|
|
||||||
data, err := ct.execQuery()
|
data, err := ct.execQuery()
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return &ct.res, err
|
return &ct.res, err
|
||||||
@ -173,3 +198,11 @@ func (sg *SuperGraph) GraphQL(c context.Context, query string, vars json.RawMess
|
|||||||
|
|
||||||
return &ct.res, nil
|
return &ct.res, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (sg *SuperGraph) GraphQLSchema() (string, error) {
|
||||||
|
engine, err := sg.createGraphQLEgine()
|
||||||
|
if err != nil {
|
||||||
|
return "", err
|
||||||
|
}
|
||||||
|
return engine.Schema.String(), nil
|
||||||
|
}
|
||||||
|
24
core/core.go
24
core/core.go
@ -89,25 +89,25 @@ func (sg *SuperGraph) initCompilers() error {
|
|||||||
|
|
||||||
func (c *scontext) execQuery() ([]byte, error) {
|
func (c *scontext) execQuery() ([]byte, error) {
|
||||||
var data []byte
|
var data []byte
|
||||||
// var st *stmt
|
var st *stmt
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
if c.sg.conf.UseAllowList {
|
if c.sg.conf.UseAllowList {
|
||||||
data, _, err = c.resolvePreparedSQL()
|
data, st, err = c.resolvePreparedSQL()
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
data, _, err = c.resolveSQL()
|
data, st, err = c.resolveSQL()
|
||||||
if err != nil {
|
|
||||||
return nil, err
|
|
||||||
}
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return data, nil
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
//return execRemoteJoin(st, data, c.req.hdr)
|
if len(data) == 0 || st.skipped == 0 {
|
||||||
|
return data, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
// return c.sg.execRemoteJoin(st, data, c.req.hdr)
|
||||||
|
return c.sg.execRemoteJoin(st, data, nil)
|
||||||
}
|
}
|
||||||
|
|
||||||
func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
|
func (c *scontext) resolvePreparedSQL() ([]byte, *stmt, error) {
|
||||||
|
@ -5,8 +5,8 @@ import (
|
|||||||
"encoding/base64"
|
"encoding/base64"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/core/internal/crypto"
|
"github.com/dosco/super-graph/core/internal/crypto"
|
||||||
"github.com/dosco/super-graph/jsn"
|
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
|
"github.com/dosco/super-graph/jsn"
|
||||||
)
|
)
|
||||||
|
|
||||||
func (sg *SuperGraph) encryptCursor(qc *qcode.QCode, data []byte) ([]byte, error) {
|
func (sg *SuperGraph) encryptCursor(qc *qcode.QCode, data []byte) ([]byte, error) {
|
||||||
|
483
core/graph-schema.go
Normal file
483
core/graph-schema.go
Normal file
@ -0,0 +1,483 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
import (
|
||||||
|
"errors"
|
||||||
|
"regexp"
|
||||||
|
"strings"
|
||||||
|
|
||||||
|
"github.com/chirino/graphql"
|
||||||
|
"github.com/chirino/graphql/resolvers"
|
||||||
|
"github.com/chirino/graphql/schema"
|
||||||
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
|
)
|
||||||
|
|
||||||
|
var typeMap map[string]string = map[string]string{
|
||||||
|
"smallint": "Int",
|
||||||
|
"integer": "Int",
|
||||||
|
"bigint": "Int",
|
||||||
|
"smallserial": "Int",
|
||||||
|
"serial": "Int",
|
||||||
|
"bigserial": "Int",
|
||||||
|
"decimal": "Float",
|
||||||
|
"numeric": "Float",
|
||||||
|
"real": "Float",
|
||||||
|
"double precision": "Float",
|
||||||
|
"money": "Float",
|
||||||
|
"boolean": "Boolean",
|
||||||
|
}
|
||||||
|
|
||||||
|
func (sg *SuperGraph) createGraphQLEgine() (*graphql.Engine, error) {
|
||||||
|
engine := graphql.New()
|
||||||
|
engineSchema := engine.Schema
|
||||||
|
dbSchema := sg.schema
|
||||||
|
|
||||||
|
engineSchema.Parse(`
|
||||||
|
enum OrderDirection {
|
||||||
|
asc
|
||||||
|
desc
|
||||||
|
}
|
||||||
|
`)
|
||||||
|
|
||||||
|
gqltype := func(col psql.DBColumn) schema.Type {
|
||||||
|
typeName := typeMap[strings.ToLower(col.Type)]
|
||||||
|
if typeName == "" {
|
||||||
|
typeName = "String"
|
||||||
|
}
|
||||||
|
var t schema.Type = &schema.TypeName{Ident: schema.Ident{Text: typeName}}
|
||||||
|
if col.NotNull {
|
||||||
|
t = &schema.NonNull{OfType: t}
|
||||||
|
}
|
||||||
|
return t
|
||||||
|
}
|
||||||
|
|
||||||
|
query := &schema.Object{
|
||||||
|
Name: "Query",
|
||||||
|
Fields: schema.FieldList{},
|
||||||
|
}
|
||||||
|
mutation := &schema.Object{
|
||||||
|
Name: "Mutation",
|
||||||
|
Fields: schema.FieldList{},
|
||||||
|
}
|
||||||
|
engineSchema.Types[query.Name] = query
|
||||||
|
engineSchema.Types[mutation.Name] = mutation
|
||||||
|
engineSchema.EntryPoints[schema.Query] = query
|
||||||
|
engineSchema.EntryPoints[schema.Mutation] = mutation
|
||||||
|
|
||||||
|
validGraphQLIdentifierRegex := regexp.MustCompile(`^[A-Za-z_][A-Za-z_0-9]*$`)
|
||||||
|
|
||||||
|
scalarExpressionTypesNeeded := map[string]bool{}
|
||||||
|
tableNames := dbSchema.GetTableNames()
|
||||||
|
for _, table := range tableNames {
|
||||||
|
|
||||||
|
ti, err := dbSchema.GetTable(table)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
if !ti.IsSingular {
|
||||||
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
|
singularName := ti.Singular
|
||||||
|
if !validGraphQLIdentifierRegex.MatchString(singularName) {
|
||||||
|
return nil, errors.New("table name is not a valid GraphQL identifier: " + singularName)
|
||||||
|
}
|
||||||
|
pluralName := ti.Plural
|
||||||
|
if !validGraphQLIdentifierRegex.MatchString(pluralName) {
|
||||||
|
return nil, errors.New("table name is not a valid GraphQL identifier: " + pluralName)
|
||||||
|
}
|
||||||
|
|
||||||
|
outputType := &schema.Object{
|
||||||
|
Name: singularName + "Output",
|
||||||
|
Fields: schema.FieldList{},
|
||||||
|
}
|
||||||
|
engineSchema.Types[outputType.Name] = outputType
|
||||||
|
|
||||||
|
inputType := &schema.InputObject{
|
||||||
|
Name: singularName + "Input",
|
||||||
|
Fields: schema.InputValueList{},
|
||||||
|
}
|
||||||
|
engineSchema.Types[inputType.Name] = inputType
|
||||||
|
|
||||||
|
orderByType := &schema.InputObject{
|
||||||
|
Name: singularName + "OrderBy",
|
||||||
|
Fields: schema.InputValueList{},
|
||||||
|
}
|
||||||
|
engineSchema.Types[orderByType.Name] = orderByType
|
||||||
|
|
||||||
|
expressionTypeName := singularName + "Expression"
|
||||||
|
expressionType := &schema.InputObject{
|
||||||
|
Name: expressionTypeName,
|
||||||
|
Fields: schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "and"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "or"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionTypeName}}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
engineSchema.Types[expressionType.Name] = expressionType
|
||||||
|
|
||||||
|
for _, col := range ti.Columns {
|
||||||
|
colName := col.Name
|
||||||
|
if !validGraphQLIdentifierRegex.MatchString(colName) {
|
||||||
|
return nil, errors.New("column name is not a valid GraphQL identifier: " + colName)
|
||||||
|
}
|
||||||
|
|
||||||
|
colType := gqltype(col)
|
||||||
|
nullableColType := ""
|
||||||
|
if x, ok := colType.(*schema.NonNull); ok {
|
||||||
|
nullableColType = x.OfType.(*schema.TypeName).Ident.Text
|
||||||
|
} else {
|
||||||
|
nullableColType = colType.(*schema.TypeName).Ident.Text
|
||||||
|
}
|
||||||
|
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
|
||||||
|
// If it's a numeric type...
|
||||||
|
if nullableColType == "Float" || nullableColType == "Int" {
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "avg_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "count_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "max_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "min_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "stddev_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "stddev_pop_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "stddev_samp_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "variance_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "var_pop_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
outputType.Fields = append(outputType.Fields, &schema.Field{
|
||||||
|
Name: "var_samp_" + colName,
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
inputType.Fields = append(inputType.Fields, &schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: colName},
|
||||||
|
Type: colType,
|
||||||
|
})
|
||||||
|
orderByType.Fields = append(orderByType.Fields, &schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: colName},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "OrderDirection"}}},
|
||||||
|
})
|
||||||
|
|
||||||
|
scalarExpressionTypesNeeded[nullableColType] = true
|
||||||
|
|
||||||
|
expressionType.Fields = append(expressionType.Fields, &schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: colName},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: nullableColType + "Expression"}}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
outputTypeName := &schema.TypeName{Ident: schema.Ident{Text: outputType.Name}}
|
||||||
|
inputTypeName := &schema.TypeName{Ident: schema.Ident{Text: inputType.Name}}
|
||||||
|
pluralOutputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: outputType.Name}}}}}
|
||||||
|
pluralInputTypeName := &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: inputType.Name}}}}}
|
||||||
|
|
||||||
|
args := schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: "To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."},
|
||||||
|
Name: schema.Ident{Text: "order_by"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: orderByType.Name}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "where"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: expressionType.Name}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "limit"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "offset"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "first"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "last"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Int"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "before"},
|
||||||
|
Type: &schema.TypeName{Ident: schema.Ident{Text: "String"}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "after"},
|
||||||
|
Type: &schema.TypeName{Ident: schema.Ident{Text: "String"}},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
if ti.PrimaryCol != nil {
|
||||||
|
t := gqltype(*ti.PrimaryCol)
|
||||||
|
if _, ok := t.(*schema.NonNull); !ok {
|
||||||
|
t = &schema.NonNull{OfType: t}
|
||||||
|
}
|
||||||
|
args = append(args, &schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: "Finds the record by the primary key"},
|
||||||
|
Name: schema.Ident{Text: "id"},
|
||||||
|
Type: t,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
if ti.TSVCol != nil {
|
||||||
|
args = append(args, &schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: "Performs full text search using a TSV index"},
|
||||||
|
Name: schema.Ident{Text: "search"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
query.Fields = append(query.Fields, &schema.Field{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: singularName,
|
||||||
|
Type: outputTypeName,
|
||||||
|
Args: args,
|
||||||
|
})
|
||||||
|
query.Fields = append(query.Fields, &schema.Field{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: pluralName,
|
||||||
|
Type: pluralOutputTypeName,
|
||||||
|
Args: args,
|
||||||
|
})
|
||||||
|
|
||||||
|
mutationArgs := append(args, schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "insert"},
|
||||||
|
Type: inputTypeName,
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "update"},
|
||||||
|
Type: inputTypeName,
|
||||||
|
},
|
||||||
|
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "upsert"},
|
||||||
|
Type: inputTypeName,
|
||||||
|
},
|
||||||
|
}...)
|
||||||
|
|
||||||
|
mutation.Fields = append(mutation.Fields, &schema.Field{
|
||||||
|
Name: singularName,
|
||||||
|
Args: mutationArgs,
|
||||||
|
Type: outputType,
|
||||||
|
})
|
||||||
|
mutation.Fields = append(mutation.Fields, &schema.Field{
|
||||||
|
Name: pluralName,
|
||||||
|
Args: append(mutationArgs, schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "inserts"},
|
||||||
|
Type: pluralInputTypeName,
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "updates"},
|
||||||
|
Type: pluralInputTypeName,
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Desc: &schema.Description{Text: ""},
|
||||||
|
Name: schema.Ident{Text: "upserts"},
|
||||||
|
Type: pluralInputTypeName,
|
||||||
|
},
|
||||||
|
}...),
|
||||||
|
Type: outputType,
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
for typeName, _ := range scalarExpressionTypesNeeded {
|
||||||
|
expressionType := &schema.InputObject{
|
||||||
|
Name: typeName + "Expression",
|
||||||
|
Fields: schema.InputValueList{
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "eq"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "equals"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "neq"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_equals"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "gt"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "greater_than"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "lt"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "lesser_than"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "gte"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "greater_or_equals"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "lte"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "lesser_or_equals"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "in"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "nin"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_in"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "like"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "nlike"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_like"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "ilike"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "nilike"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_ilike"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "similar"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "nsimilar"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "not_similar"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "has_key"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "has_key_any"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "has_key_all"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "contains"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.List{OfType: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: typeName}}}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "contained_in"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "String"}}},
|
||||||
|
},
|
||||||
|
&schema.InputValue{
|
||||||
|
Name: schema.Ident{Text: "is_null"},
|
||||||
|
Type: &schema.NonNull{OfType: &schema.TypeName{Ident: schema.Ident{Text: "Boolean"}}},
|
||||||
|
},
|
||||||
|
},
|
||||||
|
}
|
||||||
|
engineSchema.Types[expressionType.Name] = expressionType
|
||||||
|
}
|
||||||
|
|
||||||
|
err := engineSchema.ResolveTypes()
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
engine.Resolver = resolvers.Func(func(request *resolvers.ResolveRequest, next resolvers.Resolution) resolvers.Resolution {
|
||||||
|
resolver := resolvers.MetadataResolver.Resolve(request, next)
|
||||||
|
if resolver != nil {
|
||||||
|
return resolver
|
||||||
|
}
|
||||||
|
resolver = resolvers.MethodResolver.Resolve(request, next) // needed by the MetadataResolver
|
||||||
|
if resolver != nil {
|
||||||
|
return resolver
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
})
|
||||||
|
return engine, nil
|
||||||
|
}
|
@ -71,7 +71,7 @@ func (sg *SuperGraph) initConfig() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// Roles: validate and sanitize
|
// Roles: validate and sanitize
|
||||||
c.RolesQuery = sanitize(c.RolesQuery)
|
c.RolesQuery = sanitizeVars(c.RolesQuery)
|
||||||
|
|
||||||
if len(c.RolesQuery) == 0 {
|
if len(c.RolesQuery) == 0 {
|
||||||
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
sg.log.Printf("WRN roles_query not defined: attribute based access control disabled")
|
||||||
@ -108,6 +108,7 @@ func addTables(c *Config, di *psql.DBInfo) error {
|
|||||||
if err := addTable(di, t.Columns, t); err != nil {
|
if err := addTable(di, t.Columns, t); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
}
|
}
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
@ -0,0 +1,84 @@
|
|||||||
|
package cockraochdb_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
|
"log"
|
||||||
|
"os"
|
||||||
|
"os/exec"
|
||||||
|
"regexp"
|
||||||
|
"sync/atomic"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
integration_tests "github.com/dosco/super-graph/core/internal/integration_tests"
|
||||||
|
_ "github.com/jackc/pgx/v4/stdlib"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCockroachDB(t *testing.T) {
|
||||||
|
|
||||||
|
dir, err := ioutil.TempDir("", "temp-cockraochdb-")
|
||||||
|
if err != nil {
|
||||||
|
log.Fatal(err)
|
||||||
|
}
|
||||||
|
|
||||||
|
cmd := exec.Command("cockroach", "start", "--insecure", "--listen-addr", ":0", "--http-addr", ":0", "--store=path="+dir)
|
||||||
|
finder := &urlFinder{
|
||||||
|
c: make(chan bool),
|
||||||
|
}
|
||||||
|
cmd.Stdout = finder
|
||||||
|
cmd.Stderr = ioutil.Discard
|
||||||
|
|
||||||
|
err = cmd.Start()
|
||||||
|
if err != nil {
|
||||||
|
t.Skip("is CockroachDB installed?: " + err.Error())
|
||||||
|
}
|
||||||
|
fmt.Println("started temporary cockroach db")
|
||||||
|
|
||||||
|
stopped := int32(0)
|
||||||
|
stopDatabase := func() {
|
||||||
|
fmt.Println("stopping temporary cockroach db")
|
||||||
|
if atomic.CompareAndSwapInt32(&stopped, 0, 1) {
|
||||||
|
cmd.Process.Kill()
|
||||||
|
cmd.Process.Wait()
|
||||||
|
os.RemoveAll(dir)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
defer stopDatabase()
|
||||||
|
|
||||||
|
// Wait till we figure out the URL we should connect to...
|
||||||
|
<-finder.c
|
||||||
|
db, err := sql.Open("pgx", finder.URL)
|
||||||
|
if err != nil {
|
||||||
|
stopDatabase()
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
integration_tests.SetupSchema(t, db)
|
||||||
|
|
||||||
|
integration_tests.TestSuperGraph(t, db, func(t *testing.T) {
|
||||||
|
if t.Name() == "TestCockroachDB/nested_insert" {
|
||||||
|
t.Skip("nested inserts currently not working yet on cockroach db")
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
|
||||||
|
// sqlURLRegex matches the "sql: postgresql://..." line CockroachDB prints on
// startup. Compiled once at package init instead of on every Write call.
var sqlURLRegex = regexp.MustCompile(`\nsql:\s+(postgresql:[^\s]+)\n`)

// urlFinder is an io.Writer that scans a child process's stdout for the SQL
// connection URL and closes c exactly once when it has been found.
type urlFinder struct {
	c    chan bool // closed once URL has been captured
	done bool      // true after the URL has been found
	URL  string    // the captured postgresql:// connection URL
}

// Write implements io.Writer. It never fails and always reports the full
// input as consumed so the wrapped process's output flow is not disturbed.
// NOTE(review): assumes the whole "sql: ..." line arrives within a single
// Write call (true in practice for line-buffered process output) — a URL
// split across two writes would be missed, same as the original code.
func (finder *urlFinder) Write(p []byte) (n int, err error) {
	if !finder.done {
		// Only the first match is needed, so FindStringSubmatch suffices.
		if m := sqlURLRegex.FindStringSubmatch(string(p)); m != nil {
			finder.URL = m[1]
			finder.done = true
			close(finder.c)
		}
	}
	return len(p), nil
}
|
260
core/internal/integration_tests/integration_tests.go
Normal file
260
core/internal/integration_tests/integration_tests.go
Normal file
@ -0,0 +1,260 @@
|
|||||||
|
package integration_tests
|
||||||
|
|
||||||
|
import (
|
||||||
|
"context"
|
||||||
|
"database/sql"
|
||||||
|
"encoding/json"
|
||||||
|
"io/ioutil"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
"github.com/dosco/super-graph/core"
|
||||||
|
"github.com/stretchr/testify/assert"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func SetupSchema(t *testing.T, db *sql.DB) {
|
||||||
|
|
||||||
|
_, err := db.Exec(`
|
||||||
|
CREATE TABLE users (
|
||||||
|
id integer PRIMARY KEY,
|
||||||
|
full_name text
|
||||||
|
)`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = db.Exec(`CREATE TABLE product (
|
||||||
|
id integer PRIMARY KEY,
|
||||||
|
name text,
|
||||||
|
weight float
|
||||||
|
)`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = db.Exec(`CREATE TABLE line_item (
|
||||||
|
id integer PRIMARY KEY,
|
||||||
|
product integer REFERENCES product(id),
|
||||||
|
quantity integer,
|
||||||
|
price float
|
||||||
|
)`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
func DropSchema(t *testing.T, db *sql.DB) {
|
||||||
|
|
||||||
|
_, err := db.Exec(`DROP TABLE IF EXISTS line_item`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = db.Exec(`DROP TABLE IF EXISTS product`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
_, err = db.Exec(`DROP TABLE IF EXISTS users`)
|
||||||
|
require.NoError(t, err)
|
||||||
|
}
|
||||||
|
|
||||||
|
// TestSuperGraph runs the shared integration suite against an already-open
// database whose schema was created by SetupSchema. The `before` hook runs
// at the start of every subtest so database-specific callers (e.g. the
// CockroachDB harness) can skip individual cases by name.
//
// NOTE(review): the subtests are order-dependent — "seed fixtures" must run
// first, and later subtests ("update", "delete") observe mutations made by
// earlier ones. Do not reorder or run them in parallel.
func TestSuperGraph(t *testing.T, db *sql.DB, before func(t *testing.T)) {
	config := core.Config{}
	// Allow-list disabled so arbitrary ad-hoc queries below are accepted.
	config.UseAllowList = false
	config.AllowListFile = "./allow.list"
	config.RolesQuery = `SELECT * FROM users WHERE id = $user_id`

	// Single anonymous role with read access (limit 100) to all three tables.
	config.Roles = []core.Role{
		core.Role{
			Name: "anon",
			Tables: []core.RoleTable{
				core.RoleTable{Name: "users", Query: core.Query{Limit: 100}},
				core.RoleTable{Name: "product", Query: core.Query{Limit: 100}},
				core.RoleTable{Name: "line_item", Query: core.Query{Limit: 100}},
			},
		},
	}

	sg, err := core.NewSuperGraph(&config, db)
	require.NoError(t, err)
	ctx := context.Background()

	// Inserts the products and line_items every later subtest depends on.
	t.Run("seed fixtures", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`mutation { products (insert: $products) { id } }`,
			json.RawMessage(`{"products":[
				{"id":1, "name":"Charmin Ultra Soft", "weight": 0.5},
				{"id":2, "name":"Hand Sanitizer", "weight": 0.2},
				{"id":3, "name":"Case of Corona", "weight": 1.2}
			]}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"products": [{"id": 1}, {"id": 2}, {"id": 3}]}`, string(res.Data))

		res, err = sg.GraphQL(ctx,
			`mutation { line_items (insert: $line_items) { id } }`,
			json.RawMessage(`{"line_items":[
				{"id":5001, "product":1, "price":6.95, "quantity":10},
				{"id":5002, "product":2, "price":10.99, "quantity":2}
			]}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_items": [{"id": 5001}, {"id": 5002}]}`, string(res.Data))
	})

	// Single-record lookup by primary key.
	t.Run("get line item", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`query { line_item(id:$id) { id, price, quantity } }`,
			json.RawMessage(`{"id":5001}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_item": {"id": 5001, "price": 6.95, "quantity": 10}}`, string(res.Data))
	})

	// Unfiltered list query over the seeded rows.
	t.Run("get line items", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`query { line_items { id, price, quantity } }`,
			json.RawMessage(`{}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_items": [{"id": 5001, "price": 6.95, "quantity": 10}, {"id": 5002, "price": 10.99, "quantity": 2}]}`, string(res.Data))
	})

	// Mutates quantity 10 -> 20, then reads the row back to confirm.
	// Later subtests observe quantity 20.
	t.Run("update line item", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`mutation { line_item(update:$update, id:$id) { id } }`,
			json.RawMessage(`{"id":5001, "update":{"quantity":20}}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_item": {"id": 5001}}`, string(res.Data))

		res, err = sg.GraphQL(ctx,
			`query { line_item(id:$id) { id, price, quantity } }`,
			json.RawMessage(`{"id":5001}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_item": {"id": 5001, "price": 6.95, "quantity": 20}}`, string(res.Data))
	})

	// Deletes row 5002 and verifies only 5001 (with quantity 20 from the
	// previous subtest) remains.
	t.Run("delete line item", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`mutation { line_item(delete:true, id:$id) { id } }`,
			json.RawMessage(`{"id":5002}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_item": {"id": 5002}}`, string(res.Data))

		res, err = sg.GraphQL(ctx,
			`query { line_items { id, price, quantity } }`,
			json.RawMessage(`{}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_items": [{"id": 5001, "price": 6.95, "quantity": 20}]}`, string(res.Data))
	})

	// Insert that connects to an existing product via nested "connect";
	// CockroachDB callers skip this case by name (see TestCockroachDB).
	t.Run("nested insert", func(t *testing.T) {
		before(t)
		res, err := sg.GraphQL(ctx,
			`mutation { line_items (insert: $line_item) { id, product { name } } }`,
			json.RawMessage(`{"line_item":
				{"id":5003, "product": { "connect": { "id": 1} }, "price":10.95, "quantity":15}
			}`))
		require.NoError(t, err, res.SQL())
		require.Equal(t, `{"line_items": [{"id": 5003, "product": {"name": "Charmin Ultra Soft"}}]}`, string(res.Data))
	})

	// Compares the generated SDL schema against the golden fixture
	// introspection.graphql byte-for-byte.
	t.Run("schema introspection", func(t *testing.T) {
		before(t)
		schema, err := sg.GraphQLSchema()
		require.NoError(t, err)
		// Uncomment the following line if you need to regenerate the expected schema.
		//ioutil.WriteFile("../introspection.graphql", []byte(schema), 0644)
		expected, err := ioutil.ReadFile("../introspection.graphql")
		require.NoError(t, err)
		assert.Equal(t, string(expected), schema)
	})

	// Smoke-test the full introspection query; only the response prefix is
	// checked since the complete type list is covered by the fixture above.
	res, err := sg.GraphQL(ctx, introspectionQuery, json.RawMessage(``))
	assert.NoError(t, err)
	assert.Contains(t, string(res.Data),
		`{"queryType":{"name":"Query"},"mutationType":{"name":"Mutation"},"subscriptionType":null,"types":`)
}
|
||||||
|
|
||||||
|
// introspectionQuery is the standard GraphQL introspection query (the same
// one sent by GraphiQL and similar tooling), used at the end of
// TestSuperGraph to smoke-test the engine's introspection support.
const introspectionQuery = `
  query IntrospectionQuery {
    __schema {
      queryType { name }
      mutationType { name }
      subscriptionType { name }
      types {
        ...FullType
      }
      directives {
        name
        description
        locations
        args {
          ...InputValue
        }
      }
    }
  }
  fragment FullType on __Type {
    kind
    name
    description
    fields(includeDeprecated: true) {
      name
      description
      args {
        ...InputValue
      }
      type {
        ...TypeRef
      }
      isDeprecated
      deprecationReason
    }
    inputFields {
      ...InputValue
    }
    interfaces {
      ...TypeRef
    }
    enumValues(includeDeprecated: true) {
      name
      description
      isDeprecated
      deprecationReason
    }
    possibleTypes {
      ...TypeRef
    }
  }
  fragment InputValue on __InputValue {
    name
    description
    type { ...TypeRef }
    defaultValue
  }
  fragment TypeRef on __Type {
    kind
    name
    ofType {
      kind
      name
      ofType {
        kind
        name
        ofType {
          kind
          name
          ofType {
            kind
            name
            ofType {
              kind
              name
              ofType {
                kind
                name
                ofType {
                  kind
                  name
                }
              }
            }
          }
        }
      }
    }
  }
`
|
319
core/internal/integration_tests/introspection.graphql
Normal file
319
core/internal/integration_tests/introspection.graphql
Normal file
@ -0,0 +1,319 @@
|
|||||||
|
input FloatExpression {
|
||||||
|
contained_in:String!
|
||||||
|
contains:[Float!]!
|
||||||
|
eq:Float!
|
||||||
|
equals:Float!
|
||||||
|
greater_or_equals:Float!
|
||||||
|
greater_than:Float!
|
||||||
|
gt:Float!
|
||||||
|
gte:Float!
|
||||||
|
has_key:Float!
|
||||||
|
has_key_all:[Float!]!
|
||||||
|
has_key_any:[Float!]!
|
||||||
|
ilike:String!
|
||||||
|
in:[Float!]!
|
||||||
|
is_null:Boolean!
|
||||||
|
lesser_or_equals:Float!
|
||||||
|
lesser_than:Float!
|
||||||
|
like:String!
|
||||||
|
lt:Float!
|
||||||
|
lte:Float!
|
||||||
|
neq:Float!
|
||||||
|
nilike:String!
|
||||||
|
nin:[Float!]!
|
||||||
|
nlike:String!
|
||||||
|
not_equals:Float!
|
||||||
|
not_ilike:String!
|
||||||
|
not_in:[Float!]!
|
||||||
|
not_like:String!
|
||||||
|
not_similar:String!
|
||||||
|
nsimilar:String!
|
||||||
|
similar:String!
|
||||||
|
}
|
||||||
|
input IntExpression {
|
||||||
|
contained_in:String!
|
||||||
|
contains:[Int!]!
|
||||||
|
eq:Int!
|
||||||
|
equals:Int!
|
||||||
|
greater_or_equals:Int!
|
||||||
|
greater_than:Int!
|
||||||
|
gt:Int!
|
||||||
|
gte:Int!
|
||||||
|
has_key:Int!
|
||||||
|
has_key_all:[Int!]!
|
||||||
|
has_key_any:[Int!]!
|
||||||
|
ilike:String!
|
||||||
|
in:[Int!]!
|
||||||
|
is_null:Boolean!
|
||||||
|
lesser_or_equals:Int!
|
||||||
|
lesser_than:Int!
|
||||||
|
like:String!
|
||||||
|
lt:Int!
|
||||||
|
lte:Int!
|
||||||
|
neq:Int!
|
||||||
|
nilike:String!
|
||||||
|
nin:[Int!]!
|
||||||
|
nlike:String!
|
||||||
|
not_equals:Int!
|
||||||
|
not_ilike:String!
|
||||||
|
not_in:[Int!]!
|
||||||
|
not_like:String!
|
||||||
|
not_similar:String!
|
||||||
|
nsimilar:String!
|
||||||
|
similar:String!
|
||||||
|
}
|
||||||
|
type Mutation {
|
||||||
|
line_item(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:line_itemInput, update:line_itemInput, upsert:line_itemInput
|
||||||
|
):line_itemOutput
|
||||||
|
line_items(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:line_itemInput, update:line_itemInput, upsert:line_itemInput, inserts:[line_itemInput!]!, updates:[line_itemInput!]!, upserts:[line_itemInput!]!
|
||||||
|
):line_itemOutput
|
||||||
|
product(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:productInput, update:productInput, upsert:productInput
|
||||||
|
):productOutput
|
||||||
|
products(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:productInput, update:productInput, upsert:productInput, inserts:[productInput!]!, updates:[productInput!]!, upserts:[productInput!]!
|
||||||
|
):productOutput
|
||||||
|
user(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:userInput, update:userInput, upsert:userInput
|
||||||
|
):userOutput
|
||||||
|
users(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!, insert:userInput, update:userInput, upsert:userInput, inserts:[userInput!]!, updates:[userInput!]!, upserts:[userInput!]!
|
||||||
|
):userOutput
|
||||||
|
}
|
||||||
|
enum OrderDirection {
|
||||||
|
asc
|
||||||
|
desc
|
||||||
|
}
|
||||||
|
type Query {
|
||||||
|
line_item(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):line_itemOutput
|
||||||
|
line_items(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:line_itemOrderBy!, where:line_itemExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):[line_itemOutput!]!
|
||||||
|
product(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):productOutput
|
||||||
|
products(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:productOrderBy!, where:productExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):[productOutput!]!
|
||||||
|
user(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):userOutput
|
||||||
|
users(
|
||||||
|
"To sort or ordering results just use the order_by argument. This can be combined with where, search, etc to build complex queries to fit you needs."
|
||||||
|
order_by:userOrderBy!, where:userExpression!, limit:Int!, offset:Int!, first:Int!, last:Int!, before:String, after:String,
|
||||||
|
"Finds the record by the primary key"
|
||||||
|
id:Int!
|
||||||
|
):[userOutput!]!
|
||||||
|
}
|
||||||
|
input StringExpression {
|
||||||
|
contained_in:String!
|
||||||
|
contains:[String!]!
|
||||||
|
eq:String!
|
||||||
|
equals:String!
|
||||||
|
greater_or_equals:String!
|
||||||
|
greater_than:String!
|
||||||
|
gt:String!
|
||||||
|
gte:String!
|
||||||
|
has_key:String!
|
||||||
|
has_key_all:[String!]!
|
||||||
|
has_key_any:[String!]!
|
||||||
|
ilike:String!
|
||||||
|
in:[String!]!
|
||||||
|
is_null:Boolean!
|
||||||
|
lesser_or_equals:String!
|
||||||
|
lesser_than:String!
|
||||||
|
like:String!
|
||||||
|
lt:String!
|
||||||
|
lte:String!
|
||||||
|
neq:String!
|
||||||
|
nilike:String!
|
||||||
|
nin:[String!]!
|
||||||
|
nlike:String!
|
||||||
|
not_equals:String!
|
||||||
|
not_ilike:String!
|
||||||
|
not_in:[String!]!
|
||||||
|
not_like:String!
|
||||||
|
not_similar:String!
|
||||||
|
nsimilar:String!
|
||||||
|
similar:String!
|
||||||
|
}
|
||||||
|
input line_itemExpression {
|
||||||
|
and:line_itemExpression!
|
||||||
|
id:IntExpression!
|
||||||
|
not:line_itemExpression!
|
||||||
|
or:line_itemExpression!
|
||||||
|
price:FloatExpression!
|
||||||
|
product:IntExpression!
|
||||||
|
quantity:IntExpression!
|
||||||
|
}
|
||||||
|
input line_itemInput {
|
||||||
|
id:Int!
|
||||||
|
price:Float
|
||||||
|
product:Int
|
||||||
|
quantity:Int
|
||||||
|
}
|
||||||
|
input line_itemOrderBy {
|
||||||
|
id:OrderDirection!
|
||||||
|
price:OrderDirection!
|
||||||
|
product:OrderDirection!
|
||||||
|
quantity:OrderDirection!
|
||||||
|
}
|
||||||
|
type line_itemOutput {
|
||||||
|
avg_id:Int!
|
||||||
|
avg_price:Float
|
||||||
|
avg_product:Int
|
||||||
|
avg_quantity:Int
|
||||||
|
count_id:Int!
|
||||||
|
count_price:Float
|
||||||
|
count_product:Int
|
||||||
|
count_quantity:Int
|
||||||
|
id:Int!
|
||||||
|
max_id:Int!
|
||||||
|
max_price:Float
|
||||||
|
max_product:Int
|
||||||
|
max_quantity:Int
|
||||||
|
min_id:Int!
|
||||||
|
min_price:Float
|
||||||
|
min_product:Int
|
||||||
|
min_quantity:Int
|
||||||
|
price:Float
|
||||||
|
product:Int
|
||||||
|
quantity:Int
|
||||||
|
stddev_id:Int!
|
||||||
|
stddev_pop_id:Int!
|
||||||
|
stddev_pop_price:Float
|
||||||
|
stddev_pop_product:Int
|
||||||
|
stddev_pop_quantity:Int
|
||||||
|
stddev_price:Float
|
||||||
|
stddev_product:Int
|
||||||
|
stddev_quantity:Int
|
||||||
|
stddev_samp_id:Int!
|
||||||
|
stddev_samp_price:Float
|
||||||
|
stddev_samp_product:Int
|
||||||
|
stddev_samp_quantity:Int
|
||||||
|
var_pop_id:Int!
|
||||||
|
var_pop_price:Float
|
||||||
|
var_pop_product:Int
|
||||||
|
var_pop_quantity:Int
|
||||||
|
var_samp_id:Int!
|
||||||
|
var_samp_price:Float
|
||||||
|
var_samp_product:Int
|
||||||
|
var_samp_quantity:Int
|
||||||
|
variance_id:Int!
|
||||||
|
variance_price:Float
|
||||||
|
variance_product:Int
|
||||||
|
variance_quantity:Int
|
||||||
|
}
|
||||||
|
input productExpression {
|
||||||
|
and:productExpression!
|
||||||
|
id:IntExpression!
|
||||||
|
name:StringExpression!
|
||||||
|
not:productExpression!
|
||||||
|
or:productExpression!
|
||||||
|
weight:FloatExpression!
|
||||||
|
}
|
||||||
|
input productInput {
|
||||||
|
id:Int!
|
||||||
|
name:String
|
||||||
|
weight:Float
|
||||||
|
}
|
||||||
|
input productOrderBy {
|
||||||
|
id:OrderDirection!
|
||||||
|
name:OrderDirection!
|
||||||
|
weight:OrderDirection!
|
||||||
|
}
|
||||||
|
type productOutput {
|
||||||
|
avg_id:Int!
|
||||||
|
avg_weight:Float
|
||||||
|
count_id:Int!
|
||||||
|
count_weight:Float
|
||||||
|
id:Int!
|
||||||
|
max_id:Int!
|
||||||
|
max_weight:Float
|
||||||
|
min_id:Int!
|
||||||
|
min_weight:Float
|
||||||
|
name:String
|
||||||
|
stddev_id:Int!
|
||||||
|
stddev_pop_id:Int!
|
||||||
|
stddev_pop_weight:Float
|
||||||
|
stddev_samp_id:Int!
|
||||||
|
stddev_samp_weight:Float
|
||||||
|
stddev_weight:Float
|
||||||
|
var_pop_id:Int!
|
||||||
|
var_pop_weight:Float
|
||||||
|
var_samp_id:Int!
|
||||||
|
var_samp_weight:Float
|
||||||
|
variance_id:Int!
|
||||||
|
variance_weight:Float
|
||||||
|
weight:Float
|
||||||
|
}
|
||||||
|
input userExpression {
|
||||||
|
and:userExpression!
|
||||||
|
full_name:StringExpression!
|
||||||
|
id:IntExpression!
|
||||||
|
not:userExpression!
|
||||||
|
or:userExpression!
|
||||||
|
}
|
||||||
|
input userInput {
|
||||||
|
full_name:String
|
||||||
|
id:Int!
|
||||||
|
}
|
||||||
|
input userOrderBy {
|
||||||
|
full_name:OrderDirection!
|
||||||
|
id:OrderDirection!
|
||||||
|
}
|
||||||
|
type userOutput {
|
||||||
|
avg_id:Int!
|
||||||
|
count_id:Int!
|
||||||
|
full_name:String
|
||||||
|
id:Int!
|
||||||
|
max_id:Int!
|
||||||
|
min_id:Int!
|
||||||
|
stddev_id:Int!
|
||||||
|
stddev_pop_id:Int!
|
||||||
|
stddev_samp_id:Int!
|
||||||
|
var_pop_id:Int!
|
||||||
|
var_samp_id:Int!
|
||||||
|
variance_id:Int!
|
||||||
|
}
|
||||||
|
schema {
|
||||||
|
mutation: Mutation
|
||||||
|
query: Query
|
||||||
|
}
|
@ -0,0 +1,27 @@
|
|||||||
|
package cockraochdb_test
|
||||||
|
|
||||||
|
import (
|
||||||
|
"database/sql"
|
||||||
|
"os"
|
||||||
|
"testing"
|
||||||
|
|
||||||
|
integration_tests "github.com/dosco/super-graph/core/internal/integration_tests"
|
||||||
|
_ "github.com/jackc/pgx/v4/stdlib"
|
||||||
|
"github.com/stretchr/testify/require"
|
||||||
|
)
|
||||||
|
|
||||||
|
func TestCockroachDB(t *testing.T) {
|
||||||
|
|
||||||
|
url, found := os.LookupEnv("SG_POSTGRESQL_TEST_URL")
|
||||||
|
if !found {
|
||||||
|
t.Skip("set the SG_POSTGRESQL_TEST_URL env variable if you want to run integration tests against a PostgreSQL database")
|
||||||
|
}
|
||||||
|
|
||||||
|
db, err := sql.Open("pgx", url)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
integration_tests.DropSchema(t, db)
|
||||||
|
integration_tests.SetupSchema(t, db)
|
||||||
|
integration_tests.TestSuperGraph(t, db, func(t *testing.T) {
|
||||||
|
})
|
||||||
|
}
|
@ -21,9 +21,17 @@ func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
|
|||||||
return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar)
|
return 0, fmt.Errorf("variable '%s' is empty", qc.ActionVar)
|
||||||
}
|
}
|
||||||
|
|
||||||
io.WriteString(c.w, `WITH "_sg_input" AS (SELECT '{{`)
|
io.WriteString(c.w, `WITH "_sg_input" AS (SELECT `)
|
||||||
|
if insert[0] == '[' {
|
||||||
|
io.WriteString(c.w, `json_array_elements(`)
|
||||||
|
}
|
||||||
|
io.WriteString(c.w, `'{{`)
|
||||||
io.WriteString(c.w, qc.ActionVar)
|
io.WriteString(c.w, qc.ActionVar)
|
||||||
io.WriteString(c.w, `}}' :: json AS j)`)
|
io.WriteString(c.w, `}}' :: json`)
|
||||||
|
if insert[0] == '[' {
|
||||||
|
io.WriteString(c.w, `)`)
|
||||||
|
}
|
||||||
|
io.WriteString(c.w, ` AS j)`)
|
||||||
|
|
||||||
st := util.NewStack()
|
st := util.NewStack()
|
||||||
st.Push(kvitem{_type: itemInsert, key: ti.Name, val: insert, ti: ti})
|
st.Push(kvitem{_type: itemInsert, key: ti.Name, val: insert, ti: ti})
|
||||||
@ -90,26 +98,9 @@ func (c *compilerContext) renderInsertStmt(qc *qcode.QCode, w io.Writer, item re
|
|||||||
renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
|
renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
|
||||||
renderNestedInsertRelColumns(w, item.kvitem, true)
|
renderNestedInsertRelColumns(w, item.kvitem, true)
|
||||||
|
|
||||||
io.WriteString(w, ` FROM "_sg_input" i, `)
|
io.WriteString(w, ` FROM "_sg_input" i`)
|
||||||
renderNestedInsertRelTables(w, item.kvitem)
|
renderNestedInsertRelTables(w, item.kvitem)
|
||||||
|
io.WriteString(w, ` RETURNING *)`)
|
||||||
if item.array {
|
|
||||||
io.WriteString(w, `json_populate_recordset`)
|
|
||||||
} else {
|
|
||||||
io.WriteString(w, `json_populate_record`)
|
|
||||||
}
|
|
||||||
|
|
||||||
io.WriteString(w, `(NULL::`)
|
|
||||||
io.WriteString(w, ti.Name)
|
|
||||||
|
|
||||||
if len(item.path) == 0 {
|
|
||||||
io.WriteString(w, `, i.j) t RETURNING *)`)
|
|
||||||
} else {
|
|
||||||
io.WriteString(w, `, i.j->`)
|
|
||||||
joinPath(w, item.path)
|
|
||||||
io.WriteString(w, `) t RETURNING *)`)
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -172,21 +163,21 @@ func renderNestedInsertRelColumns(w io.Writer, item kvitem, values bool) error {
|
|||||||
func renderNestedInsertRelTables(w io.Writer, item kvitem) error {
|
func renderNestedInsertRelTables(w io.Writer, item kvitem) error {
|
||||||
if len(item.items) == 0 {
|
if len(item.items) == 0 {
|
||||||
if item.relPC != nil && item.relPC.Type == RelOneToMany {
|
if item.relPC != nil && item.relPC.Type == RelOneToMany {
|
||||||
quoted(w, item.relPC.Left.Table)
|
|
||||||
io.WriteString(w, `, `)
|
io.WriteString(w, `, `)
|
||||||
|
quoted(w, item.relPC.Left.Table)
|
||||||
}
|
}
|
||||||
} else {
|
} else {
|
||||||
// Render tables needed to set values if child-to-parent
|
// Render tables needed to set values if child-to-parent
|
||||||
// relationship is one-to-many
|
// relationship is one-to-many
|
||||||
for _, v := range item.items {
|
for _, v := range item.items {
|
||||||
if v.relCP.Type == RelOneToMany {
|
if v.relCP.Type == RelOneToMany {
|
||||||
|
io.WriteString(w, `, `)
|
||||||
if v._ctype > 0 {
|
if v._ctype > 0 {
|
||||||
io.WriteString(w, `"_x_`)
|
io.WriteString(w, `"_x_`)
|
||||||
io.WriteString(w, v.relCP.Left.Table)
|
io.WriteString(w, v.relCP.Left.Table)
|
||||||
io.WriteString(w, `", `)
|
io.WriteString(w, `"`)
|
||||||
} else {
|
} else {
|
||||||
quoted(w, v.relCP.Left.Table)
|
quoted(w, v.relCP.Left.Table)
|
||||||
io.WriteString(w, `, `)
|
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
@ -7,9 +7,9 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"io"
|
"io"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/jsn"
|
|
||||||
"github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
"github.com/dosco/super-graph/core/internal/util"
|
"github.com/dosco/super-graph/core/internal/util"
|
||||||
|
"github.com/dosco/super-graph/jsn"
|
||||||
)
|
)
|
||||||
|
|
||||||
type itemType int
|
type itemType int
|
||||||
@ -396,7 +396,12 @@ func renderInsertUpdateColumns(w io.Writer,
|
|||||||
}
|
}
|
||||||
|
|
||||||
if values {
|
if values {
|
||||||
colWithTable(w, "t", cn.Name)
|
io.WriteString(w, `CAST( i.j ->>`)
|
||||||
|
io.WriteString(w, `'`)
|
||||||
|
io.WriteString(w, cn.Name)
|
||||||
|
io.WriteString(w, `' AS `)
|
||||||
|
io.WriteString(w, cn.Type)
|
||||||
|
io.WriteString(w, `)`)
|
||||||
} else {
|
} else {
|
||||||
quoted(w, cn.Name)
|
quoted(w, cn.Name)
|
||||||
}
|
}
|
||||||
|
@ -141,7 +141,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
|
|||||||
c.renderLateralJoin(sel)
|
c.renderLateralJoin(sel)
|
||||||
}
|
}
|
||||||
|
|
||||||
if !ti.Singular {
|
if !ti.IsSingular {
|
||||||
c.renderPluralSelect(sel, ti)
|
c.renderPluralSelect(sel, ti)
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -178,7 +178,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer, vars Variables) (
|
|||||||
io.WriteString(c.w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
aliasWithID(c.w, "__sj", sel.ID)
|
aliasWithID(c.w, "__sj", sel.ID)
|
||||||
|
|
||||||
if !ti.Singular {
|
if !ti.IsSingular {
|
||||||
io.WriteString(c.w, `)`)
|
io.WriteString(c.w, `)`)
|
||||||
aliasWithID(c.w, "__sj", sel.ID)
|
aliasWithID(c.w, "__sj", sel.ID)
|
||||||
}
|
}
|
||||||
@ -438,7 +438,7 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo, vars
|
|||||||
|
|
||||||
io.WriteString(c.w, `SELECT to_jsonb("__sr_`)
|
io.WriteString(c.w, `SELECT to_jsonb("__sr_`)
|
||||||
int2string(c.w, sel.ID)
|
int2string(c.w, sel.ID)
|
||||||
io.WriteString(c.w, `") `)
|
io.WriteString(c.w, `".*) `)
|
||||||
|
|
||||||
if sel.Paging.Type != qcode.PtOffset {
|
if sel.Paging.Type != qcode.PtOffset {
|
||||||
for i := range sel.OrderBy {
|
for i := range sel.OrderBy {
|
||||||
@ -706,7 +706,7 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo, r
|
|||||||
}
|
}
|
||||||
|
|
||||||
switch {
|
switch {
|
||||||
case ti.Singular:
|
case ti.IsSingular:
|
||||||
io.WriteString(c.w, ` LIMIT ('1') :: integer`)
|
io.WriteString(c.w, ` LIMIT ('1') :: integer`)
|
||||||
|
|
||||||
case len(sel.Paging.Limit) != 0:
|
case len(sel.Paging.Limit) != 0:
|
||||||
|
@ -16,12 +16,14 @@ type DBSchema struct {
|
|||||||
type DBTableInfo struct {
|
type DBTableInfo struct {
|
||||||
Name string
|
Name string
|
||||||
Type string
|
Type string
|
||||||
Singular bool
|
IsSingular bool
|
||||||
Columns []DBColumn
|
Columns []DBColumn
|
||||||
PrimaryCol *DBColumn
|
PrimaryCol *DBColumn
|
||||||
TSVCol *DBColumn
|
TSVCol *DBColumn
|
||||||
ColMap map[string]*DBColumn
|
ColMap map[string]*DBColumn
|
||||||
ColIDMap map[int16]*DBColumn
|
ColIDMap map[int16]*DBColumn
|
||||||
|
Singular string
|
||||||
|
Plural string
|
||||||
}
|
}
|
||||||
|
|
||||||
type RelType int
|
type RelType int
|
||||||
@ -89,23 +91,28 @@ func (s *DBSchema) addTable(
|
|||||||
colidmap := make(map[int16]*DBColumn, len(cols))
|
colidmap := make(map[int16]*DBColumn, len(cols))
|
||||||
|
|
||||||
singular := flect.Singularize(t.Key)
|
singular := flect.Singularize(t.Key)
|
||||||
|
plural := flect.Pluralize(t.Key)
|
||||||
|
|
||||||
s.t[singular] = &DBTableInfo{
|
s.t[singular] = &DBTableInfo{
|
||||||
Name: t.Name,
|
Name: t.Name,
|
||||||
Type: t.Type,
|
Type: t.Type,
|
||||||
Singular: true,
|
IsSingular: true,
|
||||||
Columns: cols,
|
Columns: cols,
|
||||||
ColMap: colmap,
|
ColMap: colmap,
|
||||||
ColIDMap: colidmap,
|
ColIDMap: colidmap,
|
||||||
|
Singular: singular,
|
||||||
|
Plural: plural,
|
||||||
}
|
}
|
||||||
|
|
||||||
plural := flect.Pluralize(t.Key)
|
|
||||||
s.t[plural] = &DBTableInfo{
|
s.t[plural] = &DBTableInfo{
|
||||||
Name: t.Name,
|
Name: t.Name,
|
||||||
Type: t.Type,
|
Type: t.Type,
|
||||||
Singular: false,
|
IsSingular: false,
|
||||||
Columns: cols,
|
Columns: cols,
|
||||||
ColMap: colmap,
|
ColMap: colmap,
|
||||||
ColIDMap: colidmap,
|
ColIDMap: colidmap,
|
||||||
|
Singular: singular,
|
||||||
|
Plural: plural,
|
||||||
}
|
}
|
||||||
|
|
||||||
if al, ok := aliases[t.Key]; ok {
|
if al, ok := aliases[t.Key]; ok {
|
||||||
@ -364,6 +371,14 @@ func (s *DBSchema) updateSchemaOTMT(
|
|||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (s *DBSchema) GetTableNames() []string {
|
||||||
|
var names []string
|
||||||
|
for name, _ := range s.t {
|
||||||
|
names = append(names, name)
|
||||||
|
}
|
||||||
|
return names
|
||||||
|
}
|
||||||
|
|
||||||
func (s *DBSchema) GetTable(table string) (*DBTableInfo, error) {
|
func (s *DBSchema) GetTable(table string) (*DBTableInfo, error) {
|
||||||
t, ok := s.t[table]
|
t, ok := s.t[table]
|
||||||
if !ok {
|
if !ok {
|
||||||
|
@ -1,25 +1,25 @@
|
|||||||
=== RUN TestCompileInsert
|
=== RUN TestCompileInsert
|
||||||
=== RUN TestCompileInsert/simpleInsert
|
=== RUN TestCompileInsert/simpleInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id" FROM (SELECT "users"."id" FROM "users" LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/singleInsert
|
=== RUN TestCompileInsert/singleInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT "t"."name", "t"."description", "t"."price", "t"."user_id" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description", "price", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'user_id' AS bigint) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/bulkInsert
|
=== RUN TestCompileInsert/bulkInsert
|
||||||
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{insert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/simpleInsertWithPresets
|
=== RUN TestCompileInsert/simpleInsertWithPresets
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone, 'now' :: timestamp without time zone, '{{user_id}}' :: bigint FROM "_sg_input" i RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertManyToMany
|
=== RUN TestCompileInsert/nestedInsertManyToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT "t"."sale_type", "t"."quantity", "t"."due_date", "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products", json_populate_record(NULL::purchases, i.j) t RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" 
ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "customer_id", "product_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "customers"."id", "products"."id" FROM "_sg_input" i, "customers", "products" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT 
('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT "t"."sale_type", "t"."quantity", "t"."due_date", "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers", json_populate_record(NULL::purchases, i.j) t RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" 
ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "customers" AS (INSERT INTO "customers" ("full_name", "email") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i RETURNING *), "purchases" AS (INSERT INTO "purchases" ("sale_type", "quantity", "due_date", "product_id", "customer_id") SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone), "products"."id", "customers"."id" FROM "_sg_input" i, "products", "customers" RETURNING *) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT 
('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToMany
|
=== RUN TestCompileInsert/nestedInsertOneToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j->'product') t RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOne
|
=== RUN TestCompileInsert/nestedInsertOneToOne
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j->'user') t RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "users"."id" FROM "_sg_input" i, "users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "users"."id" FROM "_sg_input" i, "users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
|
=== RUN TestCompileInsert/nestedInsertOneToManyWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (INSERT INTO "users" ("full_name", "email", "created_at", "updated_at") SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i RETURNING *), "products" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
|
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user", "__sj_2"."json" AS "tags" FROM (SELECT "products"."id", "products"."name", "products"."user_id", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."id" AS "id", "tags_2"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_0"."tags"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
|
=== RUN TestCompileInsert/nestedInsertOneToOneWithConnectArray
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id" = ANY((select a::bigint AS list from json_array_elements_text((i.j->'user'->'connect'->>'id')::json) AS a)) LIMIT 1), "products" AS (INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone), "_x_users"."id" FROM "_sg_input" i, "_x_users" RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileInsert (0.02s)
|
--- PASS: TestCompileInsert (0.02s)
|
||||||
--- PASS: TestCompileInsert/simpleInsert (0.00s)
|
--- PASS: TestCompileInsert/simpleInsert (0.00s)
|
||||||
--- PASS: TestCompileInsert/singleInsert (0.00s)
|
--- PASS: TestCompileInsert/singleInsert (0.00s)
|
||||||
@ -33,67 +33,67 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id"
|
|||||||
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
|
--- PASS: TestCompileInsert/nestedInsertOneToOneWithConnectArray (0.00s)
|
||||||
=== RUN TestCompileMutate
|
=== RUN TestCompileMutate
|
||||||
=== RUN TestCompileMutate/singleUpsert
|
=== RUN TestCompileMutate/singleUpsert
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/singleUpsertWhere
|
=== RUN TestCompileMutate/singleUpsertWhere
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) WHERE (("products"."price") > '3' :: numeric(7,2)) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/bulkUpsert
|
=== RUN TestCompileMutate/bulkUpsert
|
||||||
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_recordset(NULL::products, i.j) t RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{upsert}}' :: json AS j), "products" AS (INSERT INTO "products" ("name", "description") SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i RETURNING *) ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileMutate/delete
|
=== RUN TestCompileMutate/delete
|
||||||
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "products" AS (DELETE FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '1' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileMutate (0.01s)
|
--- PASS: TestCompileMutate (0.00s)
|
||||||
--- PASS: TestCompileMutate/singleUpsert (0.00s)
|
--- PASS: TestCompileMutate/singleUpsert (0.00s)
|
||||||
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
|
--- PASS: TestCompileMutate/singleUpsertWhere (0.00s)
|
||||||
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
|
--- PASS: TestCompileMutate/bulkUpsert (0.00s)
|
||||||
--- PASS: TestCompileMutate/delete (0.00s)
|
--- PASS: TestCompileMutate/delete (0.00s)
|
||||||
=== RUN TestCompileQuery
|
=== RUN TestCompileQuery
|
||||||
=== RUN TestCompileQuery/withComplexArgs
|
=== RUN TestCompileQuery/withComplexArgs
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT DISTINCT ON ("products"."price") "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."id") < '28' :: bigint) AND (("products"."id") >= '20' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) ORDER BY "products"."price" DESC LIMIT ('30') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereAndList
|
=== RUN TestCompileQuery/withWhereAndList
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereIsNull
|
=== RUN TestCompileQuery/withWhereIsNull
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE (((("products"."price") > '10' :: numeric(7,2)) AND NOT (("products"."id") IS NULL) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereMultiOr
|
=== RUN TestCompileQuery/withWhereMultiOr
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND ((("products"."price") < '20' :: numeric(7,2)) OR (("products"."price") > '10' :: numeric(7,2)) OR NOT (("products"."id") IS NULL)))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/fetchByID
|
=== RUN TestCompileQuery/fetchByID
|
||||||
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") = '{{id}}' :: bigint))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/searchQuery
|
=== RUN TestCompileQuery/searchQuery
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."search_rank" AS "search_rank", "products_0"."search_headline_description" AS "search_headline_description" FROM (SELECT "products"."id", "products"."name", ts_rank("products"."tsv", websearch_to_tsquery('{{query}}')) AS "search_rank", ts_headline("products"."description", websearch_to_tsquery('{{query}}')) AS "search_headline_description" FROM "products" WHERE ((("products"."tsv") @@ websearch_to_tsquery('{{query}}'))) LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/oneToMany
|
=== RUN TestCompileQuery/oneToMany
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email", "__sj_1"."json" AS "products" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/oneToManyReverse
|
=== RUN TestCompileQuery/oneToManyReverse
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "__sj_1"."json" AS "users" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."email" AS "email" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "__sj_1"."json" AS "users" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."email" AS "email" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/oneToManyArray
|
=== RUN TestCompileQuery/oneToManyArray
|
||||||
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3") AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('tags', "__sj_0"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."name" AS "name", "products_2"."price" AS "price", "__sj_3"."json" AS "tags" FROM (SELECT "products"."name", "products"."price", "products"."tags" FROM "products" LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "tags_3"."id" AS "id", "tags_3"."name" AS "name" FROM (SELECT "tags"."id", "tags"."name" FROM "tags" WHERE ((("tags"."slug") = any ("products_2"."tags"))) LIMIT ('20') :: integer) AS "tags_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "tags_0"."name" AS "name", "__sj_1"."json" AS "product" FROM (SELECT "tags"."name", "tags"."slug" FROM "tags" LIMIT ('20') :: integer) AS "tags_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" WHERE ((("tags_0"."slug") = any ("products"."tags"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/manyToMany
|
=== RUN TestCompileQuery/manyToMany
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "__sj_1"."json" AS "customers" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/manyToManyReverse
|
=== RUN TestCompileQuery/manyToManyReverse
|
||||||
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('customers', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "__sj_1"."json" AS "products" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_1"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."name" AS "name" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id")) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('20') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunction
|
=== RUN TestCompileQuery/aggFunction
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunctionBlockedByCol
|
=== RUN TestCompileQuery/aggFunctionBlockedByCol
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunctionDisabled
|
=== RUN TestCompileQuery/aggFunctionDisabled
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."name" AS "name" FROM (SELECT "products"."name" FROM "products" GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/aggFunctionWithFilter
|
=== RUN TestCompileQuery/aggFunctionWithFilter
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))) AND (("products"."id") > '10' :: bigint))) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/syntheticTables
|
=== RUN TestCompileQuery/syntheticTables
|
||||||
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('me', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = '{{user_id}}' :: bigint)) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/queryWithVariables
|
=== RUN TestCompileQuery/queryWithVariables
|
||||||
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") = '{{product_price}}' :: numeric(7,2)) AND (("products"."id") = '{{product_id}}' :: bigint) AND ((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2))))) LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withWhereOnRelations
|
=== RUN TestCompileQuery/withWhereOnRelations
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")) AND ((("products"."price") > '3' :: numeric(7,2))))) LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/multiRoot
|
=== RUN TestCompileQuery/multiRoot
|
||||||
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4") AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3") AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('customer', "__sj_0"."json", 'user', "__sj_1"."json", 'product', "__sj_2"."json") as "__root" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "products_2"."id" AS "id", "products_2"."name" AS "name", "__sj_3"."json" AS "customers", "__sj_4"."json" AS "customer" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE (((("products"."price") > '0' :: numeric(7,2)) AND (("products"."price") < '8' :: numeric(7,2)))) LIMIT ('1') :: integer) AS "products_2" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_4".*) AS "json"FROM (SELECT "customers_4"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('1') :: integer) AS "customers_4") AS "__sr_4") AS "__sj_4" ON ('true') LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_3"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_3".*) AS "json"FROM (SELECT "customers_3"."email" AS "email" FROM (SELECT "customers"."email" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_2"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_3") AS "__sr_3") AS "__sj_3") AS "__sj_3" ON ('true')) AS "__sr_2") AS "__sj_2", (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1", (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "customers_0"."id" AS "id" FROM (SELECT "customers"."id" FROM "customers" LIMIT ('1') :: integer) AS "customers_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/jsonColumnAsTable
|
=== RUN TestCompileQuery/jsonColumnAsTable
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "tag_count" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "tag_count_1"."count" AS "count", "__sj_2"."json" AS "tags" FROM (SELECT "tag_count"."count", "tag_count"."tag_id" FROM "products", json_to_recordset("products"."tag_count") AS "tag_count"(tag_id bigint, count int) WHERE ((("products"."id") = ("products_0"."id"))) LIMIT ('1') :: integer) AS "tag_count_1" LEFT OUTER JOIN LATERAL (SELECT coalesce(jsonb_agg("__sj_2"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "tags_2"."name" AS "name" FROM (SELECT "tags"."name" FROM "tags" WHERE ((("tags"."id") = ("tag_count_1"."tag_id"))) LIMIT ('20') :: integer) AS "tags_2") AS "__sr_2") AS "__sj_2") AS "__sj_2" ON ('true')) AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/withCursor
|
=== RUN TestCompileQuery/withCursor
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0") - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json", 'products_cursor', "__sj_0"."cursor") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json", CONCAT_WS(',', max("__cur_0"), max("__cur_1")) as "cursor" FROM (SELECT to_jsonb("__sr_0".*) - '__cur_0' - '__cur_1' AS "json", "__cur_0", "__cur_1"FROM (SELECT "products_0"."name" AS "name", LAST_VALUE("products_0"."price") OVER() AS "__cur_0", LAST_VALUE("products_0"."id") OVER() AS "__cur_1" FROM (WITH "__cur" AS (SELECT a[1] as "price", a[2] as "id" FROM string_to_array('{{cursor}}', ',') as a) SELECT "products"."name", "products"."id", "products"."price" FROM "products", "__cur" WHERE (((("products"."price") < "__cur"."price" :: numeric(7,2)) OR ((("products"."price") = "__cur"."price" :: numeric(7,2)) AND (("products"."id") > "__cur"."id" :: bigint)))) ORDER BY "products"."price" DESC, "products"."id" ASC LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
|
=== RUN TestCompileQuery/nullForAuthRequiredInAnon
|
||||||
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('products', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", NULL AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('20') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/blockedQuery
|
=== RUN TestCompileQuery/blockedQuery
|
||||||
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileQuery/blockedFunctions
|
=== RUN TestCompileQuery/blockedFunctions
|
||||||
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coalesce(jsonb_agg("__sj_0"."json"), '[]') as "json" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."email" AS "email" FROM (SELECT , "users"."email" FROM "users" WHERE (false) GROUP BY "users"."email" LIMIT ('20') :: integer) AS "users_0") AS "__sr_0") AS "__sj_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileQuery (0.02s)
|
--- PASS: TestCompileQuery (0.02s)
|
||||||
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
|
--- PASS: TestCompileQuery/withComplexArgs (0.00s)
|
||||||
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
|
--- PASS: TestCompileQuery/withWhereAndList (0.00s)
|
||||||
@ -121,23 +121,23 @@ SELECT jsonb_build_object('users', "__sj_0"."json") as "__root" FROM (SELECT coa
|
|||||||
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
|
--- PASS: TestCompileQuery/blockedFunctions (0.00s)
|
||||||
=== RUN TestCompileUpdate
|
=== RUN TestCompileUpdate
|
||||||
=== RUN TestCompileUpdate/singleUpdate
|
=== RUN TestCompileUpdate/singleUpdate
|
||||||
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT "t"."name", "t"."description" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{update}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "description") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'description' AS text) FROM "_sg_input" i) WHERE ((("products"."id") = '1' :: bigint) AND (("products"."id") = '{{id}}' :: bigint)) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name" FROM (SELECT "products"."id", "products"."name" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/simpleUpdateWithPresets
|
=== RUN TestCompileUpdate/simpleUpdateWithPresets
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT "t"."name", "t"."price", 'now' :: timestamp without time zone FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), 'now' :: timestamp without time zone FROM "_sg_input" i) WHERE (("products"."user_id") = '{{user_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id" FROM (SELECT "products"."id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateManyToMany
|
=== RUN TestCompileUpdate/nestedUpdateManyToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", 
"products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" 
FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT "t"."sale_type", "t"."quantity", "t"."due_date" FROM "_sg_input" i, json_populate_record(NULL::purchases, i.j) t) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT "t"."full_name", "t"."email" FROM "_sg_input" i, json_populate_record(NULL::customers, i.j->'customer') t) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT "t"."name", "t"."price" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2") AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", 
"products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "purchases" AS (UPDATE "purchases" SET ("sale_type", "quantity", "due_date") = (SELECT CAST( i.j ->>'sale_type' AS character varying), CAST( i.j ->>'quantity' AS integer), CAST( i.j ->>'due_date' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("purchases"."id") = '{{id}}' :: bigint) RETURNING "purchases".*), "customers" AS (UPDATE "customers" SET ("full_name", "email") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "purchases" WHERE (("customers"."id") = ("purchases"."customer_id")) RETURNING "customers".*), "products" AS (UPDATE "products" SET ("name", "price") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)) FROM "_sg_input" i) FROM "purchases" WHERE (("products"."id") = ("purchases"."product_id")) RETURNING "products".*) SELECT jsonb_build_object('purchase', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "purchases_0"."sale_type" AS "sale_type", "purchases_0"."quantity" AS "quantity", "purchases_0"."due_date" AS "due_date", "__sj_1"."json" AS "product", "__sj_2"."json" AS "customer" FROM (SELECT "purchases"."sale_type", "purchases"."quantity", "purchases"."due_date", "purchases"."product_id", "purchases"."customer_id" FROM "purchases" LIMIT ('1') :: integer) AS "purchases_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_2".*) AS "json"FROM (SELECT "customers_2"."id" AS "id", "customers_2"."full_name" AS "full_name", "customers_2"."email" AS "email" FROM (SELECT "customers"."id", "customers"."full_name", "customers"."email" FROM "customers" WHERE ((("customers"."id") = ("purchases_0"."customer_id"))) LIMIT ('1') :: integer) AS "customers_2") AS "__sr_2") AS "__sj_2" ON ('true') LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" 
FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."id") = ("purchases_0"."product_id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToMany
|
=== RUN TestCompileUpdate/nestedUpdateOneToMany
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j->'product') t) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '8' :: bigint) RETURNING "users".*), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) FROM "users" WHERE (("products"."user_id") = ("users"."id") AND "products"."id"= ((i.j->'product'->'where'->>'id'))::bigint) RETURNING "products".*) SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOne
|
=== RUN TestCompileUpdate/nestedUpdateOneToOne
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT "t"."name", "t"."price", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT "t"."email" FROM "_sg_input" i, json_populate_record(NULL::users, i.j->'user') t) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "products" AS (UPDATE "products" SET ("name", "price", "created_at", "updated_at") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*), "users" AS (UPDATE "users" SET ("email") = (SELECT CAST( i.j ->>'email' AS character varying) FROM "_sg_input" i) FROM "products" WHERE (("users"."id") = ("products"."user_id")) RETURNING "users".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToManyWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT "t"."full_name", "t"."email", "t"."created_at", "t"."updated_at" FROM "_sg_input" i, json_populate_record(NULL::users, i.j) t) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "users" AS (UPDATE "users" SET ("full_name", "email", "created_at", "updated_at") = (SELECT CAST( i.j ->>'full_name' AS character varying), CAST( i.j ->>'email' AS character varying), CAST( i.j ->>'created_at' AS timestamp without time zone), CAST( i.j ->>'updated_at' AS timestamp without time zone) FROM "_sg_input" i) WHERE (("users"."id") = '{{id}}' :: bigint) RETURNING "users".*), "products_c" AS ( UPDATE "products" SET "user_id" = "users"."id" FROM "users" WHERE ("products"."id"= ((i.j->'product'->'connect'->>'id'))::bigint) RETURNING "products".*), "products_d" AS ( UPDATE "products" SET "user_id" = NULL FROM "users" WHERE ("products"."id"= ((i.j->'product'->'disconnect'->>'id'))::bigint) RETURNING "products".*), "products" AS (SELECT * FROM "products_c" UNION ALL SELECT * FROM "products_d") SELECT jsonb_build_object('user', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email", "__sj_1"."json" AS "product" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" LIMIT ('1') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "products_1"."id" AS "id", "products_1"."name" AS "name", "products_1"."price" AS "price" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('1') :: integer) AS "products_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithConnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint AND "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1") AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT "id" FROM "_sg_input" i,"users" WHERE "users"."email"= ((i.j->'user'->'connect'->>'email'))::character varying AND "users"."id"= ((i.j->'user'->'connect'->>'id'))::bigint LIMIT 1), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{product_id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "__sj_1"."json" AS "user" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT to_jsonb("__sr_1".*) AS "json"FROM (SELECT "users_1"."id" AS "id", "users_1"."full_name" AS "full_name", "users_1"."email" AS "email" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('1') :: integer) AS "users_1") AS "__sr_1") AS "__sj_1" ON ('true')) AS "__sr_0") AS "__sj_0"
|
||||||
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
|
=== RUN TestCompileUpdate/nestedUpdateOneToOneWithDisconnect
|
||||||
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT "t"."name", "t"."price", "_x_users"."id" FROM "_sg_input" i, "_x_users", json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0") AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FROM (VALUES(NULL::bigint)) AS LOOKUP("id")), "products" AS (UPDATE "products" SET ("name", "price", "user_id") = (SELECT CAST( i.j ->>'name' AS character varying), CAST( i.j ->>'price' AS numeric(7,2)), "_x_users"."id" FROM "_sg_input" i, "_x_users") WHERE (("products"."id") = '{{id}}' :: bigint) RETURNING "products".*) SELECT jsonb_build_object('product', "__sj_0"."json") as "__root" FROM (SELECT to_jsonb("__sr_0".*) AS "json"FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."user_id" AS "user_id" FROM (SELECT "products"."id", "products"."name", "products"."user_id" FROM "products" LIMIT ('1') :: integer) AS "products_0") AS "__sr_0") AS "__sj_0"
|
||||||
--- PASS: TestCompileUpdate (0.02s)
|
--- PASS: TestCompileUpdate (0.02s)
|
||||||
--- PASS: TestCompileUpdate/singleUpdate (0.00s)
|
--- PASS: TestCompileUpdate/singleUpdate (0.00s)
|
||||||
--- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s)
|
--- PASS: TestCompileUpdate/simpleUpdateWithPresets (0.00s)
|
||||||
@ -148,4 +148,4 @@ WITH "_sg_input" AS (SELECT '{{data}}' :: json AS j), "_x_users" AS (SELECT * FR
|
|||||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
|
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithConnect (0.00s)
|
||||||
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
|
--- PASS: TestCompileUpdate/nestedUpdateOneToOneWithDisconnect (0.00s)
|
||||||
PASS
|
PASS
|
||||||
ok github.com/dosco/super-graph/core/internal/psql 0.320s
|
ok github.com/dosco/super-graph/core/internal/psql 0.306s
|
||||||
|
@ -91,25 +91,9 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
|
|||||||
renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
|
renderInsertUpdateColumns(w, qc, jt, ti, sk, true)
|
||||||
renderNestedUpdateRelColumns(w, item.kvitem, true)
|
renderNestedUpdateRelColumns(w, item.kvitem, true)
|
||||||
|
|
||||||
io.WriteString(w, ` FROM "_sg_input" i, `)
|
io.WriteString(w, ` FROM "_sg_input" i`)
|
||||||
renderNestedUpdateRelTables(w, item.kvitem)
|
renderNestedUpdateRelTables(w, item.kvitem)
|
||||||
|
io.WriteString(w, `) `)
|
||||||
if item.array {
|
|
||||||
io.WriteString(w, `json_populate_recordset`)
|
|
||||||
} else {
|
|
||||||
io.WriteString(w, `json_populate_record`)
|
|
||||||
}
|
|
||||||
|
|
||||||
io.WriteString(w, `(NULL::`)
|
|
||||||
io.WriteString(w, ti.Name)
|
|
||||||
|
|
||||||
if len(item.path) == 0 {
|
|
||||||
io.WriteString(w, `, i.j) t)`)
|
|
||||||
} else {
|
|
||||||
io.WriteString(w, `, i.j->`)
|
|
||||||
joinPath(w, item.path)
|
|
||||||
io.WriteString(w, `) t) `)
|
|
||||||
}
|
|
||||||
|
|
||||||
if item.id != 0 {
|
if item.id != 0 {
|
||||||
// Render sql to set id values if child-to-parent
|
// Render sql to set id values if child-to-parent
|
||||||
@ -137,9 +121,11 @@ func (c *compilerContext) renderUpdateStmt(w io.Writer, qc *qcode.QCode, item re
|
|||||||
io.WriteString(w, `)`)
|
io.WriteString(w, `)`)
|
||||||
|
|
||||||
} else {
|
} else {
|
||||||
io.WriteString(w, ` WHERE `)
|
if qc.Selects[0].Where != nil {
|
||||||
if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
|
io.WriteString(w, ` WHERE `)
|
||||||
return err
|
if err := c.renderWhere(&qc.Selects[0], ti); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -202,9 +188,9 @@ func renderNestedUpdateRelTables(w io.Writer, item kvitem) error {
|
|||||||
// relationship is one-to-many
|
// relationship is one-to-many
|
||||||
for _, v := range item.items {
|
for _, v := range item.items {
|
||||||
if v._ctype > 0 && v.relCP.Type == RelOneToMany {
|
if v._ctype > 0 && v.relCP.Type == RelOneToMany {
|
||||||
io.WriteString(w, `"_x_`)
|
io.WriteString(w, `, "_x_`)
|
||||||
io.WriteString(w, v.relCP.Left.Table)
|
io.WriteString(w, v.relCP.Left.Table)
|
||||||
io.WriteString(w, `", `)
|
io.WriteString(w, `"`)
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -1,12 +1,17 @@
|
|||||||
package qcode
|
package qcode
|
||||||
|
|
||||||
func GetQType(gql string) QType {
|
func GetQType(gql string) QType {
|
||||||
|
ic := false
|
||||||
for i := range gql {
|
for i := range gql {
|
||||||
b := gql[i]
|
b := gql[i]
|
||||||
if b == '{' {
|
switch {
|
||||||
|
case b == '#':
|
||||||
|
ic = true
|
||||||
|
case b == '\n':
|
||||||
|
ic = false
|
||||||
|
case !ic && b == '{':
|
||||||
return QTQuery
|
return QTQuery
|
||||||
}
|
case !ic && al(b):
|
||||||
if al(b) {
|
|
||||||
switch b {
|
switch b {
|
||||||
case 'm', 'M':
|
case 'm', 'M':
|
||||||
return QTMutation
|
return QTMutation
|
||||||
|
50
core/internal/qcode/utils_test.go
Normal file
50
core/internal/qcode/utils_test.go
Normal file
@ -0,0 +1,50 @@
|
|||||||
|
package qcode
|
||||||
|
|
||||||
|
import "testing"
|
||||||
|
|
||||||
|
func TestGetQType(t *testing.T) {
|
||||||
|
type args struct {
|
||||||
|
gql string
|
||||||
|
}
|
||||||
|
type ts struct {
|
||||||
|
name string
|
||||||
|
args args
|
||||||
|
want QType
|
||||||
|
}
|
||||||
|
tests := []ts{
|
||||||
|
ts{
|
||||||
|
name: "query",
|
||||||
|
args: args{gql: " query {"},
|
||||||
|
want: QTQuery,
|
||||||
|
},
|
||||||
|
ts{
|
||||||
|
name: "mutation",
|
||||||
|
args: args{gql: " mutation {"},
|
||||||
|
want: QTMutation,
|
||||||
|
},
|
||||||
|
ts{
|
||||||
|
name: "default query",
|
||||||
|
args: args{gql: " {"},
|
||||||
|
want: QTQuery,
|
||||||
|
},
|
||||||
|
ts{
|
||||||
|
name: "default query with comment",
|
||||||
|
args: args{gql: `# query is good
|
||||||
|
{`},
|
||||||
|
want: QTQuery,
|
||||||
|
},
|
||||||
|
ts{
|
||||||
|
name: "failed query with comment",
|
||||||
|
args: args{gql: `# query is good query {`},
|
||||||
|
want: -1,
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, tt := range tests {
|
||||||
|
t.Run(tt.name, func(t *testing.T) {
|
||||||
|
if got := GetQType(tt.args.gql); got != tt.want {
|
||||||
|
t.Errorf("GetQType() = %v, want %v", got, tt.want)
|
||||||
|
}
|
||||||
|
})
|
||||||
|
}
|
||||||
|
}
|
@ -58,21 +58,14 @@ func (sg *SuperGraph) initPrepared() error {
|
|||||||
}
|
}
|
||||||
|
|
||||||
err := sg.prepareStmt(v)
|
err := sg.prepareStmt(v)
|
||||||
if err == nil {
|
if err != nil {
|
||||||
|
sg.log.Printf("WRN %s: %v", v.Name, err)
|
||||||
|
} else {
|
||||||
success++
|
success++
|
||||||
continue
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// if len(v.Vars) == 0 {
|
|
||||||
// logger.Warn().Err(err).Msg(v.Query)
|
|
||||||
// } else {
|
|
||||||
// logger.Warn().Err(err).Msgf("%s %s", v.Vars, v.Query)
|
|
||||||
// }
|
|
||||||
}
|
}
|
||||||
|
|
||||||
// logger.Info().
|
sg.log.Printf("INF allow list: prepared %d / %d queries", success, len(list))
|
||||||
// Msgf("Registered %d of %d queries from allow.list as prepared statements",
|
|
||||||
// success, len(list))
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
@ -84,13 +77,6 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
|||||||
|
|
||||||
qt := qcode.GetQType(query)
|
qt := qcode.GetQType(query)
|
||||||
ct := context.Background()
|
ct := context.Background()
|
||||||
|
|
||||||
tx, err := sg.db.BeginTx(ct, nil)
|
|
||||||
if err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
defer tx.Rollback() //nolint: errcheck
|
|
||||||
|
|
||||||
switch qt {
|
switch qt {
|
||||||
case qcode.QTQuery:
|
case qcode.QTQuery:
|
||||||
var stmts1 []stmt
|
var stmts1 []stmt
|
||||||
@ -108,7 +94,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
|||||||
|
|
||||||
//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
|
//logger.Debug().Msgf("Prepared statement 'query %s' (user)", item.Name)
|
||||||
|
|
||||||
err = sg.prepare(ct, tx, stmts1, stmtHash(item.Name, "user"))
|
err = sg.prepare(ct, stmts1, stmtHash(item.Name, "user"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -124,7 +110,7 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
err = sg.prepare(ct, tx, stmts2, stmtHash(item.Name, "anon"))
|
err = sg.prepare(ct, stmts2, stmtHash(item.Name, "anon"))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
@ -135,36 +121,29 @@ func (sg *SuperGraph) prepareStmt(item allow.Item) error {
|
|||||||
// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
|
// logger.Debug().Msgf("Prepared statement 'mutation %s' (%s)", item.Name, role.Name)
|
||||||
|
|
||||||
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
|
stmts, err := sg.buildRoleStmt(qb, vars, role.Name)
|
||||||
|
if err == psql.ErrAllTablesSkipped {
|
||||||
|
return nil
|
||||||
|
}
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// if len(item.Vars) == 0 {
|
return err
|
||||||
// logger.Warn().Err(err).Msg(item.Query)
|
|
||||||
// } else {
|
|
||||||
// logger.Warn().Err(err).Msgf("%s %s", item.Vars, item.Query)
|
|
||||||
// }
|
|
||||||
continue
|
|
||||||
}
|
}
|
||||||
|
|
||||||
err = sg.prepare(ct, tx, stmts, stmtHash(item.Name, role.Name))
|
err = sg.prepare(ct, stmts, stmtHash(item.Name, role.Name))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
if err := tx.Commit(); err != nil {
|
|
||||||
return err
|
|
||||||
}
|
|
||||||
|
|
||||||
return nil
|
return nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func (sg *SuperGraph) prepare(ct context.Context, tx *sql.Tx, st []stmt, key string) error {
|
func (sg *SuperGraph) prepare(ct context.Context, st []stmt, key string) error {
|
||||||
finalSQL, am := processTemplate(st[0].sql)
|
finalSQL, am := processTemplate(st[0].sql)
|
||||||
|
|
||||||
sd, err := tx.Prepare(finalSQL)
|
sd, err := sg.db.Prepare(finalSQL)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return err
|
return fmt.Errorf("prepare failed: %v: %s", err, finalSQL)
|
||||||
}
|
}
|
||||||
|
|
||||||
sg.prepared[key] = &preparedItem{
|
sg.prepared[key] = &preparedItem{
|
||||||
@ -256,7 +235,9 @@ func (sg *SuperGraph) initAllowList() error {
|
|||||||
sg.log.Printf("WRN allow list disabled no file specified")
|
sg.log.Printf("WRN allow list disabled no file specified")
|
||||||
}
|
}
|
||||||
|
|
||||||
if sg.conf.UseAllowList {
|
// When list is not eabled it is still created and
|
||||||
|
// and new queries are saved to it.
|
||||||
|
if !sg.conf.UseAllowList {
|
||||||
ac = allow.Config{CreateIfNotExists: true, Persist: true}
|
ac = allow.Config{CreateIfNotExists: true, Persist: true}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
382
core/remote.go
382
core/remote.go
@ -1,253 +1,249 @@
|
|||||||
package core
|
package core
|
||||||
|
|
||||||
// import (
|
import (
|
||||||
// "bytes"
|
"bytes"
|
||||||
// "errors"
|
"errors"
|
||||||
// "fmt"
|
"fmt"
|
||||||
// "net/http"
|
"net/http"
|
||||||
// "sync"
|
"sync"
|
||||||
|
|
||||||
// "github.com/cespare/xxhash/v2"
|
"github.com/cespare/xxhash/v2"
|
||||||
// "github.com/dosco/super-graph/jsn"
|
"github.com/dosco/super-graph/core/internal/qcode"
|
||||||
// "github.com/dosco/super-graph/core/internal/qcode"
|
"github.com/dosco/super-graph/jsn"
|
||||||
// )
|
)
|
||||||
|
|
||||||
// func execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]byte, error) {
|
func (sg *SuperGraph) execRemoteJoin(st *stmt, data []byte, hdr http.Header) ([]byte, error) {
|
||||||
// var err error
|
var err error
|
||||||
|
|
||||||
// if len(data) == 0 || st.skipped == 0 {
|
sel := st.qc.Selects
|
||||||
// return data, nil
|
h := xxhash.New()
|
||||||
// }
|
|
||||||
|
|
||||||
// sel := st.qc.Selects
|
// fetch the field name used within the db response json
|
||||||
// h := xxhash.New()
|
// that are used to mark insertion points and the mapping between
|
||||||
|
// those field names and their select objects
|
||||||
|
fids, sfmap := sg.parentFieldIds(h, sel, st.skipped)
|
||||||
|
|
||||||
// // fetch the field name used within the db response json
|
// fetch the field values of the marked insertion points
|
||||||
// // that are used to mark insertion points and the mapping between
|
// these values contain the id to be used with fetching remote data
|
||||||
// // those field names and their select objects
|
from := jsn.Get(data, fids)
|
||||||
// fids, sfmap := parentFieldIds(h, sel, st.skipped)
|
var to []jsn.Field
|
||||||
|
|
||||||
// // fetch the field values of the marked insertion points
|
switch {
|
||||||
// // these values contain the id to be used with fetching remote data
|
case len(from) == 1:
|
||||||
// from := jsn.Get(data, fids)
|
to, err = sg.resolveRemote(hdr, h, from[0], sel, sfmap)
|
||||||
// var to []jsn.Field
|
|
||||||
|
|
||||||
// switch {
|
case len(from) > 1:
|
||||||
// case len(from) == 1:
|
to, err = sg.resolveRemotes(hdr, h, from, sel, sfmap)
|
||||||
// to, err = resolveRemote(hdr, h, from[0], sel, sfmap)
|
|
||||||
|
|
||||||
// case len(from) > 1:
|
default:
|
||||||
// to, err = resolveRemotes(hdr, h, from, sel, sfmap)
|
return nil, errors.New("something wrong no remote ids found in db response")
|
||||||
|
}
|
||||||
|
|
||||||
// default:
|
if err != nil {
|
||||||
// return nil, errors.New("something wrong no remote ids found in db response")
|
return nil, err
|
||||||
// }
|
}
|
||||||
|
|
||||||
// if err != nil {
|
var ob bytes.Buffer
|
||||||
// return nil, err
|
|
||||||
// }
|
|
||||||
|
|
||||||
// var ob bytes.Buffer
|
err = jsn.Replace(&ob, data, from, to)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
// err = jsn.Replace(&ob, data, from, to)
|
return ob.Bytes(), nil
|
||||||
// if err != nil {
|
}
|
||||||
// return nil, err
|
|
||||||
// }
|
|
||||||
|
|
||||||
// return ob.Bytes(), nil
|
func (sg *SuperGraph) resolveRemote(
|
||||||
// }
|
hdr http.Header,
|
||||||
|
h *xxhash.Digest,
|
||||||
|
field jsn.Field,
|
||||||
|
sel []qcode.Select,
|
||||||
|
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
|
||||||
|
|
||||||
// func resolveRemote(
|
// replacement data for the marked insertion points
|
||||||
// hdr http.Header,
|
// key and value will be replaced by whats below
|
||||||
// h *xxhash.Digest,
|
toA := [1]jsn.Field{}
|
||||||
// field jsn.Field,
|
to := toA[:1]
|
||||||
// sel []qcode.Select,
|
|
||||||
// sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
|
|
||||||
|
|
||||||
// // replacement data for the marked insertion points
|
// use the json key to find the related Select object
|
||||||
// // key and value will be replaced by whats below
|
k1 := xxhash.Sum64(field.Key)
|
||||||
// toA := [1]jsn.Field{}
|
|
||||||
// to := toA[:1]
|
|
||||||
|
|
||||||
// // use the json key to find the related Select object
|
s, ok := sfmap[k1]
|
||||||
// k1 := xxhash.Sum64(field.Key)
|
if !ok {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
p := sel[s.ParentID]
|
||||||
|
|
||||||
// s, ok := sfmap[k1]
|
// then use the Table nme in the Select and it's parent
|
||||||
// if !ok {
|
// to find the resolver to use for this relationship
|
||||||
// return nil, nil
|
k2 := mkkey(h, s.Name, p.Name)
|
||||||
// }
|
|
||||||
// p := sel[s.ParentID]
|
|
||||||
|
|
||||||
// // then use the Table nme in the Select and it's parent
|
r, ok := sg.rmap[k2]
|
||||||
// // to find the resolver to use for this relationship
|
if !ok {
|
||||||
// k2 := mkkey(h, s.Name, p.Name)
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
// r, ok := rmap[k2]
|
id := jsn.Value(field.Value)
|
||||||
// if !ok {
|
if len(id) == 0 {
|
||||||
// return nil, nil
|
return nil, nil
|
||||||
// }
|
}
|
||||||
|
|
||||||
// id := jsn.Value(field.Value)
|
//st := time.Now()
|
||||||
// if len(id) == 0 {
|
|
||||||
// return nil, nil
|
|
||||||
// }
|
|
||||||
|
|
||||||
// //st := time.Now()
|
b, err := r.Fn(hdr, id)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
// b, err := r.Fn(hdr, id)
|
if len(r.Path) != 0 {
|
||||||
// if err != nil {
|
b = jsn.Strip(b, r.Path)
|
||||||
// return nil, err
|
}
|
||||||
// }
|
|
||||||
|
|
||||||
// if len(r.Path) != 0 {
|
var ob bytes.Buffer
|
||||||
// b = jsn.Strip(b, r.Path)
|
|
||||||
// }
|
|
||||||
|
|
||||||
// var ob bytes.Buffer
|
if len(s.Cols) != 0 {
|
||||||
|
err = jsn.Filter(&ob, b, colsToList(s.Cols))
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
// if len(s.Cols) != 0 {
|
} else {
|
||||||
// err = jsn.Filter(&ob, b, colsToList(s.Cols))
|
ob.WriteString("null")
|
||||||
// if err != nil {
|
}
|
||||||
// return nil, err
|
|
||||||
// }
|
|
||||||
|
|
||||||
// } else {
|
to[0] = jsn.Field{Key: []byte(s.FieldName), Value: ob.Bytes()}
|
||||||
// ob.WriteString("null")
|
return to, nil
|
||||||
// }
|
}
|
||||||
|
|
||||||
// to[0] = jsn.Field{Key: []byte(s.FieldName), Value: ob.Bytes()}
|
func (sg *SuperGraph) resolveRemotes(
|
||||||
// return to, nil
|
hdr http.Header,
|
||||||
// }
|
h *xxhash.Digest,
|
||||||
|
from []jsn.Field,
|
||||||
|
sel []qcode.Select,
|
||||||
|
sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
|
||||||
|
|
||||||
// func resolveRemotes(
|
// replacement data for the marked insertion points
|
||||||
// hdr http.Header,
|
// key and value will be replaced by whats below
|
||||||
// h *xxhash.Digest,
|
to := make([]jsn.Field, len(from))
|
||||||
// from []jsn.Field,
|
|
||||||
// sel []qcode.Select,
|
|
||||||
// sfmap map[uint64]*qcode.Select) ([]jsn.Field, error) {
|
|
||||||
|
|
||||||
// // replacement data for the marked insertion points
|
var wg sync.WaitGroup
|
||||||
// // key and value will be replaced by whats below
|
wg.Add(len(from))
|
||||||
// to := make([]jsn.Field, len(from))
|
|
||||||
|
|
||||||
// var wg sync.WaitGroup
|
var cerr error
|
||||||
// wg.Add(len(from))
|
|
||||||
|
|
||||||
// var cerr error
|
for i, id := range from {
|
||||||
|
|
||||||
// for i, id := range from {
|
// use the json key to find the related Select object
|
||||||
|
k1 := xxhash.Sum64(id.Key)
|
||||||
|
|
||||||
// // use the json key to find the related Select object
|
s, ok := sfmap[k1]
|
||||||
// k1 := xxhash.Sum64(id.Key)
|
if !ok {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
p := sel[s.ParentID]
|
||||||
|
|
||||||
// s, ok := sfmap[k1]
|
// then use the Table nme in the Select and it's parent
|
||||||
// if !ok {
|
// to find the resolver to use for this relationship
|
||||||
// return nil, nil
|
k2 := mkkey(h, s.Name, p.Name)
|
||||||
// }
|
|
||||||
// p := sel[s.ParentID]
|
|
||||||
|
|
||||||
// // then use the Table nme in the Select and it's parent
|
r, ok := sg.rmap[k2]
|
||||||
// // to find the resolver to use for this relationship
|
if !ok {
|
||||||
// k2 := mkkey(h, s.Name, p.Name)
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
// r, ok := rmap[k2]
|
id := jsn.Value(id.Value)
|
||||||
// if !ok {
|
if len(id) == 0 {
|
||||||
// return nil, nil
|
return nil, nil
|
||||||
// }
|
}
|
||||||
|
|
||||||
// id := jsn.Value(id.Value)
|
go func(n int, id []byte, s *qcode.Select) {
|
||||||
// if len(id) == 0 {
|
defer wg.Done()
|
||||||
// return nil, nil
|
|
||||||
// }
|
|
||||||
|
|
||||||
// go func(n int, id []byte, s *qcode.Select) {
|
//st := time.Now()
|
||||||
// defer wg.Done()
|
|
||||||
|
|
||||||
// //st := time.Now()
|
b, err := r.Fn(hdr, id)
|
||||||
|
if err != nil {
|
||||||
|
cerr = fmt.Errorf("%s: %s", s.Name, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// b, err := r.Fn(hdr, id)
|
if len(r.Path) != 0 {
|
||||||
// if err != nil {
|
b = jsn.Strip(b, r.Path)
|
||||||
// cerr = fmt.Errorf("%s: %s", s.Name, err)
|
}
|
||||||
// return
|
|
||||||
// }
|
|
||||||
|
|
||||||
// if len(r.Path) != 0 {
|
var ob bytes.Buffer
|
||||||
// b = jsn.Strip(b, r.Path)
|
|
||||||
// }
|
|
||||||
|
|
||||||
// var ob bytes.Buffer
|
if len(s.Cols) != 0 {
|
||||||
|
err = jsn.Filter(&ob, b, colsToList(s.Cols))
|
||||||
|
if err != nil {
|
||||||
|
cerr = fmt.Errorf("%s: %s", s.Name, err)
|
||||||
|
return
|
||||||
|
}
|
||||||
|
|
||||||
// if len(s.Cols) != 0 {
|
} else {
|
||||||
// err = jsn.Filter(&ob, b, colsToList(s.Cols))
|
ob.WriteString("null")
|
||||||
// if err != nil {
|
}
|
||||||
// cerr = fmt.Errorf("%s: %s", s.Name, err)
|
|
||||||
// return
|
|
||||||
// }
|
|
||||||
|
|
||||||
// } else {
|
to[n] = jsn.Field{Key: []byte(s.FieldName), Value: ob.Bytes()}
|
||||||
// ob.WriteString("null")
|
}(i, id, s)
|
||||||
// }
|
}
|
||||||
|
wg.Wait()
|
||||||
|
|
||||||
// to[n] = jsn.Field{Key: []byte(s.FieldName), Value: ob.Bytes()}
|
return to, cerr
|
||||||
// }(i, id, s)
|
}
|
||||||
// }
|
|
||||||
// wg.Wait()
|
|
||||||
|
|
||||||
// return to, cerr
|
func (sg *SuperGraph) parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
|
||||||
// }
|
[][]byte,
|
||||||
|
map[uint64]*qcode.Select) {
|
||||||
|
|
||||||
// func parentFieldIds(h *xxhash.Digest, sel []qcode.Select, skipped uint32) (
|
c := 0
|
||||||
// [][]byte,
|
for i := range sel {
|
||||||
// map[uint64]*qcode.Select) {
|
s := &sel[i]
|
||||||
|
if isSkipped(skipped, uint32(s.ID)) {
|
||||||
|
c++
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// c := 0
|
// list of keys (and it's related value) to extract from
|
||||||
// for i := range sel {
|
// the db json response
|
||||||
// s := &sel[i]
|
fm := make([][]byte, c)
|
||||||
// if isSkipped(skipped, uint32(s.ID)) {
|
|
||||||
// c++
|
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
// // list of keys (and it's related value) to extract from
|
// mapping between the above extracted key and a Select
|
||||||
// // the db json response
|
// object
|
||||||
// fm := make([][]byte, c)
|
sm := make(map[uint64]*qcode.Select, c)
|
||||||
|
n := 0
|
||||||
|
|
||||||
// // mapping between the above extracted key and a Select
|
for i := range sel {
|
||||||
// // object
|
s := &sel[i]
|
||||||
// sm := make(map[uint64]*qcode.Select, c)
|
|
||||||
// n := 0
|
|
||||||
|
|
||||||
// for i := range sel {
|
if !isSkipped(skipped, uint32(s.ID)) {
|
||||||
// s := &sel[i]
|
continue
|
||||||
|
}
|
||||||
|
|
||||||
// if !isSkipped(skipped, uint32(s.ID)) {
|
p := sel[s.ParentID]
|
||||||
// continue
|
k := mkkey(h, s.Name, p.Name)
|
||||||
// }
|
|
||||||
|
|
||||||
// p := sel[s.ParentID]
|
if r, ok := sg.rmap[k]; ok {
|
||||||
// k := mkkey(h, s.Name, p.Name)
|
fm[n] = r.IDField
|
||||||
|
n++
|
||||||
|
|
||||||
// if r, ok := rmap[k]; ok {
|
k := xxhash.Sum64(r.IDField)
|
||||||
// fm[n] = r.IDField
|
sm[k] = s
|
||||||
// n++
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// k := xxhash.Sum64(r.IDField)
|
return fm, sm
|
||||||
// sm[k] = s
|
}
|
||||||
// }
|
|
||||||
// }
|
|
||||||
|
|
||||||
// return fm, sm
|
func isSkipped(n uint32, pos uint32) bool {
|
||||||
// }
|
return ((n & (1 << pos)) != 0)
|
||||||
|
}
|
||||||
|
|
||||||
// func isSkipped(n uint32, pos uint32) bool {
|
func colsToList(cols []qcode.Column) []string {
|
||||||
// return ((n & (1 << pos)) != 0)
|
var f []string
|
||||||
// }
|
|
||||||
|
|
||||||
// func colsToList(cols []qcode.Column) []string {
|
for i := range cols {
|
||||||
// var f []string
|
f = append(f, cols[i].Name)
|
||||||
|
}
|
||||||
// for i := range cols {
|
return f
|
||||||
// f = append(f, cols[i].Name)
|
}
|
||||||
// }
|
|
||||||
// return f
|
|
||||||
// }
|
|
||||||
|
127
core/resolve.go
127
core/resolve.go
@ -6,90 +6,90 @@ import (
|
|||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
|
"github.com/cespare/xxhash/v2"
|
||||||
|
"github.com/dosco/super-graph/core/internal/psql"
|
||||||
"github.com/dosco/super-graph/jsn"
|
"github.com/dosco/super-graph/jsn"
|
||||||
)
|
)
|
||||||
|
|
||||||
var (
|
|
||||||
rmap map[uint64]*resolvFn
|
|
||||||
)
|
|
||||||
|
|
||||||
type resolvFn struct {
|
type resolvFn struct {
|
||||||
IDField []byte
|
IDField []byte
|
||||||
Path [][]byte
|
Path [][]byte
|
||||||
Fn func(h http.Header, id []byte) ([]byte, error)
|
Fn func(h http.Header, id []byte) ([]byte, error)
|
||||||
}
|
}
|
||||||
|
|
||||||
// func initResolvers() {
|
func (sg *SuperGraph) initResolvers() error {
|
||||||
// var err error
|
var err error
|
||||||
// rmap = make(map[uint64]*resolvFn)
|
sg.rmap = make(map[uint64]*resolvFn)
|
||||||
|
|
||||||
// for _, t := range conf.Tables {
|
for _, t := range sg.conf.Tables {
|
||||||
// err = initRemotes(t)
|
err = sg.initRemotes(t)
|
||||||
// if err != nil {
|
if err != nil {
|
||||||
// break
|
break
|
||||||
// }
|
}
|
||||||
// }
|
}
|
||||||
|
|
||||||
// if err != nil {
|
if err != nil {
|
||||||
// errlog.Fatal().Err(err).Msg("failed to initialize resolvers")
|
return fmt.Errorf("failed to initialize resolvers: %v", err)
|
||||||
// }
|
}
|
||||||
// }
|
|
||||||
|
|
||||||
// func initRemotes(t Table) error {
|
return nil
|
||||||
// h := xxhash.New()
|
}
|
||||||
|
|
||||||
// for _, r := range t.Remotes {
|
func (sg *SuperGraph) initRemotes(t Table) error {
|
||||||
// // defines the table column to be used as an id in the
|
h := xxhash.New()
|
||||||
// // remote request
|
|
||||||
// idcol := r.ID
|
|
||||||
|
|
||||||
// // if no table column specified in the config then
|
for _, r := range t.Remotes {
|
||||||
// // use the primary key of the table as the id
|
// defines the table column to be used as an id in the
|
||||||
// if len(idcol) == 0 {
|
// remote request
|
||||||
// pcol, err := pcompile.IDColumn(t.Name)
|
idcol := r.ID
|
||||||
// if err != nil {
|
|
||||||
// return err
|
|
||||||
// }
|
|
||||||
// idcol = pcol.Key
|
|
||||||
// }
|
|
||||||
// idk := fmt.Sprintf("__%s_%s", t.Name, idcol)
|
|
||||||
|
|
||||||
// // register a relationship between the remote data
|
// if no table column specified in the config then
|
||||||
// // and the database table
|
// use the primary key of the table as the id
|
||||||
|
if len(idcol) == 0 {
|
||||||
|
pcol, err := sg.pc.IDColumn(t.Name)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
idcol = pcol.Key
|
||||||
|
}
|
||||||
|
idk := fmt.Sprintf("__%s_%s", t.Name, idcol)
|
||||||
|
|
||||||
// val := &psql.DBRel{Type: psql.RelRemote}
|
// register a relationship between the remote data
|
||||||
// val.Left.Col = idcol
|
// and the database table
|
||||||
// val.Right.Col = idk
|
|
||||||
|
|
||||||
// err := pcompile.AddRelationship(strings.ToLower(r.Name), t.Name, val)
|
val := &psql.DBRel{Type: psql.RelRemote}
|
||||||
// if err != nil {
|
val.Left.Col = idcol
|
||||||
// return err
|
val.Right.Col = idk
|
||||||
// }
|
|
||||||
|
|
||||||
// // the function thats called to resolve this remote
|
err := sg.pc.AddRelationship(sanitize(r.Name), t.Name, val)
|
||||||
// // data request
|
if err != nil {
|
||||||
// fn := buildFn(r)
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
// path := [][]byte{}
|
// the function thats called to resolve this remote
|
||||||
// for _, p := range strings.Split(r.Path, ".") {
|
// data request
|
||||||
// path = append(path, []byte(p))
|
fn := buildFn(r)
|
||||||
// }
|
|
||||||
|
|
||||||
// rf := &resolvFn{
|
path := [][]byte{}
|
||||||
// IDField: []byte(idk),
|
for _, p := range strings.Split(r.Path, ".") {
|
||||||
// Path: path,
|
path = append(path, []byte(p))
|
||||||
// Fn: fn,
|
}
|
||||||
// }
|
|
||||||
|
|
||||||
// // index resolver obj by parent and child names
|
rf := &resolvFn{
|
||||||
// rmap[mkkey(h, r.Name, t.Name)] = rf
|
IDField: []byte(idk),
|
||||||
|
Path: path,
|
||||||
|
Fn: fn,
|
||||||
|
}
|
||||||
|
|
||||||
// // index resolver obj by IDField
|
// index resolver obj by parent and child names
|
||||||
// rmap[xxhash.Sum64(rf.IDField)] = rf
|
sg.rmap[mkkey(h, r.Name, t.Name)] = rf
|
||||||
// }
|
|
||||||
|
|
||||||
// return nil
|
// index resolver obj by IDField
|
||||||
// }
|
sg.rmap[xxhash.Sum64(rf.IDField)] = rf
|
||||||
|
}
|
||||||
|
|
||||||
|
return nil
|
||||||
|
}
|
||||||
|
|
||||||
func buildFn(r Remote) func(http.Header, []byte) ([]byte, error) {
|
func buildFn(r Remote) func(http.Header, []byte) ([]byte, error) {
|
||||||
reqURL := strings.Replace(r.URL, "$id", "%s", 1)
|
reqURL := strings.Replace(r.URL, "$id", "%s", 1)
|
||||||
@ -114,12 +114,9 @@ func buildFn(r Remote) func(http.Header, []byte) ([]byte, error) {
|
|||||||
req.Header.Set(v, hdr.Get(v))
|
req.Header.Set(v, hdr.Get(v))
|
||||||
}
|
}
|
||||||
|
|
||||||
// logger.Debug().Str("uri", uri).Msg("Remote Join")
|
|
||||||
|
|
||||||
res, err := client.Do(req)
|
res, err := client.Do(req)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
// errlog.Error().Err(err).Msgf("Failed to connect to: %s", uri)
|
return nil, fmt.Errorf("failed to connect to '%s': %v", uri, err)
|
||||||
return nil, err
|
|
||||||
}
|
}
|
||||||
defer res.Body.Close()
|
defer res.Body.Close()
|
||||||
|
|
||||||
|
15
core/utils.go
Normal file
15
core/utils.go
Normal file
@ -0,0 +1,15 @@
|
|||||||
|
package core
|
||||||
|
|
||||||
|
import (
|
||||||
|
"github.com/cespare/xxhash/v2"
|
||||||
|
)
|
||||||
|
|
||||||
|
// nolint: errcheck
|
||||||
|
func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
|
||||||
|
h.WriteString(k1)
|
||||||
|
h.WriteString(k2)
|
||||||
|
v := h.Sum64()
|
||||||
|
h.Reset()
|
||||||
|
|
||||||
|
return v
|
||||||
|
}
|
@ -347,12 +347,10 @@ beer_style
|
|||||||
beer_yeast
|
beer_yeast
|
||||||
|
|
||||||
// Cars
|
// Cars
|
||||||
vehicle
|
car
|
||||||
vehicle_type
|
car_type
|
||||||
car_maker
|
car_maker
|
||||||
car_model
|
car_model
|
||||||
fuel_type
|
|
||||||
transmission_gear_type
|
|
||||||
|
|
||||||
// Text
|
// Text
|
||||||
word
|
word
|
||||||
@ -438,8 +436,8 @@ hipster_paragraph
|
|||||||
hipster_sentence
|
hipster_sentence
|
||||||
|
|
||||||
// File
|
// File
|
||||||
extension
|
file_extension
|
||||||
mine_type
|
file_mine_type
|
||||||
|
|
||||||
// Numbers
|
// Numbers
|
||||||
number
|
number
|
||||||
@ -463,11 +461,18 @@ mac_address
|
|||||||
digit
|
digit
|
||||||
letter
|
letter
|
||||||
lexify
|
lexify
|
||||||
rand_string
|
|
||||||
shuffle_strings
|
shuffle_strings
|
||||||
numerify
|
numerify
|
||||||
```
|
```
|
||||||
|
|
||||||
|
Other utility functions
|
||||||
|
|
||||||
|
```
|
||||||
|
shuffle_strings(string_array)
|
||||||
|
make_slug(text)
|
||||||
|
make_slug_lang(text, lang)
|
||||||
|
```
|
||||||
|
|
||||||
### Migrations
|
### Migrations
|
||||||
|
|
||||||
Easy database migrations is the most important thing when building products backend by a relational database. We make it super easy to manage and migrate your database.
|
Easy database migrations is the most important thing when building products backend by a relational database. We make it super easy to manage and migrate your database.
|
||||||
@ -1704,7 +1709,7 @@ reload_on_config_change: true
|
|||||||
# seed_file: seed.js
|
# seed_file: seed.js
|
||||||
|
|
||||||
# Path pointing to where the migrations can be found
|
# Path pointing to where the migrations can be found
|
||||||
migrations_path: ./config/migrations
|
migrations_path: ./migrations
|
||||||
|
|
||||||
# Postgres related environment Variables
|
# Postgres related environment Variables
|
||||||
# SG_DATABASE_HOST
|
# SG_DATABASE_HOST
|
||||||
@ -1790,18 +1795,37 @@ database:
|
|||||||
# Enable this if you need the user id in triggers, etc
|
# Enable this if you need the user id in triggers, etc
|
||||||
set_user_id: false
|
set_user_id: false
|
||||||
|
|
||||||
# Define additional variables here to be used with filters
|
# database ping timeout is used for db health checking
|
||||||
variables:
|
ping_timeout: 1m
|
||||||
admin_account_id: "5"
|
|
||||||
|
|
||||||
# Field and table names that you wish to block
|
# Set up an secure tls encrypted db connection
|
||||||
blocklist:
|
enable_tls: false
|
||||||
- ar_internal_metadata
|
|
||||||
- schema_migrations
|
# Required for tls. For example with Google Cloud SQL it's
|
||||||
- secret
|
# <gcp-project-id>:<cloud-sql-instance>"
|
||||||
- password
|
# server_name: blah
|
||||||
- encrypted
|
|
||||||
- token
|
# Required for tls. Can be a file path or the contents of the pem file
|
||||||
|
# server_cert: ./server-ca.pem
|
||||||
|
|
||||||
|
# Required for tls. Can be a file path or the contents of the pem file
|
||||||
|
# client_cert: ./client-cert.pem
|
||||||
|
|
||||||
|
# Required for tls. Can be a file path or the contents of the pem file
|
||||||
|
# client_key: ./client-key.pem
|
||||||
|
|
||||||
|
# Define additional variables here to be used with filters
|
||||||
|
variables:
|
||||||
|
admin_account_id: "5"
|
||||||
|
|
||||||
|
# Field and table names that you wish to block
|
||||||
|
blocklist:
|
||||||
|
- ar_internal_metadata
|
||||||
|
- schema_migrations
|
||||||
|
- secret
|
||||||
|
- password
|
||||||
|
- encrypted
|
||||||
|
- token
|
||||||
|
|
||||||
# Create custom actions with their own api endpoints
|
# Create custom actions with their own api endpoints
|
||||||
# For example the below action will be available at /api/v1/actions/refresh_leaderboard_users
|
# For example the below action will be available at /api/v1/actions/refresh_leaderboard_users
|
||||||
|
@ -3,6 +3,11 @@ services:
|
|||||||
db:
|
db:
|
||||||
image: postgres
|
image: postgres
|
||||||
tmpfs: /var/lib/postgresql/data
|
tmpfs: /var/lib/postgresql/data
|
||||||
|
environment:
|
||||||
|
POSTGRES_USER: postgres
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
ports:
|
||||||
|
- "5432:5432"
|
||||||
|
|
||||||
rails_app:
|
rails_app:
|
||||||
image: dosco/super-graph-demo:latest
|
image: dosco/super-graph-demo:latest
|
||||||
|
42
go.mod
42
go.mod
@ -5,31 +5,39 @@ require (
|
|||||||
github.com/NYTimes/gziphandler v1.1.1
|
github.com/NYTimes/gziphandler v1.1.1
|
||||||
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
|
github.com/adjust/gorails v0.0.0-20171013043634-2786ed0c03d3
|
||||||
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
|
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b
|
||||||
github.com/brianvoe/gofakeit v3.18.0+incompatible
|
github.com/brianvoe/gofakeit/v5 v5.2.0
|
||||||
github.com/cespare/xxhash/v2 v2.1.0
|
github.com/cespare/xxhash/v2 v2.1.1
|
||||||
|
github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d
|
||||||
github.com/daaku/go.zipexe v1.0.1 // indirect
|
github.com/daaku/go.zipexe v1.0.1 // indirect
|
||||||
github.com/dgrijalva/jwt-go v3.2.0+incompatible
|
github.com/dgrijalva/jwt-go v3.2.0+incompatible
|
||||||
github.com/dlclark/regexp2 v1.2.0 // indirect
|
github.com/dlclark/regexp2 v1.2.0 // indirect
|
||||||
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733
|
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65
|
||||||
github.com/fsnotify/fsnotify v1.4.7
|
github.com/fsnotify/fsnotify v1.4.9
|
||||||
github.com/garyburd/redigo v1.6.0
|
github.com/garyburd/redigo v1.6.0
|
||||||
github.com/go-sourcemap/sourcemap v2.1.2+incompatible // indirect
|
github.com/go-sourcemap/sourcemap v2.1.3+incompatible // indirect
|
||||||
github.com/gobuffalo/flect v0.1.6
|
github.com/gobuffalo/flect v0.2.1
|
||||||
github.com/jackc/pgtype v1.0.1
|
github.com/gosimple/slug v1.9.0
|
||||||
github.com/jackc/pgx/v4 v4.0.1
|
github.com/jackc/pgtype v1.3.0
|
||||||
github.com/magiconair/properties v1.8.1 // indirect
|
github.com/jackc/pgx/v4 v4.6.0
|
||||||
github.com/pelletier/go-toml v1.4.0 // indirect
|
github.com/mitchellh/mapstructure v1.2.2 // indirect
|
||||||
github.com/pkg/errors v0.8.1
|
github.com/pelletier/go-toml v1.7.0 // indirect
|
||||||
|
github.com/pkg/errors v0.9.1
|
||||||
github.com/rs/cors v1.7.0
|
github.com/rs/cors v1.7.0
|
||||||
github.com/spf13/afero v1.2.2 // indirect
|
github.com/spf13/afero v1.2.2 // indirect
|
||||||
github.com/spf13/cobra v0.0.5
|
github.com/spf13/cast v1.3.1 // indirect
|
||||||
|
github.com/spf13/cobra v1.0.0
|
||||||
|
github.com/spf13/jwalterweatherman v1.1.0 // indirect
|
||||||
github.com/spf13/pflag v1.0.5 // indirect
|
github.com/spf13/pflag v1.0.5 // indirect
|
||||||
github.com/spf13/viper v1.4.0
|
github.com/spf13/viper v1.6.3
|
||||||
github.com/valyala/fasttemplate v1.0.1
|
github.com/stretchr/testify v1.5.1
|
||||||
|
github.com/valyala/fasttemplate v1.1.0
|
||||||
go.uber.org/zap v1.14.1
|
go.uber.org/zap v1.14.1
|
||||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad
|
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904
|
||||||
golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9 // indirect
|
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e // indirect
|
||||||
gopkg.in/yaml.v2 v2.2.7 // indirect
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 // indirect
|
||||||
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 // indirect
|
||||||
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 // indirect
|
||||||
|
gopkg.in/ini.v1 v1.55.0 // indirect
|
||||||
)
|
)
|
||||||
|
|
||||||
go 1.13
|
go 1.13
|
||||||
|
156
go.sum
156
go.sum
@ -19,24 +19,27 @@ github.com/beorn7/perks v0.0.0-20180321164747-3a771d992973/go.mod h1:Dwedo/Wpr24
|
|||||||
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
|
github.com/beorn7/perks v1.0.0/go.mod h1:KWe93zE9D1o94FZ5RNwFwVgaQK1VOXiVxmqh+CedLV8=
|
||||||
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pOvUGt1D1lA5KjYhPBAN/3iWdP7xeFS9F0=
|
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b h1:L/QXpzIa3pOvUGt1D1lA5KjYhPBAN/3iWdP7xeFS9F0=
|
||||||
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
|
github.com/bradfitz/gomemcache v0.0.0-20190913173617-a41fca850d0b/go.mod h1:H0wQNHz2YrLsuXOZozoeDmnHXkNCRmMW0gwFWDfEZDA=
|
||||||
github.com/brianvoe/gofakeit v3.18.0+incompatible h1:wDOmHc9DLG4nRjUVVaxA+CEglKOW72Y5+4WNxUIkjM8=
|
github.com/brianvoe/gofakeit/v5 v5.2.0 h1:De9X+2PQum9U2zCaIDxLV7wx0YBL6c7RN2sFBImzHGI=
|
||||||
github.com/brianvoe/gofakeit v3.18.0+incompatible/go.mod h1:kfwdRA90vvNhPutZWfH7WPaDzUjz+CZFqG+rPkOjGOc=
|
github.com/brianvoe/gofakeit/v5 v5.2.0/go.mod h1:/ZENnKqX+XrN8SORLe/fu5lZDIo1tuPncWuRD+eyhSI=
|
||||||
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
|
github.com/cespare/xxhash v1.1.0 h1:a6HrQnmkObjyL+Gs60czilIUGqrzKutQD6XZog3p+ko=
|
||||||
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
github.com/cespare/xxhash v1.1.0/go.mod h1:XrSqR1VqqWfGrhpAt58auRo0WTKS1nRRg3ghfAqPWnc=
|
||||||
github.com/cespare/xxhash/v2 v2.1.0 h1:yTUvW7Vhb89inJ+8irsUqiWjh8iT6sQPZiQzI6ReGkA=
|
github.com/cespare/xxhash/v2 v2.1.1 h1:6MnRN8NT7+YBpUIWxHtefFZOKTAPgGjpQSxqLNn0+qY=
|
||||||
github.com/cespare/xxhash/v2 v2.1.0/go.mod h1:dgIUBU3pDso/gPgZ1osOZ0iQf77oPR28Tjxl5dIMyVM=
|
github.com/cespare/xxhash/v2 v2.1.1/go.mod h1:VGX0DQ3Q6kWi7AoAeZDth3/j3BFtOZR5XLFGgcrjCOs=
|
||||||
|
github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d h1:JnYHwwRhFmQ8DeyfqmIrzpkkxnZ+iT5V1CUd3Linin0=
|
||||||
|
github.com/chirino/graphql v0.0.0-20200419184546-f015b9dab85d/go.mod h1:+34LPrbHFfKVDPsNfi445UArMEjbeTlCm7C+OpdC7IU=
|
||||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||||
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
|
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
|
||||||
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
||||||
|
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd h1:qMd81Ts1T2OTKmB4acZcyKaMtRnY5Y44NuXGX2GFJ1w=
|
||||||
|
github.com/codahale/hdrhistogram v0.0.0-20161010025455-3a0bb77429bd/go.mod h1:sE/e/2PUdi/liOCUjSTXgM1o87ZssimdTWN964YiIeI=
|
||||||
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
|
github.com/coreos/bbolt v1.3.2/go.mod h1:iRUV2dpdMOn7Bo10OQBFzIJO9kkE559Wcmn+qkEiiKk=
|
||||||
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
github.com/coreos/etcd v3.3.10+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||||
github.com/coreos/go-etcd v2.0.0+incompatible/go.mod h1:Jez6KQU2B/sWsbdaef3ED8NzMklzPG4d5KIOhIy30Tk=
|
github.com/coreos/etcd v3.3.13+incompatible/go.mod h1:uF7uidLiAD3TWHmW31ZFd/JWoc32PjwdhPthX9715RE=
|
||||||
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
github.com/coreos/go-semver v0.2.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk=
|
||||||
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||||
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
github.com/coreos/go-systemd v0.0.0-20190719114852-fd7a80b32e1f/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
|
||||||
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
|
github.com/coreos/pkg v0.0.0-20180928190104-399ea9e2e55f/go.mod h1:E3G3o1h8I7cfcXa63jLwjI0eiQQMgzzUDFVpN/nH/eA=
|
||||||
github.com/cpuguy83/go-md2man v1.0.10 h1:BSKMNlYxDvnunlTymqtgONjNnaRV1sTpcovwwjF22jk=
|
github.com/cpuguy83/go-md2man/v2 v2.0.0/go.mod h1:maD7wRr/U5Z6m/iR4s+kqSMx2CaBsrgA7czyZG/E6dU=
|
||||||
github.com/cpuguy83/go-md2man v1.0.10/go.mod h1:SmD6nW6nTyfqj6ABTjUi3V3JVMnlJmwcJI5acqYI6dE=
|
|
||||||
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
|
github.com/creack/pty v1.1.7/go.mod h1:lj5s0c3V2DBrqTV7llrYr5NG6My20zk30Fl46Y7DoTY=
|
||||||
github.com/daaku/go.zipexe v1.0.0 h1:VSOgZtH418pH9L16hC/JrgSNJbbAL26pj7lmD1+CGdY=
|
github.com/daaku/go.zipexe v1.0.0 h1:VSOgZtH418pH9L16hC/JrgSNJbbAL26pj7lmD1+CGdY=
|
||||||
github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E=
|
github.com/daaku/go.zipexe v1.0.0/go.mod h1:z8IiR6TsVLEYKwXAoE/I+8ys/sDkgTzSL0CLnGVd57E=
|
||||||
@ -50,21 +53,24 @@ github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZm
|
|||||||
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
|
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
|
||||||
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
|
github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk=
|
||||||
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
|
||||||
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733 h1:cyNc40Dx5YNEO94idePU8rhVd3dn+sd04Arh0kDBAaw=
|
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65 h1:Nud597JuGCF/MScrb6NNVDRgmuk8X7w3pFc5GvSsm5E=
|
||||||
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
|
github.com/dop251/goja v0.0.0-20200414142002-77e84ffb8c65/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
|
||||||
|
github.com/friendsofgo/graphiql v0.2.2/go.mod h1:8Y2kZ36AoTGWs78+VRpvATyt3LJBx0SZXmay80ZTRWo=
|
||||||
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
|
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
|
||||||
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
|
||||||
|
github.com/fsnotify/fsnotify v1.4.9 h1:hsms1Qyu0jgnwNXIxa+/V/PDsU6CfLf6CNO8H7IWoS4=
|
||||||
|
github.com/fsnotify/fsnotify v1.4.9/go.mod h1:znqG4EE+3YCdAaPaxE2ZRY/06pZUdp0tY4IgpuI1SZQ=
|
||||||
github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc=
|
github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc=
|
||||||
github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
|
github.com/garyburd/redigo v1.6.0/go.mod h1:NR3MbYisc3/PwhQ00EMzDiPmrwpPxAn5GI05/YaO1SY=
|
||||||
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
github.com/ghodss/yaml v1.0.0/go.mod h1:4dBDuWmgqj2HViK6kFavaiC9ZROes6MMH2rRYeMEF04=
|
||||||
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
github.com/go-kit/kit v0.8.0/go.mod h1:xBxKIO96dXMWWy0MnWVtmwkA9/13aqxPnvrjFYMA2as=
|
||||||
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
|
github.com/go-logfmt/logfmt v0.3.0/go.mod h1:Qt1PoO58o5twSAckw1HlFXLmHsOX5/0LbT9GBnD5lWE=
|
||||||
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
github.com/go-logfmt/logfmt v0.4.0/go.mod h1:3RMwSq7FuexP4Kalkev3ejPJsZTpXXBr9+V4qmtdjCk=
|
||||||
github.com/go-sourcemap/sourcemap v2.1.2+incompatible h1:0b/xya7BKGhXuqFESKM4oIiRo9WOt2ebz7KxfreD6ug=
|
github.com/go-sourcemap/sourcemap v2.1.3+incompatible h1:W1iEw64niKVGogNgBN3ePyLFfuisuzeidWPMPWmECqU=
|
||||||
github.com/go-sourcemap/sourcemap v2.1.2+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
|
github.com/go-sourcemap/sourcemap v2.1.3+incompatible/go.mod h1:F8jJfvm2KbVjc5NqelyYJmf/v5J0dwNLS2mL4sNA1Jg=
|
||||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||||
github.com/gobuffalo/flect v0.1.6 h1:D7KWNRFiCknJKA495/e1BO7oxqf8tbieaLv/ehoZ/+g=
|
github.com/gobuffalo/flect v0.2.1 h1:GPoRjEN0QObosV4XwuoWvSd5uSiL0N3e91/xqyY4crQ=
|
||||||
github.com/gobuffalo/flect v0.1.6/go.mod h1:W3K3X9ksuZfir8f/LrfVtWmCDQFfayuylOJ7sz/Fj80=
|
github.com/gobuffalo/flect v0.2.1/go.mod h1:vmkQwuZYhN5Pc4ljYQZzP+1sq+NEkK+lh20jmEmX3jc=
|
||||||
github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE=
|
github.com/gofrs/uuid v3.2.0+incompatible h1:y12jRkkFxsd7GpqdSZ+/KCs/fJbqpEXSGd4+jfEaewE=
|
||||||
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
|
github.com/gofrs/uuid v3.2.0+incompatible/go.mod h1:b2aQJv3Z4Fp6yNu3cdSllBxTCLRxnplIgP/c0N/04lM=
|
||||||
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
github.com/gogo/protobuf v1.1.1/go.mod h1:r8qH/GZQm5c6nD/R0oafs1akxWv10x8SbQlK7atdtwQ=
|
||||||
@ -76,9 +82,15 @@ github.com/golang/protobuf v1.2.0/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5y
|
|||||||
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
github.com/golang/protobuf v1.3.1/go.mod h1:6lQm79b+lXiMfvg/cZm0SGofjICqVBUtrP5yJMmIC1U=
|
||||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||||
|
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||||
|
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
||||||
|
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1/go.mod h1:wJfORRmW1u3UXTncJ5qlYoELFm8eSnnEO6hX4iZ3EWY=
|
||||||
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
|
github.com/gorilla/websocket v1.4.0 h1:WDFjx/TMzVgy9VdMMQi2K2Emtwi2QcUQsztZ/zLaH/Q=
|
||||||
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
github.com/gorilla/websocket v1.4.0/go.mod h1:E7qHFY5m1UJ88s3WnNqhKjPHQ0heANvMoAMk2YaljkQ=
|
||||||
|
github.com/gorilla/websocket v1.4.1/go.mod h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE=
|
||||||
|
github.com/gosimple/slug v1.9.0 h1:r5vDcYrFz9BmfIAMC829un9hq7hKM4cHUrsv36LbEqs=
|
||||||
|
github.com/gosimple/slug v1.9.0/go.mod h1:AMZ+sOVe65uByN3kgEyf9WEBKBCSS+dJjMX9x4vDJbg=
|
||||||
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
|
github.com/grpc-ecosystem/go-grpc-middleware v1.0.0/go.mod h1:FiyG127CGDf3tlThmgyCl78X/SZQqEOJBCDaAfeWzPs=
|
||||||
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
|
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
|
||||||
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
|
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
|
||||||
@ -90,11 +102,13 @@ github.com/jackc/chunkreader v1.0.0 h1:4s39bBR8ByfqH+DKm8rQA3E1LHZWB9XWcrz8fqaZb
|
|||||||
github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
|
github.com/jackc/chunkreader v1.0.0/go.mod h1:RT6O25fNZIuasFJRyZ4R/Y2BbhasbmZXF9QQ7T3kePo=
|
||||||
github.com/jackc/chunkreader/v2 v2.0.0 h1:DUwgMQuuPnS0rhMXenUtZpqZqrR/30NWY+qQvTpSvEs=
|
github.com/jackc/chunkreader/v2 v2.0.0 h1:DUwgMQuuPnS0rhMXenUtZpqZqrR/30NWY+qQvTpSvEs=
|
||||||
github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
|
github.com/jackc/chunkreader/v2 v2.0.0/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
|
||||||
|
github.com/jackc/chunkreader/v2 v2.0.1 h1:i+RDz65UE+mmpjTfyz0MoVTnzeYxroil2G82ki7MGG8=
|
||||||
|
github.com/jackc/chunkreader/v2 v2.0.1/go.mod h1:odVSm741yZoC3dpHEUXIqA9tQRhFrgOHwnPIn9lDKlk=
|
||||||
github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA=
|
github.com/jackc/pgconn v0.0.0-20190420214824-7e0022ef6ba3/go.mod h1:jkELnwuX+w9qN5YIfX0fl88Ehu4XC3keFuOJJk9pcnA=
|
||||||
github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE=
|
github.com/jackc/pgconn v0.0.0-20190824142844-760dd75542eb/go.mod h1:lLjNuW/+OfW9/pnVKPazfWOgNfH2aPem8YQ7ilXGvJE=
|
||||||
github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s=
|
github.com/jackc/pgconn v0.0.0-20190831204454-2fabfa3c18b7/go.mod h1:ZJKsE/KZfsUgOEh9hBm+xYTstcNHg7UPMVJqRfQxq4s=
|
||||||
github.com/jackc/pgconn v1.0.1 h1:ZANo4pIkeHKIVD1cQMcxu8fwrwIICLblzi9HCjooZeQ=
|
github.com/jackc/pgconn v1.5.0 h1:oFSOilzIZkyg787M1fEmyMfOUUvwj0daqYMfaWwNL4o=
|
||||||
github.com/jackc/pgconn v1.0.1/go.mod h1:GgY/Lbj1VonNaVdNUHs9AwWom3yP2eymFQ1C8z9r/Lk=
|
github.com/jackc/pgconn v1.5.0/go.mod h1:QeD3lBfpTFe8WUnPZWN5KY/mB8FGMIYRdd8P8Jr0fAI=
|
||||||
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
|
github.com/jackc/pgio v1.0.0 h1:g12B9UwVnzGhueNavwioyEEpAmqMe1E/BN9ES+8ovkE=
|
||||||
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
|
github.com/jackc/pgio v1.0.0/go.mod h1:oP+2QK2wFfUWgr+gxjoBH9KGBb31Eio69xUb0w5bYf8=
|
||||||
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2 h1:JVX6jT/XfzNqIjye4717ITLaNwV9mWbJx0dLCpcRzdA=
|
github.com/jackc/pgmock v0.0.0-20190831213851-13a1b77aafa2 h1:JVX6jT/XfzNqIjye4717ITLaNwV9mWbJx0dLCpcRzdA=
|
||||||
@ -107,25 +121,31 @@ github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190420180111-c116219b62db/go.mod
|
|||||||
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
|
github.com/jackc/pgproto3/v2 v2.0.0-alpha1.0.20190609003834-432c2951c711/go.mod h1:uH0AWtUmuShn0bcesswc4aBTWGvw0cAxIJp+6OB//Wg=
|
||||||
github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
|
github.com/jackc/pgproto3/v2 v2.0.0-rc3/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
|
||||||
github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
|
github.com/jackc/pgproto3/v2 v2.0.0-rc3.0.20190831210041-4c03ce451f29/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
|
||||||
github.com/jackc/pgproto3/v2 v2.0.0 h1:FApgMJ/GtaXfI0s8Lvd0kaLaRwMOhs4VH92pwkwQQvU=
|
github.com/jackc/pgproto3/v2 v2.0.1 h1:Rdjp4NFjwHnEslx2b66FfCI2S0LhO4itac3hXz6WX9M=
|
||||||
github.com/jackc/pgproto3/v2 v2.0.0/go.mod h1:ryONWYqW6dqSg1Lw6vXNMXoBJhpzvWKnT95C46ckYeM=
|
github.com/jackc/pgproto3/v2 v2.0.1/go.mod h1:WfJCnwN3HIg9Ish/j3sgWXnAfK8A9Y0bwXYU5xKaEdA=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8 h1:Q3tB+ExeflWUW7AFcAhXqk40s9mnNYLk1nOkKNZ5GnU=
|
||||||
|
github.com/jackc/pgservicefile v0.0.0-20200307190119-3430c5407db8/go.mod h1:vsD4gTJCa9TptPL8sPkXrLZ+hDuNrZCnj29CQpr4X1E=
|
||||||
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
|
github.com/jackc/pgtype v0.0.0-20190421001408-4ed0de4755e0/go.mod h1:hdSHsc1V01CGwFsrv11mJRHWJ6aifDLfdV3aVjFF0zg=
|
||||||
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
|
github.com/jackc/pgtype v0.0.0-20190824184912-ab885b375b90/go.mod h1:KcahbBH1nCMSo2DXpzsoWOAfFkdEtEJpPbVLq8eE+mc=
|
||||||
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
|
github.com/jackc/pgtype v0.0.0-20190828014616-a8802b16cc59/go.mod h1:MWlu30kVJrUS8lot6TQqcg7mtthZ9T0EoIBFiJcmcyw=
|
||||||
github.com/jackc/pgtype v1.0.1 h1:7GWB9n3DdnO3TIbj59wMAE9QcHPL4cy/Bbtk5P1Noow=
|
github.com/jackc/pgtype v1.3.0 h1:l8JvKrby3RI7Kg3bYEeU9TA4vqC38QDpFCfcrC7KuN0=
|
||||||
github.com/jackc/pgtype v1.0.1/go.mod h1:5m2OfMh1wTK7x+Fk952IDmI4nw3nPrvtQdM0ZT4WpC0=
|
github.com/jackc/pgtype v1.3.0/go.mod h1:b0JqxHvPmljG+HQ5IsvQ0yqeSi4nGcDTVjFoiLDb0Ik=
|
||||||
|
github.com/jackc/pgx v3.6.2+incompatible h1:2zP5OD7kiyR3xzRYMhOcXVvkDZsImVXfj+yIyTQf3/o=
|
||||||
|
github.com/jackc/pgx v3.6.2+incompatible/go.mod h1:0ZGrqGqkRlliWnWB4zKnWtjbSWbGkVEFm4TeybAXq+I=
|
||||||
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
|
github.com/jackc/pgx/v4 v4.0.0-20190420224344-cc3461e65d96/go.mod h1:mdxmSJJuR08CZQyj1PVQBHy9XOp5p8/SHH6a0psbY9Y=
|
||||||
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
|
github.com/jackc/pgx/v4 v4.0.0-20190421002000-1b8f0016e912/go.mod h1:no/Y67Jkk/9WuGR0JG/JseM9irFbnEPbuWV2EELPNuM=
|
||||||
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
|
github.com/jackc/pgx/v4 v4.0.0-pre1.0.20190824185557-6972a5742186/go.mod h1:X+GQnOEnf1dqHGpw7JmHqHc1NxDoalibchSk9/RWuDc=
|
||||||
github.com/jackc/pgx/v4 v4.0.1 h1:NNrG0MX2AVEJw1NNDYg+ixSXycCfWWKeqMuQHQkAngc=
|
github.com/jackc/pgx/v4 v4.6.0 h1:Fh0O9GdlG4gYpjpwOqjdEodJUQM9jzN3Hdv7PN0xmm0=
|
||||||
github.com/jackc/pgx/v4 v4.0.1/go.mod h1:NeQ64VJooukJGFLX2r01sJL/gRbKlpvsO2giBvjfgrY=
|
github.com/jackc/pgx/v4 v4.6.0/go.mod h1:vPh43ZzxijXUVJ+t/EmXBtFmbFVO72cuneCT9oAlxAg=
|
||||||
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
github.com/jackc/puddle v0.0.0-20190413234325-e4ced69a3a2b/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||||
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
github.com/jackc/puddle v0.0.0-20190608224051-11cab39313c9/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||||
github.com/jackc/puddle v1.0.0 h1:rbjAshlgKscNa7j0jAM0uNQflis5o2XUogPMVAwtcsM=
|
github.com/jackc/puddle v1.1.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
||||||
github.com/jackc/puddle v1.0.0/go.mod h1:m4B5Dj62Y0fbyuIc15OsIqK0+JU8nkqQjsgx7dvjSWk=
|
|
||||||
github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
|
github.com/jessevdk/go-flags v1.4.0 h1:4IU2WS7AumrZ/40jfhf4QVDMsQwqA7VEHozFRrGARJA=
|
||||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||||
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
|
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
|
||||||
|
github.com/json-iterator/go v1.1.9/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4=
|
||||||
|
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||||
|
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||||
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
||||||
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
|
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
|
||||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||||
@ -158,17 +178,26 @@ github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5
|
|||||||
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
github.com/mitchellh/go-homedir v1.1.0/go.mod h1:SfyaCUpYCn1Vlf4IUYiD9fPX4A5wJrkLzIz1N1q0pr0=
|
||||||
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
|
github.com/mitchellh/mapstructure v1.1.2 h1:fmNYVwqnSfB9mZU6OS2O6GsXM+wcskZDuKQzvN1EDeE=
|
||||||
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
github.com/mitchellh/mapstructure v1.1.2/go.mod h1:FVVH3fgwuzCH5S8UJGiWEs2h04kUh9fWfEaFds41c1Y=
|
||||||
|
github.com/mitchellh/mapstructure v1.2.2 h1:dxe5oCinTXiTIcfgmZecdCzPmAJKd46KsCWc35r0TV4=
|
||||||
|
github.com/mitchellh/mapstructure v1.2.2/go.mod h1:bFUtVrKA4DC2yAKiSyO/QUcy7e+RRV2QTWOzhPopBRo=
|
||||||
|
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
|
||||||
|
github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||||
|
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||||
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229 h1:E2B8qYyeSgv5MXpmzZXRNp8IAQ4vjxIjhpAf5hv/tAg=
|
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229 h1:E2B8qYyeSgv5MXpmzZXRNp8IAQ4vjxIjhpAf5hv/tAg=
|
||||||
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
|
github.com/nkovacs/streamquote v0.0.0-20170412213628-49af9bddb229/go.mod h1:0aYXnNPJ8l7uZxf45rWW1a/uME32OF0rhiYGNQ2oF2E=
|
||||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||||
|
github.com/opentracing/opentracing-go v1.0.2 h1:3jA2P6O1F9UOrWVpwrIo17pu01KWvNWg4X946/Y5Zwg=
|
||||||
|
github.com/opentracing/opentracing-go v1.0.2/go.mod h1:UkNAQd3GIcIGf0SeVgPpRdFStlNbqXla1AfSYxPUl2o=
|
||||||
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
|
github.com/pelletier/go-toml v1.2.0 h1:T5zMGML61Wp+FlcbWjRDT7yAxhJNAiPPLOFECq181zc=
|
||||||
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
|
||||||
github.com/pelletier/go-toml v1.4.0 h1:u3Z1r+oOXJIkxqw34zVhyPgjBsm6X2wn21NWs/HfSeg=
|
github.com/pelletier/go-toml v1.7.0 h1:7utD74fnzVc/cpcyy8sjrlFr5vYpypUixARcHIMIGuI=
|
||||||
github.com/pelletier/go-toml v1.4.0/go.mod h1:PN7xzY2wHTK0K9p34ErDQMlFxa51Fk0OUruD3k1mMwo=
|
github.com/pelletier/go-toml v1.7.0/go.mod h1:vwGMzjaWMwyfHwgIBhI2YUM4fB6nL6lVAvS1LBMMhTE=
|
||||||
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
github.com/pkg/errors v0.8.1 h1:iURUrRGxPUNPdy5/HRSm+Yj6okJ6UtLINN0Q9M4+h3I=
|
||||||
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
|
github.com/pkg/errors v0.9.1 h1:FEBLx1zS214owpjy7qsBeixbURkuhQAwrK5UwLGTwt4=
|
||||||
|
github.com/pkg/errors v0.9.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0=
|
||||||
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
github.com/pmezard/go-difflib v1.0.0 h1:4DBwDE0NGyQoBHbLQYPwSUPoCMWR5BEzIk/f1lZbAQM=
|
||||||
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
github.com/pmezard/go-difflib v1.0.0/go.mod h1:iKH77koFhYxTK1pcRnkKkqfTogsbg7gZNVY4sRDYZ/4=
|
||||||
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
github.com/prometheus/client_golang v0.9.1/go.mod h1:7SWBe2y4D6OKWSNQJUaRYU/AaXPKyh/dDVn+NZz0KFw=
|
||||||
@ -180,6 +209,8 @@ github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
|
|||||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||||
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||||
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
|
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
|
||||||
|
github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be h1:ta7tUOvsPHVHGom5hKW5VXNc2xZIkfCKP8iaqOyYtUQ=
|
||||||
|
github.com/rainycape/unidecode v0.0.0-20150907023854-cb7f23ec59be/go.mod h1:MIDFMn7db1kT65GmV94GzpX9Qdi7N/pQlwb+AN8wh+Q=
|
||||||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
||||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||||
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
|
github.com/rs/cors v1.7.0 h1:+88SsELBHx5r+hZ8TCkggzSstaWNbDvThkVK8H6f9ik=
|
||||||
@ -188,14 +219,22 @@ github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
|
|||||||
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
|
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
|
||||||
github.com/rs/zerolog v1.15.0 h1:uPRuwkWF4J6fGsJ2R0Gn2jB1EQiav9k3S6CSdygQJXY=
|
github.com/rs/zerolog v1.15.0 h1:uPRuwkWF4J6fGsJ2R0Gn2jB1EQiav9k3S6CSdygQJXY=
|
||||||
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
|
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
|
||||||
github.com/russross/blackfriday v1.5.2 h1:HyvC0ARfnZBqnXwABFeSZHpKvJHJJfPz81GNueLj0oo=
|
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||||
github.com/russross/blackfriday v1.5.2/go.mod h1:JO/DiYxRf+HjHt06OyowR9PTA263kcR/rfWxYHBV53g=
|
|
||||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||||
|
github.com/segmentio/ksuid v1.0.2 h1:9yBfKyw4ECGTdALaF09Snw3sLJmYIX6AbPJrAy6MrDc=
|
||||||
|
github.com/segmentio/ksuid v1.0.2/go.mod h1:BXuJDr2byAiHuQaQtSKoXh1J0YmUDurywOXgB2w+OSU=
|
||||||
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE=
|
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24 h1:pntxY8Ary0t43dCZ5dqY4YTJCObLY1kIXl0uzMv+7DE=
|
||||||
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
|
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
|
||||||
|
github.com/shurcooL/httpfs v0.0.0-20190707220628-8d4bc4ba7749/go.mod h1:ZY1cvUeJuFPAdZ/B6v7RHavJWZn2YPVFQ1OSXhCGOkg=
|
||||||
|
github.com/shurcooL/sanitized_anchor_name v1.0.0/go.mod h1:1NzhyTcUVG4SuEtjjoZeVRXNmyL/1OwPU0+IJeTBvfc=
|
||||||
|
github.com/shurcooL/vfsgen v0.0.0-20181202132449-6a9ea43bcacd/go.mod h1:TrYk7fJVaAttu97ZZKrO9UbRa8izdowaMIZcxYMbVaw=
|
||||||
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo=
|
||||||
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
github.com/sirupsen/logrus v1.4.1/go.mod h1:ni0Sbl8bgC9z8RoU9G6nDWqqs/fq4eDPysMBDgk/93Q=
|
||||||
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE=
|
||||||
|
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d h1:zE9ykElWQ6/NYmHa3jpm/yHnI4xSofP+UP6SpjHcSeM=
|
||||||
|
github.com/smartystreets/assertions v0.0.0-20180927180507-b2de0cb4f26d/go.mod h1:OnSkiWE9lh6wB0YB77sQom3nweQdgAjqCqsofrRNTgc=
|
||||||
|
github.com/smartystreets/goconvey v1.6.4 h1:fv0U8FUIMPNf1L9lnHLvLhgicrIVChEkdzIKYqbNC9s=
|
||||||
|
github.com/smartystreets/goconvey v1.6.4/go.mod h1:syvi0/a8iFYH4r/RixwvyeAJjdLS9QV7WQ/tjFTllLA=
|
||||||
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
|
github.com/soheilhy/cmux v0.1.4/go.mod h1:IM3LyeVVIOuxMH7sFAkER9+bJ4dT7Ms6E4xg4kGIyLM=
|
||||||
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
github.com/spaolacci/murmur3 v0.0.0-20180118202830-f09979ecbc72/go.mod h1:JwIasOWyU6f++ZhiEuf87xNszmSA2myDM2Kzu9HwQUA=
|
||||||
github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=
|
github.com/spf13/afero v1.1.2 h1:m8/z1t7/fwjysjQRYbP0RD+bUIF/8tJwPdEZsI83ACI=
|
||||||
@ -204,18 +243,22 @@ github.com/spf13/afero v1.2.2 h1:5jhuqJyZCZf2JRofRvN/nIFgIWNzPa3/Vz8mYylgbWc=
|
|||||||
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
|
github.com/spf13/afero v1.2.2/go.mod h1:9ZxEEn6pIJ8Rxe320qSDBk6AsU0r9pR7Q4OcevTdifk=
|
||||||
github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
|
github.com/spf13/cast v1.3.0 h1:oget//CVOEoFewqQxwr0Ej5yjygnqGkvggSE/gB35Q8=
|
||||||
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
github.com/spf13/cast v1.3.0/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||||
github.com/spf13/cobra v0.0.5 h1:f0B+LkLX6DtmRH1isoNA9VTtNUK9K8xYd28JNNfOv/s=
|
github.com/spf13/cast v1.3.1 h1:nFm6S0SMdyzrzcmThSipiEubIDy8WEXKNZ0UOgiRpng=
|
||||||
github.com/spf13/cobra v0.0.5/go.mod h1:3K3wKZymM7VvHMDS9+Akkh4K60UwM26emMESw8tLCHU=
|
github.com/spf13/cast v1.3.1/go.mod h1:Qx5cxh0v+4UWYiBimWS+eyWzqEqokIECu5etghLkUJE=
|
||||||
|
github.com/spf13/cobra v1.0.0 h1:6m/oheQuQ13N9ks4hubMG6BnvwOeaJrqSPLahSnczz8=
|
||||||
|
github.com/spf13/cobra v1.0.0/go.mod h1:/6GTrnGXV9HjY+aR4k0oJ5tcvakLuG6EuKReYlHNrgE=
|
||||||
github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
|
github.com/spf13/jwalterweatherman v1.0.0 h1:XHEdyB+EcvlqZamSM4ZOMGlc93t6AcsBEu9Gc1vn7yk=
|
||||||
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
github.com/spf13/jwalterweatherman v1.0.0/go.mod h1:cQK4TGJAtQXfYWX+Ddv3mKDzgVb68N+wFjFa4jdeBTo=
|
||||||
|
github.com/spf13/jwalterweatherman v1.1.0 h1:ue6voC5bR5F8YxI5S67j9i582FU4Qvo2bmqnqMYADFk=
|
||||||
|
github.com/spf13/jwalterweatherman v1.1.0/go.mod h1:aNWZUN0dPAAO/Ljvb5BEdw96iTZ0EXowPYD95IqWIGo=
|
||||||
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
|
github.com/spf13/pflag v1.0.3 h1:zPAT6CGy6wXeQ7NtTnaTerfKOsV6V6F8agHXFiazDkg=
|
||||||
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
github.com/spf13/pflag v1.0.3/go.mod h1:DYY7MBk1bdzusC3SYhjObp+wFpr4gzcvqqNjLnInEg4=
|
||||||
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
github.com/spf13/pflag v1.0.5 h1:iy+VFUOCP1a+8yFto/drg2CJ5u0yRoB7fZw3DKv/JXA=
|
||||||
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
github.com/spf13/pflag v1.0.5/go.mod h1:McXfInJRrz4CZXVZOBLb0bTZqETkiAhM9Iw0y3An2Bg=
|
||||||
github.com/spf13/viper v1.3.2 h1:VUFqw5KcqRf7i70GOzW7N+Q7+gxVBkSSqiXB12+JQ4M=
|
|
||||||
github.com/spf13/viper v1.3.2/go.mod h1:ZiWeW+zYFKm7srdB9IoDzzZXaJaI5eL9QjNiN/DMA2s=
|
|
||||||
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
|
github.com/spf13/viper v1.4.0 h1:yXHLWeravcrgGyFSyCgdYpXQ9dR9c/WED3pg1RhxqEU=
|
||||||
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
|
github.com/spf13/viper v1.4.0/go.mod h1:PTJ7Z/lr49W6bUbkmS1V3by4uWynFiR9p7+dSq/yZzE=
|
||||||
|
github.com/spf13/viper v1.6.3 h1:pDDu1OyEDTKzpJwdq4TiuLyMsUgRa/BT5cn5O62NoHs=
|
||||||
|
github.com/spf13/viper v1.6.3/go.mod h1:jUMtyi0/lB5yZH/FjyGAoH7IMNrIhlBf6pXZmbMDvzw=
|
||||||
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.0/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
github.com/stretchr/objx v0.1.1/go.mod h1:HFkY916IF+rwdDfMAkV7OtwuqBVzrE8GR6GFx+wExME=
|
||||||
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoHMkEqE=
|
||||||
@ -224,13 +267,24 @@ github.com/stretchr/testify v1.3.0 h1:TivCn/peBQ7UY8ooIcPgZFpTNSz0Q2U6UrFlUfqbe0
|
|||||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||||
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
github.com/stretchr/testify v1.4.0 h1:2E4SXV/wtOkTonXsotYi4li6zVWxYlZuYNCXe9XRJyk=
|
||||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||||
|
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
|
||||||
|
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||||
|
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
|
||||||
|
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||||
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
|
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
|
||||||
|
github.com/uber-go/atomic v1.3.2 h1:Azu9lPBWRNKzYXSIwRfgRuDuS0YKsK4NFhiQv98gkxo=
|
||||||
|
github.com/uber-go/atomic v1.3.2/go.mod h1:/Ct5t2lcmbJ4OSe/waGBoaVvVqtO0bmtfVNex1PFV8g=
|
||||||
|
github.com/uber/jaeger-client-go v2.14.1-0.20180928181052-40fb3b2c4120+incompatible h1:Dw0AFQs6RGO8RxMPGP2LknN/VtHolVH82P9PP0Ni+9w=
|
||||||
|
github.com/uber/jaeger-client-go v2.14.1-0.20180928181052-40fb3b2c4120+incompatible/go.mod h1:WVhlPFC8FDjOFMMWRy2pZqQJSXxYSwNYOkTr/Z6d3Kk=
|
||||||
|
github.com/uber/jaeger-lib v1.5.0 h1:OHbgr8l656Ub3Fw5k9SWnBfIEwvoHQ+W2y+Aa9D1Uyo=
|
||||||
|
github.com/uber/jaeger-lib v1.5.0/go.mod h1:ComeNDZlWwrWnDv8aPp0Ba6+uUTzImX/AauajbLI56U=
|
||||||
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
||||||
github.com/ugorji/go/codec v0.0.0-20181204163529-d75b2dcb6bc8/go.mod h1:VFNgLljTbGfSG7qAOspJ7OScBnGdDN/yBr0sguwnwf0=
|
|
||||||
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
github.com/valyala/bytebufferpool v1.0.0 h1:GqA5TC/0021Y/b9FG4Oi9Mr3q7XYx6KllzawFIhcdPw=
|
||||||
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
github.com/valyala/bytebufferpool v1.0.0/go.mod h1:6bBcMArwyJ5K/AmCkWv1jt77kVWyCJ6HpOuEn7z0Csc=
|
||||||
github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8=
|
github.com/valyala/fasttemplate v1.0.1 h1:tY9CJiPnMXf1ERmG2EyK7gNUd+c6RKGD0IfU8WdUSz8=
|
||||||
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
|
github.com/valyala/fasttemplate v1.0.1/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
|
||||||
|
github.com/valyala/fasttemplate v1.1.0 h1:RZqt0yGBsps8NGvLSGW804QQqCUYYLsaOjTVHy1Ocw4=
|
||||||
|
github.com/valyala/fasttemplate v1.1.0/go.mod h1:UQGH1tvbgY+Nz5t2n7tXsz52dQxojPUpymEIMZ47gx8=
|
||||||
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
|
github.com/xiang90/probing v0.0.0-20190116061207-43a291ad63a2/go.mod h1:UETIi67q53MR2AWcXfiuqkDkRtnGDLqkBTpCHuJHxtU=
|
||||||
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
github.com/xordataexchange/crypt v0.0.3-0.20170626215501-b2862e3d0a77/go.mod h1:aYKd//L2LvnjZzWKhF00oedf4jCCReLcmhLdhm1A27Q=
|
||||||
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
|
||||||
@ -249,22 +303,21 @@ go.uber.org/zap v1.10.0/go.mod h1:vwi/ZaCAaUcBkycHslxD9B2zi4UTXhF60s6SWpuDF0Q=
|
|||||||
go.uber.org/zap v1.14.1 h1:nYDKopTbvAPq/NrUVZwT15y2lpROBiLLyoRTbXOYWOo=
|
go.uber.org/zap v1.14.1 h1:nYDKopTbvAPq/NrUVZwT15y2lpROBiLLyoRTbXOYWOo=
|
||||||
go.uber.org/zap v1.14.1/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc=
|
go.uber.org/zap v1.14.1/go.mod h1:Mb2vm2krFEG5DV0W9qcHBYFtp/Wku1cvYaqPsS/WYfc=
|
||||||
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
golang.org/x/crypto v0.0.0-20180904163835-0709b304e793/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
||||||
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9 h1:mKdxBk7AujPs8kU4m80U72y/zjbZ3UcXC7dClwKbUI0=
|
|
||||||
golang.org/x/crypto v0.0.0-20181203042331-505ab145d0a9/go.mod h1:6SG95UA2DQfeDnfUPMdvaQW0Q7yPrPDi9nlGo2tz2b4=
|
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2 h1:VklqNMn3ovrHsnt90PveolxSbWFaJdECFbxSq0Mqo2M=
|
||||||
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
golang.org/x/crypto v0.0.0-20190308221718-c2843e01d9a2/go.mod h1:djNgcEr1/C05ACkg1iLfiJU5Ep61QUkGW8qpdssI0+w=
|
||||||
golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
|
golang.org/x/crypto v0.0.0-20190411191339-88737f569e3a/go.mod h1:WFFai1msRO1wXaEeE5yQxYXgSfI8pQAWXbQop6sCtWE=
|
||||||
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190510104115-cbcb75029529/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7 h1:0hQKqeLdqlt5iIwVOBErRisrHJAN57yOiPRQItI20fU=
|
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||||
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad h1:5E5raQxcv+6CZ11RrBYQe5WRbUIWpScjh0kvHZkZIrQ=
|
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904 h1:bXoxMPcSLOq08zI3/c5dEBT6lE4eh+jOh886GHrn6V8=
|
||||||
golang.org/x/crypto v0.0.0-20190927123631-a832865fa7ad/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
golang.org/x/crypto v0.0.0-20200414173820-0848c9571904/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||||
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
golang.org/x/lint v0.0.0-20190313153728-d0100b6bd8b3/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs=
|
golang.org/x/lint v0.0.0-20190930215403-16217165b5de h1:5hukYrvBGR8/eNkX5mdUezrA6JiaEZDtJb9Ei+1LlBs=
|
||||||
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
golang.org/x/lint v0.0.0-20190930215403-16217165b5de/go.mod h1:6SW0HCj/g11FgYtHlgUYUwCkIfeOF89ocIRzGO/8vkc=
|
||||||
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
golang.org/x/mod v0.0.0-20190513183733-4bf6d317e70e/go.mod h1:mXi4GBBbnImb6dmsKGUJ2LatrhH/nqhxcFungHvyanc=
|
||||||
|
golang.org/x/mod v0.1.1-0.20191105210325-c90efee705ee/go.mod h1:QqPTAvyqsEbceGzBzNggFXnrqF1CaUcvgkdR5Ot7KZg=
|
||||||
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
golang.org/x/net v0.0.0-20181220203305-927f97764cc3/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4=
|
||||||
@ -275,6 +328,8 @@ golang.org/x/net v0.0.0-20190522155817-f3200d17e092/go.mod h1:HSz+uSET+XFnRR8LxR
|
|||||||
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190620200207-3b0461eec859/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7 h1:fHDIZ2oxGnUZRN6WgWFCbYBjH9uqVPRCUVUDhs0wnbA=
|
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7 h1:fHDIZ2oxGnUZRN6WgWFCbYBjH9uqVPRCUVUDhs0wnbA=
|
||||||
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
golang.org/x/net v0.0.0-20190813141303-74dc4d7220e7/go.mod h1:z5CRVTTTmAJ677TzLLGU+0bjPO0LkuOLi4/5GtJWs/s=
|
||||||
|
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e h1:3G+cUijn7XD+S4eJFddp53Pv7+slrESplyjG25HgL+k=
|
||||||
|
golang.org/x/net v0.0.0-20200324143707-d3edc9973b7e/go.mod h1:qpuaurCH72eLCgpAm/N6yyVIVM9cpaDIP3A8BGJEC5A=
|
||||||
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
|
||||||
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
|
||||||
@ -285,8 +340,6 @@ golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5h
|
|||||||
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20181107165924-66b7b1311ac8/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a h1:1n5lsVfiQW3yfsRGu98756EH1YthsFqr/5mxHduZW2A=
|
|
||||||
golang.org/x/sys v0.0.0-20181205085412-a5c9d58dba9a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
|
||||||
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190215142949-d0b11bdaac8a/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8=
|
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223 h1:DH4skfRX4EBpamg7iV4ZlCpblAHI6s6TDM39bFZumv8=
|
||||||
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
golang.org/x/sys v0.0.0-20190222072716-a9d3bda3a223/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY=
|
||||||
@ -296,8 +349,10 @@ golang.org/x/sys v0.0.0-20190422165155-953cdadca894/go.mod h1:h1NjWce9XRLGQEsW7w
|
|||||||
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190813064441-fde4db37ae7a/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg=
|
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456 h1:ng0gs1AKnRRuEMZoTLLlbOd+C17zUDepwGQBb/n+JVg=
|
||||||
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20190826190057-c7b8b68b1456/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9 h1:ZBzSG/7F4eNKz2L3GE9o300RX0Az1Bw5HF7PDraD+qU=
|
golang.org/x/sys v0.0.0-20191005200804-aed5e4c7ecf9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/sys v0.0.0-20191128015809-6d18c012aee9/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
golang.org/x/sys v0.0.0-20200323222414-85ca7c5b95cd/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4 h1:opSr2sbRXk5X5/givKrrKj9HXxFpW2sdCiP8MJSKLQY=
|
||||||
|
golang.org/x/sys v0.0.0-20200413165638-669c56c373c4/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||||
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
golang.org/x/text v0.3.0 h1:g61tztE5qeGQ89tm6NTjjM9VPIm088od1l6aSorWRWg=
|
||||||
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
|
||||||
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
golang.org/x/text v0.3.2 h1:tW2bmiBqwgJj/UpqtC8EpXEZVYOwU0yG4iWbprSVAcs=
|
||||||
@ -307,16 +362,22 @@ golang.org/x/tools v0.0.0-20180221164845-07fd8470d635/go.mod h1:n7NCudcB/nEzxVGm
|
|||||||
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20180917221912-90fa682c2a6e/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
golang.org/x/tools v0.0.0-20190114222345-bf090417da8b/go.mod h1:n7NCudcB/nEzxVGmLbDWY5pfWTLqBcC2KZ6jyYvM4mQ=
|
||||||
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
golang.org/x/tools v0.0.0-20190311212946-11955173bddd/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||||
|
golang.org/x/tools v0.0.0-20190328211700-ab21143f2384/go.mod h1:LCzVGOaR6xXOjkQ3onu1FJEFr0SW1gC7cKk1uF8kGRs=
|
||||||
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
golang.org/x/tools v0.0.0-20190425163242-31fd60d6bfdc/go.mod h1:RgjU9mgBXZiqYHBnxXauZ1Gv1EHHAz9KjViQ78xBX0Q=
|
||||||
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
golang.org/x/tools v0.0.0-20190621195816-6e04913cbbac/go.mod h1:/rFqwRUd4F7ZHNgwSSTFct+R/Kf4OFW1sUzUTQQTgfc=
|
||||||
golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20190823170909-c4a336ef6a2f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191029041327-9cc4af7d6b2c/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5 h1:hKsoRgsbwY1NafxrwTs+k64bikrLBkAgPir1TNCj3Zs=
|
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5 h1:hKsoRgsbwY1NafxrwTs+k64bikrLBkAgPir1TNCj3Zs=
|
||||||
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||||
|
golang.org/x/tools v0.0.0-20200128220307-520188d60f50 h1:0qnG0gwzB6QPiLDow10WJDdB38c+hQ7ArxO26Qc1boM=
|
||||||
|
golang.org/x/tools v0.0.0-20200128220307-520188d60f50/go.mod h1:TB2adYChydJhpapKDTa4BR/hXlZSLoq2Wpct/0txZ28=
|
||||||
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7 h1:9zdDQZ7Thm29KFXgAX/+yaf3eVbP7djjWp/dXAppNCc=
|
||||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543 h1:E7g+9GITq07hpfrRu66IVDexMakfv52eLZ2CXBWiKr4=
|
||||||
|
golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||||
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
google.golang.org/appengine v1.1.0/go.mod h1:EbEs0AVv82hx2wNQdGPgUI5lhzA/G0D9YwlJXL52JkM=
|
||||||
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
google.golang.org/genproto v0.0.0-20180817151627-c66870c02cf8/go.mod h1:JiN7NxoALGmiZfu7CAH4rXhgtRTLTxftemlI0sWmxmc=
|
||||||
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
|
||||||
@ -326,15 +387,22 @@ gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405 h1:yhCVgyC4o1eVCa2tZl7eS0r+
|
|||||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15 h1:YR8cESwS4TdDjEe65xsg0ogRM/Nc3DYOhEAlW+xobZo=
|
||||||
|
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||||
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
|
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
|
||||||
|
gopkg.in/ini.v1 v1.51.0 h1:AQvPpx3LzTDM0AjnIRlVFwFFGC+npRopjZxLJj6gdno=
|
||||||
|
gopkg.in/ini.v1 v1.51.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||||
|
gopkg.in/ini.v1 v1.55.0 h1:E8yzL5unfpW3M6fz/eB7Cb5MQAYSZ7GKo4Qth+N2sgQ=
|
||||||
|
gopkg.in/ini.v1 v1.55.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k=
|
||||||
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
gopkg.in/resty.v1 v1.12.0/go.mod h1:mDo4pnntr5jdWRML875a/NmxYqAlA73dVijT2AXvQQo=
|
||||||
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
gopkg.in/yaml.v2 v2.0.0-20170812160011-eb3733d160e7/go.mod h1:JAlM8MvJe8wmxCU4Bli9HhUf9+ttbYbLASfIpnQbh74=
|
||||||
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
gopkg.in/yaml.v2 v2.2.2 h1:ZCJp+EgiOT7lHqUV2J862kp8Qj64Jo6az82+3Td9dZw=
|
||||||
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.7 h1:VUgggvou5XRW9mHwD/yXxIYSMtY0zoKQf/v226p2nyo=
|
gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
gopkg.in/yaml.v2 v2.2.7/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
gopkg.in/yaml.v2 v2.2.8 h1:obN1ZagJSUGI0Ek/LBmuj4SNLPfIny3KsKFopxRdj10=
|
||||||
|
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099 h1:XJP7lxbSxWLOMNdBE4B/STaqVy6L73o0knwj2vIlxnw=
|
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099 h1:XJP7lxbSxWLOMNdBE4B/STaqVy6L73o0knwj2vIlxnw=
|
||||||
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
honnef.co/go/tools v0.0.0-20190102054323-c2f93a96b099/go.mod h1:rf3lG4BRIbNafJWhAfAdb/ePZxsR/4RtNHQocxwk9r4=
|
||||||
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
|
honnef.co/go/tools v0.0.1-2019.2.3 h1:3JgtbtFHMiCmsznwGVTUWbgGov+pVqnlf1dEJTNAXeM=
|
||||||
|
@ -3,8 +3,8 @@ package serv
|
|||||||
import (
|
import (
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
|
"github.com/dosco/super-graph/internal/serv/internal/auth"
|
||||||
|
|
||||||
"github.com/spf13/viper"
|
"github.com/spf13/viper"
|
||||||
)
|
)
|
||||||
@ -60,6 +60,11 @@ type Serv struct {
|
|||||||
PoolSize int32 `mapstructure:"pool_size"`
|
PoolSize int32 `mapstructure:"pool_size"`
|
||||||
MaxRetries int `mapstructure:"max_retries"`
|
MaxRetries int `mapstructure:"max_retries"`
|
||||||
PingTimeout time.Duration `mapstructure:"ping_timeout"`
|
PingTimeout time.Duration `mapstructure:"ping_timeout"`
|
||||||
|
EnableTLS bool `mapstructure:"enable_tls"`
|
||||||
|
ServerName string `mapstructure:"server_name"`
|
||||||
|
ServerCert string `mapstructure:"server_cert"`
|
||||||
|
ClientCert string `mapstructure:"client_cert"`
|
||||||
|
ClientKey string `mapstructure:"client_key"`
|
||||||
} `mapstructure:"database"`
|
} `mapstructure:"database"`
|
||||||
|
|
||||||
Actions []Action
|
Actions []Action
|
@ -156,6 +156,20 @@ func cmdVersion(cmd *cobra.Command, args []string) {
|
|||||||
}
|
}
|
||||||
|
|
||||||
func BuildDetails() string {
|
func BuildDetails() string {
|
||||||
|
if len(version) == 0 {
|
||||||
|
return fmt.Sprintf(`
|
||||||
|
Super Graph (unknown version)
|
||||||
|
For documentation, visit https://supergraph.dev
|
||||||
|
|
||||||
|
To build with version information please use the Makefile
|
||||||
|
> git clone https://github.com/dosco/super-graph
|
||||||
|
> cd super-graph && make install
|
||||||
|
|
||||||
|
Licensed under the Apache Public License 2.0
|
||||||
|
Copyright 2020, Vikram Rangnekar
|
||||||
|
`)
|
||||||
|
}
|
||||||
|
|
||||||
return fmt.Sprintf(`
|
return fmt.Sprintf(`
|
||||||
Super Graph %v
|
Super Graph %v
|
||||||
For documentation, visit https://supergraph.dev
|
For documentation, visit https://supergraph.dev
|
||||||
@ -166,7 +180,7 @@ Branch : %v
|
|||||||
Go version : %v
|
Go version : %v
|
||||||
|
|
||||||
Licensed under the Apache Public License 2.0
|
Licensed under the Apache Public License 2.0
|
||||||
Copyright 2020, Vikram Rangnekar.
|
Copyright 2020, Vikram Rangnekar
|
||||||
`,
|
`,
|
||||||
version,
|
version,
|
||||||
lastCommitSHA,
|
lastCommitSHA,
|
@ -9,7 +9,7 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/migrate"
|
"github.com/dosco/super-graph/internal/serv/internal/migrate"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -55,7 +55,7 @@ func cmdDBReset(cmd *cobra.Command, args []string) {
|
|||||||
func cmdDBCreate(cmd *cobra.Command, args []string) {
|
func cmdDBCreate(cmd *cobra.Command, args []string) {
|
||||||
initConfOnce()
|
initConfOnce()
|
||||||
|
|
||||||
db, err := initDB(conf)
|
db, err := initDB(conf, false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to connect to database: %s", err)
|
log.Fatalf("ERR failed to connect to database: %s", err)
|
||||||
}
|
}
|
||||||
@ -74,7 +74,7 @@ func cmdDBCreate(cmd *cobra.Command, args []string) {
|
|||||||
func cmdDBDrop(cmd *cobra.Command, args []string) {
|
func cmdDBDrop(cmd *cobra.Command, args []string) {
|
||||||
initConfOnce()
|
initConfOnce()
|
||||||
|
|
||||||
db, err := initDB(conf)
|
db, err := initDB(conf, false)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to connect to database: %s", err)
|
log.Fatalf("ERR failed to connect to database: %s", err)
|
||||||
}
|
}
|
||||||
@ -98,8 +98,9 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
|
|||||||
|
|
||||||
initConfOnce()
|
initConfOnce()
|
||||||
name := args[0]
|
name := args[0]
|
||||||
|
migrationsPath := conf.relPath(conf.MigrationsPath)
|
||||||
|
|
||||||
m, err := migrate.FindMigrations(conf.MigrationsPath)
|
m, err := migrate.FindMigrations(migrationsPath)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR error loading migrations: %s", err)
|
log.Fatalf("ERR error loading migrations: %s", err)
|
||||||
}
|
}
|
||||||
@ -107,7 +108,7 @@ func cmdDBNew(cmd *cobra.Command, args []string) {
|
|||||||
mname := fmt.Sprintf("%d_%s.sql", len(m), name)
|
mname := fmt.Sprintf("%d_%s.sql", len(m), name)
|
||||||
|
|
||||||
// Write new migration
|
// Write new migration
|
||||||
mpath := filepath.Join(conf.MigrationsPath, mname)
|
mpath := filepath.Join(migrationsPath, mname)
|
||||||
mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
|
mfile, err := os.OpenFile(mpath, os.O_CREATE|os.O_EXCL|os.O_WRONLY, 0666)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR %s", err)
|
log.Fatalf("ERR %s", err)
|
||||||
@ -131,7 +132,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
|
|||||||
initConfOnce()
|
initConfOnce()
|
||||||
dest := args[0]
|
dest := args[0]
|
||||||
|
|
||||||
conn, err := initDB(conf)
|
conn, err := initDB(conf, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to connect to database: %s", err)
|
log.Fatalf("ERR failed to connect to database: %s", err)
|
||||||
}
|
}
|
||||||
@ -144,7 +145,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
|
|||||||
|
|
||||||
m.Data = getMigrationVars()
|
m.Data = getMigrationVars()
|
||||||
|
|
||||||
err = m.LoadMigrations(path.Join(conf.cpath, conf.MigrationsPath))
|
err = m.LoadMigrations(conf.relPath(conf.MigrationsPath))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to load migrations: %s", err)
|
log.Fatalf("ERR failed to load migrations: %s", err)
|
||||||
}
|
}
|
||||||
@ -223,7 +224,7 @@ func cmdDBMigrate(cmd *cobra.Command, args []string) {
|
|||||||
func cmdDBStatus(cmd *cobra.Command, args []string) {
|
func cmdDBStatus(cmd *cobra.Command, args []string) {
|
||||||
initConfOnce()
|
initConfOnce()
|
||||||
|
|
||||||
db, err := initDB(conf)
|
db, err := initDB(conf, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to connect to database: %s", err)
|
log.Fatalf("ERR failed to connect to database: %s", err)
|
||||||
}
|
}
|
||||||
@ -236,7 +237,7 @@ func cmdDBStatus(cmd *cobra.Command, args []string) {
|
|||||||
|
|
||||||
m.Data = getMigrationVars()
|
m.Data = getMigrationVars()
|
||||||
|
|
||||||
err = m.LoadMigrations(conf.MigrationsPath)
|
err = m.LoadMigrations(conf.relPath(conf.MigrationsPath))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to load migrations: %s", err)
|
log.Fatalf("ERR failed to load migrations: %s", err)
|
||||||
}
|
}
|
@ -13,9 +13,10 @@ import (
|
|||||||
"strconv"
|
"strconv"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/brianvoe/gofakeit"
|
"github.com/brianvoe/gofakeit/v5"
|
||||||
"github.com/dop251/goja"
|
"github.com/dop251/goja"
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
|
"github.com/gosimple/slug"
|
||||||
"github.com/spf13/cobra"
|
"github.com/spf13/cobra"
|
||||||
)
|
)
|
||||||
|
|
||||||
@ -28,7 +29,7 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
|
|||||||
|
|
||||||
conf.Production = false
|
conf.Production = false
|
||||||
|
|
||||||
db, err = initDB(conf)
|
db, err = initDB(conf, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to connect to database: %s", err)
|
log.Fatalf("ERR failed to connect to database: %s", err)
|
||||||
}
|
}
|
||||||
@ -61,6 +62,10 @@ func cmdDBSeed(cmd *cobra.Command, args []string) {
|
|||||||
setFakeFuncs(fake)
|
setFakeFuncs(fake)
|
||||||
vm.Set("fake", fake)
|
vm.Set("fake", fake)
|
||||||
|
|
||||||
|
util := vm.NewObject()
|
||||||
|
setUtilFuncs(util)
|
||||||
|
vm.Set("util", util)
|
||||||
|
|
||||||
_, err = vm.RunScript("seed.js", string(b))
|
_, err = vm.RunScript("seed.js", string(b))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
log.Fatalf("ERR failed to execute script: %s", err)
|
log.Fatalf("ERR failed to execute script: %s", err)
|
||||||
@ -232,6 +237,10 @@ func imageURL(width int, height int) string {
|
|||||||
return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
|
return fmt.Sprintf("https://picsum.photos/%d/%d?%d", width, height, rand.Intn(5000))
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func getRandValue(values []string) string {
|
||||||
|
return values[rand.Intn(len(values))]
|
||||||
|
}
|
||||||
|
|
||||||
//nolint: errcheck
|
//nolint: errcheck
|
||||||
func setFakeFuncs(f *goja.Object) {
|
func setFakeFuncs(f *goja.Object) {
|
||||||
gofakeit.Seed(0)
|
gofakeit.Seed(0)
|
||||||
@ -259,7 +268,6 @@ func setFakeFuncs(f *goja.Object) {
|
|||||||
f.Set("country_abr", gofakeit.CountryAbr)
|
f.Set("country_abr", gofakeit.CountryAbr)
|
||||||
f.Set("state", gofakeit.State)
|
f.Set("state", gofakeit.State)
|
||||||
f.Set("state_abr", gofakeit.StateAbr)
|
f.Set("state_abr", gofakeit.StateAbr)
|
||||||
f.Set("status_code", gofakeit.StatusCode)
|
|
||||||
f.Set("street", gofakeit.Street)
|
f.Set("street", gofakeit.Street)
|
||||||
f.Set("street_name", gofakeit.StreetName)
|
f.Set("street_name", gofakeit.StreetName)
|
||||||
f.Set("street_number", gofakeit.StreetNumber)
|
f.Set("street_number", gofakeit.StreetNumber)
|
||||||
@ -282,12 +290,10 @@ func setFakeFuncs(f *goja.Object) {
|
|||||||
f.Set("beer_yeast", gofakeit.BeerYeast)
|
f.Set("beer_yeast", gofakeit.BeerYeast)
|
||||||
|
|
||||||
// Cars
|
// Cars
|
||||||
f.Set("vehicle", gofakeit.Vehicle)
|
f.Set("car", gofakeit.Car)
|
||||||
f.Set("vehicle_type", gofakeit.VehicleType)
|
f.Set("car_type", gofakeit.CarType)
|
||||||
f.Set("car_maker", gofakeit.CarMaker)
|
f.Set("car_maker", gofakeit.CarMaker)
|
||||||
f.Set("car_model", gofakeit.CarModel)
|
f.Set("car_model", gofakeit.CarModel)
|
||||||
f.Set("fuel_type", gofakeit.FuelType)
|
|
||||||
f.Set("transmission_gear_type", gofakeit.TransmissionGearType)
|
|
||||||
|
|
||||||
// Text
|
// Text
|
||||||
f.Set("word", gofakeit.Word)
|
f.Set("word", gofakeit.Word)
|
||||||
@ -315,7 +321,6 @@ func setFakeFuncs(f *goja.Object) {
|
|||||||
f.Set("domain_suffix", gofakeit.DomainSuffix)
|
f.Set("domain_suffix", gofakeit.DomainSuffix)
|
||||||
f.Set("ipv4_address", gofakeit.IPv4Address)
|
f.Set("ipv4_address", gofakeit.IPv4Address)
|
||||||
f.Set("ipv6_address", gofakeit.IPv6Address)
|
f.Set("ipv6_address", gofakeit.IPv6Address)
|
||||||
f.Set("simple_status_code", gofakeit.SimpleStatusCode)
|
|
||||||
f.Set("http_method", gofakeit.HTTPMethod)
|
f.Set("http_method", gofakeit.HTTPMethod)
|
||||||
f.Set("user_agent", gofakeit.UserAgent)
|
f.Set("user_agent", gofakeit.UserAgent)
|
||||||
f.Set("user_agent_firefox", gofakeit.FirefoxUserAgent)
|
f.Set("user_agent_firefox", gofakeit.FirefoxUserAgent)
|
||||||
@ -379,8 +384,8 @@ func setFakeFuncs(f *goja.Object) {
|
|||||||
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
|
//f.Set("language_abbreviation", gofakeit.LanguageAbbreviation)
|
||||||
|
|
||||||
// File
|
// File
|
||||||
f.Set("extension", gofakeit.Extension)
|
f.Set("file_extension", gofakeit.FileExtension)
|
||||||
f.Set("mine_type", gofakeit.MimeType)
|
f.Set("file_mine_type", gofakeit.FileMimeType)
|
||||||
|
|
||||||
// Numbers
|
// Numbers
|
||||||
f.Set("number", gofakeit.Number)
|
f.Set("number", gofakeit.Number)
|
||||||
@ -404,10 +409,15 @@ func setFakeFuncs(f *goja.Object) {
|
|||||||
f.Set("digit", gofakeit.Digit)
|
f.Set("digit", gofakeit.Digit)
|
||||||
f.Set("letter", gofakeit.Letter)
|
f.Set("letter", gofakeit.Letter)
|
||||||
f.Set("lexify", gofakeit.Lexify)
|
f.Set("lexify", gofakeit.Lexify)
|
||||||
f.Set("rand_string", gofakeit.RandString)
|
f.Set("rand_string", getRandValue)
|
||||||
f.Set("shuffle_strings", gofakeit.ShuffleStrings)
|
|
||||||
f.Set("numerify", gofakeit.Numerify)
|
f.Set("numerify", gofakeit.Numerify)
|
||||||
|
|
||||||
//f.Set("programming_language", gofakeit.ProgrammingLanguage)
|
//f.Set("programming_language", gofakeit.ProgrammingLanguage)
|
||||||
|
}
|
||||||
|
|
||||||
|
func setUtilFuncs(f *goja.Object) {
|
||||||
|
// Slugs
|
||||||
|
f.Set("make_slug", slug.Make)
|
||||||
|
f.Set("make_slug_lang", slug.MakeLang)
|
||||||
|
f.Set("shuffle_strings", gofakeit.ShuffleStrings)
|
||||||
}
|
}
|
@ -19,15 +19,11 @@ func cmdServ(cmd *cobra.Command, args []string) {
|
|||||||
|
|
||||||
initWatcher()
|
initWatcher()
|
||||||
|
|
||||||
db, err = initDB(conf)
|
db, err = initDB(conf, true)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fatalInProd(err, "failed to connect to database")
|
fatalInProd(err, "failed to connect to database")
|
||||||
}
|
}
|
||||||
|
|
||||||
// if conf != nil && db != nil {
|
|
||||||
// initResolvers()
|
|
||||||
// }
|
|
||||||
|
|
||||||
sg, err = core.NewSuperGraph(&conf.Core, db)
|
sg, err = core.NewSuperGraph(&conf.Core, db)
|
||||||
if err != nil {
|
if err != nil {
|
||||||
fatalInProd(err, "failed to initialize Super Graph")
|
fatalInProd(err, "failed to initialize Super Graph")
|
@ -4,6 +4,7 @@ import (
|
|||||||
"fmt"
|
"fmt"
|
||||||
"os"
|
"os"
|
||||||
"path"
|
"path"
|
||||||
|
"path/filepath"
|
||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/spf13/viper"
|
"github.com/spf13/viper"
|
||||||
@ -48,10 +49,6 @@ func ReadInConfig(configFile string) (*Config, error) {
|
|||||||
return nil, fmt.Errorf("failed to decode config, %v", err)
|
return nil, fmt.Errorf("failed to decode config, %v", err)
|
||||||
}
|
}
|
||||||
|
|
||||||
if len(c.Core.AllowListFile) == 0 {
|
|
||||||
c.Core.AllowListFile = path.Join(cpath, "allow.list")
|
|
||||||
}
|
|
||||||
|
|
||||||
return c, nil
|
return c, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
@ -113,3 +110,11 @@ func GetConfigName() string {
|
|||||||
|
|
||||||
return ge
|
return ge
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func (c *Config) relPath(p string) string {
|
||||||
|
if filepath.IsAbs(p) {
|
||||||
|
return p
|
||||||
|
}
|
||||||
|
|
||||||
|
return path.Join(c.cpath, p)
|
||||||
|
}
|
@ -6,10 +6,9 @@ import (
|
|||||||
"io"
|
"io"
|
||||||
"io/ioutil"
|
"io/ioutil"
|
||||||
"net/http"
|
"net/http"
|
||||||
"strings"
|
|
||||||
|
|
||||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
|
"github.com/dosco/super-graph/internal/serv/internal/auth"
|
||||||
"github.com/rs/cors"
|
"github.com/rs/cors"
|
||||||
"go.uber.org/zap"
|
"go.uber.org/zap"
|
||||||
)
|
)
|
||||||
@ -75,15 +74,15 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
|
|||||||
return
|
return
|
||||||
}
|
}
|
||||||
|
|
||||||
if strings.EqualFold(req.OpName, introspectionQuery) {
|
doLog := true
|
||||||
introspect(w)
|
|
||||||
return
|
|
||||||
}
|
|
||||||
|
|
||||||
res, err := sg.GraphQL(ct, req.Query, req.Vars)
|
res, err := sg.GraphQL(ct, req.Query, req.Vars)
|
||||||
|
|
||||||
if logLevel >= LogLevelDebug {
|
if !conf.Production && res.QueryName() == "IntrospectionQuery" {
|
||||||
log.Printf("DBG query:\n%s\nsql:\n%s", req.Query, res.SQL())
|
doLog = false
|
||||||
|
}
|
||||||
|
|
||||||
|
if doLog && logLevel >= LogLevelDebug {
|
||||||
|
log.Printf("DBG query %s: %s", res.QueryName(), res.SQL())
|
||||||
}
|
}
|
||||||
|
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -93,7 +92,7 @@ func apiV1(w http.ResponseWriter, r *http.Request) {
|
|||||||
|
|
||||||
json.NewEncoder(w).Encode(res)
|
json.NewEncoder(w).Encode(res)
|
||||||
|
|
||||||
if logLevel >= LogLevelInfo {
|
if doLog && logLevel >= LogLevelInfo {
|
||||||
zlog.Info("success",
|
zlog.Info("success",
|
||||||
zap.String("op", res.Operation()),
|
zap.String("op", res.Operation()),
|
||||||
zap.String("name", res.QueryName()),
|
zap.String("name", res.QueryName()),
|
@ -1,8 +1,15 @@
|
|||||||
package serv
|
package serv
|
||||||
|
|
||||||
import (
|
import (
|
||||||
|
"crypto/tls"
|
||||||
|
"crypto/x509"
|
||||||
"database/sql"
|
"database/sql"
|
||||||
|
"errors"
|
||||||
|
"fmt"
|
||||||
|
"io/ioutil"
|
||||||
"path"
|
"path"
|
||||||
|
"path/filepath"
|
||||||
|
"strings"
|
||||||
"time"
|
"time"
|
||||||
|
|
||||||
"github.com/jackc/pgx/v4"
|
"github.com/jackc/pgx/v4"
|
||||||
@ -10,8 +17,17 @@ import (
|
|||||||
//_ "github.com/jackc/pgx/v4/stdlib"
|
//_ "github.com/jackc/pgx/v4/stdlib"
|
||||||
)
|
)
|
||||||
|
|
||||||
|
const (
|
||||||
|
PEM_SIG = "--BEGIN "
|
||||||
|
)
|
||||||
|
|
||||||
func initConf() (*Config, error) {
|
func initConf() (*Config, error) {
|
||||||
c, err := ReadInConfig(path.Join(confPath, GetConfigName()))
|
cp, err := filepath.Abs(confPath)
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
c, err := ReadInConfig(path.Join(cp, GetConfigName()))
|
||||||
if err != nil {
|
if err != nil {
|
||||||
return nil, err
|
return nil, err
|
||||||
}
|
}
|
||||||
@ -76,38 +92,24 @@ func initConf() (*Config, error) {
|
|||||||
c.AuthFailBlock = false
|
c.AuthFailBlock = false
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if len(c.AllowListFile) == 0 {
|
||||||
|
c.AllowListFile = c.relPath("./allow.list")
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.Production {
|
||||||
|
c.UseAllowList = true
|
||||||
|
}
|
||||||
|
|
||||||
return c, nil
|
return c, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
func initDB(c *Config) (*sql.DB, error) {
|
func initDB(c *Config, useDB bool) (*sql.DB, error) {
|
||||||
var db *sql.DB
|
var db *sql.DB
|
||||||
var err error
|
var err error
|
||||||
|
|
||||||
// cs := fmt.Sprintf("host=%s port=%d user=%s password=%s dbname=%s",
|
|
||||||
// c.DB.Host, c.DB.Port,
|
|
||||||
// c.DB.User, c.DB.Password,
|
|
||||||
// c.DB.DBName)
|
|
||||||
|
|
||||||
// fmt.Println(">>", cs)
|
|
||||||
|
|
||||||
// for i := 1; i < 10; i++ {
|
|
||||||
// db, err = sql.Open("pgx", cs)
|
|
||||||
// if err == nil {
|
|
||||||
// break
|
|
||||||
// }
|
|
||||||
// time.Sleep(time.Duration(i*100) * time.Millisecond)
|
|
||||||
// }
|
|
||||||
|
|
||||||
// if err != nil {
|
|
||||||
// return nil, err
|
|
||||||
// }
|
|
||||||
|
|
||||||
// return db, nil
|
|
||||||
|
|
||||||
config, _ := pgx.ParseConfig("")
|
config, _ := pgx.ParseConfig("")
|
||||||
config.Host = c.DB.Host
|
config.Host = c.DB.Host
|
||||||
config.Port = c.DB.Port
|
config.Port = c.DB.Port
|
||||||
config.Database = c.DB.DBName
|
|
||||||
config.User = c.DB.User
|
config.User = c.DB.User
|
||||||
config.Password = c.DB.Password
|
config.Password = c.DB.Password
|
||||||
config.RuntimeParams = map[string]string{
|
config.RuntimeParams = map[string]string{
|
||||||
@ -115,6 +117,63 @@ func initDB(c *Config) (*sql.DB, error) {
|
|||||||
"search_path": c.DB.Schema,
|
"search_path": c.DB.Schema,
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if useDB {
|
||||||
|
config.Database = c.DB.DBName
|
||||||
|
}
|
||||||
|
|
||||||
|
if c.DB.EnableTLS {
|
||||||
|
if len(c.DB.ServerName) == 0 {
|
||||||
|
return nil, errors.New("server_name is required")
|
||||||
|
}
|
||||||
|
if len(c.DB.ServerCert) == 0 {
|
||||||
|
return nil, errors.New("server_cert is required")
|
||||||
|
}
|
||||||
|
if len(c.DB.ClientCert) == 0 {
|
||||||
|
return nil, errors.New("client_cert is required")
|
||||||
|
}
|
||||||
|
if len(c.DB.ClientKey) == 0 {
|
||||||
|
return nil, errors.New("client_key is required")
|
||||||
|
}
|
||||||
|
|
||||||
|
rootCertPool := x509.NewCertPool()
|
||||||
|
var pem []byte
|
||||||
|
var err error
|
||||||
|
|
||||||
|
if strings.Contains(c.DB.ServerCert, PEM_SIG) {
|
||||||
|
pem = []byte(c.DB.ServerCert)
|
||||||
|
} else {
|
||||||
|
pem, err = ioutil.ReadFile(c.relPath(c.DB.ServerCert))
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("db tls: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
if ok := rootCertPool.AppendCertsFromPEM(pem); !ok {
|
||||||
|
return nil, errors.New("db tls: failed to append pem")
|
||||||
|
}
|
||||||
|
|
||||||
|
clientCert := make([]tls.Certificate, 0, 1)
|
||||||
|
var certs tls.Certificate
|
||||||
|
|
||||||
|
if strings.Contains(c.DB.ClientCert, PEM_SIG) {
|
||||||
|
certs, err = tls.X509KeyPair([]byte(c.DB.ClientCert), []byte(c.DB.ClientKey))
|
||||||
|
} else {
|
||||||
|
certs, err = tls.LoadX509KeyPair(c.relPath(c.DB.ClientCert), c.relPath(c.DB.ClientKey))
|
||||||
|
}
|
||||||
|
|
||||||
|
if err != nil {
|
||||||
|
return nil, fmt.Errorf("db tls: %w", err)
|
||||||
|
}
|
||||||
|
|
||||||
|
clientCert = append(clientCert, certs)
|
||||||
|
config.TLSConfig = &tls.Config{
|
||||||
|
RootCAs: rootCertPool,
|
||||||
|
Certificates: clientCert,
|
||||||
|
ServerName: c.DB.ServerName,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
// switch c.LogLevel {
|
// switch c.LogLevel {
|
||||||
// case "debug":
|
// case "debug":
|
||||||
// config.LogLevel = pgx.LogLevelDebug
|
// config.LogLevel = pgx.LogLevelDebug
|
@ -9,8 +9,8 @@ import (
|
|||||||
"strings"
|
"strings"
|
||||||
|
|
||||||
"github.com/bradfitz/gomemcache/memcache"
|
"github.com/bradfitz/gomemcache/memcache"
|
||||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/rails"
|
|
||||||
"github.com/dosco/super-graph/core"
|
"github.com/dosco/super-graph/core"
|
||||||
|
"github.com/dosco/super-graph/internal/serv/internal/rails"
|
||||||
"github.com/garyburd/redigo/redis"
|
"github.com/garyburd/redigo/redis"
|
||||||
)
|
)
|
||||||
|
|
@ -190,17 +190,3 @@ func self() (string, error) {
|
|||||||
}
|
}
|
||||||
return bin, nil
|
return bin, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// Get path relative to cwd
|
|
||||||
func relpath(p string) string {
|
|
||||||
cwd, err := os.Getwd()
|
|
||||||
if err != nil {
|
|
||||||
return p
|
|
||||||
}
|
|
||||||
|
|
||||||
if strings.HasPrefix(p, cwd) {
|
|
||||||
return "./" + strings.TrimLeft(p[len(cwd):], "/")
|
|
||||||
}
|
|
||||||
|
|
||||||
return p
|
|
||||||
}
|
|
File diff suppressed because one or more lines are too long
@ -11,7 +11,7 @@ import (
|
|||||||
|
|
||||||
rice "github.com/GeertJohan/go.rice"
|
rice "github.com/GeertJohan/go.rice"
|
||||||
"github.com/NYTimes/gziphandler"
|
"github.com/NYTimes/gziphandler"
|
||||||
"github.com/dosco/super-graph/cmd/internal/serv/internal/auth"
|
"github.com/dosco/super-graph/internal/serv/internal/auth"
|
||||||
)
|
)
|
||||||
|
|
||||||
func initWatcher() {
|
func initWatcher() {
|
@ -133,20 +133,36 @@ database:
|
|||||||
# database ping timeout is used for db health checking
|
# database ping timeout is used for db health checking
|
||||||
ping_timeout: 1m
|
ping_timeout: 1m
|
||||||
|
|
||||||
# Define additional variables here to be used with filters
|
# Set up an secure tls encrypted db connection
|
||||||
variables:
|
enable_tls: false
|
||||||
#admin_account_id: "5"
|
|
||||||
admin_account_id: "sql:select id from users where admin = true limit 1"
|
# Required for tls. For example with Google Cloud SQL it's
|
||||||
|
# <gcp-project-id>:<cloud-sql-instance>"
|
||||||
|
# server_name: blah
|
||||||
|
|
||||||
|
# Required for tls. Can be a file path or the contents of the pem file
|
||||||
|
# server_cert: ./server-ca.pem
|
||||||
|
|
||||||
|
# Required for tls. Can be a file path or the contents of the pem file
|
||||||
|
# client_cert: ./client-cert.pem
|
||||||
|
|
||||||
|
# Required for tls. Can be a file path or the contents of the pem file
|
||||||
|
# client_key: ./client-key.pem
|
||||||
|
|
||||||
|
# Define additional variables here to be used with filters
|
||||||
|
variables:
|
||||||
|
#admin_account_id: "5"
|
||||||
|
admin_account_id: "sql:select id from users where admin = true limit 1"
|
||||||
|
|
||||||
|
|
||||||
# Field and table names that you wish to block
|
# Field and table names that you wish to block
|
||||||
blocklist:
|
blocklist:
|
||||||
- ar_internal_metadata
|
- ar_internal_metadata
|
||||||
- schema_migrations
|
- schema_migrations
|
||||||
- secret
|
- secret
|
||||||
- password
|
- password
|
||||||
- encrypted
|
- encrypted
|
||||||
- token
|
- token
|
||||||
|
|
||||||
# Create custom actions with their own api endpoints
|
# Create custom actions with their own api endpoints
|
||||||
# For example the below action will be available at /api/v1/actions/refresh_leaderboard_users
|
# For example the below action will be available at /api/v1/actions/refresh_leaderboard_users
|
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user