Compare commits

18 Commits

| SHA1 |
| --- |
| c7e63a6200 |
| 89bc93e159 |
| 77a51924a7 |
| 0deb3596c5 |
| b87ba1fcd0 |
| 8ae7210e70 |
| f8aac8d4d7 |
| 34867a2733 |
| 4a8af69dd0 |
| c74226208d |
| 6d2f334011 |
| ff13f651d6 |
| cabd2d81ae |
| 4edc15eb98 |
| 6bc66d28bc |
| c797deb4d0 |
| deb5b93c81 |
| 85a74ed30c |
4  .gitignore  vendored
@ -28,3 +28,7 @@ main
.DS_Store
.swp
main
super-graph
*-fuzz.zip
crashers
suppressions
13  .wtc.yaml  Normal file
@ -0,0 +1,13 @@
no_trace: false
debounce: 300 # if rule has no debounce, this will be used instead
ignore: \.git/
trig: [start, run] # will run on start
rules:
  - name: start
  - name: run
    match: \.go$
    ignore: web|examples|docs|_test\.go$
    command: go run main.go serv
  - name: test
    match: _test\.go$
    command: go test -cover {PKG}
Dockerfile
@ -11,9 +11,8 @@ RUN apk update && \
    apk add --no-cache git && \
    apk add --no-cache upx=3.95-r2

RUN go get -u github.com/shanzi/wu && \
    go install github.com/shanzi/wu && \
    go get github.com/GeertJohan/go.rice/rice
RUN go get -u github.com/rafaelsq/wtc && \
    go get -u github.com/GeertJohan/go.rice/rice

WORKDIR /app
COPY . /app
28  README.md
@ -1,14 +1,14 @@
<a href="https://supergraph.dev"><img src="https://supergraph.dev/hologram.svg" width="100" height="100" align="right" /></a>

# Super Graph - Build web products faster. Instant GraphQL APIs for your apps
# Super Graph - Instant GraphQL APIs for your apps.

## Build web products faster. No code needed. GraphQL auto. transformed into efficient database queries.




[](https://discord.gg/6pSWCTZ)

Get an instant high performance GraphQL API for Postgres. No code needed. GraphQL is automatically transformed into efficient database queries.



## The story of Super Graph?

@ -25,20 +25,21 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun

## Features

- Works with Rails database schemas
- Automatically learns schemas and relationships
- Belongs-To, One-To-Many and Many-To-Many table relationships
- Full text search and Aggregations
- Rails Auth supported (Redis, Memcache, Cookie)
- Role based access control
- Works with Ruby-On-Rails databases
- Automatically learns database schemas and relationships
- Full text search and aggregations
- Rails authentication supported (Redis, Memcache, Cookie)
- JWT tokens supported (Auth0, etc)
- Join with remote REST APIs
- Join database with remote REST APIs
- Highly optimized and fast Postgres SQL queries
- Support GraphQL queries and mutations
- Configure with a simple config file
- GraphQL queries and mutations
- A simple config file
- High performance GO codebase
- Tiny docker image and low memory requirements
- Fuzz tested for security
- Database migrations tool
- Write database seeding scripts in Javascript
- Database seeding tool

## Documentation

@ -46,6 +47,9 @@ This compiler is what sits at the heart of Super Graph with layers of useful fun

## Contact me

I'm happy to help you deploy Super Graph so feel free to reach out over
Twitter or Discord.

[twitter/dosco](https://twitter.com/dosco)

[chat/super-graph](https://discord.gg/6pSWCTZ)
@ -1,5 +1,27 @@
|
||||
# http://localhost:8080/
|
||||
|
||||
variables {
|
||||
"data": [
|
||||
{
|
||||
"name": "Protect Ya Neck",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
},
|
||||
{
|
||||
"name": "Enter the Wu-Tang",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
mutation {
|
||||
products(insert: $data) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"update": {
|
||||
"name": "Wu-Tang",
|
||||
@ -16,16 +38,16 @@ mutation {
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": {
|
||||
"product_id": 5
|
||||
}
|
||||
}
|
||||
|
||||
mutation {
|
||||
products(id: $product_id, delete: true) {
|
||||
query {
|
||||
users {
|
||||
id
|
||||
name
|
||||
email
|
||||
picture: avatar
|
||||
products(limit: 2, where: {price: {gt: 10}}) {
|
||||
id
|
||||
name
|
||||
description
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -73,6 +95,117 @@ query {
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": [
|
||||
{
|
||||
"name": "Gumbo1",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
},
|
||||
{
|
||||
"name": "Gumbo2",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
query {
|
||||
products {
|
||||
id
|
||||
name
|
||||
user {
|
||||
email
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": {
|
||||
"product_id": 5
|
||||
}
|
||||
}
|
||||
|
||||
mutation {
|
||||
products(id: $product_id, delete: true) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": [
|
||||
{
|
||||
"name": "Gumbo1",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
},
|
||||
{
|
||||
"name": "Gumbo2",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
query {
|
||||
products {
|
||||
id
|
||||
name
|
||||
price
|
||||
users {
|
||||
email
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": {
|
||||
"email": "gfk@myspace.com",
|
||||
"full_name": "Ghostface Killah",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
}
|
||||
}
|
||||
|
||||
mutation {
|
||||
user(insert: $data) {
|
||||
id
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": [
|
||||
{
|
||||
"name": "Gumbo1",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
},
|
||||
{
|
||||
"name": "Gumbo2",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
query {
|
||||
products {
|
||||
id
|
||||
name
|
||||
users {
|
||||
email
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
query {
|
||||
me {
|
||||
id
|
||||
email
|
||||
full_name
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"update": {
|
||||
@ -112,27 +245,29 @@ query {
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": [
|
||||
{
|
||||
"name": "Gumbo1",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
},
|
||||
{
|
||||
"name": "Gumbo2",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
query {
|
||||
me {
|
||||
id
|
||||
email
|
||||
full_name
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": {
|
||||
"email": "gfk@myspace.com",
|
||||
"full_name": "Ghostface Killah",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
}
|
||||
}
|
||||
|
||||
mutation {
|
||||
user(insert: $data) {
|
||||
products {
|
||||
id
|
||||
name
|
||||
description
|
||||
users {
|
||||
email
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@ -141,33 +276,15 @@ query {
|
||||
id
|
||||
email
|
||||
picture: avatar
|
||||
password
|
||||
full_name
|
||||
products(limit: 2, where: {price: {gt: 10}}) {
|
||||
id
|
||||
name
|
||||
description
|
||||
price
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
variables {
|
||||
"data": [
|
||||
{
|
||||
"name": "Protect Ya Neck",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
},
|
||||
{
|
||||
"name": "Enter the Wu-Tang",
|
||||
"created_at": "now",
|
||||
"updated_at": "now"
|
||||
}
|
||||
]
|
||||
}
|
||||
|
||||
mutation {
|
||||
products(insert: $data) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -12,8 +12,7 @@ log_level: "debug"
|
||||
use_allow_list: false
|
||||
|
||||
# Throw a 401 on auth failure for queries that need auth
|
||||
# valid values: always, per_query, never
|
||||
auth_fail_block: never
|
||||
auth_fail_block: false
|
||||
|
||||
# Latency tracing for database queries and remote joins
|
||||
# the resulting latency information is returned with the
|
||||
@ -22,7 +21,7 @@ enable_tracing: true
|
||||
|
||||
# Watch the config folder and reload Super Graph
|
||||
# with the new configs when a change is detected
|
||||
reload_on_config_change: false
|
||||
reload_on_config_change: true
|
||||
|
||||
# File that points to the database seeding script
|
||||
# seed_file: seed.js
|
||||
@ -52,8 +51,9 @@ auth:
|
||||
cookie: _app_session
|
||||
|
||||
# Comment this out if you want to disable setting
|
||||
# the user_id via a header. Good for testing
|
||||
header: X-User-ID
|
||||
# the user_id via a header for testing.
|
||||
# Disable in production
|
||||
creds_in_header: true
|
||||
|
||||
rails:
|
||||
# Rails version this is used for reading the
|
||||
@ -93,6 +93,10 @@ database:
|
||||
#max_retries: 0
|
||||
#log_level: "debug"
|
||||
|
||||
# Set session variable "user.id" to the user id
|
||||
# Enable this if you need the user id in triggers, etc
|
||||
set_user_id: false
|
||||
|
||||
# Define variables here that you want to use in filters
|
||||
# sub-queries must be wrapped in ()
|
||||
variables:
|
||||
@ -100,7 +104,7 @@ database:
|
||||
|
||||
# Define defaults to for the field key and values below
|
||||
defaults:
|
||||
# filter: ["{ user_id: { eq: $user_id } }"]
|
||||
# filters: ["{ user_id: { eq: $user_id } }"]
|
||||
|
||||
# Field and table names that you wish to block
|
||||
blocklist:
|
||||
@ -112,25 +116,7 @@ database:
|
||||
- token
|
||||
|
||||
tables:
|
||||
- name: users
|
||||
# This filter will overwrite defaults.filter
|
||||
# filter: ["{ id: { eq: $user_id } }"]
|
||||
# filter_query: ["{ id: { eq: $user_id } }"]
|
||||
filter_update: ["{ id: { eq: $user_id } }"]
|
||||
filter_delete: ["{ id: { eq: $user_id } }"]
|
||||
|
||||
# - name: products
|
||||
# # Multiple filters are AND'd together
|
||||
# filter: [
|
||||
# "{ price: { gt: 0 } }",
|
||||
# "{ price: { lt: 8 } }"
|
||||
# ]
|
||||
|
||||
- name: customers
|
||||
# No filter is used for this field not
|
||||
# even defaults.filter
|
||||
filter: none
|
||||
|
||||
remotes:
|
||||
- name: payments
|
||||
id: stripe_id
|
||||
@ -149,7 +135,61 @@ tables:
|
||||
# real db table backing them
|
||||
name: me
|
||||
table: users
|
||||
filter: ["{ id: { eq: $user_id } }"]
|
||||
|
||||
# - name: posts
|
||||
# filter: ["{ account_id: { _eq: $account_id } }"]
|
||||
roles_query: "SELECT * FROM users WHERE id = $user_id"
|
||||
|
||||
roles:
|
||||
- name: anon
|
||||
tables:
|
||||
- name: products
|
||||
limit: 10
|
||||
|
||||
query:
|
||||
columns: ["id", "name", "description" ]
|
||||
aggregation: false
|
||||
|
||||
insert:
|
||||
block: false
|
||||
|
||||
update:
|
||||
block: false
|
||||
|
||||
delete:
|
||||
block: false
|
||||
|
||||
- name: user
|
||||
tables:
|
||||
- name: users
|
||||
query:
|
||||
filters: ["{ id: { _eq: $user_id } }"]
|
||||
|
||||
- name: products
|
||||
query:
|
||||
limit: 50
|
||||
filters: ["{ user_id: { eq: $user_id } }"]
|
||||
columns: ["id", "name", "description" ]
|
||||
disable_functions: false
|
||||
|
||||
insert:
|
||||
filters: ["{ user_id: { eq: $user_id } }"]
|
||||
columns: ["id", "name", "description" ]
|
||||
presets:
|
||||
- created_at: "now"
|
||||
|
||||
update:
|
||||
filters: ["{ user_id: { eq: $user_id } }"]
|
||||
columns:
|
||||
- id
|
||||
- name
|
||||
presets:
|
||||
- updated_at: "now"
|
||||
|
||||
delete:
|
||||
block: true
|
||||
|
||||
- name: admin
|
||||
match: id = 1
|
||||
tables:
|
||||
- name: users
|
||||
# query:
|
||||
# filters: ["{ account_id: { _eq: $account_id } }"]
|
||||
|
@ -1,3 +1,7 @@
|
||||
# Inherit config from this other config file
|
||||
# so I only need to overwrite some values
|
||||
inherits: dev
|
||||
|
||||
app_name: "Super Graph Production"
|
||||
host_port: 0.0.0.0:8080
|
||||
web_ui: false
|
||||
@ -12,8 +16,7 @@ log_level: "info"
|
||||
use_allow_list: true
|
||||
|
||||
# Throw a 401 on auth failure for queries that need auth
|
||||
# valid values: always, per_query, never
|
||||
auth_fail_block: always
|
||||
auth_fail_block: true
|
||||
|
||||
# Latency tracing for database queries and remote joins
|
||||
# the resulting latency information is returned with the
|
||||
@ -47,10 +50,6 @@ auth:
|
||||
type: rails
|
||||
cookie: _app_session
|
||||
|
||||
# Comment this out if you want to disable setting
|
||||
# the user_id via a header. Good for testing
|
||||
header: X-User-ID
|
||||
|
||||
rails:
|
||||
# Rails version this is used for reading the
|
||||
# various cookies formats.
|
||||
@ -80,68 +79,13 @@ database:
|
||||
type: postgres
|
||||
host: db
|
||||
port: 5432
|
||||
dbname: {{app_name_slug}}_development
|
||||
dbname: app_production
|
||||
user: postgres
|
||||
password: ''
|
||||
#pool_size: 10
|
||||
#max_retries: 0
|
||||
#log_level: "debug"
|
||||
|
||||
# Define variables here that you want to use in filters
|
||||
# sub-queries must be wrapped in ()
|
||||
variables:
|
||||
account_id: "(select account_id from users where id = $user_id)"
|
||||
|
||||
# Define defaults to for the field key and values below
|
||||
defaults:
|
||||
filter: ["{ user_id: { eq: $user_id } }"]
|
||||
|
||||
# Field and table names that you wish to block
|
||||
blocklist:
|
||||
- ar_internal_metadata
|
||||
- schema_migrations
|
||||
- secret
|
||||
- password
|
||||
- encrypted
|
||||
- token
|
||||
|
||||
tables:
|
||||
- name: users
|
||||
# This filter will overwrite defaults.filter
|
||||
# filter: ["{ id: { eq: $user_id } }"]
|
||||
# filter_query: ["{ id: { eq: $user_id } }"]
|
||||
filter_update: ["{ id: { eq: $user_id } }"]
|
||||
filter_delete: ["{ id: { eq: $user_id } }"]
|
||||
|
||||
- name: products
|
||||
# Multiple filters are AND'd together
|
||||
filter: [
|
||||
"{ price: { gt: 0 } }",
|
||||
"{ price: { lt: 8 } }"
|
||||
]
|
||||
|
||||
- name: customers
|
||||
# No filter is used for this field not
|
||||
# even defaults.filter
|
||||
filter: none
|
||||
|
||||
# remotes:
|
||||
# - name: payments
|
||||
# id: stripe_id
|
||||
# url: http://rails_app:3000/stripe/$id
|
||||
# path: data
|
||||
# # pass_headers:
|
||||
# # - cookie
|
||||
# # - host
|
||||
# set_headers:
|
||||
# - name: Authorization
|
||||
# value: Bearer <stripe_api_key>
|
||||
|
||||
- # You can create new fields that have a
|
||||
# real db table backing them
|
||||
name: me
|
||||
table: users
|
||||
filter: ["{ id: { eq: $user_id } }"]
|
||||
|
||||
# - name: posts
|
||||
# filter: ["{ account_id: { _eq: $account_id } }"]
|
||||
# Set session variable "user.id" to the user id
|
||||
# Enable this if you need the user id in triggers, etc
|
||||
set_user_id: false
|
docker-compose.yml
@ -34,7 +34,7 @@ services:
    volumes:
      - .:/app
    working_dir: /app
    command: wu -pattern="*.go" go run main.go serv
    command: wtc
    depends_on:
      - db
      - rails_app
19  docs/.vuepress/components/Card.vue  Normal file
@ -0,0 +1,19 @@
<template>
  <div class="shadow bg-white p-4 flex items-start" :class="className">
    <slot name="image"></slot>
    <div class="pl-4">
      <h2 class="p-0">
        <slot name="title"></slot>
      </h2>
      <p>
        <slot name="body"></slot>
      </p>
    </div>
  </div>
</template>

<script>
export default {
  props: ["className"]
}
</script>
@ -23,6 +23,7 @@
|
||||
class="inline-block px-4 py-3 my-8 bg-blue-600 text-blue-100 font-bold rounded"
|
||||
:item="actionLink"
|
||||
/>
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
@ -51,17 +52,26 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="bg-gray-100 mt-10">
|
||||
<div class="container mx-auto px-10 md:px-0 py-32">
|
||||
|
||||
<div class="pb-8 hidden md:block ">
|
||||
<img src="arch-basic.svg">
|
||||
</div>
|
||||
|
||||
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
|
||||
What is {{ data.heroText }}?
|
||||
</h1>
|
||||
<div class="text-2xl md:text-3xl">
|
||||
Super Graph can automatically learn a Postgres database and instantly serve it as a fast and secure GraphQL API. It comes with tools to create a new app and manage its database. You get it all: a very productive developer experience and a highly scalable app backend. It's designed to work well on serverless platforms by Google, AWS, Microsoft, etc. The goal is to save you a ton of time and money so you can focus on your app's core value.
|
||||
</div>
|
||||
|
||||
|
||||
</div>
|
||||
</div>
|
||||
|
||||
|
||||
<div class="flex flex-wrap">
|
||||
<div class="md:w-2/4">
|
||||
<img src="/graphql.png">
|
||||
@ -112,7 +122,7 @@
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="overflow-hidden bg-indigo-900" style="height: 730px">
|
||||
<div class="overflow-hidden bg-indigo-900">
|
||||
<div class="container mx-auto py-20">
|
||||
<img src="/super-graph-web-ui.png">
|
||||
</div>
|
||||
@ -143,10 +153,73 @@
|
||||
</div>
|
||||
|
||||
<div class="border-t py-10">
|
||||
<div class="container mx-auto">
|
||||
<style>.embed-container { position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden; max-width: 100%; } .embed-container iframe, .embed-container object, .embed-container embed { position: absolute; top: 0; left: 0; width: 100%; height: 100%; }</style>
|
||||
<div class="embed-container shadow">
|
||||
<iframe src='https://www.youtube.com/embed/MfPL2A-DAJk' frameborder='0' allowfullscreen></iframe>
|
||||
<div class="block md:hidden w-100">
|
||||
<iframe src='https://www.youtube.com/embed/MfPL2A-DAJk' frameborder='0' allowfullscreen style="width: 100%; height: 250px;">
|
||||
</iframe>
|
||||
</div>
|
||||
|
||||
<div class="container mx-auto flex flex-col md:flex-row items-center">
|
||||
<div class="w-100 md:w-1/2 p-8">
|
||||
<h1 class="text-2xl font-bold">GraphQL the future of APIs</h1>
|
||||
<p class="text-xl text-gray-600">Keeping a tight and fast development loop helps you iterate quickly. Leveraging technology like Super Graph focuses your team on building the core product and not reinventing wheels. GraphQL eliminate the dependency on the backend engineering and keeps the things moving fast</p>
|
||||
</div>
|
||||
|
||||
<div class="hidden md:block md:w-1/2">
|
||||
<style>.embed-container { position: relative; padding-bottom: 56.25%; height: 0; overflow: hidden; max-width: 100%; } .embed-container iframe, .embed-container object, .embed-container embed { position: absolute; top: 0; left: 0; width: 100%; height: 100%; }</style>
|
||||
<div class="embed-container shadow">
|
||||
<iframe src='https://www.youtube.com/embed/MfPL2A-DAJk' frameborder='0' allowfullscreen >
|
||||
</iframe>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="bg-gray-200 mt-10">
|
||||
<div class="container mx-auto px-10 md:px-0 py-32">
|
||||
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
|
||||
Build Secure Apps
|
||||
</h1>
|
||||
<div class="flex flex-col text-2xl md:text-3xl">
|
||||
<card className="mb-1 p-8">
|
||||
<template #image><font-awesome-icon icon="portrait" class="text-red-500" /></template>
|
||||
<template #title>Role Based Access Control</template>
|
||||
<template #body>Dynamically assign roles like admin, manager or anon to specific users. Generate role-specific queries at runtime. For example, admins can get all users while others can only fetch their own user.</template>
|
||||
</card>
|
||||
<card className="mb-1 p-8">
|
||||
<template #image><font-awesome-icon icon="shield-alt" class="text-blue-500" /></template>
|
||||
<template #title>Prepared Statements</template>
|
||||
<template #body>An additional layer of protection from a variety of security issues like SQL injection. In production mode all queries are precompiled into prepared statements so only those can be executed. This also significantly speeds up all queries.</template>
|
||||
</card>
|
||||
<card className="p-8">
|
||||
<template #image><font-awesome-icon icon="lock" class="text-green-500"/></template>
|
||||
<template #title>Fuzz Tested Code</template>
|
||||
<template #body>Fuzzing is done by complex software that generates massive amounts of random input to detect if code is free of security bugs. Google uses fuzzing to protect everything from their cloud infrastructure to the Chrome browser.</template>
|
||||
</card>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
|
||||
<div class="">
|
||||
<div class="container mx-auto px-10 md:px-0 py-32">
|
||||
<h1 class="uppercase font-semibold text-xl text-blue-800 mb-4">
|
||||
More Features
|
||||
</h1>
|
||||
<div class="flex flex-col md:flex-row text-2xl md:text-3xl">
|
||||
<card className="mr-0 md:mr-1 mb-1 flex-col w-100 md:w-1/3 items-center">
|
||||
<template #image><img src="/arch-remote-join.svg" class="h-64"></template>
|
||||
<template #title>Remote Joins</template>
|
||||
<template #body>A powerful feature that allows you to query your database and remote REST APIs at the same time. For example fetch a user from the DB, his tweets from Twitter and his payments from Stripe with a single GraphQL query.</template>
|
||||
</card>
|
||||
<card className="mr-0 md:mr-1 mb-1 flex-col w-100 md:w-1/3">
|
||||
<template #image><img src="/arch-search.svg" class="h-64"></template>
|
||||
<template #title>Full Text Search</template>
|
||||
<template #body>Postgres has excellent full-text search built-in. You don't need another expensive service. Super Graph makes it super easy to use with keyword ranking and highlighting also supported.</template>
|
||||
</card>
|
||||
<card className="mb-1 flex-col w-100 md:w-1/3">
|
||||
<template #image><img src="/arch-bulk.svg" class="h-64"></template>
|
||||
<template #title>Bulk Inserts</template>
|
||||
<template #body>Efficiently insert, update and delete multiple items with a single query. Upserts are also supported</template>
|
||||
</card>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
@ -164,9 +237,17 @@
|
||||
<script>
|
||||
import NavLink from '@theme/components/NavLink.vue'
|
||||
import Navbar from '@theme/components/Navbar.vue'
|
||||
import Card from './Card.vue'
|
||||
|
||||
|
||||
import { library } from '@fortawesome/fontawesome-svg-core'
|
||||
import { faPortrait, faShieldAlt, faLock } from '@fortawesome/free-solid-svg-icons'
|
||||
import { FontAwesomeIcon } from '@fortawesome/vue-fontawesome'
|
||||
|
||||
library.add(faPortrait, faShieldAlt, faLock)
|
||||
|
||||
export default {
|
||||
components: { NavLink, Navbar },
|
||||
components: { NavLink, Navbar, FontAwesomeIcon, Card },
|
||||
|
||||
computed: {
|
||||
data () {
|
||||
|
3  docs/.vuepress/public/arch-basic.svg  Normal file
File diff suppressed because one or more lines are too long
After  Size: 5.4 KiB
3  docs/.vuepress/public/arch-bulk.svg  Normal file
@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="411px" height="240px" viewBox="-0.5 -0.5 411 240"><defs/><g><g transform="translate(1.5,6.5)"><switch><foreignObject style="overflow:visible;" pointer-events="none" width="379" height="232" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.2; vertical-align: top; white-space: nowrap; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display:inline-block;text-align:inherit;text-decoration:inherit;"><div><div style="font-size: 19px"><font color="#3b3b3b">[{ name: "beer1", description: "99 bottles of"},</font></div></div><div style="font-size: 19px"><div style="font-size: 12px"><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer2", description: "98 bottles of"},</font></div><div style="font-size: 19px"><div style="font-size: 12px"><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer3", description: "97 bottles of"},</font></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer4", description: "96 bottles of"},</font></div></div></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer5", description: "95 bottles of"},</font></div></div></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer6", description: "94 bottles of"},</font></div></div></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer7", description: "93 bottles of"},</font></div></div></div><div><div><div style="font-size: 19px"><font color="#3b3b3b"> { name: "beer8", description: "92 bottles of"}]</font></div></div></div><div><font color="#3b3b3b"><br /></font></div></div></div></div><div><font color="#3b3b3b"> </font></div></div><div style="font-size: 19px"><font color="#3b3b3b"> </font></div></div></div></foreignObject><text x="190" y="122" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g></g></svg>
After  Size: 2.3 KiB
3  docs/.vuepress/public/arch-remote-join.svg  Normal file
File diff suppressed because one or more lines are too long
After  Size: 5.4 KiB
3  docs/.vuepress/public/arch-search.svg  Normal file
@ -0,0 +1,3 @@
<?xml version="1.0" encoding="UTF-8"?>
<!DOCTYPE svg PUBLIC "-//W3C//DTD SVG 1.1//EN" "http://www.w3.org/Graphics/SVG/1.1/DTD/svg11.dtd">
<svg xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink" version="1.1" width="426px" height="204px" viewBox="-0.5 -0.5 426 204"><defs/><g><g transform="translate(1.5,6.5)"><switch><foreignObject style="overflow:visible;" pointer-events="none" width="411" height="196" requiredFeatures="http://www.w3.org/TR/SVG11/feature#Extensibility"><div xmlns="http://www.w3.org/1999/xhtml" style="display: inline-block; font-size: 12px; font-family: Helvetica; color: rgb(0, 0, 0); line-height: 1.2; vertical-align: top; white-space: nowrap; text-align: left;"><div xmlns="http://www.w3.org/1999/xhtml" style="display:inline-block;text-align:inherit;text-decoration:inherit;"><div><div style="font-size: 19px"><font color="#3b3b3b">query {</font></div><div><span style="font-size: 19px"> <font color="#cc0000">products</font><font color="#3b3b3b">(</font></span><span style="font-size: 19px"><font color="#3b3b3b">search: "</font><font color="#00994d">ale</font><font color="#3b3b3b">", where: { price: { gt: 3 }) {</font></span></div><div><span style="font-size: 19px"> <font color="#0066cc"> id</font></span></div><div><span style="font-size: 19px"><font color="#0066cc"> name</font></span></div><div><span style="font-size: 19px"><font color="#0066cc"> search_rank</font></span></div><div><span style="font-size: 19px"><font color="#0066cc"> search_headline_description</font><br /> <font color="#3b3b3b"> }<br />}</font></span></div></div><div style="font-size: 19px"><br /></div></div></div></foreignObject><text x="206" y="104" fill="#000000" text-anchor="middle" font-size="12px" font-family="Helvetica">[Not supported by viewer]</text></switch></g></g></svg>
After  Size: 1.8 KiB
@ -17,12 +17,12 @@ features:
  details: Easy config file, quick to deploy, No code needed. It just works.
- title: High Performance
  details: Compiles your GraphQL into a fast SQL query in realtime.
- title: Built in GO
  details: Built in Go is a language created at Google to build fast and secure web services.
- title: Ruby-on-Rails
  details: Can read Rails cookies and supports rails database conventions.
- title: Serverless
  details: Instant startup for scale to zero environments like Google Cloud Run, App Engine, AWS Lambda
- title: Go Lang
  details: Go is a language created at Google to build fast and secure web services.
- title: Free and Open Source
  details: Not a VC funded startup. Not even a startup just good old open source code
444  docs/guide.md
@ -9,20 +9,21 @@ Get an instant high performance GraphQL API for Postgres. No code needed. GraphQ

## Features

- Works with Rails database schemas
- Automatically learns schemas and relationships
- Belongs-To, One-To-Many and Many-To-Many table relationships
- Full text search and Aggregations
- Rails Auth supported (Redis, Memcache, Cookie)
- Role based access control
- Works with Ruby-On-Rails databases
- Automatically learns database schemas and relationships
- Full text search and aggregations
- Rails authentication supported (Redis, Memcache, Cookie)
- JWT tokens supported (Auth0, etc)
- Join with remote REST APIs
- Join database with remote REST APIs
- Highly optimized and fast Postgres SQL queries
- Support GraphQL queries and mutations
- Configure with a simple config file
- GraphQL queries and mutations
- A simple config file
- High performance GO codebase
- Tiny docker image and low memory requirements
- Fuzz tested for security
- Database migrations tool
- Write database seeding scripts in Javascript
- Database seeding tool

## Try the demo app

@ -499,7 +500,36 @@ query {

### Advanced queries

Super Graph supports complex queries where you can add filters, ordering, offsets and limits on the query.
Super Graph supports complex queries where you can add filters, ordering, offsets and limits on the query. For example, the query below will list all products where the price is greater than 10 and the id is not 5.

```graphql
query {
  products(where: {
    and: {
      price: { gt: 10 },
      not: { id: { eq: 5 } }
    }
  }) {
    name
    price
  }
}
```
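
Paging works the same way as filtering. Here is a minimal sketch combining the two, using only the `limit` and `where` arguments that already appear in the queries above (the argument names for ordering and offsets are not shown in this excerpt, so they are left out):

```graphql
# fetch at most 5 products priced above 10
query {
  products(limit: 5, where: { price: { gt: 10 } }) {
    id
    name
    price
  }
}
```
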
#### Nested where clause targeting related tables

Sometimes you need to query a table based on a condition that applies to a related table. For example, say you need to list all users who belong to an account. The query below will fetch the id and email of all users who belong to the account with id 3.

```graphql
query {
  users(where: {
    accounts: { id: { eq: 3 } }
  }) {
    id
    email
  }
}
```

#### Logical Operators

@ -613,7 +643,7 @@ mutation {
}
```

### Bulk insert
#### Bulk insert

```json
{
@ -659,7 +689,7 @@ mutation {
}
```

### Bulk update
#### Bulk update

```json
{
@ -702,7 +732,7 @@ mutation {
}
```

### Bulk delete
#### Bulk delete

```json
{
@ -743,7 +773,7 @@ mutation {
}
```

### Bulk upsert
#### Bulk upsert

```json
{
@ -878,83 +908,6 @@ class AddSearchColumn < ActiveRecord::Migration[5.1]
|
||||
end
|
||||
```
|
||||
|
||||
## Remote Joins
|
||||
|
||||
It often happens that after fetching some data from the DB we need to call another API to fetch some more data, and all of this has to be combined into a single JSON response. For example, along with a list of users you need their last 5 payments from Stripe. This requires you to query your DB for the users and Stripe for the payments. Super Graph handles all this for you; only the fields you requested from the Stripe API are returned.
|
||||
|
||||
::: tip Is this fast?
|
||||
Super Graph is able to fetch remote data and merge it with the DB response in an efficient manner. Several optimizations such as parallel HTTP requests and a zero-allocation JSON merge algorithm make this very fast. All of this without you having to write a line of code.
|
||||
:::
|
||||
|
||||
For example you need to list the last 3 payments made by a user. You will first need to look up the user in the database and then call the Stripe API to fetch his last 3 payments. For this to work your user table in the db has a `customer_id` column that contains his Stripe customer ID.
|
||||
|
||||
Similarly, you could also fetch the user's last tweet, lead info from Salesforce or whatever else you need. It's fine to mix up several different `remote joins` into a single GraphQL query.
|
||||
|
||||
### Stripe API example
|
||||
|
||||
The configuration is self explanatory. A `payments` field has been added under the `customers` table. This field is added to the `remotes` subsection that defines fields associated with `customers` that are remote and not real database columns.
|
||||
|
||||
The `id` parameter maps a column from the `customers` table to the `$id` variable. In this case it maps `$id` to the `customer_id` column.
|
||||
|
||||
```yaml
|
||||
tables:
|
||||
- name: customers
|
||||
remotes:
|
||||
- name: payments
|
||||
id: stripe_id
|
||||
url: http://rails_app:3000/stripe/$id
|
||||
path: data
|
||||
# debug: true
|
||||
# pass_headers:
|
||||
# - cookie
|
||||
# - host
|
||||
set_headers:
|
||||
- name: Authorization
|
||||
value: Bearer <stripe_api_key>
|
||||
```
|
||||
|
||||
#### How do I make use of this?
|
||||
|
||||
Just include `payments` like you would any other GraphQL selector under the `customers` selector. Super Graph will call the configured API for you and stitch (merge) the JSON the API sends back with the JSON generated from the database query. GraphQL features like aliases and fields all work.
|
||||
|
||||
```graphql
|
||||
query {
|
||||
customers {
|
||||
id
|
||||
email
|
||||
payments {
|
||||
customer_id
|
||||
amount
|
||||
billing_details
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
And voila here is the result. You get all of this advanced and honestly complex querying capability without writing a single line of code.
|
||||
|
||||
```json
|
||||
"data": {
|
||||
"customers": [
|
||||
{
|
||||
"id": 1,
|
||||
"email": "linseymertz@reilly.co",
|
||||
"payments": [
|
||||
{
|
||||
"customer_id": "cus_YCj3ndB5Mz",
|
||||
"amount": 100,
|
||||
"billing_details": {
|
||||
"address": "1 Infinity Drive",
|
||||
"zipcode": "94024"
|
||||
}
|
||||
},
|
||||
...
|
||||
```
|
||||
|
||||
Even tracing data is available in the Super Graph web UI if tracing is enabled in the config. By default it is enabled in development. Additionally, you can set `debug: true` there to enable HTTP request / response dumping to help with debugging.
|
||||
|
||||

|
||||
|
||||
## Authentication
|
||||
|
||||
You can only have one type of auth enabled. You can either pick Rails or JWT.
|
||||
@ -1033,36 +986,193 @@ We can get the JWT token either from the `authorization` header where we expect

For validation a `secret` or a public key (ecdsa or rsa) is required. When using public keys they have to be in a PEM format file.

## Easy to setup
## Role based Access Control

It's a common use case for APIs to control what information they return or insert based on the role of the user. For example, when fetching a list of users, a normal user can only fetch his own entry while a manager can fetch all the users within a company and an admin user can fetch everyone. Or when creating a new user, an admin user can set a user's role while the user himself cannot set or change it. This is called role based access control or RBAC.

Super Graph allows you to set access control rules based on dynamically defined roles. You can create as many roles as you wish. The only two default (built-in) roles are `user` for authenticated requests and `anon` for unauthenticated. An authenticated request is one where Super Graph can extract a `user_id` based on the configured authentication method (jwt, rails cookies, etc).

### Configure RBAC

```yaml
roles_query: "SELECT * FROM users WHERE users.id = $user_id"

roles:
  - name: user
    tables:
      - name: users
        query:
          filters: ["{ id: { _eq: $user_id } }"]

        insert:
          filters: ["{ user_id: { eq: $user_id } }"]
          columns: ["id", "name", "description" ]
          presets:
            - created_at: "now"

        update:
          filters: ["{ user_id: { eq: $user_id } }"]
          columns:
            - id
            - name
          presets:
            - updated_at: "now"

        delete:
          block: true

  - name: admin
    match: users.id = 1
    tables:
      - name: users
        query:
          filters: []
```

This configuration is relatively simple to follow. The `roles_query` parameter is the query that
must be run to help figure out a user's role. This query can be as complex as you like and include joins with other tables.

The individual roles are defined under the `roles` parameter, and this includes each table the role has a custom setting for. The role is dynamically matched using the `match` parameter; for example, in the above case `users.id = 1` means that when the `roles_query` is executed, a user with the id `1` will be assigned the admin role, and those that don't match get the `user` role if authenticated successfully or the `anon` role otherwise.

The example below would work for SAAS apps where an account (tenant) is usually the top parent table to everything else.

```yaml
roles_query: "SELECT * FROM users JOIN accounts on accounts.id = users.account_id WHERE users.id = $user_id"

roles:
  - name: user
    tables:
      - name: users
        ...

  - name: admin
    match: accounts.admin_id = $user_id
    tables:
      - name: users
        query:
          filters: [{ accounts: { id: { eq: $account_id } } }]
```
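
As a rough illustration of the `user` role configured above (an assumption drawn from that example, not an additional documented feature): the client query needs no role-specific syntax, since the matched role's `filters` and `columns` are applied by Super Graph on the server.

```graphql
# run as an authenticated user, this query is implicitly
# restricted by the user role's filter { id: { _eq: $user_id } }
query {
  users {
    id
    email
  }
}
```
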
## Remote Joins
|
||||
|
||||
It often happens that after fetching some data from the DB we need to call another API to fetch some more data, and all of this has to be combined into a single JSON response. For example, along with a list of users you need their last 5 payments from Stripe. This requires you to query your DB for the users and Stripe for the payments. Super Graph handles all this for you; only the fields you requested from the Stripe API are returned.
|
||||
|
||||
::: tip Is this fast?
|
||||
Super Graph is able to fetch remote data and merge it with the DB response in an efficient manner. Several optimizations such as parallel HTTP requests and a zero-allocation JSON merge algorithm make this very fast. All of this without you having to write a line of code.
|
||||
:::
|
||||
|
||||
For example you need to list the last 3 payments made by a user. You will first need to look up the user in the database and then call the Stripe API to fetch his last 3 payments. For this to work your user table in the db has a `customer_id` column that contains his Stripe customer ID.
|
||||
|
||||
Similarly, you could also fetch the user's last tweet, lead info from Salesforce or whatever else you need. It's fine to mix up several different `remote joins` into a single GraphQL query.
|
||||
|
||||
### Stripe API example
|
||||
|
||||
The configuration is self explanatory. A `payments` field has been added under the `customers` table. This field is added to the `remotes` subsection that defines fields associated with `customers` that are remote and not real database columns.
|
||||
|
||||
The `id` parameter maps a column from the `customers` table to the `$id` variable. In this case it maps `$id` to the `customer_id` column.
|
||||
|
||||
```yaml
|
||||
tables:
|
||||
- name: customers
|
||||
remotes:
|
||||
- name: payments
|
||||
id: stripe_id
|
||||
url: http://rails_app:3000/stripe/$id
|
||||
path: data
|
||||
# debug: true
|
||||
# pass_headers:
|
||||
# - cookie
|
||||
# - host
|
||||
set_headers:
|
||||
- name: Authorization
|
||||
value: Bearer <stripe_api_key>
|
||||
```
|
||||
|
||||
#### How do I make use of this?
|
||||
|
||||
Just include `payments` like you would any other GraphQL selector under the `customers` selector. Super Graph will call the configured API for you and stitch (merge) the JSON the API sends back with the JSON generated from the database query. GraphQL features like aliases and fields all work.
|
||||
|
||||
```graphql
|
||||
query {
|
||||
customers {
|
||||
id
|
||||
email
|
||||
payments {
|
||||
customer_id
|
||||
amount
|
||||
billing_details
|
||||
}
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
And voila here is the result. You get all of this advanced and honestly complex querying capability without writing a single line of code.
|
||||
|
||||
```json
|
||||
"data": {
|
||||
"customers": [
|
||||
{
|
||||
"id": 1,
|
||||
"email": "linseymertz@reilly.co",
|
||||
"payments": [
|
||||
{
|
||||
"customer_id": "cus_YCj3ndB5Mz",
|
||||
"amount": 100,
|
||||
"billing_details": {
|
||||
"address": "1 Infinity Drive",
|
||||
"zipcode": "94024"
|
||||
}
|
||||
},
|
||||
...
|
||||
```
|
||||
|
||||
Even tracing data is available in the Super Graph web UI if tracing is enabled in the config. By default it is enabled in development. Additionally, you can set `debug: true` there to enable HTTP request / response dumping to help with debugging.
|
||||
|
||||

|
||||
|
||||
|
||||
## Configuration files
|
||||
|
||||
Configuration files can be either YAML or JSON; their names are derived from the `GO_ENV` variable. For example, `GO_ENV=prod` will cause the `prod.yaml` config file to be used, while `GO_ENV=dev` will use `dev.yaml`. A path to look for the config files in can be specified using the `-path <folder>` command line argument.
|
||||
|
||||
We've tried to ensure that the config file is self-documenting and easy to work with.
|
||||
|
||||
```yaml
|
||||
# Inherit config from this other config file
|
||||
# so I only need to overwrite some values
|
||||
inherits: base
|
||||
|
||||
app_name: "Super Graph Development"
|
||||
host_port: 0.0.0.0:8080
|
||||
web_ui: true
|
||||
debug_level: 1
|
||||
|
||||
# debug, info, warn, error, fatal, panic, disable
|
||||
log_level: "info"
|
||||
# debug, info, warn, error, fatal, panic
|
||||
log_level: "debug"
|
||||
|
||||
# Disable this in development to get a list of
|
||||
# queries used. When enabled super graph
|
||||
# will only allow queries from this list
|
||||
# List saved to ./config/allow.list
|
||||
use_allow_list: true
|
||||
use_allow_list: false
|
||||
|
||||
# Throw a 401 on auth failure for queries that need auth
|
||||
# valid values: always, per_query, never
|
||||
auth_fail_block: always
|
||||
auth_fail_block: false
|
||||
|
||||
# Latency tracing for database queries and remote joins
|
||||
# the resulting latency information is returned with the
|
||||
# response
|
||||
enable_tracing: true
|
||||
|
||||
# Watch the config folder and reload Super Graph
|
||||
# with the new configs when a change is detected
|
||||
reload_on_config_change: true
|
||||
|
||||
# File that points to the database seeding script
|
||||
# seed_file: seed.js
|
||||
|
||||
# Path pointing to where the migrations can be found
|
||||
migrations_path: ./config/migrations
|
||||
|
||||
# Postgres related environment Variables
|
||||
# SG_DATABASE_HOST
|
||||
# SG_DATABASE_PORT
|
||||
@ -1085,8 +1195,9 @@ auth:
|
||||
cookie: _app_session
|
||||
|
||||
# Comment this out if you want to disable setting
|
||||
# the user_id via a header. Good for testing
|
||||
header: X-User-ID
|
||||
# the user_id via a header for testing.
|
||||
# Disable in production
|
||||
creds_in_header: true
|
||||
|
||||
rails:
|
||||
# Rails version this is used for reading the
|
||||
@ -1097,10 +1208,10 @@ auth:
|
||||
secret_key_base: 0a248500a64c01184edb4d7ad3a805488f8097ac761b76aaa6c17c01dcb7af03a2f18ba61b2868134b9c7b79a122bc0dadff4367414a2d173297bfea92be5566
|
||||
|
||||
# Remote cookie store. (memcache or redis)
|
||||
# url: redis://127.0.0.1:6379
|
||||
# password: test
|
||||
# max_idle: 80,
|
||||
# max_active: 12000,
|
||||
# url: redis://redis:6379
|
||||
# password: ""
|
||||
# max_idle: 80
|
||||
# max_active: 12000
|
||||
|
||||
# In most cases you don't need these
|
||||
# salt: "encrypted cookie"
|
||||
@ -1120,20 +1231,27 @@ database:
|
||||
dbname: app_development
|
||||
user: postgres
|
||||
password: ''
|
||||
# pool_size: 10
|
||||
# max_retries: 0
|
||||
# log_level: "debug"
|
||||
|
||||
#schema: "public"
|
||||
#pool_size: 10
|
||||
#max_retries: 0
|
||||
#log_level: "debug"
|
||||
|
||||
# Set session variable "user.id" to the user id
|
||||
# Enable this if you need the user id in triggers, etc
|
||||
set_user_id: false
|
||||
|
||||
# Define variables here that you want to use in filters
|
||||
# sub-queries must be wrapped in ()
|
||||
variables:
|
||||
account_id: "select account_id from users where id = $user_id"
|
||||
account_id: "(select account_id from users where id = $user_id)"
|
||||
|
||||
# Define defaults to for the field key and values below
|
||||
defaults:
|
||||
filter: ["{ user_id: { eq: $user_id } }"]
|
||||
# filters: ["{ user_id: { eq: $user_id } }"]
|
||||
|
||||
# Field and table names that you wish to block
|
||||
blacklist:
|
||||
blocklist:
|
||||
- ar_internal_metadata
|
||||
- schema_migrations
|
||||
- secret
|
||||
@ -1141,43 +1259,85 @@ database:
|
||||
- encrypted
|
||||
- token
|
||||
|
||||
tables:
|
||||
- name: users
|
||||
# This filter will overwrite defaults.filter
|
||||
filter: ["{ id: { eq: $user_id } }"]
|
||||
tables:
|
||||
- name: customers
|
||||
remotes:
|
||||
- name: payments
|
||||
id: stripe_id
|
||||
url: http://rails_app:3000/stripe/$id
|
||||
path: data
|
||||
# debug: true
|
||||
pass_headers:
|
||||
- cookie
|
||||
set_headers:
|
||||
- name: Host
|
||||
value: 0.0.0.0
|
||||
# - name: Authorization
|
||||
# value: Bearer <stripe_api_key>
|
||||
|
||||
- name: products
|
||||
# Multiple filters are AND'd together
|
||||
filter: [
|
||||
"{ price: { gt: 0 } }",
|
||||
"{ price: { lt: 8 } }"
|
||||
]
|
||||
- # You can create new fields that have a
|
||||
# real db table backing them
|
||||
name: me
|
||||
table: users
|
||||
|
||||
- name: customers
|
||||
# No filter is used for this field not
|
||||
# even defaults.filter
|
||||
filter: none
|
||||
roles_query: "SELECT * FROM users WHERE id = $user_id"
|
||||
|
||||
remotes:
|
||||
- name: payments
|
||||
id: stripe_id
|
||||
url: http://rails_app:3000/stripe/$id
|
||||
path: data
|
||||
# pass_headers:
|
||||
# - cookie
|
||||
# - host
|
||||
set_headers:
|
||||
- name: Authorization
|
||||
value: Bearer <stripe_api_key>
|
||||
roles:
|
||||
- name: anon
|
||||
tables:
|
||||
- name: products
|
||||
limit: 10
|
||||
|
||||
- # You can create new fields that have a
|
||||
# real db table backing them
|
||||
name: me
|
||||
table: users
|
||||
filter: ["{ id: { eq: $user_id } }"]
|
||||
query:
|
||||
columns: ["id", "name", "description" ]
|
||||
aggregation: false
|
||||
|
||||
insert:
|
||||
allow: false
|
||||
|
||||
update:
|
||||
allow: false
|
||||
|
||||
delete:
|
||||
allow: false
|
||||
|
||||
- name: user
|
||||
tables:
|
||||
- name: users
|
||||
query:
|
||||
filters: ["{ id: { _eq: $user_id } }"]
|
||||
|
||||
- name: products
|
||||
query:
|
||||
limit: 50
|
||||
filters: ["{ user_id: { eq: $user_id } }"]
|
||||
columns: ["id", "name", "description" ]
|
||||
disable_functions: false
|
||||
|
||||
insert:
|
||||
filters: ["{ user_id: { eq: $user_id } }"]
|
||||
columns: ["id", "name", "description" ]
|
||||
set:
|
||||
- created_at: "now"
|
||||
|
||||
update:
|
||||
filters: ["{ user_id: { eq: $user_id } }"]
|
||||
columns:
|
||||
- id
|
||||
- name
|
||||
set:
|
||||
- updated_at: "now"
|
||||
|
||||
delete:
|
||||
deny: true
|
||||
|
||||
- name: admin
|
||||
match: id = 1
|
||||
tables:
|
||||
- name: users
|
||||
# query:
|
||||
# filters: ["{ account_id: { _eq: $account_id } }"]
|
||||
|
||||
# - name: posts
|
||||
# filter: ["{ account_id: { _eq: $account_id } }"]
|
||||
```
|
||||
|
||||
If deploying into environments like Kubernetes, it's useful to be able to configure things like secrets and hosts through environment variables; therefore we expose the environment variables listed below. This is especially useful for secrets since they are usually injected in via a secrets management framework, i.e. Kubernetes Secrets.
|
||||
|
docs/package.json
@ -10,5 +10,10 @@
    "tailwindcss": "^1.0.6",
    "vuepress": "^1.0.0",
    "webpack-dev-middleware": "3.6.0"
  },
  "dependencies": {
    "@fortawesome/fontawesome-svg-core": "^1.2.25",
    "@fortawesome/free-solid-svg-icons": "^5.11.2",
    "@fortawesome/vue-fontawesome": "^0.1.7"
  }
}
858  docs/yarn.lock
@ -10,17 +10,17 @@
|
||||
"@babel/highlight" "^7.0.0"
|
||||
|
||||
"@babel/core@^7.0.0":
|
||||
version "7.6.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.6.2.tgz#069a776e8d5e9eefff76236bc8845566bd31dd91"
|
||||
integrity sha512-l8zto/fuoZIbncm+01p8zPSDZu/VuuJhAfA7d/AbzM09WR7iVhavvfNDYCNpo1VvLk6E6xgAoP9P+/EMJHuRkQ==
|
||||
version "7.6.4"
|
||||
resolved "https://registry.yarnpkg.com/@babel/core/-/core-7.6.4.tgz#6ebd9fe00925f6c3e177bb726a188b5f578088ff"
|
||||
integrity sha512-Rm0HGw101GY8FTzpWSyRbki/jzq+/PkNQJ+nSulrdY6gFGOsNseCqD6KHRYe2E+EdzuBdr2pxCp6s4Uk6eJ+XQ==
|
||||
dependencies:
|
||||
"@babel/code-frame" "^7.5.5"
|
||||
"@babel/generator" "^7.6.2"
|
||||
"@babel/generator" "^7.6.4"
|
||||
"@babel/helpers" "^7.6.2"
|
||||
"@babel/parser" "^7.6.2"
|
||||
"@babel/parser" "^7.6.4"
|
||||
"@babel/template" "^7.6.0"
|
||||
"@babel/traverse" "^7.6.2"
|
||||
"@babel/types" "^7.6.0"
|
||||
"@babel/traverse" "^7.6.3"
|
||||
"@babel/types" "^7.6.3"
|
||||
convert-source-map "^1.1.0"
|
||||
debug "^4.1.0"
|
||||
json5 "^2.1.0"
|
||||
@ -29,12 +29,12 @@
|
||||
semver "^5.4.1"
|
||||
source-map "^0.5.0"
|
||||
|
||||
"@babel/generator@^7.6.2":
|
||||
version "7.6.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.6.2.tgz#dac8a3c2df118334c2a29ff3446da1636a8f8c03"
|
||||
integrity sha512-j8iHaIW4gGPnViaIHI7e9t/Hl8qLjERI6DcV9kEpAIDJsAOrcnXqRS7t+QbhL76pwbtqP+QCQLL0z1CyVmtjjQ==
|
||||
"@babel/generator@^7.6.3", "@babel/generator@^7.6.4":
|
||||
version "7.6.4"
|
||||
resolved "https://registry.yarnpkg.com/@babel/generator/-/generator-7.6.4.tgz#a4f8437287bf9671b07f483b76e3bb731bc97671"
|
||||
integrity sha512-jsBuXkFoZxk0yWLyGI9llT9oiQ2FeTASmRFE32U+aaDTfoE92t78eroO7PTpU/OrYq38hlcDM6vbfLDaOLy+7w==
|
||||
dependencies:
|
||||
"@babel/types" "^7.6.0"
|
||||
"@babel/types" "^7.6.3"
|
||||
jsesc "^2.5.1"
|
||||
lodash "^4.17.13"
|
||||
source-map "^0.5.0"
|
||||
@ -224,10 +224,10 @@
|
||||
esutils "^2.0.2"
|
||||
js-tokens "^4.0.0"
|
||||
|
||||
"@babel/parser@^7.6.0", "@babel/parser@^7.6.2":
|
||||
version "7.6.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.6.2.tgz#205e9c95e16ba3b8b96090677a67c9d6075b70a1"
|
||||
integrity sha512-mdFqWrSPCmikBoaBYMuBulzTIKuXVPtEISFbRRVNwMWpCms/hmE2kRq0bblUHaNRKrjRlmVbx1sDHmjmRgD2Xg==
|
||||
"@babel/parser@^7.6.0", "@babel/parser@^7.6.3", "@babel/parser@^7.6.4":
|
||||
version "7.6.4"
|
||||
resolved "https://registry.yarnpkg.com/@babel/parser/-/parser-7.6.4.tgz#cb9b36a7482110282d5cb6dd424ec9262b473d81"
|
||||
integrity sha512-D8RHPW5qd0Vbyo3qb+YjO5nvUVRTXFLQ/FsDxJU2Nqz4uB5EnUN0ZQSEYpvTIbRuttig1XbHWU5oMeQwQSAA+A==
|
||||
|
||||
"@babel/plugin-proposal-async-generator-functions@^7.2.0":
|
||||
version "7.2.0"
|
||||
@ -361,9 +361,9 @@
|
||||
"@babel/helper-plugin-utils" "^7.0.0"
|
||||
|
||||
"@babel/plugin-transform-block-scoping@^7.3.4":
|
||||
version "7.6.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.6.2.tgz#96c33ab97a9ae500cc6f5b19e04a7e6553360a79"
|
||||
integrity sha512-zZT8ivau9LOQQaOGC7bQLQOT4XPkPXgN2ERfUgk1X8ql+mVkLc4E8eKk+FO3o0154kxzqenWCorfmEXpEZcrSQ==
|
||||
version "7.6.3"
|
||||
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.6.3.tgz#6e854e51fbbaa84351b15d4ddafe342f3a5d542a"
|
||||
integrity sha512-7hvrg75dubcO3ZI2rjYTzUrEuh1E9IyDEhhB6qfcooxhDA33xx2MasuLVgdxzcP6R/lipAC6n9ub9maNW6RKdw==
|
||||
dependencies:
|
||||
"@babel/helper-plugin-utils" "^7.0.0"
|
||||
lodash "^4.17.13"
|
||||
@ -479,9 +479,9 @@
|
||||
"@babel/helper-plugin-utils" "^7.0.0"
|
||||
|
||||
"@babel/plugin-transform-named-capturing-groups-regex@^7.3.0":
|
||||
version "7.6.2"
|
||||
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.6.2.tgz#c1ca0bb84b94f385ca302c3932e870b0fb0e522b"
|
||||
integrity sha512-xBdB+XOs+lgbZc2/4F5BVDVcDNS4tcSKQc96KmlqLEAwz6tpYPEvPdmDfvVG0Ssn8lAhronaRs6Z6KSexIpK5g==
|
||||
version "7.6.3"
|
||||
resolved "https://registry.yarnpkg.com/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.6.3.tgz#aaa6e409dd4fb2e50b6e2a91f7e3a3149dbce0cf"
|
||||
integrity sha512-jTkk7/uE6H2s5w6VlMHeWuH+Pcy2lmdwFoeWCVnvIrDUnB5gQqTVI8WfmEAhF2CDEarGrknZcmSFg1+bkfCoSw==
|
||||
dependencies:
|
||||
regexpu-core "^4.6.0"
|
||||
|
||||
The remaining yarn.lock hunks are routine dependency updates for the VuePress docs build. The notable changes:

- @babel/plugin-transform-named-capturing-groups-regex, @babel/runtime, @babel/runtime-corejs2, @babel/traverse and @babel/types all move to 7.6.3.
- New Font Awesome packages are added: @fortawesome/fontawesome-common-types 0.2.25, @fortawesome/fontawesome-svg-core 1.2.25, @fortawesome/free-solid-svg-icons 5.11.2 and @fortawesome/vue-fontawesome 0.1.7.
- @silvanite/vuepress-plugin-tailwind goes from 1.1.0 to 1.2.0 and now depends on @fullhuman/postcss-purgecss ^1.3.0 (1.3.0), lodash ^4.17.15 and tailwindcss ^1.1.2.
- The @vuepress packages (core, markdown, markdown-loader, plugin-active-header-links, plugin-last-updated, plugin-nprogress, plugin-register-components, plugin-search, shared-utils, theme-default) all move from 1.1.0 to 1.2.0; plugin-active-header-links swaps lodash.throttle for lodash.debounce, and core now pulls vue ^2.6.10, vue-loader ^15.7.1, vue-router ^3.1.3 and vuepress-plugin-container ^2.0.2.
- @vue/babel-preset-app 3.11.0 → 3.12.1, @vue/babel-preset-jsx 1.1.0 → 1.1.1, @vue/babel-sugar-v-model 1.0.0 → 1.1.1.
- autoprefixer 9.6.1 → 9.7.0, browserslist 4.7.0 → 4.7.2, caniuse-lite 1.0.30000997 → 1.0.30001006, electron-to-chromium 1.3.268 → 1.3.296, node-releases 1.1.32 → 1.1.39, postcss 7.0.18 → 7.0.21, postcss-value-parser ^4.0.2.
- csso 3.5.1 → 4.0.2 (now on css-tree 1.0.0-alpha.37), purgecss 1.4.0 → 1.4.1, reduce-css-calc 2.1.6 → 2.1.7.
- portfinder 1.0.24 → 1.0.25 (switching to async ^2.6.2 and debug ^3.1.1), selfsigned 1.10.6 → 1.10.7 (node-forge 0.9.0), and http-proxy-middleware is pinned to 0.19.1.
- Assorted minor bumps: algoliasearch 3.35.1, async 2.6.3, bluebird 3.7.1, commander 2.20.3, console-browserify 1.2.0, core-js 2.6.10, enhanced-resolve 4.1.1 (adds memory-fs ^0.5.0), es-abstract 1.16.0, figures 3.1.0, glob 7.1.5, graceful-fs 4.2.3, ignore-walk 3.0.3, json5 2.1.1, markdown-it-anchor 5.2.5, minipass 2.9.0, minizlib 1.3.3, npm-packlist 1.4.6, regjsgen 0.5.1, @types/node 12.12.3.
- Entries that are no longer needed are dropped: babel-extract-comments, babel-plugin-syntax-object-rest-spread, babel-plugin-transform-object-rest-spread, babel-runtime, babylon, comment-regex, css.escape, css-tree 1.0.0-alpha.29, date-now, defined, fs-extra@^4.0.2, gather-stream, has-flag@^1.0.0, js-base64, lodash.throttle, mdn-data ~1.1.0, perfectionist, postcss 5.2.18, postcss-scss, read-file-stdin, regenerator-runtime@^0.11.0.

@@ -6469,6 +6359,11 @@ slash@^2.0.0:
  resolved "https://registry.yarnpkg.com/slash/-/slash-2.0.0.tgz#de552851a1759df3a8f206535442f5ec4ddeab44"
  integrity sha512-ZYKh3Wh2z1PpEXWr0MpSBZ0V6mZHAQfYevttO11c51CaWjGTaadiKZ+wVt1PbMlDV5qhMFslpZCemhwOK7C89A==

smoothscroll-polyfill@^0.4.3:
|
||||
version "0.4.4"
|
||||
resolved "https://registry.yarnpkg.com/smoothscroll-polyfill/-/smoothscroll-polyfill-0.4.4.tgz#3a259131dc6930e6ca80003e1cb03b603b69abf8"
|
||||
integrity sha512-TK5ZA9U5RqCwMpfoMq/l1mrH0JAR7y7KRvOBx0n2869aLxch+gT9GhN3yUfjiw+d/DiF1mKo14+hd62JyMmoBg==
|
||||
|
||||
snapdragon-node@^2.0.1:
|
||||
version "2.1.1"
|
||||
resolved "https://registry.yarnpkg.com/snapdragon-node/-/snapdragon-node-2.1.1.tgz#6c175f86ff14bdb0724563e8f3c1b021a286853b"
|
||||
@ -6543,9 +6438,9 @@ source-map-resolve@^0.5.0, source-map-resolve@^0.5.2:
|
||||
urix "^0.1.0"
|
||||
|
||||
source-map-support@~0.5.12:
|
||||
version "0.5.13"
|
||||
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.13.tgz#31b24a9c2e73c2de85066c0feb7d44767ed52932"
|
||||
integrity sha512-SHSKFHadjVA5oR4PPqhtAVdcBWwRYVd6g6cAXnIbRiIwc2EhPrTuKUBdSLvlEKyIP3GCf89fltvcZiP9MMFA1w==
|
||||
version "0.5.16"
|
||||
resolved "https://registry.yarnpkg.com/source-map-support/-/source-map-support-0.5.16.tgz#0ae069e7fe3ba7538c64c98515e35339eac5a042"
|
||||
integrity sha512-efyLRJDr68D9hBBNIPWFjhpFzURh+KJykQwvMyW5UiZzYwoF6l4YMMDIJJEyFWxWCqfyxLzz6tSfUFR+kXXsVQ==
|
||||
dependencies:
|
||||
buffer-from "^1.0.0"
|
||||
source-map "^0.6.0"
|
||||
@ -6560,7 +6455,7 @@ source-map@0.5.6:
|
||||
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.6.tgz#75ce38f52bf0733c5a7f0c118d81334a2bb5f412"
|
||||
integrity sha1-dc449SvwczxafwwRjYEzSiu19BI=
|
||||
|
||||
source-map@^0.5.0, source-map@^0.5.3, source-map@^0.5.6:
|
||||
source-map@^0.5.0, source-map@^0.5.6:
|
||||
version "0.5.7"
|
||||
resolved "https://registry.yarnpkg.com/source-map/-/source-map-0.5.7.tgz#8a039d2d1021d22d1ea14c80d8ea468ba2ef3fcc"
|
||||
integrity sha1-igOdLRAh0i0eoUyA2OpGi6LvP8w=
|
||||
@ -6725,12 +6620,7 @@ string-width@^3.0.0, string-width@^3.1.0:
|
||||
is-fullwidth-code-point "^2.0.0"
|
||||
strip-ansi "^5.1.0"
|
||||
|
||||
string.prototype.repeat@^0.2.0:
|
||||
version "0.2.0"
|
||||
resolved "https://registry.yarnpkg.com/string.prototype.repeat/-/string.prototype.repeat-0.2.0.tgz#aba36de08dcee6a5a337d49b2ea1da1b28fc0ecf"
|
||||
integrity sha1-q6Nt4I3O5qWjN9SbLqHaGyj8Ds8=
|
||||
|
||||
string.prototype.trimleft@^2.0.0:
|
||||
string.prototype.trimleft@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/string.prototype.trimleft/-/string.prototype.trimleft-2.1.0.tgz#6cc47f0d7eb8d62b0f3701611715a3954591d634"
|
||||
integrity sha512-FJ6b7EgdKxxbDxc79cOlok6Afd++TTs5szo+zJTUyow3ycrRfJVE2pq3vcN53XexvKZu/DJMDfeI/qMiZTrjTw==
|
||||
@ -6738,7 +6628,7 @@ string.prototype.trimleft@^2.0.0:
|
||||
define-properties "^1.1.3"
|
||||
function-bind "^1.1.1"
|
||||
|
||||
string.prototype.trimright@^2.0.0:
|
||||
string.prototype.trimright@^2.1.0:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/string.prototype.trimright/-/string.prototype.trimright-2.1.0.tgz#669d164be9df9b6f7559fa8e89945b168a5a6c58"
|
||||
integrity sha512-fXZTSV55dNBwv16uw+hh5jkghxSnc5oHq+5K/gXgizHwAvMetdAJlHqqoFC1FSDVPYWLkAKl2cxpUT41sV7nSg==
|
||||
@ -6786,14 +6676,6 @@ strip-bom-string@^1.0.0:
|
||||
resolved "https://registry.yarnpkg.com/strip-bom-string/-/strip-bom-string-1.0.0.tgz#e5211e9224369fbb81d633a2f00044dc8cedad92"
|
||||
integrity sha1-5SEekiQ2n7uB1jOi8ABE3IztrZI=
|
||||
|
||||
strip-comments@^1.0.2:
|
||||
version "1.0.2"
|
||||
resolved "https://registry.yarnpkg.com/strip-comments/-/strip-comments-1.0.2.tgz#82b9c45e7f05873bee53f37168af930aa368679d"
|
||||
integrity sha512-kL97alc47hoyIQSV165tTt9rG5dn4w1dNnBhOQ3bOU1Nc1hel09jnXANaHJ7vzHLd4Ju8kseDGzlev96pghLFw==
|
||||
dependencies:
|
||||
babel-extract-comments "^1.0.0"
|
||||
babel-plugin-transform-object-rest-spread "^6.26.0"
|
||||
|
||||
strip-eof@^1.0.0:
|
||||
version "1.0.0"
|
||||
resolved "https://registry.yarnpkg.com/strip-eof/-/strip-eof-1.0.0.tgz#bb43ff5598a6eb05d89b59fcd129c983313606bf"
|
||||
@ -6841,13 +6723,6 @@ supports-color@^2.0.0:
|
||||
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-2.0.0.tgz#535d045ce6b6363fa40117084629995e9df324c7"
|
||||
integrity sha1-U10EXOa2Nj+kARcIRimZXp3zJMc=
|
||||
|
||||
supports-color@^3.2.3:
|
||||
version "3.2.3"
|
||||
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-3.2.3.tgz#65ac0504b3954171d8a64946b2ae3cbb8a5f54f6"
|
||||
integrity sha1-ZawFBLOVQXHYpklGsq48u4pfVPY=
|
||||
dependencies:
|
||||
has-flag "^1.0.0"
|
||||
|
||||
supports-color@^5.3.0, supports-color@^5.4.0:
|
||||
version "5.5.0"
|
||||
resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-5.5.0.tgz#e2e69a44ac8772f78a1ec0b35b689df6530efc8f"
|
||||
@ -6868,16 +6743,16 @@ svg-tags@^1.0.0:
|
||||
integrity sha1-WPcc7jvVGbWdSyqEO2x95krAR2Q=
|
||||
|
||||
svgo@^1.0.0:
|
||||
version "1.3.0"
|
||||
resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.0.tgz#bae51ba95ded9a33a36b7c46ce9c359ae9154313"
|
||||
integrity sha512-MLfUA6O+qauLDbym+mMZgtXCGRfIxyQoeH6IKVcFslyODEe/ElJNwr0FohQ3xG4C6HK6bk3KYPPXwHVJk3V5NQ==
|
||||
version "1.3.2"
|
||||
resolved "https://registry.yarnpkg.com/svgo/-/svgo-1.3.2.tgz#b6dc511c063346c9e415b81e43401145b96d4167"
|
||||
integrity sha512-yhy/sQYxR5BkC98CY7o31VGsg014AKLEPxdfhora76l36hD9Rdy5NZA/Ocn6yayNPgSamYdtX2rFJdcv07AYVw==
|
||||
dependencies:
|
||||
chalk "^2.4.1"
|
||||
coa "^2.0.2"
|
||||
css-select "^2.0.0"
|
||||
css-select-base-adapter "^0.1.1"
|
||||
css-tree "1.0.0-alpha.33"
|
||||
csso "^3.5.1"
|
||||
css-tree "1.0.0-alpha.37"
|
||||
csso "^4.0.2"
|
||||
js-yaml "^3.13.1"
|
||||
mkdirp "~0.5.1"
|
||||
object.values "^1.1.0"
|
||||
@ -6886,31 +6761,10 @@ svgo@^1.0.0:
|
||||
unquote "~1.1.1"
|
||||
util.promisify "~1.0.0"
|
||||
|
||||
tailwindcss@^0.7.4:
|
||||
version "0.7.4"
|
||||
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-0.7.4.tgz#fb7926821d42eacdc12e6621a49d21f37a3ff9e9"
|
||||
integrity sha512-+GeQjHRJ2VmeLkrNwMCbPDfm2cc5P8eoc7n+DtZfI8oQdlo5eSHqsIlPEuZOtoqQlIALsd2jAggWrUUBFGP2ow==
|
||||
dependencies:
|
||||
autoprefixer "^9.4.5"
|
||||
bytes "^3.0.0"
|
||||
chalk "^2.4.1"
|
||||
css.escape "^1.5.1"
|
||||
fs-extra "^4.0.2"
|
||||
lodash "^4.17.5"
|
||||
node-emoji "^1.8.1"
|
||||
perfectionist "^2.4.0"
|
||||
postcss "^7.0.11"
|
||||
postcss-functions "^3.0.0"
|
||||
postcss-js "^2.0.0"
|
||||
postcss-nested "^4.1.1"
|
||||
postcss-selector-parser "^5.0.0"
|
||||
pretty-hrtime "^1.0.3"
|
||||
strip-comments "^1.0.2"
|
||||
|
||||
tailwindcss@^1.0.6:
|
||||
version "1.1.2"
|
||||
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-1.1.2.tgz#0107dc092c3edee6132b105d896b109c0f66afd6"
|
||||
integrity sha512-mcTzZHXMipnQY9haB17baNJmBTkYYcC8ljfMdB9/97FfhKJIzlglJcyGythuQTOu7r/QIbLfZYYWZhAvaGj95A==
|
||||
tailwindcss@^1.0.6, tailwindcss@^1.1.2:
|
||||
version "1.1.3"
|
||||
resolved "https://registry.yarnpkg.com/tailwindcss/-/tailwindcss-1.1.3.tgz#ad154f78e1e44060e32e3ed44b27287c2be126a6"
|
||||
integrity sha512-8sa/QO+blnu3WXUylsgvYZlUbBpVH36QeGuZxgSGqp1dF3g4AGe1azt8FsO8i8Hfe9Oyvwhx3iSjRDak3nngeQ==
|
||||
dependencies:
|
||||
autoprefixer "^9.4.5"
|
||||
bytes "^3.0.0"
|
||||
@ -6961,9 +6815,9 @@ terser-webpack-plugin@^1.4.1:
|
||||
worker-farm "^1.7.0"
|
||||
|
||||
terser@^4.1.2:
|
||||
version "4.3.4"
|
||||
resolved "https://registry.yarnpkg.com/terser/-/terser-4.3.4.tgz#ad91bade95619e3434685d69efa621a5af5f877d"
|
||||
integrity sha512-Kcrn3RiW8NtHBP0ssOAzwa2MsIRQ8lJWiBG/K7JgqPlomA3mtb2DEmp4/hrUA+Jujx+WZ02zqd7GYD+QRBB/2Q==
|
||||
version "4.3.9"
|
||||
resolved "https://registry.yarnpkg.com/terser/-/terser-4.3.9.tgz#e4be37f80553d02645668727777687dad26bbca8"
|
||||
integrity sha512-NFGMpHjlzmyOtPL+fDw3G7+6Ueh/sz4mkaUYa4lJCxOPTNzd0Uj0aZJOmsDYoSQyfuVoWDMSWTPU3huyOm2zdA==
|
||||
dependencies:
|
||||
commander "^2.20.0"
|
||||
source-map "~0.6.1"
|
||||
@ -6988,9 +6842,9 @@ through@~2.3.4:
|
||||
integrity sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=
|
||||
|
||||
thunky@^1.0.2:
|
||||
version "1.0.3"
|
||||
resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.0.3.tgz#f5df732453407b09191dae73e2a8cc73f381a826"
|
||||
integrity sha512-YwT8pjmNcAXBZqrubu22P4FYsh2D4dxRmnWBOL8Jk8bUcRUtc5326kx32tuTmFDAZtLOGEVNl8POAR8j896Iow==
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/thunky/-/thunky-1.1.0.tgz#5abaf714a9405db0504732bbccd2cedd9ef9537d"
|
||||
integrity sha512-eHY7nBftgThBqOyHGVN+l8gF0BucP09fMo0oO/Lb0w1OF80dJv+lDVpXG60WMQvkcxAkNybKsrEIE3ZtKGmPrA==
|
||||
|
||||
timers-browserify@^2.0.4:
|
||||
version "2.0.11"
|
||||
@ -7328,7 +7182,7 @@ vue-hot-reload-api@^2.3.0:
|
||||
resolved "https://registry.yarnpkg.com/vue-hot-reload-api/-/vue-hot-reload-api-2.3.4.tgz#532955cc1eb208a3d990b3a9f9a70574657e08f2"
|
||||
integrity sha512-BXq3jwIagosjgNVae6tkHzzIk6a8MHFtzAdwhnV5VlvPTFxDCvIttgSiHWjdGoTJvXtmRu5HacExfdarRcFhog==
|
||||
|
||||
vue-loader@^15.2.4:
|
||||
vue-loader@^15.7.1:
|
||||
version "15.7.1"
|
||||
resolved "https://registry.yarnpkg.com/vue-loader/-/vue-loader-15.7.1.tgz#6ccacd4122aa80f69baaac08ff295a62e3aefcfd"
|
||||
integrity sha512-fwIKtA23Pl/rqfYP5TSGK7gkEuLhoTvRYW+TU7ER3q9GpNLt/PjG5NLv3XHRDiTg7OPM1JcckBgds+VnAc+HbA==
|
||||
@ -7339,12 +7193,12 @@ vue-loader@^15.2.4:
|
||||
vue-hot-reload-api "^2.3.0"
|
||||
vue-style-loader "^4.1.0"
|
||||
|
||||
vue-router@^3.0.2:
|
||||
vue-router@^3.1.3:
|
||||
version "3.1.3"
|
||||
resolved "https://registry.yarnpkg.com/vue-router/-/vue-router-3.1.3.tgz#e6b14fabc0c0ee9fda0e2cbbda74b350e28e412b"
|
||||
integrity sha512-8iSa4mGNXBjyuSZFCCO4fiKfvzqk+mhL0lnKuGcQtO1eoj8nq3CmbEG8FwK5QqoqwDgsjsf1GDuisDX4cdb/aQ==
|
||||
|
||||
vue-server-renderer@^2.5.16:
|
||||
vue-server-renderer@^2.6.10:
|
||||
version "2.6.10"
|
||||
resolved "https://registry.yarnpkg.com/vue-server-renderer/-/vue-server-renderer-2.6.10.tgz#cb2558842ead360ae2ec1f3719b75564a805b375"
|
||||
integrity sha512-UYoCEutBpKzL2fKCwx8zlRtRtwxbPZXKTqbl2iIF4yRZUNO/ovrHyDAJDljft0kd+K0tZhN53XRHkgvCZoIhug==
|
||||
@ -7366,7 +7220,7 @@ vue-style-loader@^4.1.0:
|
||||
hash-sum "^1.0.2"
|
||||
loader-utils "^1.0.2"
|
||||
|
||||
vue-template-compiler@^2.5.16:
|
||||
vue-template-compiler@^2.6.10:
|
||||
version "2.6.10"
|
||||
resolved "https://registry.yarnpkg.com/vue-template-compiler/-/vue-template-compiler-2.6.10.tgz#323b4f3495f04faa3503337a82f5d6507799c9cc"
|
||||
integrity sha512-jVZkw4/I/HT5ZMvRnhv78okGusqe0+qH2A0Em0Cp8aq78+NK9TII263CDVz2QXZsIT+yyV/gZc/j/vlwa+Epyg==
|
||||
@ -7379,7 +7233,7 @@ vue-template-es2015-compiler@^1.9.0:
|
||||
resolved "https://registry.yarnpkg.com/vue-template-es2015-compiler/-/vue-template-es2015-compiler-1.9.1.tgz#1ee3bc9a16ecbf5118be334bb15f9c46f82f5825"
|
||||
integrity sha512-4gDntzrifFnCEvyoO8PqyJDmguXgVPxKiIxrBKjIowvL9l+N66196+72XVYR8BBf1Uv1Fgt3bGevJ+sEmxfZzw==
|
||||
|
||||
vue@^2.5.16:
|
||||
vue@^2.6.10:
|
||||
version "2.6.10"
|
||||
resolved "https://registry.yarnpkg.com/vue/-/vue-2.6.10.tgz#a72b1a42a4d82a721ea438d1b6bf55e66195c637"
|
||||
integrity sha512-ImThpeNU9HbdZL3utgMCq0oiMzAkt1mcgy3/E6zWC/G6AaQoeuFdsl9nDhTDU3X1R6FK7nsIUuRACVcjI+A2GQ==
|
||||
@ -7397,20 +7251,27 @@ vuepress-html-webpack-plugin@^3.2.0:
|
||||
toposort "^1.0.0"
|
||||
util.promisify "1.0.0"
|
||||
|
||||
vuepress-plugin-container@^2.0.0:
|
||||
version "2.0.2"
|
||||
resolved "https://registry.yarnpkg.com/vuepress-plugin-container/-/vuepress-plugin-container-2.0.2.tgz#3489cc732c7a210b31f202556e1346125dffeb73"
|
||||
integrity sha512-SrGYYT7lkie7xlIlAVhn+9sDW42MytNCoxWL/2uDr+q9wZA4h1uYlQvfc2DVjy+FsM9PPPSslkeo/zCpYVY82g==
|
||||
vuepress-plugin-container@^2.0.2:
|
||||
version "2.1.0"
|
||||
resolved "https://registry.yarnpkg.com/vuepress-plugin-container/-/vuepress-plugin-container-2.1.0.tgz#eb2ba3e01cdac419bd678d40e05c934caffe6db0"
|
||||
integrity sha512-i4p7S1cqYUrg/3pt+xSghZtKSHVI3VXMQNept8ILxA+lMK1XJkdRkjNovZzwpXlrErQssvrUOTWBV0hdBv7eXQ==
|
||||
dependencies:
|
||||
markdown-it-container "^2.0.0"
|
||||
|
||||
vuepress@^1.0.0:
|
||||
version "1.1.0"
|
||||
resolved "https://registry.yarnpkg.com/vuepress/-/vuepress-1.1.0.tgz#ca0d787d93188b2fd05820a650d7e3643c9e7675"
|
||||
integrity sha512-LAgS9nXsmvjTuCc/LHPWnIsPOuVuZtxh1MjVZf/xJ3Yy5kXoPhqbGUptlQdQt3izjIlns9zin5K6MNBY3u5l5g==
|
||||
vuepress-plugin-smooth-scroll@^0.0.3:
|
||||
version "0.0.3"
|
||||
resolved "https://registry.yarnpkg.com/vuepress-plugin-smooth-scroll/-/vuepress-plugin-smooth-scroll-0.0.3.tgz#6eff2d4c186cca917cc9f7df2b0af7de7c8c6438"
|
||||
integrity sha512-qsQkDftLVFLe8BiviIHaLV0Ea38YLZKKonDGsNQy1IE0wllFpFIEldWD8frWZtDFdx6b/O3KDMgVQ0qp5NjJCg==
|
||||
dependencies:
|
||||
"@vuepress/core" "^1.1.0"
|
||||
"@vuepress/theme-default" "^1.1.0"
|
||||
smoothscroll-polyfill "^0.4.3"
|
||||
|
||||
vuepress@^1.0.0:
|
||||
version "1.2.0"
|
||||
resolved "https://registry.yarnpkg.com/vuepress/-/vuepress-1.2.0.tgz#2f2cdf337ad40a3e4866dfd33e97b840db386af7"
|
||||
integrity sha512-EfHo8Cc73qo+1Pm18hM0qOGynmDr8q5fu2664obynsdCJ1zpvoShVnA0Msraw4SI2xDc0iAoIb3dTwxUIM8DAw==
|
||||
dependencies:
|
||||
"@vuepress/core" "^1.2.0"
|
||||
"@vuepress/theme-default" "^1.2.0"
|
||||
cac "^6.3.9"
|
||||
envinfo "^7.2.0"
|
||||
opencollective-postinstall "^2.0.2"
|
||||
@ -7449,7 +7310,7 @@ webpack-dev-middleware@3.6.0:
|
||||
range-parser "^1.0.3"
|
||||
webpack-log "^2.0.0"
|
||||
|
||||
webpack-dev-middleware@^3.7.1:
|
||||
webpack-dev-middleware@^3.7.2:
|
||||
version "3.7.2"
|
||||
resolved "https://registry.yarnpkg.com/webpack-dev-middleware/-/webpack-dev-middleware-3.7.2.tgz#0019c3db716e3fa5cecbf64f2ab88a74bab331f3"
|
||||
integrity sha512-1xC42LxbYoqLNAhV6YzTYacicgMZQTqRd27Sim9wn5hJrX3I5nxYy1SxSd4+gjUFsz1dQFj+yEe6zEVmSkeJjw==
|
||||
@ -7461,9 +7322,9 @@ webpack-dev-middleware@^3.7.1:
|
||||
webpack-log "^2.0.0"
|
||||
|
||||
webpack-dev-server@^3.5.1:
|
||||
version "3.8.1"
|
||||
resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.8.1.tgz#485b64c4aadc23f601e72114b40c1b1fea31d9f1"
|
||||
integrity sha512-9F5DnfFA9bsrhpUCAfQic/AXBVHvq+3gQS+x6Zj0yc1fVVE0erKh2MV4IV12TBewuTrYeeTIRwCH9qLMvdNvTw==
|
||||
version "3.9.0"
|
||||
resolved "https://registry.yarnpkg.com/webpack-dev-server/-/webpack-dev-server-3.9.0.tgz#27c3b5d0f6b6677c4304465ac817623c8b27b89c"
|
||||
integrity sha512-E6uQ4kRrTX9URN9s/lIbqTAztwEPdvzVrcmHE8EQ9YnuT9J8Es5Wrd8n9BKg1a0oZ5EgEke/EQFgUsp18dSTBw==
|
||||
dependencies:
|
||||
ansi-html "0.0.7"
|
||||
bonjour "^3.5.0"
|
||||
@ -7474,18 +7335,18 @@ webpack-dev-server@^3.5.1:
|
||||
del "^4.1.1"
|
||||
express "^4.17.1"
|
||||
html-entities "^1.2.1"
|
||||
http-proxy-middleware "^0.19.1"
|
||||
http-proxy-middleware "0.19.1"
|
||||
import-local "^2.0.0"
|
||||
internal-ip "^4.3.0"
|
||||
ip "^1.1.5"
|
||||
is-absolute-url "^3.0.2"
|
||||
is-absolute-url "^3.0.3"
|
||||
killable "^1.0.1"
|
||||
loglevel "^1.6.4"
|
||||
opn "^5.5.0"
|
||||
p-retry "^3.0.1"
|
||||
portfinder "^1.0.24"
|
||||
portfinder "^1.0.25"
|
||||
schema-utils "^1.0.0"
|
||||
selfsigned "^1.10.6"
|
||||
selfsigned "^1.10.7"
|
||||
semver "^6.3.0"
|
||||
serve-index "^1.9.1"
|
||||
sockjs "0.3.19"
|
||||
@ -7494,7 +7355,7 @@ webpack-dev-server@^3.5.1:
|
||||
strip-ansi "^3.0.1"
|
||||
supports-color "^6.1.0"
|
||||
url "^0.11.0"
|
||||
webpack-dev-middleware "^3.7.1"
|
||||
webpack-dev-middleware "^3.7.2"
|
||||
webpack-log "^2.0.0"
|
||||
ws "^6.2.1"
|
||||
yargs "12.0.5"
|
||||
@ -7523,9 +7384,9 @@ webpack-sources@^1.1.0, webpack-sources@^1.4.0, webpack-sources@^1.4.1:
|
||||
source-map "~0.6.1"
|
||||
|
||||
webpack@^4.8.1:
|
||||
version "4.41.0"
|
||||
resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.0.tgz#db6a254bde671769f7c14e90a1a55e73602fc70b"
|
||||
integrity sha512-yNV98U4r7wX1VJAj5kyMsu36T8RPPQntcb5fJLOsMz/pt/WrKC0Vp1bAlqPLkA1LegSwQwf6P+kAbyhRKVQ72g==
|
||||
version "4.41.2"
|
||||
resolved "https://registry.yarnpkg.com/webpack/-/webpack-4.41.2.tgz#c34ec76daa3a8468c9b61a50336d8e3303dce74e"
|
||||
integrity sha512-Zhw69edTGfbz9/8JJoyRQ/pq8FYUoY0diOXqW0T6yhgdhCv6wr0hra5DwwWexNRns2Z2+gsnrNcbe9hbGBgk/A==
|
||||
dependencies:
|
||||
"@webassemblyjs/ast" "1.8.5"
|
||||
"@webassemblyjs/helper-module-context" "1.8.5"
|
||||
@ -7632,11 +7493,6 @@ wrappy@1:
|
||||
resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
|
||||
integrity sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=
|
||||
|
||||
write-file-stdout@0.0.2:
|
||||
version "0.0.2"
|
||||
resolved "https://registry.yarnpkg.com/write-file-stdout/-/write-file-stdout-0.0.2.tgz#c252d7c7c5b1b402897630e3453c7bfe690d9ca1"
|
||||
integrity sha1-wlLXx8WxtAKJdjDjRTx7/mkNnKE=
|
||||
|
||||
ws@^6.2.1:
|
||||
version "6.2.1"
|
||||
resolved "https://registry.yarnpkg.com/ws/-/ws-6.2.1.tgz#442fdf0a47ed64f59b6a5d8ff130f4748ed524fb"
|
||||
@ -7660,9 +7516,9 @@ yallist@^2.1.2:
|
||||
integrity sha1-HBH5IY8HYImkfdUS+TxmmaaoHVI=
|
||||
|
||||
yallist@^3.0.0, yallist@^3.0.2, yallist@^3.0.3:
|
||||
version "3.1.0"
|
||||
resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.0.tgz#906cc2100972dc2625ae78f566a2577230a1d6f7"
|
||||
integrity sha512-6gpP93MR+VOOehKbCPchro3wFZNSNmek8A2kbkOAZLIZAYx1KP/zAqwO0sOHi3xJEb+UBz8NaYt/17UNit1Q9w==
|
||||
version "3.1.1"
|
||||
resolved "https://registry.yarnpkg.com/yallist/-/yallist-3.1.1.tgz#dbb7daf9bfd8bac9ab45ebf602b8cbad0d5d08fd"
|
||||
integrity sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==
|
||||
|
||||
yargs-parser@^11.1.1:
|
||||
version "11.1.1"
|
||||
@ -7672,10 +7528,10 @@ yargs-parser@^11.1.1:
|
||||
camelcase "^5.0.0"
|
||||
decamelize "^1.2.0"
|
||||
|
||||
yargs-parser@^13.1.1:
|
||||
version "13.1.1"
|
||||
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-13.1.1.tgz#d26058532aa06d365fe091f6a1fc06b2f7e5eca0"
|
||||
integrity sha512-oVAVsHz6uFrg3XQheFII8ESO2ssAf9luWuAd6Wexsu4F3OtIW0o8IribPXYrD4WC24LWtPrJlGy87y5udK+dxQ==
|
||||
yargs-parser@^15.0.0:
|
||||
version "15.0.0"
|
||||
resolved "https://registry.yarnpkg.com/yargs-parser/-/yargs-parser-15.0.0.tgz#cdd7a97490ec836195f59f3f4dbe5ea9e8f75f08"
|
||||
integrity sha512-xLTUnCMc4JhxrPEPUYD5IBR1mWCK/aT6+RJ/K29JY2y1vD+FhtgKK0AXRWvI262q3QSffAQuTouFIKUuHX89wQ==
|
||||
dependencies:
|
||||
camelcase "^5.0.0"
|
||||
decamelize "^1.2.0"
|
||||
@ -7699,9 +7555,9 @@ yargs@12.0.5:
|
||||
yargs-parser "^11.1.1"
|
||||
|
||||
yargs@^14.0.0:
|
||||
version "14.0.0"
|
||||
resolved "https://registry.yarnpkg.com/yargs/-/yargs-14.0.0.tgz#ba4cacc802b3c0b3e36a9e791723763d57a85066"
|
||||
integrity sha512-ssa5JuRjMeZEUjg7bEL99AwpitxU/zWGAGpdj0di41pOEmJti8NR6kyUIJBkR78DTYNPZOU08luUo0GTHuB+ow==
|
||||
version "14.2.0"
|
||||
resolved "https://registry.yarnpkg.com/yargs/-/yargs-14.2.0.tgz#f116a9242c4ed8668790b40759b4906c276e76c3"
|
||||
integrity sha512-/is78VKbKs70bVZH7w4YaZea6xcJWOAwkhbR0CFuZBmYtfTYF0xjGJF43AYd8g2Uii1yJwmS5GR2vBmrc32sbg==
|
||||
dependencies:
|
||||
cliui "^5.0.0"
|
||||
decamelize "^1.2.0"
|
||||
@ -7713,7 +7569,7 @@ yargs@^14.0.0:
|
||||
string-width "^3.0.0"
|
||||
which-module "^2.0.0"
|
||||
y18n "^4.0.0"
|
||||
yargs-parser "^13.1.1"
|
||||
yargs-parser "^15.0.0"
|
||||
|
||||
zepto@^1.2.0:
|
||||
version "1.2.0"
|
||||
|
@ -0,0 +1,273 @@
|
||||
GIT
|
||||
remote: https://github.com/stympy/faker.git
|
||||
revision: 4e9144825fcc9ba5c83cc0fd037779ab82f3120b
|
||||
branch: master
|
||||
specs:
|
||||
faker (2.6.0)
|
||||
i18n (>= 1.6, < 1.8)
|
||||
|
||||
GEM
|
||||
remote: https://rubygems.org/
|
||||
specs:
|
||||
actioncable (6.0.0)
|
||||
actionpack (= 6.0.0)
|
||||
nio4r (~> 2.0)
|
||||
websocket-driver (>= 0.6.1)
|
||||
actionmailbox (6.0.0)
|
||||
actionpack (= 6.0.0)
|
||||
activejob (= 6.0.0)
|
||||
activerecord (= 6.0.0)
|
||||
activestorage (= 6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
mail (>= 2.7.1)
|
||||
actionmailer (6.0.0)
|
||||
actionpack (= 6.0.0)
|
||||
actionview (= 6.0.0)
|
||||
activejob (= 6.0.0)
|
||||
mail (~> 2.5, >= 2.5.4)
|
||||
rails-dom-testing (~> 2.0)
|
||||
actionpack (6.0.0)
|
||||
actionview (= 6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
rack (~> 2.0)
|
||||
rack-test (>= 0.6.3)
|
||||
rails-dom-testing (~> 2.0)
|
||||
rails-html-sanitizer (~> 1.0, >= 1.2.0)
|
||||
actiontext (6.0.0)
|
||||
actionpack (= 6.0.0)
|
||||
activerecord (= 6.0.0)
|
||||
activestorage (= 6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
nokogiri (>= 1.8.5)
|
||||
actionview (6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
builder (~> 3.1)
|
||||
erubi (~> 1.4)
|
||||
rails-dom-testing (~> 2.0)
|
||||
rails-html-sanitizer (~> 1.1, >= 1.2.0)
|
||||
activejob (6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
globalid (>= 0.3.6)
|
||||
activemodel (6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
activerecord (6.0.0)
|
||||
activemodel (= 6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
activestorage (6.0.0)
|
||||
actionpack (= 6.0.0)
|
||||
activejob (= 6.0.0)
|
||||
activerecord (= 6.0.0)
|
||||
marcel (~> 0.3.1)
|
||||
activesupport (6.0.0)
|
||||
concurrent-ruby (~> 1.0, >= 1.0.2)
|
||||
i18n (>= 0.7, < 2)
|
||||
minitest (~> 5.1)
|
||||
tzinfo (~> 1.1)
|
||||
zeitwerk (~> 2.1, >= 2.1.8)
|
||||
addressable (2.7.0)
|
||||
public_suffix (>= 2.0.2, < 5.0)
|
||||
archive-zip (0.12.0)
|
||||
io-like (~> 0.3.0)
|
||||
bcrypt (3.1.13)
|
||||
bindex (0.8.1)
|
||||
bootsnap (1.4.5)
|
||||
msgpack (~> 1.0)
|
||||
builder (3.2.3)
|
||||
byebug (11.0.1)
|
||||
capybara (3.29.0)
|
||||
addressable
|
||||
mini_mime (>= 0.1.3)
|
||||
nokogiri (~> 1.8)
|
||||
rack (>= 1.6.0)
|
||||
rack-test (>= 0.6.3)
|
||||
regexp_parser (~> 1.5)
|
||||
xpath (~> 3.2)
|
||||
childprocess (3.0.0)
|
||||
chromedriver-helper (2.1.1)
|
||||
archive-zip (~> 0.10)
|
||||
nokogiri (~> 1.8)
|
||||
coffee-rails (4.2.2)
|
||||
coffee-script (>= 2.2.0)
|
||||
railties (>= 4.0.0)
|
||||
coffee-script (2.4.1)
|
||||
coffee-script-source
|
||||
execjs
|
||||
coffee-script-source (1.12.2)
|
||||
concurrent-ruby (1.1.5)
|
||||
crass (1.0.4)
|
||||
devise (4.7.1)
|
||||
bcrypt (~> 3.0)
|
||||
orm_adapter (~> 0.1)
|
||||
railties (>= 4.1.0)
|
||||
responders
|
||||
warden (~> 1.2.3)
|
||||
erubi (1.9.0)
|
||||
execjs (2.7.0)
|
||||
ffi (1.11.1)
|
||||
globalid (0.4.2)
|
||||
activesupport (>= 4.2.0)
|
||||
i18n (1.7.0)
|
||||
concurrent-ruby (~> 1.0)
|
||||
io-like (0.3.0)
|
||||
jbuilder (2.9.1)
|
||||
activesupport (>= 4.2.0)
|
||||
listen (3.1.5)
|
||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||
rb-inotify (~> 0.9, >= 0.9.7)
|
||||
ruby_dep (~> 1.2)
|
||||
loofah (2.3.0)
|
||||
crass (~> 1.0.2)
|
||||
nokogiri (>= 1.5.9)
|
||||
mail (2.7.1)
|
||||
mini_mime (>= 0.1.1)
|
||||
marcel (0.3.3)
|
||||
mimemagic (~> 0.3.2)
|
||||
method_source (0.9.2)
|
||||
mimemagic (0.3.3)
|
||||
mini_mime (1.0.2)
|
||||
mini_portile2 (2.4.0)
|
||||
minitest (5.12.2)
|
||||
msgpack (1.3.1)
|
||||
nio4r (2.5.2)
|
||||
nokogiri (1.10.4)
|
||||
mini_portile2 (~> 2.4.0)
|
||||
orm_adapter (0.5.0)
|
||||
pg (1.1.4)
|
||||
public_suffix (4.0.1)
|
||||
puma (3.12.1)
|
||||
rack (2.0.7)
|
||||
rack-test (1.1.0)
|
||||
rack (>= 1.0, < 3)
|
||||
rails (6.0.0)
|
||||
actioncable (= 6.0.0)
|
||||
actionmailbox (= 6.0.0)
|
||||
actionmailer (= 6.0.0)
|
||||
actionpack (= 6.0.0)
|
||||
actiontext (= 6.0.0)
|
||||
actionview (= 6.0.0)
|
||||
activejob (= 6.0.0)
|
||||
activemodel (= 6.0.0)
|
||||
activerecord (= 6.0.0)
|
||||
activestorage (= 6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
bundler (>= 1.3.0)
|
||||
railties (= 6.0.0)
|
||||
sprockets-rails (>= 2.0.0)
|
||||
rails-dom-testing (2.0.3)
|
||||
activesupport (>= 4.2.0)
|
||||
nokogiri (>= 1.6)
|
||||
rails-html-sanitizer (1.3.0)
|
||||
loofah (~> 2.3)
|
||||
railties (6.0.0)
|
||||
actionpack (= 6.0.0)
|
||||
activesupport (= 6.0.0)
|
||||
method_source
|
||||
rake (>= 0.8.7)
|
||||
thor (>= 0.20.3, < 2.0)
|
||||
rake (13.0.0)
|
||||
rb-fsevent (0.10.3)
|
||||
rb-inotify (0.10.0)
|
||||
ffi (~> 1.0)
|
||||
redis (4.1.3)
|
||||
redis-actionpack (5.1.0)
|
||||
actionpack (>= 4.0, < 7)
|
||||
redis-rack (>= 1, < 3)
|
||||
redis-store (>= 1.1.0, < 2)
|
||||
redis-activesupport (5.2.0)
|
||||
activesupport (>= 3, < 7)
|
||||
redis-store (>= 1.3, < 2)
|
||||
redis-rack (2.0.6)
|
||||
rack (>= 1.5, < 3)
|
||||
redis-store (>= 1.2, < 2)
|
||||
redis-rails (5.0.2)
|
||||
redis-actionpack (>= 5.0, < 6)
|
||||
redis-activesupport (>= 5.0, < 6)
|
||||
redis-store (>= 1.2, < 2)
|
||||
redis-store (1.8.0)
|
||||
redis (>= 4, < 5)
|
||||
regexp_parser (1.6.0)
|
||||
responders (3.0.0)
|
||||
actionpack (>= 5.0)
|
||||
railties (>= 5.0)
|
||||
ruby_dep (1.5.0)
|
||||
rubyzip (2.0.0)
|
||||
sass (3.7.4)
|
||||
sass-listen (~> 4.0.0)
|
||||
sass-listen (4.0.0)
|
||||
rb-fsevent (~> 0.9, >= 0.9.4)
|
||||
rb-inotify (~> 0.9, >= 0.9.7)
|
||||
sass-rails (5.1.0)
|
||||
railties (>= 5.2.0)
|
||||
sass (~> 3.1)
|
||||
sprockets (>= 2.8, < 4.0)
|
||||
sprockets-rails (>= 2.0, < 4.0)
|
||||
tilt (>= 1.1, < 3)
|
||||
selenium-webdriver (3.142.6)
|
||||
childprocess (>= 0.5, < 4.0)
|
||||
rubyzip (>= 1.2.2)
|
||||
spring (2.1.0)
|
||||
spring-watcher-listen (2.0.1)
|
||||
listen (>= 2.7, < 4.0)
|
||||
spring (>= 1.2, < 3.0)
|
||||
sprockets (3.7.2)
|
||||
concurrent-ruby (~> 1.0)
|
||||
rack (> 1, < 3)
|
||||
sprockets-rails (3.2.1)
|
||||
actionpack (>= 4.0)
|
||||
activesupport (>= 4.0)
|
||||
sprockets (>= 3.0.0)
|
||||
thor (0.20.3)
|
||||
thread_safe (0.3.6)
|
||||
tilt (2.0.10)
|
||||
turbolinks (5.2.1)
|
||||
turbolinks-source (~> 5.2)
|
||||
turbolinks-source (5.2.0)
|
||||
tzinfo (1.2.5)
|
||||
thread_safe (~> 0.1)
|
||||
uglifier (4.2.0)
|
||||
execjs (>= 0.3.0, < 3)
|
||||
warden (1.2.8)
|
||||
rack (>= 2.0.6)
|
||||
web-console (4.0.1)
|
||||
actionview (>= 6.0.0)
|
||||
activemodel (>= 6.0.0)
|
||||
bindex (>= 0.4.0)
|
||||
railties (>= 6.0.0)
|
||||
websocket-driver (0.7.1)
|
||||
websocket-extensions (>= 0.1.0)
|
||||
websocket-extensions (0.1.4)
|
||||
xpath (3.2.0)
|
||||
nokogiri (~> 1.8)
|
||||
zeitwerk (2.2.0)
|
||||
|
||||
PLATFORMS
|
||||
ruby
|
||||
|
||||
DEPENDENCIES
|
||||
bootsnap (>= 1.1.0)
|
||||
byebug
|
||||
capybara (>= 2.15)
|
||||
chromedriver-helper
|
||||
coffee-rails (~> 4.2)
|
||||
devise
|
||||
faker!
|
||||
jbuilder (~> 2.5)
|
||||
listen (>= 3.0.5, < 3.2)
|
||||
pg (>= 0.18, < 2.0)
|
||||
puma (~> 3.11)
|
||||
rails (~> 6.0.0.rc1)
|
||||
redis-rails
|
||||
sass-rails (~> 5.0)
|
||||
selenium-webdriver
|
||||
spring
|
||||
spring-watcher-listen (~> 2.0.0)
|
||||
turbolinks (~> 5)
|
||||
tzinfo-data
|
||||
uglifier (>= 1.3.0)
|
||||
web-console (>= 3.3.0)
|
||||
|
||||
RUBY VERSION
|
||||
ruby 2.5.7p206
|
||||
|
||||
BUNDLED WITH
|
||||
1.17.3
|
||||
|
1
go.mod
@ -12,6 +12,7 @@ require (
	github.com/dgrijalva/jwt-go v3.2.0+incompatible
	github.com/dlclark/regexp2 v1.2.0 // indirect
	github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733
	github.com/dvyukov/go-fuzz v0.0.0-20191022152526-8cb203812681 // indirect
	github.com/fsnotify/fsnotify v1.4.7
	github.com/garyburd/redigo v1.6.0
	github.com/go-sourcemap/sourcemap v2.1.2+incompatible // indirect
2
go.sum
@ -57,6 +57,8 @@ github.com/dlclark/regexp2 v1.2.0 h1:8sAhBGEM0dRWogWqWyQeIJnxjWO6oIjl8FKqREDsGfk
github.com/dlclark/regexp2 v1.2.0/go.mod h1:2pZnwuY/m+8K6iRw6wQdMtk+rH5tNGR1i55kozfMjCc=
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733 h1:cyNc40Dx5YNEO94idePU8rhVd3dn+sd04Arh0kDBAaw=
github.com/dop251/goja v0.0.0-20190912223329-aa89e6a4c733/go.mod h1:Mw6PkjjMXWbTj+nnj4s3QPXq1jaT0s5pC0iFD4+BOAA=
github.com/dvyukov/go-fuzz v0.0.0-20191022152526-8cb203812681 h1:3WV5aRRj1ELP3RcLlBp/v0WJTuy47OQMkL9GIQq8QEE=
github.com/dvyukov/go-fuzz v0.0.0-20191022152526-8cb203812681/go.mod h1:11Gm+ccJnvAhCNLlf5+cS9KjtbaD5I5zaZpFMsTHWTw=
github.com/fsnotify/fsnotify v1.4.7 h1:IXs+QLmnXW2CcXuY+8Mzv/fWEsPGWxqefPtCP5CnV9I=
github.com/fsnotify/fsnotify v1.4.7/go.mod h1:jwhsz4b93w/PPRr/qN1Yymfu8t87LnFCMoQvtojpjFo=
github.com/garyburd/redigo v1.6.0 h1:0VruCpn7yAIIu7pWVClQC8wxCJEcG3nyzpMSHKi1PQc=
@ -97,8 +97,7 @@ func Filter(w *bytes.Buffer, b []byte, keys []string) error {
		case state == expectNumClose &&
			((b[i] < '0' || b[i] > '9') &&
				(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
			i--
			e = i
			e = i - 1

		case state == expectValue &&
			(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):
36
jsn/fuzz.go
@ -1,35 +1,11 @@
// +build gofuzz

package jsn

import "bytes"

// FuzzerEntrypoint for Fuzzbuzz
func FuzzerEntryPoint(data []byte) int {
	err1 := Validate(string(data))

	var b1 bytes.Buffer
	err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})

	path1 := [][]byte{[]byte("data"), []byte("users")}
	Strip(data, path1)

	from := []Field{
		{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
		{[]byte("__twitter_id"), []byte(`"ABC123"`)},
func Fuzz(data []byte) int {
	if err := unifiedTest(data); err != nil {
		return 0
	}

	to := []Field{
		{[]byte("__twitter_id"), []byte(`"1234567890"`)},
		{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
	}

	var b2 bytes.Buffer
	err3 := Replace(&b2, data, from, to)

	Keys(data)

	if err1 != nil || err2 != nil || err3 != nil {
		return -1
	}

	return 0
	return 1
}
60
jsn/fuzz_test.go
Normal file
@ -0,0 +1,60 @@
package jsn

import (
	"testing"
)

func TestFuzzCrashers(t *testing.T) {
	var crashers = []string{
		"00\"0000\"0{",
		"6\",\n\t\t\t\"something\": " +
			"null\n\t\t},\n\t\t{\n\t\t\t\"id" +
			"\": 12,\n\t\t\t\"full_name" +
			"\": \"Brenton Bauch Ph" +
			"D\",\n\t\t\t\"email\": \"ren" +
			"ee@miller.co\",\n\t\t\t\"_" +
			"_twitter_id\": 1\n\t\t}," +
			"\n\t\t{\n\t\t\t\"id\": 13,\n\t\t" +
			"\t\"full_name\": \"Daine" +
			" Gleichner\",\n\t\t\t\"ema" +
			"il\": \"andrea@gmail.c" +
			"om\",\n\t\t\t\"__twitter_i" +
			"d\": \"\",\n\t\t\t\"id__twit" +
			"ter_id\": \"NOOO\",\n\t\t\t" +
			"\"work_email\": \"andre" +
			"a@nienow.co\"\n\t\t}\n\t]}" +
			"\n\t}",
		"0000\"0000\"0{",
		"0000\"\"{",
		"0000\"000\"{",
		"0\"\"{",
		"\"0\"{",
		"000\"0\"{",
		"0\"0000\"0{",
		"000\"\"{",
		"0\"00\"{",
		"000\"0000\"0{",
		"000\"00\"{",
		"\"\"{",
		"0\"0000\"{",
		"\"000\"00{",
		"0000\"00\"{",
		"00\"0\"{",
		"0\"0\"{",
		"000\"0000\"{",
		"00\"0000\"{",
		"0000\"0000\"{",
		"\"000\"{",
		"00\"00\"{",
		"00\"0000\"00{",
		"0\"0000\"00{",
		"00\"\"{",
		"0000\"0\"{",
		"000\"000\"{",
		"\"00000000\"{",
	}

	for _, f := range crashers {
		unifiedTest([]byte(f))
	}
}
@ -105,8 +105,7 @@ func Get(b []byte, keys [][]byte) []Field {
		case state == expectNumClose &&
			((b[i] < '0' || b[i] > '9') &&
				(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
			i--
			e = i
			e = i - 1

		case state == expectValue &&
			(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):
@ -12,6 +12,7 @@ func Keys(b []byte) [][]byte {
	ae := 0

	for i := 0; i < len(b); i++ {

		if state == expectObjClose || state == expectListClose {
			switch b[i] {
			case '{', '[':
@ -88,8 +89,7 @@ func Keys(b []byte) [][]byte {
		case state == expectNumClose &&
			((b[i] < '0' || b[i] > '9') &&
				(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
			i--
			e = i
			e = i - 1

		case state == expectValue &&
			(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):
@ -86,8 +86,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {
		case state == expectNumClose &&
			((b[i] < '0' || b[i] > '9') &&
				(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
			i--
			e = i
			e = i - 1

		case state == expectValue &&
			(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):
@ -156,7 +155,7 @@ func Replace(w *bytes.Buffer, b []byte, from, to []Field) error {

	if ws == -1 || (ws == 0 && we == len(b)) {
		w.Write(b)
	} else {
	} else if ws < we {
		w.Write(b[ws:we])
	}
@ -70,8 +70,7 @@ func Strip(b []byte, path [][]byte) []byte {
		case state == expectNumClose &&
			((b[i] < '0' || b[i] > '9') &&
				(b[i] != '.' && b[i] != 'e' && b[i] != 'E' && b[i] != '+' && b[i] != '-')):
			i--
			e = i
			e = i - 1

		case state == expectValue &&
			(b[i] == 'f' || b[i] == 'F' || b[i] == 't' || b[i] == 'T'):
37
jsn/test.go
Normal file
@ -0,0 +1,37 @@
package jsn

import (
	"bytes"
	"errors"
)

func unifiedTest(data []byte) error {
	err1 := Validate(string(data))

	var b1 bytes.Buffer
	err2 := Filter(&b1, data, []string{"id", "full_name", "embed"})

	path1 := [][]byte{[]byte("data"), []byte("users")}
	Strip(data, path1)

	from := []Field{
		{[]byte("__twitter_id"), []byte(`[{ "name": "hello" }, { "name": "world"}]`)},
		{[]byte("__twitter_id"), []byte(`"ABC123"`)},
	}

	to := []Field{
		{[]byte("__twitter_id"), []byte(`"1234567890"`)},
		{[]byte("some_list"), []byte(`[{"id":1,"embed":{"id":8}},{"id":2},{"id":3},{"id":4},{"id":5},{"id":6},{"id":7},{"id":8},{"id":9},{"id":10},{"id":11},{"id":12},{"id":13}]`)},
	}

	var b2 bytes.Buffer
	err3 := Replace(&b2, data, from, to)

	Keys(data)

	if err1 != nil || err2 != nil || err3 != nil {
		return errors.New("there was an error")
	}

	return nil
}
249
psql/insert.go
249
psql/insert.go
@ -1,249 +0,0 @@
|
||||
package psql
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
"github.com/dosco/super-graph/qcode"
|
||||
)
|
||||
|
||||
var zeroPaging = qcode.Paging{}
|
||||
|
||||
func (co *Compiler) compileMutation(qc *qcode.QCode, w *bytes.Buffer, vars Variables) (uint32, error) {
|
||||
if len(qc.Selects) == 0 {
|
||||
return 0, errors.New("empty query")
|
||||
}
|
||||
|
||||
c := &compilerContext{w, qc.Selects, co}
|
||||
root := &qc.Selects[0]
|
||||
|
||||
ti, err := c.schema.GetTable(root.Table)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
c.w.WriteString(`WITH `)
|
||||
quoted(c.w, ti.Name)
|
||||
c.w.WriteString(` AS `)
|
||||
|
||||
switch root.Action {
|
||||
case qcode.ActionInsert:
|
||||
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
case qcode.ActionUpdate:
|
||||
if _, err := c.renderUpdate(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
case qcode.ActionUpsert:
|
||||
if _, err := c.renderUpsert(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
case qcode.ActionDelete:
|
||||
if _, err := c.renderDelete(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
default:
|
||||
return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
|
||||
}
|
||||
|
||||
io.WriteString(c.w, ` RETURNING *) `)
|
||||
|
||||
root.Paging = zeroPaging
|
||||
root.DistinctOn = root.DistinctOn[:]
|
||||
root.OrderBy = root.OrderBy[:]
|
||||
root.Where = nil
|
||||
root.Args = nil
|
||||
|
||||
return c.compileQuery(qc, w)
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderInsert(qc *qcode.QCode, w *bytes.Buffer,
|
||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||
root := &qc.Selects[0]
|
||||
|
||||
insert, ok := vars[root.ActionVar]
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Variable '%s' not defined", root.ActionVar)
|
||||
}
|
||||
|
||||
jt, array, err := jsn.Tree(insert)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
c.w.WriteString(`(WITH "input" AS (SELECT {{`)
|
||||
c.w.WriteString(root.ActionVar)
|
||||
c.w.WriteString(`}}::json AS j) INSERT INTO `)
|
||||
quoted(c.w, ti.Name)
|
||||
io.WriteString(c.w, ` (`)
|
||||
c.renderInsertUpdateColumns(qc, w, jt, ti)
|
||||
io.WriteString(c.w, `)`)
|
||||
|
||||
c.w.WriteString(` SELECT `)
|
||||
c.renderInsertUpdateColumns(qc, w, jt, ti)
|
||||
c.w.WriteString(` FROM input i, `)
|
||||
|
||||
if array {
|
||||
c.w.WriteString(`json_populate_recordset`)
|
||||
} else {
|
||||
c.w.WriteString(`json_populate_record`)
|
||||
}
|
||||
|
||||
c.w.WriteString(`(NULL::`)
|
||||
c.w.WriteString(ti.Name)
|
||||
c.w.WriteString(`, i.j) t`)
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderInsertUpdateColumns(qc *qcode.QCode, w *bytes.Buffer,
|
||||
jt map[string]interface{}, ti *DBTableInfo) (uint32, error) {
|
||||
|
||||
i := 0
|
||||
for _, cn := range ti.ColumnNames {
|
||||
if _, ok := jt[cn]; !ok {
|
||||
continue
|
||||
}
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
c.w.WriteString(cn)
|
||||
i++
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderUpdate(qc *qcode.QCode, w *bytes.Buffer,
|
||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||
root := &qc.Selects[0]
|
||||
|
||||
update, ok := vars[root.ActionVar]
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Variable '%s' not defined", root.ActionVar)
|
||||
}
|
||||
|
||||
jt, array, err := jsn.Tree(update)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
c.w.WriteString(`(WITH "input" AS (SELECT {{`)
|
||||
c.w.WriteString(root.ActionVar)
|
||||
c.w.WriteString(`}}::json AS j) UPDATE `)
|
||||
quoted(c.w, ti.Name)
|
||||
io.WriteString(c.w, ` SET (`)
|
||||
c.renderInsertUpdateColumns(qc, w, jt, ti)
|
||||
|
||||
c.w.WriteString(`) = (SELECT `)
|
||||
c.renderInsertUpdateColumns(qc, w, jt, ti)
|
||||
c.w.WriteString(` FROM input i, `)
|
||||
|
||||
if array {
|
||||
c.w.WriteString(`json_populate_recordset`)
|
||||
} else {
|
||||
c.w.WriteString(`json_populate_record`)
|
||||
}
|
||||
|
||||
c.w.WriteString(`(NULL::`)
|
||||
c.w.WriteString(ti.Name)
|
||||
c.w.WriteString(`, i.j) t)`)
|
||||
|
||||
io.WriteString(c.w, ` WHERE `)
|
||||
|
||||
if err := c.renderWhere(root, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderDelete(qc *qcode.QCode, w *bytes.Buffer,
|
||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||
root := &qc.Selects[0]
|
||||
|
||||
c.w.WriteString(`(DELETE FROM `)
|
||||
quoted(c.w, ti.Name)
|
||||
io.WriteString(c.w, ` WHERE `)
|
||||
|
||||
if err := c.renderWhere(root, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderUpsert(qc *qcode.QCode, w *bytes.Buffer,
|
||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||
root := &qc.Selects[0]
|
||||
|
||||
upsert, ok := vars[root.ActionVar]
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Variable '%s' not defined", root.ActionVar)
|
||||
}
|
||||
|
||||
jt, _, err := jsn.Tree(upsert)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
c.w.WriteString(` ON CONFLICT DO (`)
|
||||
i := 0
|
||||
|
||||
for _, cn := range ti.ColumnNames {
|
||||
if _, ok := jt[cn]; !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if col, ok := ti.Columns[cn]; !ok || !(col.UniqueKey || col.PrimaryKey) {
|
||||
continue
|
||||
}
|
||||
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
c.w.WriteString(cn)
|
||||
i++
|
||||
}
|
||||
if i == 0 {
|
||||
c.w.WriteString(ti.PrimaryCol)
|
||||
}
|
||||
c.w.WriteString(`) DO `)
|
||||
|
||||
c.w.WriteString(`UPDATE `)
|
||||
io.WriteString(c.w, ` SET `)
|
||||
|
||||
i = 0
|
||||
for _, cn := range ti.ColumnNames {
|
||||
if _, ok := jt[cn]; !ok {
|
||||
continue
|
||||
}
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
c.w.WriteString(cn)
|
||||
io.WriteString(c.w, ` = EXCLUDED.`)
|
||||
c.w.WriteString(cn)
|
||||
i++
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func quoted(w *bytes.Buffer, identifier string) {
|
||||
w.WriteString(`"`)
|
||||
w.WriteString(identifier)
|
||||
w.WriteString(`"`)
|
||||
}
|
@ -1,184 +0,0 @@
|
||||
package psql
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func simpleInsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
user(insert: $data) {
|
||||
id
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "users" (full_name, email) SELECT full_name, email FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id" FROM "users") AS "users_0") AS "done_1337";`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func singleInsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(id: 15, insert: $insert) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description, user_id) SELECT name, description, user_id FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc", "user_id": 5 }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func bulkInsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(id: 15, insert: $insert) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"insert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func singleUpsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(id: 15, upsert: $upsert) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"upsert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func bulkUpsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(id: 15, upsert: $upsert) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" (name, description) SELECT name, description FROM input i, json_populate_recordset(NULL::products, i.j) t ON CONFLICT DO (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"upsert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func singleUpdate(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(id: 15, update: $update, where: { id: { eq: 1 } }) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET (name, description) = (SELECT name, description FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) AND (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func delete(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(delete: true, where: { id: { eq: 1 } }) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (DELETE FROM "products" WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337";`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars)
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompileInsert(t *testing.T) {
|
||||
t.Run("simpleInsert", simpleInsert)
|
||||
t.Run("singleInsert", singleInsert)
|
||||
t.Run("bulkInsert", bulkInsert)
|
||||
t.Run("singleUpdate", singleUpdate)
|
||||
t.Run("singleUpsert", singleUpsert)
|
||||
t.Run("bulkUpsert", bulkUpsert)
|
||||
|
||||
t.Run("delete", delete)
|
||||
}
|
287
psql/mutate.go
Normal file
287
psql/mutate.go
Normal file
@ -0,0 +1,287 @@
|
||||
package psql
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
"github.com/dosco/super-graph/qcode"
|
||||
)
|
||||
|
||||
var noLimit = qcode.Paging{NoLimit: true}
|
||||
|
||||
func (co *Compiler) compileMutation(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
|
||||
if len(qc.Selects) == 0 {
|
||||
return 0, errors.New("empty query")
|
||||
}
|
||||
|
||||
c := &compilerContext{w, qc.Selects, co}
|
||||
root := &qc.Selects[0]
|
||||
|
||||
ti, err := c.schema.GetTable(root.Table)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
io.WriteString(c.w, `WITH `)
|
||||
quoted(c.w, ti.Name)
|
||||
io.WriteString(c.w, ` AS `)
|
||||
|
||||
switch qc.Type {
|
||||
case qcode.QTInsert:
|
||||
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
case qcode.QTUpdate:
|
||||
if _, err := c.renderUpdate(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
case qcode.QTUpsert:
|
||||
if _, err := c.renderUpsert(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
case qcode.QTDelete:
|
||||
if _, err := c.renderDelete(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
default:
|
||||
return 0, errors.New("valid mutations are 'insert', 'update', 'upsert' and 'delete'")
|
||||
}
|
||||
|
||||
io.WriteString(c.w, ` RETURNING *) `)
|
||||
|
||||
root.Paging = noLimit
|
||||
root.DistinctOn = root.DistinctOn[:]
|
||||
root.OrderBy = root.OrderBy[:]
|
||||
root.Where = nil
|
||||
root.Args = nil
|
||||
|
||||
return c.compileQuery(qc, w)
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderInsert(qc *qcode.QCode, w io.Writer,
|
||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||
|
||||
insert, ok := vars[qc.ActionVar]
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Variable '%s' not defined", qc.ActionVar)
|
||||
}
|
||||
|
||||
jt, array, err := jsn.Tree(insert)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
io.WriteString(c.w, `(WITH "input" AS (SELECT {{`)
|
||||
io.WriteString(c.w, qc.ActionVar)
|
||||
io.WriteString(c.w, `}}::json AS j) INSERT INTO `)
|
||||
quoted(c.w, ti.Name)
|
||||
io.WriteString(c.w, ` (`)
|
||||
c.renderInsertUpdateColumns(qc, w, jt, ti, false)
|
||||
io.WriteString(c.w, `)`)
|
||||
|
||||
io.WriteString(c.w, ` SELECT `)
|
||||
c.renderInsertUpdateColumns(qc, w, jt, ti, true)
|
||||
io.WriteString(c.w, ` FROM input i, `)
|
||||
|
||||
if array {
|
||||
io.WriteString(c.w, `json_populate_recordset`)
|
||||
} else {
|
||||
io.WriteString(c.w, `json_populate_record`)
|
||||
}
|
||||
|
||||
io.WriteString(c.w, `(NULL::`)
|
||||
io.WriteString(c.w, ti.Name)
|
||||
io.WriteString(c.w, `, i.j) t`)
|
||||
|
||||
if w := qc.Selects[0].Where; w != nil && w.Op == qcode.OpFalse {
|
||||
io.WriteString(c.w, ` WHERE false`)
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderInsertUpdateColumns(qc *qcode.QCode, w io.Writer,
|
||||
jt map[string]interface{}, ti *DBTableInfo, values bool) (uint32, error) {
|
||||
root := &qc.Selects[0]
|
||||
|
||||
i := 0
|
||||
for _, cn := range ti.ColumnNames {
|
||||
if _, ok := jt[cn]; !ok {
|
||||
continue
|
||||
}
|
||||
if _, ok := root.PresetMap[cn]; ok {
|
||||
continue
|
||||
}
|
||||
if len(root.Allowed) != 0 {
|
||||
if _, ok := root.Allowed[cn]; !ok {
|
||||
continue
|
||||
}
|
||||
}
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
io.WriteString(c.w, `"`)
|
||||
io.WriteString(c.w, cn)
|
||||
io.WriteString(c.w, `"`)
|
||||
i++
|
||||
}
|
||||
|
||||
if i != 0 && len(root.PresetList) != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
|
||||
for i := range root.PresetList {
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
if values {
|
||||
io.WriteString(c.w, `'`)
|
||||
io.WriteString(c.w, root.PresetMap[root.PresetList[i]])
|
||||
io.WriteString(c.w, `'`)
|
||||
} else {
|
||||
io.WriteString(c.w, `"`)
|
||||
io.WriteString(c.w, root.PresetList[i])
|
||||
io.WriteString(c.w, `"`)
|
||||
}
|
||||
}
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderUpdate(qc *qcode.QCode, w io.Writer,
|
||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||
root := &qc.Selects[0]
|
||||
|
||||
update, ok := vars[qc.ActionVar]
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Variable '%s' not defined", qc.ActionVar)
|
||||
}
|
||||
|
||||
jt, array, err := jsn.Tree(update)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
io.WriteString(c.w, `(WITH "input" AS (SELECT {{`)
|
||||
io.WriteString(c.w, qc.ActionVar)
|
||||
io.WriteString(c.w, `}}::json AS j) UPDATE `)
|
||||
quoted(c.w, ti.Name)
|
||||
io.WriteString(c.w, ` SET (`)
|
||||
c.renderInsertUpdateColumns(qc, w, jt, ti, false)
|
||||
|
||||
io.WriteString(c.w, `) = (SELECT `)
|
||||
c.renderInsertUpdateColumns(qc, w, jt, ti, true)
|
||||
io.WriteString(c.w, ` FROM input i, `)
|
||||
|
||||
if array {
|
||||
io.WriteString(c.w, `json_populate_recordset`)
|
||||
} else {
|
||||
io.WriteString(c.w, `json_populate_record`)
|
||||
}
|
||||
|
||||
io.WriteString(c.w, `(NULL::`)
|
||||
io.WriteString(c.w, ti.Name)
|
||||
io.WriteString(c.w, `, i.j) t)`)
|
||||
|
||||
io.WriteString(c.w, ` WHERE `)
|
||||
|
||||
if err := c.renderWhere(root, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
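As with insert, the update is wrapped in a CTE; from the singleUpdate expectation below, reflowed:

WITH "products" AS (
  WITH "input" AS (SELECT {{update}}::json AS j)
  UPDATE "products" SET ("name", "description") =
    (SELECT "name", "description" FROM input i, json_populate_record(NULL::products, i.j) t)
  WHERE (("products"."id") = 1) AND (("products"."id") = 15)
  RETURNING *
) ...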
|
||||
|
||||
func (c *compilerContext) renderDelete(qc *qcode.QCode, w io.Writer,
|
||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||
root := &qc.Selects[0]
|
||||
|
||||
io.WriteString(c.w, `(DELETE FROM `)
|
||||
quoted(c.w, ti.Name)
|
||||
io.WriteString(c.w, ` WHERE `)
|
||||
|
||||
if err := c.renderWhere(root, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
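The delete path is the simplest of the four; from the delete expectation below, the role filters and the where argument all land in one WHERE clause:

WITH "products" AS (
  DELETE FROM "products"
  WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1)
  RETURNING *
) ...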
|
||||
|
||||
func (c *compilerContext) renderUpsert(qc *qcode.QCode, w io.Writer,
|
||||
vars Variables, ti *DBTableInfo) (uint32, error) {
|
||||
root := &qc.Selects[0]
|
||||
|
||||
upsert, ok := vars[qc.ActionVar]
|
||||
if !ok {
|
||||
return 0, fmt.Errorf("Variable '%s' not defined", qc.ActionVar)
|
||||
}
|
||||
|
||||
jt, _, err := jsn.Tree(upsert)
|
||||
if err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
if _, err := c.renderInsert(qc, w, vars, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
|
||||
io.WriteString(c.w, ` ON CONFLICT (`)
|
||||
i := 0
|
||||
|
||||
for _, cn := range ti.ColumnNames {
|
||||
if _, ok := jt[cn]; !ok {
|
||||
continue
|
||||
}
|
||||
|
||||
if col, ok := ti.Columns[cn]; !ok || !(col.UniqueKey || col.PrimaryKey) {
|
||||
continue
|
||||
}
|
||||
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
io.WriteString(c.w, cn)
|
||||
i++
|
||||
}
|
||||
if i == 0 {
|
||||
io.WriteString(c.w, ti.PrimaryCol)
|
||||
}
|
||||
io.WriteString(c.w, `)`)
|
||||
|
||||
if root.Where != nil {
|
||||
io.WriteString(c.w, ` WHERE `)
|
||||
|
||||
if err := c.renderWhere(root, ti); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
|
||||
io.WriteString(c.w, ` DO UPDATE SET `)
|
||||
|
||||
i = 0
|
||||
for _, cn := range ti.ColumnNames {
|
||||
if _, ok := jt[cn]; !ok {
|
||||
continue
|
||||
}
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
io.WriteString(c.w, cn)
|
||||
io.WriteString(c.w, ` = EXCLUDED.`)
|
||||
io.WriteString(c.w, cn)
|
||||
i++
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
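Upsert reuses renderInsert and then appends the conflict clause; from the singleUpsert expectation below:

INSERT INTO "products" ("name", "description")
SELECT "name", "description" FROM input i, json_populate_record(NULL::products, i.j) t
ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description
RETURNING *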
|
||||
|
||||
func quoted(w io.Writer, identifier string) {
|
||||
io.WriteString(w, `"`)
|
||||
io.WriteString(w, identifier)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
306
psql/mutate_test.go
Normal file
@ -0,0 +1,306 @@
|
||||
package psql
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func simpleInsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
user(insert: $data) {
|
||||
id
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id" FROM "users") AS "users_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func singleInsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(id: 15, insert: $insert) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" ("name", "description", "user_id") SELECT "name", "description", "user_id" FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"insert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc", "user_id": 5 }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "anon")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func bulkInsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(name: "test", id: 15, insert: $insert) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{insert}}::json AS j) INSERT INTO "products" ("name", "description") SELECT "name", "description" FROM input i, json_populate_recordset(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"insert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "anon")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func singleUpsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(upsert: $upsert) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" ("name", "description") SELECT "name", "description" FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"upsert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func singleUpsertWhere(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(upsert: $upsert, where: { price : { gt: 3 } }) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" ("name", "description") SELECT "name", "description" FROM input i, json_populate_record(NULL::products, i.j) t ON CONFLICT (id) WHERE (("products"."price") > 3) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"upsert": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func bulkUpsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(upsert: $upsert) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{upsert}}::json AS j) INSERT INTO "products" ("name", "description") SELECT "name", "description" FROM input i, json_populate_recordset(NULL::products, i.j) t ON CONFLICT (id) DO UPDATE SET name = EXCLUDED.name, description = EXCLUDED.description RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"upsert": json.RawMessage(` [{ "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }]`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func singleUpdate(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(id: 15, update: $update, where: { id: { eq: 1 } }) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{update}}::json AS j) UPDATE "products" SET ("name", "description") = (SELECT "name", "description" FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."id") = 1) AND (("products"."id") = 15) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "anon")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func delete(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(delete: true, where: { id: { eq: 1 } }) {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (DELETE FROM "products" WHERE (("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 1) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"update": json.RawMessage(` { "name": "my_name", "woo": { "hoo": "goo" }, "description": "my_desc" }`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func blockedInsert(t *testing.T) {
|
||||
gql := `mutation {
|
||||
user(insert: $data) {
|
||||
id
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "users" ("full_name", "email") SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t WHERE false RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id" FROM "users") AS "users_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "bad_dude")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func blockedUpdate(t *testing.T) {
|
||||
gql := `mutation {
|
||||
user(where: { id: { lt: 5 } }, update: $data) {
|
||||
id
|
||||
email
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "users" AS (WITH "input" AS (SELECT {{data}}::json AS j) UPDATE "users" SET ("full_name", "email") = (SELECT "full_name", "email" FROM input i, json_populate_record(NULL::users, i.j) t) WHERE false RETURNING *) SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id", "users"."email" FROM "users") AS "users_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(`{"email": "reannagreenholt@orn.com", "full_name": "Flo Barton"}`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "bad_dude")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func simpleInsertWithPresets(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(insert: $data) {
|
||||
id
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{data}}::json AS j) INSERT INTO "products" ("name", "price", "created_at", "updated_at", "user_id") SELECT "name", "price", 'now', 'now', '$user_id' FROM input i, json_populate_record(NULL::products, i.j) t RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(`{"name": "Tomato", "price": 5.76}`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func simpleUpdateWithPresets(t *testing.T) {
|
||||
gql := `mutation {
|
||||
product(update: $data) {
|
||||
id
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `WITH "products" AS (WITH "input" AS (SELECT {{data}}::json AS j) UPDATE "products" SET ("name", "price", "updated_at") = (SELECT "name", "price", 'now' FROM input i, json_populate_record(NULL::products, i.j) t) WHERE (("products"."user_id") = {{user_id}}) RETURNING *) SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id" FROM "products") AS "products_0") AS "done_1337"`
|
||||
|
||||
vars := map[string]json.RawMessage{
|
||||
"data": json.RawMessage(`{"name": "Apple", "price": 1.25}`),
|
||||
}
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, vars, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompileMutate(t *testing.T) {
|
||||
t.Run("simpleInsert", simpleInsert)
|
||||
t.Run("singleInsert", singleInsert)
|
||||
t.Run("bulkInsert", bulkInsert)
|
||||
t.Run("singleUpdate", singleUpdate)
|
||||
t.Run("singleUpsert", singleUpsert)
|
||||
t.Run("singleUpsertWhere", singleUpsertWhere)
|
||||
t.Run("bulkUpsert", bulkUpsert)
|
||||
t.Run("delete", delete)
|
||||
t.Run("blockedInsert", blockedInsert)
|
||||
t.Run("blockedUpdate", blockedUpdate)
|
||||
t.Run("simpleInsertWithPresets", simpleInsertWithPresets)
|
||||
t.Run("simpleUpdateWithPresets", simpleUpdateWithPresets)
|
||||
|
||||
}
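These cases can be run on their own while iterating on the mutation compiler, for example from the repository root:

go test -run TestCompileMutate ./psql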
|
199
psql/psql_test.go
Normal file
@ -0,0 +1,199 @@
|
||||
package psql
|
||||
|
||||
import (
|
||||
"log"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/dosco/super-graph/qcode"
|
||||
)
|
||||
|
||||
const (
|
||||
errNotExpected = "Generated SQL did not match what was expected"
|
||||
)
|
||||
|
||||
var (
|
||||
qcompile *qcode.Compiler
|
||||
pcompile *Compiler
|
||||
)
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
var err error
|
||||
|
||||
qcompile, err = qcode.NewCompiler(qcode.Config{
|
||||
Blocklist: []string{
|
||||
"secret",
|
||||
"password",
|
||||
"token",
|
||||
},
|
||||
})
|
||||
|
||||
qcompile.AddRole("user", "product", qcode.TRConfig{
|
||||
Query: qcode.QueryConfig{
|
||||
Columns: []string{"id", "name", "price", "users", "customers"},
|
||||
Filters: []string{
|
||||
"{ price: { gt: 0 } }",
|
||||
"{ price: { lt: 8 } }",
|
||||
},
|
||||
},
|
||||
Insert: qcode.InsertConfig{
|
||||
Presets: map[string]string{
|
||||
"user_id": "$user_id",
|
||||
"created_at": "now",
|
||||
"updated_at": "now",
|
||||
},
|
||||
},
|
||||
Update: qcode.UpdateConfig{
|
||||
Filters: []string{"{ user_id: { eq: $user_id } }"},
|
||||
Presets: map[string]string{"updated_at": "now"},
|
||||
},
|
||||
Delete: qcode.DeleteConfig{
|
||||
Filters: []string{
|
||||
"{ price: { gt: 0 } }",
|
||||
"{ price: { lt: 8 } }",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
qcompile.AddRole("anon", "product", qcode.TRConfig{
|
||||
Query: qcode.QueryConfig{
|
||||
Columns: []string{"id", "name"},
|
||||
},
|
||||
})
|
||||
|
||||
qcompile.AddRole("anon1", "product", qcode.TRConfig{
|
||||
Query: qcode.QueryConfig{
|
||||
Columns: []string{"id", "name", "price"},
|
||||
DisableFunctions: true,
|
||||
},
|
||||
})
|
||||
|
||||
qcompile.AddRole("user", "users", qcode.TRConfig{
|
||||
Query: qcode.QueryConfig{
|
||||
Columns: []string{"id", "full_name", "avatar", "email", "products"},
|
||||
},
|
||||
})
|
||||
|
||||
qcompile.AddRole("bad_dude", "users", qcode.TRConfig{
|
||||
Query: qcode.QueryConfig{
|
||||
Filters: []string{"false"},
|
||||
DisableFunctions: true,
|
||||
},
|
||||
Insert: qcode.InsertConfig{
|
||||
Filters: []string{"false"},
|
||||
},
|
||||
Update: qcode.UpdateConfig{
|
||||
Filters: []string{"false"},
|
||||
},
|
||||
})
|
||||
|
||||
qcompile.AddRole("user", "mes", qcode.TRConfig{
|
||||
Query: qcode.QueryConfig{
|
||||
Columns: []string{"id", "full_name", "avatar"},
|
||||
Filters: []string{
|
||||
"{ id: { eq: $user_id } }",
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
qcompile.AddRole("user", "customers", qcode.TRConfig{
|
||||
Query: qcode.QueryConfig{
|
||||
Columns: []string{"id", "email", "full_name", "products"},
|
||||
},
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
tables := []*DBTable{
|
||||
&DBTable{Name: "customers", Type: "table"},
|
||||
&DBTable{Name: "users", Type: "table"},
|
||||
&DBTable{Name: "products", Type: "table"},
|
||||
&DBTable{Name: "purchases", Type: "table"},
|
||||
}
|
||||
|
||||
columns := [][]*DBColumn{
|
||||
[]*DBColumn{
|
||||
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 4, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 5, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 6, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 7, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 8, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 9, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 10, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
|
||||
[]*DBColumn{
|
||||
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 4, Name: "avatar", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 5, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 6, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 7, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 8, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 9, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 10, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 11, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
|
||||
[]*DBColumn{
|
||||
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 2, Name: "name", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 3, Name: "description", Type: "text", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 4, Name: "price", Type: "numeric(7,2)", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 5, Name: "user_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "users", FKeyColID: []int16{1}},
|
||||
&DBColumn{ID: 6, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 7, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 8, Name: "tsv", Type: "tsvector", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
|
||||
[]*DBColumn{
|
||||
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 2, Name: "customer_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "customers", FKeyColID: []int16{1}},
|
||||
&DBColumn{ID: 3, Name: "product_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "products", FKeyColID: []int16{1}},
|
||||
&DBColumn{ID: 4, Name: "sale_type", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 5, Name: "quantity", Type: "integer", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 6, Name: "due_date", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 7, Name: "returned", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
|
||||
}
|
||||
|
||||
schema := &DBSchema{
|
||||
t: make(map[string]*DBTableInfo),
|
||||
rm: make(map[string]map[string]*DBRel),
|
||||
al: make(map[string]struct{}),
|
||||
}
|
||||
|
||||
aliases := map[string][]string{
|
||||
"users": []string{"mes"},
|
||||
}
|
||||
|
||||
for i, t := range tables {
|
||||
schema.updateSchema(t, columns[i], aliases)
|
||||
}
|
||||
|
||||
vars := NewVariables(map[string]string{
|
||||
"account_id": "select account_id from users where id = $user_id",
|
||||
})
|
||||
|
||||
pcompile = NewCompiler(Config{
|
||||
Schema: schema,
|
||||
Vars: vars,
|
||||
})
|
||||
|
||||
os.Exit(m.Run())
|
||||
}
|
||||
|
||||
func compileGQLToPSQL(gql string, vars Variables, role string) ([]byte, error) {
|
||||
qc, err := qcompile.Compile([]byte(gql), role)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, sqlStmt, err := pcompile.CompileEx(qc, vars)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
//fmt.Println(string(sqlStmt))
|
||||
|
||||
return sqlStmt, nil
|
||||
}
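A minimal sketch of how the role argument changes the output, reusing the helper above; the query and the id value are made up for illustration and assume the TestMain setup has already run:

// Sketch: the "user" role picks up the price filters configured for products,
// the "anon" role does not. Error handling elided.
userSQL, _ := compileGQLToPSQL(`query { product(id: 3) { id name } }`, nil, "user")
anonSQL, _ := compileGQLToPSQL(`query { product(id: 3) { id name } }`, nil, "anon")
// userSQL contains the (price > 0) AND (price < 8) conditions; anonSQL does not.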
|
@ -49,7 +49,7 @@ func (c *Compiler) IDColumn(table string) (string, error) {
|
||||
}
|
||||
|
||||
type compilerContext struct {
|
||||
w *bytes.Buffer
|
||||
w io.Writer
|
||||
s []qcode.Select
|
||||
*Compiler
|
||||
}
|
||||
@ -60,18 +60,18 @@ func (co *Compiler) CompileEx(qc *qcode.QCode, vars Variables) (uint32, []byte,
|
||||
return skipped, w.Bytes(), err
|
||||
}
|
||||
|
||||
func (co *Compiler) Compile(qc *qcode.QCode, w *bytes.Buffer, vars Variables) (uint32, error) {
|
||||
func (co *Compiler) Compile(qc *qcode.QCode, w io.Writer, vars Variables) (uint32, error) {
|
||||
switch qc.Type {
|
||||
case qcode.QTQuery:
|
||||
return co.compileQuery(qc, w)
|
||||
case qcode.QTMutation:
|
||||
case qcode.QTInsert, qcode.QTUpdate, qcode.QTDelete, qcode.QTUpsert:
|
||||
return co.compileMutation(qc, w, vars)
|
||||
}
|
||||
|
||||
return 0, errors.New("unknown operation")
|
||||
return 0, fmt.Errorf("Unknown operation type %d", qc.Type)
|
||||
}
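Compile now takes any io.Writer instead of a *bytes.Buffer, so the generated SQL can be streamed wherever it is needed. A minimal sketch, not a full program: co is assumed to be a *Compiler, qc a compiled *qcode.QCode and vars the request variables.

var buf bytes.Buffer
if _, err := co.Compile(qc, &buf, vars); err != nil {
    log.Fatal(err)
}
fmt.Println(buf.String())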
|
||||
|
||||
func (co *Compiler) compileQuery(qc *qcode.QCode, w *bytes.Buffer) (uint32, error) {
|
||||
func (co *Compiler) compileQuery(qc *qcode.QCode, w io.Writer) (uint32, error) {
|
||||
if len(qc.Selects) == 0 {
|
||||
return 0, errors.New("empty query")
|
||||
}
|
||||
@ -90,17 +90,17 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w *bytes.Buffer) (uint32, erro
|
||||
|
||||
//fmt.Fprintf(w, `SELECT json_object_agg('%s', %s) FROM (`,
|
||||
//root.FieldName, root.Table)
|
||||
c.w.WriteString(`SELECT json_object_agg('`)
|
||||
c.w.WriteString(root.FieldName)
|
||||
c.w.WriteString(`', `)
|
||||
io.WriteString(c.w, `SELECT json_object_agg('`)
|
||||
io.WriteString(c.w, root.FieldName)
|
||||
io.WriteString(c.w, `', `)
|
||||
|
||||
if ti.Singular == false {
|
||||
c.w.WriteString(root.Table)
|
||||
io.WriteString(c.w, root.Table)
|
||||
} else {
|
||||
c.w.WriteString("sel_json_")
|
||||
io.WriteString(c.w, "sel_json_")
|
||||
int2string(c.w, root.ID)
|
||||
}
|
||||
c.w.WriteString(`) FROM (`)
|
||||
io.WriteString(c.w, `) FROM (`)
|
||||
|
||||
var ignored uint32
|
||||
|
||||
@ -120,7 +120,7 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w *bytes.Buffer) (uint32, erro
|
||||
}
|
||||
|
||||
if sel.ID != 0 {
|
||||
if err = c.renderJoin(sel); err != nil {
|
||||
if err = c.renderLateralJoin(sel); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
@ -154,16 +154,15 @@ func (co *Compiler) compileQuery(qc *qcode.QCode, w *bytes.Buffer) (uint32, erro
|
||||
}
|
||||
|
||||
if sel.ID != 0 {
|
||||
if err = c.renderJoinClose(sel); err != nil {
|
||||
if err = c.renderLateralJoinClose(sel); err != nil {
|
||||
return 0, err
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, `)`)
|
||||
alias(c.w, `done_1337`)
|
||||
c.w.WriteString(`;`)
|
||||
|
||||
return ignored, nil
|
||||
}
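The outer shell that compileQuery writes around the selects is visible in every test expectation, e.g.:

SELECT json_object_agg('product', sel_json_0) FROM ( ...selects... ) AS "done_1337"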
|
||||
@ -219,10 +218,10 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo) (uint
|
||||
// SELECT
|
||||
if ti.Singular == false {
|
||||
//fmt.Fprintf(w, `SELECT coalesce(json_agg("%s"`, c.sel.Table)
|
||||
c.w.WriteString(`SELECT coalesce(json_agg("`)
|
||||
c.w.WriteString("sel_json_")
|
||||
io.WriteString(c.w, `SELECT coalesce(json_agg("`)
|
||||
io.WriteString(c.w, "sel_json_")
|
||||
int2string(c.w, sel.ID)
|
||||
c.w.WriteString(`"`)
|
||||
io.WriteString(c.w, `"`)
|
||||
|
||||
if hasOrder {
|
||||
err := c.renderOrderBy(sel, ti)
|
||||
@ -232,24 +231,24 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo) (uint
|
||||
}
|
||||
|
||||
//fmt.Fprintf(w, `), '[]') AS "%s" FROM (`, c.sel.Table)
|
||||
c.w.WriteString(`), '[]')`)
|
||||
io.WriteString(c.w, `), '[]')`)
|
||||
alias(c.w, sel.Table)
|
||||
c.w.WriteString(` FROM (`)
|
||||
io.WriteString(c.w, ` FROM (`)
|
||||
}
|
||||
|
||||
// ROW-TO-JSON
|
||||
c.w.WriteString(`SELECT `)
|
||||
io.WriteString(c.w, `SELECT `)
|
||||
|
||||
if len(sel.DistinctOn) != 0 {
|
||||
c.renderDistinctOn(sel, ti)
|
||||
}
|
||||
|
||||
c.w.WriteString(`row_to_json((`)
|
||||
io.WriteString(c.w, `row_to_json((`)
|
||||
|
||||
//fmt.Fprintf(w, `SELECT "sel_%d" FROM (SELECT `, c.sel.ID)
|
||||
c.w.WriteString(`SELECT "sel_`)
|
||||
io.WriteString(c.w, `SELECT "sel_`)
|
||||
int2string(c.w, sel.ID)
|
||||
c.w.WriteString(`" FROM (SELECT `)
|
||||
io.WriteString(c.w, `" FROM (SELECT `)
|
||||
|
||||
// Combined column names
|
||||
c.renderColumns(sel, ti)
|
||||
@ -262,11 +261,11 @@ func (c *compilerContext) renderSelect(sel *qcode.Select, ti *DBTableInfo) (uint
|
||||
}
|
||||
|
||||
//fmt.Fprintf(w, `) AS "sel_%d"`, c.sel.ID)
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, `)`)
|
||||
aliasWithID(c.w, "sel", sel.ID)
|
||||
|
||||
//fmt.Fprintf(w, `)) AS "%s"`, c.sel.Table)
|
||||
c.w.WriteString(`))`)
|
||||
io.WriteString(c.w, `))`)
|
||||
aliasWithID(c.w, "sel_json", sel.ID)
|
||||
// END-ROW-TO-JSON
|
||||
|
||||
@ -295,88 +294,119 @@ func (c *compilerContext) renderSelectClose(sel *qcode.Select, ti *DBTableInfo)
|
||||
}
|
||||
}
|
||||
|
||||
if sel.Action == 0 {
|
||||
if len(sel.Paging.Limit) != 0 {
|
||||
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
|
||||
c.w.WriteString(` LIMIT ('`)
|
||||
c.w.WriteString(sel.Paging.Limit)
|
||||
c.w.WriteString(`') :: integer`)
|
||||
switch {
|
||||
case sel.Paging.NoLimit:
|
||||
break
|
||||
|
||||
} else if ti.Singular {
|
||||
c.w.WriteString(` LIMIT ('1') :: integer`)
|
||||
case len(sel.Paging.Limit) != 0:
|
||||
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
|
||||
io.WriteString(c.w, ` LIMIT ('`)
|
||||
io.WriteString(c.w, sel.Paging.Limit)
|
||||
io.WriteString(c.w, `') :: integer`)
|
||||
|
||||
} else {
|
||||
c.w.WriteString(` LIMIT ('20') :: integer`)
|
||||
}
|
||||
case ti.Singular:
|
||||
io.WriteString(c.w, ` LIMIT ('1') :: integer`)
|
||||
|
||||
default:
|
||||
io.WriteString(c.w, ` LIMIT ('20') :: integer`)
|
||||
}
|
||||
|
||||
if len(sel.Paging.Offset) != 0 {
|
||||
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
|
||||
c.w.WriteString(`OFFSET ('`)
|
||||
c.w.WriteString(sel.Paging.Offset)
|
||||
c.w.WriteString(`') :: integer`)
|
||||
io.WriteString(c.w, `OFFSET ('`)
|
||||
io.WriteString(c.w, sel.Paging.Offset)
|
||||
io.WriteString(c.w, `') :: integer`)
|
||||
}
|
||||
|
||||
if ti.Singular == false {
|
||||
//fmt.Fprintf(w, `) AS "sel_json_agg_%d"`, c.sel.ID)
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, `)`)
|
||||
aliasWithID(c.w, "sel_json_agg", sel.ID)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderJoin(sel *qcode.Select) error {
|
||||
c.w.WriteString(` LEFT OUTER JOIN LATERAL (`)
|
||||
func (c *compilerContext) renderLateralJoin(sel *qcode.Select) error {
|
||||
io.WriteString(c.w, ` LEFT OUTER JOIN LATERAL (`)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderJoinClose(sel *qcode.Select) error {
|
||||
func (c *compilerContext) renderLateralJoinClose(sel *qcode.Select) error {
|
||||
//fmt.Fprintf(w, `) AS "%s_%d_join" ON ('true')`, c.sel.Table, c.sel.ID)
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, `)`)
|
||||
aliasWithIDSuffix(c.w, sel.Table, sel.ID, "_join")
|
||||
c.w.WriteString(` ON ('true')`)
|
||||
io.WriteString(c.w, ` ON ('true')`)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderJoinTable(sel *qcode.Select) error {
|
||||
func (c *compilerContext) renderJoin(sel *qcode.Select) error {
|
||||
parent := &c.s[sel.ParentID]
|
||||
return c.renderJoinByName(sel.Table, parent.Table, parent.ID)
|
||||
}
|
||||
|
||||
rel, err := c.schema.GetRel(sel.Table, parent.Table)
|
||||
func (c *compilerContext) renderJoinByName(table, parent string, id int32) error {
|
||||
rel, err := c.schema.GetRel(table, parent)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// This join is only required for one-to-many relations since
|
||||
// these make use of join tables that need to be pulled in.
|
||||
if rel.Type != RelOneToManyThrough {
|
||||
return err
|
||||
}
|
||||
|
||||
pt, err := c.schema.GetTable(parent.Table)
|
||||
pt, err := c.schema.GetTable(parent)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
//fmt.Fprintf(w, ` LEFT OUTER JOIN "%s" ON (("%s"."%s") = ("%s_%d"."%s"))`,
|
||||
//rel.Through, rel.Through, rel.ColT, c.parent.Table, c.parent.ID, rel.Col1)
|
||||
c.w.WriteString(` LEFT OUTER JOIN "`)
|
||||
c.w.WriteString(rel.Through)
|
||||
c.w.WriteString(`" ON ((`)
|
||||
io.WriteString(c.w, ` LEFT OUTER JOIN "`)
|
||||
io.WriteString(c.w, rel.Through)
|
||||
io.WriteString(c.w, `" ON ((`)
|
||||
colWithTable(c.w, rel.Through, rel.ColT)
|
||||
c.w.WriteString(`) = (`)
|
||||
colWithTableID(c.w, pt.Name, parent.ID, rel.Col1)
|
||||
c.w.WriteString(`))`)
|
||||
io.WriteString(c.w, `) = (`)
|
||||
if id != -1 {
|
||||
colWithTableID(c.w, pt.Name, id, rel.Col1)
|
||||
} else {
|
||||
colWithTable(c.w, pt.Name, rel.Col1)
|
||||
}
|
||||
io.WriteString(c.w, `))`)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderColumns(sel *qcode.Select, ti *DBTableInfo) {
|
||||
for i, col := range sel.Cols {
|
||||
i := 0
|
||||
for _, col := range sel.Cols {
|
||||
n := funcPrefixLen(col.Name)
|
||||
if n != 0 {
|
||||
if sel.Functions == false {
|
||||
continue
|
||||
}
|
||||
if len(sel.Allowed) != 0 {
|
||||
if _, ok := sel.Allowed[col.Name[n:]]; !ok {
|
||||
continue
|
||||
}
|
||||
}
|
||||
} else {
|
||||
if len(sel.Allowed) != 0 {
|
||||
if _, ok := sel.Allowed[col.Name]; !ok {
|
||||
continue
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, ", ")
|
||||
}
|
||||
//fmt.Fprintf(w, `"%s_%d"."%s" AS "%s"`,
|
||||
//c.sel.Table, c.sel.ID, col.Name, col.FieldName)
|
||||
colWithTableIDAlias(c.w, ti.Name, sel.ID, col.Name, col.FieldName)
|
||||
i++
|
||||
}
|
||||
}
|
||||
|
||||
@ -415,10 +445,24 @@ func (c *compilerContext) renderJoinedColumns(sel *qcode.Select, ti *DBTableInfo
|
||||
}
|
||||
childSel := &c.s[id]
|
||||
|
||||
cti, err := c.schema.GetTable(childSel.Table)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
//fmt.Fprintf(w, `"%s_%d_join"."%s" AS "%s"`,
|
||||
//s.Table, s.ID, s.Table, s.FieldName)
|
||||
colWithTableIDSuffixAlias(c.w, childSel.Table, childSel.ID,
|
||||
"_join", childSel.Table, childSel.FieldName)
|
||||
if cti.Singular {
|
||||
io.WriteString(c.w, `"sel_json_`)
|
||||
int2string(c.w, childSel.ID)
|
||||
io.WriteString(c.w, `" AS "`)
|
||||
io.WriteString(c.w, childSel.FieldName)
|
||||
io.WriteString(c.w, `"`)
|
||||
|
||||
} else {
|
||||
colWithTableIDSuffixAlias(c.w, childSel.Table, childSel.ID,
|
||||
"_join", childSel.Table, childSel.FieldName)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
@ -433,9 +477,10 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo,
|
||||
isSearch := sel.Args["search"] != nil
|
||||
isAgg := false
|
||||
|
||||
c.w.WriteString(` FROM (SELECT `)
|
||||
io.WriteString(c.w, ` FROM (SELECT `)
|
||||
|
||||
for i, col := range sel.Cols {
|
||||
i := 0
|
||||
for n, col := range sel.Cols {
|
||||
cn := col.Name
|
||||
|
||||
_, isRealCol := ti.Columns[cn]
|
||||
@ -447,122 +492,147 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo,
|
||||
cn = ti.TSVCol
|
||||
arg := sel.Args["search"]
|
||||
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
//fmt.Fprintf(w, `ts_rank("%s"."%s", to_tsquery('%s')) AS %s`,
|
||||
//c.sel.Table, cn, arg.Val, col.Name)
|
||||
c.w.WriteString(`ts_rank(`)
|
||||
io.WriteString(c.w, `ts_rank(`)
|
||||
colWithTable(c.w, ti.Name, cn)
|
||||
c.w.WriteString(`, to_tsquery('`)
|
||||
c.w.WriteString(arg.Val)
|
||||
c.w.WriteString(`')`)
|
||||
io.WriteString(c.w, `, to_tsquery('`)
|
||||
io.WriteString(c.w, arg.Val)
|
||||
io.WriteString(c.w, `')`)
|
||||
alias(c.w, col.Name)
|
||||
i++
|
||||
|
||||
case strings.HasPrefix(cn, "search_headline_"):
|
||||
cn = cn[16:]
|
||||
arg := sel.Args["search"]
|
||||
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
//fmt.Fprintf(w, `ts_headline("%s"."%s", to_tsquery('%s')) AS %s`,
|
||||
//c.sel.Table, cn, arg.Val, col.Name)
|
||||
c.w.WriteString(`ts_headline(`)
|
||||
io.WriteString(c.w, `ts_headline(`)
|
||||
colWithTable(c.w, ti.Name, cn)
|
||||
c.w.WriteString(`, to_tsquery('`)
|
||||
c.w.WriteString(arg.Val)
|
||||
c.w.WriteString(`')`)
|
||||
io.WriteString(c.w, `, to_tsquery('`)
|
||||
io.WriteString(c.w, arg.Val)
|
||||
io.WriteString(c.w, `')`)
|
||||
alias(c.w, col.Name)
|
||||
i++
|
||||
|
||||
}
|
||||
} else {
|
||||
pl := funcPrefixLen(cn)
|
||||
if pl == 0 {
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
//fmt.Fprintf(w, `'%s not defined' AS %s`, cn, col.Name)
|
||||
c.w.WriteString(`'`)
|
||||
c.w.WriteString(cn)
|
||||
c.w.WriteString(` not defined'`)
|
||||
io.WriteString(c.w, `'`)
|
||||
io.WriteString(c.w, cn)
|
||||
io.WriteString(c.w, ` not defined'`)
|
||||
alias(c.w, col.Name)
|
||||
} else {
|
||||
isAgg = true
|
||||
i++
|
||||
|
||||
} else if sel.Functions {
|
||||
cn1 := cn[pl:]
|
||||
if len(sel.Allowed) != 0 {
|
||||
if _, ok := sel.Allowed[cn1]; !ok {
|
||||
continue
|
||||
}
|
||||
}
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
fn := cn[0 : pl-1]
|
||||
cn := cn[pl:]
|
||||
isAgg = true
|
||||
|
||||
//fmt.Fprintf(w, `%s("%s"."%s") AS %s`, fn, c.sel.Table, cn, col.Name)
|
||||
c.w.WriteString(fn)
|
||||
c.w.WriteString(`(`)
|
||||
colWithTable(c.w, ti.Name, cn)
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, fn)
|
||||
io.WriteString(c.w, `(`)
|
||||
colWithTable(c.w, ti.Name, cn1)
|
||||
io.WriteString(c.w, `)`)
|
||||
alias(c.w, col.Name)
|
||||
i++
|
||||
|
||||
}
|
||||
}
|
||||
} else {
|
||||
groupBy = append(groupBy, i)
|
||||
groupBy = append(groupBy, n)
|
||||
//fmt.Fprintf(w, `"%s"."%s"`, c.sel.Table, cn)
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
colWithTable(c.w, ti.Name, cn)
|
||||
}
|
||||
i++
|
||||
|
||||
if i < len(sel.Cols)-1 || len(childCols) != 0 {
|
||||
//io.WriteString(w, ", ")
|
||||
c.w.WriteString(`, `)
|
||||
}
|
||||
}
|
||||
|
||||
for i, col := range childCols {
|
||||
for _, col := range childCols {
|
||||
if i != 0 {
|
||||
//io.WriteString(w, ", ")
|
||||
c.w.WriteString(`, `)
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
|
||||
//fmt.Fprintf(w, `"%s"."%s"`, col.Table, col.Name)
|
||||
colWithTable(c.w, col.Table, col.Name)
|
||||
i++
|
||||
}
|
||||
|
||||
c.w.WriteString(` FROM `)
|
||||
io.WriteString(c.w, ` FROM `)
|
||||
|
||||
//fmt.Fprintf(w, ` FROM "%s"`, c.sel.Table)
|
||||
c.w.WriteString(`"`)
|
||||
c.w.WriteString(ti.Name)
|
||||
c.w.WriteString(`"`)
|
||||
io.WriteString(c.w, `"`)
|
||||
io.WriteString(c.w, ti.Name)
|
||||
io.WriteString(c.w, `"`)
|
||||
|
||||
// if tn, ok := c.tmap[sel.Table]; ok {
|
||||
// //fmt.Fprintf(w, ` FROM "%s" AS "%s"`, tn, c.sel.Table)
|
||||
// tableWithAlias(c.w, ti.Name, sel.Table)
|
||||
// } else {
|
||||
// //fmt.Fprintf(w, ` FROM "%s"`, c.sel.Table)
|
||||
// c.w.WriteString(`"`)
|
||||
// c.w.WriteString(sel.Table)
|
||||
// c.w.WriteString(`"`)
|
||||
// io.WriteString(c.w, `"`)
|
||||
// io.WriteString(c.w, sel.Table)
|
||||
// io.WriteString(c.w, `"`)
|
||||
// }
|
||||
|
||||
if isRoot && isFil {
|
||||
c.w.WriteString(` WHERE (`)
|
||||
io.WriteString(c.w, ` WHERE (`)
|
||||
if err := c.renderWhere(sel, ti); err != nil {
|
||||
return err
|
||||
}
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, `)`)
|
||||
}
|
||||
|
||||
if !isRoot {
|
||||
if err := c.renderJoinTable(sel); err != nil {
|
||||
if err := c.renderJoin(sel); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
c.w.WriteString(` WHERE (`)
|
||||
io.WriteString(c.w, ` WHERE (`)
|
||||
|
||||
if err := c.renderRelationship(sel, ti); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if isFil {
|
||||
c.w.WriteString(` AND `)
|
||||
io.WriteString(c.w, ` AND `)
|
||||
if err := c.renderWhere(sel, ti); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, `)`)
|
||||
}
|
||||
|
||||
if isAgg {
|
||||
if len(groupBy) != 0 {
|
||||
c.w.WriteString(` GROUP BY `)
|
||||
io.WriteString(c.w, ` GROUP BY `)
|
||||
|
||||
for i, id := range groupBy {
|
||||
if i != 0 {
|
||||
c.w.WriteString(`, `)
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
//fmt.Fprintf(w, `"%s"."%s"`, c.sel.Table, c.sel.Cols[id].Name)
|
||||
colWithTable(c.w, ti.Name, sel.Cols[id].Name)
|
||||
@ -570,30 +640,32 @@ func (c *compilerContext) renderBaseSelect(sel *qcode.Select, ti *DBTableInfo,
|
||||
}
|
||||
}
|
||||
|
||||
if sel.Action == 0 {
|
||||
if len(sel.Paging.Limit) != 0 {
|
||||
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
|
||||
c.w.WriteString(` LIMIT ('`)
|
||||
c.w.WriteString(sel.Paging.Limit)
|
||||
c.w.WriteString(`') :: integer`)
|
||||
switch {
|
||||
case sel.Paging.NoLimit:
|
||||
break
|
||||
|
||||
} else if ti.Singular {
|
||||
c.w.WriteString(` LIMIT ('1') :: integer`)
|
||||
case len(sel.Paging.Limit) != 0:
|
||||
//fmt.Fprintf(w, ` LIMIT ('%s') :: integer`, c.sel.Paging.Limit)
|
||||
io.WriteString(c.w, ` LIMIT ('`)
|
||||
io.WriteString(c.w, sel.Paging.Limit)
|
||||
io.WriteString(c.w, `') :: integer`)
|
||||
|
||||
} else {
|
||||
c.w.WriteString(` LIMIT ('20') :: integer`)
|
||||
}
|
||||
case ti.Singular:
|
||||
io.WriteString(c.w, ` LIMIT ('1') :: integer`)
|
||||
|
||||
default:
|
||||
io.WriteString(c.w, ` LIMIT ('20') :: integer`)
|
||||
}
|
||||
|
||||
if len(sel.Paging.Offset) != 0 {
|
||||
//fmt.Fprintf(w, ` OFFSET ('%s') :: integer`, c.sel.Paging.Offset)
|
||||
c.w.WriteString(` OFFSET ('`)
|
||||
c.w.WriteString(sel.Paging.Offset)
|
||||
c.w.WriteString(`') :: integer`)
|
||||
io.WriteString(c.w, ` OFFSET ('`)
|
||||
io.WriteString(c.w, sel.Paging.Offset)
|
||||
io.WriteString(c.w, `') :: integer`)
|
||||
}
|
||||
|
||||
//fmt.Fprintf(w, `) AS "%s_%d"`, c.sel.Table, c.sel.ID)
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, `)`)
|
||||
aliasWithID(c.w, ti.Name, sel.ID)
|
||||
return nil
|
||||
}
|
||||
@ -604,7 +676,7 @@ func (c *compilerContext) renderOrderByColumns(sel *qcode.Select, ti *DBTableInf
|
||||
for i := range sel.OrderBy {
|
||||
if colsRendered {
|
||||
//io.WriteString(w, ", ")
|
||||
c.w.WriteString(`, `)
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
|
||||
col := sel.OrderBy[i].Col
|
||||
@ -612,15 +684,18 @@ func (c *compilerContext) renderOrderByColumns(sel *qcode.Select, ti *DBTableInf
|
||||
//c.sel.Table, c.sel.ID, c,
|
||||
//c.sel.Table, c.sel.ID, c)
|
||||
colWithTableID(c.w, ti.Name, sel.ID, col)
|
||||
c.w.WriteString(` AS `)
|
||||
io.WriteString(c.w, ` AS `)
|
||||
tableIDColSuffix(c.w, sel.Table, sel.ID, col, "_ob")
|
||||
}
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo) error {
|
||||
parent := c.s[sel.ParentID]
|
||||
return c.renderRelationshipByName(sel.Table, parent.Table, parent.ID)
|
||||
}
|
||||
|
||||
rel, err := c.schema.GetRel(sel.Table, parent.Table)
|
||||
func (c *compilerContext) renderRelationshipByName(table, parent string, id int32) error {
|
||||
rel, err := c.schema.GetRel(table, parent)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
@ -629,29 +704,38 @@ func (c *compilerContext) renderRelationship(sel *qcode.Select, ti *DBTableInfo)
|
||||
case RelBelongTo:
|
||||
//fmt.Fprintf(w, `(("%s"."%s") = ("%s_%d"."%s"))`,
|
||||
//c.sel.Table, rel.Col1, c.parent.Table, c.parent.ID, rel.Col2)
|
||||
c.w.WriteString(`((`)
|
||||
colWithTable(c.w, ti.Name, rel.Col1)
|
||||
c.w.WriteString(`) = (`)
|
||||
colWithTableID(c.w, parent.Table, parent.ID, rel.Col2)
|
||||
c.w.WriteString(`))`)
|
||||
io.WriteString(c.w, `((`)
|
||||
colWithTable(c.w, table, rel.Col1)
|
||||
io.WriteString(c.w, `) = (`)
|
||||
if id != -1 {
|
||||
colWithTableID(c.w, parent, id, rel.Col2)
|
||||
} else {
|
||||
colWithTable(c.w, parent, rel.Col2)
|
||||
}
|
||||
io.WriteString(c.w, `))`)
|
||||
|
||||
case RelOneToMany:
|
||||
//fmt.Fprintf(w, `(("%s"."%s") = ("%s_%d"."%s"))`,
|
||||
//c.sel.Table, rel.Col1, c.parent.Table, c.parent.ID, rel.Col2)
|
||||
c.w.WriteString(`((`)
|
||||
colWithTable(c.w, ti.Name, rel.Col1)
|
||||
c.w.WriteString(`) = (`)
|
||||
colWithTableID(c.w, parent.Table, parent.ID, rel.Col2)
|
||||
c.w.WriteString(`))`)
|
||||
io.WriteString(c.w, `((`)
|
||||
colWithTable(c.w, table, rel.Col1)
|
||||
io.WriteString(c.w, `) = (`)
|
||||
if id != -1 {
|
||||
colWithTableID(c.w, parent, id, rel.Col2)
|
||||
} else {
|
||||
colWithTable(c.w, parent, rel.Col2)
|
||||
}
|
||||
io.WriteString(c.w, `))`)
|
||||
|
||||
case RelOneToManyThrough:
|
||||
// This requires the through table to be joined onto this select
|
||||
//fmt.Fprintf(w, `(("%s"."%s") = ("%s"."%s"))`,
|
||||
//c.sel.Table, rel.Col1, rel.Through, rel.Col2)
|
||||
c.w.WriteString(`((`)
|
||||
colWithTable(c.w, ti.Name, rel.Col1)
|
||||
c.w.WriteString(`) = (`)
|
||||
io.WriteString(c.w, `((`)
|
||||
colWithTable(c.w, table, rel.Col1)
|
||||
io.WriteString(c.w, `) = (`)
|
||||
colWithTable(c.w, rel.Through, rel.Col2)
|
||||
c.w.WriteString(`))`)
|
||||
io.WriteString(c.w, `))`)
|
||||
}
|
||||
|
||||
return nil
|
||||
@ -675,17 +759,23 @@ func (c *compilerContext) renderWhere(sel *qcode.Select, ti *DBTableInfo) error
|
||||
case qcode.ExpOp:
|
||||
switch val {
|
||||
case qcode.OpAnd:
|
||||
c.w.WriteString(` AND `)
|
||||
io.WriteString(c.w, ` AND `)
|
||||
case qcode.OpOr:
|
||||
c.w.WriteString(` OR `)
|
||||
io.WriteString(c.w, ` OR `)
|
||||
case qcode.OpNot:
|
||||
c.w.WriteString(`NOT `)
|
||||
io.WriteString(c.w, `NOT `)
|
||||
case qcode.OpFalse:
|
||||
io.WriteString(c.w, `false`)
|
||||
default:
|
||||
return fmt.Errorf("11: unexpected value %v (%t)", intf, intf)
|
||||
}
|
||||
|
||||
case *qcode.Exp:
|
||||
switch val.Op {
|
||||
case qcode.OpFalse:
|
||||
st.Push(val.Op)
|
||||
qcode.FreeExp(val)
|
||||
|
||||
case qcode.OpAnd, qcode.OpOr:
|
||||
for i := len(val.Children) - 1; i >= 0; i-- {
|
||||
st.Push(val.Children[i])
|
||||
@ -701,101 +791,20 @@ func (c *compilerContext) renderWhere(sel *qcode.Select, ti *DBTableInfo) error
|
||||
qcode.FreeExp(val)
|
||||
|
||||
default:
|
||||
if val.NestedCol {
|
||||
//fmt.Fprintf(w, `(("%s") `, val.Col)
|
||||
c.w.WriteString(`(("`)
|
||||
c.w.WriteString(val.Col)
|
||||
c.w.WriteString(`") `)
|
||||
if len(val.NestedCols) != 0 {
|
||||
io.WriteString(c.w, `EXISTS `)
|
||||
|
||||
} else if len(val.Col) != 0 {
|
||||
if err := c.renderNestedWhere(val, sel, ti); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
} else {
|
||||
//fmt.Fprintf(w, `(("%s"."%s") `, c.sel.Table, val.Col)
|
||||
c.w.WriteString(`((`)
|
||||
colWithTable(c.w, ti.Name, val.Col)
|
||||
c.w.WriteString(`) `)
|
||||
if err := c.renderOp(val, sel, ti); err != nil {
|
||||
return err
|
||||
}
|
||||
qcode.FreeExp(val)
|
||||
}
|
||||
valExists := true
|
||||
|
||||
switch val.Op {
|
||||
case qcode.OpEquals:
|
||||
c.w.WriteString(`=`)
|
||||
case qcode.OpNotEquals:
|
||||
c.w.WriteString(`!=`)
|
||||
case qcode.OpGreaterOrEquals:
|
||||
c.w.WriteString(`>=`)
|
||||
case qcode.OpLesserOrEquals:
|
||||
c.w.WriteString(`<=`)
|
||||
case qcode.OpGreaterThan:
|
||||
c.w.WriteString(`>`)
|
||||
case qcode.OpLesserThan:
|
||||
c.w.WriteString(`<`)
|
||||
case qcode.OpIn:
|
||||
c.w.WriteString(`IN`)
|
||||
case qcode.OpNotIn:
|
||||
c.w.WriteString(`NOT IN`)
|
||||
case qcode.OpLike:
|
||||
c.w.WriteString(`LIKE`)
|
||||
case qcode.OpNotLike:
|
||||
c.w.WriteString(`NOT LIKE`)
|
||||
case qcode.OpILike:
|
||||
c.w.WriteString(`ILIKE`)
|
||||
case qcode.OpNotILike:
|
||||
c.w.WriteString(`NOT ILIKE`)
|
||||
case qcode.OpSimilar:
|
||||
c.w.WriteString(`SIMILAR TO`)
|
||||
case qcode.OpNotSimilar:
|
||||
c.w.WriteString(`NOT SIMILAR TO`)
|
||||
case qcode.OpContains:
|
||||
c.w.WriteString(`@>`)
|
||||
case qcode.OpContainedIn:
|
||||
c.w.WriteString(`<@`)
|
||||
case qcode.OpHasKey:
|
||||
c.w.WriteString(`?`)
|
||||
case qcode.OpHasKeyAny:
|
||||
c.w.WriteString(`?|`)
|
||||
case qcode.OpHasKeyAll:
|
||||
c.w.WriteString(`?&`)
|
||||
case qcode.OpIsNull:
|
||||
if strings.EqualFold(val.Val, "true") {
|
||||
c.w.WriteString(`IS NULL)`)
|
||||
} else {
|
||||
c.w.WriteString(`IS NOT NULL)`)
|
||||
}
|
||||
valExists = false
|
||||
case qcode.OpEqID:
|
||||
if len(ti.PrimaryCol) == 0 {
|
||||
return fmt.Errorf("no primary key column defined for %s", ti.Name)
|
||||
}
|
||||
//fmt.Fprintf(w, `(("%s") =`, c.ti.PrimaryCol)
|
||||
c.w.WriteString(`((`)
|
||||
colWithTable(c.w, ti.Name, ti.PrimaryCol)
|
||||
//c.w.WriteString(ti.PrimaryCol)
|
||||
c.w.WriteString(`) =`)
|
||||
case qcode.OpTsQuery:
|
||||
if len(ti.TSVCol) == 0 {
|
||||
return fmt.Errorf("no tsv column defined for %s", ti.Name)
|
||||
}
|
||||
//fmt.Fprintf(w, `(("%s") @@ to_tsquery('%s'))`, c.ti.TSVCol, val.Val)
|
||||
c.w.WriteString(`(("`)
|
||||
c.w.WriteString(ti.TSVCol)
|
||||
c.w.WriteString(`") @@ to_tsquery('`)
|
||||
c.w.WriteString(val.Val)
|
||||
c.w.WriteString(`'))`)
|
||||
valExists = false
|
||||
|
||||
default:
|
||||
return fmt.Errorf("[Where] unexpected op code %d", val.Op)
|
||||
}
|
||||
|
||||
if valExists {
|
||||
if val.Type == qcode.ValList {
|
||||
c.renderList(val)
|
||||
} else {
|
||||
c.renderVal(val, c.vars)
|
||||
}
|
||||
c.w.WriteString(`)`)
|
||||
}
|
||||
|
||||
qcode.FreeExp(val)
|
||||
}
|
||||
|
||||
default:
|
||||
@ -807,11 +816,133 @@ func (c *compilerContext) renderWhere(sel *qcode.Select, ti *DBTableInfo) error
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderNestedWhere(ex *qcode.Exp, sel *qcode.Select, ti *DBTableInfo) error {
|
||||
|
||||
for i := 0; i < len(ex.NestedCols)-1; i++ {
|
||||
cti, err := c.schema.GetTable(ex.NestedCols[i])
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if i != 0 {
|
||||
io.WriteString(c.w, ` AND `)
|
||||
}
|
||||
|
||||
io.WriteString(c.w, `(SELECT 1 FROM `)
|
||||
io.WriteString(c.w, cti.Name)
|
||||
|
||||
if err := c.renderJoinByName(cti.Name, ti.Name, -1); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
io.WriteString(c.w, ` WHERE `)
|
||||
|
||||
if err := c.renderRelationshipByName(cti.Name, ti.Name, -1); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
for i := 0; i < len(ex.NestedCols)-1; i++ {
|
||||
io.WriteString(c.w, `)`)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
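For filters on nested columns this emits one correlated subquery per hop (the EXISTS keyword itself is written by renderWhere above); schematically, with placeholder table and column names:

EXISTS (SELECT 1 FROM <child_table> WHERE ((<child_table>.<col>) = (<parent_table>.<col>)) AND ( ... ))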
|
||||
|
||||
func (c *compilerContext) renderOp(ex *qcode.Exp, sel *qcode.Select, ti *DBTableInfo) error {
|
||||
if len(ex.Col) != 0 {
|
||||
io.WriteString(c.w, `((`)
|
||||
colWithTable(c.w, ti.Name, ex.Col)
|
||||
io.WriteString(c.w, `) `)
|
||||
}
|
||||
|
||||
switch ex.Op {
|
||||
case qcode.OpEquals:
|
||||
io.WriteString(c.w, `=`)
|
||||
case qcode.OpNotEquals:
|
||||
io.WriteString(c.w, `!=`)
|
||||
case qcode.OpGreaterOrEquals:
|
||||
io.WriteString(c.w, `>=`)
|
||||
case qcode.OpLesserOrEquals:
|
||||
io.WriteString(c.w, `<=`)
|
||||
case qcode.OpGreaterThan:
|
||||
io.WriteString(c.w, `>`)
|
||||
case qcode.OpLesserThan:
|
||||
io.WriteString(c.w, `<`)
|
||||
case qcode.OpIn:
|
||||
io.WriteString(c.w, `IN`)
|
||||
case qcode.OpNotIn:
|
||||
io.WriteString(c.w, `NOT IN`)
|
||||
case qcode.OpLike:
|
||||
io.WriteString(c.w, `LIKE`)
|
||||
case qcode.OpNotLike:
|
||||
io.WriteString(c.w, `NOT LIKE`)
|
||||
case qcode.OpILike:
|
||||
io.WriteString(c.w, `ILIKE`)
|
||||
case qcode.OpNotILike:
|
||||
io.WriteString(c.w, `NOT ILIKE`)
|
||||
case qcode.OpSimilar:
|
||||
io.WriteString(c.w, `SIMILAR TO`)
|
||||
case qcode.OpNotSimilar:
|
||||
io.WriteString(c.w, `NOT SIMILAR TO`)
|
||||
case qcode.OpContains:
|
||||
io.WriteString(c.w, `@>`)
|
||||
case qcode.OpContainedIn:
|
||||
io.WriteString(c.w, `<@`)
|
||||
case qcode.OpHasKey:
|
||||
io.WriteString(c.w, `?`)
|
||||
case qcode.OpHasKeyAny:
|
||||
io.WriteString(c.w, `?|`)
|
||||
case qcode.OpHasKeyAll:
|
||||
io.WriteString(c.w, `?&`)
|
||||
case qcode.OpIsNull:
|
||||
if strings.EqualFold(ex.Val, "true") {
|
||||
io.WriteString(c.w, `IS NULL)`)
|
||||
} else {
|
||||
io.WriteString(c.w, `IS NOT NULL)`)
|
||||
}
|
||||
return nil
|
||||
case qcode.OpEqID:
|
||||
if len(ti.PrimaryCol) == 0 {
|
||||
return fmt.Errorf("no primary key column defined for %s", ti.Name)
|
||||
}
|
||||
//fmt.Fprintf(w, `(("%s") =`, c.ti.PrimaryCol)
|
||||
io.WriteString(c.w, `((`)
|
||||
colWithTable(c.w, ti.Name, ti.PrimaryCol)
|
||||
//io.WriteString(c.w, ti.PrimaryCol)
|
||||
io.WriteString(c.w, `) =`)
|
||||
case qcode.OpTsQuery:
|
||||
if len(ti.TSVCol) == 0 {
|
||||
return fmt.Errorf("no tsv column defined for %s", ti.Name)
|
||||
}
|
||||
//fmt.Fprintf(w, `(("%s") @@ to_tsquery('%s'))`, c.ti.TSVCol, val.Val)
|
||||
io.WriteString(c.w, `(("`)
|
||||
io.WriteString(c.w, ti.TSVCol)
|
||||
io.WriteString(c.w, `") @@ to_tsquery('`)
|
||||
io.WriteString(c.w, ex.Val)
|
||||
io.WriteString(c.w, `'))`)
|
||||
return nil
|
||||
|
||||
default:
|
||||
return fmt.Errorf("[Where] unexpected op code %d", ex.Op)
|
||||
}
|
||||
|
||||
if ex.Type == qcode.ValList {
|
||||
c.renderList(ex)
|
||||
} else {
|
||||
c.renderVal(ex, c.vars)
|
||||
}
|
||||
|
||||
io.WriteString(c.w, `)`)
|
||||
return nil
|
||||
}
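This operator table is what turns a filter argument into a SQL predicate; for example, the singleUpsertWhere test earlier maps

where: { price: { gt: 3 } }   into   (("products"."price") > 3)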
|
||||
|
||||
func (c *compilerContext) renderOrderBy(sel *qcode.Select, ti *DBTableInfo) error {
|
||||
c.w.WriteString(` ORDER BY `)
|
||||
io.WriteString(c.w, ` ORDER BY `)
|
||||
for i := range sel.OrderBy {
|
||||
if i != 0 {
|
||||
c.w.WriteString(`, `)
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
ob := sel.OrderBy[i]
|
||||
|
||||
@ -819,27 +950,27 @@ func (c *compilerContext) renderOrderBy(sel *qcode.Select, ti *DBTableInfo) erro
|
||||
case qcode.OrderAsc:
|
||||
//fmt.Fprintf(w, `"%s_%d.ob.%s" ASC`, sel.Table, sel.ID, ob.Col)
|
||||
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
|
||||
c.w.WriteString(` ASC`)
|
||||
io.WriteString(c.w, ` ASC`)
|
||||
case qcode.OrderDesc:
|
||||
//fmt.Fprintf(w, `"%s_%d.ob.%s" DESC`, sel.Table, sel.ID, ob.Col)
|
||||
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
|
||||
c.w.WriteString(` DESC`)
|
||||
io.WriteString(c.w, ` DESC`)
|
||||
case qcode.OrderAscNullsFirst:
|
||||
//fmt.Fprintf(w, `"%s_%d.ob.%s" ASC NULLS FIRST`, sel.Table, sel.ID, ob.Col)
|
||||
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
|
||||
c.w.WriteString(` ASC NULLS FIRST`)
|
||||
io.WriteString(c.w, ` ASC NULLS FIRST`)
|
||||
case qcode.OrderDescNullsFirst:
|
||||
//fmt.Fprintf(w, `%s_%d.ob.%s DESC NULLS FIRST`, sel.Table, sel.ID, ob.Col)
|
||||
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
|
||||
c.w.WriteString(` DESC NULLS FIRST`)
io.WriteString(c.w, ` DESC NULLS FIRST`)
|
||||
case qcode.OrderAscNullsLast:
|
||||
//fmt.Fprintf(w, `"%s_%d.ob.%s ASC NULLS LAST`, sel.Table, sel.ID, ob.Col)
|
||||
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
|
||||
c.w.WriteString(` ASC NULLS LAST`)
|
||||
io.WriteString(c.w, ` ASC NULLS LAST`)
|
||||
case qcode.OrderDescNullsLast:
|
||||
//fmt.Fprintf(w, `%s_%d.ob.%s DESC NULLS LAST`, sel.Table, sel.ID, ob.Col)
|
||||
tableIDColSuffix(c.w, ti.Name, sel.ID, ob.Col, "_ob")
|
||||
c.w.WriteString(` DESC NULLS LAST`)
|
||||
io.WriteString(c.w, ` DESC NULLS LAST`)
|
||||
default:
|
||||
return fmt.Errorf("13: unexpected value %v", ob.Order)
|
||||
}
|
||||
@ -851,30 +982,30 @@ func (c *compilerContext) renderDistinctOn(sel *qcode.Select, ti *DBTableInfo) {
|
||||
io.WriteString(c.w, `DISTINCT ON (`)
|
||||
for i := range sel.DistinctOn {
|
||||
if i != 0 {
|
||||
c.w.WriteString(`, `)
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
//fmt.Fprintf(w, `"%s_%d.ob.%s"`, c.sel.Table, c.sel.ID, c.sel.DistinctOn[i])
|
||||
tableIDColSuffix(c.w, ti.Name, sel.ID, sel.DistinctOn[i], "_ob")
|
||||
}
|
||||
c.w.WriteString(`) `)
|
||||
io.WriteString(c.w, `) `)
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderList(ex *qcode.Exp) {
|
||||
io.WriteString(c.w, ` (`)
|
||||
for i := range ex.ListVal {
|
||||
if i != 0 {
|
||||
c.w.WriteString(`, `)
|
||||
io.WriteString(c.w, `, `)
|
||||
}
|
||||
switch ex.ListType {
|
||||
case qcode.ValBool, qcode.ValInt, qcode.ValFloat:
|
||||
c.w.WriteString(ex.ListVal[i])
|
||||
io.WriteString(c.w, ex.ListVal[i])
|
||||
case qcode.ValStr:
|
||||
c.w.WriteString(`'`)
|
||||
c.w.WriteString(ex.ListVal[i])
|
||||
c.w.WriteString(`'`)
|
||||
io.WriteString(c.w, `'`)
|
||||
io.WriteString(c.w, ex.ListVal[i])
|
||||
io.WriteString(c.w, `'`)
|
||||
}
|
||||
}
|
||||
c.w.WriteString(`)`)
|
||||
io.WriteString(c.w, `)`)
|
||||
}
|
||||
|
||||
func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string) {
|
||||
@ -883,27 +1014,27 @@ func (c *compilerContext) renderVal(ex *qcode.Exp, vars map[string]string) {
|
||||
switch ex.Type {
|
||||
case qcode.ValBool, qcode.ValInt, qcode.ValFloat:
|
||||
if len(ex.Val) != 0 {
|
||||
c.w.WriteString(ex.Val)
|
||||
io.WriteString(c.w, ex.Val)
|
||||
} else {
|
||||
c.w.WriteString(`''`)
|
||||
io.WriteString(c.w, `''`)
|
||||
}
|
||||
|
||||
case qcode.ValStr:
|
||||
c.w.WriteString(`'`)
|
||||
c.w.WriteString(ex.Val)
|
||||
c.w.WriteString(`'`)
|
||||
io.WriteString(c.w, `'`)
|
||||
io.WriteString(c.w, ex.Val)
|
||||
io.WriteString(c.w, `'`)
|
||||
|
||||
case qcode.ValVar:
|
||||
if val, ok := vars[ex.Val]; ok {
|
||||
c.w.WriteString(val)
|
||||
io.WriteString(c.w, val)
|
||||
} else {
|
||||
//fmt.Fprintf(w, `'{{%s}}'`, ex.Val)
|
||||
c.w.WriteString(`{{`)
|
||||
c.w.WriteString(ex.Val)
|
||||
c.w.WriteString(`}}`)
|
||||
io.WriteString(c.w, `{{`)
|
||||
io.WriteString(c.w, ex.Val)
|
||||
io.WriteString(c.w, `}}`)
|
||||
}
|
||||
}
|
||||
//c.w.WriteString(`)`)
|
||||
//io.WriteString(c.w, `)`)
|
||||
}
|
||||
|
||||
func funcPrefixLen(fn string) int {
|
||||
@ -939,105 +1070,105 @@ func hasBit(n uint32, pos uint32) bool {
|
||||
return (val > 0)
|
||||
}
|
||||
|
||||
func alias(w *bytes.Buffer, alias string) {
|
||||
w.WriteString(` AS "`)
|
||||
w.WriteString(alias)
|
||||
w.WriteString(`"`)
|
||||
func alias(w io.Writer, alias string) {
|
||||
io.WriteString(w, ` AS "`)
|
||||
io.WriteString(w, alias)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func aliasWithID(w *bytes.Buffer, alias string, id int32) {
|
||||
w.WriteString(` AS "`)
|
||||
w.WriteString(alias)
|
||||
w.WriteString(`_`)
|
||||
func aliasWithID(w io.Writer, alias string, id int32) {
|
||||
io.WriteString(w, ` AS "`)
|
||||
io.WriteString(w, alias)
|
||||
io.WriteString(w, `_`)
|
||||
int2string(w, id)
|
||||
w.WriteString(`"`)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func aliasWithIDSuffix(w *bytes.Buffer, alias string, id int32, suffix string) {
|
||||
w.WriteString(` AS "`)
|
||||
w.WriteString(alias)
|
||||
w.WriteString(`_`)
|
||||
func aliasWithIDSuffix(w io.Writer, alias string, id int32, suffix string) {
|
||||
io.WriteString(w, ` AS "`)
|
||||
io.WriteString(w, alias)
|
||||
io.WriteString(w, `_`)
|
||||
int2string(w, id)
|
||||
w.WriteString(suffix)
|
||||
w.WriteString(`"`)
|
||||
io.WriteString(w, suffix)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func colWithAlias(w *bytes.Buffer, col, alias string) {
|
||||
w.WriteString(`"`)
|
||||
w.WriteString(col)
|
||||
w.WriteString(`" AS "`)
|
||||
w.WriteString(alias)
|
||||
w.WriteString(`"`)
|
||||
func colWithAlias(w io.Writer, col, alias string) {
|
||||
io.WriteString(w, `"`)
|
||||
io.WriteString(w, col)
|
||||
io.WriteString(w, `" AS "`)
|
||||
io.WriteString(w, alias)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func tableWithAlias(w *bytes.Buffer, table, alias string) {
|
||||
w.WriteString(`"`)
|
||||
w.WriteString(table)
|
||||
w.WriteString(`" AS "`)
|
||||
w.WriteString(alias)
|
||||
w.WriteString(`"`)
|
||||
func tableWithAlias(w io.Writer, table, alias string) {
|
||||
io.WriteString(w, `"`)
|
||||
io.WriteString(w, table)
|
||||
io.WriteString(w, `" AS "`)
|
||||
io.WriteString(w, alias)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func colWithTable(w *bytes.Buffer, table, col string) {
|
||||
w.WriteString(`"`)
|
||||
w.WriteString(table)
|
||||
w.WriteString(`"."`)
|
||||
w.WriteString(col)
|
||||
w.WriteString(`"`)
|
||||
func colWithTable(w io.Writer, table, col string) {
|
||||
io.WriteString(w, `"`)
|
||||
io.WriteString(w, table)
|
||||
io.WriteString(w, `"."`)
|
||||
io.WriteString(w, col)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func colWithTableID(w *bytes.Buffer, table string, id int32, col string) {
|
||||
w.WriteString(`"`)
|
||||
w.WriteString(table)
|
||||
w.WriteString(`_`)
|
||||
func colWithTableID(w io.Writer, table string, id int32, col string) {
|
||||
io.WriteString(w, `"`)
|
||||
io.WriteString(w, table)
|
||||
io.WriteString(w, `_`)
|
||||
int2string(w, id)
|
||||
w.WriteString(`"."`)
|
||||
w.WriteString(col)
|
||||
w.WriteString(`"`)
|
||||
io.WriteString(w, `"."`)
|
||||
io.WriteString(w, col)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func colWithTableIDAlias(w *bytes.Buffer, table string, id int32, col, alias string) {
|
||||
w.WriteString(`"`)
|
||||
w.WriteString(table)
|
||||
w.WriteString(`_`)
|
||||
func colWithTableIDAlias(w io.Writer, table string, id int32, col, alias string) {
|
||||
io.WriteString(w, `"`)
|
||||
io.WriteString(w, table)
|
||||
io.WriteString(w, `_`)
|
||||
int2string(w, id)
|
||||
w.WriteString(`"."`)
|
||||
w.WriteString(col)
|
||||
w.WriteString(`" AS "`)
|
||||
w.WriteString(alias)
|
||||
w.WriteString(`"`)
|
||||
io.WriteString(w, `"."`)
|
||||
io.WriteString(w, col)
|
||||
io.WriteString(w, `" AS "`)
|
||||
io.WriteString(w, alias)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func colWithTableIDSuffixAlias(w *bytes.Buffer, table string, id int32,
|
||||
func colWithTableIDSuffixAlias(w io.Writer, table string, id int32,
|
||||
suffix, col, alias string) {
|
||||
w.WriteString(`"`)
|
||||
w.WriteString(table)
|
||||
w.WriteString(`_`)
|
||||
io.WriteString(w, `"`)
|
||||
io.WriteString(w, table)
|
||||
io.WriteString(w, `_`)
|
||||
int2string(w, id)
|
||||
w.WriteString(suffix)
|
||||
w.WriteString(`"."`)
|
||||
w.WriteString(col)
|
||||
w.WriteString(`" AS "`)
|
||||
w.WriteString(alias)
|
||||
w.WriteString(`"`)
|
||||
io.WriteString(w, suffix)
|
||||
io.WriteString(w, `"."`)
|
||||
io.WriteString(w, col)
|
||||
io.WriteString(w, `" AS "`)
|
||||
io.WriteString(w, alias)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
func tableIDColSuffix(w *bytes.Buffer, table string, id int32, col, suffix string) {
|
||||
w.WriteString(`"`)
|
||||
w.WriteString(table)
|
||||
w.WriteString(`_`)
|
||||
func tableIDColSuffix(w io.Writer, table string, id int32, col, suffix string) {
|
||||
io.WriteString(w, `"`)
|
||||
io.WriteString(w, table)
|
||||
io.WriteString(w, `_`)
|
||||
int2string(w, id)
|
||||
w.WriteString(`_`)
|
||||
w.WriteString(col)
|
||||
w.WriteString(suffix)
|
||||
w.WriteString(`"`)
|
||||
io.WriteString(w, `_`)
|
||||
io.WriteString(w, col)
|
||||
io.WriteString(w, suffix)
|
||||
io.WriteString(w, `"`)
|
||||
}
|
||||
|
||||
const charset = "0123456789"
|
||||
|
||||
func int2string(w *bytes.Buffer, val int32) {
|
||||
func int2string(w io.Writer, val int32) {
|
||||
if val < 10 {
|
||||
w.WriteByte(charset[val])
|
||||
w.Write([]byte{charset[val]})
|
||||
return
|
||||
}
|
||||
|
||||
@ -1053,7 +1184,7 @@ func int2string(w *bytes.Buffer, val int32) {
|
||||
for val3 > 0 {
|
||||
d := val3 % 10
|
||||
val3 /= 10
|
||||
w.WriteByte(charset[d])
|
||||
w.Write([]byte{charset[d]})
|
||||
}
|
||||
}
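int2string writes the decimal digits of an int32 straight to the writer using the charset lookup table, avoiding strconv/fmt in the SQL-generation hot path. The middle of the function is elided in this diff, so the following is an independent, self-contained sketch of the same idea rather than the exact elided code:

```go
// Sketch only: write a non-negative int32 as ASCII digits without strconv,
// by reversing the value digit-by-digit and then emitting the digits
// most-significant first.
package main

import (
	"bytes"
	"fmt"
	"io"
)

const digits = "0123456789"

func writeInt(w io.Writer, val int32) {
	if val < 10 {
		w.Write([]byte{digits[val]})
		return
	}
	var rev int32
	n := 0
	for v := val; v > 0; v /= 10 {
		rev = rev*10 + v%10
		n++
	}
	for i := 0; i < n; i++ {
		w.Write([]byte{digits[rev%10]})
		rev /= 10
	}
}

func main() {
	var b bytes.Buffer
	writeInt(&b, 1337)
	fmt.Println(b.String()) // 1337
}
```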
|
||||
|
@ -2,154 +2,9 @@ package psql
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"log"
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
"github.com/dosco/super-graph/qcode"
|
||||
)
|
||||
|
||||
const (
|
||||
errNotExpected = "Generated SQL did not match what was expected"
|
||||
)
|
||||
|
||||
var (
|
||||
qcompile *qcode.Compiler
|
||||
pcompile *Compiler
|
||||
)
|
||||
|
||||
func TestMain(m *testing.M) {
|
||||
var err error
|
||||
|
||||
qcompile, err = qcode.NewCompiler(qcode.Config{
|
||||
DefaultFilter: []string{
|
||||
`{ user_id: { _eq: $user_id } }`,
|
||||
},
|
||||
FilterMap: qcode.Filters{
|
||||
All: map[string][]string{
|
||||
"users": []string{
|
||||
"{ id: { eq: $user_id } }",
|
||||
},
|
||||
"products": []string{
|
||||
"{ price: { gt: 0 } }",
|
||||
"{ price: { lt: 8 } }",
|
||||
},
|
||||
"customers": []string{},
|
||||
"mes": []string{
|
||||
"{ id: { eq: $user_id } }",
|
||||
},
|
||||
},
|
||||
Query: map[string][]string{
|
||||
"users": []string{},
|
||||
},
|
||||
Update: map[string][]string{
|
||||
"products": []string{
|
||||
"{ user_id: { eq: $user_id } }",
|
||||
},
|
||||
},
|
||||
},
|
||||
Blocklist: []string{
|
||||
"secret",
|
||||
"password",
|
||||
"token",
|
||||
},
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
}
|
||||
|
||||
tables := []*DBTable{
|
||||
&DBTable{Name: "customers", Type: "table"},
|
||||
&DBTable{Name: "users", Type: "table"},
|
||||
&DBTable{Name: "products", Type: "table"},
|
||||
&DBTable{Name: "purchases", Type: "table"},
|
||||
}
|
||||
|
||||
columns := [][]*DBColumn{
|
||||
[]*DBColumn{
|
||||
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 4, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 5, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 6, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 7, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 8, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 9, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 10, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
|
||||
[]*DBColumn{
|
||||
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 2, Name: "full_name", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 3, Name: "phone", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 4, Name: "avatar", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 5, Name: "email", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 6, Name: "encrypted_password", Type: "character varying", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 7, Name: "reset_password_token", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 8, Name: "reset_password_sent_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 9, Name: "remember_created_at", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 10, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 11, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
|
||||
[]*DBColumn{
|
||||
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 2, Name: "name", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 3, Name: "description", Type: "text", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 4, Name: "price", Type: "numeric(7,2)", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 5, Name: "user_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "users", FKeyColID: []int16{1}},
|
||||
&DBColumn{ID: 6, Name: "created_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 7, Name: "updated_at", Type: "timestamp without time zone", NotNull: true, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 8, Name: "tsv", Type: "tsvector", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
|
||||
[]*DBColumn{
|
||||
&DBColumn{ID: 1, Name: "id", Type: "bigint", NotNull: true, PrimaryKey: true, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 2, Name: "customer_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "customers", FKeyColID: []int16{1}},
|
||||
&DBColumn{ID: 3, Name: "product_id", Type: "bigint", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "products", FKeyColID: []int16{1}},
|
||||
&DBColumn{ID: 4, Name: "sale_type", Type: "character varying", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 5, Name: "quantity", Type: "integer", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 6, Name: "due_date", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)},
|
||||
&DBColumn{ID: 7, Name: "returned", Type: "timestamp without time zone", NotNull: false, PrimaryKey: false, UniqueKey: false, FKeyTable: "", FKeyColID: []int16(nil)}},
|
||||
}
|
||||
|
||||
schema := &DBSchema{
|
||||
t: make(map[string]*DBTableInfo),
|
||||
rm: make(map[string]map[string]*DBRel),
|
||||
al: make(map[string]struct{}),
|
||||
}
|
||||
|
||||
aliases := map[string][]string{
|
||||
"users": []string{"mes"},
|
||||
}
|
||||
|
||||
for i, t := range tables {
|
||||
schema.updateSchema(t, columns[i], aliases)
|
||||
}
|
||||
|
||||
vars := NewVariables(map[string]string{
|
||||
"account_id": "select account_id from users where id = $user_id",
|
||||
})
|
||||
|
||||
pcompile = NewCompiler(Config{
|
||||
Schema: schema,
|
||||
Vars: vars,
|
||||
})
|
||||
|
||||
os.Exit(m.Run())
|
||||
}
|
||||
|
||||
func compileGQLToPSQL(gql string, vars Variables) ([]byte, error) {
|
||||
|
||||
qc, err := qcompile.Compile([]byte(gql))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
_, sqlStmt, err := pcompile.CompileEx(qc, vars)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
return sqlStmt, nil
|
||||
}
|
||||
|
||||
func withComplexArgs(t *testing.T) {
|
||||
gql := `query {
|
||||
proDUcts(
|
||||
@ -173,9 +28,9 @@ func withComplexArgs(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0" ORDER BY "products_0_price_ob" DESC), '[]') AS "products" FROM (SELECT DISTINCT ON ("products_0_price_ob") row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0", "products_0"."price" AS "products_0_price_ob" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") < 28) AND (("products"."id") >= 20)) LIMIT ('30') :: integer) AS "products_0" ORDER BY "products_0_price_ob" DESC LIMIT ('30') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0" ORDER BY "products_0_price_ob" DESC), '[]') AS "products" FROM (SELECT DISTINCT ON ("products_0_price_ob") row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0", "products_0"."price" AS "products_0_price_ob" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") < 28) AND (("products"."id") >= 20)) LIMIT ('30') :: integer) AS "products_0" ORDER BY "products_0_price_ob" DESC LIMIT ('30') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -201,9 +56,9 @@ func withWhereMultiOr(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") < 20) OR (("products"."price") > 10) OR NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") < 20) OR (("products"."price") > 10) OR NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -227,9 +82,9 @@ func withWhereIsNull(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") > 10) AND NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") > 10) AND NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -253,9 +108,9 @@ func withWhereAndList(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") > 10) AND NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name", "products_0"."price" AS "price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name", "products"."price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") > 10) AND NOT (("products"."id") IS NULL)) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -273,9 +128,9 @@ func fetchByID(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 15)) LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") = 15)) LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -293,9 +148,9 @@ func searchQuery(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("tsv") @@ to_tsquery('Imperial'))) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("tsv") @@ to_tsquery('Imperial'))) LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -316,9 +171,9 @@ func oneToMany(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email", "users"."id" FROM "users" LIMIT ('20') :: integer) AS "users_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name", "products_1"."price" AS "price") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name", "products"."price" FROM "products" WHERE ((("products"."user_id") = ("users_0"."id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -339,9 +194,9 @@ func belongsTo(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "users_1_join"."users" AS "users") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "users_1"."email" AS "email") AS "sel_1")) AS "sel_json_1" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "users_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."price" AS "price", "users_1_join"."users" AS "users") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", "products"."price", "products"."user_id" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "users_1"."email" AS "email") AS "sel_1")) AS "sel_json_1" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = ("products_0"."user_id"))) LIMIT ('20') :: integer) AS "users_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "users_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -362,9 +217,9 @@ func manyToMany(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "customers_1_join"."customers" AS "customers") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "customers" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name") AS "sel_1")) AS "sel_json_1" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "customers_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "customers_1_join"."customers" AS "customers") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", "products"."id" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) LIMIT ('20') :: integer) AS "products_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "customers" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "customers_1"."email" AS "email", "customers_1"."full_name" AS "full_name") AS "sel_1")) AS "sel_json_1" FROM (SELECT "customers"."email", "customers"."full_name" FROM "customers" LEFT OUTER JOIN "purchases" ON (("purchases"."product_id") = ("products_0"."id")) WHERE ((("customers"."id") = ("purchases"."customer_id"))) LIMIT ('20') :: integer) AS "customers_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "customers_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -385,9 +240,9 @@ func manyToManyReverse(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('customers', customers) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "customers" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('customers', customers) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "customers" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "customers_0"."email" AS "email", "customers_0"."full_name" AS "full_name", "products_1_join"."products" AS "products") AS "sel_0")) AS "sel_json_0" FROM (SELECT "customers"."email", "customers"."full_name", "customers"."id" FROM "customers" LIMIT ('20') :: integer) AS "customers_0" LEFT OUTER JOIN LATERAL (SELECT coalesce(json_agg("sel_json_1"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_1" FROM (SELECT "products_1"."name" AS "name") AS "sel_1")) AS "sel_json_1" FROM (SELECT "products"."name" FROM "products" LEFT OUTER JOIN "purchases" ON (("purchases"."customer_id") = ("customers_0"."id")) WHERE ((("products"."id") = ("purchases"."product_id"))) LIMIT ('20') :: integer) AS "products_1" LIMIT ('20') :: integer) AS "sel_json_agg_1") AS "products_1_join" ON ('true') LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -405,9 +260,49 @@ func aggFunction(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name", "products_0"."count_price" AS "count_price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name", count("products"."price") AS "count_price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8)) GROUP BY "products"."name" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func aggFunctionBlockedByCol(t *testing.T) {
|
||||
gql := `query {
|
||||
products {
|
||||
name
|
||||
count_price
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "anon")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func aggFunctionDisabled(t *testing.T) {
|
||||
gql := `query {
|
||||
products {
|
||||
name
|
||||
count_price
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."name" FROM "products" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "anon1")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -425,9 +320,28 @@ func aggFunctionWithFilter(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") > 10)) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('products', products) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "products" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."max_price" AS "max_price") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", max("products"."price") AS "max_price" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."id") > 10)) GROUP BY "products"."id" LIMIT ('20') :: integer) AS "products_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func syntheticTables(t *testing.T) {
|
||||
gql := `query {
|
||||
me {
|
||||
email
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('me', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT ) AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = {{user_id}})) LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -445,9 +359,9 @@ func queryWithVariables(t *testing.T) {
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") = {{product_price}}) AND (("products"."id") = {{product_id}})) LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('product', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "products_0"."id" AS "id", "products_0"."name" AS "name") AS "sel_0")) AS "sel_json_0" FROM (SELECT "products"."id", "products"."name" FROM "products" WHERE ((("products"."price") > 0) AND (("products"."price") < 8) AND (("products"."price") = {{product_price}}) AND (("products"."id") = {{product_id}})) LIMIT ('1') :: integer) AS "products_0" LIMIT ('1') :: integer) AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -457,16 +371,23 @@ func queryWithVariables(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func syntheticTables(t *testing.T) {
|
||||
func withWhereOnRelations(t *testing.T) {
|
||||
gql := `query {
|
||||
me {
|
||||
users(where: {
|
||||
not: {
|
||||
products: {
|
||||
price: { gt: 3 }
|
||||
}
|
||||
}
|
||||
}) {
|
||||
id
|
||||
email
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('me', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email" FROM "users" WHERE ((("users"."id") = {{user_id}})) LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "done_1337";`
|
||||
sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."email" AS "email") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id", "users"."email" FROM "users" WHERE (NOT EXISTS (SELECT 1 FROM products WHERE (("products"."user_id") = ("users"."id")))) LIMIT ('20') :: integer) AS "users_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil)
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "user")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
@ -476,7 +397,48 @@ func syntheticTables(t *testing.T) {
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompileSelect(t *testing.T) {
|
||||
func blockedQuery(t *testing.T) {
|
||||
gql := `query {
|
||||
user(id: 5, where: { id: { gt: 3 } }) {
|
||||
id
|
||||
full_name
|
||||
email
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('user', sel_json_0) FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."id" AS "id", "users_0"."full_name" AS "full_name", "users_0"."email" AS "email") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."id", "users"."full_name", "users"."email" FROM "users" WHERE (false) LIMIT ('1') :: integer) AS "users_0" LIMIT ('1') :: integer) AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "bad_dude")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func blockedFunctions(t *testing.T) {
|
||||
gql := `query {
|
||||
users {
|
||||
count_id
|
||||
email
|
||||
}
|
||||
}`
|
||||
|
||||
sql := `SELECT json_object_agg('users', users) FROM (SELECT coalesce(json_agg("sel_json_0"), '[]') AS "users" FROM (SELECT row_to_json((SELECT "sel_0" FROM (SELECT "users_0"."email" AS "email") AS "sel_0")) AS "sel_json_0" FROM (SELECT "users"."email" FROM "users" WHERE (false) LIMIT ('20') :: integer) AS "users_0" LIMIT ('20') :: integer) AS "sel_json_agg_0") AS "done_1337"`
|
||||
|
||||
resSQL, err := compileGQLToPSQL(gql, nil, "bad_dude")
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
|
||||
if string(resSQL) != sql {
|
||||
t.Fatal(errNotExpected)
|
||||
}
|
||||
}
|
||||
|
||||
func TestCompileQuery(t *testing.T) {
|
||||
t.Run("withComplexArgs", withComplexArgs)
|
||||
t.Run("withWhereAndList", withWhereAndList)
|
||||
t.Run("withWhereIsNull", withWhereIsNull)
|
||||
@ -488,10 +450,14 @@ func TestCompileSelect(t *testing.T) {
|
||||
t.Run("manyToMany", manyToMany)
|
||||
t.Run("manyToManyReverse", manyToManyReverse)
|
||||
t.Run("aggFunction", aggFunction)
|
||||
t.Run("aggFunctionBlockedByCol", aggFunctionBlockedByCol)
|
||||
t.Run("aggFunctionDisabled", aggFunctionDisabled)
|
||||
t.Run("aggFunctionWithFilter", aggFunctionWithFilter)
|
||||
t.Run("syntheticTables", syntheticTables)
|
||||
t.Run("queryWithVariables", queryWithVariables)
|
||||
|
||||
t.Run("withWhereOnRelations", withWhereOnRelations)
|
||||
t.Run("blockedQuery", blockedQuery)
|
||||
t.Run("blockedFunctions", blockedFunctions)
|
||||
}
|
||||
|
||||
var benchGQL = []byte(`query {
|
||||
@ -526,7 +492,7 @@ func BenchmarkCompile(b *testing.B) {
|
||||
for n := 0; n < b.N; n++ {
|
||||
w.Reset()
|
||||
|
||||
qc, err := qcompile.Compile(benchGQL)
|
||||
qc, err := qcompile.Compile(benchGQL, "user")
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
||||
@ -547,7 +513,7 @@ func BenchmarkCompileParallel(b *testing.B) {
|
||||
for pb.Next() {
|
||||
w.Reset()
|
||||
|
||||
qc, err := qcompile.Compile(benchGQL)
|
||||
qc, err := qcompile.Compile(benchGQL, "user")
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
}
|
123
qcode/config.go
Normal file
@ -0,0 +1,123 @@
|
||||
package qcode
|
||||
|
||||
import (
|
||||
"sort"
|
||||
"strings"
|
||||
)
|
||||
|
||||
type Config struct {
|
||||
Blocklist []string
|
||||
KeepArgs bool
|
||||
}
|
||||
|
||||
type QueryConfig struct {
|
||||
Limit int
|
||||
Filters []string
|
||||
Columns []string
|
||||
DisableFunctions bool
|
||||
}
|
||||
|
||||
type InsertConfig struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
Presets map[string]string
|
||||
}
|
||||
|
||||
type UpdateConfig struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
Presets map[string]string
|
||||
}
|
||||
|
||||
type DeleteConfig struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
}
|
||||
|
||||
type TRConfig struct {
|
||||
Query QueryConfig
|
||||
Insert InsertConfig
|
||||
Update UpdateConfig
|
||||
Delete DeleteConfig
|
||||
}
|
||||
|
||||
type trval struct {
|
||||
query struct {
|
||||
limit string
|
||||
fil *Exp
|
||||
cols map[string]struct{}
|
||||
disable struct {
|
||||
funcs bool
|
||||
}
|
||||
}
|
||||
|
||||
insert struct {
|
||||
fil *Exp
|
||||
cols map[string]struct{}
|
||||
psmap map[string]string
|
||||
pslist []string
|
||||
}
|
||||
|
||||
update struct {
|
||||
fil *Exp
|
||||
cols map[string]struct{}
|
||||
psmap map[string]string
|
||||
pslist []string
|
||||
}
|
||||
|
||||
delete struct {
|
||||
fil *Exp
|
||||
cols map[string]struct{}
|
||||
}
|
||||
}
|
||||
|
||||
func (trv *trval) allowedColumns(qt QType) map[string]struct{} {
|
||||
switch qt {
|
||||
case QTQuery:
|
||||
return trv.query.cols
|
||||
case QTInsert:
|
||||
return trv.insert.cols
|
||||
case QTUpdate:
|
||||
return trv.update.cols
|
||||
case QTDelete:
|
||||
return trv.delete.cols
|
||||
case QTUpsert:
|
||||
return trv.insert.cols
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (trv *trval) filter(qt QType) *Exp {
|
||||
switch qt {
|
||||
case QTQuery:
|
||||
return trv.query.fil
|
||||
case QTInsert:
|
||||
return trv.insert.fil
|
||||
case QTUpdate:
|
||||
return trv.update.fil
|
||||
case QTDelete:
|
||||
return trv.delete.fil
|
||||
case QTUpsert:
|
||||
return trv.insert.fil
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func listToMap(list []string) map[string]struct{} {
|
||||
m := make(map[string]struct{}, len(list))
|
||||
for i := range list {
|
||||
m[strings.ToLower(list[i])] = struct{}{}
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
func mapToList(m map[string]string) []string {
|
||||
list := []string{}
|
||||
for k := range m {
|
||||
list = append(list, strings.ToLower(k))
|
||||
}
|
||||
sort.Strings(list)
|
||||
return list
|
||||
}
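The per-role, per-table rules declared with TRConfig are registered on the compiler through AddRole, as the updated qcode tests later in this diff show. A minimal usage sketch, assuming only the exported API visible in this diff; the role, table, columns and filter values are illustrative:

```go
// Sketch: wire up a role-specific query config and compile a query for that
// role. Field values here are examples, not recommended settings.
package main

import (
	"log"

	"github.com/dosco/super-graph/qcode"
)

func main() {
	qc, err := qcode.NewCompiler(qcode.Config{
		Blocklist: []string{"secret", "password", "token"},
	})
	if err != nil {
		log.Fatal(err)
	}

	if err := qc.AddRole("user", "product", qcode.TRConfig{
		Query: qcode.QueryConfig{
			Limit:   20,
			Columns: []string{"id", "name", "price"},
			Filters: []string{`{ price: { gt: 0 } }`},
		},
	}); err != nil {
		log.Fatal(err)
	}

	// Compilation is now per role.
	if _, err := qc.Compile([]byte(`query { product(id: 15) { id name } }`), "user"); err != nil {
		log.Fatal(err)
	}
}
```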
|
21
qcode/corpus/001.gql
Normal file
@ -0,0 +1,21 @@
|
||||
query {
	products(
		# returns only 30 items
		limit: 30,

		# starts from item 10, commented out for now
		# offset: 10,

		# orders the response items by highest price
		order_by: { price: desc },

		# no duplicate prices returned
		distinct: [ price ]

		# only items with an id >= 20 and < 28 are returned
		where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
		id
		name
		price
	}
}
|
14
qcode/corpus/002.gql
Normal file
@ -0,0 +1,14 @@
|
||||
query {
|
||||
products(
|
||||
where: {
|
||||
or: {
|
||||
not: { id: { is_null: true } },
|
||||
price: { gt: 10 },
|
||||
price: { lt: 20 }
|
||||
} }
|
||||
) {
|
||||
id
|
||||
name
|
||||
price
|
||||
}
|
||||
}
|
12
qcode/corpus/003.gql
Normal file
@ -0,0 +1,12 @@
|
||||
query {
|
||||
products(
|
||||
where: {
|
||||
and: {
|
||||
not: { id: { is_null: true } },
|
||||
price: { gt: 10 }
|
||||
}}) {
|
||||
id
|
||||
name
|
||||
price
|
||||
}
|
||||
}
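These corpus .gql files are presumably seed inputs for the go-fuzz corpus used by the Fuzz entrypoint below; go-fuzz picks up initial inputs from a corpus/ directory under its working directory and mutates them from there.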
|
@ -1,14 +1,14 @@
|
||||
// +build gofuzz
|
||||
|
||||
package qcode
|
||||
|
||||
// FuzzerEntrypoint for Fuzzbuzz
|
||||
func FuzzerEntrypoint(data []byte) int {
|
||||
//testData := string(data)
|
||||
|
||||
func Fuzz(data []byte) int {
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
_, err := qcompile.Compile(data)
|
||||
_, err := qcompile.Compile(data, "user")
|
||||
if err != nil {
|
||||
return -1
|
||||
return 0
|
||||
}
|
||||
|
||||
return 0
|
||||
return 1
|
||||
}
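Under go-fuzz conventions, returning 1 tells the fuzzer the input was handled successfully and should be prioritized for further mutation, 0 is neutral, and -1 keeps the input out of the corpus even if it adds coverage; that is why the error path now returns 0 instead of -1 and the success path returns 1 instead of 0.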
|
||||
|
@ -18,7 +18,9 @@ type parserType int32
|
||||
const (
|
||||
maxFields = 100
|
||||
maxArgs = 10
|
||||
)
|
||||
|
||||
const (
|
||||
parserError parserType = iota
|
||||
parserEOF
|
||||
opQuery
|
||||
|
@ -5,54 +5,19 @@ import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
/*
|
||||
func compareOp(op1, op2 Operation) error {
|
||||
if op1.Type != op2.Type {
|
||||
return errors.New("operator type mismatch")
|
||||
}
|
||||
|
||||
if op1.Name != op2.Name {
|
||||
return errors.New("operator name mismatch")
|
||||
}
|
||||
|
||||
if len(op1.Args) != len(op2.Args) {
|
||||
return errors.New("operator args length mismatch")
|
||||
}
|
||||
|
||||
for i := range op1.Args {
|
||||
if !reflect.DeepEqual(op1.Args[i], op2.Args[i]) {
|
||||
return fmt.Errorf("operator args: %v != %v", op1.Args[i], op2.Args[i])
|
||||
}
|
||||
}
|
||||
|
||||
if len(op1.Fields) != len(op2.Fields) {
|
||||
return errors.New("operator field length mismatch")
|
||||
}
|
||||
|
||||
for i := range op1.Fields {
|
||||
if !reflect.DeepEqual(op1.Fields[i].Args, op2.Fields[i].Args) {
|
||||
return fmt.Errorf("operator field args: %v != %v", op1.Fields[i].Args, op2.Fields[i].Args)
|
||||
}
|
||||
}
|
||||
|
||||
for i := range op1.Fields {
|
||||
if !reflect.DeepEqual(op1.Fields[i].Children, op2.Fields[i].Children) {
|
||||
return fmt.Errorf("operator field fields: %v != %v", op1.Fields[i].Children, op2.Fields[i].Children)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
*/
|
||||
|
||||
func TestCompile1(t *testing.T) {
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
qc, _ := NewCompiler(Config{})
|
||||
qc.AddRole("user", "product", TRConfig{
|
||||
Query: QueryConfig{
|
||||
Columns: []string{"id", "Name"},
|
||||
},
|
||||
})
|
||||
|
||||
_, err := qcompile.Compile([]byte(`
|
||||
_, err := qc.Compile([]byte(`
|
||||
product(id: 15) {
|
||||
id
|
||||
name
|
||||
}`))
|
||||
}`), "user")
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
@ -60,13 +25,18 @@ func TestCompile1(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestCompile2(t *testing.T) {
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
qc, _ := NewCompiler(Config{})
|
||||
qc.AddRole("user", "product", TRConfig{
|
||||
Query: QueryConfig{
|
||||
Columns: []string{"ID"},
|
||||
},
|
||||
})
|
||||
|
||||
_, err := qcompile.Compile([]byte(`
|
||||
_, err := qc.Compile([]byte(`
|
||||
query { product(id: 15) {
|
||||
id
|
||||
name
|
||||
} }`))
|
||||
} }`), "user")
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
@ -74,15 +44,20 @@ func TestCompile2(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestCompile3(t *testing.T) {
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
qc, _ := NewCompiler(Config{})
|
||||
qc.AddRole("user", "product", TRConfig{
|
||||
Query: QueryConfig{
|
||||
Columns: []string{"ID"},
|
||||
},
|
||||
})
|
||||
|
||||
_, err := qcompile.Compile([]byte(`
|
||||
_, err := qc.Compile([]byte(`
|
||||
mutation {
|
||||
product(id: 15, name: "Test") {
|
||||
id
|
||||
name
|
||||
}
|
||||
}`))
|
||||
}`), "user")
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
@ -91,7 +66,7 @@ func TestCompile3(t *testing.T) {
|
||||
|
||||
func TestInvalidCompile1(t *testing.T) {
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
_, err := qcompile.Compile([]byte(`#`))
|
||||
_, err := qcompile.Compile([]byte(`#`), "user")
|
||||
|
||||
if err == nil {
|
||||
t.Fatal(errors.New("expecting an error"))
|
||||
@ -100,7 +75,7 @@ func TestInvalidCompile1(t *testing.T) {
|
||||
|
||||
func TestInvalidCompile2(t *testing.T) {
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
_, err := qcompile.Compile([]byte(`{u(where:{not:0})}`))
|
||||
_, err := qcompile.Compile([]byte(`{u(where:{not:0})}`), "user")
|
||||
|
||||
if err == nil {
|
||||
t.Fatal(errors.New("expecting an error"))
|
||||
@ -109,7 +84,7 @@ func TestInvalidCompile2(t *testing.T) {
|
||||
|
||||
func TestEmptyCompile(t *testing.T) {
|
||||
qcompile, _ := NewCompiler(Config{})
|
||||
_, err := qcompile.Compile([]byte(``))
|
||||
_, err := qcompile.Compile([]byte(``), "user")
|
||||
|
||||
if err == nil {
|
||||
t.Fatal(errors.New("expecting an error"))
|
||||
@ -144,7 +119,7 @@ func BenchmarkQCompile(b *testing.B) {
|
||||
b.ReportAllocs()
|
||||
|
||||
for n := 0; n < b.N; n++ {
|
||||
_, err := qcompile.Compile(gql)
|
||||
_, err := qcompile.Compile(gql, "user")
|
||||
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
@ -160,7 +135,7 @@ func BenchmarkQCompileP(b *testing.B) {
|
||||
|
||||
b.RunParallel(func(pb *testing.PB) {
|
||||
for pb.Next() {
|
||||
_, err := qcompile.Compile(gql)
|
||||
_, err := qcompile.Compile(gql, "user")
|
||||
|
||||
if err != nil {
|
||||
b.Fatal(err)
|
||||
|
395
qcode/qcode.go
@ -3,6 +3,7 @@ package qcode
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"strconv"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
@ -15,25 +16,20 @@ type Action int
|
||||
|
||||
const (
|
||||
maxSelectors = 30
|
||||
)
|
||||
|
||||
const (
|
||||
QTQuery QType = iota + 1
|
||||
QTMutation
|
||||
|
||||
ActionInsert Action = iota + 1
|
||||
ActionUpdate
|
||||
ActionDelete
|
||||
ActionUpsert
|
||||
QTInsert
|
||||
QTUpdate
|
||||
QTDelete
|
||||
QTUpsert
|
||||
)
|
||||
|
||||
type QCode struct {
|
||||
Type QType
|
||||
Selects []Select
|
||||
}
|
||||
|
||||
type Column struct {
|
||||
Table string
|
||||
Name string
|
||||
FieldName string
|
||||
Type QType
|
||||
ActionVar string
|
||||
Selects []Select
|
||||
}
|
||||
|
||||
type Select struct {
|
||||
@ -47,22 +43,30 @@ type Select struct {
|
||||
OrderBy []*OrderBy
|
||||
DistinctOn []string
|
||||
Paging Paging
|
||||
Action Action
|
||||
ActionVar string
|
||||
Children []int32
|
||||
Functions bool
|
||||
Allowed map[string]struct{}
|
||||
PresetMap map[string]string
|
||||
PresetList []string
|
||||
}
|
||||
|
||||
type Column struct {
|
||||
Table string
|
||||
Name string
|
||||
FieldName string
|
||||
}
|
||||
|
||||
type Exp struct {
|
||||
Op ExpOp
|
||||
Col string
|
||||
NestedCol bool
|
||||
Type ValType
|
||||
Val string
|
||||
ListType ValType
|
||||
ListVal []string
|
||||
Children []*Exp
|
||||
childrenA [5]*Exp
|
||||
doFree bool
|
||||
Op ExpOp
|
||||
Col string
|
||||
NestedCols []string
|
||||
Type ValType
|
||||
Val string
|
||||
ListType ValType
|
||||
ListVal []string
|
||||
Children []*Exp
|
||||
childrenA [5]*Exp
|
||||
doFree bool
|
||||
}
|
||||
|
||||
var zeroExp = Exp{doFree: true}
|
||||
@ -77,8 +81,9 @@ type OrderBy struct {
|
||||
}
|
||||
|
||||
type Paging struct {
|
||||
Limit string
|
||||
Offset string
|
||||
Limit string
|
||||
Offset string
|
||||
NoLimit bool
|
||||
}
|
||||
|
||||
type ExpOp int
|
||||
@ -110,6 +115,7 @@ const (
|
||||
OpIsNull
|
||||
OpEqID
|
||||
OpTsQuery
|
||||
OpFalse
|
||||
)
|
||||
|
||||
type ValType int
|
||||
@ -145,81 +151,23 @@ const (
|
||||
OrderDescNullsLast
|
||||
)
|
||||
|
||||
type Filters struct {
|
||||
All map[string][]string
|
||||
Query map[string][]string
|
||||
Insert map[string][]string
|
||||
Update map[string][]string
|
||||
Delete map[string][]string
|
||||
}
|
||||
|
||||
type Config struct {
|
||||
DefaultFilter []string
|
||||
FilterMap Filters
|
||||
Blocklist []string
|
||||
KeepArgs bool
|
||||
}
|
||||
|
||||
type Compiler struct {
|
||||
df *Exp
|
||||
fm struct {
|
||||
all map[string]*Exp
|
||||
query map[string]*Exp
|
||||
insert map[string]*Exp
|
||||
update map[string]*Exp
|
||||
delete map[string]*Exp
|
||||
}
|
||||
tr map[string]map[string]*trval
|
||||
bl map[string]struct{}
|
||||
ka bool
|
||||
}
|
||||
|
||||
var opMap = map[parserType]QType{
|
||||
opQuery: QTQuery,
|
||||
opMutate: QTMutation,
|
||||
}
|
||||
|
||||
var expPool = sync.Pool{
|
||||
New: func() interface{} { return &Exp{doFree: true} },
|
||||
}
|
||||
|
||||
func NewCompiler(c Config) (*Compiler, error) {
|
||||
var err error
|
||||
co := &Compiler{ka: c.KeepArgs}
|
||||
|
||||
co.tr = make(map[string]map[string]*trval)
|
||||
co.bl = make(map[string]struct{}, len(c.Blocklist))
|
||||
|
||||
for i := range c.Blocklist {
|
||||
co.bl[c.Blocklist[i]] = struct{}{}
|
||||
}
|
||||
|
||||
co.df, err = compileFilter(c.DefaultFilter)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
co.fm.all, err = buildFilters(c.FilterMap.All)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
co.fm.query, err = buildFilters(c.FilterMap.Query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
co.fm.insert, err = buildFilters(c.FilterMap.Insert)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
co.fm.update, err = buildFilters(c.FilterMap.Update)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
co.fm.delete, err = buildFilters(c.FilterMap.Delete)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
co.bl[strings.ToLower(c.Blocklist[i])] = struct{}{}
|
||||
}
|
||||
|
||||
seedExp := [100]Exp{}
|
||||
@ -232,58 +180,94 @@ func NewCompiler(c Config) (*Compiler, error) {
|
||||
return co, nil
|
||||
}
|
||||
|
||||
func buildFilters(filMap map[string][]string) (map[string]*Exp, error) {
|
||||
fm := make(map[string]*Exp, len(filMap))
|
||||
func (com *Compiler) AddRole(role, table string, trc TRConfig) error {
|
||||
var err error
|
||||
trv := &trval{}
|
||||
|
||||
for k, v := range filMap {
|
||||
fil, err := compileFilter(v)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
singular := flect.Singularize(k)
|
||||
plural := flect.Pluralize(k)
|
||||
// query config
|
||||
trv.query.fil, err = compileFilter(trc.Query.Filters)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if trc.Query.Limit > 0 {
|
||||
trv.query.limit = strconv.Itoa(trc.Query.Limit)
|
||||
}
|
||||
trv.query.cols = listToMap(trc.Query.Columns)
|
||||
trv.query.disable.funcs = trc.Query.DisableFunctions
|
||||
|
||||
fm[singular] = fil
|
||||
fm[plural] = fil
|
||||
// insert config
|
||||
if trv.insert.fil, err = compileFilter(trc.Insert.Filters); err != nil {
|
||||
return err
|
||||
}
|
||||
trv.insert.cols = listToMap(trc.Insert.Columns)
|
||||
trv.insert.psmap = trc.Insert.Presets
|
||||
trv.insert.pslist = mapToList(trv.insert.psmap)
|
||||
|
||||
// update config
|
||||
if trv.update.fil, err = compileFilter(trc.Update.Filters); err != nil {
|
||||
return err
|
||||
}
|
||||
trv.update.cols = listToMap(trc.Update.Columns)
|
||||
trv.update.psmap = trc.Update.Presets
|
||||
trv.update.pslist = mapToList(trv.update.psmap)
|
||||
|
||||
// delete config
|
||||
if trv.delete.fil, err = compileFilter(trc.Delete.Filters); err != nil {
|
||||
return err
|
||||
}
|
||||
trv.delete.cols = listToMap(trc.Delete.Columns)
|
||||
|
||||
singular := flect.Singularize(table)
|
||||
plural := flect.Pluralize(table)
|
||||
|
||||
if _, ok := com.tr[role]; !ok {
|
||||
com.tr[role] = make(map[string]*trval)
|
||||
}
|
||||
|
||||
return fm, nil
|
||||
com.tr[role][singular] = trv
|
||||
com.tr[role][plural] = trv
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) Compile(query []byte) (*QCode, error) {
|
||||
var qc QCode
|
||||
func (com *Compiler) Compile(query []byte, role string) (*QCode, error) {
|
||||
var err error
|
||||
|
||||
qc := QCode{Type: QTQuery}
|
||||
|
||||
op, err := Parse(query)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
qc.Selects, err = com.compileQuery(op)
|
||||
if err != nil {
|
||||
if err = com.compileQuery(&qc, op, role); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if t, ok := opMap[op.Type]; ok {
|
||||
qc.Type = t
|
||||
} else {
|
||||
return nil, fmt.Errorf("Unknown operation type %d", op.Type)
|
||||
}
|
||||
|
||||
opPool.Put(op)
|
||||
|
||||
return &qc, nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
|
||||
func (com *Compiler) compileQuery(qc *QCode, op *Operation, role string) error {
|
||||
id := int32(0)
|
||||
parentID := int32(0)
|
||||
|
||||
if len(op.Fields) == 0 {
|
||||
return errors.New("invalid graphql no query found")
|
||||
}
|
||||
|
||||
if op.Type == opMutate {
|
||||
if err := com.setMutationType(qc, op.Fields[0].Args); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
selects := make([]Select, 0, 5)
|
||||
st := NewStack()
|
||||
action := qc.Type
|
||||
|
||||
if len(op.Fields) == 0 {
|
||||
return nil, errors.New("empty query")
|
||||
return errors.New("empty query")
|
||||
}
|
||||
st.Push(op.Fields[0].ID)
|
||||
|
||||
@ -293,7 +277,7 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
|
||||
}
|
||||
|
||||
if id >= maxSelectors {
|
||||
return nil, fmt.Errorf("selector limit reached (%d)", maxSelectors)
|
||||
return fmt.Errorf("selector limit reached (%d)", maxSelectors)
|
||||
}
|
||||
|
||||
fid := st.Pop()
|
||||
@ -303,14 +287,32 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
|
||||
continue
|
||||
}
|
||||
|
||||
trv := com.getRole(role, field.Name)
|
||||
|
||||
selects = append(selects, Select{
|
||||
ID: id,
|
||||
ParentID: parentID,
|
||||
Table: field.Name,
|
||||
Children: make([]int32, 0, 5),
|
||||
ID: id,
|
||||
ParentID: parentID,
|
||||
Table: field.Name,
|
||||
Children: make([]int32, 0, 5),
|
||||
Allowed: trv.allowedColumns(action),
|
||||
Functions: true,
|
||||
})
|
||||
s := &selects[(len(selects) - 1)]
|
||||
|
||||
switch action {
|
||||
case QTQuery:
|
||||
s.Functions = !trv.query.disable.funcs
|
||||
s.Paging.Limit = trv.query.limit
|
||||
|
||||
case QTInsert:
|
||||
s.PresetMap = trv.insert.psmap
|
||||
s.PresetList = trv.insert.pslist
|
||||
|
||||
case QTUpdate:
|
||||
s.PresetMap = trv.update.psmap
|
||||
s.PresetList = trv.update.pslist
|
||||
}
|
||||
|
||||
if s.ID != 0 {
|
||||
p := &selects[s.ParentID]
|
||||
p.Children = append(p.Children, s.ID)
|
||||
@ -322,12 +324,13 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
|
||||
s.FieldName = s.Table
|
||||
}
|
||||
|
||||
err := com.compileArgs(s, field.Args)
|
||||
err := com.compileArgs(qc, s, field.Args)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
return err
|
||||
}
|
||||
|
||||
s.Cols = make([]Column, 0, len(field.Children))
|
||||
action = QTQuery
|
||||
|
||||
for _, cid := range field.Children {
|
||||
f := op.Fields[cid]
|
||||
@ -356,57 +359,42 @@ func (com *Compiler) compileQuery(op *Operation) ([]Select, error) {
|
||||
}
|
||||
|
||||
if id == 0 {
|
||||
return nil, errors.New("invalid query")
|
||||
return errors.New("invalid query")
|
||||
}
|
||||
|
||||
var fil *Exp
|
||||
|
||||
root := &selects[0]
|
||||
|
||||
switch op.Type {
|
||||
case opQuery:
|
||||
fil, _ = com.fm.query[root.Table]
|
||||
|
||||
case opMutate:
|
||||
switch root.Action {
|
||||
case ActionInsert:
|
||||
fil, _ = com.fm.insert[root.Table]
|
||||
case ActionUpdate:
|
||||
fil, _ = com.fm.update[root.Table]
|
||||
case ActionDelete:
|
||||
fil, _ = com.fm.delete[root.Table]
|
||||
case ActionUpsert:
|
||||
fil, _ = com.fm.insert[root.Table]
|
||||
}
|
||||
if trv, ok := com.tr[role][op.Fields[0].Name]; ok {
|
||||
fil = trv.filter(qc.Type)
|
||||
}
|
||||
|
||||
if fil == nil {
|
||||
fil, _ = com.fm.all[root.Table]
|
||||
}
|
||||
|
||||
if fil == nil {
|
||||
fil = com.df
|
||||
}
|
||||
|
||||
if fil != nil && fil.Op != OpNop {
|
||||
if root.Where != nil {
|
||||
ow := root.Where
|
||||
|
||||
root.Where = expPool.Get().(*Exp)
|
||||
root.Where.Reset()
|
||||
root.Where.Op = OpAnd
|
||||
root.Where.Children = root.Where.childrenA[:2]
|
||||
root.Where.Children[0] = fil
|
||||
root.Where.Children[1] = ow
|
||||
} else {
|
||||
if fil != nil {
|
||||
switch fil.Op {
|
||||
case OpNop:
|
||||
case OpFalse:
|
||||
root.Where = fil
|
||||
default:
|
||||
if root.Where != nil {
|
||||
ow := root.Where
|
||||
|
||||
root.Where = expPool.Get().(*Exp)
|
||||
root.Where.Reset()
|
||||
root.Where.Op = OpAnd
|
||||
root.Where.Children = root.Where.childrenA[:2]
|
||||
root.Where.Children[0] = fil
|
||||
root.Where.Children[1] = ow
|
||||
} else {
|
||||
root.Where = fil
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return selects[:id], nil
|
||||
qc.Selects = selects[:id]
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgs(sel *Select, args []Arg) error {
|
||||
func (com *Compiler) compileArgs(qc *QCode, sel *Select, args []Arg) error {
|
||||
var err error
|
||||
|
||||
if com.ka {
|
||||
@ -418,9 +406,7 @@ func (com *Compiler) compileArgs(sel *Select, args []Arg) error {
|
||||
|
||||
switch arg.Name {
|
||||
case "id":
|
||||
if sel.ID == 0 {
|
||||
err = com.compileArgID(sel, arg)
|
||||
}
|
||||
err = com.compileArgID(sel, arg)
|
||||
case "search":
|
||||
err = com.compileArgSearch(sel, arg)
|
||||
case "where":
|
||||
@ -433,18 +419,6 @@ func (com *Compiler) compileArgs(sel *Select, args []Arg) error {
|
||||
err = com.compileArgLimit(sel, arg)
|
||||
case "offset":
|
||||
err = com.compileArgOffset(sel, arg)
|
||||
case "insert":
|
||||
sel.Action = ActionInsert
|
||||
err = com.compileArgAction(sel, arg)
|
||||
case "update":
|
||||
sel.Action = ActionUpdate
|
||||
err = com.compileArgAction(sel, arg)
|
||||
case "upsert":
|
||||
sel.Action = ActionUpsert
|
||||
err = com.compileArgAction(sel, arg)
|
||||
case "delete":
|
||||
sel.Action = ActionDelete
|
||||
err = com.compileArgAction(sel, arg)
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
@ -461,6 +435,45 @@ func (com *Compiler) compileArgs(sel *Select, args []Arg) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) setMutationType(qc *QCode, args []Arg) error {
|
||||
setActionVar := func(arg *Arg) error {
|
||||
if arg.Val.Type != nodeVar {
|
||||
return fmt.Errorf("value for argument '%s' must be a variable", arg.Name)
|
||||
}
|
||||
qc.ActionVar = arg.Val.Val
|
||||
return nil
|
||||
}
|
||||
|
||||
for i := range args {
|
||||
arg := &args[i]
|
||||
|
||||
switch arg.Name {
|
||||
case "insert":
|
||||
qc.Type = QTInsert
|
||||
return setActionVar(arg)
|
||||
case "update":
|
||||
qc.Type = QTUpdate
|
||||
return setActionVar(arg)
|
||||
case "upsert":
|
||||
qc.Type = QTUpsert
|
||||
return setActionVar(arg)
|
||||
case "delete":
|
||||
qc.Type = QTDelete
|
||||
|
||||
if arg.Val.Type != nodeBool {
|
||||
return fmt.Errorf("value for argument '%s' must be a boolean", arg.Name)
|
||||
}
|
||||
|
||||
if arg.Val.Val == "false" {
|
||||
qc.Type = QTQuery
|
||||
}
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgObj(st *util.Stack, arg *Arg) (*Exp, error) {
|
||||
if arg.Val.Type != nodeObj {
|
||||
return nil, fmt.Errorf("expecting an object")
|
||||
@ -540,6 +553,10 @@ func (com *Compiler) compileArgNode(st *util.Stack, node *Node, usePool bool) (*
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgID(sel *Select, arg *Arg) error {
|
||||
if sel.ID != 0 {
|
||||
return nil
|
||||
}
|
||||
|
||||
if sel.Where != nil && sel.Where.Op == OpEqID {
|
||||
return nil
|
||||
}
|
||||
@ -732,24 +749,14 @@ func (com *Compiler) compileArgOffset(sel *Select, arg *Arg) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (com *Compiler) compileArgAction(sel *Select, arg *Arg) error {
|
||||
switch sel.Action {
|
||||
case ActionDelete:
|
||||
if arg.Val.Type != nodeBool {
|
||||
return fmt.Errorf("value for argument '%s' must be a boolean", arg.Name)
|
||||
}
|
||||
if arg.Val.Val == "false" {
|
||||
sel.Action = 0
|
||||
}
|
||||
var zeroTrv = &trval{}
|
||||
|
||||
default:
|
||||
if arg.Val.Type != nodeVar {
|
||||
return fmt.Errorf("value for argument '%s' must be a variable", arg.Name)
|
||||
}
|
||||
sel.ActionVar = arg.Val.Val
|
||||
func (com *Compiler) getRole(role, field string) *trval {
|
||||
if trv, ok := com.tr[role][field]; ok {
|
||||
return trv
|
||||
} else {
|
||||
return zeroTrv
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func newExp(st *util.Stack, node *Node, usePool bool) (*Exp, error) {
|
||||
@ -911,10 +918,8 @@ func setWhereColName(ex *Exp, node *Node) {
|
||||
}
|
||||
if len(list) == 1 {
|
||||
ex.Col = list[0]
|
||||
|
||||
} else if len(list) > 2 {
|
||||
ex.Col = buildPath(list)
|
||||
ex.NestedCol = true
|
||||
} else if len(list) > 1 {
|
||||
ex.NestedCols = list
|
||||
}
|
||||
}
|
||||
|
||||
@ -954,6 +959,10 @@ func compileFilter(filter []string) (*Exp, error) {
|
||||
}
|
||||
|
||||
for i := range filter {
|
||||
if filter[i] == "false" {
|
||||
return &Exp{Op: OpFalse, doFree: false}, nil
|
||||
}
|
||||
|
||||
node, err := ParseArgValue(filter[i])
|
||||
if err != nil {
|
||||
return nil, err
|
||||
|
@ -26,7 +26,8 @@ type allowItem struct {
|
||||
var _allowList allowList
|
||||
|
||||
type allowList struct {
|
||||
list map[string]*allowItem
|
||||
list []*allowItem
|
||||
index map[string]int
|
||||
filepath string
|
||||
saveChan chan *allowItem
|
||||
active bool
|
||||
@ -34,7 +35,7 @@ type allowList struct {
|
||||
|
||||
func initAllowList(cpath string) {
|
||||
_allowList = allowList{
|
||||
list: make(map[string]*allowItem),
|
||||
index: make(map[string]int),
|
||||
saveChan: make(chan *allowItem),
|
||||
active: true,
|
||||
}
|
||||
@ -172,17 +173,21 @@ func (al *allowList) load() {
|
||||
if c == 0 {
|
||||
if ty == AL_QUERY {
|
||||
q := string(b[s:(e + 1)])
|
||||
key := gqlHash(q, varBytes, "")
|
||||
|
||||
item := &allowItem{
|
||||
uri: uri,
|
||||
gql: q,
|
||||
}
|
||||
|
||||
if len(varBytes) != 0 {
|
||||
if idx, ok := al.index[key]; !ok {
|
||||
al.list = append(al.list, &allowItem{
|
||||
uri: uri,
|
||||
gql: q,
|
||||
vars: varBytes,
|
||||
})
|
||||
al.index[key] = len(al.list) - 1
|
||||
} else {
|
||||
item := al.list[idx]
|
||||
item.gql = q
|
||||
item.vars = varBytes
|
||||
}
|
||||
|
||||
al.list[gqlHash(q, varBytes)] = item
|
||||
varBytes = nil
|
||||
|
||||
} else if ty == AL_VARS {
|
||||
@ -203,7 +208,15 @@ func (al *allowList) save(item *allowItem) {
|
||||
if al.active == false {
|
||||
return
|
||||
}
|
||||
al.list[gqlHash(item.gql, item.vars)] = item
|
||||
|
||||
key := gqlHash(item.gql, item.vars, "")
|
||||
|
||||
if _, ok := al.index[key]; ok {
|
||||
return
|
||||
}
|
||||
|
||||
al.list = append(al.list, item)
|
||||
al.index[key] = len(al.list) - 1
|
||||
|
||||
f, err := os.Create(al.filepath)
|
||||
if err != nil {
|
||||
|
@ -8,20 +8,30 @@ import (
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
)
|
||||
|
||||
func varMap(ctx *coreContext) func(w io.Writer, tag string) (int, error) {
|
||||
func argMap(ctx *coreContext) func(w io.Writer, tag string) (int, error) {
|
||||
return func(w io.Writer, tag string) (int, error) {
|
||||
switch tag {
|
||||
case "user_id":
|
||||
if v := ctx.Value(userIDKey); v != nil {
|
||||
return stringVar(w, v.(string))
|
||||
}
|
||||
return 0, errNoUserID
|
||||
|
||||
case "user_id_provider":
|
||||
if v := ctx.Value(userIDProviderKey); v != nil {
|
||||
return stringVar(w, v.(string))
|
||||
return stringArg(w, v.(string))
|
||||
}
|
||||
return 0, errNoUserID
|
||||
io.WriteString(w, "null")
|
||||
return 0, nil
|
||||
|
||||
case "user_id":
|
||||
if v := ctx.Value(userIDKey); v != nil {
|
||||
return stringArg(w, v.(string))
|
||||
}
|
||||
|
||||
io.WriteString(w, "null")
|
||||
return 0, nil
|
||||
|
||||
case "user_role":
|
||||
if v := ctx.Value(userRoleKey); v != nil {
|
||||
return stringArg(w, v.(string))
|
||||
}
|
||||
io.WriteString(w, "null")
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
fields := jsn.Get(ctx.req.Vars, [][]byte{[]byte(tag)})
|
||||
@ -40,7 +50,7 @@ func varMap(ctx *coreContext) func(w io.Writer, tag string) (int, error) {
|
||||
}
|
||||
|
||||
if is {
|
||||
return stringVarB(w, fields[0].Value)
|
||||
return stringArgB(w, fields[0].Value)
|
||||
}
|
||||
|
||||
w.Write(fields[0].Value)
|
||||
@ -48,7 +58,7 @@ func varMap(ctx *coreContext) func(w io.Writer, tag string) (int, error) {
|
||||
}
|
||||
}
|
||||
|
||||
func varList(ctx *coreContext, args [][]byte) []interface{} {
|
||||
func argList(ctx *coreContext, args [][]byte) []interface{} {
|
||||
vars := make([]interface{}, len(args))
|
||||
|
||||
var fields map[string]interface{}
|
||||
@ -76,6 +86,11 @@ func varList(ctx *coreContext, args [][]byte) []interface{} {
|
||||
vars[i] = v.(string)
|
||||
}
|
||||
|
||||
case bytes.Equal(av, []byte("user_role")):
|
||||
if v := ctx.Value(userRoleKey); v != nil {
|
||||
vars[i] = v.(string)
|
||||
}
|
||||
|
||||
default:
|
||||
if v, ok := fields[string(av)]; ok {
|
||||
vars[i] = v
|
||||
@ -86,7 +101,7 @@ func varList(ctx *coreContext, args [][]byte) []interface{} {
|
||||
return vars
|
||||
}
|
||||
|
||||
func stringVar(w io.Writer, v string) (int, error) {
|
||||
func stringArg(w io.Writer, v string) (int, error) {
|
||||
if n, err := w.Write([]byte(`'`)); err != nil {
|
||||
return n, err
|
||||
}
|
||||
@ -96,7 +111,7 @@ func stringVar(w io.Writer, v string) (int, error) {
|
||||
return w.Write([]byte(`'`))
|
||||
}
|
||||
|
||||
func stringVarB(w io.Writer, v []byte) (int, error) {
|
||||
func stringArgB(w io.Writer, v []byte) (int, error) {
|
||||
if n, err := w.Write([]byte(`'`)); err != nil {
|
||||
return n, err
|
||||
}
|
38
serv/auth.go
38
serv/auth.go
@ -7,28 +7,42 @@ import (
|
||||
)
|
||||
|
||||
var (
|
||||
userIDProviderKey = struct{}{}
|
||||
userIDKey = struct{}{}
|
||||
userIDProviderKey = "user_id_provider"
|
||||
userIDKey = "user_id"
|
||||
userRoleKey = "user_role"
|
||||
)
|
||||
|
||||
func headerAuth(r *http.Request, c *config) *http.Request {
|
||||
if len(c.Auth.Header) == 0 {
|
||||
return nil
|
||||
}
|
||||
func headerAuth(next http.HandlerFunc) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := r.Context()
|
||||
|
||||
userID := r.Header.Get(c.Auth.Header)
|
||||
if len(userID) != 0 {
|
||||
ctx := context.WithValue(r.Context(), userIDKey, userID)
|
||||
return r.WithContext(ctx)
|
||||
}
|
||||
userIDProvider := r.Header.Get("X-User-ID-Provider")
|
||||
if len(userIDProvider) != 0 {
|
||||
ctx = context.WithValue(ctx, userIDProviderKey, userIDProvider)
|
||||
}
|
||||
|
||||
return nil
|
||||
userID := r.Header.Get("X-User-ID")
|
||||
if len(userID) != 0 {
|
||||
ctx = context.WithValue(ctx, userIDKey, userID)
|
||||
}
|
||||
|
||||
userRole := r.Header.Get("X-User-Role")
|
||||
if len(userRole) != 0 {
|
||||
ctx = context.WithValue(ctx, userRoleKey, userRole)
|
||||
}
|
||||
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
}
|
||||
}
|
||||
|
||||
func withAuth(next http.HandlerFunc) http.HandlerFunc {
|
||||
at := conf.Auth.Type
|
||||
ru := conf.Auth.Rails.URL
|
||||
|
||||
if conf.Auth.CredsInHeader {
|
||||
next = headerAuth(next)
|
||||
}
|
||||
|
||||
switch at {
|
||||
case "rails":
|
||||
if strings.HasPrefix(ru, "memcache:") {
|
||||
|
@ -58,11 +58,6 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
var tok string
|
||||
|
||||
if rn := headerAuth(r, conf); rn != nil {
|
||||
next.ServeHTTP(w, rn)
|
||||
return
|
||||
}
|
||||
|
||||
if len(cookie) != 0 {
|
||||
ck, err := r.Cookie(cookie)
|
||||
if err != nil {
|
||||
@ -102,7 +97,6 @@ func jwtHandler(next http.HandlerFunc) http.HandlerFunc {
|
||||
}
|
||||
next.ServeHTTP(w, r.WithContext(ctx))
|
||||
}
|
||||
|
||||
next.ServeHTTP(w, r)
|
||||
}
|
||||
}
|
||||
|
@ -42,11 +42,6 @@ func railsRedisHandler(next http.HandlerFunc) http.HandlerFunc {
|
||||
}
|
||||
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if rn := headerAuth(r, conf); rn != nil {
|
||||
next.ServeHTTP(w, rn)
|
||||
return
|
||||
}
|
||||
|
||||
ck, err := r.Cookie(cookie)
|
||||
if err != nil {
|
||||
next.ServeHTTP(w, r)
|
||||
@ -83,17 +78,12 @@ func railsMemcacheHandler(next http.HandlerFunc) http.HandlerFunc {
|
||||
|
||||
rURL, err := url.Parse(conf.Auth.Rails.URL)
|
||||
if err != nil {
|
||||
logger.Fatal().Err(err)
|
||||
logger.Fatal().Err(err).Send()
|
||||
}
|
||||
|
||||
mc := memcache.New(rURL.Host)
|
||||
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if rn := headerAuth(r, conf); rn != nil {
|
||||
next.ServeHTTP(w, rn)
|
||||
return
|
||||
}
|
||||
|
||||
ck, err := r.Cookie(cookie)
|
||||
if err != nil {
|
||||
next.ServeHTTP(w, r)
|
||||
@ -126,25 +116,20 @@ func railsCookieHandler(next http.HandlerFunc) http.HandlerFunc {
|
||||
|
||||
ra, err := railsAuth(conf)
|
||||
if err != nil {
|
||||
logger.Fatal().Err(err)
|
||||
logger.Fatal().Err(err).Send()
|
||||
}
|
||||
|
||||
return func(w http.ResponseWriter, r *http.Request) {
|
||||
if rn := headerAuth(r, conf); rn != nil {
|
||||
next.ServeHTTP(w, rn)
|
||||
return
|
||||
}
|
||||
|
||||
ck, err := r.Cookie(cookie)
|
||||
if err != nil {
|
||||
logger.Error().Err(err)
|
||||
if err != nil || len(ck.Value) == 0 {
|
||||
logger.Warn().Err(err).Msg("rails cookie missing")
|
||||
next.ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
||||
userID, err := ra.ParseCookie(ck.Value)
|
||||
if err != nil {
|
||||
logger.Error().Err(err)
|
||||
logger.Warn().Err(err).Msg("failed to parse rails cookie")
|
||||
next.ServeHTTP(w, r)
|
||||
return
|
||||
}
|
||||
|
62
serv/cmd.go
62
serv/cmd.go
@ -8,9 +8,7 @@ import (
|
||||
|
||||
"github.com/dosco/super-graph/psql"
|
||||
"github.com/dosco/super-graph/qcode"
|
||||
"github.com/gobuffalo/flect"
|
||||
"github.com/jackc/pgx/v4"
|
||||
"github.com/jackc/pgx/v4/log/zerologadapter"
|
||||
"github.com/jackc/pgx/v4/pgxpool"
|
||||
"github.com/rs/zerolog"
|
||||
"github.com/spf13/cobra"
|
||||
@ -21,20 +19,15 @@ import (
|
||||
|
||||
const (
|
||||
serverName = "Super Graph"
|
||||
|
||||
authFailBlockAlways = iota + 1
|
||||
authFailBlockPerQuery
|
||||
authFailBlockNever
|
||||
)
|
||||
|
||||
var (
|
||||
logger *zerolog.Logger
|
||||
conf *config
|
||||
confPath string
|
||||
db *pgxpool.Pool
|
||||
qcompile *qcode.Compiler
|
||||
pcompile *psql.Compiler
|
||||
authFailBlock int
|
||||
logger *zerolog.Logger
|
||||
conf *config
|
||||
confPath string
|
||||
db *pgxpool.Pool
|
||||
qcompile *qcode.Compiler
|
||||
pcompile *psql.Compiler
|
||||
)
|
||||
|
||||
func Init() {
|
||||
@ -151,41 +144,42 @@ func initLog() *zerolog.Logger {
|
||||
}
|
||||
|
||||
func initConf() (*config, error) {
|
||||
vi := newConfig()
|
||||
vi := newConfig(getConfigName())
|
||||
|
||||
if err := vi.ReadInConfig(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
inherits := vi.GetString("inherits")
|
||||
if len(inherits) != 0 {
|
||||
vi = newConfig(inherits)
|
||||
|
||||
if err := vi.ReadInConfig(); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if vi.IsSet("inherits") {
|
||||
logger.Fatal().Msgf("inherited config (%s) cannot itself inherit (%s)",
|
||||
inherits,
|
||||
vi.GetString("inherits"))
|
||||
}
|
||||
|
||||
vi.SetConfigName(getConfigName())
|
||||
vi.MergeInConfig()
|
||||
}
|
||||
|
||||
c := &config{}
|
||||
|
||||
if err := vi.Unmarshal(c); err != nil {
|
||||
if err := c.Init(vi); err != nil {
|
||||
return nil, fmt.Errorf("unable to decode config, %v", err)
|
||||
}
|
||||
|
||||
if len(c.Tables) == 0 {
|
||||
c.Tables = c.DB.Tables
|
||||
}
|
||||
|
||||
for k, v := range c.Inflections {
|
||||
flect.AddPlural(k, v)
|
||||
}
|
||||
|
||||
for i := range c.Tables {
|
||||
t := c.Tables[i]
|
||||
t.Name = flect.Pluralize(strings.ToLower(t.Name))
|
||||
}
|
||||
|
||||
authFailBlock = getAuthFailBlock(c)
|
||||
|
||||
logLevel, err := zerolog.ParseLevel(c.LogLevel)
|
||||
if err != nil {
|
||||
logger.Error().Err(err).Msg("error setting log_level")
|
||||
}
|
||||
zerolog.SetGlobalLevel(logLevel)
|
||||
|
||||
//fmt.Printf("%#v", c)
|
||||
|
||||
return c, nil
|
||||
}
|
||||
|
||||
@ -217,7 +211,7 @@ func initDB(c *config, useDB bool) (*pgx.Conn, error) {
|
||||
config.LogLevel = pgx.LogLevelNone
|
||||
}
|
||||
|
||||
config.Logger = zerologadapter.NewLogger(*logger)
|
||||
config.Logger = NewSQLLogger(*logger)
|
||||
|
||||
db, err := pgx.ConnectConfig(context.Background(), config)
|
||||
if err != nil {
|
||||
@ -252,7 +246,7 @@ func initDBPool(c *config) (*pgxpool.Pool, error) {
|
||||
config.ConnConfig.LogLevel = pgx.LogLevelNone
|
||||
}
|
||||
|
||||
config.ConnConfig.Logger = zerologadapter.NewLogger(*logger)
|
||||
config.ConnConfig.Logger = NewSQLLogger(*logger)
|
||||
|
||||
// if c.DB.MaxRetries != 0 {
|
||||
// opt.MaxRetries = c.DB.MaxRetries
|
||||
|
@ -15,13 +15,12 @@ func cmdConfDump(cmd *cobra.Command, args []string) {
|
||||
|
||||
fname := fmt.Sprintf("%s.%s", getConfigName(), args[0])
|
||||
|
||||
vi := newConfig()
|
||||
|
||||
if err := vi.ReadInConfig(); err != nil {
|
||||
logger.Fatal().Err(err).Send()
|
||||
conf, err := initConf()
|
||||
if err != nil {
|
||||
logger.Fatal().Err(err).Msg("failed to read config")
|
||||
}
|
||||
|
||||
if err := vi.WriteConfigAs(fname); err != nil {
|
||||
if err := conf.Viper.WriteConfigAs(fname); err != nil {
|
||||
logger.Fatal().Err(err).Send()
|
||||
}
|
||||
|
||||
|
@ -66,6 +66,7 @@ func graphQLFunc(query string, data interface{}) map[string]interface{} {
|
||||
c := &coreContext{Context: context.Background()}
|
||||
c.req.Query = query
|
||||
c.req.Vars = b
|
||||
c.req.role = "user"
|
||||
|
||||
res, err := c.execQuery()
|
||||
if err != nil {
|
||||
|
208
serv/config.go
208
serv/config.go
@ -1,12 +1,19 @@
|
||||
package serv
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"os"
|
||||
"regexp"
|
||||
"strings"
|
||||
"unicode"
|
||||
|
||||
"github.com/gobuffalo/flect"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
type config struct {
|
||||
*viper.Viper
|
||||
|
||||
AppName string `mapstructure:"app_name"`
|
||||
Env string
|
||||
HostPort string `mapstructure:"host_port"`
|
||||
@ -17,16 +24,16 @@ type config struct {
|
||||
EnableTracing bool `mapstructure:"enable_tracing"`
|
||||
UseAllowList bool `mapstructure:"use_allow_list"`
|
||||
WatchAndReload bool `mapstructure:"reload_on_config_change"`
|
||||
AuthFailBlock string `mapstructure:"auth_fail_block"`
|
||||
AuthFailBlock bool `mapstructure:"auth_fail_block"`
|
||||
SeedFile string `mapstructure:"seed_file"`
|
||||
MigrationsPath string `mapstructure:"migrations_path"`
|
||||
|
||||
Inflections map[string]string
|
||||
|
||||
Auth struct {
|
||||
Type string
|
||||
Cookie string
|
||||
Header string
|
||||
Type string
|
||||
Cookie string
|
||||
CredsInHeader bool `mapstructure:"creds_in_header"`
|
||||
|
||||
Rails struct {
|
||||
Version string
|
||||
@ -59,11 +66,12 @@ type config struct {
|
||||
PoolSize int32 `mapstructure:"pool_size"`
|
||||
MaxRetries int `mapstructure:"max_retries"`
|
||||
LogLevel string `mapstructure:"log_level"`
|
||||
SetUserID bool `mapstructure:"set_user_id"`
|
||||
|
||||
vars map[string][]byte `mapstructure:"variables"`
|
||||
Vars map[string]string `mapstructure:"variables"`
|
||||
|
||||
Defaults struct {
|
||||
Filter []string
|
||||
Filters []string
|
||||
Blocklist []string
|
||||
}
|
||||
|
||||
@ -71,18 +79,16 @@ type config struct {
|
||||
} `mapstructure:"database"`
|
||||
|
||||
Tables []configTable
|
||||
|
||||
RolesQuery string `mapstructure:"roles_query"`
|
||||
Roles []configRole
|
||||
}
|
||||
|
||||
type configTable struct {
|
||||
Name string
|
||||
Filter []string
|
||||
FilterQuery []string `mapstructure:"filter_query"`
|
||||
FilterInsert []string `mapstructure:"filter_insert"`
|
||||
FilterUpdate []string `mapstructure:"filter_update"`
|
||||
FilterDelete []string `mapstructure:"filter_delete"`
|
||||
Table string
|
||||
Blocklist []string
|
||||
Remotes []configRemote
|
||||
Name string
|
||||
Table string
|
||||
Blocklist []string
|
||||
Remotes []configRemote
|
||||
}
|
||||
|
||||
type configRemote struct {
|
||||
@ -98,16 +104,63 @@ type configRemote struct {
|
||||
} `mapstructure:"set_headers"`
|
||||
}
|
||||
|
||||
func newConfig() *viper.Viper {
|
||||
type configQuery struct {
|
||||
Limit int
|
||||
Filters []string
|
||||
Columns []string
|
||||
DisableFunctions bool `mapstructure:"disable_functions"`
|
||||
Block bool
|
||||
}
|
||||
|
||||
type configInsert struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
Presets map[string]string
|
||||
Block bool
|
||||
}
|
||||
|
||||
type configUpdate struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
Presets map[string]string
|
||||
Block bool
|
||||
}
|
||||
|
||||
type configDelete struct {
|
||||
Filters []string
|
||||
Columns []string
|
||||
Block bool
|
||||
}
|
||||
|
||||
type configRoleTable struct {
|
||||
Name string
|
||||
|
||||
Query configQuery
|
||||
Insert configInsert
|
||||
Update configUpdate
|
||||
Delete configDelete
|
||||
}
|
||||
|
||||
type configRole struct {
|
||||
Name string
|
||||
Match string
|
||||
Tables []configRoleTable
|
||||
}
|
||||
|
||||
func newConfig(name string) *viper.Viper {
|
||||
vi := viper.New()
|
||||
|
||||
vi.SetEnvPrefix("SG")
|
||||
vi.SetEnvKeyReplacer(strings.NewReplacer(".", "_"))
|
||||
vi.AutomaticEnv()
|
||||
|
||||
vi.SetConfigName(name)
|
||||
vi.AddConfigPath(confPath)
|
||||
vi.AddConfigPath("./config")
|
||||
vi.SetConfigName(getConfigName())
|
||||
|
||||
if dir, _ := os.Getwd(); strings.HasSuffix(dir, "/serv") {
|
||||
vi.AddConfigPath("../config")
|
||||
}
|
||||
|
||||
vi.SetDefault("host_port", "0.0.0.0:8080")
|
||||
vi.SetDefault("web_ui", false)
|
||||
@ -132,24 +185,90 @@ func newConfig() *viper.Viper {
|
||||
return vi
|
||||
}
|
||||
|
||||
func (c *config) getVariables() map[string]string {
|
||||
vars := make(map[string]string, len(c.DB.vars))
|
||||
|
||||
for k, v := range c.DB.vars {
|
||||
isVar := false
|
||||
|
||||
for i := range v {
|
||||
if v[i] == '$' {
|
||||
isVar = true
|
||||
} else if v[i] == ' ' {
|
||||
isVar = false
|
||||
} else if isVar && v[i] >= 'a' && v[i] <= 'z' {
|
||||
v[i] = 'A' + (v[i] - 'a')
|
||||
}
|
||||
}
|
||||
vars[k] = string(v)
|
||||
func (c *config) Init(vi *viper.Viper) error {
|
||||
if err := vi.Unmarshal(c); err != nil {
|
||||
return fmt.Errorf("unable to decode config, %v", err)
|
||||
}
|
||||
c.Viper = vi
|
||||
|
||||
if len(c.Tables) == 0 {
|
||||
c.Tables = c.DB.Tables
|
||||
}
|
||||
|
||||
for k, v := range c.Inflections {
|
||||
flect.AddPlural(k, v)
|
||||
}
|
||||
|
||||
for i := range c.Tables {
|
||||
t := c.Tables[i]
|
||||
t.Name = flect.Pluralize(strings.ToLower(t.Name))
|
||||
t.Table = flect.Pluralize(strings.ToLower(t.Table))
|
||||
}
|
||||
|
||||
for i := range c.Roles {
|
||||
r := c.Roles[i]
|
||||
r.Name = strings.ToLower(r.Name)
|
||||
}
|
||||
|
||||
for k, v := range c.DB.Vars {
|
||||
c.DB.Vars[k] = sanitize(v)
|
||||
}
|
||||
|
||||
c.RolesQuery = sanitize(c.RolesQuery)
|
||||
|
||||
rolesMap := make(map[string]struct{})
|
||||
|
||||
for i := range c.Roles {
|
||||
role := c.Roles[i]
|
||||
|
||||
if _, ok := rolesMap[role.Name]; ok {
|
||||
logger.Fatal().Msgf("duplicate role '%s' found", role.Name)
|
||||
}
|
||||
role.Name = sanitize(role.Name)
|
||||
role.Match = sanitize(role.Match)
|
||||
rolesMap[role.Name] = struct{}{}
|
||||
}
|
||||
|
||||
if _, ok := rolesMap["user"]; !ok {
|
||||
c.Roles = append(c.Roles, configRole{Name: "user"})
|
||||
}
|
||||
|
||||
if _, ok := rolesMap["anon"]; !ok {
|
||||
logger.Warn().Msg("unauthenticated requests will be blocked. no role 'anon' defined")
|
||||
c.AuthFailBlock = true
|
||||
}
|
||||
|
||||
c.validate()
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *config) validate() {
|
||||
rm := make(map[string]struct{})
|
||||
|
||||
for i := range c.Roles {
|
||||
name := c.Roles[i].Name
|
||||
|
||||
if _, ok := rm[name]; ok {
|
||||
logger.Fatal().Msgf("duplicate config for role '%s'", c.Roles[i].Name)
|
||||
}
|
||||
rm[name] = struct{}{}
|
||||
}
|
||||
|
||||
tm := make(map[string]struct{})
|
||||
|
||||
for i := range c.Tables {
|
||||
name := c.Tables[i].Name
|
||||
|
||||
if _, ok := tm[name]; ok {
|
||||
logger.Fatal().Msgf("duplicate config for table '%s'", c.Tables[i].Name)
|
||||
}
|
||||
tm[name] = struct{}{}
|
||||
}
|
||||
|
||||
if len(c.RolesQuery) == 0 {
|
||||
logger.Warn().Msgf("no 'roles_query' defined.")
|
||||
}
|
||||
return vars
|
||||
}
|
||||
|
||||
func (c *config) getAliasMap() map[string][]string {
|
||||
@ -162,8 +281,25 @@ func (c *config) getAliasMap() map[string][]string {
|
||||
continue
|
||||
}
|
||||
|
||||
k := strings.ToLower(t.Table)
|
||||
m[k] = append(m[k], strings.ToLower(t.Name))
|
||||
m[t.Table] = append(m[t.Table], t.Name)
|
||||
}
|
||||
return m
|
||||
}
|
||||
|
||||
var varRe1 = regexp.MustCompile(`(?mi)\$([a-zA-Z0-9_.]+)`)
|
||||
var varRe2 = regexp.MustCompile(`\{\{([a-zA-Z0-9_.]+)\}\}`)
|
||||
|
||||
func sanitize(s string) string {
|
||||
s0 := varRe1.ReplaceAllString(s, `{{$1}}`)
|
||||
|
||||
s1 := strings.Map(func(r rune) rune {
|
||||
if unicode.IsSpace(r) {
|
||||
return ' '
|
||||
}
|
||||
return r
|
||||
}, s0)
|
||||
|
||||
return varRe2.ReplaceAllStringFunc(s1, func(m string) string {
|
||||
return strings.ToLower(m)
|
||||
})
|
||||
}
|
||||
|
13
serv/config_test.go
Normal file
13
serv/config_test.go
Normal file
@ -0,0 +1,13 @@
|
||||
package serv
|
||||
|
||||
import (
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestInitConf(t *testing.T) {
|
||||
_, err := initConf()
|
||||
|
||||
if err != nil {
|
||||
t.Fatal(err.Error())
|
||||
}
|
||||
}
|
288
serv/core.go
288
serv/core.go
@ -13,8 +13,8 @@ import (
|
||||
|
||||
"github.com/cespare/xxhash/v2"
|
||||
"github.com/dosco/super-graph/jsn"
|
||||
"github.com/dosco/super-graph/psql"
|
||||
"github.com/dosco/super-graph/qcode"
|
||||
"github.com/jackc/pgx/v4"
|
||||
"github.com/valyala/fasttemplate"
|
||||
)
|
||||
|
||||
@ -32,6 +32,12 @@ func (c *coreContext) handleReq(w io.Writer, req *http.Request) error {
|
||||
c.req.ref = req.Referer()
|
||||
c.req.hdr = req.Header
|
||||
|
||||
if authCheck(c) {
|
||||
c.req.role = "user"
|
||||
} else {
|
||||
c.req.role = "anon"
|
||||
}
|
||||
|
||||
b, err := c.execQuery()
|
||||
if err != nil {
|
||||
return err
|
||||
@ -46,10 +52,12 @@ func (c *coreContext) execQuery() ([]byte, error) {
|
||||
var qc *qcode.QCode
|
||||
var data []byte
|
||||
|
||||
logger.Debug().Str("role", c.req.role).Msg(c.req.Query)
|
||||
|
||||
if conf.UseAllowList {
|
||||
var ps *preparedItem
|
||||
|
||||
data, ps, err = c.resolvePreparedSQL(c.req.Query)
|
||||
data, ps, err = c.resolvePreparedSQL()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -59,12 +67,7 @@ func (c *coreContext) execQuery() ([]byte, error) {
|
||||
|
||||
} else {
|
||||
|
||||
qc, err = qcompile.Compile([]byte(c.req.Query))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
data, skipped, err = c.resolveSQL(qc)
|
||||
data, skipped, err = c.resolveSQL()
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
@ -112,6 +115,154 @@ func (c *coreContext) execQuery() ([]byte, error) {
|
||||
return ob.Bytes(), nil
|
||||
}
|
||||
|
||||
func (c *coreContext) resolvePreparedSQL() ([]byte, *preparedItem, error) {
|
||||
tx, err := db.Begin(c)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer tx.Rollback(c)
|
||||
|
||||
if conf.DB.SetUserID {
|
||||
if err := c.setLocalUserID(tx); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
}
|
||||
|
||||
var role string
|
||||
mutation := isMutation(c.req.Query)
|
||||
useRoleQuery := len(conf.RolesQuery) != 0 && mutation
|
||||
|
||||
if useRoleQuery {
|
||||
if role, err = c.executeRoleQuery(tx); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
} else if v := c.Value(userRoleKey); v != nil {
|
||||
role = v.(string)
|
||||
|
||||
} else if mutation {
|
||||
role = c.req.role
|
||||
|
||||
}
|
||||
|
||||
ps, ok := _preparedList[gqlHash(c.req.Query, c.req.Vars, role)]
|
||||
if !ok {
|
||||
return nil, nil, errUnauthorized
|
||||
}
|
||||
|
||||
var root []byte
|
||||
vars := argList(c, ps.args)
|
||||
|
||||
if mutation {
|
||||
err = tx.QueryRow(c, ps.stmt.SQL, vars...).Scan(&root)
|
||||
} else {
|
||||
err = tx.QueryRow(c, ps.stmt.SQL, vars...).Scan(&c.req.role, &root)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if err := tx.Commit(c); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
return root, ps, nil
|
||||
}
|
||||
|
||||
func (c *coreContext) resolveSQL() ([]byte, uint32, error) {
|
||||
tx, err := db.Begin(c)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
defer tx.Rollback(c)
|
||||
|
||||
mutation := isMutation(c.req.Query)
|
||||
useRoleQuery := len(conf.RolesQuery) != 0 && mutation
|
||||
|
||||
if useRoleQuery {
|
||||
if c.req.role, err = c.executeRoleQuery(tx); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
} else if v := c.Value(userRoleKey); v != nil {
|
||||
c.req.role = v.(string)
|
||||
}
|
||||
|
||||
stmts, err := c.buildStmt()
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
var st *stmt
|
||||
|
||||
if mutation {
|
||||
st = findStmt(c.req.role, stmts)
|
||||
} else {
|
||||
st = &stmts[0]
|
||||
}
|
||||
|
||||
t := fasttemplate.New(st.sql, openVar, closeVar)
|
||||
|
||||
buf := &bytes.Buffer{}
|
||||
_, err = t.ExecuteFunc(buf, argMap(c))
|
||||
|
||||
if err == errNoUserID {
|
||||
logger.Warn().Msg("no user id found. query requires an authenicated request")
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
finalSQL := buf.String()
|
||||
|
||||
var stime time.Time
|
||||
|
||||
if conf.EnableTracing {
|
||||
stime = time.Now()
|
||||
}
|
||||
|
||||
if conf.DB.SetUserID {
|
||||
if err := c.setLocalUserID(tx); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
}
|
||||
|
||||
var root []byte
|
||||
|
||||
if mutation {
|
||||
err = tx.QueryRow(c, finalSQL).Scan(&root)
|
||||
} else {
|
||||
err = tx.QueryRow(c, finalSQL).Scan(&c.req.role, &root)
|
||||
}
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
if err := tx.Commit(c); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
if mutation {
|
||||
st = findStmt(c.req.role, stmts)
|
||||
} else {
|
||||
st = &stmts[0]
|
||||
}
|
||||
|
||||
if conf.EnableTracing && len(st.qc.Selects) != 0 {
|
||||
c.addTrace(
|
||||
st.qc.Selects,
|
||||
st.qc.Selects[0].ID,
|
||||
stime)
|
||||
}
|
||||
|
||||
if conf.UseAllowList == false {
|
||||
_allowList.add(&c.req)
|
||||
}
|
||||
|
||||
return root, st.skipped, nil
|
||||
}
|
||||
|
||||
func (c *coreContext) resolveRemote(
|
||||
hdr http.Header,
|
||||
h *xxhash.Digest,
|
||||
@ -259,125 +410,24 @@ func (c *coreContext) resolveRemotes(
|
||||
return to, cerr
|
||||
}
|
||||
|
||||
func (c *coreContext) resolvePreparedSQL(gql string) ([]byte, *preparedItem, error) {
|
||||
ps, ok := _preparedList[gqlHash(gql, c.req.Vars)]
|
||||
if !ok {
|
||||
return nil, nil, errUnauthorized
|
||||
func (c *coreContext) executeRoleQuery(tx pgx.Tx) (string, error) {
|
||||
var role string
|
||||
row := tx.QueryRow(c, "_sg_get_role", c.req.role, 1)
|
||||
|
||||
if err := row.Scan(&role); err != nil {
|
||||
return "", err
|
||||
}
|
||||
|
||||
var root []byte
|
||||
vars := varList(c, ps.args)
|
||||
|
||||
tx, err := db.Begin(c)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
defer tx.Rollback(c)
|
||||
|
||||
if v := c.Value(userIDKey); v != nil {
|
||||
_, err = tx.Exec(c, fmt.Sprintf(`SET LOCAL "user.id" = %s;`, v))
|
||||
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
}
|
||||
|
||||
err = tx.QueryRow(c, ps.stmt.SQL, vars...).Scan(&root)
|
||||
if err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
if err := tx.Commit(c); err != nil {
|
||||
return nil, nil, err
|
||||
}
|
||||
|
||||
fmt.Printf("PRE: %v\n", ps.stmt)
|
||||
|
||||
return root, ps, nil
|
||||
return role, nil
|
||||
}
|
||||
|
||||
func (c *coreContext) resolveSQL(qc *qcode.QCode) ([]byte, uint32, error) {
|
||||
var vars map[string]json.RawMessage
|
||||
stmt := &bytes.Buffer{}
|
||||
|
||||
if len(c.req.Vars) != 0 {
|
||||
if err := json.Unmarshal(c.req.Vars, &vars); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
}
|
||||
|
||||
skipped, err := pcompile.Compile(qc, stmt, psql.Variables(vars))
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
t := fasttemplate.New(stmt.String(), openVar, closeVar)
|
||||
|
||||
stmt.Reset()
|
||||
_, err = t.ExecuteFunc(stmt, varMap(c))
|
||||
|
||||
if err == errNoUserID &&
|
||||
authFailBlock == authFailBlockPerQuery &&
|
||||
authCheck(c) == false {
|
||||
return nil, 0, errUnauthorized
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
finalSQL := stmt.String()
|
||||
|
||||
// if conf.LogLevel == "debug" {
|
||||
// os.Stdout.WriteString(finalSQL)
|
||||
// os.Stdout.WriteString("\n\n")
|
||||
// }
|
||||
|
||||
var st time.Time
|
||||
|
||||
if conf.EnableTracing {
|
||||
st = time.Now()
|
||||
}
|
||||
|
||||
tx, err := db.Begin(c)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
defer tx.Rollback(c)
|
||||
|
||||
func (c *coreContext) setLocalUserID(tx pgx.Tx) error {
|
||||
var err error
|
||||
if v := c.Value(userIDKey); v != nil {
|
||||
_, err = tx.Exec(c, fmt.Sprintf(`SET LOCAL "user.id" = %s;`, v))
|
||||
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
}
|
||||
|
||||
//fmt.Printf("\nRAW: %#v\n", finalSQL)
|
||||
|
||||
var root []byte
|
||||
|
||||
err = tx.QueryRow(c, finalSQL).Scan(&root)
|
||||
if err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
if err := tx.Commit(c); err != nil {
|
||||
return nil, 0, err
|
||||
}
|
||||
|
||||
if conf.EnableTracing && len(qc.Selects) != 0 {
|
||||
c.addTrace(
|
||||
qc.Selects,
|
||||
qc.Selects[0].ID,
|
||||
st)
|
||||
}
|
||||
|
||||
if conf.UseAllowList == false {
|
||||
_allowList.add(&c.req)
|
||||
}
|
||||
|
||||
return root, skipped, nil
|
||||
return err
|
||||
}
|
||||
|
||||
func (c *coreContext) render(w io.Writer, data []byte) error {
|
||||
|
144
serv/core_build.go
Normal file
144
serv/core_build.go
Normal file
@ -0,0 +1,144 @@
|
||||
package serv
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"encoding/json"
|
||||
"errors"
|
||||
"io"
|
||||
|
||||
"github.com/dosco/super-graph/psql"
|
||||
"github.com/dosco/super-graph/qcode"
|
||||
)
|
||||
|
||||
type stmt struct {
|
||||
role *configRole
|
||||
qc *qcode.QCode
|
||||
skipped uint32
|
||||
sql string
|
||||
}
|
||||
|
||||
func (c *coreContext) buildStmt() ([]stmt, error) {
|
||||
var vars map[string]json.RawMessage
|
||||
|
||||
if len(c.req.Vars) != 0 {
|
||||
if err := json.Unmarshal(c.req.Vars, &vars); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
gql := []byte(c.req.Query)
|
||||
|
||||
if len(conf.Roles) == 0 {
|
||||
return nil, errors.New(`no roles found ('user' and 'anon' required)`)
|
||||
}
|
||||
|
||||
qc, err := qcompile.Compile(gql, conf.Roles[0].Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
stmts := make([]stmt, 0, len(conf.Roles))
|
||||
mutation := (qc.Type != qcode.QTQuery)
|
||||
w := &bytes.Buffer{}
|
||||
|
||||
for i := range conf.Roles {
|
||||
role := &conf.Roles[i]
|
||||
|
||||
if mutation && len(c.req.role) != 0 && role.Name != c.req.role {
|
||||
continue
|
||||
}
|
||||
|
||||
if i > 0 {
|
||||
qc, err = qcompile.Compile(gql, role.Name)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
stmts = append(stmts, stmt{role: role, qc: qc})
|
||||
|
||||
if mutation {
|
||||
skipped, err := pcompile.Compile(qc, w, psql.Variables(vars))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
s := &stmts[len(stmts)-1]
|
||||
s.skipped = skipped
|
||||
s.sql = w.String()
|
||||
w.Reset()
|
||||
}
|
||||
}
|
||||
|
||||
if mutation {
|
||||
return stmts, nil
|
||||
}
|
||||
|
||||
io.WriteString(w, `SELECT "_sg_auth_info"."role", (CASE "_sg_auth_info"."role" `)
|
||||
|
||||
for _, s := range stmts {
|
||||
io.WriteString(w, `WHEN '`)
|
||||
io.WriteString(w, s.role.Name)
|
||||
io.WriteString(w, `' THEN (`)
|
||||
|
||||
s.skipped, err = pcompile.Compile(s.qc, w, psql.Variables(vars))
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
io.WriteString(w, `) `)
|
||||
}
|
||||
io.WriteString(w, `END) FROM (`)
|
||||
|
||||
if len(conf.RolesQuery) == 0 {
|
||||
v := c.Value(userRoleKey)
|
||||
|
||||
io.WriteString(w, `VALUES ("`)
|
||||
if v != nil {
|
||||
io.WriteString(w, v.(string))
|
||||
} else {
|
||||
io.WriteString(w, c.req.role)
|
||||
}
|
||||
io.WriteString(w, `")) AS "_sg_auth_info"(role) LIMIT 1;`)
|
||||
|
||||
} else {
|
||||
|
||||
io.WriteString(w, `SELECT (CASE WHEN EXISTS (`)
|
||||
io.WriteString(w, conf.RolesQuery)
|
||||
io.WriteString(w, `) THEN `)
|
||||
|
||||
io.WriteString(w, `(SELECT (CASE`)
|
||||
for _, s := range stmts {
|
||||
if len(s.role.Match) == 0 {
|
||||
continue
|
||||
}
|
||||
io.WriteString(w, ` WHEN `)
|
||||
io.WriteString(w, s.role.Match)
|
||||
io.WriteString(w, ` THEN '`)
|
||||
io.WriteString(w, s.role.Name)
|
||||
io.WriteString(w, `'`)
|
||||
}
|
||||
|
||||
if len(c.req.role) == 0 {
|
||||
io.WriteString(w, ` ELSE 'anon' END) FROM (`)
|
||||
} else {
|
||||
io.WriteString(w, ` ELSE '`)
|
||||
io.WriteString(w, c.req.role)
|
||||
io.WriteString(w, `' END) FROM (`)
|
||||
}
|
||||
|
||||
io.WriteString(w, conf.RolesQuery)
|
||||
io.WriteString(w, `) AS "_sg_auth_roles_query" LIMIT 1) ELSE '`)
|
||||
if len(c.req.role) == 0 {
|
||||
io.WriteString(w, `anon`)
|
||||
} else {
|
||||
io.WriteString(w, c.req.role)
|
||||
}
|
||||
io.WriteString(w, `' END) FROM (VALUES (1)) AS "_sg_auth_filler") AS "_sg_auth_info"(role) LIMIT 1; `)
|
||||
}
|
||||
|
||||
stmts[0].sql = w.String()
|
||||
stmts[0].role = nil
|
||||
|
||||
return stmts, nil
|
||||
}
|
21
serv/corpus/001.gql
Normal file
21
serv/corpus/001.gql
Normal file
@ -0,0 +1,21 @@
|
||||
query {
|
||||
products(
|
||||
# returns only 30 items
|
||||
limit: 30,
|
||||
|
||||
# starts from item 10, commented out for now
|
||||
# offset: 10,
|
||||
|
||||
# orders the response items by highest price
|
||||
order_by: { price: desc },
|
||||
|
||||
# no duplicate prices returned
|
||||
distinct: [ price ]
|
||||
|
||||
# only items with an id >= 30 and < 30 are returned
|
||||
where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) {
|
||||
id
|
||||
name
|
||||
price
|
||||
}
|
||||
}
|
11
serv/fuzz.go
Normal file
11
serv/fuzz.go
Normal file
@ -0,0 +1,11 @@
|
||||
// +build gofuzz
|
||||
|
||||
package serv
|
||||
|
||||
func Fuzz(data []byte) int {
|
||||
gql := string(data)
|
||||
isMutation(gql)
|
||||
gqlHash(gql, nil, "")
|
||||
|
||||
return 1
|
||||
}
|
16
serv/fuzz_test.go
Normal file
16
serv/fuzz_test.go
Normal file
@ -0,0 +1,16 @@
|
||||
package serv
|
||||
|
||||
import "testing"
|
||||
|
||||
func TestFuzzCrashers(t *testing.T) {
|
||||
var crashers = []string{
|
||||
"query",
|
||||
"q",
|
||||
"que",
|
||||
}
|
||||
|
||||
for _, f := range crashers {
|
||||
isMutation(f)
|
||||
gqlHash(f, nil, "")
|
||||
}
|
||||
}
|
57
serv/http.go
57
serv/http.go
@ -30,14 +30,15 @@ type gqlReq struct {
|
||||
Query string `json:"query"`
|
||||
Vars json.RawMessage `json:"variables"`
|
||||
ref string
|
||||
role string
|
||||
hdr http.Header
|
||||
}
|
||||
|
||||
type variables map[string]json.RawMessage
|
||||
|
||||
type gqlResp struct {
|
||||
Error string `json:"error,omitempty"`
|
||||
Data json.RawMessage `json:"data"`
|
||||
Error string `json:"message,omitempty"`
|
||||
Data json.RawMessage `json:"data,omitempty"`
|
||||
Extensions *extensions `json:"extensions,omitempty"`
|
||||
}
|
||||
|
||||
@ -69,10 +70,9 @@ type resolver struct {
|
||||
func apiv1Http(w http.ResponseWriter, r *http.Request) {
|
||||
ctx := &coreContext{Context: r.Context()}
|
||||
|
||||
if authFailBlock == authFailBlockAlways && authCheck(ctx) == false {
|
||||
err := "Not authorized"
|
||||
logger.Debug().Msg(err)
|
||||
http.Error(w, err, 401)
|
||||
if conf.AuthFailBlock && authCheck(ctx) == false {
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
json.NewEncoder(w).Encode(gqlResp{Error: errUnauthorized.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
@ -94,55 +94,20 @@ func apiv1Http(w http.ResponseWriter, r *http.Request) {
|
||||
}
|
||||
|
||||
if strings.EqualFold(ctx.req.OpName, introspectionQuery) {
|
||||
// dat, err := ioutil.ReadFile("test.schema")
|
||||
// if err != nil {
|
||||
// http.Error(w, err.Error(), http.StatusInternalServerError)
|
||||
// return
|
||||
// }
|
||||
//w.Write(dat)
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Write([]byte(`{
|
||||
"data": {
|
||||
"__schema": {
|
||||
"queryType": {
|
||||
"name": "Query"
|
||||
},
|
||||
"mutationType": null,
|
||||
"subscriptionType": null
|
||||
}
|
||||
},
|
||||
"extensions":{
|
||||
"tracing":{
|
||||
"version":1,
|
||||
"startTime":"2019-06-04T19:53:31.093Z",
|
||||
"endTime":"2019-06-04T19:53:31.108Z",
|
||||
"duration":15219720,
|
||||
"execution": {
|
||||
"resolvers": [{
|
||||
"path": ["__schema"],
|
||||
"parentType": "Query",
|
||||
"fieldName": "__schema",
|
||||
"returnType": "__Schema!",
|
||||
"startOffset": 50950,
|
||||
"duration": 17187
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
}`))
|
||||
introspect(w)
|
||||
return
|
||||
}
|
||||
|
||||
err = ctx.handleReq(w, r)
|
||||
|
||||
if err == errUnauthorized {
|
||||
err := "Not authorized"
|
||||
logger.Debug().Msg(err)
|
||||
http.Error(w, err, 401)
|
||||
w.WriteHeader(http.StatusUnauthorized)
|
||||
json.NewEncoder(w).Encode(gqlResp{Error: err.Error()})
|
||||
return
|
||||
}
|
||||
|
||||
if err != nil {
|
||||
logger.Err(err).Msg("Failed to handle request")
|
||||
logger.Err(err).Msg("failed to handle request")
|
||||
errorResp(w, err)
|
||||
}
|
||||
}
|
||||
|
36
serv/introsp.go
Normal file
36
serv/introsp.go
Normal file
@ -0,0 +1,36 @@
|
||||
package serv
|
||||
|
||||
import "net/http"
|
||||
|
||||
func introspect(w http.ResponseWriter) {
|
||||
w.Header().Set("Content-Type", "application/json")
|
||||
w.Write([]byte(`{
|
||||
"data": {
|
||||
"__schema": {
|
||||
"queryType": {
|
||||
"name": "Query"
|
||||
},
|
||||
"mutationType": null,
|
||||
"subscriptionType": null
|
||||
}
|
||||
},
|
||||
"extensions":{
|
||||
"tracing":{
|
||||
"version":1,
|
||||
"startTime":"2019-06-04T19:53:31.093Z",
|
||||
"endTime":"2019-06-04T19:53:31.108Z",
|
||||
"duration":15219720,
|
||||
"execution": {
|
||||
"resolvers": [{
|
||||
"path": ["__schema"],
|
||||
"parentType": "Query",
|
||||
"fieldName": "__schema",
|
||||
"returnType": "__Schema!",
|
||||
"startOffset": 50950,
|
||||
"duration": 17187
|
||||
}]
|
||||
}
|
||||
}
|
||||
}
|
||||
}`))
|
||||
}
|
132
serv/prepare.go
132
serv/prepare.go
@ -7,7 +7,6 @@ import (
|
||||
"fmt"
|
||||
"io"
|
||||
|
||||
"github.com/dosco/super-graph/psql"
|
||||
"github.com/dosco/super-graph/qcode"
|
||||
"github.com/jackc/pgconn"
|
||||
"github.com/valyala/fasttemplate"
|
||||
@ -27,55 +26,105 @@ var (
|
||||
func initPreparedList() {
|
||||
_preparedList = make(map[string]*preparedItem)
|
||||
|
||||
for k, v := range _allowList.list {
|
||||
err := prepareStmt(k, v.gql, v.vars)
|
||||
if err := prepareRoleStmt(); err != nil {
|
||||
logger.Fatal().Err(err).Msg("failed to prepare get role statement")
|
||||
}
|
||||
|
||||
    for _, v := range _allowList.list {
        err := prepareStmt(v.gql, v.vars)
        if err != nil {
            logger.Warn().Err(err).Send()
            logger.Warn().Str("gql", v.gql).Err(err).Send()
        }
    }
}

func prepareStmt(key, gql string, varBytes json.RawMessage) error {
    if len(gql) == 0 || len(key) == 0 {
func prepareStmt(gql string, varBytes json.RawMessage) error {
    if len(gql) == 0 {
        return nil
    }

    qc, err := qcompile.Compile([]byte(gql))
    c := &coreContext{Context: context.Background()}
    c.req.Query = gql
    c.req.Vars = varBytes

    stmts, err := c.buildStmt()
    if err != nil {
        return err
    }

    var vars map[string]json.RawMessage
    if len(stmts) != 0 && stmts[0].qc.Type == qcode.QTQuery {
        c.req.Vars = nil
    }

    if len(varBytes) != 0 {
        vars = make(map[string]json.RawMessage)
    for _, s := range stmts {
        if len(s.sql) == 0 {
            continue
        }

        if err := json.Unmarshal(varBytes, &vars); err != nil {
        finalSQL, am := processTemplate(s.sql)

        ctx := context.Background()

        tx, err := db.Begin(ctx)
        if err != nil {
            return err
        }
        defer tx.Rollback(ctx)

        pstmt, err := tx.Prepare(ctx, "", finalSQL)
        if err != nil {
            return err
        }

        var key string

        if s.role == nil {
            key = gqlHash(gql, c.req.Vars, "")
        } else {
            key = gqlHash(gql, c.req.Vars, s.role.Name)
        }

        _preparedList[key] = &preparedItem{
            stmt:    pstmt,
            args:    am,
            skipped: s.skipped,
            qc:      s.qc,
        }

        if err := tx.Commit(ctx); err != nil {
            return err
        }
    }

    buf := &bytes.Buffer{}
    return nil
}

    skipped, err := pcompile.Compile(qc, buf, psql.Variables(vars))
    if err != nil {
        return err
func prepareRoleStmt() error {
    if len(conf.RolesQuery) == 0 {
        return nil
    }

    t := fasttemplate.New(buf.String(), `{{`, `}}`)
    am := make([][]byte, 0, 5)
    i := 0
    w := &bytes.Buffer{}

    finalSQL := t.ExecuteFuncString(func(w io.Writer, tag string) (int, error) {
        am = append(am, []byte(tag))
        i++
        return w.Write([]byte(fmt.Sprintf("$%d", i)))
    })

    if err != nil {
        return err
    io.WriteString(w, `SELECT (CASE`)
    for _, role := range conf.Roles {
        if len(role.Match) == 0 {
            continue
        }
        io.WriteString(w, ` WHEN `)
        io.WriteString(w, role.Match)
        io.WriteString(w, ` THEN '`)
        io.WriteString(w, role.Name)
        io.WriteString(w, `'`)
    }

    io.WriteString(w, ` ELSE {{role}} END) FROM (`)
    io.WriteString(w, conf.RolesQuery)
    io.WriteString(w, `) AS "_sg_auth_roles_query"`)

    roleSQL, _ := processTemplate(w.String())

    ctx := context.Background()

    tx, err := db.Begin(ctx)
@ -84,21 +133,28 @@ func prepareStmt(key, gql string, varBytes json.RawMessage) error {
    }
    defer tx.Rollback(ctx)

    pstmt, err := tx.Prepare(ctx, "", finalSQL)
    _, err = tx.Prepare(ctx, "_sg_get_role", roleSQL)
    if err != nil {
        return err
    }

    _preparedList[key] = &preparedItem{
        stmt:    pstmt,
        args:    am,
        skipped: skipped,
        qc:      qc,
    }

    if err := tx.Commit(ctx); err != nil {
        return err
    }

    return nil
}

func processTemplate(tmpl string) (string, [][]byte) {
    t := fasttemplate.New(tmpl, `{{`, `}}`)
    am := make([][]byte, 0, 5)
    i := 0

    vmap := make(map[string]int)

    return t.ExecuteFuncString(func(w io.Writer, tag string) (int, error) {
        if n, ok := vmap[tag]; ok {
            return w.Write([]byte(fmt.Sprintf("$%d", n)))
        }
        am = append(am, []byte(tag))
        i++
        vmap[tag] = i
        return w.Write([]byte(fmt.Sprintf("$%d", i)))
    }), am
}
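The processTemplate helper above is the piece that turns `{{tag}}` placeholders emitted by the SQL compiler into numbered Postgres parameters, while recording the tag order so request variables can be bound later. A minimal standalone sketch of the same idea, assuming the fasttemplate package imported here is github.com/valyala/fasttemplate (the import path is not visible in this diff) and using made-up tag names:

package main

import (
    "fmt"
    "io"

    "github.com/valyala/fasttemplate"
)

// toPrepared mirrors processTemplate: every {{tag}} becomes a positional
// placeholder ($1, $2, ...) and the tag order is recorded so variables can be
// bound to the prepared statement in that same order. A repeated tag reuses
// its original placeholder number.
func toPrepared(tmpl string) (string, []string) {
    t := fasttemplate.New(tmpl, "{{", "}}")
    args := []string{}
    seen := map[string]int{}

    sql := t.ExecuteFuncString(func(w io.Writer, tag string) (int, error) {
        if n, ok := seen[tag]; ok {
            return w.Write([]byte(fmt.Sprintf("$%d", n)))
        }
        args = append(args, tag)
        seen[tag] = len(args)
        return w.Write([]byte(fmt.Sprintf("$%d", len(args))))
    })
    return sql, args
}

func main() {
    sql, args := toPrepared(`SELECT * FROM users WHERE id = {{user_id}} AND org = {{org_id}} AND owner = {{user_id}}`)
    fmt.Println(sql)  // SELECT * FROM users WHERE id = $1 AND org = $2 AND owner = $1
    fmt.Println(args) // [user_id org_id]
}

The vmap lookup in processTemplate serves the same purpose as seen here: a tag that appears twice maps to a single parameter instead of two.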
@ -107,6 +107,13 @@ func Do(log func(string, ...interface{}), additional ...dir) error {
        case event := <-watcher.Events:
            // Ensure that we use the correct events, as they are not uniform across
            // platforms. See https://github.com/fsnotify/fsnotify/issues/74

            if conf.UseAllowList == false && strings.HasSuffix(event.Name, "/allow.list") {
                continue
            }

            logger.Info().Msgf("Reloading, file change detected '%s'", event)

            var trigger bool
            switch runtime.GOOS {
            case "darwin", "freebsd", "openbsd", "netbsd", "dragonfly":

113 serv/serv.go
@ -12,7 +12,6 @@ import (
    rice "github.com/GeertJohan/go.rice"
    "github.com/dosco/super-graph/psql"
    "github.com/dosco/super-graph/qcode"
    "github.com/gobuffalo/flect"
)

func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
@ -22,52 +21,71 @@ func initCompilers(c *config) (*qcode.Compiler, *psql.Compiler, error) {
    }

    conf := qcode.Config{
        DefaultFilter: c.DB.Defaults.Filter,
        FilterMap: qcode.Filters{
            All:    make(map[string][]string, len(c.Tables)),
            Query:  make(map[string][]string, len(c.Tables)),
            Insert: make(map[string][]string, len(c.Tables)),
            Update: make(map[string][]string, len(c.Tables)),
            Delete: make(map[string][]string, len(c.Tables)),
        },
        Blocklist: c.DB.Defaults.Blocklist,
        KeepArgs:  false,
    }

    for i := range c.Tables {
        t := c.Tables[i]

        singular := flect.Singularize(t.Name)
        plural := flect.Pluralize(t.Name)

        setFilter := func(fm map[string][]string, fil []string) {
            switch {
            case len(fil) == 0:
                return
            case fil[0] == "none" || len(fil[0]) == 0:
                fm[singular] = []string{}
                fm[plural] = []string{}
            default:
                fm[singular] = t.Filter
                fm[plural] = t.Filter
            }
        }

        setFilter(conf.FilterMap.All, t.Filter)
        setFilter(conf.FilterMap.Query, t.FilterQuery)
        setFilter(conf.FilterMap.Insert, t.FilterInsert)
        setFilter(conf.FilterMap.Update, t.FilterUpdate)
        setFilter(conf.FilterMap.Delete, t.FilterDelete)
    }

    qc, err := qcode.NewCompiler(conf)
    if err != nil {
        return nil, nil, err
    }

    blockFilter := []string{"false"}

    for _, r := range c.Roles {
        for _, t := range r.Tables {
            query := qcode.QueryConfig{
                Limit:            t.Query.Limit,
                Filters:          t.Query.Filters,
                Columns:          t.Query.Columns,
                DisableFunctions: t.Query.DisableFunctions,
            }

            if t.Query.Block {
                query.Filters = blockFilter
            }

            insert := qcode.InsertConfig{
                Filters: t.Insert.Filters,
                Columns: t.Insert.Columns,
                Presets: t.Insert.Presets,
            }

            if t.Query.Block {
                insert.Filters = blockFilter
            }

            update := qcode.UpdateConfig{
                Filters: t.Insert.Filters,
                Columns: t.Insert.Columns,
                Presets: t.Insert.Presets,
            }

            if t.Query.Block {
                update.Filters = blockFilter
            }

            delete := qcode.DeleteConfig{
                Filters: t.Insert.Filters,
                Columns: t.Insert.Columns,
            }

            if t.Query.Block {
                delete.Filters = blockFilter
            }

            qc.AddRole(r.Name, t.Name, qcode.TRConfig{
                Query:  query,
                Insert: insert,
                Update: update,
                Delete: delete,
            })
        }
    }

    pc := psql.NewCompiler(psql.Config{
        Schema: schema,
        Vars:   c.getVariables(),
        Vars:   c.DB.Vars,
    })

    return qc, pc, nil
@ -130,7 +148,15 @@ func startHTTP() {
        db.Close()
    })

    fmt.Printf("%s listening on %s (%s)\n", serverName, hostPort, conf.Env)
    var ident string

    if len(conf.AppName) == 0 {
        ident = conf.Env
    } else {
        ident = conf.AppName
    }

    fmt.Printf("%s listening on %s (%s)\n", serverName, hostPort, ident)

    if err := srv.ListenAndServe(); err != http.ErrServerClosed {
        logger.Error().Err(err).Msg("server closed")
@ -178,16 +204,3 @@ func getConfigName() string {

    return ge
}

func getAuthFailBlock(c *config) int {
    switch c.AuthFailBlock {
    case "always":
        return authFailBlockAlways
    case "per_query", "perquery", "query":
        return authFailBlockPerQuery
    case "never", "false":
        return authFailBlockNever
    }

    return authFailBlockAlways
}
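For reference, the role loop above boils down to per-role, per-table registrations like the sketch below. The anon/products names and the filter are illustrative, and the field types (Limit as an int, Columns and Filters as string slices) are inferred from the config values shown in this diff rather than confirmed against the qcode package.

package serv

import "github.com/dosco/super-graph/qcode"

// addAnonProducts registers read-only rules for the "anon" role on the
// "products" table, the same shape of call the config-driven loop in
// initCompilers makes for every role/table pair.
func addAnonProducts(qc *qcode.Compiler) {
    qc.AddRole("anon", "products", qcode.TRConfig{
        Query: qcode.QueryConfig{
            Limit:   10,
            Filters: []string{"{ price: { gt: 0 } }"},
            Columns: []string{"id", "name", "price"},
        },
    })
}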
45 serv/sqllog.go Normal file
@ -0,0 +1,45 @@
package serv

import (
    "context"

    "github.com/jackc/pgx/v4"
    "github.com/rs/zerolog"
)

type Logger struct {
    logger zerolog.Logger
}

// NewSQLLogger accepts a zerolog.Logger as input and returns a new custom pgx
// logging facade as output.
func NewSQLLogger(logger zerolog.Logger) *Logger {
    return &Logger{
        logger: logger.With().Logger(),
    }
}

func (pl *Logger) Log(ctx context.Context, level pgx.LogLevel, msg string, data map[string]interface{}) {
    var zlevel zerolog.Level
    switch level {
    case pgx.LogLevelNone:
        zlevel = zerolog.NoLevel
    case pgx.LogLevelError:
        zlevel = zerolog.ErrorLevel
    case pgx.LogLevelWarn:
        zlevel = zerolog.WarnLevel
    case pgx.LogLevelInfo:
        zlevel = zerolog.InfoLevel
    case pgx.LogLevelDebug:
        zlevel = zerolog.DebugLevel
    default:
        zlevel = zerolog.DebugLevel
    }

    if sql, ok := data["sql"]; ok {
        delete(data, "sql")
        pl.logger.WithLevel(zlevel).Fields(data).Msg(sql.(string))
    } else {
        pl.logger.WithLevel(zlevel).Fields(data).Msg(msg)
    }
}
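One plausible way to attach this logger to a pgx v4 connection, so every SQL statement shows up as a structured zerolog line. This is a sketch only: the connection string is illustrative and the server may well wire the logger into a connection pool instead of a single connection.

package serv

import (
    "context"
    "os"

    "github.com/jackc/pgx/v4"
    "github.com/rs/zerolog"
)

// connectWithSQLLog opens a single pgx connection that routes its query log
// through the zerolog-backed Logger defined above.
func connectWithSQLLog(ctx context.Context) (*pgx.Conn, error) {
    cfg, err := pgx.ParseConfig("postgres://postgres@db:5432/app_development")
    if err != nil {
        return nil, err
    }

    cfg.Logger = NewSQLLogger(zerolog.New(os.Stdout))
    cfg.LogLevel = pgx.LogLevelDebug

    return pgx.ConnectConfig(ctx, cfg)
}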
@ -21,16 +21,36 @@ func mkkey(h *xxhash.Digest, k1 string, k2 string) uint64 {
    return v
}

func gqlHash(b string, vars []byte) string {
func gqlHash(b string, vars []byte, role string) string {
    b = strings.TrimSpace(b)
    h := sha1.New()
    query := "query"

    s, e := 0, 0
    space := []byte{' '}
    starting := true

    var b0, b1 byte

    if len(b) == 0 {
        return ""
    }

    for {
        if starting && b[e] == 'q' {
            n := 0
            se := e
            for e < len(b) && n < len(query) && b[e] == query[n] {
                n++
                e++
            }
            if n != len(query) {
                io.WriteString(h, strings.ToLower(b[se:e]))
            }
        }
        if e >= len(b) {
            break
        }
        if ws(b[e]) {
            for e < len(b) && ws(b[e]) {
                e++
@ -42,6 +62,7 @@ func gqlHash(b string, vars []byte) string {
                h.Write(space)
            }
        } else {
            starting = false
            s = e
            for e < len(b) && ws(b[e]) == false {
                e++
@ -56,6 +77,10 @@ func gqlHash(b string, vars []byte) string {
        }
    }

    if len(role) != 0 {
        io.WriteString(h, role)
    }

    if vars == nil || len(vars) == 0 {
        return hex.EncodeToString(h.Sum(nil))
    }
@ -80,3 +105,26 @@ func ws(b byte) bool {

func al(b byte) bool {
    return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || (b >= '0' && b <= '9')
}

func isMutation(sql string) bool {
    for i := range sql {
        b := sql[i]
        if b == '{' {
            return false
        }
        if al(b) {
            return (b == 'm' || b == 'M')
        }
    }
    return false
}

func findStmt(role string, stmts []stmt) *stmt {
    for i := range stmts {
        if stmts[i].role.Name != role {
            continue
        }
        return &stmts[i]
    }
    return nil
}
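A small test sketch of how isMutation behaves: leading whitespace is skipped, a { seen before any letter means a shorthand query, and otherwise the very first letter alone decides. The example documents are made up.

package serv

import "testing"

func TestIsMutationSketch(t *testing.T) {
    // First letter is 'm', so this is treated as a mutation.
    if !isMutation(`mutation { createUser { id } }`) {
        t.Fatal("expected a mutation")
    }
    // First letter is 'q', so this is a read.
    if isMutation(`query { users { id } }`) {
        t.Fatal("a query is not a mutation")
    }
    // '{' appears before any letter, so this shorthand query is a read too.
    if isMutation(`  { users { id } }`) {
        t.Fatal("a shorthand query is not a mutation")
    }
}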
@ -5,7 +5,7 @@ import (
    "testing"
)

func TestRelaxHash1(t *testing.T) {
func TestGQLHash1(t *testing.T) {
    var v1 = `
    products(
        limit: 30,
@ -24,15 +24,15 @@ func TestRelaxHash1(t *testing.T) {
        price
    } `

    h1 := gqlHash(v1, nil)
    h2 := gqlHash(v2, nil)
    h1 := gqlHash(v1, nil, "")
    h2 := gqlHash(v2, nil, "")

    if strings.Compare(h1, h2) != 0 {
        t.Fatal("Hashes don't match they should")
    }
}

func TestRelaxHash2(t *testing.T) {
func TestGQLHash2(t *testing.T) {
    var v1 = `
    {
        products(
@ -53,15 +53,15 @@ func TestRelaxHash2(t *testing.T) {

    var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } `

    h1 := gqlHash(v1, nil)
    h2 := gqlHash(v2, nil)
    h1 := gqlHash(v1, nil, "")
    h2 := gqlHash(v2, nil, "")

    if strings.Compare(h1, h2) != 0 {
        t.Fatal("Hashes don't match they should")
    }
}

func TestRelaxHash3(t *testing.T) {
func TestGQLHash3(t *testing.T) {
    var v1 = `users {
        id
        email
@ -86,15 +86,44 @@ func TestRelaxHash3(t *testing.T) {
    }
    `

    h1 := gqlHash(v1, nil)
    h2 := gqlHash(v2, nil)
    h1 := gqlHash(v1, nil, "")
    h2 := gqlHash(v2, nil, "")

    if strings.Compare(h1, h2) != 0 {
        t.Fatal("Hashes don't match they should")
    }
}

func TestRelaxHashWithVars1(t *testing.T) {
func TestGQLHash4(t *testing.T) {
    var v1 = `
    query {
        products(
            limit: 30
            order_by: { price: desc }
            distinct: [price]
            where: { id: { and: { greater_or_equals: 20, lt: 28 } } }
        ) {
            id
            name
            price
            user {
                id
                email
            }
        }
    }`

    var v2 = ` { products( limit: 30, order_by: { price: desc }, distinct: [ price ] where: { id: { and: { greater_or_equals: 20, lt: 28 } } }) { id name price user { id email } } } `

    h1 := gqlHash(v1, nil, "")
    h2 := gqlHash(v2, nil, "")

    if strings.Compare(h1, h2) != 0 {
        t.Fatal("Hashes don't match they should")
    }
}

func TestGQLHashWithVars1(t *testing.T) {
    var q1 = `
    products(
        limit: 30,
@ -136,15 +165,15 @@ func TestRelaxHashWithVars1(t *testing.T) {
        "user": 123
    }`

    h1 := gqlHash(q1, []byte(v1))
    h2 := gqlHash(q2, []byte(v2))
    h1 := gqlHash(q1, []byte(v1), "user")
    h2 := gqlHash(q2, []byte(v2), "user")

    if strings.Compare(h1, h2) != 0 {
        t.Fatal("Hashes don't match they should")
    }
}

func TestRelaxHashWithVars2(t *testing.T) {
func TestGQLHashWithVars2(t *testing.T) {
    var q1 = `
    products(
        limit: 30,
@ -193,8 +222,8 @@ func TestRelaxHashWithVars2(t *testing.T) {
        "user": 123
    }`

    h1 := gqlHash(q1, []byte(v1))
    h2 := gqlHash(q2, []byte(v2))
    h1 := gqlHash(q1, []byte(v1), "user")
    h2 := gqlHash(q2, []byte(v2), "user")

    if strings.Compare(h1, h2) != 0 {
        t.Fatal("Hashes don't match they should")
@ -80,7 +80,7 @@ SQL Output
  account_id: "select account_id from users where id = $user_id"

defaults:
  filter: ["{ user_id: { eq: $user_id } }"]
  Filters: ["{ user_id: { eq: $user_id } }"]

blacklist:
  - password
@ -88,14 +88,14 @@ SQL Output

fields:
  - name: users
    filter: ["{ id: { eq: $user_id } }"]
    Filters: ["{ id: { eq: $user_id } }"]

  - name: products
    filter: [
    Filters: [
      "{ price: { gt: 0 } }",
      "{ price: { lt: 8 } }"
    ]

  - name: me
    table: users
    filter: ["{ id: { eq: $user_id } }"]
    Filters: ["{ id: { eq: $user_id } }"]
130 tmpl/dev.yml
@ -1,4 +1,4 @@
app_name: "{% app_name %} Development"
app_name: "{{app_name}} Development"
host_port: 0.0.0.0:8080
web_ui: true

@ -12,8 +12,7 @@ log_level: "debug"
use_allow_list: false

# Throw a 401 on auth failure for queries that need auth
# valid values: always, per_query, never
auth_fail_block: never
auth_fail_block: false

# Latency tracing for database queries and remote joins
# the resulting latency information is returned with the
@ -49,11 +48,12 @@ migrations_path: ./config/migrations
auth:
  # Can be 'rails' or 'jwt'
  type: rails
  cookie: _app_session
  cookie: _{{app_name_slug}}_session

  # Comment this out if you want to disable setting
  # the user_id via a header. Good for testing
  header: X-User-ID
  # the user_id via a header for testing.
  # Disable in production
  creds_in_header: true

  rails:
    # Rails version this is used for reading the
@ -84,7 +84,7 @@ database:
  type: postgres
  host: db
  port: 5432
  dbname: {% app_name_slug %}_development
  dbname: {{app_name_slug}}_development
  user: postgres
  password: ''

@ -93,6 +93,10 @@ database:
  #max_retries: 0
  #log_level: "debug"

  # Set session variable "user.id" to the user id
  # Enable this if you need the user id in triggers, etc
  set_user_id: false

  # Define variables here that you want to use in filters
  # sub-queries must be wrapped in ()
  variables:
@ -100,7 +104,7 @@ database:

  # Define defaults for the field key and values below
  defaults:
    # filter: ["{ user_id: { eq: $user_id } }"]
    # filters: ["{ user_id: { eq: $user_id } }"]

  # Field and table names that you wish to block
  blocklist:
@ -111,45 +115,81 @@ database:
    - encrypted
    - token

  tables:
    - name: users
      # This filter will overwrite defaults.filter
      # filter: ["{ id: { eq: $user_id } }"]
      # filter_query: ["{ id: { eq: $user_id } }"]
      filter_update: ["{ id: { eq: $user_id } }"]
      filter_delete: ["{ id: { eq: $user_id } }"]
tables:
  - name: customers
    remotes:
      - name: payments
        id: stripe_id
        url: http://rails_app:3000/stripe/$id
        path: data
        # debug: true
        pass_headers:
          - cookie
        set_headers:
          - name: Host
            value: 0.0.0.0
          # - name: Authorization
          #   value: Bearer <stripe_api_key>

    # - name: products
    #   # Multiple filters are AND'd together
    #   filter: [
    #     "{ price: { gt: 0 } }",
    #     "{ price: { lt: 8 } }"
    #   ]
  - # You can create new fields that have a
    # real db table backing them
    name: me
    table: users

    - name: customers
      # No filter is used for this field, not
      # even defaults.filter
      filter: none
roles_query: "SELECT * FROM users WHERE id = $user_id"

      remotes:
        - name: payments
          id: stripe_id
          url: http://rails_app:3000/stripe/$id
          path: data
          # debug: true
          pass_headers:
            - cookie
          set_headers:
            - name: Host
              value: 0.0.0.0
            # - name: Authorization
            #   value: Bearer <stripe_api_key>
roles:
  - name: anon
    tables:
      - name: products
        limit: 10

    - # You can create new fields that have a
      # real db table backing them
      name: me
      table: users
      filter: ["{ id: { eq: $user_id } }"]
        query:
          columns: ["id", "name", "description" ]
          aggregation: false

    # - name: posts
    #   filter: ["{ account_id: { _eq: $account_id } }"]
        insert:
          block: false

        update:
          block: false

        delete:
          block: false

  - name: user
    tables:
      - name: users
        query:
          filters: ["{ id: { _eq: $user_id } }"]

      - name: products
        query:
          limit: 50
          filters: ["{ user_id: { eq: $user_id } }"]
          columns: ["id", "name", "description" ]
          disable_functions: false

        insert:
          filters: ["{ user_id: { eq: $user_id } }"]
          columns: ["id", "name", "description" ]
          presets:
            - created_at: "now"

        update:
          filters: ["{ user_id: { eq: $user_id } }"]
          columns:
            - id
            - name
          presets:
            - updated_at: "now"

        delete:
          deny: true

  - name: admin
    match: id = 1
    tables:
      - name: users
        # query:
        #   filters: ["{ account_id: { _eq: $account_id } }"]
@ -1,4 +1,8 @@
app_name: "{% app_name %} Production"
# Inherit config from this other config file
# so I only need to overwrite some values
inherits: dev

app_name: "{{app_name}} Production"
host_port: 0.0.0.0:8080
web_ui: false

@ -12,8 +16,7 @@ log_level: "info"
use_allow_list: true

# Throw a 401 on auth failure for queries that need auth
# valid values: always, per_query, never
auth_fail_block: always
auth_fail_block: true

# Latency tracing for database queries and remote joins
# the resulting latency information is returned with the
@ -45,11 +48,7 @@ enable_tracing: true
auth:
  # Can be 'rails' or 'jwt'
  type: rails
  cookie: _app_session

  # Comment this out if you want to disable setting
  # the user_id via a header. Good for testing
  header: X-User-ID
  cookie: _{{app_name_slug}}_session

  rails:
    # Rails version this is used for reading the
@ -80,68 +79,13 @@ database:
  type: postgres
  host: db
  port: 5432
  dbname: {% app_name_slug %}_production
  dbname: {{app_name_slug}}_development
  user: postgres
  password: ''
  #pool_size: 10
  #max_retries: 0
  #log_level: "debug"

  # Define variables here that you want to use in filters
  # sub-queries must be wrapped in ()
  variables:
    account_id: "(select account_id from users where id = $user_id)"

  # Define defaults for the field key and values below
  defaults:
    filter: ["{ user_id: { eq: $user_id } }"]

  # Field and table names that you wish to block
  blocklist:
    - ar_internal_metadata
    - schema_migrations
    - secret
    - password
    - encrypted
    - token

  tables:
    - name: users
      # This filter will overwrite defaults.filter
      # filter: ["{ id: { eq: $user_id } }"]
      # filter_query: ["{ id: { eq: $user_id } }"]
      filter_update: ["{ id: { eq: $user_id } }"]
      filter_delete: ["{ id: { eq: $user_id } }"]

    - name: products
      # Multiple filters are AND'd together
      filter: [
        "{ price: { gt: 0 } }",
        "{ price: { lt: 8 } }"
      ]

    - name: customers
      # No filter is used for this field, not
      # even defaults.filter
      filter: none

      # remotes:
      #   - name: payments
      #     id: stripe_id
      #     url: http://rails_app:3000/stripe/$id
      #     path: data
      #     # pass_headers:
      #     #   - cookie
      #     #   - host
      #     set_headers:
      #       - name: Authorization
      #         value: Bearer <stripe_api_key>

    - # You can create new fields that have a
      # real db table backing them
      name: me
      table: users
      filter: ["{ id: { eq: $user_id } }"]

    # - name: posts
    #   filter: ["{ account_id: { _eq: $account_id } }"]
  # Set session variable "user.id" to the user id
  # Enable this if you need the user id in triggers, etc
  set_user_id: false