mirror of
https://github.com/xbgmsharp/postgsail.git
synced 2025-09-17 19:27:49 +00:00
Compare commits
259 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
ae57191cfb | ||
![]() |
9bdc777010 | ||
![]() |
49bad13fe7 | ||
![]() |
d465d91a94 | ||
![]() |
2edff87269 | ||
![]() |
e6ce0582d3 | ||
![]() |
31849a86b1 | ||
![]() |
de33977c83 | ||
![]() |
d0ace87fd7 | ||
![]() |
a7c6254f5f | ||
![]() |
4ab69d40ef | ||
![]() |
2c62c7b92c | ||
![]() |
75cf68dc78 | ||
![]() |
febc7f3a60 | ||
![]() |
788f609f3b | ||
![]() |
3aa26685eb | ||
![]() |
8ce04ec282 | ||
![]() |
21cc07f6c0 | ||
![]() |
dea8452229 | ||
![]() |
6b0eb72b82 | ||
![]() |
94960ad391 | ||
![]() |
577da72451 | ||
![]() |
ea2f3ec6d1 | ||
![]() |
2eb645123b | ||
![]() |
197e080035 | ||
![]() |
b6b082dd8c | ||
![]() |
fd97b4c616 | ||
![]() |
582cd4460e | ||
![]() |
c056737e2f | ||
![]() |
0ffe646050 | ||
![]() |
dfdc54062d | ||
![]() |
4a0f4c77ca | ||
![]() |
8038a95b60 | ||
![]() |
328cbc2741 | ||
![]() |
482510121c | ||
![]() |
f0929fd633 | ||
![]() |
9483560a18 | ||
![]() |
0f7284b8c8 | ||
![]() |
3a2ef95e25 | ||
![]() |
9bc463c45e | ||
![]() |
5bcb51f803 | ||
![]() |
c145d1c1df | ||
![]() |
87d9380882 | ||
![]() |
40256a1c0e | ||
![]() |
2ffbbbe885 | ||
![]() |
ef89437660 | ||
![]() |
f01a4b9605 | ||
![]() |
8f4a8c14ee | ||
![]() |
c978df1edb | ||
![]() |
3525b88bc2 | ||
![]() |
47249b90fe | ||
![]() |
e06db937e5 | ||
![]() |
cca75d252a | ||
![]() |
5144050875 | ||
![]() |
931544663e | ||
![]() |
1c62aaa853 | ||
![]() |
f1903ba3eb | ||
![]() |
ab5becb31d | ||
![]() |
44b034873e | ||
![]() |
e398eb2a99 | ||
![]() |
57ff0b97ea | ||
![]() |
a633731ae7 | ||
![]() |
b34162f11b | ||
![]() |
a5d1495864 | ||
![]() |
896576d0f8 | ||
![]() |
e3309d9784 | ||
![]() |
861fbf5502 | ||
![]() |
3f84a731b2 | ||
![]() |
c66797fa4f | ||
![]() |
d56d5d54a8 | ||
![]() |
f86a1b4382 | ||
![]() |
51b6e8fa7c | ||
![]() |
89af44efcc | ||
![]() |
a64ef5850d | ||
![]() |
da100ddd18 | ||
![]() |
92ce0503dd | ||
![]() |
61d40fd7b6 | ||
![]() |
c318f2d338 | ||
![]() |
c7c14fa5a1 | ||
![]() |
4fc68ae805 | ||
![]() |
3eb67abedb | ||
![]() |
894dbf0667 | ||
![]() |
f526b99853 | ||
![]() |
a670038f28 | ||
![]() |
2599f40f7b | ||
![]() |
4d833999e8 | ||
![]() |
b4dc93ba0e | ||
![]() |
764a6d6457 | ||
![]() |
2e9ede6da2 | ||
![]() |
cc67a3b37d | ||
![]() |
64ecbfc698 | ||
![]() |
b19eeed59a | ||
![]() |
8f5cd4237d | ||
![]() |
7b3a1451bb | ||
![]() |
a2cdd8ddfe | ||
![]() |
7a04026e67 | ||
![]() |
fab496ea3d | ||
![]() |
4f31831c94 | ||
![]() |
300e4bee48 | ||
![]() |
99e258c974 | ||
![]() |
970c85c11e | ||
![]() |
bbf4426f55 | ||
![]() |
a8620f4b4c | ||
![]() |
15accaa4cb | ||
![]() |
8d382b48ac | ||
![]() |
2983f149ad | ||
![]() |
a1ca97b549 | ||
![]() |
119c1778e6 | ||
![]() |
11489ce4aa | ||
![]() |
42b070baa8 | ||
![]() |
a1df7b218c | ||
![]() |
160d6aa569 | ||
![]() |
a2903e08ac | ||
![]() |
5a74914eac | ||
![]() |
55dc6275ee | ||
![]() |
f2c68c82d8 | ||
![]() |
578ca925db | ||
![]() |
ae14017cfc | ||
![]() |
1b42e3849f | ||
![]() |
2ffcbc5586 | ||
![]() |
235506f2bc | ||
![]() |
5a2ba54b2a | ||
![]() |
122c44c338 | ||
![]() |
2e451fa93c | ||
![]() |
d26d008b47 | ||
![]() |
6a6239f344 | ||
![]() |
2f6a0a6133 | ||
![]() |
bda652b87e | ||
![]() |
2f6bb6d5d9 | ||
![]() |
2cd9b0dd6c | ||
![]() |
13e4f453d5 | ||
![]() |
bc7d51c71e | ||
![]() |
95d3c5bded | ||
![]() |
f0c6f92920 | ||
![]() |
852d2ff583 | ||
![]() |
7cf7905694 | ||
![]() |
0f8107a672 | ||
![]() |
77dec463d1 | ||
![]() |
8ff1d0a8ed | ||
![]() |
859788d98d | ||
![]() |
62642ffbd6 | ||
![]() |
c3760c8689 | ||
![]() |
763c9ae802 | ||
![]() |
37abb3ae1f | ||
![]() |
a6da3cab0a | ||
![]() |
22f756b3a9 | ||
![]() |
cb3e9d8e57 | ||
![]() |
1997fe5a81 | ||
![]() |
5a1451ff69 | ||
![]() |
a18abec1f1 | ||
![]() |
322c3ed4fb | ||
![]() |
d648d119cc | ||
![]() |
9109474e8a | ||
![]() |
ca92a15eba | ||
![]() |
d745048a9c | ||
![]() |
6a0c15d23c | ||
![]() |
fc01374441 | ||
![]() |
0ec3f7fe02 | ||
![]() |
2bae8bd861 | ||
![]() |
38d185d058 | ||
![]() |
4342e29c69 | ||
![]() |
13d8ad9b3d | ||
![]() |
caec91b7f2 | ||
![]() |
665a9d30e6 | ||
![]() |
eb3a14bee4 | ||
![]() |
ba935d7520 | ||
![]() |
11d136214c | ||
![]() |
ddbeff7d7e | ||
![]() |
569700e1b3 | ||
![]() |
93f8476d26 | ||
![]() |
4eef5595bc | ||
![]() |
cf9c67bb64 | ||
![]() |
1968f86448 | ||
![]() |
552faa0a16 | ||
![]() |
c0b6f17488 | ||
![]() |
1ab6501aad | ||
![]() |
07280f1f67 | ||
![]() |
d419a582b9 | ||
![]() |
69c8ec17f9 | ||
![]() |
89d50b7a6a | ||
![]() |
976fc52e9a | ||
![]() |
cb0b89c8f3 | ||
![]() |
bcbcfa040d | ||
![]() |
b7857e0be6 | ||
![]() |
089876b62a | ||
![]() |
fc9fb8769a | ||
![]() |
3432d358d3 | ||
![]() |
340bda704e | ||
![]() |
54156ae7c9 | ||
![]() |
4c4f0bbd37 | ||
![]() |
b58fce186a | ||
![]() |
c6c78ecffc | ||
![]() |
db0e493900 | ||
![]() |
dea5b8ddf7 | ||
![]() |
e9e63fad50 | ||
![]() |
8b45a171e8 | ||
![]() |
a0216dad6a | ||
![]() |
ca5bffd88f | ||
![]() |
1dbf71064e | ||
![]() |
6888953cbb | ||
![]() |
105d6b9113 | ||
![]() |
0c2e4b1d83 | ||
![]() |
f8b1fb472a | ||
![]() |
613ac5e29a | ||
![]() |
5ce5b606e9 | ||
![]() |
0f59a31cdc | ||
![]() |
58407a84e9 | ||
![]() |
9ae9553254 | ||
![]() |
494cc9a571 | ||
![]() |
dbd29ca58a | ||
![]() |
00cdd7ca18 | ||
![]() |
34fe0898b2 | ||
![]() |
3522d3b9d7 | ||
![]() |
d4f79e7f71 | ||
![]() |
4df4fa993a | ||
![]() |
94f79080aa | ||
![]() |
1a5c0f10c3 | ||
![]() |
e6309875fb | ||
![]() |
2e269b9424 | ||
![]() |
40e25b1f8c | ||
![]() |
9eec9ad355 | ||
![]() |
90d2c3b3a0 | ||
![]() |
d25f31ce0b | ||
![]() |
c8e722283c | ||
![]() |
2095e9b561 | ||
![]() |
73addfa928 | ||
![]() |
345f190f4e | ||
![]() |
0682f06ae9 | ||
![]() |
8bc0fdaf17 | ||
![]() |
ab1afeee42 | ||
![]() |
b6d60dd0d5 | ||
![]() |
295d0a0a5e | ||
![]() |
a68a0ee3e3 | ||
![]() |
ea7301e1ed | ||
![]() |
98f5d75429 | ||
![]() |
adc6799c93 | ||
![]() |
a865e91ce7 | ||
![]() |
a64425b13f | ||
![]() |
0586d30381 | ||
![]() |
db1d7c63e2 | ||
![]() |
4acb4de539 | ||
![]() |
07043ddf08 | ||
![]() |
bd05591205 | ||
![]() |
95ff1d8ff2 | ||
![]() |
e92515ba66 | ||
![]() |
8b8087e56d | ||
![]() |
7b7aae7dfe | ||
![]() |
be27618dac | ||
![]() |
7fb24d8cae | ||
![]() |
07c7628973 | ||
![]() |
e42e52eaf0 | ||
![]() |
97e739ffe9 | ||
![]() |
3fb2534263 | ||
![]() |
9e8009a764 | ||
![]() |
dca77c3293 | ||
![]() |
8af527f574 | ||
![]() |
0f399293eb | ||
![]() |
57dfaf2158 | ||
![]() |
3a2e091744 |
114
.codesandbox/tasks.json
Normal file
114
.codesandbox/tasks.json
Normal file
@@ -0,0 +1,114 @@
|
||||
{
|
||||
// These tasks will run in order when initializing your CodeSandbox project.
|
||||
"setupTasks": [
|
||||
{
|
||||
"name": "git udpate",
|
||||
"command": "cd ~/workspace/ && git pull"
|
||||
},
|
||||
{
|
||||
"name": "git udpate submodule",
|
||||
"command": "cd ~/workspace/ && git submodule update --recursive --remote"
|
||||
}
|
||||
],
|
||||
|
||||
// These tasks can be run from CodeSandbox. Running one will open a log in the app.
|
||||
"tasks": {
|
||||
"docker-compose up db": {
|
||||
"name": "docker-compose up db",
|
||||
"command": "docker-compose up db",
|
||||
"runAtStart": true
|
||||
},
|
||||
"docker network inspect network": {
|
||||
"name": "docker network inspect postgsail_default",
|
||||
"command": "docker network ls && docker network inspect postgsail_default",
|
||||
"runAtStart": false
|
||||
},
|
||||
"docker-compose up api": {
|
||||
"name": "docker-compose up api",
|
||||
"command": "docker-compose up api",
|
||||
"runAtStart": false,
|
||||
"preview": {
|
||||
"port": 3000,
|
||||
"prLink": "direct"
|
||||
}
|
||||
},
|
||||
"docker volume rm volume": {
|
||||
"name": "docker volume rm volume",
|
||||
"command": "docker volume ls && docker volume rm postgsail_data",
|
||||
"runAtStart": false
|
||||
},
|
||||
"docker-compose rm db": {
|
||||
"name": "docker-compose rm db",
|
||||
"command": "docker-compose rm db",
|
||||
"runAtStart": false
|
||||
},
|
||||
"docker-compose rm api": {
|
||||
"name": "docker-compose rm api",
|
||||
"command": "docker-compose rm api",
|
||||
"runAtStart": false
|
||||
},
|
||||
"docker-compose clean": {
|
||||
"name": "docker-compose clean",
|
||||
"command": "docker-compose stop && docker-compose rm && docker volume ls && docker volume rm postgsail_data",
|
||||
"runAtStart": false
|
||||
},
|
||||
"docker-compose pgadmin": {
|
||||
"name": "docker-compose up pgadmin",
|
||||
"command": "docker-compose up pgadmin",
|
||||
"runAtStart": false,
|
||||
"preview": {
|
||||
"port": 5050,
|
||||
"prLink": "direct"
|
||||
}
|
||||
},
|
||||
"docker-compose web": {
|
||||
"name": "docker-compose up web",
|
||||
"command": "docker-compose up web",
|
||||
"runAtStart": false,
|
||||
"preview": {
|
||||
"port": 8080,
|
||||
"prLink": "direct"
|
||||
}
|
||||
},
|
||||
"docker-compose ps": {
|
||||
"name": "docker-compose ps -a",
|
||||
"command": "docker-compose ps -a",
|
||||
"runAtStart": false
|
||||
},
|
||||
"docker ps": {
|
||||
"name": "docker ps -a",
|
||||
"command": "docker ps -a",
|
||||
"runAtStart": false
|
||||
},
|
||||
"docker-compose stop": {
|
||||
"name": "docker-compose stop",
|
||||
"command": "docker-compose stop",
|
||||
"runAtStart": false
|
||||
},
|
||||
"npm i": {
|
||||
"name": "npm i",
|
||||
"command": "cd frontend/ && npm i",
|
||||
"runAtStart": false
|
||||
},
|
||||
"git submodule add https://github.com/xbgmsharp/vuestic-postgsail frontend": {
|
||||
"name": "git submodule add https://github.com/xbgmsharp/vuestic-postgsail frontend",
|
||||
"command": "git submodule add https://github.com/xbgmsharp/vuestic-postgsail frontend",
|
||||
"runAtStart": false
|
||||
},
|
||||
"git submodule update --init --recursive": {
|
||||
"name": "git submodule update --init --recursive",
|
||||
"command": "git submodule update --init --recursive",
|
||||
"runAtStart": false
|
||||
},
|
||||
"git submodule update --recursive --remote": {
|
||||
"name": "git submodule update --recursive --remote",
|
||||
"command": "git submodule update --recursive --remote",
|
||||
"runAtStart": false
|
||||
},
|
||||
"git pull": {
|
||||
"name": "git pull",
|
||||
"command": "git pull",
|
||||
"runAtStart": false
|
||||
}
|
||||
}
|
||||
}
|
76
.devcontainer.json
Normal file
76
.devcontainer.json
Normal file
@@ -0,0 +1,76 @@
|
||||
{
|
||||
"name": "PostgSail",
|
||||
//"image": "mcr.microsoft.com/devcontainers/base:alpine",
|
||||
"dockerComposeFile": ["docker-compose.dev.yml", "docker-compose.yml"],
|
||||
"service": "dev",
|
||||
"workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}",
|
||||
|
||||
// Use this environment variable if you need to bind mount your local source code into a new container.
|
||||
"remoteEnv": {
|
||||
"LOCAL_WORKSPACE_FOLDER": "${localWorkspaceFolder}",
|
||||
"POSTGRES_PASSWORD": "${localEnv:POSTGRES_PASSWORD}",
|
||||
"POSTGRES_USER": "${localEnv:POSTGRES_USER}",
|
||||
"POSTGRES_DB": "${localEnv:POSTGRES_DB}",
|
||||
"PGSAIL_AUTHENTICATOR_PASSWORD": "${localEnv:PGSAIL_AUTHENTICATOR_PASSWORD}"
|
||||
},
|
||||
"containerEnv": {
|
||||
//"GITHUB_TOKEN": "${localEnv:GITHUB_TOKEN}",
|
||||
//"GITHUB_USER": "${localEnv:GITHUB_USER}"
|
||||
},
|
||||
|
||||
// Features to add to the dev container. More info: https://containers.dev/features.
|
||||
// "features": {},
|
||||
|
||||
// Use 'forwardPorts' to make a list of ports inside the container available locally.
|
||||
// "forwardPorts": [],
|
||||
"forwardPorts": ["db:5432", "api:3000", "pgadmin:5050", "web:8080"],
|
||||
|
||||
// Use 'portsAttributes' to set default properties for specific forwarded ports.
|
||||
// More info: https://containers.dev/implementors/json_reference/#port-attributes
|
||||
"portsAttributes": {
|
||||
"3000": {
|
||||
"label": "api",
|
||||
"onAutoForward": "notify"
|
||||
},
|
||||
"5050": {
|
||||
"label": "pgadmin",
|
||||
"onAutoForward": "notify"
|
||||
},
|
||||
"5342": {
|
||||
"label": "database",
|
||||
"onAutoForward": "notify"
|
||||
},
|
||||
"8080": {
|
||||
"label": "web",
|
||||
"onAutoForward": "notify"
|
||||
}
|
||||
},
|
||||
|
||||
// Use 'postCreateCommand' to run commands after the container is created.
|
||||
// "postCreateCommand": "docker --version",
|
||||
|
||||
// Uncomment to connect as root instead. More info: https://aka.ms/dev-containers-non-root.
|
||||
// "remoteUser": "root"
|
||||
|
||||
// Configure tool-specific properties.
|
||||
"customizations": {
|
||||
// Configure properties specific to VS Code.
|
||||
"vscode": {
|
||||
"settings": {
|
||||
"terminal.integrated.profiles.linux": {
|
||||
"zsh": {
|
||||
"path": "/bin/bash"
|
||||
}
|
||||
},
|
||||
"terminal.integrated.defaultProfile.linux": "bash",
|
||||
"editor.formatOnSave": true
|
||||
},
|
||||
"extensions": [
|
||||
"streetsidesoftware.code-spell-checker",
|
||||
"esbenp.prettier-vscode",
|
||||
"ckolkman.vscode-postgres",
|
||||
"ms-azuretools.vscode-docker"
|
||||
]
|
||||
}
|
||||
}
|
||||
}
|
13
.env.example
13
.env.example
@@ -1,15 +1,22 @@
|
||||
# POSTGRESQL ENV Settings
|
||||
POSTGRES_USER=username
|
||||
POSTGRES_PASSWORD=password
|
||||
POSTGRES_DB=postgres
|
||||
# PostgSail ENV Settings
|
||||
PGSAIL_AUTHENTICATOR_PASSWORD=password
|
||||
PGSAIL_GRAFANA_PASSWORD=password
|
||||
PGSAIL_GRAFANA_AUTH_PASSWORD=password
|
||||
PGSAIL_EMAIL_FROM=root@localhost
|
||||
PGSAIL_EMAIL_SERVER=localhost
|
||||
#PGSAIL_EMAIL_USER= Comment if not use
|
||||
#PGSAIL_EMAIL_PASS= Comment if not use
|
||||
#PGSAIL_PUSHOVER_TOKEN= Comment if not use
|
||||
#PGSAIL_PUSHOVER_APP= Comment if not use
|
||||
#PGSAIL_PUSHOVER_APP_TOKEN= Comment if not use
|
||||
#PGSAIL_PUSHOVER_APP_URL= Comment if not use
|
||||
#PGSAIL_TELEGRAM_BOT_TOKEN= Comment if not use
|
||||
PGSAIL_APP_URL=http://localhost:8080
|
||||
PGSAIL_API_URL=http://localhost:3000
|
||||
# POSTGREST ENV Settings
|
||||
PGRST_DB_URI=postgres://authenticator:${PGSAIL_AUTHENTICATOR_PASSWORD}@127.0.0.1:5432/signalk
|
||||
PGRST_DB_URI=postgres://authenticator:${PGSAIL_AUTHENTICATOR_PASSWORD}@db:5432/signalk
|
||||
PGRST_JWT_SECRET=_at_least_32__char__long__random
|
||||
# Grafana ENV Settings
|
||||
GF_SECURITY_ADMIN_PASSWORD=password
|
||||
|
61
.github/workflows/db-test.yml
vendored
Normal file
61
.github/workflows/db-test.yml
vendored
Normal file
@@ -0,0 +1,61 @@
|
||||
name: Test services db, api
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'initdb/**'
|
||||
branches:
|
||||
- 'main'
|
||||
push:
|
||||
branches:
|
||||
- 'main'
|
||||
paths:
|
||||
- 'initdb/**'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
smoketest:
|
||||
name: tests
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Check out the source
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set env
|
||||
run: cp .env.example .env
|
||||
|
||||
- name: Pull Docker images
|
||||
run: docker-compose pull db api
|
||||
|
||||
- name: Install psql
|
||||
run: sudo apt install postgresql-client
|
||||
|
||||
- name: Run PostgSail Database & API tests
|
||||
# Environment variables
|
||||
env:
|
||||
# The hostname used to communicate with the PostgreSQL service container
|
||||
PGHOST: localhost
|
||||
PGPORT: 5432
|
||||
PGDATABASE: signalk
|
||||
PGUSER: username
|
||||
PGPASSWORD: password
|
||||
run: |
|
||||
set -eu
|
||||
source .env
|
||||
docker-compose stop || true
|
||||
docker-compose rm || true
|
||||
docker-compose up -d db && sleep 15 && docker-compose up -d api && sleep 5
|
||||
docker-compose ps -a
|
||||
echo ${PGSAIL_API_URL}
|
||||
curl ${PGSAIL_API_URL}
|
||||
psql -c "select 1"
|
||||
echo "Test PostgreSQL version"
|
||||
psql -c "SELECT version();"
|
||||
echo "Test PostgSail version"
|
||||
psql -c "SELECT value FROM app_settings WHERE name = 'app.version';"
|
||||
echo "Test PostgSail Unit Test"
|
||||
docker-compose -f docker-compose.dev.yml -f docker-compose.yml up tests
|
||||
- name: Show the logs
|
||||
if: always()
|
||||
run: |
|
||||
docker-compose logs
|
52
.github/workflows/frontend-test.yml
vendored
Normal file
52
.github/workflows/frontend-test.yml
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
name: Test services db, api, web
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
branches:
|
||||
- 'main'
|
||||
push:
|
||||
branches:
|
||||
- 'main'
|
||||
paths:
|
||||
- 'frontend/**'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
ci-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set env
|
||||
run: cp .env.example .env
|
||||
|
||||
- name: Pull Docker images
|
||||
run: docker-compose pull db api web
|
||||
|
||||
- name: Run PostgSail Web test
|
||||
# Environment variables
|
||||
env:
|
||||
# The hostname used to communicate with the PostgreSQL service container
|
||||
PGHOST: localhost
|
||||
PGPORT: 5432
|
||||
PGDATABASE: signalk
|
||||
PGUSER: username
|
||||
PGPASSWORD: password
|
||||
run: |
|
||||
set -eu
|
||||
source .env
|
||||
docker-compose stop || true
|
||||
docker-compose rm || true
|
||||
docker-compose up -d db && sleep 15 && docker-compose up -d api && sleep 5
|
||||
docker-compose ps -a
|
||||
echo "Test PostgSail Web Unit Test"
|
||||
docker-compose up -d web && sleep 5
|
||||
docker-compose ps -a
|
||||
curl http://localhost:8080/
|
||||
- name: Show the logs
|
||||
if: always()
|
||||
run: |
|
||||
docker-compose logs
|
52
.github/workflows/grafana-test.yml
vendored
Normal file
52
.github/workflows/grafana-test.yml
vendored
Normal file
@@ -0,0 +1,52 @@
|
||||
name: Test services db, grafana
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- 'grafana/**'
|
||||
branches:
|
||||
- 'main'
|
||||
push:
|
||||
branches:
|
||||
- 'main'
|
||||
paths:
|
||||
- 'grafana/**'
|
||||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
ci-test:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v3
|
||||
|
||||
- name: Set env
|
||||
run: cp .env.example .env
|
||||
|
||||
- name: Pull Docker images
|
||||
run: docker-compose pull db app
|
||||
|
||||
- name: Run PostgSail Grafana test
|
||||
# Environment variables
|
||||
env:
|
||||
# The hostname used to communicate with the PostgreSQL service container
|
||||
PGHOST: localhost
|
||||
PGPORT: 5432
|
||||
PGDATABASE: signalk
|
||||
PGUSER: username
|
||||
PGPASSWORD: password
|
||||
run: |
|
||||
set -eu
|
||||
source .env
|
||||
docker-compose stop || true
|
||||
docker-compose rm || true
|
||||
docker-compose up -d db && sleep 15
|
||||
docker-compose ps -a
|
||||
echo "Test PostgSail Grafana Unit Test"
|
||||
docker-compose up -d app && sleep 5
|
||||
docker-compose ps -a
|
||||
curl http://localhost:3001/
|
||||
- name: Show the logs
|
||||
if: always()
|
||||
run: |
|
||||
docker-compose logs
|
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
@@ -0,0 +1,3 @@
|
||||
[submodule "frontend"]
|
||||
path = frontend
|
||||
url = https://github.com/xbgmsharp/vuestic-postgsail
|
35
ERD/README.md
Normal file
35
ERD/README.md
Normal file
@@ -0,0 +1,35 @@
|
||||
# PostgSail ERD
|
||||
The Entity-Relationship Diagram (ERD) provides a graphical representation of database tables, columns, and inter-relationships. ERD can give sufficient information for the database administrator to follow when developing and maintaining the database.
|
||||
|
||||
## A global overview
|
||||

|
||||
|
||||
## Further
|
||||
There is 3 main schemas:
|
||||
- API Schema ERD
|
||||
- tables
|
||||
- metrics
|
||||
- logbook
|
||||
- ...
|
||||
- functions
|
||||
- ...
|
||||

|
||||
|
||||
- Auth Schema ERD
|
||||
- tables
|
||||
- accounts
|
||||
- vessels
|
||||
- ...
|
||||
- functions
|
||||
- ...
|
||||

|
||||
|
||||
- Public Schema ERD
|
||||
- tables
|
||||
- app_settings
|
||||
- tpl_messages
|
||||
- ...
|
||||
- functions
|
||||
- ...
|
||||

|
||||
|
BIN
ERD/postgsail.pgerd.png
Normal file
BIN
ERD/postgsail.pgerd.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 360 KiB |
BIN
ERD/signalk - api.png
Normal file
BIN
ERD/signalk - api.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 222 KiB |
BIN
ERD/signalk - auth.png
Normal file
BIN
ERD/signalk - auth.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 18 KiB |
BIN
ERD/signalk - public.png
Normal file
BIN
ERD/signalk - public.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 194 KiB |
BIN
PostgSail.png
Normal file
BIN
PostgSail.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 97 KiB |
116
README.md
116
README.md
@@ -1,10 +1,9 @@
|
||||
# PostgSail
|
||||
Effortless cloud based solution for storing and sharing your SignalK data. Allow to effortlessly log your sails and monitor your boat with historical data.
|
||||
|
||||
### Context
|
||||
It is all about SQL, object-relational, time-series, spatial database with a bit python.
|
||||
Effortless cloud based solution for storing and sharing your SignalK data. Allow you to effortlessly log your sails and monitor your boat with historical data.
|
||||
|
||||
## Features
|
||||
|
||||
### Features
|
||||
- Automatically log your voyages without manually starting or stopping a trip.
|
||||
- Automatically capture the details of your voyages (boat speed, heading, wind speed, etc).
|
||||
- Timelapse video your trips!
|
||||
@@ -15,38 +14,102 @@ It is all about SQL, object-relational, time-series, spatial database with a bit
|
||||
- Monitor your boat (position, depth, wind, temperature, battery charge status, etc.) remotely.
|
||||
- History: view trends.
|
||||
- Alert monitoring: get notification on low voltage or low fuel remotely.
|
||||
- Notification via email or PushOver.
|
||||
- Notification via email or PushOver, Telegram
|
||||
- Offline mode
|
||||
- Low Bandwidth mode
|
||||
|
||||
### Cloud
|
||||
The cloud advantage.
|
||||
## Context
|
||||
|
||||
It is all about SQL, object-relational, time-series, spatial databases with a bit of python.
|
||||
|
||||
PostgSail is an open-source alternative to traditional vessel data management.
|
||||
It is based on a well known open-source technology stack, Signalk, PostgreSQL, TimescaleDB, PostGIS, PostgREST. It does perfectly integrate with standard monitoring tool stack like Grafana.
|
||||
|
||||
To understand the why and how, you might want to read [Why.md](https://github.com/xbgmsharp/postgsail/tree/main/Why.md)
|
||||
|
||||
## Architecture
|
||||
A simple scalable architecture:
|
||||
|
||||

|
||||
|
||||
For more clarity and visibility the complete [Entity-Relationship Diagram (ERD)](https://github.com/xbgmsharp/postgsail/tree/main/ERD/README.md) is export as PNG and SVG file.
|
||||
|
||||
## Cloud
|
||||
|
||||
If you prefer not to install or administer your instance of PostgSail, hosted versions of PostgSail are available in the cloud of your choice.
|
||||
|
||||
### The cloud advantage.
|
||||
|
||||
Hosted and fully–managed options for PostgSail, designed for all your deployment and business needs. Register and try for free at https://iot.openplotter.cloud/.
|
||||
|
||||
## Using PostgSail
|
||||
|
||||
### full-featured development environment
|
||||
The Visual Studio Code Remote - Containers extension lets you use a Docker container as a full-featured development environment.
|
||||
|
||||
#### With codesandbox
|
||||
- https://codesandbox.io/p/github/xbgmsharp/postgsail/main
|
||||
|
||||
#### With DevPod
|
||||
- https://devpod.sh/open#https://github.com/xbgmsharp/postgsail/&workspace=postgsail&provider=docker&ide=openvscode
|
||||
|
||||
#### With Docker Dev Environments
|
||||
- https://open.docker.com/dashboard/dev-envs?url=https://github.com/xbgmsharp/postgsail/
|
||||
Open in Docker Dev Environments Open in Docker Dev Environments
|
||||
|
||||
### pre-deploy configuration
|
||||
|
||||
To get these running, copy `.env.example` and rename to `.env` then set the value accordinly.
|
||||
To get these running, copy `.env.example` and rename to `.env` then set the value accordingly.
|
||||
|
||||
```bash
|
||||
# cp .env.example .env
|
||||
```
|
||||
|
||||
Notice, that `PGRST_JWT_SECRET` must be at least 32 characters long.
|
||||
|
||||
`$ head /dev/urandom | tr -dc A-Za-z0-9 | head -c 32 ; echo ''`
|
||||
`$ head /dev/urandom | tr -dc A-Za-z0-9 | head -c 42 ; echo ''`
|
||||
|
||||
```bash
|
||||
# nano .env
|
||||
```
|
||||
|
||||
### Deploy
|
||||
|
||||
By default there is no network set and the postgresql data are store in a docker volume.
|
||||
You can update the default settings by editing `docker-compose.yml` to your need.
|
||||
Then simply excecute:
|
||||
|
||||
First let's initialize the database.
|
||||
|
||||
#### Initialize database
|
||||
|
||||
First let's import the SQL schema, execute:
|
||||
|
||||
```bash
|
||||
$ docker-compose up db
|
||||
```
|
||||
$ docker-compose up
|
||||
|
||||
#### Start backend (db, api)
|
||||
|
||||
Then launch the full stack (db, api) backend, execute:
|
||||
|
||||
```bash
|
||||
$ docker-compose up db api
|
||||
```
|
||||
|
||||
The API should be accessible via port HTTP/3000.
|
||||
The database should be accessible via port TCP/5432.
|
||||
|
||||
You can connect to the database via a web gui like [pgadmin](https://www.pgadmin.org/) or you can use a client [dbeaver](https://dbeaver.io/).
|
||||
|
||||
### SQL Configuration
|
||||
|
||||
Check and update your postgsail settings via SQL in the table `app_settings`:
|
||||
|
||||
```
|
||||
select * from app_settings;
|
||||
```sql
|
||||
SELECT * FROM app_settings;
|
||||
```
|
||||
|
||||
```
|
||||
```sql
|
||||
UPDATE app_settings
|
||||
SET
|
||||
value = 'new_value'
|
||||
@@ -54,50 +117,57 @@ UPDATE app_settings
|
||||
```
|
||||
|
||||
### Ingest data
|
||||
|
||||
Next, to ingest data from signalk, you need to install [signalk-postgsail](https://github.com/xbgmsharp/signalk-postgsail) plugin on your signalk server instance.
|
||||
|
||||
Also, if you like, you can import saillogger data using the postgsail helpers, [postgsail-helpers](https://github.com/xbgmsharp/postgsail-helpers).
|
||||
|
||||
You might want to import your influxdb1 data as well, [outflux](https://github.com/timescale/outflux).
|
||||
Any taker on influxdb2 to PostgSail? It is definitly possible.
|
||||
Any taker on influxdb2 to PostgSail? It is definitely possible.
|
||||
|
||||
Last, if you like, you can import the sample data from Signalk NMEA Plaka by running the tests.
|
||||
If everything goes well all tests pass sucessfully and you should recieve a few notifications by email or PushOver.
|
||||
If everything goes well all tests pass successfully and you should receive a few notifications by email or PushOver.
|
||||
|
||||
```
|
||||
$ docker-compose up tests
|
||||
```
|
||||
|
||||
### API Documentation
|
||||
|
||||
The OpenAPI description output depends on the permissions of the role that is contained in the JWT role claim.
|
||||
|
||||
API anonymous:
|
||||
|
||||
```
|
||||
$ curl http://localhost:3000/
|
||||
```
|
||||
|
||||
API user_role:
|
||||
|
||||
```
|
||||
$ curl http://localhost:3000/ -H 'Authorization: Bearer my_token_from_login_or_signup_fn'
|
||||
```
|
||||
|
||||
API vessel_role:
|
||||
|
||||
```
|
||||
$ curl http://localhost:3000/ -H 'Authorization: Bearer my_token_from_register_vessel_fn'
|
||||
```
|
||||
|
||||
#### API main workflow
|
||||
|
||||
Check the [unit test sample](https://github.com/xbgmsharp/PostgSail/blob/main/tests/index.js).
|
||||
Check the [unit test sample](https://github.com/xbgmsharp/postgsail/blob/main/tests/index.js).
|
||||
|
||||
### Docker dependencies
|
||||
|
||||
`docker-compose` is used to start environment dependencies. Dependencies consist of 2 containers:
|
||||
`docker-compose` is used to start environment dependencies. Dependencies consist of 3 containers:
|
||||
|
||||
- `timescaledb-postgis` alias `db`, PostgreSQL with TimescaleDB extension along with the PostGIS extension.
|
||||
- `postgrest` alias `api`, Standalone web server that turns your PostgreSQL database directly into a RESTful API.
|
||||
- `grafana` alias `app`, visualize and monitor your data
|
||||
|
||||
### Optional docker images
|
||||
- [Grafana](https://hub.docker.com/r/grafana/grafana), visualize and monitor your data
|
||||
|
||||
- [pgAdmin](https://hub.docker.com/r/dpage/pgadmin4), web UI to monitor and manage multiple PostgreSQL
|
||||
- [Swagger](https://hub.docker.com/r/swaggerapi/swagger-ui), web UI to visualize documentation from PostgREST
|
||||
|
||||
@@ -106,16 +176,22 @@ docker-compose -f docker-compose-optional.yml up
|
||||
```
|
||||
|
||||
### Software reference
|
||||
|
||||
Out of the box iot platform using docker with the following software:
|
||||
|
||||
- [Signal K server, a Free and Open Source universal marine data exchange format](https://signalk.org)
|
||||
- [PostgreSQL, open source object-relational database system](https://postgresql.org)
|
||||
- [TimescaleDB, Time-series data extends PostgreSQL](https://www.timescale.com)
|
||||
- [PostGIS, a spatial database extender for PostgreSQL object-relational database.](https://postgis.net/)
|
||||
- [Grafana, open observability platform | Grafana Labs](https://grafana.com)
|
||||
|
||||
### Releases & updates
|
||||
|
||||
PostgSail Release Notes & Future Plans: see planned and in-progress updates and detailed information about current and past releases. [PostgSail project](https://github.com/xbgmsharp?tab=projects)
|
||||
|
||||
### Support
|
||||
|
||||
To get support, please create new [issue](https://github.com/xbgmsharp/PostgSail/issues).
|
||||
To get support, please create new [issue](https://github.com/xbgmsharp/postgsail/issues).
|
||||
|
||||
There is more likely security flows and bugs.
|
||||
|
||||
|
15
Why.md
Normal file
15
Why.md
Normal file
@@ -0,0 +1,15 @@
|
||||
|
||||
#### Why not InfluxDB vs TimescaleDB
|
||||
I had an InfluxDBv1 on my RPI that kill the sdcard/usbkey. I had an InfluxDBv2, but there is no more ARM support and had to learn flux. Also could not find a good way to store data when offline. How do you export your data from a InfluxDBv2? Still looking for a solution.
|
||||
|
||||
With TimescaleDB, we already know SQL and there is a lot of tools and libraries that work with Postgres.
|
||||
However, InfluxDB does simplify things like schema and provide an http endpoint.
|
||||
With TimescaleDB, you are using a standard SQL table schema to store data from Signalk.
|
||||
|
||||
#### Why not MQTT vs HTTP
|
||||
Having MQTT, makes your application micro service approach. however you multiple the components and dependency. HTTP seem a more reliable solution specially for offline support as MQTT library have a buffer limitation.
|
||||
Using PostgREST is an alternative to manual CRUD programming. Custom API servers suffer problems. Writing business logic often duplicates, ignores or hobbles database structure. Object-relational mapping is a leaky abstraction leading to slow imperative code. The PostgREST philosophy establishes a single declarative source of truth: the data itself.
|
||||
|
||||
#### PostgreSQL got it all!
|
||||
No additional dependencies other than PostgreSQL, thanks to the extensions ecosystem.
|
||||
With PostgSail is based on PostGis and TimescaleDB and a few other pg extensions, https://github.com/xbgmsharp/timescaledb-postgis, fore more details.
|
113
docker-compose.dev.yml
Normal file
113
docker-compose.dev.yml
Normal file
@@ -0,0 +1,113 @@
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
dev:
|
||||
container_name: dev
|
||||
image: mcr.microsoft.com/devcontainers/base:ubuntu
|
||||
volumes:
|
||||
- ../:/workspaces:cached
|
||||
- /var/run/docker.sock:/var/run/docker.sock
|
||||
#network_mode: service:db
|
||||
links:
|
||||
- "api:postgrest"
|
||||
- "db:database"
|
||||
#- "web_dev:web_dev"
|
||||
command: sleep infinity
|
||||
|
||||
pgadmin:
|
||||
image: dpage/pgadmin4:latest
|
||||
container_name: pgadmin
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- data:/var/lib/pgadmin
|
||||
- ./pgadmin_servers.json:/servers.json:ro
|
||||
links:
|
||||
- "db:database"
|
||||
ports:
|
||||
- 5050:5050
|
||||
environment:
|
||||
- PGADMIN_DEFAULT_EMAIL=${PGADMIN_DEFAULT_EMAIL}
|
||||
- PGADMIN_DEFAULT_PASSWORD=${PGADMIN_DEFAULT_PASSWORD}
|
||||
- PGADMIN_LISTEN_ADDRESS=0.0.0.0
|
||||
- PGADMIN_LISTEN_PORT=5050
|
||||
- PGADMIN_SERVER_JSON_FILE=/servers.json
|
||||
- PGADMIN_DISABLE_POSTFIX=true
|
||||
depends_on:
|
||||
- db
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
|
||||
swagger:
|
||||
image: swaggerapi/swagger-ui
|
||||
container_name: swagger
|
||||
restart: unless-stopped
|
||||
links:
|
||||
- "api:postgrest"
|
||||
ports:
|
||||
- "8181:8080"
|
||||
expose:
|
||||
- "8080"
|
||||
environment:
|
||||
- API_URL=http://api:3000/
|
||||
depends_on:
|
||||
- db
|
||||
- api
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
|
||||
tests:
|
||||
image: xbgmsharp/postgsail-tests
|
||||
build:
|
||||
context: ./tests
|
||||
dockerfile: Dockerfile
|
||||
container_name: tests
|
||||
volumes:
|
||||
- ./tests:/mnt
|
||||
working_dir: /mnt
|
||||
command: 'bash tests.sh'
|
||||
links:
|
||||
- "api:postgrest"
|
||||
- "db:database"
|
||||
env_file: .env
|
||||
environment:
|
||||
- POSTGRES_USER=${POSTGRES_USER}
|
||||
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
|
||||
- PGPASSWORD=${POSTGRES_PASSWORD}
|
||||
- PGSAIL_API_URI=http://api:3000
|
||||
- PGSAIL_DB_URI=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@db:5432/signalk
|
||||
depends_on:
|
||||
- db
|
||||
- api
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
|
||||
web_dev:
|
||||
image: xbgmsharp/postgsail-vuestic:dev
|
||||
build:
|
||||
context: https://github.com/xbgmsharp/vuestic-postgsail.git#live
|
||||
dockerfile: Dockerfile_dev
|
||||
container_name: web_dev
|
||||
restart: unless-stopped
|
||||
volumes:
|
||||
- ./frontend:/app
|
||||
links:
|
||||
- "api:postgrest"
|
||||
ports:
|
||||
- 8080:8080
|
||||
environment:
|
||||
- VITE_PGSAIL_URL=${PGSAIL_API_URL}
|
||||
- VITE_APP_INCLUDE_DEMOS=false
|
||||
- VITE_APP_BUILD_VERSION=true
|
||||
- VITE_APP_TITLE=${VITE_APP_TITLE}
|
||||
depends_on:
|
||||
- db
|
||||
- api
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
|
||||
volumes:
|
||||
data: {}
|
@@ -1,27 +1,42 @@
|
||||
version: '3.9'
|
||||
version: "3.9"
|
||||
|
||||
services:
|
||||
db:
|
||||
image: xbgmsharp/timescaledb-postgis
|
||||
container_name: db
|
||||
hostname: db
|
||||
restart: unless-stopped
|
||||
env_file: .env
|
||||
environment:
|
||||
- POSTGRES_DB=postgres
|
||||
- TIMESCALEDB_TELEMETRY=off
|
||||
- PGDATA=/var/lib/postgresql/data/pgdata
|
||||
- TZ=UTC
|
||||
- POSTGRES_USER=${POSTGRES_USER}
|
||||
- POSTGRES_PASSWORD=${POSTGRES_PASSWORD}
|
||||
- POSTGRES_DB=${POSTGRES_DB}
|
||||
- PGSAIL_AUTHENTICATOR_PASSWORD=${PGSAIL_AUTHENTICATOR_PASSWORD}
|
||||
ports:
|
||||
- "5432:5432"
|
||||
volumes:
|
||||
- data:/var/lib/postgresql/data
|
||||
- $PWD/initdb:/docker-entrypoint-initdb.d
|
||||
- ./db-data:/var/lib/postgresql/data
|
||||
- ./initdb:/docker-entrypoint-initdb.d
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
healthcheck:
|
||||
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_USER} -d signalk'"]
|
||||
interval: 60s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
start_period: 100s
|
||||
|
||||
api:
|
||||
image: postgrest/postgrest
|
||||
container_name: api
|
||||
hostname: api
|
||||
restart: unless-stopped
|
||||
links:
|
||||
- "db:database"
|
||||
ports:
|
||||
- "3000:3000"
|
||||
env_file: .env
|
||||
@@ -30,12 +45,88 @@ services:
|
||||
PGRST_DB_ANON_ROLE: api_anonymous
|
||||
PGRST_OPENAPI_SERVER_PROXY_URI: http://127.0.0.1:3000
|
||||
PGRST_DB_PRE_REQUEST: public.check_jwt
|
||||
network_mode: "host"
|
||||
PGRST_DB_URI: ${PGRST_DB_URI}
|
||||
PGRST_JWT_SECRET: ${PGRST_JWT_SECRET}
|
||||
depends_on:
|
||||
- db
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
#healthcheck:
|
||||
# test: ["CMD-SHELL", "sh -c 'curl --fail http://localhost:3003/live || exit 1'"]
|
||||
# interval: 60s
|
||||
# timeout: 10s
|
||||
# retries: 5
|
||||
# start_period: 100s
|
||||
|
||||
app:
|
||||
image: grafana/grafana:latest
|
||||
container_name: app
|
||||
restart: unless-stopped
|
||||
links:
|
||||
- "db:database"
|
||||
volumes:
|
||||
- data:/var/lib/grafana
|
||||
- data:/var/log/grafana
|
||||
- ./grafana:/etc/grafana
|
||||
ports:
|
||||
- "3001:3000"
|
||||
env_file: .env
|
||||
environment:
|
||||
- GF_INSTALL_PLUGINS=pr0ps-trackmap-panel,fatcloud-windrose-panel
|
||||
- GF_USERS_ALLOW_SIGN_UP=false
|
||||
- GF_SMTP_ENABLED=false
|
||||
- PGSAIL_GRAFANA_URI=db:5432
|
||||
- PGSAIL_GRAFANA_PASSWORD=${PGSAIL_GRAFANA_PASSWORD}
|
||||
depends_on:
|
||||
- db
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
#healthcheck:
|
||||
# test: ["CMD-SHELL", "sh -c 'curl --fail http://localhost:3000/healthz || exit 1'"]
|
||||
# interval: 60s
|
||||
# timeout: 10s
|
||||
# retries: 5
|
||||
# start_period: 100s
|
||||
|
||||
telegram:
|
||||
image: xbgmsharp/postgsail-telegram-bot
|
||||
container_name: telegram
|
||||
restart: unless-stopped
|
||||
links:
|
||||
- "api:postgrest"
|
||||
ports:
|
||||
- "3005:8080"
|
||||
environment:
|
||||
- BOT_TOKEN=${PGSAIL_TELEGRAM_BOT_TOKEN}
|
||||
- PGSAIL_URL=${PGSAIL_API_URL}
|
||||
depends_on:
|
||||
- db
|
||||
- api
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
|
||||
web:
|
||||
image: xbgmsharp/postgsail-vuestic
|
||||
container_name: web
|
||||
restart: unless-stopped
|
||||
links:
|
||||
- "api:postgrest"
|
||||
ports:
|
||||
- 8080:8080
|
||||
environment:
|
||||
- VITE_PGSAIL_URL=${PGSAIL_API_URL}
|
||||
- VITE_APP_INCLUDE_DEMOS=false
|
||||
- VITE_APP_BUILD_VERSION=true
|
||||
- VITE_APP_TITLE=${VITE_APP_TITLE}
|
||||
depends_on:
|
||||
- db
|
||||
- api
|
||||
logging:
|
||||
options:
|
||||
max-size: 10m
|
||||
|
||||
volumes:
|
||||
data: {}
|
||||
|
1
frontend
Submodule
1
frontend
Submodule
Submodule frontend added at 8bcf7ca2a6
1455
grafana/dashboards/Electrical.json
Normal file
1455
grafana/dashboards/Electrical.json
Normal file
File diff suppressed because it is too large
Load Diff
472
grafana/dashboards/Logbook.json
Normal file
472
grafana/dashboards/Logbook.json
Normal file
@@ -0,0 +1,472 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": {
|
||||
"type": "datasource",
|
||||
"uid": "grafana"
|
||||
},
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"target": {
|
||||
"limit": 100,
|
||||
"matchAny": false,
|
||||
"tags": [],
|
||||
"type": "dashboard"
|
||||
},
|
||||
"type": "dashboard"
|
||||
}
|
||||
]
|
||||
},
|
||||
"description": "Logs,Moorages,Stays",
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
"id": 1,
|
||||
"links": [
|
||||
{
|
||||
"asDropdown": false,
|
||||
"icon": "external link",
|
||||
"includeVars": true,
|
||||
"keepTime": false,
|
||||
"tags": [],
|
||||
"targetBlank": true,
|
||||
"title": "New link",
|
||||
"tooltip": "",
|
||||
"type": "dashboards",
|
||||
"url": ""
|
||||
}
|
||||
],
|
||||
"liveNow": false,
|
||||
"panels": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "thresholds"
|
||||
},
|
||||
"custom": {
|
||||
"align": "auto",
|
||||
"cellOptions": {
|
||||
"type": "auto"
|
||||
},
|
||||
"filterable": false,
|
||||
"inspect": false
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "id"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "custom.width",
|
||||
"value": 41
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "distance"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "custom.width",
|
||||
"value": 104
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 10,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"id": 2,
|
||||
"options": {
|
||||
"footer": {
|
||||
"countRows": false,
|
||||
"fields": "",
|
||||
"reducer": [
|
||||
"sum"
|
||||
],
|
||||
"show": false
|
||||
},
|
||||
"showHeader": true,
|
||||
"sortBy": []
|
||||
},
|
||||
"pluginVersion": "9.4.3",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"format": "table",
|
||||
"group": [],
|
||||
"metricColumn": "none",
|
||||
"rawQuery": true,
|
||||
"rawSql": "SET vessel.id = '${__user.login}';\nSELECT * from api.logs_view",
|
||||
"refId": "A",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"_from_lat"
|
||||
],
|
||||
"type": "column"
|
||||
}
|
||||
]
|
||||
],
|
||||
"sql": {
|
||||
"columns": [
|
||||
{
|
||||
"parameters": [],
|
||||
"type": "function"
|
||||
}
|
||||
],
|
||||
"groupBy": [
|
||||
{
|
||||
"property": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "groupBy"
|
||||
}
|
||||
],
|
||||
"limit": 50
|
||||
},
|
||||
"table": "logs",
|
||||
"timeColumn": "_from_time",
|
||||
"timeColumnType": "timestamp",
|
||||
"where": []
|
||||
}
|
||||
],
|
||||
"title": "Logbook ${__user.email} / ${__user.login}",
|
||||
"type": "table"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "thresholds"
|
||||
},
|
||||
"custom": {
|
||||
"align": "auto",
|
||||
"cellOptions": {
|
||||
"type": "auto"
|
||||
},
|
||||
"filterable": false,
|
||||
"inspect": false
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "id"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "custom.width",
|
||||
"value": 41
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "distance"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "custom.width",
|
||||
"value": 104
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 10,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 10
|
||||
},
|
||||
"id": 5,
|
||||
"options": {
|
||||
"footer": {
|
||||
"countRows": false,
|
||||
"fields": "",
|
||||
"reducer": [
|
||||
"sum"
|
||||
],
|
||||
"show": false
|
||||
},
|
||||
"showHeader": true,
|
||||
"sortBy": []
|
||||
},
|
||||
"pluginVersion": "9.4.3",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"format": "table",
|
||||
"group": [],
|
||||
"metricColumn": "none",
|
||||
"rawQuery": true,
|
||||
"rawSql": "SET vessel.id = '${__user.login}';\nSELECT * from api.stays_view",
|
||||
"refId": "A",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"_from_lat"
|
||||
],
|
||||
"type": "column"
|
||||
}
|
||||
]
|
||||
],
|
||||
"sql": {
|
||||
"columns": [
|
||||
{
|
||||
"parameters": [],
|
||||
"type": "function"
|
||||
}
|
||||
],
|
||||
"groupBy": [
|
||||
{
|
||||
"property": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "groupBy"
|
||||
}
|
||||
],
|
||||
"limit": 50
|
||||
},
|
||||
"table": "logs",
|
||||
"timeColumn": "_from_time",
|
||||
"timeColumnType": "timestamp",
|
||||
"where": []
|
||||
}
|
||||
],
|
||||
"title": "Stays",
|
||||
"type": "table"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "thresholds"
|
||||
},
|
||||
"custom": {
|
||||
"align": "auto",
|
||||
"cellOptions": {
|
||||
"type": "auto"
|
||||
},
|
||||
"filterable": false,
|
||||
"inspect": false
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
}
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "id"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "custom.width",
|
||||
"value": 41
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"matcher": {
|
||||
"id": "byName",
|
||||
"options": "distance"
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "custom.width",
|
||||
"value": 104
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 10,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 20
|
||||
},
|
||||
"id": 6,
|
||||
"options": {
|
||||
"footer": {
|
||||
"countRows": false,
|
||||
"fields": "",
|
||||
"reducer": [
|
||||
"sum"
|
||||
],
|
||||
"show": false
|
||||
},
|
||||
"showHeader": true,
|
||||
"sortBy": []
|
||||
},
|
||||
"pluginVersion": "9.4.3",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"format": "table",
|
||||
"group": [],
|
||||
"metricColumn": "none",
|
||||
"rawQuery": true,
|
||||
"rawSql": "SET vessel.id = '${__user.login}';\nselect * from api.moorages_view",
|
||||
"refId": "A",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"_from_lat"
|
||||
],
|
||||
"type": "column"
|
||||
}
|
||||
]
|
||||
],
|
||||
"sql": {
|
||||
"columns": [
|
||||
{
|
||||
"parameters": [],
|
||||
"type": "function"
|
||||
}
|
||||
],
|
||||
"groupBy": [
|
||||
{
|
||||
"property": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "groupBy"
|
||||
}
|
||||
],
|
||||
"limit": 50
|
||||
},
|
||||
"table": "logs",
|
||||
"timeColumn": "_from_time",
|
||||
"timeColumnType": "timestamp",
|
||||
"where": []
|
||||
}
|
||||
],
|
||||
"title": "Moorages",
|
||||
"type": "table"
|
||||
}
|
||||
],
|
||||
"refresh": "",
|
||||
"revision": 1,
|
||||
"schemaVersion": 38,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"definition": "SET \"user.email\" = '${__user.email}';\nSET vessel.id = '${__user.login}';\nSELECT\n v.name AS __text,\n m.vessel_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
|
||||
"description": "Vessel Name",
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": "Boat",
|
||||
"multi": false,
|
||||
"name": "boat",
|
||||
"options": [],
|
||||
"query": "SET \"user.email\" = '${__user.email}';\nSET vessel.id = '${__user.login}';\nSELECT\n v.name AS __text,\n m.vessel_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"type": "query"
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-15d",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {},
|
||||
"timezone": "utc",
|
||||
"title": "Logbook",
|
||||
"uid": "E_FUkx9nk",
|
||||
"version": 1,
|
||||
"weekStart": ""
|
||||
}
|
735
grafana/dashboards/Monitor.json
Normal file
735
grafana/dashboards/Monitor.json
Normal file
@@ -0,0 +1,735 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": {
|
||||
"type": "datasource",
|
||||
"uid": "grafana"
|
||||
},
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"target": {
|
||||
"limit": 100,
|
||||
"matchAny": false,
|
||||
"tags": [],
|
||||
"type": "dashboard"
|
||||
},
|
||||
"type": "dashboard"
|
||||
}
|
||||
]
|
||||
},
|
||||
"description": "Monitoring view",
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
"id": 2,
|
||||
"links": [
|
||||
{
|
||||
"asDropdown": false,
|
||||
"icon": "external link",
|
||||
"includeVars": true,
|
||||
"keepTime": false,
|
||||
"tags": [],
|
||||
"targetBlank": true,
|
||||
"title": "New link",
|
||||
"tooltip": "",
|
||||
"type": "dashboards",
|
||||
"url": ""
|
||||
}
|
||||
],
|
||||
"liveNow": false,
|
||||
"panels": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "thresholds"
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "volt"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 4,
|
||||
"w": 12,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"id": 8,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"text": {},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "9.4.3",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"format": "time_series",
|
||||
"group": [],
|
||||
"metricColumn": "none",
|
||||
"rawQuery": true,
|
||||
"rawSql": "SET vessel.id = '${__user.login}';\nSELECT\n time AS \"time\",\n cast(metrics-> 'electrical.batteries.AUX2.voltage' AS numeric) AS AUX2Voltage\nFROM api.metrics\nWHERE\n $__timeFilter(time)\n AND vessel_id = '${boat}'\nORDER BY 1",
|
||||
"refId": "A",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"_from_lat"
|
||||
],
|
||||
"type": "column"
|
||||
}
|
||||
]
|
||||
],
|
||||
"sql": {
|
||||
"columns": [
|
||||
{
|
||||
"parameters": [],
|
||||
"type": "function"
|
||||
}
|
||||
],
|
||||
"groupBy": [
|
||||
{
|
||||
"property": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "groupBy"
|
||||
}
|
||||
],
|
||||
"limit": 50
|
||||
},
|
||||
"table": "trip_in_progress",
|
||||
"timeColumn": "_from_time",
|
||||
"timeColumnType": "timestamp",
|
||||
"where": [
|
||||
{
|
||||
"name": "$__timeFilter",
|
||||
"params": [],
|
||||
"type": "macro"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"title": "AUX2 Voltage",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "thresholds"
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "celsius"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 4,
|
||||
"w": 12,
|
||||
"x": 12,
|
||||
"y": 0
|
||||
},
|
||||
"id": 7,
|
||||
"options": {
|
||||
"colorMode": "value",
|
||||
"graphMode": "area",
|
||||
"justifyMode": "auto",
|
||||
"orientation": "auto",
|
||||
"reduceOptions": {
|
||||
"calcs": [
|
||||
"lastNotNull"
|
||||
],
|
||||
"fields": "",
|
||||
"values": false
|
||||
},
|
||||
"text": {},
|
||||
"textMode": "auto"
|
||||
},
|
||||
"pluginVersion": "9.4.3",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"format": "time_series",
|
||||
"group": [],
|
||||
"metricColumn": "none",
|
||||
"rawQuery": true,
|
||||
"rawSql": "SET vessel.id = '${__user.login}';\nSELECT\n time AS \"time\",\n cast(metrics-> 'environment.outside.temperature' AS numeric) - 273.15 AS OutsideTemperature\nFROM api.metrics\nWHERE\n $__timeFilter(time)\n AND vessel_id = '${boat}'\nORDER BY 1",
|
||||
"refId": "A",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"_from_lat"
|
||||
],
|
||||
"type": "column"
|
||||
}
|
||||
]
|
||||
],
|
||||
"sql": {
|
||||
"columns": [
|
||||
{
|
||||
"parameters": [],
|
||||
"type": "function"
|
||||
}
|
||||
],
|
||||
"groupBy": [
|
||||
{
|
||||
"property": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "groupBy"
|
||||
}
|
||||
],
|
||||
"limit": 50
|
||||
},
|
||||
"table": "trip_in_progress",
|
||||
"timeColumn": "_from_time",
|
||||
"timeColumnType": "timestamp",
|
||||
"where": [
|
||||
{
|
||||
"name": "$__timeFilter",
|
||||
"params": [],
|
||||
"type": "macro"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"title": "Temperature",
|
||||
"type": "stat"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"graph": false,
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "never",
|
||||
"spanNulls": true,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": [
|
||||
{
|
||||
"__systemRef": "hideSeriesFrom",
|
||||
"matcher": {
|
||||
"id": "byNames",
|
||||
"options": {
|
||||
"mode": "exclude",
|
||||
"names": [
|
||||
"aux2"
|
||||
],
|
||||
"prefix": "All except:",
|
||||
"readOnly": true
|
||||
}
|
||||
},
|
||||
"properties": [
|
||||
{
|
||||
"id": "custom.hideFrom",
|
||||
"value": {
|
||||
"graph": true,
|
||||
"legend": false,
|
||||
"tooltip": false
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 4
|
||||
},
|
||||
"id": 4,
|
||||
"options": {
|
||||
"graph": {},
|
||||
"legend": {
|
||||
"calcs": [],
|
||||
"displayMode": "list",
|
||||
"placement": "bottom",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "single",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "7.5.4",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"format": "time_series",
|
||||
"group": [],
|
||||
"metricColumn": "none",
|
||||
"rawQuery": true,
|
||||
"rawSql": "SET vessel.id = '${__user.login}';\nSELECT\n time AS \"time\",\n cast(metrics-> 'electrical.batteries.AUX2.voltage' AS numeric) AS AUX2,\n\tcast(metrics-> 'electrical.batteries.House.voltage' AS numeric) AS House,\n\tcast(metrics-> 'environment.rpi.pijuice.gpioVoltage' AS numeric) AS gpioVoltage,\n\tcast(metrics-> 'electrical.batteries.Seatalk.voltage' AS numeric) AS SeatalkVoltage,\n\tcast(metrics-> 'electrical.batteries.Starter.voltage' AS numeric) AS StarterVoltage,\n\tcast(metrics-> 'environment.rpi.pijuice.batteryVoltage' AS numeric) AS RPIBatteryVoltage,\n\tcast(metrics-> 'electrical.batteries.victronDevice.voltage' AS numeric) AS victronDeviceVoltage\nFROM api.metrics\nWHERE\n $__timeFilter(time)\n\tAND vessel_id = '${boat}'\nORDER BY 1",
|
||||
"refId": "A",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"_from_lat"
|
||||
],
|
||||
"type": "column"
|
||||
}
|
||||
]
|
||||
],
|
||||
"sql": {
|
||||
"columns": [
|
||||
{
|
||||
"parameters": [],
|
||||
"type": "function"
|
||||
}
|
||||
],
|
||||
"groupBy": [
|
||||
{
|
||||
"property": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "groupBy"
|
||||
}
|
||||
],
|
||||
"limit": 50
|
||||
},
|
||||
"table": "trip_in_progress",
|
||||
"timeColumn": "_from_time",
|
||||
"timeColumnType": "timestamp",
|
||||
"where": [
|
||||
{
|
||||
"name": "$__timeFilter",
|
||||
"params": [],
|
||||
"type": "macro"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"title": "Voltage",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"graph": false,
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "never",
|
||||
"spanNulls": true,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 13
|
||||
},
|
||||
"id": 2,
|
||||
"options": {
|
||||
"graph": {},
|
||||
"legend": {
|
||||
"calcs": [],
|
||||
"displayMode": "list",
|
||||
"placement": "bottom",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "single",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "7.5.4",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"format": "table",
|
||||
"group": [],
|
||||
"metricColumn": "none",
|
||||
"rawQuery": true,
|
||||
"rawSql": "SET vessel.id = '${__user.login}';\nSELECT\n time AS \"time\",\n cast(metrics-> 'environment.water.temperature' AS numeric) - 273.15 AS waterTemperature,\n\tcast(metrics-> 'environment.inside.temperature' AS numeric) - 273.15 AS insideTemperature,\n\tcast(metrics-> 'environment.outside.temperature' AS numeric) - 273.15 AS outsideTemperature\nFROM api.metrics\nWHERE\n $__timeFilter(time)\n AND vessel_id = '${boat}'\nORDER BY 1",
|
||||
"refId": "A",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"_from_lat"
|
||||
],
|
||||
"type": "column"
|
||||
}
|
||||
]
|
||||
],
|
||||
"sql": {
|
||||
"columns": [
|
||||
{
|
||||
"parameters": [],
|
||||
"type": "function"
|
||||
}
|
||||
],
|
||||
"groupBy": [
|
||||
{
|
||||
"property": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "groupBy"
|
||||
}
|
||||
],
|
||||
"limit": 50
|
||||
},
|
||||
"table": "trip_in_progress",
|
||||
"timeColumn": "_from_time",
|
||||
"timeColumnType": "timestamp",
|
||||
"where": [
|
||||
{
|
||||
"name": "$__timeFilter",
|
||||
"params": [],
|
||||
"type": "macro"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"title": "Temperatures",
|
||||
"type": "timeseries"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"fieldConfig": {
|
||||
"defaults": {
|
||||
"color": {
|
||||
"mode": "palette-classic"
|
||||
},
|
||||
"custom": {
|
||||
"axisCenteredZero": false,
|
||||
"axisColorMode": "text",
|
||||
"axisLabel": "",
|
||||
"axisPlacement": "auto",
|
||||
"barAlignment": 0,
|
||||
"drawStyle": "line",
|
||||
"fillOpacity": 10,
|
||||
"gradientMode": "none",
|
||||
"hideFrom": {
|
||||
"legend": false,
|
||||
"tooltip": false,
|
||||
"viz": false
|
||||
},
|
||||
"lineInterpolation": "linear",
|
||||
"lineWidth": 1,
|
||||
"pointSize": 5,
|
||||
"scaleDistribution": {
|
||||
"type": "linear"
|
||||
},
|
||||
"showPoints": "never",
|
||||
"spanNulls": false,
|
||||
"stacking": {
|
||||
"group": "A",
|
||||
"mode": "none"
|
||||
},
|
||||
"thresholdsStyle": {
|
||||
"mode": "off"
|
||||
}
|
||||
},
|
||||
"mappings": [],
|
||||
"thresholds": {
|
||||
"mode": "absolute",
|
||||
"steps": [
|
||||
{
|
||||
"color": "green",
|
||||
"value": null
|
||||
},
|
||||
{
|
||||
"color": "red",
|
||||
"value": 80
|
||||
}
|
||||
]
|
||||
},
|
||||
"unit": "short"
|
||||
},
|
||||
"overrides": []
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 9,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 22
|
||||
},
|
||||
"id": 5,
|
||||
"options": {
|
||||
"legend": {
|
||||
"calcs": [],
|
||||
"displayMode": "list",
|
||||
"placement": "bottom",
|
||||
"showLegend": true
|
||||
},
|
||||
"tooltip": {
|
||||
"mode": "multi",
|
||||
"sort": "none"
|
||||
}
|
||||
},
|
||||
"pluginVersion": "9.3.1",
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"editorMode": "code",
|
||||
"format": "table",
|
||||
"group": [],
|
||||
"metricColumn": "none",
|
||||
"rawQuery": true,
|
||||
"rawSql": "SET vessel.id = '${__user.login}';\nwith config as (select set_config('vessel.id', '${boat}', false) ) select * from api.monitoring_view",
|
||||
"refId": "A",
|
||||
"select": [
|
||||
[
|
||||
{
|
||||
"params": [
|
||||
"_from_lat"
|
||||
],
|
||||
"type": "column"
|
||||
}
|
||||
]
|
||||
],
|
||||
"sql": {
|
||||
"columns": [
|
||||
{
|
||||
"parameters": [],
|
||||
"type": "function"
|
||||
}
|
||||
],
|
||||
"groupBy": [
|
||||
{
|
||||
"property": {
|
||||
"type": "string"
|
||||
},
|
||||
"type": "groupBy"
|
||||
}
|
||||
],
|
||||
"limit": 50
|
||||
},
|
||||
"table": "trip_in_progress",
|
||||
"timeColumn": "_from_time",
|
||||
"timeColumnType": "timestamp",
|
||||
"where": [
|
||||
{
|
||||
"name": "$__timeFilter",
|
||||
"params": [],
|
||||
"type": "macro"
|
||||
}
|
||||
]
|
||||
}
|
||||
],
|
||||
"title": "Title",
|
||||
"type": "timeseries"
|
||||
}
|
||||
],
|
||||
"refresh": "",
|
||||
"revision": 1,
|
||||
"schemaVersion": 38,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "PCC52D03280B7034C"
|
||||
},
|
||||
"definition": "SET \"user.email\" = '${__user.email}';\nSET vessel.id = '${__user.login}';\nSELECT\n v.name AS __text,\n m.vessel_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
|
||||
"description": "Vessel name",
|
||||
"hide": 0,
|
||||
"includeAll": false,
|
||||
"label": "Boat",
|
||||
"multi": false,
|
||||
"name": "boat",
|
||||
"options": [],
|
||||
"query": "SET \"user.email\" = '${__user.email}';\nSET vessel.id = '${__user.login}';\nSELECT\n v.name AS __text,\n m.vessel_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
|
||||
"refresh": 1,
|
||||
"regex": "",
|
||||
"skipUrlSync": false,
|
||||
"sort": 0,
|
||||
"type": "query"
|
||||
}
|
||||
]
|
||||
},
|
||||
"time": {
|
||||
"from": "now-30d",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"refresh_intervals": [
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
]
|
||||
},
|
||||
"timezone": "utc",
|
||||
"title": "Monitor",
|
||||
"uid": "apqDcPjMz",
|
||||
"version": 1,
|
||||
"weekStart": ""
|
||||
}
|
1341
grafana/dashboards/RPI.json
Normal file
1341
grafana/dashboards/RPI.json
Normal file
File diff suppressed because it is too large
Load Diff
1987
grafana/dashboards/Solar.json
Normal file
1987
grafana/dashboards/Solar.json
Normal file
File diff suppressed because it is too large
Load Diff
1049
grafana/dashboards/TimescaleDB.json
Normal file
1049
grafana/dashboards/TimescaleDB.json
Normal file
File diff suppressed because it is too large
Load Diff
1987
grafana/dashboards/Weather.json
Normal file
1987
grafana/dashboards/Weather.json
Normal file
File diff suppressed because it is too large
Load Diff
134
grafana/dashboards/home.json
Normal file
134
grafana/dashboards/home.json
Normal file
@@ -0,0 +1,134 @@
|
||||
{
|
||||
"annotations": {
|
||||
"list": [
|
||||
{
|
||||
"builtIn": 1,
|
||||
"datasource": {
|
||||
"type": "grafana",
|
||||
"uid": "-- Grafana --"
|
||||
},
|
||||
"enable": true,
|
||||
"hide": true,
|
||||
"iconColor": "rgba(0, 211, 255, 1)",
|
||||
"name": "Annotations & Alerts",
|
||||
"target": {
|
||||
"limit": 100,
|
||||
"matchAny": false,
|
||||
"tags": [],
|
||||
"type": "dashboard"
|
||||
},
|
||||
"type": "dashboard"
|
||||
}
|
||||
]
|
||||
},
|
||||
"editable": true,
|
||||
"fiscalYearStartMonth": 0,
|
||||
"graphTooltip": 0,
|
||||
"links": [],
|
||||
"liveNow": false,
|
||||
"panels": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "OIttR1sVk"
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 3,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 0
|
||||
},
|
||||
"id": 1,
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "OIttR1sVk"
|
||||
},
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"type": "welcome"
|
||||
},
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "OIttR1sVk"
|
||||
},
|
||||
"gridPos": {
|
||||
"h": 12,
|
||||
"w": 24,
|
||||
"x": 0,
|
||||
"y": 3
|
||||
},
|
||||
"id": 3,
|
||||
"links": [],
|
||||
"options": {
|
||||
"folderId": 0,
|
||||
"maxItems": 30,
|
||||
"query": "",
|
||||
"showHeadings": true,
|
||||
"showRecentlyViewed": true,
|
||||
"showSearch": false,
|
||||
"showStarred": true,
|
||||
"tags": []
|
||||
},
|
||||
"pluginVersion": "9.4.3",
|
||||
"tags": [],
|
||||
"targets": [
|
||||
{
|
||||
"datasource": {
|
||||
"type": "postgres",
|
||||
"uid": "OIttR1sVk"
|
||||
},
|
||||
"refId": "A"
|
||||
}
|
||||
],
|
||||
"title": "Dashboards",
|
||||
"type": "dashlist"
|
||||
}
|
||||
],
|
||||
"refresh": "",
|
||||
"revision": 1,
|
||||
"schemaVersion": 38,
|
||||
"style": "dark",
|
||||
"tags": [],
|
||||
"templating": {
|
||||
"list": []
|
||||
},
|
||||
"time": {
|
||||
"from": "now-6h",
|
||||
"to": "now"
|
||||
},
|
||||
"timepicker": {
|
||||
"hidden": true,
|
||||
"refresh_intervals": [
|
||||
"5s",
|
||||
"10s",
|
||||
"30s",
|
||||
"1m",
|
||||
"5m",
|
||||
"15m",
|
||||
"30m",
|
||||
"1h",
|
||||
"2h",
|
||||
"1d"
|
||||
],
|
||||
"time_options": [
|
||||
"5m",
|
||||
"15m",
|
||||
"1h",
|
||||
"6h",
|
||||
"12h",
|
||||
"24h",
|
||||
"2d",
|
||||
"7d",
|
||||
"30d"
|
||||
],
|
||||
"type": "timepicker"
|
||||
},
|
||||
"timezone": "browser",
|
||||
"title": "Home",
|
||||
"version": 0,
|
||||
"weekStart": ""
|
||||
}
|
16
grafana/grafana.ini
Normal file
16
grafana/grafana.ini
Normal file
@@ -0,0 +1,16 @@
|
||||
[users]
|
||||
allow_sign_up = false
|
||||
auto_assign_org = true
|
||||
auto_assign_org_role = Editor
|
||||
|
||||
[auth.proxy]
|
||||
enabled = true
|
||||
header_name = X-WEBAUTH-USER
|
||||
header_property = email
|
||||
auto_sign_up = true
|
||||
enable_login_token = true
|
||||
login_maximum_inactive_lifetime_duration = 12h
|
||||
login_maximum_lifetime_duration = 1d
|
||||
|
||||
[dashboards]
|
||||
default_home_dashboard_path = /etc/grafana/dashboards/home.json
|
25
grafana/provisioning/dashboards/default.yaml
Normal file
25
grafana/provisioning/dashboards/default.yaml
Normal file
@@ -0,0 +1,25 @@
|
||||
|
||||
apiVersion: 1
|
||||
|
||||
providers:
|
||||
# <string> an unique provider name. Required
|
||||
- name: 'PostgSail'
|
||||
# <int> Org id. Default to 1
|
||||
orgId: 1
|
||||
# <string> name of the dashboard folder.
|
||||
#folder: 'PostgSail'
|
||||
# <string> folder UID. will be automatically generated if not specified
|
||||
#folderUid: ''
|
||||
# <string> provider type. Default to 'file'
|
||||
type: file
|
||||
# <bool> disable dashboard deletion
|
||||
disableDeletion: false
|
||||
# <int> how often Grafana will scan for changed dashboards
|
||||
updateIntervalSeconds: 60
|
||||
# <bool> allow updating provisioned dashboards from the UI
|
||||
allowUiUpdates: true
|
||||
options:
|
||||
# <string, required> path to dashboard files on disk. Required when using the 'file' type
|
||||
path: /etc/grafana/dashboards/
|
||||
# <bool> use folder names from filesystem to create folders in Grafana
|
||||
foldersFromFilesStructure: true
|
18
grafana/provisioning/datasources/timescale.yaml
Normal file
18
grafana/provisioning/datasources/timescale.yaml
Normal file
@@ -0,0 +1,18 @@
|
||||
apiVersion: 1
|
||||
|
||||
datasources:
|
||||
- name: PostgreSQL
|
||||
isDefault: true
|
||||
type: postgres
|
||||
url: '${PGSAIL_GRAFANA_URI}'
|
||||
database: signalk
|
||||
user: grafana
|
||||
secureJsonData:
|
||||
password: '${PGSAIL_GRAFANA_PASSWORD}'
|
||||
jsonData:
|
||||
sslmode: 'disable' # disable/require/verify-ca/verify-full
|
||||
maxOpenConns: 10 # Grafana v5.4+
|
||||
maxIdleConns: 2 # Grafana v5.4+
|
||||
connMaxLifetime: 14400 # Grafana v5.4+
|
||||
postgresVersion: 1500 # 903=9.3, 904=9.4, 905=9.5, 906=9.6, 1000=10
|
||||
timescaledb: true
|
@@ -7,20 +7,30 @@ echo $PGDATA
|
||||
echo "${PGDATA}/postgresql.conf"
|
||||
|
||||
cat << 'EOF' >> ${PGDATA}/postgresql.conf
|
||||
# PostgSail pg15
|
||||
# Add settings for extensions here
|
||||
shared_preload_libraries = 'timescaledb,pg_stat_statements,pg_cron'
|
||||
# TimescaleDB - time series database
|
||||
# Disable timescaleDB telemetry
|
||||
timescaledb.telemetry_level=off
|
||||
|
||||
# pg_cron - Run periodic jobs in PostgreSQL
|
||||
# pg_cron database
|
||||
#cron.database_name = 'signalk'
|
||||
# pg_cron connect via a unix domain socket
|
||||
cron.host = '/var/run/postgresql/'
|
||||
# Increase the number of available background workers from the default of 8
|
||||
#max_worker_processes = 8
|
||||
|
||||
# monitoring https://www.postgresql.org/docs/current/runtime-config-statistics.html#GUC-TRACK-IO-TIMING
|
||||
track_io_timing = on
|
||||
stats_temp_directory = '/tmp'
|
||||
track_functions = all
|
||||
# Remove in pg-15, does not exist anymore
|
||||
#stats_temp_directory = '/tmp'
|
||||
|
||||
# Postgrest
|
||||
# PostgREST - turns your PostgreSQL database directly into a RESTful API
|
||||
# send logs where the collector can access them
|
||||
#log_destination = 'stderr'
|
||||
log_destination = 'stderr'
|
||||
# collect stderr output to log files
|
||||
#logging_collector = on
|
||||
# save logs in pg_log/ under the pg data directory
|
||||
@@ -29,5 +39,19 @@ stats_temp_directory = '/tmp'
|
||||
#log_filename = 'postgresql-%Y-%m-%d.log'
|
||||
# log every kind of SQL statement
|
||||
#log_statement = 'all'
|
||||
# Do not enable log_statement as its log format will not be parsed by pgBadger.
|
||||
|
||||
# pgBadger - a fast PostgreSQL log analysis report
|
||||
# log all the queries that are taking more than 1 second:
|
||||
#log_min_duration_statement = 1000
|
||||
#log_checkpoints = on
|
||||
#log_connections = on
|
||||
#log_disconnections = on
|
||||
#log_lock_waits = on
|
||||
#log_temp_files = 0
|
||||
#log_autovacuum_min_duration = 0
|
||||
#log_error_verbosity = default
|
||||
|
||||
# Francois
|
||||
log_min_messages = NOTICE
|
||||
EOF
|
76
initdb/01signalk.sql
Executable file
76
initdb/01signalk.sql
Executable file
@@ -0,0 +1,76 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- PostgSail => Postgres + TimescaleDB + PostGIS + PostgREST
|
||||
--
|
||||
-- Inspired from:
|
||||
-- https://groups.google.com/g/signalk/c/W2H15ODCic4
|
||||
--
|
||||
-- Description:
|
||||
-- Insert data into table metadata from API using PostgREST
|
||||
-- Insert data into table metrics from API using PostgREST
|
||||
-- TimescaleDB Hypertable to store signalk metrics
|
||||
-- pgsql functions to generate logbook, stays, moorages
|
||||
-- CRON functions to process logbook, stays, moorages
|
||||
-- python functions for geo reverse and send notification via email and/or pushover
|
||||
-- Views statistics, timelapse, monitoring, logs
|
||||
-- Always store time in UTC
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
-- vessels signalk -(POST)-> metadata -> metadata_upsert -(trigger)-> metadata_upsert_trigger_fn (INSERT or UPDATE)
|
||||
-- vessels signalk -(POST)-> metrics -> metrics -(trigger)-> metrics_fn new log,stay,moorage
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
-- Drop database
|
||||
-- % docker exec -i timescaledb-postgis psql -Uusername -W postgres -c "drop database signalk;"
|
||||
|
||||
-- Import Schema
|
||||
-- % cat signalk.sql | docker exec -i timescaledb-postgis psql -Uusername postgres
|
||||
|
||||
-- Export hypertable
|
||||
-- % docker exec -i timescaledb-postgis psql -Uusername -W signalk -c "\COPY (SELECT * FROM api.metrics ORDER BY time ASC) TO '/var/lib/postgresql/data/metrics.csv' DELIMITER ',' CSV"
|
||||
-- Export hypertable to gzip
|
||||
-- # docker exec -i timescaledb-postgis psql -Uusername -W signalk -c "\COPY (SELECT * FROM api.metrics ORDER BY time ASC) TO PROGRAM 'gzip > /var/lib/postgresql/data/metrics.csv.gz' CSV HEADER;"
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
RAISE WARNING '
|
||||
_________.__ .__ ____ __.
|
||||
/ _____/|__| ____ ____ _____ | | | |/ _|
|
||||
\_____ \ | |/ ___\ / \\__ \ | | | <
|
||||
/ \| / /_/ > | \/ __ \| |_| | \
|
||||
/_______ /|__\___ /|___| (____ /____/____|__ \
|
||||
\/ /_____/ \/ \/ \/
|
||||
%', now();
|
||||
END $$;
|
||||
|
||||
select version();
|
||||
|
||||
-- Database
|
||||
CREATE DATABASE signalk;
|
||||
-- Limit connection to 100
|
||||
ALTER DATABASE signalk WITH CONNECTION LIMIT = 100;
|
||||
-- Set timezone to UTC
|
||||
ALTER DATABASE signalk SET TIMEZONE='UTC';
|
||||
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
-- Schema
|
||||
CREATE SCHEMA IF NOT EXISTS api;
|
||||
COMMENT ON SCHEMA api IS 'api schema expose to postgrest';
|
||||
|
||||
-- Revoke default privileges to all public functions
|
||||
ALTER DEFAULT PRIVILEGES REVOKE EXECUTE ON FUNCTIONS FROM PUBLIC;
|
||||
|
||||
-- Extensions
|
||||
CREATE EXTENSION IF NOT EXISTS timescaledb; -- provides time series functions for PostgreSQL
|
||||
-- CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit; -- provides time series functions for PostgreSQL
|
||||
CREATE EXTENSION IF NOT EXISTS postgis; -- adds support for geographic objects to the PostgreSQL object-relational database
|
||||
CREATE EXTENSION IF NOT EXISTS plpgsql; -- PL/pgSQL procedural language
|
||||
CREATE EXTENSION IF NOT EXISTS plpython3u; -- implements PL/Python based on the Python 3 language variant.
|
||||
CREATE EXTENSION IF NOT EXISTS jsonb_plpython3u CASCADE; -- tranform jsonb to python json type.
|
||||
CREATE EXTENSION IF NOT EXISTS pg_stat_statements; -- provides a means for tracking planning and execution statistics of all SQL statements executed
|
||||
CREATE EXTENSION IF NOT EXISTS "moddatetime"; -- provides functions for tracking last modification time
|
||||
|
||||
-- Trust plpython3u language by default
|
||||
UPDATE pg_language SET lanpltrusted = true WHERE lanname = 'plpython3u';
|
506
initdb/02_1_1_signalk_api_tables.sql
Normal file
506
initdb/02_1_1_signalk_api_tables.sql
Normal file
@@ -0,0 +1,506 @@
|
||||
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Tables
|
||||
--
|
||||
---------------------------------------------------------------------------
|
||||
-- Metadata from signalk
|
||||
CREATE TABLE IF NOT EXISTS api.metadata(
|
||||
id SERIAL PRIMARY KEY,
|
||||
name TEXT NULL,
|
||||
mmsi NUMERIC NULL,
|
||||
client_id TEXT NULL,
|
||||
-- vessel_id link auth.vessels with api.metadata
|
||||
vessel_id TEXT NOT NULL UNIQUE,
|
||||
length DOUBLE PRECISION NULL,
|
||||
beam DOUBLE PRECISION NULL,
|
||||
height DOUBLE PRECISION NULL,
|
||||
ship_type NUMERIC NULL,
|
||||
plugin_version TEXT NOT NULL,
|
||||
signalk_version TEXT NOT NULL,
|
||||
time TIMESTAMP WITHOUT TIME ZONE NOT NULL, -- should be rename to last_update !?
|
||||
active BOOLEAN DEFAULT True, -- trigger monitor online/offline
|
||||
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW()
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.metadata
|
||||
IS 'Stores metadata from vessel';
|
||||
COMMENT ON COLUMN api.metadata.active IS 'trigger monitor online/offline';
|
||||
-- Index
|
||||
CREATE INDEX metadata_vessel_id_idx ON api.metadata (vessel_id);
|
||||
--CREATE INDEX metadata_mmsi_idx ON api.metadata (mmsi);
|
||||
-- is unused index ?
|
||||
CREATE INDEX metadata_name_idx ON api.metadata (name);
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Metrics from signalk
|
||||
-- Create vessel status enum
|
||||
CREATE TYPE status AS ENUM ('sailing', 'motoring', 'moored', 'anchored');
|
||||
-- Table api.metrics
|
||||
CREATE TABLE IF NOT EXISTS api.metrics (
|
||||
time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
|
||||
--client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
|
||||
client_id TEXT NULL,
|
||||
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
|
||||
latitude DOUBLE PRECISION NULL,
|
||||
longitude DOUBLE PRECISION NULL,
|
||||
speedOverGround DOUBLE PRECISION NULL,
|
||||
courseOverGroundTrue DOUBLE PRECISION NULL,
|
||||
windSpeedApparent DOUBLE PRECISION NULL,
|
||||
angleSpeedApparent DOUBLE PRECISION NULL,
|
||||
status status NULL,
|
||||
metrics jsonb NULL,
|
||||
--CONSTRAINT valid_client_id CHECK (length(client_id) > 10),
|
||||
CONSTRAINT valid_latitude CHECK (latitude >= -90 and latitude <= 90),
|
||||
CONSTRAINT valid_longitude CHECK (longitude >= -180 and longitude <= 180),
|
||||
PRIMARY KEY (time, vessel_id)
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.metrics
|
||||
IS 'Stores metrics from vessel';
|
||||
COMMENT ON COLUMN api.metrics.latitude IS 'With CONSTRAINT but allow NULL value to be ignored silently by trigger';
|
||||
COMMENT ON COLUMN api.metrics.longitude IS 'With CONSTRAINT but allow NULL value to be ignored silently by trigger';
|
||||
|
||||
-- Index
|
||||
CREATE INDEX ON api.metrics (vessel_id, time DESC);
|
||||
CREATE INDEX ON api.metrics (status, time DESC);
|
||||
-- json index??
|
||||
CREATE INDEX ON api.metrics using GIN (metrics);
|
||||
-- timescaledb hypertable
|
||||
SELECT create_hypertable('api.metrics', 'time', chunk_time_interval => INTERVAL '7 day');
|
||||
-- timescaledb hypertable with space partitions
|
||||
-- ERROR: new row for relation "_hyper_1_2_chunk" violates check constraint "constraint_4"
|
||||
-- ((_timescaledb_internal.get_partition_hash(vessel_id) < 1073741823))
|
||||
--SELECT create_hypertable('api.metrics', 'time', 'vessel_id',
|
||||
-- number_partitions => 2,
|
||||
-- chunk_time_interval => INTERVAL '7 day',
|
||||
-- if_not_exists => true);
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Logbook
|
||||
-- todo add consumption fuel?
|
||||
-- todo add engine hour?
|
||||
-- todo add geom object http://epsg.io/4326 EPSG:4326 Unit: degres
|
||||
-- todo add geog object http://epsg.io/3857 EPSG:3857 Unit: meters
|
||||
-- https://postgis.net/workshops/postgis-intro/geography.html#using-geography
|
||||
-- https://medium.com/coord/postgis-performance-showdown-geometry-vs-geography-ec99967da4f0
|
||||
-- virtual logbook by boat by client_id impossible?
|
||||
-- https://www.postgresql.org/docs/current/ddl-partitioning.html
|
||||
-- Issue:
|
||||
-- https://www.reddit.com/r/PostgreSQL/comments/di5mbr/postgresql_12_foreign_keys_and_partitioned_tables/f3tsoop/
|
||||
-- Check unused index
|
||||
|
||||
CREATE TABLE IF NOT EXISTS api.logbook(
|
||||
id SERIAL PRIMARY KEY,
|
||||
--client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
|
||||
--client_id VARCHAR(255) NULL,
|
||||
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
|
||||
active BOOLEAN DEFAULT false,
|
||||
name VARCHAR(255),
|
||||
_from VARCHAR(255),
|
||||
_from_lat DOUBLE PRECISION NULL,
|
||||
_from_lng DOUBLE PRECISION NULL,
|
||||
_to VARCHAR(255),
|
||||
_to_lat DOUBLE PRECISION NULL,
|
||||
_to_lng DOUBLE PRECISION NULL,
|
||||
--track_geom Geometry(LINESTRING)
|
||||
track_geom geometry(LINESTRING,4326) NULL,
|
||||
track_geog geography(LINESTRING) NULL,
|
||||
track_geojson JSON NULL,
|
||||
track_gpx XML NULL,
|
||||
_from_time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
|
||||
_to_time TIMESTAMP WITHOUT TIME ZONE NULL,
|
||||
distance NUMERIC, -- meters?
|
||||
duration INTERVAL, -- duration in days and hours?
|
||||
avg_speed DOUBLE PRECISION NULL,
|
||||
max_speed DOUBLE PRECISION NULL,
|
||||
max_wind_speed DOUBLE PRECISION NULL,
|
||||
notes TEXT NULL,
|
||||
extra JSONB NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.logbook
|
||||
IS 'Stores generated logbook';
|
||||
COMMENT ON COLUMN api.logbook.distance IS 'in NM';
|
||||
|
||||
-- Index todo!
|
||||
CREATE INDEX logbook_vessel_id_idx ON api.logbook (vessel_id);
|
||||
CREATE INDEX ON api.logbook USING GIST ( track_geom );
|
||||
COMMENT ON COLUMN api.logbook.track_geom IS 'postgis geometry type EPSG:4326 Unit: degres';
|
||||
CREATE INDEX ON api.logbook USING GIST ( track_geog );
|
||||
COMMENT ON COLUMN api.logbook.track_geog IS 'postgis geography type default SRID 4326 Unit: degres';
|
||||
-- Otherwise -- ERROR: Only lon/lat coordinate systems are supported in geography.
|
||||
COMMENT ON COLUMN api.logbook.track_geojson IS 'store the geojson track metrics data, can not depend api.metrics table, should be generate from linetring to save disk space?';
|
||||
COMMENT ON COLUMN api.logbook.track_gpx IS 'store the gpx track metrics data, can not depend api.metrics table, should be generate from linetring to save disk space?';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Stays
|
||||
-- virtual logbook by boat?
|
||||
CREATE TABLE IF NOT EXISTS api.stays(
|
||||
id SERIAL PRIMARY KEY,
|
||||
--client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
|
||||
--client_id VARCHAR(255) NULL,
|
||||
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
|
||||
active BOOLEAN DEFAULT false,
|
||||
name VARCHAR(255),
|
||||
latitude DOUBLE PRECISION NULL,
|
||||
longitude DOUBLE PRECISION NULL,
|
||||
geog GEOGRAPHY(POINT) NULL,
|
||||
arrived TIMESTAMP WITHOUT TIME ZONE NOT NULL,
|
||||
departed TIMESTAMP WITHOUT TIME ZONE,
|
||||
duration INTERVAL, -- duration in days and hours?
|
||||
stay_code INT DEFAULT 1, -- REFERENCES api.stays_at(stay_code),
|
||||
notes TEXT NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.stays
|
||||
IS 'Stores generated stays';
|
||||
|
||||
-- Index
|
||||
CREATE INDEX stays_vessel_id_idx ON api.stays (vessel_id);
|
||||
CREATE INDEX ON api.stays USING GIST ( geog );
|
||||
COMMENT ON COLUMN api.stays.geog IS 'postgis geography type default SRID 4326 Unit: degres';
|
||||
-- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography.
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Moorages
|
||||
-- virtual logbook by boat?
|
||||
CREATE TABLE IF NOT EXISTS api.moorages(
|
||||
id SERIAL PRIMARY KEY,
|
||||
--client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
|
||||
--client_id VARCHAR(255) NULL,
|
||||
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
|
||||
name TEXT,
|
||||
country TEXT, -- todo need to update reverse_geocode_py_fn
|
||||
stay_id INT NOT NULL, -- needed?
|
||||
stay_code INT DEFAULT 1, -- needed? REFERENCES api.stays_at(stay_code)
|
||||
stay_duration INTERVAL NULL,
|
||||
reference_count INT DEFAULT 1,
|
||||
latitude DOUBLE PRECISION NULL,
|
||||
longitude DOUBLE PRECISION NULL,
|
||||
geog GEOGRAPHY(POINT) NULL,
|
||||
home_flag BOOLEAN DEFAULT false,
|
||||
notes TEXT NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.moorages
|
||||
IS 'Stores generated moorages';
|
||||
|
||||
-- Index
|
||||
CREATE INDEX moorages_vessel_id_idx ON api.moorages (vessel_id);
|
||||
CREATE INDEX ON api.moorages USING GIST ( geog );
|
||||
COMMENT ON COLUMN api.moorages.geog IS 'postgis geography type default SRID 4326 Unit: degres';
|
||||
-- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography.
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Stay Type
|
||||
CREATE TABLE IF NOT EXISTS api.stays_at(
|
||||
stay_code INTEGER NOT NULL,
|
||||
description TEXT NOT NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE api.stays_at IS 'Stay Type';
|
||||
-- Insert default possible values
|
||||
INSERT INTO api.stays_at(stay_code, description) VALUES
|
||||
(1, 'Unknow'),
|
||||
(2, 'Anchor'),
|
||||
(3, 'Mooring Buoy'),
|
||||
(4, 'Dock');
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Trigger Functions Metadata table
|
||||
--
|
||||
-- UPSERT - Insert vs Update for Metadata
|
||||
DROP FUNCTION IF EXISTS metadata_upsert_trigger_fn;
|
||||
CREATE FUNCTION metadata_upsert_trigger_fn() RETURNS trigger AS $metadata_upsert$
|
||||
DECLARE
|
||||
metadata_id integer;
|
||||
metadata_active boolean;
|
||||
BEGIN
|
||||
-- Set client_id to new value to allow RLS
|
||||
--PERFORM set_config('vessel.client_id', NEW.client_id, false);
|
||||
-- UPSERT - Insert vs Update for Metadata
|
||||
--RAISE NOTICE 'metadata_upsert_trigger_fn';
|
||||
--PERFORM set_config('vessel.id', NEW.vessel_id, true);
|
||||
--RAISE WARNING 'metadata_upsert_trigger_fn [%] [%]', current_setting('vessel.id', true), NEW;
|
||||
SELECT m.id,m.active INTO metadata_id, metadata_active
|
||||
FROM api.metadata m
|
||||
WHERE m.vessel_id IS NOT NULL AND m.vessel_id = current_setting('vessel.id', true);
|
||||
--RAISE NOTICE 'metadata_id is [%]', metadata_id;
|
||||
IF metadata_id IS NOT NULL THEN
|
||||
-- send notification if boat is back online
|
||||
IF metadata_active is False THEN
|
||||
-- Add monitor online entry to process queue for later notification
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('monitoring_online', metadata_id, now(), current_setting('vessel.id', true));
|
||||
END IF;
|
||||
-- Update vessel metadata
|
||||
UPDATE api.metadata
|
||||
SET
|
||||
name = NEW.name,
|
||||
mmsi = NEW.mmsi,
|
||||
client_id = NEW.client_id,
|
||||
length = NEW.length,
|
||||
beam = NEW.beam,
|
||||
height = NEW.height,
|
||||
ship_type = NEW.ship_type,
|
||||
plugin_version = NEW.plugin_version,
|
||||
signalk_version = NEW.signalk_version,
|
||||
time = NEW.time,
|
||||
active = true
|
||||
WHERE id = metadata_id;
|
||||
RETURN NULL; -- Ignore insert
|
||||
ELSE
|
||||
IF NEW.vessel_id IS NULL THEN
|
||||
-- set vessel_id from jwt if not present in INSERT query
|
||||
NEW.vessel_id := current_setting('vessel.id');
|
||||
END IF;
|
||||
-- Insert new vessel metadata and
|
||||
RETURN NEW; -- Insert new vessel metadata
|
||||
END IF;
|
||||
END;
|
||||
$metadata_upsert$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.metadata_upsert_trigger_fn
|
||||
IS 'process metadata from vessel, upsert';
|
||||
|
||||
CREATE TRIGGER metadata_moddatetime
|
||||
BEFORE UPDATE ON api.metadata
|
||||
FOR EACH ROW
|
||||
EXECUTE PROCEDURE moddatetime (updated_at);
|
||||
-- Description
|
||||
COMMENT ON TRIGGER metadata_moddatetime
|
||||
ON api.metadata
|
||||
IS 'Automatic update of updated_at on table modification';
|
||||
|
||||
-- FUNCTION Metadata notification for new vessel after insert
|
||||
DROP FUNCTION IF EXISTS metadata_notification_trigger_fn;
|
||||
CREATE FUNCTION metadata_notification_trigger_fn() RETURNS trigger AS $metadata_notification$
|
||||
DECLARE
|
||||
BEGIN
|
||||
RAISE NOTICE 'metadata_notification_trigger_fn [%]', NEW;
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('monitoring_online', NEW.id, now(), NEW.vessel_id);
|
||||
RETURN NULL;
|
||||
END;
|
||||
$metadata_notification$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.metadata_notification_trigger_fn
|
||||
IS 'process metadata notification from vessel, monitoring_online';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Trigger metadata table
|
||||
--
|
||||
-- Metadata trigger BEFORE INSERT
|
||||
CREATE TRIGGER metadata_upsert_trigger BEFORE INSERT ON api.metadata
|
||||
FOR EACH ROW EXECUTE FUNCTION metadata_upsert_trigger_fn();
|
||||
-- Description
|
||||
COMMENT ON TRIGGER
|
||||
metadata_upsert_trigger ON api.metadata
|
||||
IS 'BEFORE INSERT ON api.metadata run function metadata_upsert_trigger_fn';
|
||||
|
||||
-- Metadata trigger AFTER INSERT
|
||||
CREATE TRIGGER metadata_notification_trigger AFTER INSERT ON api.metadata
|
||||
FOR EACH ROW EXECUTE FUNCTION metadata_notification_trigger_fn();
|
||||
-- Description
|
||||
COMMENT ON TRIGGER
|
||||
metadata_notification_trigger ON api.metadata
|
||||
IS 'AFTER INSERT ON api.metadata run function metadata_update_trigger_fn for notification on new vessel';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Trigger Functions metrics table
|
||||
--
|
||||
-- Create a logbook or stay entry base on the vessel state, eg: navigation.state
|
||||
-- https://github.com/meri-imperiumi/signalk-autostate
|
||||
|
||||
DROP FUNCTION IF EXISTS metrics_trigger_fn;
|
||||
CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
|
||||
DECLARE
|
||||
previous_status varchar;
|
||||
previous_time TIMESTAMP WITHOUT TIME ZONE;
|
||||
stay_code integer;
|
||||
logbook_id integer;
|
||||
stay_id integer;
|
||||
valid_status BOOLEAN;
|
||||
_vessel_id TEXT;
|
||||
BEGIN
|
||||
--RAISE NOTICE 'metrics_trigger_fn';
|
||||
--RAISE WARNING 'metrics_trigger_fn [%] [%]', current_setting('vessel.id', true), NEW;
|
||||
-- Ensure vessel.id to new value to allow RLS
|
||||
IF NEW.vessel_id IS NULL THEN
|
||||
-- set vessel_id from jwt if not present in INSERT query
|
||||
NEW.vessel_id := current_setting('vessel.id');
|
||||
END IF;
|
||||
-- Boat metadata are check using api.metrics REFERENCES to api.metadata
|
||||
-- Fetch the latest entry to compare status against the new status to be insert
|
||||
SELECT coalesce(m.status, 'moored'), m.time INTO previous_status, previous_time
|
||||
FROM api.metrics m
|
||||
WHERE m.vessel_id IS NOT NULL
|
||||
AND m.vessel_id = current_setting('vessel.id', true)
|
||||
ORDER BY m.time DESC LIMIT 1;
|
||||
--RAISE NOTICE 'Metrics Status, New:[%] Previous:[%]', NEW.status, previous_status;
|
||||
IF previous_time = NEW.time THEN
|
||||
-- Ignore entry if same time
|
||||
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], duplicate time [%] = [%]', NEW.vessel_id, previous_time, NEW.time;
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
IF previous_time > NEW.time THEN
|
||||
-- Ignore entry if new time is later than previous time
|
||||
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], new time is older [%] > [%]', NEW.vessel_id, previous_time, NEW.time;
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
-- Check if latitude or longitude are null
|
||||
IF NEW.latitude IS NULL OR NEW.longitude IS NULL THEN
|
||||
-- Ignore entry if null latitude,longitude
|
||||
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], null latitude,longitude [%] [%]', NEW.vessel_id, NEW.latitude, NEW.longitude;
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
-- Check if status is null
|
||||
IF NEW.status IS NULL THEN
|
||||
RAISE WARNING 'Metrics Unknown NEW.status, vessel_id [%], null status, set to default moored from [%]', NEW.vessel_id, NEW.status;
|
||||
NEW.status := 'moored';
|
||||
END IF;
|
||||
IF previous_status IS NULL THEN
|
||||
IF NEW.status = 'anchored' THEN
|
||||
RAISE WARNING 'Metrics Unknown previous_status from vessel_id [%], [%] set to default current status [%]', NEW.vessel_id, previous_status, NEW.status;
|
||||
previous_status := NEW.status;
|
||||
ELSE
|
||||
RAISE WARNING 'Metrics Unknown previous_status from vessel_id [%], [%] set to default status moored vs [%]', NEW.vessel_id, previous_status, NEW.status;
|
||||
previous_status := 'moored';
|
||||
END IF;
|
||||
-- Add new stay as no previous entry exist
|
||||
INSERT INTO api.stays
|
||||
(vessel_id, active, arrived, latitude, longitude, stay_code)
|
||||
VALUES (current_setting('vessel.id', true), true, NEW.time, NEW.latitude, NEW.longitude, 1)
|
||||
RETURNING id INTO stay_id;
|
||||
-- Add stay entry to process queue for further processing
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('new_stay', stay_id, now(), current_setting('vessel.id', true));
|
||||
RAISE WARNING 'Metrics Insert first stay as no previous metrics exist, stay_id %', stay_id;
|
||||
END IF;
|
||||
-- Check if status is valid enum
|
||||
SELECT NEW.status::name = any(enum_range(null::status)::name[]) INTO valid_status;
|
||||
IF valid_status IS False THEN
|
||||
-- Ignore entry if status is invalid
|
||||
RAISE WARNING 'Metrics Ignoring metric, invalid status [%]', NEW.status;
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
|
||||
-- Check the state and if any previous/current entry
|
||||
-- If change of state and new status is sailing or motoring
|
||||
IF previous_status::TEXT <> NEW.status::TEXT AND
|
||||
( (NEW.status::TEXT = 'sailing' AND previous_status::TEXT <> 'motoring')
|
||||
OR (NEW.status::TEXT = 'motoring' AND previous_status::TEXT <> 'sailing') ) THEN
|
||||
RAISE WARNING 'Metrics Update status, try new logbook, New:[%] Previous:[%]', NEW.status, previous_status;
|
||||
-- Start new log
|
||||
logbook_id := public.trip_in_progress_fn(current_setting('vessel.id', true)::TEXT);
|
||||
IF logbook_id IS NULL THEN
|
||||
INSERT INTO api.logbook
|
||||
(vessel_id, active, _from_time, _from_lat, _from_lng)
|
||||
VALUES (current_setting('vessel.id', true), true, NEW.time, NEW.latitude, NEW.longitude)
|
||||
RETURNING id INTO logbook_id;
|
||||
RAISE WARNING 'Metrics Insert new logbook, logbook_id %', logbook_id;
|
||||
ELSE
|
||||
UPDATE api.logbook
|
||||
SET
|
||||
active = false,
|
||||
_to_time = NEW.time,
|
||||
_to_lat = NEW.latitude,
|
||||
_to_lng = NEW.longitude
|
||||
WHERE id = logbook_id;
|
||||
RAISE WARNING 'Metrics Existing Logbook logbook_id [%] [%] [%]', logbook_id, NEW.status, NEW.time;
|
||||
END IF;
|
||||
|
||||
-- End current stay
|
||||
stay_id := public.stay_in_progress_fn(current_setting('vessel.id', true)::TEXT);
|
||||
IF stay_id IS NOT NULL THEN
|
||||
UPDATE api.stays
|
||||
SET
|
||||
active = false,
|
||||
departed = NEW.time
|
||||
WHERE id = stay_id;
|
||||
RAISE WARNING 'Metrics Updating Stay end current stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
|
||||
-- Add moorage entry to process queue for further processing
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('new_moorage', stay_id, now(), current_setting('vessel.id', true));
|
||||
ELSE
|
||||
RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time;
|
||||
END IF;
|
||||
|
||||
-- If change of state and new status is moored or anchored
|
||||
ELSIF previous_status::TEXT <> NEW.status::TEXT AND
|
||||
( (NEW.status::TEXT = 'moored' AND previous_status::TEXT <> 'anchored')
|
||||
OR (NEW.status::TEXT = 'anchored' AND previous_status::TEXT <> 'moored') ) THEN
|
||||
-- Start new stays
|
||||
RAISE WARNING 'Metrics Update status, try new stay, New:[%] Previous:[%]', NEW.status, previous_status;
|
||||
stay_id := public.stay_in_progress_fn(current_setting('vessel.id', true)::TEXT);
|
||||
IF stay_id IS NULL THEN
|
||||
RAISE WARNING 'Metrics Inserting new stay [%]', NEW.status;
|
||||
-- If metric status is anchored set stay_code accordingly
|
||||
stay_code = 1;
|
||||
IF NEW.status = 'anchored' THEN
|
||||
stay_code = 2;
|
||||
END IF;
|
||||
-- Add new stay
|
||||
INSERT INTO api.stays
|
||||
(vessel_id, active, arrived, latitude, longitude, stay_code)
|
||||
VALUES (current_setting('vessel.id', true), true, NEW.time, NEW.latitude, NEW.longitude, stay_code)
|
||||
RETURNING id INTO stay_id;
|
||||
-- Add stay entry to process queue for further processing
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('new_stay', stay_id, now(), current_setting('vessel.id', true));
|
||||
ELSE
|
||||
RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time;
|
||||
UPDATE api.stays
|
||||
SET
|
||||
active = false,
|
||||
departed = NEW.time
|
||||
WHERE id = stay_id;
|
||||
END IF;
|
||||
|
||||
-- End current log/trip
|
||||
-- Fetch logbook_id by vessel_id
|
||||
logbook_id := public.trip_in_progress_fn(current_setting('vessel.id', true)::TEXT);
|
||||
IF logbook_id IS NOT NULL THEN
|
||||
-- todo check on time start vs end
|
||||
RAISE WARNING 'Metrics Updating logbook status [%] [%] [%]', logbook_id, NEW.status, NEW.time;
|
||||
UPDATE api.logbook
|
||||
SET
|
||||
active = false,
|
||||
_to_time = NEW.time,
|
||||
_to_lat = NEW.latitude,
|
||||
_to_lng = NEW.longitude
|
||||
WHERE id = logbook_id;
|
||||
-- Add logbook entry to process queue for later processing
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUEs ('new_logbook', logbook_id, now(), current_setting('vessel.id', true));
|
||||
ELSE
|
||||
RAISE WARNING 'Metrics Invalid logbook_id [%] [%]', logbook_id, NEW.time;
|
||||
END IF;
|
||||
END IF;
|
||||
RETURN NEW; -- Finally insert the actual new metric
|
||||
END;
|
||||
$metrics$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.metrics_trigger_fn
|
||||
IS 'process metrics from vessel, generate new_logbook and new_stay.';
|
||||
|
||||
--
|
||||
-- Triggers logbook update on metrics insert
|
||||
CREATE TRIGGER metrics_trigger BEFORE INSERT ON api.metrics
|
||||
FOR EACH ROW EXECUTE FUNCTION metrics_trigger_fn();
|
||||
-- Description
|
||||
COMMENT ON TRIGGER
|
||||
metrics_trigger ON api.metrics
|
||||
IS 'BEFORE INSERT ON api.metrics run function metrics_trigger_fn';
|
381
initdb/02_1_2_signalk_api_functions.sql
Normal file
381
initdb/02_1_2_signalk_api_functions.sql
Normal file
@@ -0,0 +1,381 @@
|
||||
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- API helper functions
|
||||
--
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Functions API schema
|
||||
-- Timelapse - replay logs
-- Build a GeoJSON FeatureCollection from logbook tracks, filtered either
-- by a log-id range, by a date range, or unfiltered (all logs).
DROP FUNCTION IF EXISTS api.timelapse_fn;
CREATE OR REPLACE FUNCTION api.timelapse_fn(
    IN start_log INTEGER DEFAULT NULL,
    IN end_log INTEGER DEFAULT NULL,
    IN start_date TEXT DEFAULT NULL,
    IN end_date TEXT DEFAULT NULL,
    OUT geojson JSON) RETURNS JSON AS $timelapse$
    DECLARE
        _geojson jsonb;
    BEGIN
        -- TODO using jsonb pgsql function instead of python
        IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
            -- Filter by logbook id range
            SELECT jsonb_agg(track_geojson->'features') INTO _geojson
                FROM api.logbook
                WHERE id >= start_log
                    AND id <= end_log;
            --raise WARNING 'by log _geojson %' , _geojson;
        ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
            -- Filter by date range; end_date is inclusive up to 23:59.
            -- Bug fix: lower bound previously compared against start_log
            -- instead of start_date.
            SELECT jsonb_agg(track_geojson->'features') INTO _geojson
                FROM api.logbook
                WHERE _from_time >= start_date::TIMESTAMP WITHOUT TIME ZONE
                    AND _to_time <= end_date::TIMESTAMP WITHOUT TIME ZONE + interval '23 hours 59 minutes';
            --raise WARNING 'by date _geojson %' , _geojson;
        ELSE
            -- No filter: aggregate every log
            SELECT jsonb_agg(track_geojson->'features') INTO _geojson
                FROM api.logbook;
            --raise WARNING 'all result _geojson %' , _geojson;
        END IF;
        -- Return a GeoJSON filter on Point
        -- result _geojson [null, null]
        --raise WARNING 'result _geojson %' , _geojson;
        SELECT json_build_object(
            'type', 'FeatureCollection',
            'features', public.geojson_py_fn(_geojson, 'LineString'::TEXT) ) INTO geojson;
    END;
$timelapse$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.timelapse_fn
    IS 'Export to geojson feature point with Time and courseOverGroundTrue properties';
|
||||
|
||||
-- export_logbook_geojson_fn
-- Return the stored GeoJSON track for a single logbook entry.
-- validate with geojson.io
DROP FUNCTION IF EXISTS api.export_logbook_geojson_fn;
CREATE FUNCTION api.export_logbook_geojson_fn(IN _id integer, OUT geojson JSON) RETURNS JSON AS $export_logbook_geojson$
    DECLARE
        logbook_rec record;
    BEGIN
        -- Reject a missing or non-positive id
        IF _id IS NULL OR _id < 1 THEN
            RAISE WARNING '-> export_logbook_geojson_fn invalid input %', _id;
            RETURN;
        END IF;
        -- Fetch the log entry
        SELECT * INTO logbook_rec
            FROM api.logbook WHERE id = _id;
        -- No vessel_id means the lookup found nothing
        IF logbook_rec.vessel_id IS NULL THEN
            RAISE WARNING '-> export_logbook_geojson_fn invalid logbook %', _id;
            RETURN;
        END IF;
        geojson := logbook_rec.track_geojson;
    END;
$export_logbook_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.export_logbook_geojson_fn
    IS 'Export a log entry to geojson feature linestring and multipoint';
|
||||
|
||||
-- Generate GPX XML file output
-- https://opencpn.org/OpenCPN/info/gpxvalidation.html
--
DROP FUNCTION IF EXISTS api.export_logbook_gpx_fn;
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_fn(IN _id INTEGER) RETURNS pg_catalog.xml
AS $export_logbook_gpx$
    DECLARE
        log_rec record;
    BEGIN
        -- Reject a missing or non-positive id.
        -- Bug fix: the warning previously named export_logbook_geojson_fn.
        IF _id IS NULL OR _id < 1 THEN
            RAISE WARNING '-> export_logbook_gpx_fn invalid input %', _id;
            RETURN '';
        END IF;
        -- Gather log details _from_time and _to_time
        SELECT * INTO log_rec
            FROM
                api.logbook l
            WHERE l.id = _id;
        -- Ensure the query is successful
        IF log_rec.vessel_id IS NULL THEN
            RAISE WARNING '-> export_logbook_gpx_fn invalid logbook %', _id;
            RETURN '';
        END IF;
        -- Generate XML
        RETURN xmlelement(name gpx,
                    xmlattributes( '1.1' as version,
                                    'PostgSAIL' as creator,
                                    'http://www.topografix.com/GPX/1/1' as xmlns,
                                    'http://www.opencpn.org' as "xmlns:opencpn",
                                    'https://iot.openplotter.cloud' as "xmlns:postgsail",
                                    'http://www.w3.org/2001/XMLSchema-instance' as "xmlns:xsi",
                                    'http://www.garmin.com/xmlschemas/GpxExtensions/v3' as "xmlns:gpxx",
                                    'http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd http://www.garmin.com/xmlschemas/GpxExtensions/v3 http://www8.garmin.com/xmlschemas/GpxExtensionsv3.xsd' as "xsi:schemaLocation"),
                xmlelement(name trk,
                    xmlelement(name name, log_rec.name),
                    xmlelement(name desc, log_rec.notes),
                    xmlelement(name link, xmlattributes(concat('https://iot.openplotter.cloud/log/', log_rec.id) as href),
                                            xmlelement(name text, log_rec.name)),
                    -- Bug fix: postgsail:log_id was hard-coded to 1; emit the actual id.
                    xmlelement(name extensions, xmlelement(name "postgsail:log_id", log_rec.id),
                                                xmlelement(name "postgsail:link", concat('https://iot.openplotter.cloud/log/', log_rec.id)),
                                                xmlelement(name "opencpn:guid", uuid_generate_v4()),
                                                xmlelement(name "opencpn:viz", '1'),
                                                xmlelement(name "opencpn:start", log_rec._from_time),
                                                xmlelement(name "opencpn:end", log_rec._to_time)
                                                ),
                    -- Order track points chronologically with an aggregate-level
                    -- ORDER BY inside xmlagg; a statement-level ORDER BY m.time
                    -- fails here with a GROUP BY error.
                    xmlelement(name trkseg, xmlagg(
                                                xmlelement(name trkpt,
                                                    xmlattributes(latitude as lat, longitude as lon),
                                                        xmlelement(name time, time)
                                                ) ORDER BY m.time ASC))))::pg_catalog.xml
            FROM api.metrics m
            WHERE m.latitude IS NOT NULL
                AND m.longitude IS NOT NULL
                AND m.time >= log_rec._from_time::TIMESTAMP WITHOUT TIME ZONE
                AND m.time <= log_rec._to_time::TIMESTAMP WITHOUT TIME ZONE
                AND vessel_id = log_rec.vessel_id;
    END;
$export_logbook_gpx$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.export_logbook_gpx_fn
    IS 'Export a log entry to GPX XML format';
|
||||
|
||||
-- Find all log from and to moorage geopoint within 100m
DROP FUNCTION IF EXISTS api.find_log_from_moorage_fn;
CREATE OR REPLACE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER, OUT geojson JSON) RETURNS JSON AS $find_log_from_moorage$
    DECLARE
        moorage_rec record;
        _geojson jsonb;
    BEGIN
        -- Reject a missing or non-positive id
        IF _id IS NULL OR _id < 1 THEN
            RAISE WARNING '-> find_log_from_moorage_fn invalid input %', _id;
            RETURN;
        END IF;
        -- Gather moorage details
        SELECT * INTO moorage_rec
            FROM api.moorages m
            WHERE m.id = _id;
        -- Collect tracks for logs that START near this moorage.
        -- NOTE(review): the description says 100m but the radius passed to
        -- ST_DWithin is 1000 (meters on geography) — confirm intended radius.
        SELECT jsonb_agg(l.track_geojson->'features') INTO _geojson
            FROM api.logbook l
            WHERE ST_DWithin(
                    Geography(ST_MakePoint(l._from_lng, l._from_lat)),
                    moorage_rec.geog,
                    1000 -- in meters ?
                );
        -- Return a GeoJSON filter on LineString
        SELECT json_build_object(
            'type', 'FeatureCollection',
            'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson;
    END;
$find_log_from_moorage$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.find_log_from_moorage_fn
    IS 'Find all log from moorage geopoint within 100m';
|
||||
|
||||
DROP FUNCTION IF EXISTS api.find_log_to_moorage_fn;
-- Find all logs that END near the given moorage and return their tracks
-- as a GeoJSON FeatureCollection.
CREATE OR REPLACE FUNCTION api.find_log_to_moorage_fn(IN _id INTEGER, OUT geojson JSON) RETURNS JSON AS $find_log_to_moorage$
    DECLARE
        moorage_rec record;
        _geojson jsonb;
    BEGIN
        -- Reject a missing or non-positive id.
        -- Bug fix: the warning previously named find_log_from_moorage_fn.
        IF _id IS NULL OR _id < 1 THEN
            RAISE WARNING '-> find_log_to_moorage_fn invalid input %', _id;
            RETURN;
        END IF;
        -- Gather moorage details
        SELECT * INTO moorage_rec
            FROM api.moorages m
            WHERE m.id = _id;
        -- Find all log from and to moorage geopoint within 100m
        -- NOTE(review): radius passed to ST_DWithin is 1000 — confirm vs the
        -- documented 100m.
        SELECT jsonb_agg(l.track_geojson->'features') INTO _geojson
            FROM api.logbook l
            WHERE ST_DWithin(
                    Geography(ST_MakePoint(l._to_lng, l._to_lat)),
                    moorage_rec.geog,
                    1000 -- in meters ?
                );
        -- Return a GeoJSON filter on LineString
        SELECT json_build_object(
            'type', 'FeatureCollection',
            'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson;
    END;
$find_log_to_moorage$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.find_log_to_moorage_fn
    IS 'Find all log to moorage geopoint within 100m';
|
||||
|
||||
-- Find all stay within 100m of moorage geopoint
DROP FUNCTION IF EXISTS api.find_stay_from_moorage_fn;
CREATE OR REPLACE FUNCTION api.find_stay_from_moorage_fn(IN _id INTEGER) RETURNS void AS $find_stay_from_moorage$
    DECLARE
        moorage_rec record;
    BEGIN
        -- Gather moorage details
        SELECT * INTO moorage_rec
            FROM api.moorages m
            WHERE m.id = _id;
        -- find all log from and to moorage geopoint within 100m
        -- Bug fix: a bare SELECT without a destination raises
        -- "query has no destination for result data" in PL/pgSQL.
        -- This function RETURNS void, so run the query with PERFORM and
        -- discard the rows (unused stay_rec declaration removed).
        --RETURN QUERY
        PERFORM s.id,s.arrived,s.departed,s.duration,sa.description
            FROM api.stays s, api.stays_at sa
            WHERE ST_DWithin(
                    s.geog,
                    moorage_rec.geog,
                    100 -- in meters ?
                )
                AND departed IS NOT NULL
                AND s.name IS NOT NULL
                AND s.stay_code = sa.stay_code
            ORDER BY s.arrived DESC;
    END;
$find_stay_from_moorage$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.find_stay_from_moorage_fn
    IS 'Find all stay within 100m of moorage geopoint';
|
||||
|
||||
-- trip_in_progress_fn
-- Return the id of the active logbook entry for a vessel, or NULL when
-- no trip is in progress.
DROP FUNCTION IF EXISTS public.trip_in_progress_fn;
CREATE FUNCTION public.trip_in_progress_fn(IN _vessel_id TEXT) RETURNS INT AS $trip_in_progress$
    DECLARE
        logbook_id INT := NULL;
    BEGIN
        -- At most one active trip is expected; LIMIT 1 guards against more.
        SELECT id INTO logbook_id
            FROM api.logbook l
            WHERE l.vessel_id IS NOT NULL
                AND l.vessel_id = _vessel_id
                AND active IS true
            LIMIT 1;
        RETURN logbook_id;
    END;
$trip_in_progress$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.trip_in_progress_fn
    IS 'trip_in_progress';
|
||||
|
||||
-- stay_in_progress_fn
-- Return the id of the active stay for a vessel, or NULL when no stay
-- is in progress.
DROP FUNCTION IF EXISTS public.stay_in_progress_fn;
CREATE FUNCTION public.stay_in_progress_fn(IN _vessel_id TEXT) RETURNS INT AS $stay_in_progress$
    DECLARE
        stay_id INT := NULL;
    BEGIN
        -- At most one active stay is expected; LIMIT 1 guards against more.
        SELECT id INTO stay_id
            FROM api.stays s
            WHERE s.vessel_id IS NOT NULL
                AND s.vessel_id = _vessel_id
                AND active IS true
            LIMIT 1;
        RETURN stay_id;
    END;
$stay_in_progress$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.stay_in_progress_fn
    IS 'stay_in_progress';
|
||||
|
||||
-- logs_by_month_fn
-- Count logbook entries per calendar month ('01'..'12') for web charts.
DROP FUNCTION IF EXISTS api.logs_by_month_fn;
CREATE FUNCTION api.logs_by_month_fn(OUT charts JSONB) RETURNS JSONB AS $logs_by_month$
    DECLARE
        data JSONB;
    BEGIN
        -- Query logs by month
        SELECT json_object_agg(month,count) INTO data
            FROM (
                    SELECT
                        to_char(date_trunc('month', _from_time), 'MM') as month,
                        count(*) as count
                        FROM api.logbook
                        GROUP BY month
                        ORDER BY month
                ) AS t;
        -- Merge jsonb to get all 12 months.
        -- Bug fix: COALESCE the aggregate — with an empty logbook, data is
        -- NULL and jsonb || NULL would null out the whole result.
        SELECT '{"01": 0, "02": 0, "03": 0, "04": 0, "05": 0, "06": 0, "07": 0, "08": 0, "09": 0, "10": 0, "11": 0,"12": 0}'::jsonb ||
            COALESCE(data, '{}'::jsonb) INTO charts;
    END;
$logs_by_month$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.logs_by_month_fn
    IS 'logbook by month for web charts';
|
||||
|
||||
-- moorage_geojson_fn
-- Export every moorage (id, name, total stay in days, geography) as a
-- GeoJSON FeatureCollection.
DROP FUNCTION IF EXISTS api.export_moorages_geojson_fn;
CREATE FUNCTION api.export_moorages_geojson_fn(OUT geojson JSONB) RETURNS JSONB AS $export_moorages_geojson$
    DECLARE
    BEGIN
        SELECT json_build_object(
            'type', 'FeatureCollection',
            'features',
                ( SELECT
                    json_agg(ST_AsGeoJSON(m.*)::JSON) as moorages_geojson
                    FROM
                    ( SELECT
                        id,name,
                        EXTRACT(DAY FROM justify_hours ( stay_duration )) AS Total_Stay,
                        geog
                        FROM api.moorages
                    ) AS m
                )
            ) INTO geojson;
    END;
$export_moorages_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.export_moorages_geojson_fn
    IS 'Export moorages as geojson';
|
||||
|
||||
DROP FUNCTION IF EXISTS api.export_moorages_gpx_fn;
-- Export every moorage as a GPX waypoint document (one <wpt> per moorage).
CREATE FUNCTION api.export_moorages_gpx_fn() RETURNS pg_catalog.xml AS $export_moorages_gpx$
    DECLARE
    BEGIN
        -- Generate XML
        RETURN xmlelement(name gpx,
                    xmlattributes( '1.1' as version,
                                    'PostgSAIL' as creator,
                                    'http://www.topografix.com/GPX/1/1' as xmlns,
                                    'http://www.opencpn.org' as "xmlns:opencpn",
                                    'https://iot.openplotter.cloud' as "xmlns:postgsail",
                                    'http://www.w3.org/2001/XMLSchema-instance' as "xmlns:xsi",
                                    'http://www.garmin.com/xmlschemas/GpxExtensions/v3' as "xmlns:gpxx",
                                    'http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd http://www.garmin.com/xmlschemas/GpxExtensions/v3 http://www8.garmin.com/xmlschemas/GpxExtensionsv3.xsd' as "xsi:schemaLocation"),
                xmlagg(
                    xmlelement(name wpt, xmlattributes(m.latitude as lat, m.longitude as lon),
                        xmlelement(name name, m.name),
                        xmlelement(name time, 'TODO first seen'),
                        xmlelement(name desc,
                                concat('Last Stayed On: ', 'TODO last seen',
                                    E'\nTotal Stays: ', m.stay_duration,
                                    E'\nTotal Arrivals and Departures: ', m.reference_count,
                                    E'\nLink: ', concat('https://iot.openplotter.cloud/moorage/', m.id)),
                                xmlelement(name "opencpn:guid", uuid_generate_v4())),
                        xmlelement(name sym, 'anchor'),
                        xmlelement(name type, 'WPT'),
                        xmlelement(name link, xmlattributes(concat('https://iot.openplotter.cloud/moorage/', m.id) as href),
                                                xmlelement(name text, m.name)),
                        -- Bug fix: element name typo "mooorage_id" corrected and
                        -- the id was hard-coded to 1; emit the actual moorage id.
                        xmlelement(name extensions, xmlelement(name "postgsail:moorage_id", m.id),
                                                    xmlelement(name "postgsail:link", concat('https://iot.openplotter.cloud/moorage/', m.id)),
                                                    xmlelement(name "opencpn:guid", uuid_generate_v4()),
                                                    xmlelement(name "opencpn:viz", '1'),
                                                    xmlelement(name "opencpn:scale_min_max", xmlattributes(true as UseScale, 30000 as ScaleMin, 0 as ScaleMax)
                                                    ))))
                )::pg_catalog.xml
            FROM api.moorages m;
    END;
$export_moorages_gpx$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.export_moorages_gpx_fn
    IS 'Export moorages as gpx';
|
452
initdb/02_1_3_signalk_api_views.sql
Normal file
452
initdb/02_1_3_signalk_api_views.sql
Normal file
@@ -0,0 +1,452 @@
|
||||
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- API helper views
|
||||
--
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Views
|
||||
-- Views are invoked with the privileges of the view owner,
|
||||
-- make the user_role the view’s owner.
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
-- Earliest metric row ever recorded.
-- NOTE(review): these four views are created without a schema qualifier,
-- so they land in the current search_path — confirm that is intended.
CREATE VIEW first_metric AS
    SELECT *
        FROM api.metrics
        ORDER BY time ASC LIMIT 1;

-- Most recent metric row.
CREATE VIEW last_metric AS
    SELECT *
        FROM api.metrics
        ORDER BY time DESC LIMIT 1;

-- Logbook entries currently flagged active.
CREATE VIEW trip_in_progress AS
    SELECT *
        FROM api.logbook
        WHERE active IS true;

-- Stays currently flagged active.
CREATE VIEW stay_in_progress AS
    SELECT *
        FROM api.stays
        WHERE active IS true;
|
||||
|
||||
-- TODO: Use materialized views instead as it is not live data
-- Logs web view: completed trips only (_to_time set), newest first.
DROP VIEW IF EXISTS api.logs_view;
CREATE OR REPLACE VIEW api.logs_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT id,
        name as "Name",
        _from as "From",
        _from_time as "Started",
        _to as "To",
        _to_time as "Ended",
        distance as "Distance",
        duration as "Duration"
        FROM api.logbook l
        WHERE _to_time IS NOT NULL
        ORDER BY _from_time DESC;
-- Description
COMMENT ON VIEW
    api.logs_view
    IS 'Logs web view';

-- Initial try of MATERIALIZED VIEW
CREATE MATERIALIZED VIEW api.logs_mat_view AS
    SELECT id,
        name as "Name",
        _from as "From",
        _from_time as "Started",
        _to as "To",
        _to_time as "Ended",
        distance as "Distance",
        duration as "Duration"
        FROM api.logbook l
        WHERE _to_time IS NOT NULL
        ORDER BY _from_time DESC;
-- Description
COMMENT ON MATERIALIZED VIEW
    api.logs_mat_view
    IS 'Logs MATERIALIZED web view';

-- Single-log detail view: adds notes, geojson track and speed/wind stats.
DROP VIEW IF EXISTS api.log_view;
CREATE OR REPLACE VIEW api.log_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT id,
        name as "Name",
        _from as "From",
        _from_time as "Started",
        _to as "To",
        _to_time as "Ended",
        distance as "Distance",
        duration as "Duration",
        notes as "Notes",
        track_geojson as geojson,
        avg_speed as avg_speed,
        max_speed as max_speed,
        max_wind_speed as max_wind_speed,
        extra as extra
        FROM api.logbook l
        WHERE _to_time IS NOT NULL
        ORDER BY _from_time DESC;
-- Description
COMMENT ON VIEW
    api.log_view
    IS 'Log web view';
|
||||
|
||||
-- Stays web view
-- Completed, named stays joined to their stay category and moorage,
-- with a human-readable "N days stay at X in Month YYYY" label.
-- TODO group by month
DROP VIEW IF EXISTS api.stays_view;
CREATE OR REPLACE VIEW api.stays_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT s.id,
        concat(
            extract(DAYS FROM (s.departed-s.arrived)::interval),
            ' days',
            --DATE_TRUNC('day', s.departed-s.arrived),
            ' stay at ',
            s.name,
            ' in ',
            RTRIM(TO_CHAR(s.departed, 'Month')),
            ' ',
            TO_CHAR(s.departed, 'YYYY')
            ) as "name",
        s.name AS "moorage",
        m.id AS "moorage_id",
        (s.departed-s.arrived) AS "duration",
        sa.description AS "stayed_at",
        sa.stay_code AS "stayed_at_id",
        s.arrived AS "arrived",
        s.departed AS "departed",
        s.notes AS "notes"
        FROM api.stays s, api.stays_at sa, api.moorages m
        WHERE departed IS NOT NULL
            AND s.name IS NOT NULL
            AND s.stay_code = sa.stay_code
            AND s.id = m.stay_id
        ORDER BY s.arrived DESC;
-- Description
COMMENT ON VIEW
    api.stays_view
    IS 'Stays web view';

-- Single-stay detail view; currently identical to stays_view.
DROP VIEW IF EXISTS api.stay_view;
CREATE OR REPLACE VIEW api.stay_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT s.id,
        concat(
            extract(DAYS FROM (s.departed-s.arrived)::interval),
            ' days',
            --DATE_TRUNC('day', s.departed-s.arrived),
            ' stay at ',
            s.name,
            ' in ',
            RTRIM(TO_CHAR(s.departed, 'Month')),
            ' ',
            TO_CHAR(s.departed, 'YYYY')
            ) as "name",
        s.name AS "moorage",
        m.id AS "moorage_id",
        (s.departed-s.arrived) AS "duration",
        sa.description AS "stayed_at",
        sa.stay_code AS "stayed_at_id",
        s.arrived AS "arrived",
        s.departed AS "departed",
        s.notes AS "notes"
        FROM api.stays s, api.stays_at sa, api.moorages m
        WHERE departed IS NOT NULL
            AND s.name IS NOT NULL
            AND s.stay_code = sa.stay_code
            AND s.id = m.stay_id
        ORDER BY s.arrived DESC;
-- Description
COMMENT ON VIEW
    api.stay_view
    IS 'Stay web view';
|
||||
|
||||
-- Moorages web view
-- TODO, this is wrong using distinct (m.name) should be using postgis geog feature
--DROP VIEW IF EXISTS api.moorages_view_old;
--CREATE VIEW api.moorages_view_old AS
--    SELECT
--        m.name AS Moorage,
--        sa.description AS "Default Stay",
--        sum((m.departed-m.arrived)) OVER (PARTITION by m.name) AS "Total Stay",
--        count(m.departed) OVER (PARTITION by m.name) AS "Arrivals & Departures"
--    FROM api.moorages m, api.stays_at sa
--    WHERE departed is not null
--        AND m.name is not null
--        AND m.stay_code = sa.stay_code
--    GROUP BY m.name,sa.description,m.departed,m.arrived
--    ORDER BY 4 DESC;

-- the good way?
-- Moorage listing: name, default stay category, total stay in days and
-- visit count, most-visited first.
DROP VIEW IF EXISTS api.moorages_view;
CREATE OR REPLACE VIEW api.moorages_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
    SELECT m.id,
        m.name AS Moorage,
        sa.description AS Default_Stay,
        sa.stay_code AS Default_Stay_Id,
        EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS Total_Stay, -- in days
        m.reference_count AS Arrivals_Departures
        -- m.geog
        -- m.stay_duration,
        -- justify_hours ( m.stay_duration )
        FROM api.moorages m, api.stays_at sa
        WHERE m.name is not null
            AND m.stay_code = sa.stay_code
        GROUP BY m.id,m.name,sa.description,m.stay_duration,m.reference_count,m.geog,sa.stay_code
        -- ORDER BY 4 DESC;
        ORDER BY m.reference_count DESC;
-- Description
COMMENT ON VIEW
    api.moorages_view
    IS 'Moorages listing web view';

-- Single-moorage detail view.
DROP VIEW IF EXISTS api.moorage_view;
CREATE OR REPLACE VIEW api.moorage_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
    SELECT id,
        m.name AS Name,
        m.stay_code AS Default_Stay,
        m.home_flag AS Home,
        EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS Total_Stay,
        m.reference_count AS Arrivals_Departures,
        m.notes
        -- m.geog
        FROM api.moorages m
        WHERE m.name IS NOT NULL;
-- Description
COMMENT ON VIEW
    api.moorage_view
    IS 'Moorage details web view';
|
||||
|
||||
-- All moorage in 100 meters from the start of a logbook.
-- ST_DistanceSphere Returns minimum distance in meters between two lon/lat points.
--SELECT
--    m.name, ST_MakePoint(m._lng,m._lat),
--    l._from, ST_MakePoint(l._from_lng,l._from_lat),
--    ST_DistanceSphere(ST_MakePoint(m._lng,m._lat), ST_MakePoint(l._from_lng,l._from_lat))
--    FROM api.moorages m , api.logbook l
--    WHERE ST_DistanceSphere(ST_MakePoint(m._lng,m._lat), ST_MakePoint(l._from_lng,l._from_lat)) <= 100;

-- Stats web view
-- TODO....
-- first time entry from metrics
----> select * from api.metrics m ORDER BY m.time desc limit 1
-- last time entry from metrics
----> select * from api.metrics m ORDER BY m.time asc limit 1
-- max speed from logbook
-- max wind speed from logbook
----> select max(l.max_speed) as max_speed, max(l.max_wind_speed) as max_wind_speed from api.logbook l;
-- Total Distance from logbook
----> select sum(l.distance) as "Total Distance" from api.logbook l;
-- Total Time Underway from logbook
----> select sum(l.duration) as "Total Time Underway" from api.logbook l;
-- Longest Nonstop Sail from logbook, eg longest trip duration and distance
----> select max(l.duration),max(l.distance) from api.logbook l;
-- Aggregate logbook statistics plus vessel name and first/last metric times.
CREATE OR REPLACE VIEW api.stats_logs_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
    WITH
        meta AS (
            SELECT m.name FROM api.metadata m ),
        last_metric AS (
            SELECT m.time FROM api.metrics m ORDER BY m.time DESC limit 1),
        first_metric AS (
            SELECT m.time FROM api.metrics m ORDER BY m.time ASC limit 1),
        logbook AS (
            SELECT
                count(*) AS "Number of Log Entries",
                max(l.max_speed) AS "Max Speed",
                max(l.max_wind_speed) AS "Max Wind Speed",
                sum(l.distance) AS "Total Distance",
                sum(l.duration) AS "Total Time Underway",
                concat( max(l.distance), ' NM, ', max(l.duration), ' hours') AS "Longest Nonstop Sail"
                FROM api.logbook l)
    SELECT
        m.name as Name,
        fm.time AS first,
        lm.time AS last,
        l.*
        FROM first_metric fm, last_metric lm, logbook l, meta m;
COMMENT ON VIEW
    api.stats_logs_view
    IS 'Statistics Logs web view';
|
||||
|
||||
-- Home Ports / Unique Moorages
----> select count(*) as "Home Ports" from api.moorages m where home_flag is true;
-- Unique Moorages
----> select count(*) as "Home Ports" from api.moorages m;
-- Time Spent at Home Port(s)
----> select sum(m.stay_duration) as "Time Spent at Home Port(s)" from api.moorages m where home_flag is true;
-- OR
----> select m.stay_duration as "Time Spent at Home Port(s)" from api.moorages m where home_flag is true;
-- Time Spent Away
----> select sum(m.stay_duration) as "Time Spent Away" from api.moorages m where home_flag is false;
-- Time Spent Away order by, group by stay_code (Dock, Anchor, Mooring Buoys, Unclassified)
----> select sa.description,sum(m.stay_duration) as "Time Spent Away" from api.moorages m, api.stays_at sa where home_flag is false AND m.stay_code = sa.stay_code group by m.stay_code,sa.description order by m.stay_code;
-- Moorage statistics: home-port count, unique moorages, time at home vs away.
CREATE OR REPLACE VIEW api.stats_moorages_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
    WITH
        home_ports AS (
            select count(*) as home_ports from api.moorages m where home_flag is true
        ),
        unique_moorage AS (
            select count(*) as unique_moorage from api.moorages m
        ),
        time_at_home_ports AS (
            select sum(m.stay_duration) as time_at_home_ports from api.moorages m where home_flag is true
        ),
        time_spent_away AS (
            select sum(m.stay_duration) as time_spent_away from api.moorages m where home_flag is false
        )
    SELECT
        home_ports.home_ports as "Home Ports",
        unique_moorage.unique_moorage as "Unique Moorages",
        time_at_home_ports.time_at_home_ports "Time Spent at Home Port(s)",
        time_spent_away.time_spent_away as "Time Spent Away"
        FROM home_ports, unique_moorage, time_at_home_ports, time_spent_away;
COMMENT ON VIEW
    api.stats_moorages_view
    IS 'Statistics Moorages web view';

-- Time spent away broken down per stay category (Dock, Anchor, ...).
CREATE OR REPLACE VIEW api.stats_moorages_away_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
    SELECT sa.description,sum(m.stay_duration) as time_spent_away_by
        FROM api.moorages m, api.stays_at sa
        WHERE home_flag IS false
            AND m.stay_code = sa.stay_code
        GROUP BY m.stay_code,sa.description
        ORDER BY m.stay_code;
COMMENT ON VIEW
    api.stats_moorages_away_view
    IS 'Statistics Moorages Time Spent Away web view';
|
||||
|
||||
--CREATE VIEW api.stats_view AS -- todo
|
||||
-- WITH
|
||||
-- logs AS (
|
||||
-- SELECT * FROM api.stats_logs_view ),
|
||||
-- moorages AS (
|
||||
-- SELECT * FROM api.stats_moorages_view)
|
||||
-- SELECT
|
||||
-- l.*,
|
||||
-- m.*
|
||||
-- FROM logs l, moorages m;
|
||||
--COMMENT ON VIEW
|
||||
-- api.stats_moorages_away_view
|
||||
-- IS 'Statistics Moorages Time Spent Away web view';
|
||||
|
||||
-- View main monitoring for web app
-- Latest metric row flattened into named monitoring fields plus a GeoJSON
-- position feature; 'offline' is true when the last metric is >70 min old.
DROP VIEW IF EXISTS api.monitoring_view;
CREATE VIEW api.monitoring_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT
        time AS "time",
        (NOW() AT TIME ZONE 'UTC' - time) > INTERVAL '70 MINUTES' as offline,
        metrics-> 'environment.water.temperature' AS waterTemperature,
        metrics-> 'environment.inside.temperature' AS insideTemperature,
        metrics-> 'environment.outside.temperature' AS outsideTemperature,
        metrics-> 'environment.wind.speedOverGround' AS windSpeedOverGround,
        metrics-> 'environment.wind.directionGround' AS windDirectionGround,
        metrics-> 'environment.inside.humidity' AS insideHumidity,
        metrics-> 'environment.outside.humidity' AS outsideHumidity,
        metrics-> 'environment.outside.pressure' AS outsidePressure,
        metrics-> 'environment.inside.pressure' AS insidePressure,
        metrics-> 'electrical.batteries.House.capacity.stateOfCharge' AS batteryCharge,
        metrics-> 'electrical.batteries.House.voltage' AS batteryVoltage,
        jsonb_build_object(
            'type', 'Feature',
            'geometry', ST_AsGeoJSON(st_makepoint(longitude,latitude))::jsonb,
            'properties', jsonb_build_object(
                'name', current_setting('vessel.name', false),
                'latitude', m.latitude,
                'longitude', m.longitude
                )::jsonb ) AS geojson,
        current_setting('vessel.name', false) AS name
        FROM api.metrics m
        ORDER BY time DESC LIMIT 1;
COMMENT ON VIEW
    api.monitoring_view
    IS 'Monitoring static web view';
|
||||
|
||||
-- Time series of every humidity reading found in the metrics json.
DROP VIEW IF EXISTS api.monitoring_humidity;
CREATE VIEW api.monitoring_humidity WITH (security_invoker=true,security_barrier=true) AS
    SELECT m.time, key, value
        FROM api.metrics m,
            jsonb_each_text(m.metrics)
        WHERE key ILIKE 'environment.%.humidity'
        ORDER BY m.time DESC;
COMMENT ON VIEW
    api.monitoring_humidity
    IS 'Monitoring environment.%.humidity web view';
|
||||
|
||||
-- View System RPI monitoring for grafana
-- View Electric monitoring for grafana

-- View main monitoring for grafana
-- LAST Monitoring data from json!
-- Time series of every temperature reading found in the metrics json.
DROP VIEW IF EXISTS api.monitoring_temperatures;
CREATE VIEW api.monitoring_temperatures WITH (security_invoker=true,security_barrier=true) AS
    SELECT m.time, key, value
        FROM api.metrics m,
            jsonb_each_text(m.metrics)
        WHERE key ILIKE 'environment.%.temperature'
        ORDER BY m.time DESC;
COMMENT ON VIEW
    api.monitoring_temperatures
    IS 'Monitoring environment.%.temperature web view';

-- json key regexp
-- https://stackoverflow.com/questions/38204467/selecting-for-a-jsonb-array-contains-regex-match
-- Last voltage data from json!
-- Time series of every voltage reading found in the metrics json.
DROP VIEW IF EXISTS api.monitoring_voltage;
CREATE VIEW api.monitoring_voltage WITH (security_invoker=true,security_barrier=true) AS
    SELECT m.time, key, value
        FROM api.metrics m,
            jsonb_each_text(m.metrics)
        WHERE key ILIKE 'electrical.%.voltage'
        ORDER BY m.time DESC;
COMMENT ON VIEW
    api.monitoring_voltage
    IS 'Monitoring electrical.%.voltage web view';
|
||||
|
||||
-- Last whatever data from json!
-- Explode the most recent metrics json into (key, value) rows.
DROP VIEW IF EXISTS api.monitoring_view2;
CREATE VIEW api.monitoring_view2 WITH (security_invoker=true,security_barrier=true) AS
    SELECT
        *
        FROM
            jsonb_each(
                ( SELECT metrics FROM api.metrics m ORDER BY time DESC LIMIT 1)
            );
-- WHERE key ilike 'tanks.%.capacity%'
--    or key ilike 'electrical.solar.%.panelPower'
--    or key ilike 'electrical.batteries%stateOfCharge'
--    or key ilike 'tanks\.%currentLevel'
COMMENT ON VIEW
    api.monitoring_view2
    IS 'Monitoring Last whatever data from json web view';
|
||||
|
||||
-- Timeseries whatever data from json!
-- Full unfiltered (time, key, value) time series of all metrics json keys.
DROP VIEW IF EXISTS api.monitoring_view3;
CREATE VIEW api.monitoring_view3 WITH (security_invoker=true,security_barrier=true) AS
    SELECT m.time, key, value
        FROM api.metrics m,
            jsonb_each_text(m.metrics)
        ORDER BY m.time DESC;
-- WHERE key ILIKE 'electrical.batteries%voltage';
-- WHERE key ilike 'tanks.%.capacity%'
--    or key ilike 'electrical.solar.%.panelPower'
--    or key ilike 'electrical.batteries%stateOfCharge';
-- key ILIKE 'propulsion.%.runTime'
-- key ILIKE 'navigation.log'
COMMENT ON VIEW
    api.monitoring_view3
    IS 'Monitoring Timeseries whatever data from json web view';
|
||||
|
||||
-- Infotiles web app
-- Row counts of logs, stays and moorages for the dashboard tiles.
DROP VIEW IF EXISTS api.total_info_view;
CREATE VIEW api.total_info_view WITH (security_invoker=true,security_barrier=true) AS
    -- Infotiles web app, not used calculated client side
    WITH
        l as (SELECT count(*) as logs FROM api.logbook),
        s as (SELECT count(*) as stays FROM api.stays),
        m as (SELECT count(*) as moorages FROM api.moorages)
        SELECT * FROM l,s,m;
COMMENT ON VIEW
    api.total_info_view
    IS 'total_info_view web view';
|
@@ -1,980 +0,0 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- PostSail => Postgres + TimescaleDB + PostGIS + PostgREST
|
||||
--
|
||||
-- Inspired from:
|
||||
-- https://groups.google.com/g/signalk/c/W2H15ODCic4
|
||||
--
|
||||
-- Description:
|
||||
-- Insert data into table metadata from API using PostgREST
|
||||
-- Insert data into table metrics from API using PostgREST
|
||||
-- TimescaleDB Hypertable to store signalk metrics
|
||||
-- pgsql functions to generate logbook, stays, moorages
|
||||
-- CRON functions to process logbook, stays, moorages
|
||||
-- python functions for geo reverse and send notification via email and/or pushover
|
||||
-- Views statistics, timelapse, monitoring, logs
|
||||
-- Always store time in UTC
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
-- vessels signalk -(POST)-> metadata -> metadata_upsert -(trigger)-> metadata_upsert_trigger_fn (INSERT or UPDATE)
|
||||
-- vessels signalk -(POST)-> metrics -> metrics -(trigger)-> metrics_fn new log,stay,moorage
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
|
||||
-- Drop database
|
||||
-- % docker exec -i timescaledb-postgis psql -Uusername -W postgres -c "drop database signalk;"
|
||||
|
||||
-- Import Schema
|
||||
-- % cat signalk.sql | docker exec -i timescaledb-postgis psql -Uusername postgres
|
||||
|
||||
-- Export hypertable
|
||||
-- % docker exec -i timescaledb-postgis psql -Uusername -W signalk -c "\COPY (SELECT * FROM api.metrics ORDER BY time ASC) TO '/var/lib/postgresql/data/metrics.csv' DELIMITER ',' CSV"
|
||||
-- Export hypertable to gzip
|
||||
-- # docker exec -i timescaledb-postgis psql -Uusername -W signalk -c "\COPY (SELECT * FROM api.metrics ORDER BY time ASC) TO PROGRAM 'gzip > /var/lib/postgresql/data/metrics.csv.gz' CSV HEADER;"
|
||||
|
||||
DO $$
|
||||
BEGIN
|
||||
RAISE WARNING '
|
||||
_________.__ .__ ____ __.
|
||||
/ _____/|__| ____ ____ _____ | | | |/ _|
|
||||
\_____ \ | |/ ___\ / \\__ \ | | | <
|
||||
/ \| / /_/ > | \/ __ \| |_| | \
|
||||
/_______ /|__\___ /|___| (____ /____/____|__ \
|
||||
\/ /_____/ \/ \/ \/
|
||||
%', now();
|
||||
END $$;
|
||||
|
||||
select version();
|
||||
|
||||
-- Database
|
||||
CREATE DATABASE signalk;
|
||||
|
||||
-- connext to the DB
|
||||
\c signalk
|
||||
|
||||
-- Schema
|
||||
CREATE SCHEMA IF NOT EXISTS api;
|
||||
COMMENT ON SCHEMA api IS 'api schema expose to postgrest';
|
||||
|
||||
-- Revoke default privileges to all public functions
|
||||
ALTER DEFAULT PRIVILEGES REVOKE EXECUTE ON FUNCTIONS FROM PUBLIC;
|
||||
|
||||
-- Extensions
|
||||
CREATE EXTENSION IF NOT EXISTS timescaledb; -- provides time series functions for PostgreSQL
|
||||
-- CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit; -- provides time series functions for PostgreSQL
|
||||
CREATE EXTENSION IF NOT EXISTS postgis; -- adds support for geographic objects to the PostgreSQL object-relational database
|
||||
CREATE EXTENSION IF NOT EXISTS plpgsql; -- PL/pgSQL procedural language
|
||||
CREATE EXTENSION IF NOT EXISTS plpython3u; -- implements PL/Python based on the Python 3 language variant.
|
||||
CREATE EXTENSION IF NOT EXISTS jsonb_plpython3u CASCADE; -- tranform jsonb to python json type.
|
||||
CREATE EXTENSION IF NOT EXISTS pg_stat_statements; -- provides a means for tracking planning and execution statistics of all SQL statements executed
|
||||
|
||||
-- Trust plpython3u language by default
|
||||
UPDATE pg_language SET lanpltrusted = true WHERE lanname = 'plpython3u';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Tables
|
||||
--
|
||||
-- Metrics from signalk
|
||||
CREATE TABLE IF NOT EXISTS api.metrics (
|
||||
time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
|
||||
client_id VARCHAR(255) NOT NULL,
|
||||
latitude DOUBLE PRECISION NULL,
|
||||
longitude DOUBLE PRECISION NULL,
|
||||
speedOverGround DOUBLE PRECISION NULL,
|
||||
courseOverGroundTrue DOUBLE PRECISION NULL,
|
||||
windSpeedApparent DOUBLE PRECISION NULL,
|
||||
angleSpeedApparent DOUBLE PRECISION NULL,
|
||||
status VARCHAR(100) NULL,
|
||||
metrics jsonb NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.metrics
|
||||
IS 'Stores metrics from vessel';
|
||||
|
||||
-- Index todo!
|
||||
CREATE INDEX ON api.metrics (client_id, time DESC);
|
||||
CREATE INDEX ON api.metrics (status, time DESC);
|
||||
-- json index??
|
||||
CREATE INDEX ON api.metrics using GIN (metrics);
|
||||
-- timescaledb hypertable
|
||||
SELECT create_hypertable('api.metrics', 'time');
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Metadata from signalk
|
||||
CREATE TABLE IF NOT EXISTS api.metadata(
|
||||
id SERIAL PRIMARY KEY,
|
||||
name VARCHAR(150) NULL,
|
||||
mmsi VARCHAR(10) NULL,
|
||||
client_id VARCHAR(255) UNIQUE NOT NULL,
|
||||
length DOUBLE PRECISION NULL,
|
||||
beam DOUBLE PRECISION NULL,
|
||||
height DOUBLE PRECISION NULL,
|
||||
ship_type VARCHAR(255) NULL,
|
||||
plugin_version VARCHAR(10) NOT NULL,
|
||||
signalk_version VARCHAR(10) NOT NULL,
|
||||
time TIMESTAMP WITHOUT TIME ZONE NOT NULL, -- last_update
|
||||
active BOOLEAN DEFAULT True -- monitor online/offline
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.metadata
|
||||
IS 'Stores metadata from vessel';
|
||||
|
||||
-- Index todo!
|
||||
CREATE INDEX metadata_client_id_idx ON api.metadata (client_id);
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Logbook
|
||||
-- todo add clientid ref
|
||||
-- todo add cosumption fuel?
|
||||
-- todo add engine hour?
|
||||
-- todo add geom object http://epsg.io/4326 EPSG:4326 Unit: degres
|
||||
-- todo add geog object http://epsg.io/3857 EPSG:3857 Unit: meters
|
||||
-- https://postgis.net/workshops/postgis-intro/geography.html#using-geography
|
||||
-- https://medium.com/coord/postgis-performance-showdown-geometry-vs-geography-ec99967da4f0
|
||||
-- virtual logbook by boat by client_id impossible?
|
||||
-- https://www.postgresql.org/docs/current/ddl-partitioning.html
|
||||
-- Issue:
|
||||
-- https://www.reddit.com/r/PostgreSQL/comments/di5mbr/postgresql_12_foreign_keys_and_partitioned_tables/f3tsoop/
|
||||
CREATE TABLE IF NOT EXISTS api.logbook(
|
||||
id SERIAL PRIMARY KEY,
|
||||
client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
|
||||
-- client_id VARCHAR(255) NOT NULL,
|
||||
active BOOLEAN DEFAULT false,
|
||||
name VARCHAR(255),
|
||||
_from VARCHAR(255),
|
||||
_from_lat DOUBLE PRECISION NULL,
|
||||
_from_lng DOUBLE PRECISION NULL,
|
||||
_to VARCHAR(255),
|
||||
_to_lat DOUBLE PRECISION NULL,
|
||||
_to_lng DOUBLE PRECISION NULL,
|
||||
--track_geom Geometry(LINESTRING)
|
||||
track_geom geometry(LINESTRING,4326) NULL,
|
||||
track_geog geography(LINESTRING) NULL,
|
||||
track_geojson JSON NULL,
|
||||
_from_time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
|
||||
_to_time TIMESTAMP WITHOUT TIME ZONE NULL,
|
||||
distance NUMERIC, -- meters?
|
||||
duration INTERVAL, -- duration in days and hours?
|
||||
avg_speed DOUBLE PRECISION NULL,
|
||||
max_speed DOUBLE PRECISION NULL,
|
||||
max_wind_speed DOUBLE PRECISION NULL,
|
||||
notes TEXT NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.logbook
|
||||
IS 'Stores generated logbook';
|
||||
COMMENT ON COLUMN api.logbook.distance IS 'in NM';
|
||||
|
||||
-- Index todo!
|
||||
CREATE INDEX logbook_client_id_idx ON api.logbook (client_id);
|
||||
CREATE INDEX ON api.logbook USING GIST ( track_geom );
|
||||
COMMENT ON COLUMN api.logbook.track_geom IS 'postgis geometry type EPSG:4326 Unit: degres';
|
||||
CREATE INDEX ON api.logbook USING GIST ( track_geog );
|
||||
COMMENT ON COLUMN api.logbook.track_geog IS 'postgis geography type default SRID 4326 Unit: degres';
|
||||
-- Otherwise -- ERROR: Only lon/lat coordinate systems are supported in geography.
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Stays
|
||||
-- todo add clientid ref
|
||||
-- todo add FOREIGN KEY?
|
||||
-- virtual logbook by boat?
|
||||
CREATE TABLE IF NOT EXISTS api.stays(
|
||||
id SERIAL PRIMARY KEY,
|
||||
client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
|
||||
-- client_id VARCHAR(255) NOT NULL,
|
||||
active BOOLEAN DEFAULT false,
|
||||
name VARCHAR(255),
|
||||
latitude DOUBLE PRECISION NULL,
|
||||
longitude DOUBLE PRECISION NULL,
|
||||
geog GEOGRAPHY(POINT) NULL,
|
||||
arrived TIMESTAMP WITHOUT TIME ZONE NOT NULL,
|
||||
departed TIMESTAMP WITHOUT TIME ZONE,
|
||||
duration INTERVAL, -- duration in days and hours?
|
||||
stay_code INT DEFAULT 1, -- REFERENCES api.stays_at(stay_code),
|
||||
notes TEXT NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.stays
|
||||
IS 'Stores generated stays';
|
||||
|
||||
-- Index
|
||||
CREATE INDEX stays_client_id_idx ON api.stays (client_id);
|
||||
CREATE INDEX ON api.stays USING GIST ( geog );
|
||||
COMMENT ON COLUMN api.stays.geog IS 'postgis geography type default SRID 4326 Unit: degres';
|
||||
-- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography.
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Moorages
|
||||
-- todo add clientid ref
|
||||
-- virtual logbook by boat?
|
||||
CREATE TABLE IF NOT EXISTS api.moorages(
|
||||
id SERIAL PRIMARY KEY,
|
||||
client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
|
||||
-- client_id VARCHAR(255) NOT NULL,
|
||||
name VARCHAR(255),
|
||||
country VARCHAR(255), -- todo need to update reverse_geocode_py_fn
|
||||
stay_id INT NOT NULL, -- needed?
|
||||
stay_code INT DEFAULT 1, -- needed? REFERENCES api.stays_at(stay_code)
|
||||
stay_duration INTERVAL NULL,
|
||||
reference_count INT DEFAULT 1,
|
||||
latitude DOUBLE PRECISION NULL,
|
||||
longitude DOUBLE PRECISION NULL,
|
||||
geog GEOGRAPHY(POINT) NULL,
|
||||
home_flag BOOLEAN DEFAULT false,
|
||||
notes TEXT NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
api.moorages
|
||||
IS 'Stores generated moorages';
|
||||
|
||||
-- Index
|
||||
CREATE INDEX moorages_client_id_idx ON api.moorages (client_id);
|
||||
CREATE INDEX ON api.moorages USING GIST ( geog );
|
||||
COMMENT ON COLUMN api.moorages.geog IS 'postgis geography type default SRID 4326 Unit: degres';
|
||||
-- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography.
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Stay Type
|
||||
CREATE TABLE IF NOT EXISTS api.stays_at(
|
||||
stay_code INTEGER,
|
||||
description TEXT
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE api.stays_at IS 'Stay Type';
|
||||
-- Insert default possible values
|
||||
INSERT INTO api.stays_at(stay_code, description) VALUES
|
||||
(1, 'Unknow'),
|
||||
(2, 'Anchor'),
|
||||
(3, 'Mooring Buoy'),
|
||||
(4, 'Dock');
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Trigger Functions Metadata table
|
||||
--
|
||||
-- UPSERT - Insert vs Update for Metadata
|
||||
DROP FUNCTION IF EXISTS metadata_upsert_trigger_fn;
|
||||
CREATE FUNCTION metadata_upsert_trigger_fn() RETURNS trigger AS $metadata_upsert$
|
||||
DECLARE
|
||||
metadata_id integer;
|
||||
metadata_active boolean;
|
||||
BEGIN
|
||||
-- UPSERT - Insert vs Update for Metadata
|
||||
RAISE NOTICE 'metadata_upsert_trigger_fn';
|
||||
SELECT m.id,m.active INTO metadata_id,metadata_active
|
||||
FROM api.metadata m
|
||||
WHERE (m.mmsi IS NOT NULL AND m.mmsi = NEW.mmsi)
|
||||
OR (m.client_id IS NOT NULL AND m.client_id = NEW.client_id);
|
||||
RAISE NOTICE 'metadata_id %', metadata_id;
|
||||
IF metadata_id IS NOT NULL THEN
|
||||
-- send notifitacion if boat is back online
|
||||
IF metadata_active is False THEN
|
||||
-- Add monitor online entry to process queue for later notification
|
||||
INSERT INTO process_queue (channel, payload, stored)
|
||||
VALUES ('monitoring_online', metadata_id, now());
|
||||
END IF;
|
||||
-- Update vessel metadata
|
||||
UPDATE api.metadata
|
||||
SET
|
||||
name = NEW.name,
|
||||
mmsi = NEW.mmsi,
|
||||
client_id = NEW.client_id,
|
||||
length = NEW.length,
|
||||
beam = NEW.beam,
|
||||
height = NEW.height,
|
||||
ship_type = NEW.ship_type,
|
||||
plugin_version = NEW.plugin_version,
|
||||
signalk_version = NEW.signalk_version,
|
||||
time = NEW.time,
|
||||
active = true
|
||||
WHERE id = metadata_id;
|
||||
RETURN NULL; -- Ignore insert
|
||||
ELSE
|
||||
-- Insert new vessel metadata
|
||||
RETURN NEW; -- Insert new vessel metadata
|
||||
END IF;
|
||||
END;
|
||||
$metadata_upsert$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.metadata_upsert_trigger_fn
|
||||
IS 'process metadata from vessel, upsert';
|
||||
|
||||
-- Metadata notification for new vessel after insert
|
||||
DROP FUNCTION IF EXISTS metadata_notification_trigger_fn;
|
||||
CREATE FUNCTION metadata_notification_trigger_fn() RETURNS trigger AS $metadata_notification$
|
||||
DECLARE
|
||||
BEGIN
|
||||
RAISE NOTICE 'metadata_notification_trigger_fn';
|
||||
INSERT INTO process_queue (channel, payload, stored)
|
||||
VALUES ('monitoring_online', NEW.id, now());
|
||||
RETURN NULL;
|
||||
END;
|
||||
$metadata_notification$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.metadata_notification_trigger_fn
|
||||
IS 'process metadata notification from vessel, monitoring_online';
|
||||
|
||||
-- Metadata trigger BEFORE INSERT
|
||||
CREATE TRIGGER metadata_upsert_trigger BEFORE INSERT ON api.metadata
|
||||
FOR EACH ROW EXECUTE FUNCTION metadata_upsert_trigger_fn();
|
||||
-- Description
|
||||
COMMENT ON TRIGGER
|
||||
metadata_upsert_trigger ON api.metadata
|
||||
IS 'BEFORE INSERT ON api.metadata run function metadata_upsert_trigger_fn';
|
||||
|
||||
-- Metadata trigger AFTER INSERT
|
||||
CREATE TRIGGER metadata_notification_trigger AFTER INSERT ON api.metadata
|
||||
FOR EACH ROW EXECUTE FUNCTION metadata_notification_trigger_fn();
|
||||
-- Description
|
||||
COMMENT ON TRIGGER
|
||||
metadata_notification_trigger ON api.metadata
|
||||
IS 'AFTER INSERT ON api.metadata run function metadata_update_trigger_fn for notification on new vessel';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Trigger Functions metrics table
|
||||
--
|
||||
-- Create a logbook or stay entry base on the vessel state, eg: navigation.state
|
||||
-- https://github.com/meri-imperiumi/signalk-autostate
|
||||
|
||||
DROP FUNCTION IF EXISTS metrics_trigger_fn;
|
||||
CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
|
||||
DECLARE
|
||||
previous_status varchar;
|
||||
previous_time TIMESTAMP WITHOUT TIME ZONE;
|
||||
stay_code integer;
|
||||
logbook_id integer;
|
||||
stay_id integer;
|
||||
BEGIN
|
||||
RAISE NOTICE 'metrics_trigger_fn';
|
||||
-- todo: Check we have the boat metadata?
|
||||
-- Do we have a log in progress?
|
||||
-- Do we have a stay in progress?
|
||||
-- Fetch the latest entry to compare status against the new status to be insert
|
||||
SELECT coalesce(m.status, 'moored'), m.time INTO previous_status, previous_time
|
||||
FROM api.metrics m
|
||||
WHERE m.client_id IS NOT NULL
|
||||
AND m.client_id = NEW.client_id
|
||||
ORDER BY m.time DESC LIMIT 1;
|
||||
RAISE NOTICE 'Metrics Status, New:[%] Previous:[%]', NEW.status, previous_status;
|
||||
IF NEW.status IS NULL THEN
|
||||
RAISE WARNING 'Invalid new status [%], update to default moored', NEW.status;
|
||||
NEW.status := 'moored';
|
||||
END IF;
|
||||
IF previous_status IS NULL THEN
|
||||
RAISE WARNING 'Invalid previous status [%], update to default moored', previous_status;
|
||||
previous_status := 'moored';
|
||||
-- Add new stay as no previous entry exist
|
||||
INSERT INTO api.stays
|
||||
(client_id, active, arrived, latitude, longitude, stay_code)
|
||||
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude, stay_code)
|
||||
RETURNING id INTO stay_id;
|
||||
-- Add stay entry to process queue for further processing
|
||||
INSERT INTO process_queue (channel, payload, stored) values ('new_stay', stay_id, now());
|
||||
RAISE WARNING 'Insert first stay as no previous metrics exist, stay_id %', stay_id;
|
||||
END IF;
|
||||
IF previous_time = NEW.time THEN
|
||||
-- Ignore entry if same time
|
||||
RAISE WARNING 'Ignoring metric, duplicate time [%] = [%]', previous_time, NEW.time;
|
||||
RETURN NULL;
|
||||
END IF;
|
||||
|
||||
--
|
||||
-- Check the state and if any previous/current entry
|
||||
IF previous_status <> NEW.status AND (NEW.status = 'sailing' OR NEW.status = 'motoring') THEN
|
||||
-- Start new log
|
||||
RAISE WARNING 'Start new log, New:[%] Previous:[%]', NEW.status, previous_status;
|
||||
RAISE NOTICE 'Inserting new trip [%]', NEW.status;
|
||||
INSERT INTO api.logbook
|
||||
(client_id, active, _from_time, _from_lat, _from_lng)
|
||||
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude);
|
||||
-- End current stay
|
||||
-- Fetch stay_id by client_id
|
||||
SELECT id INTO stay_id
|
||||
FROM api.stays s
|
||||
WHERE s.client_id IS NOT NULL
|
||||
AND s.client_id = NEW.client_id
|
||||
AND active IS true
|
||||
LIMIT 1;
|
||||
RAISE NOTICE 'Updating stay status [%] [%] [%]', stay_id, NEW.status, NEW.time;
|
||||
IF stay_id IS NOT NULL THEN
|
||||
UPDATE api.stays
|
||||
SET
|
||||
active = false,
|
||||
departed = NEW.time
|
||||
WHERE id = stay_id;
|
||||
-- Add moorage entry to process queue for further processing
|
||||
INSERT INTO process_queue (channel, payload, stored) values ('new_moorage', stay_id, now());
|
||||
ELSE
|
||||
RAISE WARNING 'Invalid stay_id [%] [%]', stay_id, NEW.time;
|
||||
END IF;
|
||||
ELSIF previous_status <> NEW.status AND (NEW.status = 'moored' OR NEW.status = 'anchored') THEN
|
||||
-- Start new stays
|
||||
RAISE WARNING 'Start new stay, New:[%] Previous:[%]', NEW.status, previous_status;
|
||||
RAISE NOTICE 'Inserting new stay [%]', NEW.status;
|
||||
-- if metric status is anchored set stay_code accordingly
|
||||
stay_code = 1;
|
||||
IF NEW.status = 'anchored' THEN
|
||||
stay_code = 2;
|
||||
END IF;
|
||||
-- Add new stay
|
||||
INSERT INTO api.stays
|
||||
(client_id, active, arrived, latitude, longitude, stay_code)
|
||||
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude, stay_code)
|
||||
RETURNING id INTO stay_id;
|
||||
-- Add stay entry to process queue for further processing
|
||||
INSERT INTO process_queue (channel, payload, stored) values ('new_stay', stay_id, now());
|
||||
-- End current log/trip
|
||||
-- Fetch logbook_id by client_id
|
||||
SELECT id INTO logbook_id
|
||||
FROM api.logbook l
|
||||
WHERE l.client_id IS NOT NULL
|
||||
AND l.client_id = NEW.client_id
|
||||
AND active IS true
|
||||
LIMIT 1;
|
||||
IF logbook_id IS NOT NULL THEN
|
||||
-- todo check on time start vs end
|
||||
RAISE NOTICE 'Updating trip status [%] [%] [%]', logbook_id, NEW.status, NEW.time;
|
||||
UPDATE api.logbook
|
||||
SET
|
||||
active = false,
|
||||
_to_time = NEW.time,
|
||||
_to_lat = NEW.latitude,
|
||||
_to_lng = NEW.longitude
|
||||
WHERE id = logbook_id;
|
||||
-- Add logbook entry to process queue for later processing
|
||||
INSERT INTO process_queue (channel, payload, stored) values ('new_logbook', logbook_id, now());
|
||||
ELSE
|
||||
RAISE WARNING 'Invalid logbook_id [%] [%]', logbook_id, NEW.time;
|
||||
END IF;
|
||||
END IF;
|
||||
RETURN NEW; -- Finally insert the actual new metric
|
||||
END;
|
||||
$metrics$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.metrics_trigger_fn
|
||||
IS 'process metrics from vessel, generate new_logbook and new_stay';
|
||||
|
||||
--
|
||||
-- Triggers logbook update on metrics insert
|
||||
CREATE TRIGGER metrics_trigger BEFORE INSERT ON api.metrics
|
||||
FOR EACH ROW EXECUTE FUNCTION metrics_trigger_fn();
|
||||
-- Description
|
||||
COMMENT ON TRIGGER
|
||||
metrics_trigger ON api.metrics
|
||||
IS 'BEFORE INSERT ON api.metrics run function metrics_trigger_fn';
|
||||
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Functions API schema
|
||||
|
||||
-- Export a log entry to geojson
|
||||
DROP FUNCTION IF EXISTS api.export_logbook_geojson_point_fn;
|
||||
CREATE OR REPLACE FUNCTION api.export_logbook_geojson_point_fn(IN _id INTEGER, OUT geojson JSON) RETURNS JSON AS $export_logbook_geojson_point$
|
||||
DECLARE
|
||||
logbook_rec record;
|
||||
BEGIN
|
||||
-- If _id is is not NULL and > 0
|
||||
SELECT * INTO logbook_rec
|
||||
FROM api.logbook WHERE id = _id;
|
||||
|
||||
WITH log AS (
|
||||
SELECT m.time as time, m.latitude as lat, m.longitude as lng, m.courseOverGroundTrue as cog
|
||||
FROM api.metrics m
|
||||
WHERE m.latitude IS NOT null
|
||||
AND m.longitude IS NOT null
|
||||
AND m.time >= logbook_rec._from_time::timestamp without time zone
|
||||
AND m.time <= logbook_rec._to_time::timestamp without time zone
|
||||
GROUP by m.time,m.latitude,m.longitude,m.courseOverGroundTrue
|
||||
ORDER BY m.time ASC)
|
||||
SELECT json_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'crs', json_build_object(
|
||||
'type', 'name',
|
||||
'properties', json_build_object(
|
||||
'name', 'EPSG:4326'
|
||||
)
|
||||
),
|
||||
'features', json_agg(
|
||||
json_build_object(
|
||||
'type', 'Feature',
|
||||
-- 'id', {id}, -- the GeoJson spec includes an 'id' field, but it is optional, replace {id} with your id field
|
||||
'geometry', ST_AsGeoJSON(st_makepoint(lng,lat))::json,
|
||||
'properties', json_build_object(
|
||||
-- list of fields
|
||||
'field1', time,
|
||||
'field2', cog
|
||||
)
|
||||
)
|
||||
)
|
||||
) INTO geojson
|
||||
FROM log;
|
||||
END;
|
||||
$export_logbook_geojson_point$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
api.export_logbook_geojson_point_fn
|
||||
IS 'Export a log entry to geojson feature point with Time and courseOverGroundTrue properties';
|
||||
|
||||
-- Export a log entry to geojson
|
||||
DROP FUNCTION IF EXISTS api.export_logbook_geojson_linestring_fn;
|
||||
CREATE FUNCTION api.export_logbook_geojson_linestring_fn(IN _id INTEGER) RETURNS JSON AS $export_logbook_geojson_linestring$
|
||||
DECLARE
|
||||
geojson json;
|
||||
BEGIN
|
||||
-- If _id is is not NULL and > 0
|
||||
SELECT ST_AsGeoJSON(l.track_geom) INTO geojson
|
||||
FROM api.logbook l
|
||||
WHERE l.id = _id;
|
||||
RETURN geojson;
|
||||
END;
|
||||
$export_logbook_geojson_linestring$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
api.export_logbook_geojson_linestring_fn
|
||||
IS 'Export a log entry to geojson feature linestring';
|
||||
|
||||
-- Find all log from and to moorage geopoint within 100m
|
||||
DROP FUNCTION IF EXISTS api.find_log_from_moorage_fn;
|
||||
CREATE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER) RETURNS void AS $find_log_from_moorage$
|
||||
DECLARE
|
||||
moorage_rec record;
|
||||
logbook_rec record;
|
||||
BEGIN
|
||||
-- If _id is is not NULL and > 0
|
||||
SELECT * INTO moorage_rec
|
||||
FROM api.moorages m
|
||||
WHERE m.id = _id;
|
||||
-- find all log from and to moorage geopoint within 100m
|
||||
--RETURN QUERY
|
||||
SELECT id,name,_from,_to,_from_time,_to_time,distance,duration
|
||||
FROM api.logbook
|
||||
WHERE ST_DWithin(
|
||||
Geography(ST_MakePoint(_from_lng, _from_lat)),
|
||||
moorage_rec.geog,
|
||||
100 -- in meters ?
|
||||
)
|
||||
OR ST_DWithin(
|
||||
Geography(ST_MakePoint(_to_lng, _to_lat)),
|
||||
moorage_rec.geog,
|
||||
100 -- in meters ?
|
||||
)
|
||||
ORDER BY _from_time DESC;
|
||||
END;
|
||||
$find_log_from_moorage$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
api.find_log_from_moorage_fn
|
||||
IS 'Find all log from and to moorage geopoint within 100m';
|
||||
|
||||
-- Find all stay within 100m of moorage geopoint
|
||||
DROP FUNCTION IF EXISTS api.find_stay_from_moorage_fn;
|
||||
CREATE FUNCTION api.find_stay_from_moorage_fn(IN _id INTEGER) RETURNS void AS $find_stay_from_moorage$
|
||||
DECLARE
|
||||
moorage_rec record;
|
||||
stay_rec record;
|
||||
BEGIN
|
||||
-- If _id is is not NULL and > 0
|
||||
SELECT * INTO moorage_rec
|
||||
FROM api.moorages m
|
||||
WHERE m.id = _id;
|
||||
-- find all log from and to moorage geopoint within 100m
|
||||
--RETURN QUERY
|
||||
SELECT s.id,s.arrived,s.departed,s.duration,sa.description
|
||||
FROM api.stays s, api.stays_at sa
|
||||
WHERE ST_DWithin(
|
||||
s.geog,
|
||||
moorage_rec.geog,
|
||||
100 -- in meters ?
|
||||
)
|
||||
AND departed IS NOT NULL
|
||||
AND s.name IS NOT NULL
|
||||
AND s.stay_code = sa.stay_code
|
||||
ORDER BY s.arrived DESC;
|
||||
END;
|
||||
$find_stay_from_moorage$ LANGUAGE plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
api.find_stay_from_moorage_fn
|
||||
IS 'Find all stay within 100m of moorage geopoint';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Views
|
||||
-- Views are invoked with the privileges of the view owner,
|
||||
-- make the user_role the view’s owner.
|
||||
--
|
||||
CREATE VIEW first_metric AS
|
||||
SELECT *
|
||||
FROM api.metrics
|
||||
ORDER BY time ASC LIMIT 1;
|
||||
|
||||
CREATE VIEW last_metric AS
|
||||
SELECT *
|
||||
FROM api.metrics
|
||||
ORDER BY time DESC LIMIT 1;
|
||||
|
||||
CREATE VIEW trip_in_progress AS
|
||||
SELECT *
|
||||
FROM api.logbook
|
||||
WHERE active IS true;
|
||||
|
||||
CREATE VIEW stay_in_progress AS
|
||||
SELECT *
|
||||
FROM api.stays
|
||||
WHERE active IS true;
|
||||
|
||||
-- TODO: Use materialized views instead as it is not live data
|
||||
-- Logs web view
|
||||
DROP VIEW IF EXISTS api.logs_view;
|
||||
CREATE OR REPLACE VIEW api.logs_view AS
|
||||
SELECT id,
|
||||
name as "Name",
|
||||
_from as "From",
|
||||
_from_time as "Started",
|
||||
_to as "To",
|
||||
_to_time as "Ended",
|
||||
distance as "Distance",
|
||||
duration as "Duration"
|
||||
FROM api.logbook l
|
||||
WHERE _to_time IS NOT NULL
|
||||
ORDER BY _from_time DESC;
|
||||
-- Description
|
||||
COMMENT ON VIEW
|
||||
api.logs_view
|
||||
IS 'Logs web view';
|
||||
|
||||
DROP VIEW IF EXISTS api.log_view;
|
||||
CREATE OR REPLACE VIEW api.log_view AS
|
||||
SELECT id,
|
||||
name as "Name",
|
||||
_from as "From",
|
||||
_from_time as "Started",
|
||||
_to as "To",
|
||||
_to_time as "Ended",
|
||||
distance as "Distance",
|
||||
duration as "Duration",
|
||||
notes as "Notes",
|
||||
track_geojson as geojson,
|
||||
avg_speed as avg_speed,
|
||||
max_speed as max_speed,
|
||||
max_wind_speed as max_wind_speed
|
||||
FROM api.logbook l
|
||||
WHERE _to_time IS NOT NULL
|
||||
ORDER BY _from_time DESC;
|
||||
-- Description
|
||||
COMMENT ON VIEW
|
||||
api.logs_view
|
||||
IS 'Log web view';
|
||||
|
||||
-- Stays web view
|
||||
-- TODO group by month
|
||||
DROP VIEW IF EXISTS api.stays_view;
|
||||
CREATE VIEW api.stays_view AS
|
||||
SELECT
|
||||
concat(
|
||||
extract(DAYS FROM (s.departed-s.arrived)::interval),
|
||||
' days',
|
||||
--DATE_TRUNC('day', s.departed-s.arrived),
|
||||
' stay at ',
|
||||
s.name,
|
||||
' in ',
|
||||
RTRIM(TO_CHAR(s.departed, 'Month')),
|
||||
' ',
|
||||
TO_CHAR(s.departed, 'YYYY')
|
||||
) as Name,
|
||||
s.name AS Moorage,
|
||||
s.arrived AS Arrived,
|
||||
s.departed AS Departed,
|
||||
sa.description AS "Stayed at",
|
||||
(s.departed-s.arrived) AS Duration
|
||||
FROM api.stays s, api.stays_at sa
|
||||
WHERE departed is not null
|
||||
AND s.name is not null
|
||||
AND s.stay_code = sa.stay_code
|
||||
ORDER BY s.arrived DESC;
|
||||
-- Description
|
||||
COMMENT ON VIEW
|
||||
api.stays_view
|
||||
IS 'Stays web view';
|
||||
|
||||
-- Moorages web view
|
||||
-- TODO, this is wrong using distinct (m.name) should be using postgis geog feature
|
||||
--DROP VIEW IF EXISTS api.moorages_view_old;
|
||||
--CREATE VIEW api.moorages_view_old AS
|
||||
-- SELECT
|
||||
-- m.name AS Moorage,
|
||||
-- sa.description AS "Default Stay",
|
||||
-- sum((m.departed-m.arrived)) OVER (PARTITION by m.name) AS "Total Stay",
|
||||
-- count(m.departed) OVER (PARTITION by m.name) AS "Arrivals & Departures"
|
||||
-- FROM api.moorages m, api.stays_at sa
|
||||
-- WHERE departed is not null
|
||||
-- AND m.name is not null
|
||||
-- AND m.stay_code = sa.stay_code
|
||||
-- GROUP BY m.name,sa.description,m.departed,m.arrived
|
||||
-- ORDER BY 4 DESC;
|
||||
|
||||
-- the good way?
|
||||
DROP VIEW IF EXISTS api.moorages_view;
|
||||
CREATE OR REPLACE VIEW api.moorages_view AS
|
||||
SELECT
|
||||
m.name AS Moorage,
|
||||
sa.description AS "Default Stay",
|
||||
EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS "Total Stay",
|
||||
m.reference_count AS "Arrivals & Departures",
|
||||
m.geog
|
||||
-- m.stay_duration,
|
||||
-- justify_hours ( m.stay_duration )
|
||||
FROM api.moorages m, api.stays_at sa
|
||||
WHERE m.name is not null
|
||||
AND m.stay_code = sa.stay_code
|
||||
GROUP BY m.name,sa.description,m.stay_duration,m.reference_count,m.geog
|
||||
-- ORDER BY 4 DESC;
|
||||
ORDER BY m.reference_count DESC;
|
||||
-- Description
|
||||
COMMENT ON VIEW
|
||||
api.moorages_view
|
||||
IS 'Moorages web view';
|
||||
|
||||
-- All moorage in 100 meters from the start of a logbook.
|
||||
-- ST_DistanceSphere Returns minimum distance in meters between two lon/lat points.
|
||||
--SELECT
|
||||
-- m.name, ST_MakePoint(m._lng,m._lat),
|
||||
-- l._from, ST_MakePoint(l._from_lng,l._from_lat),
|
||||
-- ST_DistanceSphere(ST_MakePoint(m._lng,m._lat), ST_MakePoint(l._from_lng,l._from_lat))
|
||||
-- FROM api.moorages m , api.logbook l
|
||||
-- WHERE ST_DistanceSphere(ST_MakePoint(m._lng,m._lat), ST_MakePoint(l._from_lng,l._from_lat)) <= 100;
|
||||
|
||||
-- Stats web view
|
||||
-- TODO....
|
||||
-- first time entry from metrics
|
||||
----> select * from api.metrics m ORDER BY m.time desc limit 1
|
||||
-- last time entry from metrics
|
||||
----> select * from api.metrics m ORDER BY m.time asc limit 1
|
||||
-- max speed from logbook
|
||||
-- max wind speed from logbook
|
||||
----> select max(l.max_speed) as max_speed, max(l.max_wind_speed) as max_wind_speed from api.logbook l;
|
||||
-- Total Distance from logbook
|
||||
----> select sum(l.distance) as "Total Distance" from api.logbook l;
|
||||
-- Total Time Underway from logbook
|
||||
----> select sum(l.duration) as "Total Time Underway" from api.logbook l;
|
||||
-- Longest Nonstop Sail from logbook, eg longest trip duration and distance
|
||||
----> select max(l.duration),max(l.distance) from api.logbook l;
|
||||
CREATE VIEW api.stats_logs_view AS -- todo
|
||||
WITH
|
||||
meta AS (
|
||||
SELECT m.name FROM api.metadata m ),
|
||||
last_metric AS (
|
||||
SELECT m.time FROM api.metrics m ORDER BY m.time DESC limit 1),
|
||||
first_metric AS (
|
||||
SELECT m.time FROM api.metrics m ORDER BY m.time ASC limit 1),
|
||||
logbook AS (
|
||||
SELECT
|
||||
count(*) AS "Number of Log Entries",
|
||||
max(l.max_speed) AS "Max Speed",
|
||||
max(l.max_wind_speed) AS "Max Wind Speed",
|
||||
sum(l.distance) AS "Total Distance",
|
||||
sum(l.duration) AS "Total Time Underway",
|
||||
concat( max(l.distance), ' NM, ', max(l.duration), ' hours') AS "Longest Nonstop Sail"
|
||||
FROM api.logbook l)
|
||||
SELECT
|
||||
m.name as Name,
|
||||
fm.time AS first,
|
||||
lm.time AS last,
|
||||
l.*
|
||||
FROM first_metric fm, last_metric lm, logbook l, meta m;
|
||||
|
||||
-- Home Ports / Unique Moorages
|
||||
----> select count(*) as "Home Ports" from api.moorages m where home_flag is true;
|
||||
-- Unique Moorages
|
||||
----> select count(*) as "Home Ports" from api.moorages m;
|
||||
-- Time Spent at Home Port(s)
|
||||
----> select sum(m.stay_duration) as "Time Spent at Home Port(s)" from api.moorages m where home_flag is true;
|
||||
-- OR
|
||||
----> select m.stay_duration as "Time Spent at Home Port(s)" from api.moorages m where home_flag is true;
|
||||
-- Time Spent Away
|
||||
----> select sum(m.stay_duration) as "Time Spent Away" from api.moorages m where home_flag is false;
|
||||
-- Time Spent Away order by, group by stay_code (Dock, Anchor, Mooring Buoys, Unclassified)
|
||||
----> select sa.description,sum(m.stay_duration) as "Time Spent Away" from api.moorages m, api.stays_at sa where home_flag is false AND m.stay_code = sa.stay_code group by m.stay_code,sa.description order by m.stay_code;
|
||||
CREATE VIEW api.stats_moorages_view AS -- todo
|
||||
select *
|
||||
from api.moorages;
|
||||
|
||||
--CREATE VIEW api.stats_view AS -- todo
|
||||
-- WITH
|
||||
-- logs AS (
|
||||
-- SELECT * FROM api.stats_logs_view ),
|
||||
-- moorages AS (
|
||||
-- SELECT * FROM api.stats_moorages_view)
|
||||
-- SELECT
|
||||
-- l.*,
|
||||
-- m.*
|
||||
-- FROM logs l, moorages m;
|
||||
|
||||
-- global timelapse
|
||||
-- TODO
|
||||
CREATE VIEW timelapse AS -- todo
|
||||
SELECT latitude, longitude from api.metrics;
|
||||
|
||||
-- View main monitoring for grafana
|
||||
-- LAST Monitoring data from json!
|
||||
CREATE VIEW api.monitoring AS
|
||||
SELECT
|
||||
time AS "time",
|
||||
metrics-> 'environment.water.temperature' AS waterTemperature,
|
||||
metrics-> 'environment.inside.temperature' AS insideTemperature,
|
||||
metrics-> 'environment.outside.temperature' AS outsideTemperature,
|
||||
metrics-> 'environment.wind.speedOverGround' AS windSpeedOverGround,
|
||||
metrics-> 'environment.wind.directionGround' AS windDirectionGround,
|
||||
metrics-> 'environment.inside.humidity' AS insideHumidity,
|
||||
metrics-> 'environment.outside.humidity' AS outsideHumidity,
|
||||
metrics-> 'environment.outside.pressure' AS outsidePressure,
|
||||
metrics-> 'environment.inside.pressure' AS insidePressure
|
||||
FROM api.metrics m
|
||||
ORDER BY time DESC LIMIT 1;
|
||||
|
||||
CREATE VIEW api.monitoring_humidity AS
|
||||
SELECT
|
||||
time AS "time",
|
||||
metrics-> 'environment.inside.humidity' AS insideHumidity,
|
||||
metrics-> 'environment.outside.humidity' AS outsideHumidity
|
||||
FROM api.metrics m
|
||||
ORDER BY time DESC LIMIT 1;
|
||||
|
||||
-- View System RPI monitoring for grafana
|
||||
-- View Electric monitoring for grafana
|
||||
|
||||
-- View main monitoring for grafana
|
||||
-- LAST Monitoring data from json!
|
||||
CREATE VIEW api.monitorin_temperatures AS
|
||||
SELECT
|
||||
time AS "time",
|
||||
metrics-> 'environment.water.temperature' AS waterTemperature,
|
||||
metrics-> 'environment.inside.temperature' AS insideTemperature,
|
||||
metrics-> 'environment.outside.temperature' AS outsideTemperature
|
||||
FROM api.metrics m
|
||||
ORDER BY time DESC LIMIT 1;
|
||||
|
||||
-- json key regexp
|
||||
-- https://stackoverflow.com/questions/38204467/selecting-for-a-jsonb-array-contains-regex-match
|
||||
-- Last voltage data from json!
|
||||
CREATE VIEW api.voltage AS
|
||||
SELECT
|
||||
time AS "time",
|
||||
cast(metrics-> 'electrical.batteries.AUX2.voltage' AS numeric) AS AUX2,
|
||||
cast(metrics-> 'electrical.batteries.House.voltage' AS numeric) AS House,
|
||||
cast(metrics-> 'environment.rpi.pijuice.gpioVoltage' AS numeric) AS gpioVoltage,
|
||||
cast(metrics-> 'electrical.batteries.Seatalk.voltage' AS numeric) AS SeatalkVoltage,
|
||||
cast(metrics-> 'electrical.batteries.Starter.voltage' AS numeric) AS StarterVoltage,
|
||||
cast(metrics-> 'environment.rpi.pijuice.batteryVoltage' AS numeric) AS RPIBatteryVoltage,
|
||||
cast(metrics-> 'electrical.batteries.victronDevice.voltage' AS numeric) AS victronDeviceVoltage
|
||||
FROM api.metrics m
|
||||
ORDER BY time DESC LIMIT 1;
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- API helper functions
|
||||
--
|
||||
|
||||
DROP FUNCTION IF EXISTS api.export_logbook_gpx_py_fn;
|
||||
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_py_fn(IN _id INTEGER) RETURNS XML
|
||||
AS $export_logbook_gpx_py$
|
||||
import uuid
|
||||
|
||||
# BEGIN GPX XML format
|
||||
gpx_data = f"""<?xml version="1.0"?>
|
||||
<gpx version="1.1" creator="PostgSAIL" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.topografix.com/GPX/1/1" xmlns:gpxx="http://www.garmin.com/xmlschemas/GpxExtensions/v3" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd" xmlns:opencpn="http://www.opencpn.org">
|
||||
<trk>
|
||||
<link href="https://openplotter.cloud/log/{_id}">
|
||||
<text>openplotter trip log todo</text>
|
||||
</link>
|
||||
<extensions>
|
||||
<opencpn:guid>{uuid.uuid4()}</opencpn:guid>
|
||||
<opencpn:viz>1</opencpn:viz>
|
||||
<opencpn:start>{mytrack[0]['time']}</opencpn:start>
|
||||
<opencpn:end>{mytrack[-1]['time']}</opencpn:end>
|
||||
</extensions>
|
||||
<trkseg>\n""";
|
||||
##print(gpx_data)
|
||||
# LOOP through log entry
|
||||
for entry in mytrack:
|
||||
##print(entry['time'])
|
||||
gpx_data += f""" <trkpt lat="{entry['lat']}" lon="{entry['lng']}">
|
||||
<time>{entry['time']}</time>
|
||||
</trkpt>\n""";
|
||||
|
||||
# END GPX XML format
|
||||
gpx_data += """ </trkseg>
|
||||
</trk>
|
||||
</gpx>""";
|
||||
|
||||
return gpx_data
|
||||
$export_logbook_gpx_py$ LANGUAGE plpython3u;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
api.export_logbook_gpx_py_fn
|
||||
IS 'TODO, Export a log entry to GPX XML format using plpython3u';
|
||||
|
||||
--DROP FUNCTION IF EXISTS api.export_logbook_csv_fn;
|
||||
--CREATE OR REPLACE FUNCTION api.export_logbook_csv_fn(IN _id INTEGER) RETURNS void
|
||||
--AS $export_logbook_csv$
|
||||
-- TODO
|
||||
--$export_logbook_csv$ language plpgsql;
|
||||
-- Description
|
||||
--COMMENT ON FUNCTION
|
||||
-- api.export_logbook_csv_fn
|
||||
-- IS 'TODO, ...';
|
||||
|
||||
DROP FUNCTION IF EXISTS api.log_geojson_fn;
|
||||
CREATE FUNCTION api.log_geojson_fn(IN _id INTEGER, OUT log_map JSON) RETURNS JSON AS $export_log$
|
||||
declare
|
||||
log_geojson jsonb;
|
||||
metrics_geojson jsonb;
|
||||
_map jsonb;
|
||||
begin
|
||||
-- GeoJson Feature Logbook linestring
|
||||
SELECT
|
||||
ST_AsGeoJSON(l.*) into log_geojson
|
||||
FROM
|
||||
api.logbook l
|
||||
WHERE l.id = _id;
|
||||
-- GeoJson Feature Metrics point
|
||||
SELECT
|
||||
json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
|
||||
FROM (
|
||||
(
|
||||
select
|
||||
time,
|
||||
courseovergroundtrue,
|
||||
speedoverground,
|
||||
anglespeedapparent,
|
||||
longitude,latitude,
|
||||
st_makepoint(longitude,latitude) AS geo_point
|
||||
FROM api.metrics m
|
||||
WHERE m.latitude IS NOT NULL
|
||||
AND m.longitude IS NOT NULL
|
||||
AND m.time >= '2022-08-27 20:00:34.000'
|
||||
AND m.time <= '2022-08-27 20:29:34.000'
|
||||
ORDER BY m.time asc
|
||||
)
|
||||
) AS t;
|
||||
|
||||
-- Add Linestring into Point array
|
||||
SELECT log_geojson::jsonb || metrics_geojson::jsonb into _map;
|
||||
|
||||
-- Build Geojson FeatureCollection
|
||||
SELECT
|
||||
json_build_object(
|
||||
'type', 'FeatureCollection',
|
||||
'features', _map
|
||||
) into log_map;
|
||||
END;
|
||||
$export_log$ LANGUAGE plpgsql;
|
||||
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
api.log_geojson_fn
|
||||
IS 'TODO';
|
@@ -18,7 +18,7 @@ begin
|
||||
FOR process_rec in
|
||||
SELECT * FROM process_queue
|
||||
WHERE channel = 'new_logbook' AND processed IS NULL
|
||||
ORDER BY stored ASC
|
||||
ORDER BY stored ASC LIMIT 100
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_new_logbook_fn [%]', process_rec.payload;
|
||||
-- update logbook
|
||||
@@ -28,7 +28,7 @@ begin
|
||||
SET
|
||||
processed = NOW()
|
||||
WHERE id = process_rec.id;
|
||||
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
|
||||
RAISE NOTICE '-> cron_process_new_logbook_fn updated process_queue table [%]', process_rec.id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
@@ -47,7 +47,7 @@ begin
|
||||
FOR process_rec in
|
||||
SELECT * FROM process_queue
|
||||
WHERE channel = 'new_stay' AND processed IS NULL
|
||||
ORDER BY stored ASC
|
||||
ORDER BY stored ASC LIMIT 100
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_new_stay_fn [%]', process_rec.payload;
|
||||
-- update stay
|
||||
@@ -57,7 +57,7 @@ begin
|
||||
SET
|
||||
processed = NOW()
|
||||
WHERE id = process_rec.id;
|
||||
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
|
||||
RAISE NOTICE '-> cron_process_new_stay_fn updated process_queue table [%]', process_rec.id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
@@ -77,7 +77,7 @@ begin
|
||||
FOR process_rec in
|
||||
SELECT * FROM process_queue
|
||||
WHERE channel = 'new_moorage' AND processed IS NULL
|
||||
ORDER BY stored ASC
|
||||
ORDER BY stored ASC LIMIT 100
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_new_moorage_fn [%]', process_rec.payload;
|
||||
-- update moorage
|
||||
@@ -87,7 +87,7 @@ begin
|
||||
SET
|
||||
processed = NOW()
|
||||
WHERE id = process_rec.id;
|
||||
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
|
||||
RAISE NOTICE '-> cron_process_new_moorage_fn updated process_queue table [%]', process_rec.id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
@@ -123,29 +123,38 @@ begin
|
||||
SET
|
||||
active = False
|
||||
WHERE id = metadata_rec.id;
|
||||
RAISE NOTICE '-> updated api.metadata table to inactive for [%]', metadata_rec.id;
|
||||
|
||||
IF metadata_rec.vessel_id IS NULL OR metadata_rec.vessel_id = '' THEN
|
||||
RAISE WARNING '-> cron_process_monitor_offline_fn invalid metadata record vessel_id %', vessel_id;
|
||||
RAISE EXCEPTION 'Invalid metadata'
|
||||
USING HINT = 'Unknow vessel_id';
|
||||
RETURN;
|
||||
END IF;
|
||||
PERFORM set_config('vessel.id', metadata_rec.vessel_id, false);
|
||||
RAISE DEBUG '-> DEBUG cron_process_monitor_offline_fn vessel.id %', current_setting('vessel.id', false);
|
||||
RAISE NOTICE '-> cron_process_monitor_offline_fn updated api.metadata table to inactive for [%] [%]', metadata_rec.id, metadata_rec.vessel_id;
|
||||
|
||||
-- Gather email and pushover app settings
|
||||
app_settings = get_app_settings_fn();
|
||||
--app_settings = get_app_settings_fn();
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_metadata_fn(metadata_rec.id::INTEGER);
|
||||
--user_settings := get_user_settings_from_clientid_fn(metadata_rec.id::INTEGER);
|
||||
RAISE DEBUG '-> debug monitor_offline get_user_settings_from_metadata_fn [%]', user_settings;
|
||||
user_settings := get_user_settings_from_vesselid_fn(metadata_rec.vessel_id::TEXT);
|
||||
RAISE DEBUG '-> cron_process_monitor_offline_fn get_user_settings_from_vesselid_fn [%]', user_settings;
|
||||
-- Send notification
|
||||
--PERFORM send_notification_fn('monitor_offline'::TEXT, metadata_rec::RECORD);
|
||||
PERFORM send_email_py_fn('monitor_offline'::TEXT, user_settings::JSONB, app_settings::JSONB);
|
||||
PERFORM send_notification_fn('monitor_offline'::TEXT, user_settings::JSONB);
|
||||
--PERFORM send_email_py_fn('monitor_offline'::TEXT, user_settings::JSONB, app_settings::JSONB);
|
||||
--PERFORM send_pushover_py_fn('monitor_offline'::TEXT, user_settings::JSONB, app_settings::JSONB);
|
||||
-- log/insert/update process_queue table with processed
|
||||
INSERT INTO process_queue
|
||||
(channel, payload, stored, processed)
|
||||
(channel, payload, stored, processed, ref_id)
|
||||
VALUES
|
||||
('monitoring_offline', metadata_rec.id, metadata_rec.interval, now())
|
||||
('monitoring_offline', metadata_rec.id, metadata_rec.interval, now(), metadata_rec.vessel_id)
|
||||
RETURNING id INTO process_id;
|
||||
RAISE NOTICE '-> updated process_queue table [%]', process_id;
|
||||
RAISE NOTICE '-> cron_process_monitor_offline_fn updated process_queue table [%]', process_id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
COMMENT ON FUNCTION
|
||||
public.cron_process_monitor_offline_fn
|
||||
IS 'init by pg_cron to monitor offline pending notification, if so perform send_email o send_pushover base on user preferences';
|
||||
|
||||
@@ -169,27 +178,36 @@ begin
|
||||
SELECT * INTO metadata_rec
|
||||
FROM api.metadata
|
||||
WHERE id = process_rec.payload::INTEGER;
|
||||
|
||||
IF metadata_rec.vessel_id IS NULL OR metadata_rec.vessel_id = '' THEN
|
||||
RAISE WARNING '-> cron_process_monitor_online_fn invalid metadata record vessel_id %', vessel_id;
|
||||
RAISE EXCEPTION 'Invalid metadata'
|
||||
USING HINT = 'Unknow vessel_id';
|
||||
RETURN;
|
||||
END IF;
|
||||
PERFORM set_config('vessel.id', metadata_rec.vessel_id, false);
|
||||
RAISE DEBUG '-> DEBUG cron_process_monitor_online_fn vessel_id %', current_setting('vessel.id', false);
|
||||
|
||||
-- Gather email and pushover app settings
|
||||
app_settings = get_app_settings_fn();
|
||||
--app_settings = get_app_settings_fn();
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_metadata_fn(metadata_rec.id::INTEGER);
|
||||
--user_settings := get_user_settings_from_clientid_fn((metadata_rec.client_id::INTEGER, );
|
||||
RAISE NOTICE '-> debug monitor_online get_user_settings_from_metadata_fn [%]', user_settings;
|
||||
user_settings := get_user_settings_from_vesselid_fn(metadata_rec.vessel_id::TEXT);
|
||||
RAISE DEBUG '-> DEBUG cron_process_monitor_online_fn get_user_settings_from_vesselid_fn [%]', user_settings;
|
||||
-- Send notification
|
||||
--PERFORM send_notification_fn('monitor_online'::TEXT, metadata_rec::RECORD);
|
||||
PERFORM send_email_py_fn('monitor_online'::TEXT, user_settings::JSONB, app_settings::JSONB);
|
||||
PERFORM send_notification_fn('monitor_online'::TEXT, user_settings::JSONB);
|
||||
--PERFORM send_email_py_fn('monitor_online'::TEXT, user_settings::JSONB, app_settings::JSONB);
|
||||
--PERFORM send_pushover_py_fn('monitor_online'::TEXT, user_settings::JSONB, app_settings::JSONB);
|
||||
-- update process_queue entry as processed
|
||||
UPDATE process_queue
|
||||
SET
|
||||
processed = NOW()
|
||||
WHERE id = process_rec.id;
|
||||
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
|
||||
RAISE NOTICE '-> cron_process_monitor_online_fn updated process_queue table [%]', process_rec.id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
COMMENT ON FUNCTION
|
||||
public.cron_process_monitor_online_fn
|
||||
IS 'init by pg_cron to monitor back online pending notification, if so perform send_email or send_pushover base on user preferences';
|
||||
|
||||
@@ -213,14 +231,43 @@ begin
|
||||
SET
|
||||
processed = NOW()
|
||||
WHERE id = process_rec.id;
|
||||
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
|
||||
RAISE NOTICE '-> cron_process_new_account_fn updated process_queue table [%]', process_rec.id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
COMMENT ON FUNCTION
|
||||
public.cron_process_new_account_fn
|
||||
IS 'init by pg_cron to check for new account pending update, if so perform process_account_queue_fn';
|
||||
IS 'deprecated, init by pg_cron to check for new account pending update, if so perform process_account_queue_fn';
|
||||
|
||||
-- CRON for new account pending otp validation notification
|
||||
CREATE FUNCTION cron_process_new_account_otp_validation_fn() RETURNS void AS $$
|
||||
declare
|
||||
process_rec record;
|
||||
begin
|
||||
-- Check for new account pending update
|
||||
RAISE NOTICE 'cron_process_new_account_otp_validation_fn';
|
||||
FOR process_rec in
|
||||
SELECT * from process_queue
|
||||
where channel = 'new_account_otp' and processed is null
|
||||
order by stored asc
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_new_account_otp_validation_fn [%]', process_rec.payload;
|
||||
-- update account
|
||||
PERFORM process_account_otp_validation_queue_fn(process_rec.payload::TEXT);
|
||||
-- update process_queue entry as processed
|
||||
UPDATE process_queue
|
||||
SET
|
||||
processed = NOW()
|
||||
WHERE id = process_rec.id;
|
||||
RAISE NOTICE '-> cron_process_new_account_otp_validation_fn updated process_queue table [%]', process_rec.id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.cron_process_new_account_otp_validation_fn
|
||||
IS 'deprecated, init by pg_cron to check for new account otp pending update, if so perform process_account_otp_validation_queue_fn';
|
||||
|
||||
-- CRON for new vessel pending notification
|
||||
CREATE FUNCTION cron_process_new_vessel_fn() RETURNS void AS $$
|
||||
@@ -242,21 +289,53 @@ begin
|
||||
SET
|
||||
processed = NOW()
|
||||
WHERE id = process_rec.id;
|
||||
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
|
||||
RAISE NOTICE '-> cron_process_new_vessel_fn updated process_queue table [%]', process_rec.id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.cron_process_new_vessel_fn
|
||||
IS 'init by pg_cron to check for new vessel pending update, if so perform process_vessel_queue_fn';
|
||||
IS 'deprecated, init by pg_cron to check for new vessel pending update, if so perform process_vessel_queue_fn';
|
||||
|
||||
-- CRON for new event notification
|
||||
CREATE FUNCTION cron_process_new_notification_fn() RETURNS void AS $$
|
||||
declare
|
||||
process_rec record;
|
||||
begin
|
||||
-- Check for new event notification pending update
|
||||
RAISE NOTICE 'cron_process_new_notification_fn';
|
||||
FOR process_rec in
|
||||
SELECT * FROM process_queue
|
||||
WHERE
|
||||
(channel = 'new_account' OR channel = 'new_vessel' OR channel = 'email_otp')
|
||||
and processed is null
|
||||
order by stored asc
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_new_notification_fn for [%]', process_rec.payload;
|
||||
-- process_notification_queue
|
||||
PERFORM process_notification_queue_fn(process_rec.payload::TEXT, process_rec.channel::TEXT);
|
||||
-- update process_queue entry as processed
|
||||
UPDATE process_queue
|
||||
SET
|
||||
processed = NOW()
|
||||
WHERE id = process_rec.id;
|
||||
RAISE NOTICE '-> cron_process_new_notification_fn updated process_queue table [%]', process_rec.id;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.cron_process_new_notification_fn
|
||||
IS 'init by pg_cron to check for new event pending notifications, if so perform process_notification_queue_fn';
|
||||
|
||||
-- CRON for Vacuum database
|
||||
CREATE FUNCTION cron_vaccum_fn() RETURNS void AS $$
|
||||
CREATE FUNCTION cron_vacuum_fn() RETURNS void AS $$
|
||||
-- ERROR: VACUUM cannot be executed from a function
|
||||
declare
|
||||
begin
|
||||
-- Vacuum
|
||||
RAISE NOTICE 'cron_vaccum_fn';
|
||||
RAISE NOTICE 'cron_vacuum_fn';
|
||||
VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.logbook;
|
||||
VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.stays;
|
||||
VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.moorages;
|
||||
@@ -266,5 +345,44 @@ END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.cron_vaccum_fn
|
||||
IS 'init by pg_cron to full vaccum tables on schema api';
|
||||
public.cron_vacuum_fn
|
||||
IS 'init by pg_cron to full vacuum tables on schema api';
|
||||
|
||||
-- CRON for clean up job details logs
|
||||
CREATE FUNCTION job_run_details_cleanup_fn() RETURNS void AS $$
|
||||
DECLARE
|
||||
BEGIN
|
||||
-- Remove job run log older than 3 months
|
||||
RAISE NOTICE 'job_run_details_cleanup_fn';
|
||||
DELETE FROM postgres.cron.job_run_details
|
||||
WHERE start_time <= NOW() AT TIME ZONE 'UTC' - INTERVAL '91 DAYS';
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.job_run_details_cleanup_fn
|
||||
IS 'init by pg_cron to cleanup job_run_details table on schema public postgres db';
|
||||
|
||||
-- CRON for alerts notification
|
||||
CREATE FUNCTION cron_process_alerts_fn() RETURNS void AS $$
|
||||
DECLARE
|
||||
alert_rec record;
|
||||
BEGIN
|
||||
-- Check for new event notification pending update
|
||||
RAISE NOTICE 'cron_process_alerts_fn';
|
||||
FOR alert_rec in
|
||||
SELECT
|
||||
a.user_id,a.email,v.vessel_id
|
||||
FROM auth.accounts a, auth.vessels v, api.metadata m
|
||||
WHERE m.vessel_id = v.vessel_id
|
||||
AND a.email = v.owner_email
|
||||
AND (preferences->'alerting'->'enabled')::boolean = false
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_alert_rec_fn for [%]', alert_rec;
|
||||
END LOOP;
|
||||
END;
|
||||
$$ language plpgsql;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
public.cron_process_alerts_fn
|
||||
IS 'init by pg_cron to check for alerts, if so perform process_alerts_queue_fn';
|
||||
|
891
initdb/02_3_1_signalk_public_tables.sql
Normal file
891
initdb/02_3_1_signalk_public_tables.sql
Normal file
@@ -0,0 +1,891 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- singalk db public schema tables
|
||||
--
|
||||
|
||||
-- List current database
|
||||
select current_database();
|
||||
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
CREATE SCHEMA IF NOT EXISTS public;
|
||||
COMMENT ON SCHEMA public IS 'backend public functions and tables';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Table geocoders
|
||||
--
|
||||
-- https://github.com/CartoDB/labs-postgresql/blob/master/workshop/plpython.md
|
||||
--
|
||||
CREATE TABLE IF NOT EXISTS geocoders(
|
||||
name TEXT UNIQUE,
|
||||
url TEXT,
|
||||
reverse_url TEXT
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
public.geocoders
|
||||
IS 'geo service nominatim url';
|
||||
|
||||
INSERT INTO geocoders VALUES
|
||||
('nominatim',
|
||||
NULL,
|
||||
'https://nominatim.openstreetmap.org/reverse');
|
||||
-- https://photon.komoot.io/reverse?lat=48.30587233333333&lon=14.3040525
|
||||
-- https://docs.mapbox.com/playground/geocoding/?search_text=-3.1457869856990897,51.35921326434686&limit=1
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Tables for message template email/pushover/telegram
|
||||
--
|
||||
DROP TABLE IF EXISTS public.email_templates;
|
||||
CREATE TABLE IF NOT EXISTS public.email_templates(
|
||||
name TEXT UNIQUE,
|
||||
email_subject TEXT,
|
||||
email_content TEXT,
|
||||
pushover_title TEXT,
|
||||
pushover_message TEXT
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
public.email_templates
|
||||
IS 'email/message templates for notifications';
|
||||
|
||||
-- with escape value, eg: E'A\nB\r\nC'
|
||||
-- https://stackoverflow.com/questions/26638615/insert-line-break-in-postgresql-when-updating-text-field
|
||||
-- TODO Update notification subject for log entry to 'logbook #NB ...'
|
||||
INSERT INTO email_templates VALUES
|
||||
('logbook',
|
||||
'New Logbook Entry',
|
||||
E'Hello __RECIPIENT__,\n\nWe just wanted to let you know that you have a new entry on openplotter.cloud: "__LOGBOOK_NAME__"\r\n\r\nSee more details at __APP_URL__/log/__LOGBOOK_LINK__\n\nHappy sailing!\nThe PostgSail Team',
|
||||
'New Logbook Entry',
|
||||
E'New entry on openplotter.cloud: "__LOGBOOK_NAME__"\r\nSee more details at __APP_URL__/log/__LOGBOOK_LINK__\n'),
|
||||
('new_account',
|
||||
'Welcome',
|
||||
E'Hello __RECIPIENT__,\nCongratulations!\nYou successfully created an account.\nKeep in mind to register your vessel.\nHappy sailing!',
|
||||
'Welcome',
|
||||
E'Hi!\nYou successfully created an account\nKeep in mind to register your vessel.\n'),
|
||||
('new_vessel',
|
||||
'New vessel',
|
||||
E'Hi!\nHow are you?\n__BOAT__ is now linked to your account.\n',
|
||||
'New vessel',
|
||||
E'Hi!\nHow are you?\n__BOAT__ is now linked to your account.\n'),
|
||||
('monitor_offline',
|
||||
'Vessel Offline',
|
||||
E'__BOAT__ has been offline for more than an hour\r\nFind more details at __APP_URL__/boats\n',
|
||||
'Vessel Offline',
|
||||
E'__BOAT__ has been offline for more than an hour\r\nFind more details at __APP_URL__/boats\n'),
|
||||
('monitor_online',
|
||||
'Vessel Online',
|
||||
E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n',
|
||||
'Vessel Online',
|
||||
E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n'),
|
||||
('new_badge',
|
||||
'New Badge!',
|
||||
E'Hello __RECIPIENT__,\nCongratulations! You have just unlocked a new badge: __BADGE_NAME__\nSee more details at __APP_URL__/badges\nHappy sailing!\nThe PostgSail Team',
|
||||
'New Badge!',
|
||||
E'Congratulations!\nYou have just unlocked a new badge: __BADGE_NAME__\nSee more details at __APP_URL__/badges\n'),
|
||||
('pushover_valid',
|
||||
'Pushover integration',
|
||||
E'Hello __RECIPIENT__,\nCongratulations! You have just connect your account to Pushover.\n\nThe PostgSail Team',
|
||||
'Pushover integration!',
|
||||
E'Congratulations!\nYou have just connect your account to Pushover.\n'),
|
||||
('email_otp',
|
||||
'Email verification',
|
||||
E'Hello,\nPlease active your account using the following code: __OTP_CODE__.\nThe code is valid 15 minutes.\nThe PostgSail Team',
|
||||
'Email verification',
|
||||
E'Congratulations!\nPlease validate your account. Check your email!'),
|
||||
('email_valid',
|
||||
'Email verified',
|
||||
E'Hello,\nCongratulations!\nYou successfully validate your account.\nThe PostgSail Team',
|
||||
'Email verified',
|
||||
E'Hi!\nYou successfully validate your account.\n'),
|
||||
('email_reset',
|
||||
'Password reset',
|
||||
E'Hello,\nYou requested a password reset. To reset your password __APP_URL__/reset-password?__RESET_QS__.\nThe PostgSail Team',
|
||||
'Password reset',
|
||||
E'You requested a password recovery. Check your email!\n'),
|
||||
('telegram_otp',
|
||||
'Telegram bot',
|
||||
E'Hello,\nTo connect your account to a @postgsail_bot. Please type this verification code __OTP_CODE__ back to the bot.\nThe code is valid 15 minutes.\nThe PostgSail Team',
|
||||
'Telegram bot',
|
||||
E'Hello,\nTo connect your account to a @postgsail_bot. Check your email!\n'),
|
||||
('telegram_valid',
|
||||
'Telegram bot',
|
||||
E'Hello __RECIPIENT__,\nCongratulations! You have just connect your account to your vessel, @postgsail_bot.\n\nThe PostgSail Team',
|
||||
'Telegram bot!',
|
||||
E'Congratulations!\nYou have just connect your account to your vessel, @postgsail_bot.\n');
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Queue handling
|
||||
--
|
||||
-- https://gist.github.com/kissgyorgy/beccba1291de962702ea9c237a900c79
|
||||
-- https://www.depesz.com/2012/06/13/how-to-send-mail-from-database/
|
||||
|
||||
-- Listen/Notify way
|
||||
--create function new_logbook_entry() returns trigger as $$
|
||||
--begin
|
||||
-- perform pg_notify('new_logbook_entry', NEW.id::text);
|
||||
-- return NEW;
|
||||
--END;
|
||||
--$$ language plpgsql;
|
||||
|
||||
-- table way
|
||||
CREATE TABLE IF NOT EXISTS public.process_queue (
|
||||
id SERIAL PRIMARY KEY,
|
||||
channel TEXT NOT NULL,
|
||||
payload TEXT NOT NULL,
|
||||
ref_id TEXT NOT NULL,
|
||||
stored TIMESTAMP WITHOUT TIME ZONE NOT NULL,
|
||||
processed TIMESTAMP WITHOUT TIME ZONE DEFAULT NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
public.process_queue
|
||||
IS 'process queue for async job';
|
||||
-- Index
|
||||
CREATE INDEX ON public.process_queue (channel);
|
||||
CREATE INDEX ON public.process_queue (stored);
|
||||
CREATE INDEX ON public.process_queue (processed);
|
||||
|
||||
COMMENT ON COLUMN public.process_queue.ref_id IS 'either user_id or vessel_id';
|
||||
|
||||
-- Function process_queue helpers
|
||||
create function new_account_entry_fn() returns trigger as $new_account_entry$
|
||||
begin
|
||||
insert into process_queue (channel, payload, stored, ref_id) values ('new_account', NEW.email, now(), NEW.user_id);
|
||||
return NEW;
|
||||
END;
|
||||
$new_account_entry$ language plpgsql;
|
||||
|
||||
create function new_account_otp_validation_entry_fn() returns trigger as $new_account_otp_validation_entry$
|
||||
begin
|
||||
insert into process_queue (channel, payload, stored, ref_id) values ('email_otp', NEW.email, now(), NEW.user_id);
|
||||
return NEW;
|
||||
END;
|
||||
$new_account_otp_validation_entry$ language plpgsql;
|
||||
|
||||
create function new_vessel_entry_fn() returns trigger as $new_vessel_entry$
|
||||
begin
|
||||
insert into process_queue (channel, payload, stored, ref_id) values ('new_vessel', NEW.owner_email, now(), NEW.vessel_id);
|
||||
return NEW;
|
||||
END;
|
||||
$new_vessel_entry$ language plpgsql;
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Tables Application Settings
|
||||
-- https://dba.stackexchange.com/questions/27296/storing-application-settings-with-different-datatypes#27297
|
||||
-- https://stackoverflow.com/questions/6893780/how-to-store-site-wide-settings-in-a-database
|
||||
-- http://cvs.savannah.gnu.org/viewvc/*checkout*/gnumed/gnumed/gnumed/server/sql/gmconfiguration.sql
|
||||
|
||||
CREATE TABLE IF NOT EXISTS public.app_settings (
|
||||
name TEXT NOT NULL UNIQUE,
|
||||
value TEXT NOT NULL
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE public.app_settings IS 'application settings';
|
||||
COMMENT ON COLUMN public.app_settings.name IS 'application settings name key';
|
||||
COMMENT ON COLUMN public.app_settings.value IS 'application settings value';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Badges description
|
||||
--
|
||||
DROP TABLE IF EXISTS public.badges;
|
||||
CREATE TABLE IF NOT EXISTS public.badges(
|
||||
name TEXT UNIQUE,
|
||||
description TEXT
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
public.badges
|
||||
IS 'Badges descriptions';
|
||||
|
||||
INSERT INTO badges VALUES
|
||||
('Helmsman',
|
||||
'Nice work logging your first sail! You are officially a helmsman now!'),
|
||||
('Wake Maker',
|
||||
'Yowzers! Welcome to the 15 knot+ club ya speed demon skipper!'),
|
||||
('Explorer',
|
||||
'It looks like home is where the helm is. Cheers to 10 days away from home port!'),
|
||||
('Mooring Pro',
|
||||
'It takes a lot of skill to "thread that floating needle" but seems like you have mastered mooring with 10 nights on buoy!'),
|
||||
('Anchormaster',
|
||||
'Hook, line and sinker, you have this anchoring thing down! 25 days on the hook for you!'),
|
||||
('Traveler todo',
|
||||
'Who needs to fly when one can sail! You are an international sailor. À votre santé!'),
|
||||
('Stormtrooper',
|
||||
'Just like the elite defenders of the Empire, here you are, our braving your own hydro-empire in windspeeds above 30kts. Nice work trooper! '),
|
||||
('Club Alaska',
|
||||
'Home to the bears, glaciers, midnight sun and high adventure. Welcome to the Club Alaska Captain!'),
|
||||
('Tropical Traveler',
|
||||
'Look at you with your suntan, tropical drink and southern latitude!'),
|
||||
('Aloha Award',
|
||||
'Ticking off over 2300 NM across the great blue Pacific makes you the rare recipient of the Aloha Award. Well done and Aloha sailor!'),
|
||||
('Navigator Award',
|
||||
'Woohoo! You made it, Ticking off over 100NM in one go, well done sailor!'),
|
||||
('Captain Award',
|
||||
'Congratulation, you reach over 1000NM, well done sailor!');
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- aistypes description
|
||||
--
|
||||
DROP TABLE IF EXISTS public.aistypes;
|
||||
CREATE TABLE IF NOT EXISTS aistypes(
|
||||
id NUMERIC UNIQUE,
|
||||
description TEXT
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
public.aistypes
|
||||
IS 'aistypes AIS Ship Types, https://api.vesselfinder.com/docs/ref-aistypes.html';
|
||||
|
||||
-- AIS ship-type reference data; target schema-qualified for consistency
-- with the surrounding DDL.
INSERT INTO public.aistypes VALUES
    (0, 'Not available (default)'),
    (20, 'Wing in ground (WIG), all ships of this type'),
    (21, 'Wing in ground (WIG), Hazardous category A'),
    (22, 'Wing in ground (WIG), Hazardous category B'),
    (23, 'Wing in ground (WIG), Hazardous category C'),
    (24, 'Wing in ground (WIG), Hazardous category D'),
    (25, 'Wing in ground (WIG), Reserved for future use'),
    (26, 'Wing in ground (WIG), Reserved for future use'),
    (27, 'Wing in ground (WIG), Reserved for future use'),
    (28, 'Wing in ground (WIG), Reserved for future use'),
    (29, 'Wing in ground (WIG), Reserved for future use'),
    (30, 'Fishing'),
    (31, 'Towing'),
    (32, 'Towing: length exceeds 200m or breadth exceeds 25m'),
    (33, 'Dredging or underwater ops'),
    (34, 'Diving ops'),
    (35, 'Military ops'),
    (36, 'Sailing'),
    (37, 'Pleasure Craft'),
    (38, 'Reserved'),
    (39, 'Reserved'),
    (40, 'High speed craft (HSC), all ships of this type'),
    (41, 'High speed craft (HSC), Hazardous category A'),
    (42, 'High speed craft (HSC), Hazardous category B'),
    (43, 'High speed craft (HSC), Hazardous category C'),
    (44, 'High speed craft (HSC), Hazardous category D'),
    (45, 'High speed craft (HSC), Reserved for future use'),
    (46, 'High speed craft (HSC), Reserved for future use'),
    (47, 'High speed craft (HSC), Reserved for future use'),
    (48, 'High speed craft (HSC), Reserved for future use'),
    (49, 'High speed craft (HSC), No additional information'),
    (50, 'Pilot Vessel'),
    (51, 'Search and Rescue vessel'),
    (52, 'Tug'),
    (53, 'Port Tender'),
    (54, 'Anti-pollution equipment'),
    (55, 'Law Enforcement'),
    (56, 'Spare - Local Vessel'),
    (57, 'Spare - Local Vessel'),
    (58, 'Medical Transport'),
    (59, 'Noncombatant ship according to RR Resolution No. 18'),
    (60, 'Passenger, all ships of this type'),
    (61, 'Passenger, Hazardous category A'),
    (62, 'Passenger, Hazardous category B'),
    (63, 'Passenger, Hazardous category C'),
    (64, 'Passenger, Hazardous category D'),
    (65, 'Passenger, Reserved for future use'),
    (66, 'Passenger, Reserved for future use'),
    (67, 'Passenger, Reserved for future use'),
    (68, 'Passenger, Reserved for future use'),
    (69, 'Passenger, No additional information'),
    (70, 'Cargo, all ships of this type'),
    (71, 'Cargo, Hazardous category A'),
    (72, 'Cargo, Hazardous category B'),
    (73, 'Cargo, Hazardous category C'),
    (74, 'Cargo, Hazardous category D'),
    (75, 'Cargo, Reserved for future use'),
    (76, 'Cargo, Reserved for future use'),
    (77, 'Cargo, Reserved for future use'),
    (78, 'Cargo, Reserved for future use'),
    (79, 'Cargo, No additional information'),
    (80, 'Tanker, all ships of this type'),
    (81, 'Tanker, Hazardous category A'),
    (82, 'Tanker, Hazardous category B'),
    (83, 'Tanker, Hazardous category C'),
    (84, 'Tanker, Hazardous category D'),
    (85, 'Tanker, Reserved for future use'),
    (86, 'Tanker, Reserved for future use'),
    (87, 'Tanker, Reserved for future use'),
    (88, 'Tanker, Reserved for future use'),
    (89, 'Tanker, No additional information'),
    (90, 'Other Type, all ships of this type'),
    (91, 'Other Type, Hazardous category A'),
    (92, 'Other Type, Hazardous category B'),
    (93, 'Other Type, Hazardous category C'),
    (94, 'Other Type, Hazardous category D'),
    (95, 'Other Type, Reserved for future use'),
    (96, 'Other Type, Reserved for future use'),
    (97, 'Other Type, Reserved for future use'),
    (98, 'Other Type, Reserved for future use'),
    (99, 'Other Type, no additional information');
|
||||
---------------------------------------------------------------------------
-- MMSI MID Codes
--
DROP TABLE IF EXISTS public.mid;
CREATE TABLE IF NOT EXISTS public.mid(
    country TEXT,       -- flag of registration, human readable
    id NUMERIC UNIQUE,  -- 3-digit Maritime Identification Digits (MID)
    country_id INTEGER  -- numeric country code; values match public.iso3166.id, NULL when no mapping
);
-- Description
COMMENT ON TABLE
    public.mid
    IS 'MMSI MID Codes (Maritime Mobile Service Identity) Filtered by Flag of Registration, https://www.marinevesseltraffic.com/2013/11/mmsi-mid-codes-by-flag.html';
||||
-- MID -> flag reference data.
-- Fixes vs. the previous revision (cross-checked against public.iso3166 below):
--   * Falkland Islands: country_id 234 -> 238 (234 is the Faroe Islands)
--   * French Polynesia: country_id 260 -> 258 (260 is the French Southern Territories)
--   * Guiana (MID 745 = French Guiana): country_id 324 -> 254 (324 is Guinea)
-- NOTE(review): several territories carry NULL country_id even though an ISO
-- numeric exists (e.g. Faroe 234, Vatican 336, DR Congo 180) — left as-is, confirm intent.
INSERT INTO mid VALUES
    ('Adelie Land', 501, NULL),
    ('Afghanistan', 401, 4),
    ('Alaska', 303, 840),
    ('Albania', 201, 8),
    ('Algeria', 605, 12),
    ('American Samoa', 559, 16),
    ('Andorra', 202, 20),
    ('Angola', 603, 24),
    ('Anguilla', 301, 660),
    ('Antigua and Barbuda', 304, 28),
    ('Antigua and Barbuda', 305, 28),
    ('Argentina', 701, 32),
    ('Armenia', 216, 51),
    ('Aruba', 307, 533),
    ('Ascension Island', 608, NULL),
    ('Australia', 503, 36),
    ('Austria', 203, 40),
    ('Azerbaijan', 423, 31),
    ('Azores', 204, NULL),
    ('Bahamas', 308, 44),
    ('Bahamas', 309, 44),
    ('Bahamas', 311, 44),
    ('Bahrain', 408, 48),
    ('Bangladesh', 405, 50),
    ('Barbados', 314, 52),
    ('Belarus', 206, 112),
    ('Belgium', 205, 56),
    ('Belize', 312, 84),
    ('Benin', 610, 204),
    ('Bermuda', 310, 60),
    ('Bhutan', 410, 64),
    ('Bolivia', 720, 68),
    ('Bosnia and Herzegovina', 478, 70),
    ('Botswana', 611, 72),
    ('Brazil', 710, 76),
    ('British Virgin Islands', 378, 92),
    ('Brunei Darussalam', 508, 96),
    ('Bulgaria', 207, 100),
    ('Burkina Faso', 633, 854),
    ('Burundi', 609, 108),
    ('Cambodia', 514, 116),
    ('Cambodia', 515, 116),
    ('Cameroon', 613, 120),
    ('Canada', 316, 124),
    ('Cape Verde', 617, 132),
    ('Cayman Islands', 319, 136),
    ('Central African Republic', 612, 140),
    ('Chad', 670, 148),
    ('Chile', 725, 152),
    ('China', 412, 156),
    ('China', 413, 156),
    ('China', 414, 156),
    ('Christmas Island', 516, 162),
    ('Cocos Islands', 523, 166),
    ('Colombia', 730, 170),
    ('Comoros', 616, 174),
    ('Comoros', 620, 174),
    ('Congo', 615, 178),
    ('Cook Islands', 518, 184),
    ('Costa Rica', 321, 188),
    (E'Côte d\'Ivoire', 619, 384),
    ('Croatia', 238, 191),
    ('Crozet Archipelago', 618, NULL),
    ('Cuba', 323, 192),
    ('Cyprus', 209, 196),
    ('Cyprus', 210, 196),
    ('Cyprus', 212, 196),
    ('Czech Republic', 270, 203),
    ('Denmark', 219, 208),
    ('Denmark', 220, 208),
    ('Djibouti', 621, 262),
    ('Dominica', 325, 212),
    ('Dominican Republic', 327, 214),
    ('DR Congo', 676, NULL),
    ('Ecuador', 735, 218),
    ('Egypt', 622, 818),
    ('El Salvador', 359, 222),
    ('Equatorial Guinea', 631, 226),
    ('Eritrea', 625, 232),
    ('Estonia', 276, 233),
    ('Ethiopia', 624, 231),
    ('Falkland Islands', 740, 238), -- was 234 (Faroe Islands)
    ('Faroe Islands', 231, NULL),
    ('Fiji', 520, 242),
    ('Finland', 230, 246),
    ('France', 226, 250),
    ('France', 227, 250),
    ('France', 228, 250),
    ('French Polynesia', 546, 258), -- was 260 (French Southern Territories)
    ('Gabonese Republic', 626, 266),
    ('Gambia', 629, 270),
    ('Georgia', 213, 268),
    ('Germany', 211, 276),
    ('Germany', 218, 276),
    ('Ghana', 627, 288),
    ('Gibraltar', 236, 292),
    ('Greece', 237, 300),
    ('Greece', 239, 300),
    ('Greece', 240, 300),
    ('Greece', 241, 300),
    ('Greenland', 331, 304),
    ('Grenada', 330, 308),
    ('Guadeloupe', 329, 312),
    ('Guatemala', 332, 320),
    ('Guiana', 745, 254), -- MID 745 is French Guiana; was 324 (Guinea)
    ('Guinea', 632, 324),
    ('Guinea-Bissau', 630, 624),
    ('Guyana', 750, 328),
    ('Haiti', 336, 332),
    ('Honduras', 334, 340),
    ('Hong Kong', 477, 344),
    ('Hungary', 243, 348),
    ('Iceland', 251, 352),
    ('India', 419, 356),
    ('Indonesia', 525, 360),
    ('Iran', 422, 364),
    ('Iraq', 425, 368),
    ('Ireland', 250, 372),
    ('Israel', 428, 376),
    ('Italy', 247, 380),
    ('Jamaica', 339, 388),
    ('Japan', 431, 392),
    ('Japan', 432, 392),
    ('Jordan', 438, 400),
    ('Kazakhstan', 436, 398),
    ('Kenya', 634, 404),
    ('Kerguelen Islands', 635, NULL),
    ('Kiribati', 529, 296),
    ('Kuwait', 447, 414),
    ('Kyrgyzstan', 451, 417),
    ('Lao', 531, 418),
    ('Latvia', 275, 428),
    ('Lebanon', 450, 422),
    ('Lesotho', 644, 426),
    ('Liberia', 636, 430),
    ('Liberia', 637, 430),
    ('Libya', 642, 434),
    ('Liechtenstein', 252, 438),
    ('Lithuania', 277, 440),
    ('Luxembourg', 253, 442),
    ('Macao', 453, 446),
    ('Madagascar', 647, 450),
    ('Madeira', 255, NULL),
    ('Makedonia', 274, NULL),
    ('Malawi', 655, 454),
    ('Malaysia', 533, 458),
    ('Maldives', 455, 462),
    ('Mali', 649, 466),
    ('Malta', 215, 470),
    ('Malta', 229, 470),
    ('Malta', 248, 470),
    ('Malta', 249, 470),
    ('Malta', 256, 470),
    ('Marshall Islands', 538, 584),
    ('Martinique', 347, 474),
    ('Mauritania', 654, 478),
    ('Mauritius', 645, 480),
    ('Mexico', 345, 484),
    ('Micronesia', 510, 583),
    ('Moldova', 214, 498),
    ('Monaco', 254, 492),
    ('Mongolia', 457, 496),
    ('Montenegro', 262, 499),
    ('Montserrat', 348, 500),
    ('Morocco', 242, 504),
    ('Mozambique', 650, 508),
    ('Myanmar', 506, 104),
    ('Namibia', 659, 516),
    ('Nauru', 544, 520),
    ('Nepal', 459, 524),
    ('Netherlands', 244, 528),
    ('Netherlands', 245, 528),
    ('Netherlands', 246, 528),
    ('Netherlands Antilles', 306, NULL),
    ('New Caledonia', 540, 540),
    ('New Zealand', 512, 554),
    ('Nicaragua', 350, 558),
    ('Niger', 656, 562),
    ('Nigeria', 657, 566),
    ('Niue', 542, 570),
    ('North Korea', 445, 408),
    ('Northern Mariana Islands', 536, 580),
    ('Norway', 257, 578),
    ('Norway', 258, 578),
    ('Norway', 259, 578),
    ('Oman', 461, 512),
    ('Pakistan', 463, 586),
    ('Palau', 511, 585),
    ('Palestine', 443, 275),
    ('Panama', 351, 591),
    ('Panama', 352, 591),
    ('Panama', 353, 591),
    ('Panama', 354, 591),
    ('Panama', 355, 591),
    ('Panama', 356, 591),
    ('Panama', 357, 591),
    ('Panama', 370, 591),
    ('Panama', 371, 591),
    ('Panama', 372, 591),
    ('Panama', 373, 591),
    ('Papua New Guinea', 553, 598),
    ('Paraguay', 755, 600),
    ('Peru', 760, 604),
    ('Philippines', 548, 608),
    ('Pitcairn Island', 555, 612),
    ('Poland', 261, 616),
    ('Portugal', 263, 620),
    ('Puerto Rico', 358, 630),
    ('Qatar', 466, 634),
    ('Reunion', 660, 638),
    ('Romania', 264, 642),
    ('Russian Federation', 273, 643),
    ('Rwanda', 661, 646),
    ('Saint Helena', 665, 654),
    ('Saint Kitts and Nevis', 341, 659),
    ('Saint Lucia', 343, 662),
    ('Saint Paul and Amsterdam Islands', 607, NULL),
    ('Saint Pierre and Miquelon', 361, 666),
    ('Samoa', 561, 882),
    ('San Marino', 268, 674),
    ('Sao Tome and Principe', 668, 678),
    ('Saudi Arabia', 403, 682),
    ('Senegal', 663, 686),
    ('Serbia', 279, 688),
    ('Seychelles', 664, 690),
    ('Sierra Leone', 667, 694),
    ('Singapore', 563, 702),
    ('Singapore', 564, 702),
    ('Singapore', 565, 702),
    ('Singapore', 566, 702),
    ('Slovakia', 267, 703),
    ('Slovenia', 278, 705),
    ('Solomon Islands', 557, 90),
    ('Somalia', 666, 706),
    ('South Africa', 601, 710),
    ('South Korea', 440, 410),
    ('South Korea', 441, 410),
    ('South Sudan', 638, 728),
    ('Spain', 224, 724),
    ('Spain', 225, 724),
    ('Sri Lanka', 417, 144),
    ('St Vincent and the Grenadines', 375, 670),
    ('St Vincent and the Grenadines', 376, 670),
    ('St Vincent and the Grenadines', 377, 670),
    ('Sudan', 662, 729),
    ('Suriname', 765, 740),
    ('Swaziland', 669, 748),
    ('Sweden', 265, 752),
    ('Sweden', 266, 752),
    ('Switzerland', 269, 756),
    ('Syria', 468, 760),
    ('Taiwan', 416, 158),
    ('Tajikistan', 472, 762),
    ('Tanzania', 674, 834),
    ('Tanzania', 677, 834),
    ('Thailand', 567, 764),
    ('Togolese', 671, 768),
    ('Tonga', 570, 776),
    ('Trinidad and Tobago', 362, 780),
    ('Tunisia', 672, 788),
    ('Turkey', 271, 792),
    ('Turkmenistan', 434, 795),
    ('Turks and Caicos Islands', 364, 796),
    ('Tuvalu', 572, 798),
    ('Uganda', 675, 800),
    ('Ukraine', 272, 804),
    ('United Arab Emirates', 470, 784),
    ('United Kingdom', 232, 826),
    ('United Kingdom', 233, 826),
    ('United Kingdom', 234, 826),
    ('United Kingdom', 235, 826),
    ('Uruguay', 770, 858),
    ('US Virgin Islands', 379, 850),
    ('USA', 338, 840),
    ('USA', 366, 840),
    ('USA', 367, 840),
    ('USA', 368, 840),
    ('USA', 369, 840),
    ('Uzbekistan', 437, 860),
    ('Vanuatu', 576, 548),
    ('Vanuatu', 577, 548),
    ('Vatican City', 208, NULL),
    ('Venezuela', 775, 862),
    ('Vietnam', 574, 704),
    ('Wallis and Futuna Islands', 578, 876),
    ('Yemen', 473, 887),
    ('Yemen', 475, 887),
    ('Zambia', 678, 894),
    ('Zimbabwe', 679, 716);
|
||||
---------------------------------------------------------------------------
--
DROP TABLE IF EXISTS public.iso3166;
CREATE TABLE IF NOT EXISTS public.iso3166(
    id INTEGER,      -- ISO 3166-1 numeric code
    country TEXT,    -- official short name
    alpha_2 TEXT,    -- ISO 3166-1 alpha-2 code
    alpha_3 TEXT     -- ISO 3166-1 alpha-3 code
);
-- Description
COMMENT ON TABLE
    public.iso3166
    IS 'This is a complete list of all country ISO codes as described in the ISO 3166 international standard. Country Codes Alpha-2 & Alpha-3 https://www.iban.com/country-codes';
|
||||
-- ISO 3166-1 reference data (numeric, short name, alpha-2, alpha-3);
-- rows are data and reproduced verbatim.
INSERT INTO iso3166 VALUES
    (4,'Afghanistan','AF','AFG'),
    (8,'Albania','AL','ALB'),
    (12,'Algeria','DZ','DZA'),
    (16,'American Samoa','AS','ASM'),
    (20,'Andorra','AD','AND'),
    (24,'Angola','AO','AGO'),
    (660,'Anguilla','AI','AIA'),
    (10,'Antarctica','AQ','ATA'),
    (28,'Antigua and Barbuda','AG','ATG'),
    (32,'Argentina','AR','ARG'),
    (51,'Armenia','AM','ARM'),
    (533,'Aruba','AW','ABW'),
    (36,'Australia','AU','AUS'),
    (40,'Austria','AT','AUT'),
    (31,'Azerbaijan','AZ','AZE'),
    (44,'Bahamas (the)','BS','BHS'),
    (48,'Bahrain','BH','BHR'),
    (50,'Bangladesh','BD','BGD'),
    (52,'Barbados','BB','BRB'),
    (112,'Belarus','BY','BLR'),
    (56,'Belgium','BE','BEL'),
    (84,'Belize','BZ','BLZ'),
    (204,'Benin','BJ','BEN'),
    (60,'Bermuda','BM','BMU'),
    (64,'Bhutan','BT','BTN'),
    (68,E'Bolivia (Plurinational State of)','BO','BOL'),
    (535,'Bonaire, Sint Eustatius and Saba','BQ','BES'),
    (70,'Bosnia and Herzegovina','BA','BIH'),
    (72,'Botswana','BW','BWA'),
    (74,'Bouvet Island','BV','BVT'),
    (76,'Brazil','BR','BRA'),
    (86,E'British Indian Ocean Territory (the)','IO','IOT'),
    (96,'Brunei Darussalam','BN','BRN'),
    (100,'Bulgaria','BG','BGR'),
    (854,'Burkina Faso','BF','BFA'),
    (108,'Burundi','BI','BDI'),
    (132,'Cabo Verde','CV','CPV'),
    (116,'Cambodia','KH','KHM'),
    (120,'Cameroon','CM','CMR'),
    (124,'Canada','CA','CAN'),
    (136,E'Cayman Islands (the)','KY','CYM'),
    (140,E'Central African Republic (the)','CF','CAF'),
    (148,'Chad','TD','TCD'),
    (152,'Chile','CL','CHL'),
    (156,'China','CN','CHN'),
    (162,'Christmas Island','CX','CXR'),
    (166,E'Cocos (Keeling) Islands (the)','CC','CCK'),
    (170,'Colombia','CO','COL'),
    (174,'Comoros (the)','KM','COM'),
    (180,E'Congo (the Democratic Republic of the)','CD','COD'),
    (178,E'Congo (the)','CG','COG'),
    (184,E'Cook Islands (the)','CK','COK'),
    (188,'Costa Rica','CR','CRI'),
    (191,'Croatia','HR','HRV'),
    (192,'Cuba','CU','CUB'),
    (531,'Curaçao','CW','CUW'),
    (196,'Cyprus','CY','CYP'),
    (203,'Czechia','CZ','CZE'),
    (384,E'Côte d\'Ivoire','CI','CIV'),
    (208,'Denmark','DK','DNK'),
    (262,'Djibouti','DJ','DJI'),
    (212,'Dominica','DM','DMA'),
    (214,E'Dominican Republic (the)','DO','DOM'),
    (218,'Ecuador','EC','ECU'),
    (818,'Egypt','EG','EGY'),
    (222,'El Salvador','SV','SLV'),
    (226,'Equatorial Guinea','GQ','GNQ'),
    (232,'Eritrea','ER','ERI'),
    (233,'Estonia','EE','EST'),
    (748,'Eswatini','SZ','SWZ'),
    (231,'Ethiopia','ET','ETH'),
    (238,E'Falkland Islands (the) [Malvinas]','FK','FLK'),
    (234,E'Faroe Islands (the)','FO','FRO'),
    (242,'Fiji','FJ','FJI'),
    (246,'Finland','FI','FIN'),
    (250,'France','FR','FRA'),
    (254,'French Guiana','GF','GUF'),
    (258,'French Polynesia','PF','PYF'),
    (260,E'French Southern Territories (the)','TF','ATF'),
    (266,'Gabon','GA','GAB'),
    (270,E'Gambia (the)','GM','GMB'),
    (268,'Georgia','GE','GEO'),
    (276,'Germany','DE','DEU'),
    (288,'Ghana','GH','GHA'),
    (292,'Gibraltar','GI','GIB'),
    (300,'Greece','GR','GRC'),
    (304,'Greenland','GL','GRL'),
    (308,'Grenada','GD','GRD'),
    (312,'Guadeloupe','GP','GLP'),
    (316,'Guam','GU','GUM'),
    (320,'Guatemala','GT','GTM'),
    (831,'Guernsey','GG','GGY'),
    (324,'Guinea','GN','GIN'),
    (624,'Guinea-Bissau','GW','GNB'),
    (328,'Guyana','GY','GUY'),
    (332,'Haiti','HT','HTI'),
    (334,'Heard Island and McDonald Islands','HM','HMD'),
    (336,E'Holy See (the)','VA','VAT'),
    (340,'Honduras','HN','HND'),
    (344,'Hong Kong','HK','HKG'),
    (348,'Hungary','HU','HUN'),
    (352,'Iceland','IS','ISL'),
    (356,'India','IN','IND'),
    (360,'Indonesia','ID','IDN'),
    (364,E'Iran (Islamic Republic of)','IR','IRN'),
    (368,'Iraq','IQ','IRQ'),
    (372,'Ireland','IE','IRL'),
    (833,'Isle of Man','IM','IMN'),
    (376,'Israel','IL','ISR'),
    (380,'Italy','IT','ITA'),
    (388,'Jamaica','JM','JAM'),
    (392,'Japan','JP','JPN'),
    (832,'Jersey','JE','JEY'),
    (400,'Jordan','JO','JOR'),
    (398,'Kazakhstan','KZ','KAZ'),
    (404,'Kenya','KE','KEN'),
    (296,'Kiribati','KI','KIR'),
    (408,E'Korea (the Democratic People\'s Republic of)','KP','PRK'),
    (410,E'Korea (the Republic of)','KR','KOR'),
    (414,'Kuwait','KW','KWT'),
    (417,'Kyrgyzstan','KG','KGZ'),
    (418,E'Lao People\'s Democratic Republic (the)','LA','LAO'),
    (428,'Latvia','LV','LVA'),
    (422,'Lebanon','LB','LBN'),
    (426,'Lesotho','LS','LSO'),
    (430,'Liberia','LR','LBR'),
    (434,'Libya','LY','LBY'),
    (438,'Liechtenstein','LI','LIE'),
    (440,'Lithuania','LT','LTU'),
    (442,'Luxembourg','LU','LUX'),
    (446,'Macao','MO','MAC'),
    (450,'Madagascar','MG','MDG'),
    (454,'Malawi','MW','MWI'),
    (458,'Malaysia','MY','MYS'),
    (462,'Maldives','MV','MDV'),
    (466,'Mali','ML','MLI'),
    (470,'Malta','MT','MLT'),
    (584,E'Marshall Islands (the)','MH','MHL'),
    (474,'Martinique','MQ','MTQ'),
    (478,'Mauritania','MR','MRT'),
    (480,'Mauritius','MU','MUS'),
    (175,'Mayotte','YT','MYT'),
    (484,'Mexico','MX','MEX'),
    (583,E'Micronesia (Federated States of)','FM','FSM'),
    (498,E'Moldova (the Republic of)','MD','MDA'),
    (492,'Monaco','MC','MCO'),
    (496,'Mongolia','MN','MNG'),
    (499,'Montenegro','ME','MNE'),
    (500,'Montserrat','MS','MSR'),
    (504,'Morocco','MA','MAR'),
    (508,'Mozambique','MZ','MOZ'),
    (104,'Myanmar','MM','MMR'),
    (516,'Namibia','NA','NAM'),
    (520,'Nauru','NR','NRU'),
    (524,'Nepal','NP','NPL'),
    (528,E'Netherlands (the)','NL','NLD'),
    (540,'New Caledonia','NC','NCL'),
    (554,'New Zealand','NZ','NZL'),
    (558,'Nicaragua','NI','NIC'),
    (562,E'Niger (the)','NE','NER'),
    (566,'Nigeria','NG','NGA'),
    (570,'Niue','NU','NIU'),
    (574,'Norfolk Island','NF','NFK'),
    (580,E'Northern Mariana Islands (the)','MP','MNP'),
    (578,'Norway','NO','NOR'),
    (512,'Oman','OM','OMN'),
    (586,'Pakistan','PK','PAK'),
    (585,'Palau','PW','PLW'),
    (275,'Palestine, State of','PS','PSE'),
    (591,'Panama','PA','PAN'),
    (598,'Papua New Guinea','PG','PNG'),
    (600,'Paraguay','PY','PRY'),
    (604,'Peru','PE','PER'),
    (608,E'Philippines (the)','PH','PHL'),
    (612,'Pitcairn','PN','PCN'),
    (616,'Poland','PL','POL'),
    (620,'Portugal','PT','PRT'),
    (630,'Puerto Rico','PR','PRI'),
    (634,'Qatar','QA','QAT'),
    (807,'Republic of North Macedonia','MK','MKD'),
    (642,'Romania','RO','ROU'),
    (643,'Russian Federation (the)','RU','RUS'),
    (646,'Rwanda','RW','RWA'),
    (638,'Réunion','RE','REU'),
    (652,'Saint Barthélemy','BL','BLM'),
    (654,'Saint Helena, Ascension and Tristan da Cunha','SH','SHN'),
    (659,'Saint Kitts and Nevis','KN','KNA'),
    (662,'Saint Lucia','LC','LCA'),
    (663,'Saint Martin (French part)','MF','MAF'),
    (666,'Saint Pierre and Miquelon','PM','SPM'),
    (670,'Saint Vincent and the Grenadines','VC','VCT'),
    (882,'Samoa','WS','WSM'),
    (674,'San Marino','SM','SMR'),
    (678,'Sao Tome and Principe','ST','STP'),
    (682,'Saudi Arabia','SA','SAU'),
    (686,'Senegal','SN','SEN'),
    (688,'Serbia','RS','SRB'),
    (690,'Seychelles','SC','SYC'),
    (694,'Sierra Leone','SL','SLE'),
    (702,'Singapore','SG','SGP'),
    (534,'Sint Maarten (Dutch part)','SX','SXM'),
    (703,'Slovakia','SK','SVK'),
    (705,'Slovenia','SI','SVN'),
    (90,'Solomon Islands','SB','SLB'),
    (706,'Somalia','SO','SOM'),
    (710,'South Africa','ZA','ZAF'),
    (239,'South Georgia and the South Sandwich Islands','GS','SGS'),
    (728,'South Sudan','SS','SSD'),
    (724,'Spain','ES','ESP'),
    (144,'Sri Lanka','LK','LKA'),
    (729,'Sudan (the)','SD','SDN'),
    (740,'Suriname','SR','SUR'),
    (744,'Svalbard and Jan Mayen','SJ','SJM'),
    (752,'Sweden','SE','SWE'),
    (756,'Switzerland','CH','CHE'),
    (760,'Syrian Arab Republic','SY','SYR'),
    (158,'Taiwan (Province of China)','TW','TWN'),
    (762,'Tajikistan','TJ','TJK'),
    (834,'Tanzania, United Republic of','TZ','TZA'),
    (764,'Thailand','TH','THA'),
    (626,'Timor-Leste','TL','TLS'),
    (768,'Togo','TG','TGO'),
    (772,'Tokelau','TK','TKL'),
    (776,'Tonga','TO','TON'),
    (780,'Trinidad and Tobago','TT','TTO'),
    (788,'Tunisia','TN','TUN'),
    (792,'Turkey','TR','TUR'),
    (795,'Turkmenistan','TM','TKM'),
    (796,'Turks and Caicos Islands (the)','TC','TCA'),
    (798,'Tuvalu','TV','TUV'),
    (800,'Uganda','UG','UGA'),
    (804,'Ukraine','UA','UKR'),
    (784,'United Arab Emirates (the)','AE','ARE'),
    (826,'United Kingdom of Great Britain and Northern Ireland (the)','GB','GBR'),
    (581,'United States Minor Outlying Islands (the)','UM','UMI'),
    (840,'United States of America (the)','US','USA'),
    (858,'Uruguay','UY','URY'),
    (860,'Uzbekistan','UZ','UZB'),
    (548,'Vanuatu','VU','VUT'),
    (862,'Venezuela (Bolivarian Republic of)','VE','VEN'),
    (704,'Viet Nam','VN','VNM'),
    (92,'Virgin Islands (British)','VG','VGB'),
    (850,'Virgin Islands (U.S.)','VI','VIR'),
    (876,'Wallis and Futuna','WF','WLF'),
    (732,'Western Sahara','EH','ESH'),
    (887,'Yemen','YE','YEM'),
    (894,'Zambia','ZM','ZMB'),
    (716,'Zimbabwe','ZW','ZWE'),
    (248,E'Åland Islands','AX','ALA');
1281
initdb/02_3_2_signalk_public_functions.sql
Normal file
1281
initdb/02_3_2_signalk_public_functions.sql
Normal file
File diff suppressed because it is too large
Load Diff
136
initdb/02_3_3_signalk_public_functions_helpers.sql
Normal file
136
initdb/02_3_3_signalk_public_functions_helpers.sql
Normal file
@@ -0,0 +1,136 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- signalk db public schema
|
||||
--
|
||||
|
||||
-- Show which database psql is currently attached to
select current_database();

-- connect to the DB
\c signalk

-- All helpers below live in the default public schema
CREATE SCHEMA IF NOT EXISTS public;
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- basic helpers to check type and more
|
||||
--
|
||||
CREATE OR REPLACE FUNCTION public.isnumeric(text) RETURNS BOOLEAN AS
$isnumeric$
BEGIN
    -- Attempt the cast; a failure is trapped and reported as FALSE.
    PERFORM $1::NUMERIC;
    RETURN TRUE;
EXCEPTION WHEN others THEN
    RETURN FALSE;
END;
$isnumeric$
STRICT
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
    public.isnumeric
    IS 'Check typeof value is numeric';
||||
|
||||
CREATE OR REPLACE FUNCTION public.isboolean(text) RETURNS BOOLEAN AS
$isboolean$
BEGIN
    -- Attempt the cast; a failure is trapped and reported as FALSE.
    PERFORM $1::BOOLEAN;
    RETURN TRUE;
EXCEPTION WHEN others THEN
    RETURN FALSE;
END;
$isboolean$
STRICT
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
    public.isboolean
    IS 'Check typeof value is boolean';
||||
|
||||
-- Marked STRICT IMMUTABLE for consistency with the sibling helpers above:
-- previously the function defaulted to VOLATILE (blocking inlining/optimization)
-- and returned TRUE for NULL input; with STRICT a NULL input yields NULL.
CREATE OR REPLACE FUNCTION public.isdate(s varchar) RETURNS BOOLEAN AS
$isdate$
BEGIN
    -- Attempt the cast; a failure is trapped and reported as FALSE.
    PERFORM s::date;
    RETURN TRUE;
EXCEPTION WHEN others THEN
    RETURN FALSE;
END;
$isdate$
STRICT
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
    public.isdate
    IS 'Check typeof value is date';
||||
|
||||
-- NOTE(review): despite the name, this validates TIMESTAMP WITHOUT TIME ZONE
-- (matching the COMMENT below) — confirm the naming is intentional.
CREATE OR REPLACE FUNCTION public.istimestamptz(text) RETURNS BOOLEAN AS
$istimestamptz$
BEGIN
    -- Attempt the cast; a failure is trapped and reported as FALSE.
    PERFORM $1::TIMESTAMP WITHOUT TIME ZONE;
    RETURN TRUE;
EXCEPTION WHEN others THEN
    RETURN FALSE;
END;
$istimestamptz$
STRICT
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
    public.istimestamptz
    IS 'Check typeof value is TIMESTAMP WITHOUT TIME ZONE';
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- JSON helpers
|
||||
--
|
||||
-- CREATE OR REPLACE (was plain CREATE) so the script is re-runnable,
-- consistent with every other function definition in this file.
CREATE OR REPLACE FUNCTION jsonb_key_exists(some_json jsonb, outer_key text)
RETURNS BOOLEAN AS $$
BEGIN
    -- The -> operator yields SQL NULL when the key is absent.
    RETURN (some_json->outer_key) IS NOT NULL;
END;
$$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.jsonb_key_exists
    IS 'function that checks if an outer key exists in some_json and returns a boolean';
||||
|
||||
-- https://stackoverflow.com/questions/42944888/merging-jsonb-values-in-postgresql
CREATE OR REPLACE FUNCTION public.jsonb_recursive_merge(A jsonb, B jsonb)
RETURNS jsonb LANGUAGE SQL AS $$
    -- Deep merge of B into A: when a key exists in both and both values are
    -- objects, merge them recursively; otherwise B's value wins.
    SELECT
        jsonb_object_agg(
            COALESCE(key_a, key_b),
            CASE
                WHEN val_a IS NULL THEN val_b
                WHEN val_b IS NULL THEN val_a
                WHEN jsonb_typeof(val_a) <> 'object' OR jsonb_typeof(val_b) <> 'object' THEN val_b
                ELSE jsonb_recursive_merge(val_a, val_b)
            END
        )
    FROM jsonb_each(A) lhs(key_a, val_a)
    FULL JOIN jsonb_each(B) rhs(key_b, val_b) ON key_a = key_b
$$;
-- Description
COMMENT ON FUNCTION
    public.jsonb_recursive_merge
    IS 'Merging JSONB values';
||||
|
||||
-- https://stackoverflow.com/questions/36041784/postgresql-compare-two-jsonb-objects
CREATE OR REPLACE FUNCTION public.jsonb_diff_val(val1 JSONB, val2 JSONB)
RETURNS JSONB AS $jsonb_diff_val$
DECLARE
    result JSONB;
    entry RECORD;
BEGIN
    -- Start from val1 and prune/flag keys while walking val2.
    result = val1;
    FOR entry IN SELECT * FROM jsonb_each(val2) LOOP
        IF result @> jsonb_build_object(entry.key, entry.value) THEN
            -- Same key/value pair in both inputs: not a difference, drop it.
            result = result - entry.key;
        ELSIF result ? entry.key THEN
            -- Key present with a different value: keep val1's value.
            CONTINUE;
        ELSE
            -- Key only present in val2.
            -- NOTE(review): this appends the *string* 'null', not a JSON null —
            -- confirm that is the intended marker.
            result = result || jsonb_build_object(entry.key, 'null');
        END IF;
    END LOOP;
    RETURN result;
END;
$jsonb_diff_val$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.jsonb_diff_val
    IS 'Compare two jsonb objects';
418
initdb/02_3_3_signalk_public_functions_py.sql
Normal file
418
initdb/02_3_3_signalk_public_functions_py.sql
Normal file
@@ -0,0 +1,418 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- signalk db public schema
|
||||
--
|
||||
|
||||
-- Show which database psql is currently attached to
select current_database();

-- connect to the DB
\c signalk

-- All functions below live in the default public schema
CREATE SCHEMA IF NOT EXISTS public;
||||
|
||||
---------------------------------------------------------------------------
-- python reverse_geocode
--
-- https://github.com/CartoDB/labs-postgresql/blob/master/workshop/plpython.md
--
DROP FUNCTION IF EXISTS reverse_geocode_py_fn;
-- Resolve (lon, lat) to a location name using the reverse URL of the
-- requested geocoder (looked up in the geocoders table).
CREATE OR REPLACE FUNCTION reverse_geocode_py_fn(IN geocoder TEXT, IN lon NUMERIC, IN lat NUMERIC,
    OUT geo_name TEXT)
AS $reverse_geocode_py$
    import requests

    # Validate input first, before touching the database.
    # Explicit None checks: "not lon" would wrongly reject the valid
    # coordinate 0 (equator / prime meridian).
    if lon is None or lat is None:
        plpy.notice('reverse_geocode_py_fn Parameters [{}] [{}]'.format(lon, lat))
        plpy.error('Error missing parameters')
        return None

    # Use the shared cache to avoid preparing the geocoder metadata
    if geocoder in SD:
        plan = SD[geocoder]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("SELECT reverse_url AS url FROM geocoders WHERE name = $1", ["text"])
        SD[geocoder] = plan

    # Execute the statement with the geocoder param and limit to 1 result
    rv = plpy.execute(plan, [geocoder], 1)
    url = rv[0]['url']

    # Make the request to the geocoder API
    # https://operations.osmfoundation.org/policies/nominatim/
    payload = {"lon": lon, "lat": lat, "format": "jsonv2", "zoom": 18}
    # Bounded timeout so a stalled geocoder cannot hang the backend.
    r = requests.get(url, params=payload, timeout=5)

    # Return the full address or nothing if not found
    # Option1: If name is null fallback to address field road,neighbourhood,suburb
    # Option2: Return the json for future reference like country
    if r.status_code == 200 and "name" in r.json():
        r_dict = r.json()
        if r_dict["name"]:
            return r_dict["name"]
        address = r_dict.get("address")
        if address:
            # Fallback fields, from most to least specific.
            for key in ("road", "neighbourhood", "suburb", "residential", "village", "town"):
                if address.get(key):
                    return address[key]
        return 'n/a'
    else:
        plpy.warning('Failed to received a geo full address %s', r.json())
        #plpy.error('Failed to received a geo full address %s', r.json())
        # NOTE(review): 'unknow' is misspelled but kept verbatim — callers may
        # compare against this exact sentinel.
        return 'unknow'
$reverse_geocode_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.reverse_geocode_py_fn
    IS 'query reverse geo service to return location name using plpython3u';
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- python send email
|
||||
--
|
||||
-- https://www.programcreek.com/python/example/3684/email.utils.formatdate
|
||||
DROP FUNCTION IF EXISTS send_email_py_fn;
CREATE OR REPLACE FUNCTION send_email_py_fn(IN email_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_email_py$
    # Send a templated email notification over SMTP.
    #
    # email_type: template name in the email_templates table.
    # _user: recipient data and placeholder values (email, boat, otp_code, ...).
    # app: application settings (app.url, app.email_from, app.email_server,
    #      app.email_user, app.email_pass).
    import smtplib
    from email.utils import formatdate, make_msgid
    from email.mime.text import MIMEText

    # Use the shared cache (SD) to avoid re-preparing the template query.
    if email_type in SD:
        plan = SD[email_type]
    else:
        # A prepared statement from Python
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[email_type] = plan

    # Execute the statement with the email_type param and limit to 1 result
    rv = plpy.execute(plan, [email_type], 1)
    email_subject = rv[0]['email_subject']
    email_content = rv[0]['email_content']

    # Both JSONB parameters are required.
    if not _user or not app:
        plpy.notice('send_email_py_fn Parameters [{}] [{}]'.format(_user, app))
        # plpy.error raises an exception, so no "return" is needed afterwards.
        plpy.error('Error missing parameters')

    # Replace placeholder fields using the input jsonb objects.
    if 'logbook_name' in _user and _user['logbook_name']:
        email_content = email_content.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        email_content = email_content.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'recipient' in _user and _user['recipient']:
        email_content = email_content.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        email_content = email_content.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        email_content = email_content.replace('__BADGE_NAME__', _user['badge'])
    if 'otp_code' in _user and _user['otp_code']:
        email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
    if 'reset_qs' in _user and _user['reset_qs']:
        email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])

    if 'app.url' in app and app['app.url']:
        email_content = email_content.replace('__APP_URL__', app['app.url'])

    # Sender defaults to root@localhost unless app.email_from is configured.
    email_from = 'root@localhost'
    if 'app.email_from' in app and app['app.email_from']:
        email_from = 'PostgSail <' + app['app.email_from'] + '>'

    # A recipient address is mandatory.
    email_to = 'root@localhost'
    if 'email' in _user and _user['email']:
        email_to = _user['email']
    else:
        plpy.error('Error email to')

    msg = MIMEText(email_content, 'plain', 'utf-8')
    msg["Subject"] = email_subject
    msg["From"] = email_from
    msg["To"] = email_to
    msg["Date"] = formatdate()
    msg["Message-ID"] = make_msgid()

    server_smtp = 'localhost'
    if 'app.email_server' in app and app['app.email_server']:
        server_smtp = app['app.email_server']

    # Send the message via the configured SMTP server.
    try:
        with smtplib.SMTP(server_smtp, 25) as server:
            # Authenticate (over TLS) only when credentials are configured.
            if 'app.email_user' in app and app['app.email_user'] \
                and 'app.email_pass' in app and app['app.email_pass']:
                server.starttls()
                server.login(app['app.email_user'], app['app.email_pass'])
            server.sendmail(msg["From"], msg["To"], msg.as_string())
            # NOTE: the "with" block issues QUIT on exit; an explicit
            # server.quit() here would disconnect twice.
        # Report that the message was sent.
        plpy.notice('Sent email successfully to [{}] [{}]'.format(msg["To"], msg["Subject"]))
    except OSError as error:
        plpy.error('OS Error occurred: ' + str(error))
    except smtplib.SMTPConnectError:
        plpy.error('Failed to connect to the server. Bad connection settings?')
    except smtplib.SMTPServerDisconnected:
        plpy.error('Failed to connect to the server. Wrong user/password?')
    except smtplib.SMTPException as e:
        plpy.error('SMTP error occurred: ' + str(e))
$send_email_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_email_py_fn
    IS 'Send email notification using plpython3u';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- python send pushover message
|
||||
-- https://pushover.net/
|
||||
DROP FUNCTION IF EXISTS send_pushover_py_fn;
CREATE OR REPLACE FUNCTION send_pushover_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_pushover_py$
    # Send a templated Pushover notification, https://pushover.net/
    #
    # message_type: template name in the email_templates table.
    # _user: recipient data (pushover_user_key) and placeholder values.
    # app: application settings (app.url, app.pushover_app_token).
    import requests

    # Use the shared cache (SD) to avoid re-preparing the template query.
    if message_type in SD:
        plan = SD[message_type]
    else:
        # A prepared statement from Python
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[message_type] = plan

    # Execute the statement with the message_type param and limit to 1 result
    rv = plpy.execute(plan, [message_type], 1)
    pushover_title = rv[0]['pushover_title']
    pushover_message = rv[0]['pushover_message']

    # Replace placeholder fields using the input jsonb objects.
    if 'logbook_name' in _user and _user['logbook_name']:
        pushover_message = pushover_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        pushover_message = pushover_message.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'recipient' in _user and _user['recipient']:
        pushover_message = pushover_message.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        pushover_message = pushover_message.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        pushover_message = pushover_message.replace('__BADGE_NAME__', _user['badge'])

    if 'app.url' in app and app['app.url']:
        pushover_message = pushover_message.replace('__APP_URL__', app['app.url'])

    # Application token is mandatory; plpy.error raises on failure,
    # so no unreachable "return" statements are needed.
    if 'app.pushover_app_token' in app and app['app.pushover_app_token']:
        pushover_token = app['app.pushover_app_token']
    else:
        plpy.error('Error no pushover token defined, check app settings')
    # Per-user key is mandatory.
    if 'pushover_user_key' in _user and _user['pushover_user_key']:
        pushover_user = _user['pushover_user_key']
    else:
        plpy.error('Error no pushover user token defined, check user settings')

    # A timeout keeps a slow external API from blocking the database backend.
    r = requests.post("https://api.pushover.net/1/messages.json", data = {
        "token": pushover_token,
        "user": pushover_user,
        "title": pushover_title,
        "message": pushover_message
    }, timeout = 10)

    if r.status_code == 200:
        plpy.notice('Sent pushover successfully to [{}] [{}] [{}]'.format(pushover_user, pushover_title, r.text))
    else:
        plpy.error('Failed to send pushover')
$send_pushover_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_pushover_py_fn
    IS 'Send pushover notification using plpython3u';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- python send telegram message
|
||||
-- https://core.telegram.org/
|
||||
DROP FUNCTION IF EXISTS send_telegram_py_fn;
CREATE OR REPLACE FUNCTION send_telegram_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_telegram_py$
    """
    Send a message to a telegram user or group specified on chatId
    chat_id must be a number!
    message_type: template name in the email_templates table.
    _user: recipient data (telegram_chat_id) and placeholder values.
    app: application settings (app.url, app.telegram_bot_token).
    """
    import requests
    import json

    # Use the shared cache (SD) to avoid re-preparing the template query.
    if message_type in SD:
        plan = SD[message_type]
    else:
        # A prepared statement from Python
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[message_type] = plan

    # Execute the statement with the message_type param and limit to 1 result
    rv = plpy.execute(plan, [message_type], 1)
    # Telegram reuses the pushover template columns.
    telegram_title = rv[0]['pushover_title']
    telegram_message = rv[0]['pushover_message']

    # Replace placeholder fields using the input jsonb objects.
    if 'logbook_name' in _user and _user['logbook_name']:
        telegram_message = telegram_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        telegram_message = telegram_message.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'recipient' in _user and _user['recipient']:
        telegram_message = telegram_message.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        telegram_message = telegram_message.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        telegram_message = telegram_message.replace('__BADGE_NAME__', _user['badge'])

    if 'app.url' in app and app['app.url']:
        telegram_message = telegram_message.replace('__APP_URL__', app['app.url'])

    # Bot token is mandatory; plpy.error raises on failure,
    # so no unreachable "return" statements are needed.
    if 'app.telegram_bot_token' in app and app['app.telegram_bot_token']:
        telegram_token = app['app.telegram_bot_token']
    else:
        plpy.error('Error no telegram token defined, check app settings')
    # Per-user chat id is mandatory.
    if 'telegram_chat_id' in _user and _user['telegram_chat_id']:
        telegram_chat_id = _user['telegram_chat_id']
    else:
        plpy.error('Error no telegram user token defined, check user settings')

    # Fix: dropped the leftover placeholder header
    # ('Proxy-Authorization': 'Basic base64') that was sent on every request.
    headers = {'Content-Type': 'application/json'}
    data_dict = {'chat_id': telegram_chat_id,
                 'text': telegram_message,
                 'parse_mode': 'HTML',
                 'disable_notification': False}
    data = json.dumps(data_dict)
    url = f'https://api.telegram.org/bot{telegram_token}/sendMessage'
    # A timeout keeps a slow external API from blocking the database backend.
    r = requests.post(url,
                      data=data,
                      headers=headers,
                      timeout=10)

    if r.status_code == 200:
        plpy.notice('Sent telegram successfully to [{}] [{}] [{}]'.format(telegram_chat_id, telegram_title, r.text))
    else:
        plpy.error('Failed to send telegram')
$send_telegram_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_telegram_py_fn
    IS 'Send a message to a telegram user or group specified on chatId using plpython3u';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- python url encode
|
||||
CREATE OR REPLACE FUNCTION urlencode_py_fn(uri text) RETURNS text
AS $urlencode_py$
    # Percent-encode the input; safe="" means '/' is encoded as well.
    from urllib.parse import quote
    return quote(uri, safe="")
$urlencode_py$ LANGUAGE plpython3u IMMUTABLE STRICT;
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- python
|
||||
-- https://ipapi.co/
|
||||
DROP FUNCTION IF EXISTS reverse_geoip_py_fn;
CREATE OR REPLACE FUNCTION reverse_geoip_py_fn(IN _ip TEXT) RETURNS JSONB
AS $reverse_geoip_py$
    """
    Return ipapi.co ip details, https://ipapi.co/
    The response body is JSON text; PostgreSQL casts it to JSONB.
    """
    import requests

    # A timeout keeps a slow external API from blocking the database backend.
    url = f'https://ipapi.co/{_ip}/json/'
    r = requests.get(url, timeout=10)
    #plpy.notice('IP [{}] [{}]'.format(_ip, r.status_code))
    if r.status_code == 200:
        return r.text
    else:
        # plpy.error raises an exception, aborting the function;
        # the previous "return '{}'" after it was unreachable.
        plpy.error('Failed to get ip details')
$reverse_geoip_py$ LANGUAGE plpython3u;

-- Description
COMMENT ON FUNCTION
    public.reverse_geoip_py_fn
    IS 'Retrieve reverse geo IP location via ipapi.co using plpython3u';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- python url escape
|
||||
--
|
||||
DROP FUNCTION IF EXISTS urlescape_py_fn;
CREATE OR REPLACE FUNCTION urlescape_py_fn(original text) RETURNS text LANGUAGE plpython3u AS $$
    # Percent-encode the input with the default safe set (keeps '/').
    from urllib.parse import quote
    return quote(original)
$$
IMMUTABLE STRICT;
-- Description
COMMENT ON FUNCTION
    public.urlescape_py_fn
    IS 'URL-encoding VARCHAR and TEXT values using plpython3u';
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- python geojson parser
|
||||
--
|
||||
--CREATE TYPE geometry_type AS ENUM ('LineString', 'Point');
|
||||
DROP FUNCTION IF EXISTS geojson_py_fn;
CREATE OR REPLACE FUNCTION geojson_py_fn(IN original JSONB, IN geometry_type TEXT) RETURNS JSONB LANGUAGE plpython3u
AS $geojson_py$
    # Filter a list of GeoJSON feature lists: keep every feature whose
    # geometry type is NOT geometry_type (i.e. drop the given type).
    # "original" arrives as JSON text since no jsonb TRANSFORM is installed.
    import json
    parsed = json.loads(original)
    output = []
    # Ignore inputs such as [None, None] produced when no data is available.
    if None not in parsed:
        for idx, collection in enumerate(parsed):
            #plpy.notice(idx, collection)
            for feature in collection:
                # Defensive: a malformed feature without a 'geometry' object
                # or 'type' key previously raised KeyError; treat it as
                # "not the filtered type" and keep it.
                geometry = feature.get('geometry') or {}
                if geometry.get('type') != geometry_type:
                    output.append(feature)
                #elif (feature['properties']['id']): TODO
                #    output.append(feature)
    return json.dumps(output)
$geojson_py$ IMMUTABLE STRICT;
-- Description
COMMENT ON FUNCTION
    public.geojson_py_fn
    IS 'Parse geojson using plpython3u (should be done in PGSQL)';
|
File diff suppressed because it is too large
Load Diff
@@ -15,41 +15,82 @@ CREATE SCHEMA IF NOT EXISTS auth;
|
||||
COMMENT ON SCHEMA auth IS 'auth postgrest for users and vessels';
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; -- provides functions to generate universally unique identifiers (UUIDs)
|
||||
CREATE EXTENSION IF NOT EXISTS "moddatetime"; -- provides functions for tracking last modification time
|
||||
CREATE EXTENSION IF NOT EXISTS "citext"; -- provides data type for case-insensitive character strings
|
||||
CREATE EXTENSION IF NOT EXISTS "pgcrypto"; -- provides cryptographic functions
|
||||
|
||||
DROP TABLE IF EXISTS auth.accounts CASCADE;
|
||||
CREATE TABLE IF NOT EXISTS auth.accounts (
|
||||
-- id UUID DEFAULT uuid_generate_v4() NOT NULL,
|
||||
email text primary key check ( email ~* '^.+@.+\..+$' ),
|
||||
userid UUID NOT NULL UNIQUE DEFAULT uuid_generate_v4(),
|
||||
user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
|
||||
email CITEXT primary key check ( email ~* '^.+@.+\..+$' ),
|
||||
first text not null check (length(pass) < 512),
|
||||
last text not null check (length(pass) < 512),
|
||||
pass text not null check (length(pass) < 512),
|
||||
role name not null check (length(role) < 512),
|
||||
preferences JSONB null,
|
||||
created_at TIMESTAMP WITHOUT TIME ZONE default NOW()
|
||||
preferences JSONB NULL DEFAULT '{"email_notifications":true}',
|
||||
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
|
||||
connected_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
|
||||
CONSTRAINT valid_email CHECK (length(email) > 5), -- Enforce at least 5 char, eg: a@b.io
|
||||
CONSTRAINT valid_first CHECK (length(first) > 1),
|
||||
CONSTRAINT valid_last CHECK (length(last) > 1),
|
||||
CONSTRAINT valid_pass CHECK (length(pass) > 4)
|
||||
);
|
||||
-- Preferences jsonb
|
||||
---- PushOver Notification, bool
|
||||
---- PushOver user key, varchar
|
||||
---- Email notification, bool
|
||||
---- Instagram Handle, varchar
|
||||
---- Timezone, TZ
|
||||
---- Unit, bool
|
||||
---- Preferred Homepage
|
||||
---- Website, varchar or text
|
||||
---- Public Profile
|
||||
---- References to users ?
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
auth.accounts
|
||||
IS 'users account table';
|
||||
-- Indexes
|
||||
-- is unused index?
|
||||
--CREATE INDEX accounts_role_idx ON auth.accounts (role);
|
||||
CREATE INDEX accounts_preferences_idx ON auth.accounts using GIN (preferences);
|
||||
-- is unused index?
|
||||
--CREATE INDEX accounts_userid_idx ON auth.accounts (userid);
|
||||
|
||||
CREATE TRIGGER accounts_moddatetime
|
||||
BEFORE UPDATE ON auth.accounts
|
||||
FOR EACH ROW
|
||||
EXECUTE PROCEDURE moddatetime (updated_at);
|
||||
-- Description
|
||||
COMMENT ON TRIGGER accounts_moddatetime
|
||||
ON auth.accounts
|
||||
IS 'Automatic update of updated_at on table modification';
|
||||
|
||||
DROP TABLE IF EXISTS auth.vessels;
|
||||
CREATE TABLE IF NOT EXISTS auth.vessels (
|
||||
-- vesselId UUID PRIMARY KEY REFERENCES auth.accounts(id) ON DELETE RESTRICT,
|
||||
owner_email TEXT PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
|
||||
mmsi TEXT UNIQUE,
|
||||
name TEXT,
|
||||
-- owner_email TEXT,
|
||||
pass UUID,
|
||||
vessel_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
|
||||
-- user_id TEXT NOT NULL REFERENCES auth.accounts(user_id) ON DELETE RESTRICT,
|
||||
owner_email CITEXT PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
|
||||
-- mmsi TEXT UNIQUE, -- Should be a numeric range between 100000000 and 800000000.
|
||||
mmsi NUMERIC UNIQUE, -- MMSI can be optional but if present must be a valid one and unique
|
||||
name TEXT NOT NULL CHECK (length(name) >= 3 AND length(name) < 512),
|
||||
-- pass text not null check (length(pass) < 512), -- unused
|
||||
role name not null check (length(role) < 512),
|
||||
created_at TIMESTAMP WITHOUT TIME ZONE default NOW()
|
||||
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
|
||||
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW()
|
||||
-- CONSTRAINT valid_length_mmsi CHECK (length(mmsi) < 10 OR length(mmsi) = 0)
|
||||
CONSTRAINT valid_range_mmsi CHECK (mmsi > 100000000 AND mmsi < 800000000)
|
||||
);
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
auth.vessels
|
||||
IS 'vessels table link to accounts email user_id column';
|
||||
-- Indexes
|
||||
-- is unused index?
|
||||
--CREATE INDEX vessels_role_idx ON auth.vessels (role);
|
||||
-- is unused index?
|
||||
--CREATE INDEX vessels_name_idx ON auth.vessels (name);
|
||||
CREATE INDEX vessels_vesselid_idx ON auth.vessels (vessel_id);
|
||||
|
||||
CREATE TRIGGER vessels_moddatetime
|
||||
BEFORE UPDATE ON auth.vessels
|
||||
FOR EACH ROW
|
||||
EXECUTE PROCEDURE moddatetime (updated_at);
|
||||
-- Description
|
||||
COMMENT ON TRIGGER vessels_moddatetime
|
||||
ON auth.vessels
|
||||
IS 'Automatic update of updated_at on table modification';
|
||||
|
||||
create or replace function
|
||||
auth.check_role_exists() returns trigger as $$
|
||||
@@ -72,10 +113,13 @@ create constraint trigger ensure_user_role_exists
|
||||
-- trigger add queue new account
|
||||
CREATE TRIGGER new_account_entry AFTER INSERT ON auth.accounts
|
||||
FOR EACH ROW EXECUTE FUNCTION public.new_account_entry_fn();
|
||||
-- trigger add queue new account OTP validation
|
||||
CREATE TRIGGER new_account_otp_validation_entry AFTER INSERT ON auth.accounts
|
||||
FOR EACH ROW EXECUTE FUNCTION public.new_account_otp_validation_entry_fn();
|
||||
|
||||
-- trigger check role on vessel
|
||||
drop trigger if exists ensure_user_role_exists on auth.vessels;
|
||||
create constraint trigger ensure_user_role_exists
|
||||
drop trigger if exists ensure_vessel_role_exists on auth.vessels;
|
||||
create constraint trigger ensure_vessel_role_exists
|
||||
after insert or update on auth.vessels
|
||||
for each row
|
||||
execute procedure auth.check_role_exists();
|
||||
@@ -83,8 +127,6 @@ create constraint trigger ensure_user_role_exists
|
||||
CREATE TRIGGER new_vessel_entry AFTER INSERT ON auth.vessels
|
||||
FOR EACH ROW EXECUTE FUNCTION public.new_vessel_entry_fn();
|
||||
|
||||
create extension if not exists pgcrypto;
|
||||
|
||||
create or replace function
|
||||
auth.encrypt_pass() returns trigger as $$
|
||||
begin
|
||||
@@ -113,6 +155,7 @@ begin
|
||||
return (
|
||||
select role from auth.accounts
|
||||
where accounts.email = user_role.email
|
||||
and user_role.pass is NOT NULL
|
||||
and accounts.pass = crypt(user_role.pass, accounts.pass)
|
||||
);
|
||||
end;
|
||||
@@ -133,6 +176,9 @@ declare
|
||||
_role name;
|
||||
result auth.jwt_token;
|
||||
app_jwt_secret text;
|
||||
_email_valid boolean := false;
|
||||
_email text := email;
|
||||
_user_id text := null;
|
||||
begin
|
||||
-- check email and password
|
||||
select auth.user_role(email, pass) into _role;
|
||||
@@ -145,13 +191,25 @@ begin
|
||||
FROM app_settings
|
||||
WHERE name = 'app.jwt_secret';
|
||||
|
||||
-- Check email_valid and generate OTP
|
||||
SELECT preferences['email_valid'],user_id INTO _email_valid,_user_id
|
||||
FROM auth.accounts a
|
||||
WHERE a.email = _email;
|
||||
IF _email_valid is null or _email_valid is False THEN
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('email_otp', email, now(), _user_id);
|
||||
END IF;
|
||||
|
||||
--RAISE WARNING 'api.login debug: [%],[%],[%]', app_jwt_secret, _role, login.email;
|
||||
-- Generate jwt
|
||||
select jwt.sign(
|
||||
-- row_to_json(r), ''
|
||||
-- row_to_json(r)::json, current_setting('app.jwt_secret')::text
|
||||
row_to_json(r)::json, app_jwt_secret
|
||||
) as token
|
||||
from (
|
||||
select _role as role, login.email as email,
|
||||
select _role as role, login.email as email, -- TODO replace with user_id
|
||||
-- select _role as role, user_id as uid, -- add support in check_jwt
|
||||
extract(epoch from now())::integer + 60*60 as exp
|
||||
) r
|
||||
into result;
|
||||
@@ -165,12 +223,18 @@ api.signup(in email text, in pass text, in firstname text, in lastname text) ret
|
||||
declare
|
||||
_role name;
|
||||
begin
|
||||
IF email IS NULL OR email = ''
|
||||
OR pass IS NULL OR pass = '' THEN
|
||||
RAISE EXCEPTION 'Invalid input'
|
||||
USING HINT = 'Check your parameter';
|
||||
END IF;
|
||||
-- check email and password
|
||||
select auth.user_role(email, pass) into _role;
|
||||
if _role is null then
|
||||
RAISE WARNING 'Register new account email:[%]', email;
|
||||
INSERT INTO auth.accounts ( email, pass, first, last, role)
|
||||
VALUES (email, pass, firstname, lastname, 'user_role');
|
||||
-- TODO replace preferences default into table rather than trigger
|
||||
INSERT INTO auth.accounts ( email, pass, first, last, role, preferences)
|
||||
VALUES (email, pass, firstname, lastname, 'user_role', '{"email_notifications":true}');
|
||||
end if;
|
||||
return ( api.login(email, pass) );
|
||||
end;
|
||||
@@ -185,21 +249,28 @@ declare
|
||||
result auth.jwt_token;
|
||||
app_jwt_secret text;
|
||||
vessel_rec record;
|
||||
_vessel_id text;
|
||||
begin
|
||||
IF vessel_email IS NULL OR vessel_email = ''
|
||||
OR vessel_name IS NULL OR vessel_name = '' THEN
|
||||
RAISE EXCEPTION 'Invalid input'
|
||||
USING HINT = 'Check your parameter';
|
||||
END IF;
|
||||
IF public.isnumeric(vessel_mmsi) IS False THEN
|
||||
vessel_mmsi = NULL;
|
||||
END IF;
|
||||
-- check vessel exist
|
||||
SELECT * INTO vessel_rec
|
||||
FROM auth.vessels vessel
|
||||
WHERE LOWER(vessel.owner_email) = LOWER(vessel_email)
|
||||
AND vessel.mmsi = vessel_mmsi
|
||||
AND LOWER(vessel.name) = LOWER(vessel_name);
|
||||
if vessel_rec is null then
|
||||
WHERE vessel.owner_email = vessel_email;
|
||||
IF vessel_rec IS NULL THEN
|
||||
RAISE WARNING 'Register new vessel name:[%] mmsi:[%] for [%]', vessel_name, vessel_mmsi, vessel_email;
|
||||
INSERT INTO auth.vessels (owner_email, mmsi, name, role)
|
||||
VALUES (vessel_email, vessel_mmsi, vessel_name, 'vessel_role');
|
||||
VALUES (vessel_email, vessel_mmsi::NUMERIC, vessel_name, 'vessel_role') RETURNING vessel_id INTO _vessel_id;
|
||||
vessel_rec.role := 'vessel_role';
|
||||
vessel_rec.owner_email = vessel_email;
|
||||
vessel_rec.mmsi = vessel_mmsi;
|
||||
end if;
|
||||
vessel_rec.vessel_id = _vessel_id;
|
||||
END IF;
|
||||
|
||||
-- Get app_jwt_secret
|
||||
SELECT value INTO app_jwt_secret
|
||||
@@ -211,8 +282,9 @@ begin
|
||||
) as token
|
||||
from (
|
||||
select vessel_rec.role as role,
|
||||
vessel_rec.owner_email as email,
|
||||
vessel_rec.mmsi as mmsi
|
||||
vessel_rec.owner_email as email, -- TODO replace with user_id
|
||||
-- vessel_rec.user_id as uid
|
||||
vessel_rec.vessel_id as vid
|
||||
) r
|
||||
into result;
|
||||
return result;
|
||||
|
@@ -1,6 +1,6 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- singalk db permissions
|
||||
--
|
||||
-- signalk db api schema
|
||||
-- View and Function that have dependency with auth schema
|
||||
|
||||
-- List current database
|
||||
select current_database();
|
||||
@@ -8,54 +8,119 @@ select current_database();
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
-- Link auth.vessels with api.metadata
|
||||
--ALTER TABLE api.metadata ADD vessel_id TEXT NOT NULL REFERENCES auth.vessels(vessel_id) ON DELETE RESTRICT;
|
||||
ALTER TABLE api.metadata ADD FOREIGN KEY (vessel_id) REFERENCES auth.vessels(vessel_id) ON DELETE RESTRICT;
|
||||
COMMENT ON COLUMN api.metadata.vessel_id IS 'Link auth.vessels with api.metadata via FOREIGN KEY and REFERENCES';
|
||||
|
||||
-- Link auth.vessels with auth.accounts
|
||||
--ALTER TABLE auth.vessels ADD user_id TEXT NOT NULL REFERENCES auth.accounts(user_id) ON DELETE RESTRICT;
|
||||
--COMMENT ON COLUMN auth.vessels.user_id IS 'Link auth.vessels with auth.accounts';
|
||||
--COMMENT ON COLUMN auth.vessels.vessel_id IS 'Vessel identifier. Link auth.vessels with api.metadata';
|
||||
|
||||
-- REFERENCE ship type with AIS type ?
|
||||
-- REFERENCE mmsi MID with country ?
|
||||
|
||||
|
||||
-- List vessel
|
||||
--TODO add geojson with position
|
||||
CREATE OR REPLACE VIEW api.vessel_view AS
|
||||
DROP VIEW IF EXISTS api.vessels_view;
|
||||
CREATE OR REPLACE VIEW api.vessels_view AS
|
||||
WITH metadata AS (
|
||||
SELECT COALESCE(
|
||||
(SELECT m.time
|
||||
FROM api.metadata m
|
||||
WHERE m.vessel_id = current_setting('vessel.id')
|
||||
)::TEXT ,
|
||||
NULL ) as last_contact
|
||||
)
|
||||
SELECT
|
||||
v.name as name,
|
||||
v.mmsi as mmsi,
|
||||
v.created_at as created_at,
|
||||
m.time as last_contact
|
||||
FROM auth.vessels v, api.metadata m
|
||||
WHERE
|
||||
m.mmsi = current_setting('vessel.mmsi')
|
||||
AND lower(v.owner_email) = lower(current_setting('request.jwt.claims', true)::json->>'email');
|
||||
v.created_at::timestamp(0) as created_at,
|
||||
m.last_contact as last_contact
|
||||
FROM auth.vessels v, metadata m
|
||||
WHERE v.owner_email = current_setting('user.email');
|
||||
-- Description
|
||||
COMMENT ON VIEW
|
||||
api.vessels_view
|
||||
IS 'Expose vessels listing to web api';
|
||||
|
||||
DROP FUNCTION IF EXISTS public.has_vessel_fn;
CREATE OR REPLACE FUNCTION public.has_vessel_fn() RETURNS BOOLEAN
AS $has_vessel$
DECLARE
BEGIN
    -- True when the current user (user.email setting) has a registered vessel.
    -- EXISTS avoids the pitfall of "(SELECT name ...) IS NOT NULL", which
    -- reports FALSE for an existing row whose name column is NULL.
    RETURN EXISTS (
        SELECT 1
            FROM auth.vessels v
            JOIN auth.accounts a ON a.email = v.owner_email
            WHERE a.email = current_setting('user.email')
    );
END;
$has_vessel$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    public.has_vessel_fn
    IS 'Check if user has a vessel register';
|
||||
|
||||
DROP FUNCTION IF EXISTS public.has_vessel_metadata_fn;
CREATE OR REPLACE FUNCTION public.has_vessel_metadata_fn() RETURNS BOOLEAN
AS $has_vessel_metadata$
DECLARE
BEGIN
    -- True when the current user's vessel has sent metadata.
    -- Bug fix: the previous version declared aliases (a, v, m) but then
    -- referenced auth.vessels.owner_email / auth.accounts.email, which
    -- PostgreSQL rejects once an alias is in effect.
    RETURN EXISTS (
        SELECT 1
            FROM auth.accounts a
            JOIN auth.vessels v ON v.owner_email = a.email
            JOIN api.metadata m ON m.vessel_id = v.vessel_id
            WHERE a.email = current_setting('user.email')
    );
END;
$has_vessel_metadata$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    public.has_vessel_metadata_fn
    IS 'Check if the user''s vessel has sent metadata';
|
||||
|
||||
-- Or function?
|
||||
-- TODO Improve: return null until the vessel has sent metadata?
|
||||
DROP FUNCTION IF EXISTS api.vessel_fn;
|
||||
CREATE OR REPLACE FUNCTION api.vessel_fn(OUT vessel JSON) RETURNS JSON
|
||||
AS $vessel$
|
||||
DECLARE
|
||||
DECLARE
|
||||
BEGIN
|
||||
SELECT
|
||||
json_build_object(
|
||||
'name', v.name,
|
||||
'mmsi', v.mmsi,
|
||||
'created_at', v.created_at,
|
||||
'last_contact', m.time,
|
||||
'geojson', ST_AsGeoJSON(geojson_t.*)::json
|
||||
)
|
||||
jsonb_build_object(
|
||||
'name', v.name,
|
||||
'mmsi', coalesce(v.mmsi, null),
|
||||
'created_at', v.created_at::timestamp(0),
|
||||
'last_contact', coalesce(m.time, null),
|
||||
'geojson', coalesce(ST_AsGeoJSON(geojson_t.*)::json, null)
|
||||
)::jsonb || api.vessel_details_fn()::jsonb
|
||||
INTO vessel
|
||||
FROM auth.vessels v, api.metadata m,
|
||||
( SELECT
|
||||
t.*
|
||||
FROM (
|
||||
( select
|
||||
time,
|
||||
courseovergroundtrue,
|
||||
speedoverground,
|
||||
anglespeedapparent,
|
||||
longitude,latitude,
|
||||
st_makepoint(longitude,latitude) AS geo_point
|
||||
FROM public.last_metric
|
||||
WHERE latitude IS NOT NULL
|
||||
AND longitude IS NOT NULL
|
||||
)
|
||||
) AS t
|
||||
) AS geojson_t
|
||||
WHERE v.mmsi = current_setting('vessel.mmsi')
|
||||
AND m.mmsi = v.mmsi;
|
||||
--RAISE notice 'api.vessel_fn %', obj;
|
||||
( select
|
||||
current_setting('vessel.name') as name,
|
||||
time,
|
||||
courseovergroundtrue,
|
||||
speedoverground,
|
||||
anglespeedapparent,
|
||||
longitude,latitude,
|
||||
st_makepoint(longitude,latitude) AS geo_point
|
||||
FROM api.metrics
|
||||
WHERE
|
||||
latitude IS NOT NULL
|
||||
AND longitude IS NOT NULL
|
||||
AND vessel_id = current_setting('vessel.id', false)
|
||||
ORDER BY time DESC
|
||||
) AS geojson_t
|
||||
WHERE
|
||||
m.vessel_id = current_setting('vessel.id')
|
||||
AND m.vessel_id = v.vessel_id;
|
||||
--RAISE notice 'api.vessel_fn %', obj;
|
||||
END;
|
||||
$vessel$ language plpgsql security definer;
|
||||
-- Description
|
||||
@@ -65,15 +130,132 @@ COMMENT ON FUNCTION
|
||||
|
||||
-- Export user settings
|
||||
DROP FUNCTION IF EXISTS api.settings_fn;
|
||||
CREATE FUNCTION api.settings_fn(OUT settings JSON) RETURNS JSON AS $user_settings$
|
||||
CREATE OR REPLACE FUNCTION api.settings_fn(out settings json) RETURNS JSON
|
||||
AS $user_settings$
|
||||
BEGIN
|
||||
select first,last,preferences,created_at INTO settings
|
||||
from auth.accounts
|
||||
where lower(email) = lower(current_setting('request.jwt.claims', true)::json->>'email');
|
||||
select row_to_json(row)::json INTO settings
|
||||
from (
|
||||
select a.email, a.first, a.last, a.preferences, a.created_at,
|
||||
INITCAP(CONCAT (LEFT(first, 1), ' ', last)) AS username,
|
||||
public.has_vessel_fn() as has_vessel
|
||||
--public.has_vessel_metadata_fn() as has_vessel_metadata,
|
||||
from auth.accounts a
|
||||
where email = current_setting('user.email')
|
||||
) row;
|
||||
END;
|
||||
$user_settings$ language plpgsql security definer;
|
||||
|
||||
-- Description
|
||||
COMMENT ON FUNCTION
|
||||
api.settings_fn
|
||||
IS 'Expose user settings to API';
|
||||
|
||||
DROP FUNCTION IF EXISTS api.versions_fn;
-- Report application and system component versions as a single JSON object.
-- Keys: api_version (app.version setting), sys_version (leading part of
-- version()), timescaledb and postgis extension versions.
CREATE OR REPLACE FUNCTION api.versions_fn() RETURNS JSON
AS $version$
DECLARE
    app_version TEXT;
    system_version TEXT;
BEGIN
    -- Add postgrest version https://postgrest.org/en/v11.2/references/admin.html#server-version
    -- Trim version() down to its leading "PostgreSQL x.y" portion.
    SELECT value,
           rtrim(substring(version(), 0, 17))
      INTO app_version, system_version
      FROM app_settings
     WHERE name = 'app.version';
    -- Extension versions are read straight from pg_extension.
    RETURN json_build_object(
        'api_version', app_version,
        'sys_version', system_version,
        'timescaledb', (SELECT extversion FROM pg_extension WHERE extname = 'timescaledb'),
        'postgis',     (SELECT extversion FROM pg_extension WHERE extname = 'postgis'));
END;
$version$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.versions_fn
    IS 'Expose as a function, app and system version to API';
|
||||
|
||||
DROP VIEW IF EXISTS api.versions_view;
-- Table-view counterpart of api.versions_fn: a single row with app and
-- system component versions, readable by roles granted SELECT.
CREATE OR REPLACE VIEW api.versions_view AS
    -- Add postgrest version https://postgrest.org/en/v11.2/references/admin.html#server-version
    SELECT
        value AS api_version,
        --version() as sys_version
        -- Keep only the leading "PostgreSQL x.y" portion of version()
        rtrim(substring(version(), 0, 17)) AS sys_version,
        -- Extension versions via scalar subqueries; inner aliases name the columns
        (SELECT extversion as timescaledb FROM pg_extension WHERE extname='timescaledb'),
        (SELECT extversion as postgis FROM pg_extension WHERE extname='postgis')
    FROM app_settings
    WHERE name = 'app.version';
-- Description
COMMENT ON VIEW
    api.versions_view
    IS 'Expose as a table view app and system version to API';
|
||||
|
||||
DROP FUNCTION IF EXISTS api.update_user_preferences_fn;
-- Update/Add a specific user setting into preferences
-- `key` is a jsonb path literal such as '{email_valid}'; `value` is coerced
-- to jsonb: plain strings get quoted, while json objects, numbers and
-- booleans pass through untouched. Returns True when the current user's
-- account row was updated, False otherwise.
CREATE OR REPLACE FUNCTION api.update_user_preferences_fn(IN key TEXT, IN value TEXT) RETURNS BOOLEAN AS
$update_user_preferences$
DECLARE
    first_c TEXT := NULL;
    last_c TEXT := NULL;
    _value TEXT := value;
BEGIN
    -- Is it the only way to check variable type?
    -- Convert string to jsonb and skip type of json obj or integer or boolean
    SELECT SUBSTRING(value, 1, 1),RIGHT(value, 1) INTO first_c,last_c;
    IF first_c <> '{' AND last_c <> '}' AND public.isnumeric(value) IS False
        AND public.isboolean(value) IS False THEN
        --RAISE WARNING '-> first_c:[%] last_c:[%] pg_typeof:[%]', first_c,last_c,pg_typeof(value);
        -- Plain string: wrap it as a quoted jsonb string literal
        _value := to_jsonb(value)::jsonb;
    END IF;
    --RAISE WARNING '-> update_user_preferences_fn update preferences for user [%]', current_setting('request.jwt.claims', true)::json->>'email';
    -- NOTE(review): relies on 'user.email' being set by the caller (check_jwt
    -- or the OTP handlers); when absent, the WHERE matches no rows.
    UPDATE auth.accounts
        SET preferences =
            jsonb_set(preferences::jsonb, key::text[], _value::jsonb)
        WHERE
            email = current_setting('user.email', true);
    IF FOUND THEN
        --RAISE WARNING '-> update_user_preferences_fn True';
        RETURN True;
    END IF;
    --RAISE WARNING '-> update_user_preferences_fn False';
    RETURN False;
END;
$update_user_preferences$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.update_user_preferences_fn
    IS 'Update user preferences jsonb key pair value';
|
||||
|
||||
DROP FUNCTION IF EXISTS api.vessel_details_fn;
-- Return details for the current vessel (current_setting('vessel.id')) as
-- JSON: AIS ship-type description, registration country and iso3166
-- alpha-2 code (derived from the MMSI's MID prefix), length, beam, height.
CREATE OR REPLACE FUNCTION api.vessel_details_fn() RETURNS JSON AS
$vessel_details$
DECLARE
BEGIN
    RETURN ( WITH tbl AS (
                SELECT mmsi,ship_type,length,beam,height FROM api.metadata WHERE vessel_id = current_setting('vessel.id', false)
            )
            SELECT json_build_object(
                    'ship_type', (SELECT ais.description FROM aistypes ais, tbl WHERE t.ship_type = ais.id),
                    -- The first 3 digits of the MMSI (the MID) identify the country
                    'country', (SELECT mid.country FROM mid, tbl WHERE LEFT(cast(mmsi as text), 3)::NUMERIC = mid.id),
                    'alpha_2', (SELECT o.alpha_2 FROM mid m, iso3166 o, tbl WHERE LEFT(cast(mmsi as text), 3)::NUMERIC = m.id AND m.country_id = o.id),
                    -- Fix: report the vessel length, not the ship_type code
                    'length', t.length,
                    'beam', t.beam,
                    'height', t.height)
                    FROM tbl t
            );
END;
$vessel_details$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.vessel_details_fn
    IS 'Return vessel details such as metadata (length,beam,height), ais type and country name and country iso3166-alpha-2';
|
||||
|
||||
DROP VIEW IF EXISTS api.eventlogs_view;
-- Process-queue entries belonging to the current user or vessel, oldest
-- first. security_invoker makes the query run with the caller's rights.
CREATE VIEW api.eventlogs_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT pq.*
        from public.process_queue pq
        where ref_id = current_setting('user.id', true)
            or ref_id = current_setting('vessel.id', true)
        order by id asc;
-- Description
COMMENT ON VIEW
    api.eventlogs_view
    IS 'Event logs view';
|
511
initdb/02_5_signalk_auth_otp.sql
Normal file
511
initdb/02_5_signalk_auth_otp.sql
Normal file
@@ -0,0 +1,511 @@
|
||||
---------------------------------------------------------------------------
-- signalk db auth schema
-- View and Function that have dependency with auth schema

-- List current database
select current_database();

-- connect to the DB (psql meta-command)
\c signalk

DROP TABLE IF EXISTS auth.otp;
-- One pending OTP per account; rows expire after 15 minutes (enforced by
-- auth.verify_otp_fn and pruned by cron_process_prune_otp_fn).
CREATE TABLE IF NOT EXISTS auth.otp (
  -- update email type to CITEXT, https://www.postgresql.org/docs/current/citext.html
  user_email CITEXT NOT NULL PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
  otp_pass VARCHAR(10) NOT NULL,
  -- Issue time; verify_otp_fn only accepts codes newer than 15 minutes
  otp_timestamp TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW(),
  -- Attempt counter checked by verify_otp_fn (rejects when >= 3)
  otp_tries SMALLINT NOT NULL DEFAULT '0'
);
-- Description
COMMENT ON TABLE
    auth.otp
    IS 'Stores temporal otp code for up to 15 minutes';
-- Indexes
CREATE INDEX otp_pass_idx ON auth.otp (otp_pass);
CREATE INDEX otp_user_email_idx ON auth.otp (user_email);
|
||||
|
||||
DROP FUNCTION IF EXISTS public.generate_uid_fn;
-- Produce a random string of `size` decimal digits (used as OTP codes).
-- Randomness comes from pgcrypto's gen_random_bytes; each byte is mapped
-- onto one character of the digit alphabet.
CREATE OR REPLACE FUNCTION public.generate_uid_fn(size INT) RETURNS TEXT
AS $generate_uid_fn$
DECLARE
    alphabet TEXT := '0123456789';
    raw BYTEA := gen_random_bytes(size);
    alphabet_len INT := length(alphabet);
    code TEXT := '';
BEGIN
    -- One output character per random byte.
    FOR pos IN 0 .. size - 1 LOOP
        code := code || substr(alphabet, get_byte(raw, pos) % alphabet_len + 1, 1);
    END LOOP;
    RETURN code;
END;
$generate_uid_fn$ LANGUAGE plpgsql VOLATILE;
-- Description
COMMENT ON FUNCTION
    public.generate_uid_fn
    IS 'Generate a random digit';
|
||||
|
||||
-- Generate an OTP code by email
|
||||
-- Expose as an API endpoint
|
||||
DROP FUNCTION IF EXISTS api.generate_otp_fn;
-- Create (or refresh) a 6-digit OTP for an existing account and return it.
-- Returns NULL when no account matches the given email; raises on
-- null/empty input.
CREATE OR REPLACE FUNCTION api.generate_otp_fn(IN email TEXT) RETURNS TEXT
AS $generate_otp$
DECLARE
    _email CITEXT := email;
    account_email TEXT := NULL;
    new_otp VARCHAR(10) := NULL;
BEGIN
    -- Guard against null/empty input
    IF email IS NULL OR _email IS NULL OR _email = '' THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;
    -- CITEXT comparison makes the account lookup case-insensitive
    SELECT lower(a.email) INTO account_email FROM auth.accounts a WHERE a.email = _email;
    IF account_email IS NULL THEN
        RETURN NULL;
    END IF;
    --SELECT substr(gen_random_uuid()::text, 1, 6) INTO otp_pass;
    new_otp := generate_uid_fn(6);
    -- upsert - Insert or update otp code on conflict (one active code per user)
    INSERT INTO auth.otp (user_email, otp_pass)
        VALUES (account_email, new_otp)
        ON CONFLICT (user_email) DO UPDATE SET otp_pass = new_otp, otp_timestamp = NOW();
    RETURN new_otp;
END;
$generate_otp$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.generate_otp_fn
    IS 'Generate otp code';
|
||||
|
||||
DROP FUNCTION IF EXISTS api.recover;
-- Start a password-recovery flow: generate an OTP for the account and send
-- a reset email carrying "uuid=<user_id>&token=<otp>". Raises on empty or
-- unknown email; returns True once the notification is queued.
CREATE OR REPLACE FUNCTION api.recover(in email text) returns BOOLEAN
AS $recover_fn$
DECLARE
    _email CITEXT := email;
    _user_id TEXT := NULL;
    otp_pass TEXT := NULL;
    _reset_qs TEXT := NULL;
    user_settings jsonb := NULL;
BEGIN
    IF _email IS NULL OR _email = '' THEN
        RAISE EXCEPTION 'Invalid input'
            USING HINT = 'Check your parameter';
    END IF;
    SELECT user_id INTO _user_id FROM auth.accounts a WHERE a.email = _email;
    IF NOT FOUND THEN
        RAISE EXCEPTION 'Invalid input'
            USING HINT = 'Check your parameter';
    END IF;
    -- Generate OTP
    otp_pass := api.generate_otp_fn(email);
    SELECT CONCAT('uuid=', _user_id, '&token=', otp_pass) INTO _reset_qs;
    -- Fix: expose the account email so update_user_preferences_fn (which
    -- filters on current_setting('user.email')) targets this account even
    -- for anonymous callers — consistent with api.pushover_fn/api.telegram_fn.
    PERFORM set_config('user.email', _email::TEXT, false);
    -- Enable email_notifications
    PERFORM api.update_user_preferences_fn('{email_notifications}'::TEXT, True::TEXT);
    -- Send email/notifications
    user_settings := '{"email": "' || _email || '", "reset_qs": "' || _reset_qs || '"}';
    PERFORM send_notification_fn('email_reset'::TEXT, user_settings::JSONB);
    RETURN TRUE;
END;
$recover_fn$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.recover
    IS 'Send recover password email to reset password';
|
||||
|
||||
DROP FUNCTION IF EXISTS api.reset;
-- Reset an account password given the OTP token and user uuid sent by
-- api.recover. Returns True on success, False when the token or uuid does
-- not match; raises on null parameters.
CREATE OR REPLACE FUNCTION api.reset(in pass text, in token text, in uuid text) returns BOOLEAN
AS $reset_fn$
DECLARE
    _email TEXT := NULL;
    -- Fix: copy the parameter into a distinctly-named variable; in the
    -- UPDATE below a bare "pass" on the right-hand side is ambiguous
    -- between the parameter and the auth.accounts.pass column, which
    -- plpgsql rejects at runtime by default.
    _pass TEXT := pass;
BEGIN
    -- Check parameters
    IF token IS NULL OR uuid IS NULL OR pass IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;
    -- Verify token
    SELECT auth.verify_otp_fn(token) INTO _email;
    IF _email IS NOT NULL THEN
        SELECT email INTO _email FROM auth.accounts WHERE user_id = uuid;
        IF _email IS NULL THEN
            RETURN False;
        END IF;
        -- Set user new password
        UPDATE auth.accounts
            SET pass = _pass
            WHERE email = _email;
        -- Fix: expose the account email so update_user_preferences_fn
        -- (filtering on current_setting('user.email')) targets this account,
        -- matching the sibling OTP handlers (api.pushover_fn, api.telegram_fn).
        PERFORM set_config('user.email', _email, false);
        -- Enable email_validation into user preferences
        PERFORM api.update_user_preferences_fn('{email_valid}'::TEXT, True::TEXT);
        -- Enable email_notifications
        PERFORM api.update_user_preferences_fn('{email_notifications}'::TEXT, True::TEXT);
        -- Delete token when validated
        DELETE FROM auth.otp
            WHERE user_email = _email;
        RETURN True;
    END IF;
    RETURN False;
END;
$reset_fn$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.reset
    IS 'Reset user password base on otp code and user_id send by email from api.recover';
|
||||
|
||||
DROP FUNCTION IF EXISTS auth.verify_otp_fn;
-- Resolve an OTP token to the email it was issued for.
-- Returns NULL when the token is unknown, older than 15 minutes, or its
-- row has accumulated 3 or more tries. Raises on a NULL token.
CREATE OR REPLACE FUNCTION auth.verify_otp_fn(IN token TEXT) RETURNS TEXT
AS $verify_otp$
DECLARE
    email TEXT := NULL;
BEGIN
    IF token IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;
    -- Token is valid 15 minutes
    -- NOTE(review): otp_tries is checked here but never incremented in this
    -- function — confirm where failed attempts are counted.
    SELECT user_email INTO email
        FROM auth.otp
        WHERE otp_timestamp > NOW() AT TIME ZONE 'UTC' - INTERVAL '15 MINUTES'
            AND otp_tries < 3
            AND otp_pass = token;
    RETURN email;
END;
$verify_otp$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    auth.verify_otp_fn
    IS 'Verify OTP';
|
||||
|
||||
-- CRON to purge OTP older than 15 minutes
|
||||
DROP FUNCTION IF EXISTS public.cron_process_prune_otp_fn;
-- pg_cron entry point: delete OTP rows older than 15 minutes, logging each
-- removal with RAISE NOTICE.
CREATE OR REPLACE FUNCTION public.cron_process_prune_otp_fn() RETURNS void
AS $$
DECLARE
    expired record;
BEGIN
    -- Purge OTP older than 15 minutes
    RAISE NOTICE 'cron_process_prune_otp_fn';
    FOR expired IN
        SELECT *
            FROM auth.otp
            WHERE otp_timestamp < NOW() AT TIME ZONE 'UTC' - INTERVAL '15 MINUTES'
            ORDER BY otp_timestamp desc
    LOOP
        RAISE NOTICE '-> cron_process_prune_otp_fn deleting expired otp for user [%]', expired.user_email;
        -- remove entry
        DELETE FROM auth.otp
            WHERE user_email = expired.user_email;
        RAISE NOTICE '-> cron_process_prune_otp_fn deleted expire otp for user [%]', expired.user_email;
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_prune_otp_fn
    IS 'init by pg_cron to purge older than 15 minutes OTP token';
|
||||
|
||||
-- Email OTP validation
|
||||
-- Expose as an API endpoint
|
||||
DROP FUNCTION IF EXISTS api.email_fn;
-- Validate an email-verification OTP for the logged-in user and persist
-- the result in their preferences. Returns True on success, False when the
-- token is invalid or belongs to a different email than the JWT's.
CREATE OR REPLACE FUNCTION api.email_fn(IN token TEXT) RETURNS BOOLEAN
AS $email_validation$
DECLARE
    _email TEXT := NULL;
BEGIN
    -- Check parameters
    IF token IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;
    -- Verify token; NULL means unknown/expired
    SELECT auth.verify_otp_fn(token) INTO _email;
    IF _email IS NULL THEN
        RETURN False;
    END IF;
    -- Check the email JWT token match the OTP email
    IF current_setting('user.email', true) <> _email THEN
        RETURN False;
    END IF;
    -- Set user email into env to allow RLS update
    --PERFORM set_config('user.email', _email, false);
    -- Enable email_validation into user preferences
    PERFORM api.update_user_preferences_fn('{email_valid}'::TEXT, True::TEXT);
    -- Enable email_notifications
    PERFORM api.update_user_preferences_fn('{email_notifications}'::TEXT, True::TEXT);
    -- Delete token when validated
    DELETE FROM auth.otp
        WHERE user_email = _email;
    -- Disable to reduce spam
    -- Send Notification async
    --INSERT INTO process_queue (channel, payload, stored)
    --    VALUES ('email_valid', _email, now());
    RETURN True;
END;
$email_validation$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.email_fn
    IS 'Store email_valid into user preferences if valid token/otp';
|
||||
|
||||
-- Pushover Subscription API
|
||||
-- Web-Based Subscription Process
|
||||
-- https://pushover.net/api/subscriptions#web
|
||||
-- Expose as an API endpoint
|
||||
-- Build the Pushover web-subscription URL for the current user:
-- <pushover_app_url>?success=<urlencoded callback + OTP>&failure=<urlencoded profile page>
-- Returned as {"link": "..."} in the OUT parameter.
CREATE OR REPLACE FUNCTION api.pushover_subscribe_link_fn(OUT pushover_link JSON) RETURNS JSON
AS $pushover_subscribe_link$
DECLARE
    app_url text;
    otp_code text;
    pushover_app_url text;
    success text;
    failure text;
    -- Caller identity; assumes 'user.email' was set for this session
    email text := current_setting('user.email', true);
BEGIN
    --https://pushover.net/api/subscriptions#web
    -- "https://pushover.net/subscribe/PostgSail-23uvrho1d5y6n3e"
    --   + "?success=" + urlencode("https://beta.openplotter.cloud/api/rpc/pushover_fn?token=" + generate_otp_fn({{email}}))
    --   + "&failure=" + urlencode("https://beta.openplotter.cloud/settings");
    -- get app_url
    SELECT
        value INTO app_url
    FROM
        public.app_settings
    WHERE
        name = 'app.url';
    -- get pushover url subscribe
    SELECT
        value INTO pushover_app_url
    FROM
        public.app_settings
    WHERE
        name = 'app.pushover_app_url';
    -- Generate OTP
    otp_code := api.generate_otp_fn(email);
    -- On success redirect to API endpoint
    -- NOTE(review): the OTP is appended OUTSIDE the urlescaped prefix so it
    -- remains the token= query value — confirm against the frontend routes.
    SELECT CONCAT(
        '?success=',
        public.urlescape_py_fn(CONCAT(app_url,'/pushover?token=')),
        otp_code)
        INTO success;
    -- On failure redirect to user settings, where he does come from
    SELECT CONCAT(
        '&failure=',
        public.urlescape_py_fn(CONCAT(app_url,'/profile'))
        ) INTO failure;
    SELECT json_build_object('link', CONCAT(pushover_app_url, success, failure)) INTO pushover_link;
END;
$pushover_subscribe_link$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.pushover_subscribe_link_fn
    IS 'Generate Pushover subscription link';
|
||||
|
||||
-- Confirm Pushover Subscription
|
||||
-- Web-Based Subscription Process
|
||||
-- https://pushover.net/api/subscriptions#web
|
||||
-- Expose as an API endpoint
|
||||
DROP FUNCTION IF EXISTS api.pushover_fn;
-- Confirm a Pushover web subscription: validate the OTP, store the
-- pushover_user_key in the matching user's preferences and enable phone
-- notifications. Returns True on success, False on an invalid token.
CREATE OR REPLACE FUNCTION api.pushover_fn(IN token TEXT, IN pushover_user_key TEXT) RETURNS BOOLEAN
AS $pushover$
DECLARE
    _email TEXT := NULL;
BEGIN
    -- Check parameters
    IF token IS NULL OR pushover_user_key IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;
    -- Verify token
    SELECT auth.verify_otp_fn(token) INTO _email;
    IF _email IS NOT NULL THEN
        -- Set user email into env to allow RLS update
        PERFORM set_config('user.email', _email, false);
        -- Add pushover_user_key into user preferences
        PERFORM api.update_user_preferences_fn('{pushover_user_key}'::TEXT, pushover_user_key::TEXT);
        -- Enable phone_notifications
        PERFORM api.update_user_preferences_fn('{phone_notifications}'::TEXT, True::TEXT);
        -- Delete token when validated
        DELETE FROM auth.otp
            WHERE user_email = _email;
        -- Disable Notification because
        -- Pushover send a notification when successful with the description of the app
        --
        -- Send Notification async
        --INSERT INTO process_queue (channel, payload, stored)
        --    VALUES ('pushover_valid', _email, now());
        RETURN True;
    END IF;
    RETURN False;
END;
$pushover$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.pushover_fn
    IS 'Confirm Pushover Subscription and store pushover_user_key into user preferences if provide a valid OTP token';
|
||||
|
||||
-- Telegram OTP Validation
|
||||
-- Expose as an API endpoint
|
||||
DROP FUNCTION IF EXISTS api.telegram_fn;
-- Validate a telegram OTP and store the telegram chat object in the
-- matching user's preferences. Returns True on success, False on an
-- invalid token. (Removed an unused local variable `user_settings`.)
CREATE OR REPLACE FUNCTION api.telegram_fn(IN token TEXT, IN telegram_obj TEXT) RETURNS BOOLEAN
AS $telegram$
DECLARE
    _email TEXT := NULL;
BEGIN
    -- Check parameters
    IF token IS NULL OR telegram_obj IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;
    -- Verify token
    SELECT auth.verify_otp_fn(token) INTO _email;
    IF _email IS NOT NULL THEN
        -- Set user email into env to allow RLS update
        PERFORM set_config('user.email', _email, false);
        -- Add telegram obj into user preferences
        PERFORM api.update_user_preferences_fn('{telegram}'::TEXT, telegram_obj::TEXT);
        -- Delete token when validated
        DELETE FROM auth.otp
            WHERE user_email = _email;
        -- Send Notification async
        --INSERT INTO process_queue (channel, payload, stored)
        --    VALUES ('telegram_valid', _email, now());
        RETURN True;
    END IF;
    RETURN False;
END;
$telegram$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.telegram_fn
    IS 'Confirm telegram user and store telegram chat details into user preferences if provide a valid OTP token';
|
||||
|
||||
-- Telegram user validation
|
||||
DROP FUNCTION IF EXISTS auth.telegram_user_exists_fn;
-- Check whether an account exists for `email` whose stored telegram chat
-- object (preferences->telegram->from->id) carries the given telegram
-- user_id. Raises on null input.
CREATE OR REPLACE FUNCTION auth.telegram_user_exists_fn(IN email TEXT, IN user_id BIGINT) RETURNS BOOLEAN
AS $telegram_user_exists$
DECLARE
    _email CITEXT := email;
    _user_id BIGINT := user_id;
BEGIN
    -- Fix: the original guard referenced an undeclared variable (_chat_id),
    -- which made every call fail; check the declared _user_id instead.
    IF _email IS NULL OR _user_id IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;
    -- Does user and telegram obj
    SELECT preferences->'telegram'->'from'->'id' INTO _user_id
        FROM auth.accounts a
        WHERE a.email = _email
            AND cast(preferences->'telegram'->'from'->'id' as BIGINT) = _user_id::BIGINT;
    IF FOUND THEN
        RETURN TRUE;
    END IF;
    RETURN FALSE;
END;
$telegram_user_exists$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    auth.telegram_user_exists_fn
    IS 'Check if user exist based on email and user_id';
|
||||
|
||||
-- Telegram otp validation
|
||||
DROP FUNCTION IF EXISTS api.telegram_otp_fn;
-- Generate an OTP for `email` and push it to the user via the
-- 'telegram_otp' notification channel. The OUT parameter returns the code
-- (NULL when generate_otp_fn finds no matching account).
CREATE OR REPLACE FUNCTION api.telegram_otp_fn(IN email TEXT, OUT otp_code TEXT) RETURNS TEXT
AS $telegram_otp$
DECLARE
    _email CITEXT := email;
    user_settings jsonb := NULL;
BEGIN
    IF _email IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;
    -- Generate token
    otp_code := api.generate_otp_fn(_email);
    IF otp_code IS NOT NULL THEN
        -- Set user email into env to allow RLS update
        PERFORM set_config('user.email', _email, false);
        -- Send Notification
        user_settings := '{"email": "' || _email || '", "otp_code": "' || otp_code || '"}';
        PERFORM send_notification_fn('telegram_otp'::TEXT, user_settings::JSONB);
    END IF;
END;
$telegram_otp$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.telegram_otp_fn
    IS 'Telegram otp generation';
|
||||
|
||||
-- Telegram JWT auth
|
||||
-- Expose as an API endpoint
|
||||
-- Avoid sending a password so use email and chat_id as key pair
|
||||
DROP FUNCTION IF EXISTS api.telegram;
-- Issue a user_role JWT (1 hour expiry) for a telegram chat user.
-- `user_id` is the telegram id stored in preferences->telegram->from->id;
-- the `email` parameter is overwritten by the account lookup.
-- Returns NULL when no account matches the telegram id.
CREATE OR REPLACE FUNCTION api.telegram(IN user_id BIGINT, IN email TEXT DEFAULT NULL) RETURNS auth.jwt_token
AS $telegram_jwt$
DECLARE
    _email TEXT := email;
    _user_id BIGINT := user_id;
    _uid TEXT := NULL;
    _exist BOOLEAN := False;
    result auth.jwt_token;
    app_jwt_secret text;
BEGIN
    IF _user_id IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;

    -- Check _user_id
    SELECT auth.telegram_session_exists_fn(_user_id) into _exist;
    IF _exist IS NULL OR _exist <> True THEN
        --RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
        RETURN NULL;
    END IF;

    -- Get email and user_id
    SELECT a.email,a.user_id INTO _email,_uid
        FROM auth.accounts a
        WHERE cast(preferences->'telegram'->'from'->'id' as BIGINT) = _user_id::BIGINT;

    -- Get app_jwt_secret
    SELECT value INTO app_jwt_secret
        FROM app_settings
        WHERE name = 'app.jwt_secret';

    -- Generate JWT token, force user_role
    -- Claims: role, lower-cased email, account uid, exp = now + 1 hour
    select jwt.sign(
        row_to_json(r)::json, app_jwt_secret
        ) as token
        from (
        select 'user_role' as role,
            (select lower(_email)) as email,
            _uid as uid,
            extract(epoch from now())::integer + 60*60 as exp
        ) r
        into result;
    return result;
END;
$telegram_jwt$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.telegram
    IS 'Generate a JWT user_role token based on chat_id from telegram';
|
||||
|
||||
-- Telegram chat_id session validation
|
||||
DROP FUNCTION IF EXISTS auth.telegram_session_exists_fn;
-- True when some account's stored telegram chat object
-- (preferences->telegram->from->id) matches `user_id`; raises on NULL.
-- (Removed an unused local variable `_email`.)
CREATE OR REPLACE FUNCTION auth.telegram_session_exists_fn(IN user_id BIGINT) RETURNS BOOLEAN
AS $telegram_session_exists$
DECLARE
    _id BIGINT := NULL;
    _user_id BIGINT := user_id;
BEGIN
    IF user_id IS NULL THEN
        RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
    END IF;

    -- Find user email based on telegram chat_id
    SELECT preferences->'telegram'->'from'->'id' INTO _id
        FROM auth.accounts a
        WHERE cast(preferences->'telegram'->'from'->'id' as BIGINT) = _user_id::BIGINT;
    IF FOUND THEN
        RETURN True;
    END IF;
    RETURN FALSE;
END;
$telegram_session_exists$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    auth.telegram_session_exists_fn
    IS 'Check if session/user exist based on user_id';
|
@@ -18,99 +18,131 @@ select current_database();
|
||||
-- api_anonymous role in the database with which to execute anonymous web requests, limit 10 connections
|
||||
-- api_anonymous allows JWT token generation with an expiration time via function api.login() from auth.accounts table
|
||||
create role api_anonymous WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOLOGIN NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10;
|
||||
comment on role api_anonymous is
|
||||
'The role that PostgREST will switch to when a user is not authenticated.';
|
||||
-- Limit to 10 connections
|
||||
--alter user api_anonymous connection limit 10;
|
||||
grant usage on schema api to api_anonymous;
|
||||
-- explicitly limit EXECUTE privileges to only signup and login functions
|
||||
-- explicitly limit EXECUTE privileges to only signup and login and reset functions
|
||||
grant execute on function api.login(text,text) to api_anonymous;
|
||||
grant execute on function api.signup(text,text,text,text) to api_anonymous;
|
||||
grant execute on function api.recover(text) to api_anonymous;
|
||||
grant execute on function api.reset(text,text,text) to api_anonymous;
|
||||
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
|
||||
grant execute on function public.check_jwt() to api_anonymous;
|
||||
-- explicitly limit EXECUTE privileges to only telegram jwt auth function
|
||||
grant execute on function api.telegram(bigint,text) to api_anonymous;
|
||||
-- explicitly limit EXECUTE privileges to only pushover subscription validation function
|
||||
grant execute on function api.email_fn(text) to api_anonymous;
|
||||
grant execute on function api.pushover_fn(text,text) to api_anonymous;
|
||||
grant execute on function api.telegram_fn(text,text) to api_anonymous;
|
||||
grant execute on function api.telegram_otp_fn(text) to api_anonymous;
|
||||
--grant execute on function api.generate_otp_fn(text) to api_anonymous;
|
||||
|
||||
-- authenticator
|
||||
-- login role
|
||||
create role authenticator NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT login password 'mysecretpassword';
|
||||
comment on role authenticator is
|
||||
'Role that serves as an entry-point for API servers such as PostgREST.';
|
||||
grant api_anonymous to authenticator;
|
||||
|
||||
-- Grafana user and role with login, read-only, limit 10 connections
|
||||
CREATE ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10 LOGIN PASSWORD 'mysecretpassword';
|
||||
-- Grafana user and role with login, read-only, limit 15 connections
|
||||
CREATE ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 15 LOGIN PASSWORD 'mysecretpassword';
|
||||
comment on role grafana is
|
||||
'Role that grafana will use for authenticated web users.';
|
||||
-- Allow API schema and Tables
|
||||
GRANT USAGE ON SCHEMA api TO grafana;
|
||||
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO grafana;
|
||||
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO grafana;
|
||||
-- Allow read on VIEWS
|
||||
-- Allow read on VIEWS on API schema
|
||||
GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO grafana;
|
||||
GRANT SELECT ON TABLE api.log_view,api.moorage_view,api.stay_view,api.vessels_view TO grafana;
|
||||
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO grafana;
|
||||
-- Allow Auth schema and Tables
|
||||
GRANT USAGE ON SCHEMA auth TO grafana;
|
||||
GRANT SELECT ON TABLE auth.vessels TO grafana;
|
||||
GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana;
|
||||
|
||||
-- Grafana_auth authenticator user and role with login, read-only on auth.accounts, limit 15 connections
|
||||
CREATE ROLE grafana_auth WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 15 LOGIN PASSWORD 'mysecretpassword';
|
||||
comment on role grafana_auth is
|
||||
'Role that grafana auth proxy authenticator via apache.';
|
||||
-- Allow read on VIEWS on API schema
|
||||
GRANT USAGE ON SCHEMA api TO grafana_auth;
|
||||
GRANT SELECT ON TABLE api.metadata TO grafana_auth;
|
||||
-- Allow Auth schema and Tables
|
||||
GRANT USAGE ON SCHEMA auth TO grafana_auth;
|
||||
GRANT SELECT ON TABLE auth.accounts TO grafana_auth;
|
||||
GRANT SELECT ON TABLE auth.vessels TO grafana_auth;
|
||||
-- GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana_auth;
|
||||
GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana_auth;
|
||||
|
||||
-- User:
|
||||
-- nologin, web api only
|
||||
-- read-only for all and Read-Write on logbook, stays and moorage except for specific (name, notes) COLUMNS
|
||||
CREATE ROLE user_role WITH NOLOGIN NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION;
|
||||
comment on role user_role is
|
||||
'Role that PostgREST will switch to for authenticated web users.';
|
||||
GRANT user_role to authenticator;
|
||||
GRANT USAGE ON SCHEMA api TO user_role;
|
||||
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO user_role;
|
||||
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO user_role;
|
||||
GRANT SELECT ON TABLE public.process_queue TO user_role;
|
||||
-- To check?
|
||||
GRANT SELECT ON TABLE auth.vessels TO user_role;
|
||||
-- Allow update on table for notes
|
||||
--GRANT UPDATE ON TABLE api.logbook,api.moorages,api.stays TO user_role;
|
||||
-- Allow users to update certain columns
|
||||
GRANT UPDATE (name, notes) ON api.logbook TO user_role;
|
||||
GRANT UPDATE (name, notes, stay_code) ON api.stays TO user_role;
|
||||
GRANT UPDATE (name, notes, stay_code, home_flag) ON api.moorages TO user_role;
|
||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
|
||||
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
|
||||
GRANT EXECUTE ON FUNCTION api.export_logbook_geojson_linestring_fn(int4) TO user_role;
|
||||
GRANT EXECUTE ON FUNCTION public.check_jwt() TO user_role;
|
||||
GRANT EXECUTE ON FUNCTION public.st_asgeojson(text) TO user_role;
|
||||
GRANT EXECUTE ON FUNCTION public.geography_eq(geography, geography) TO user_role;
|
||||
--GRANT EXECUTE ON FUNCTION public.check_jwt() TO user_role;
|
||||
-- Allow others functions or allow all in public !! ??
|
||||
--GRANT EXECUTE ON FUNCTION api.export_logbook_geojson_linestring_fn(int4) TO user_role;
|
||||
--GRANT EXECUTE ON FUNCTION public.st_asgeojson(text) TO user_role;
|
||||
--GRANT EXECUTE ON FUNCTION public.geography_eq(geography, geography) TO user_role;
|
||||
-- TODO should not be need !! ??
|
||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
|
||||
|
||||
-- pg15 feature security_invoker=true,security_barrier=true
|
||||
GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO user_role;
|
||||
GRANT SELECT ON TABLE api.log_view,api.moorage_view,api.stay_view,api.vessels_view TO user_role;
|
||||
GRANT SELECT ON TABLE api.monitoring_view,api.monitoring_view2,api.monitoring_view3 TO user_role;
|
||||
GRANT SELECT ON TABLE api.monitoring_humidity,api.monitoring_voltage,api.monitoring_temperatures TO user_role;
|
||||
GRANT SELECT ON TABLE api.total_info_view TO user_role;
|
||||
GRANT SELECT ON TABLE api.stats_logs_view TO user_role;
|
||||
GRANT SELECT ON TABLE api.stats_moorages_view TO user_role;
|
||||
GRANT SELECT ON TABLE api.eventlogs_view TO user_role;
|
||||
-- Update ownership for security user_role as run by web user.
|
||||
-- Web listing
|
||||
ALTER VIEW api.stays_view OWNER TO user_role;
|
||||
ALTER VIEW api.moorages_view OWNER TO user_role;
|
||||
ALTER VIEW api.logs_view OWNER TO user_role;
|
||||
-- Remove all right except select
|
||||
REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.stays_view FROM user_role;
|
||||
REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.moorages_view FROM user_role;
|
||||
REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.logs_view FROM user_role;
|
||||
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.vessel_view FROM user_role;
|
||||
--ALTER VIEW api.stays_view OWNER TO user_role;
|
||||
--ALTER VIEW api.moorages_view OWNER TO user_role;
|
||||
--ALTER VIEW api.logs_view OWNER TO user_role;
|
||||
--ALTER VIEW api.vessel_p_view OWNER TO user_role;
|
||||
--ALTER VIEW api.monitoring_view OWNER TO user_role;
|
||||
-- Remove all permissions except select
|
||||
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.stays_view FROM user_role;
|
||||
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.moorages_view FROM user_role;
|
||||
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.logs_view FROM user_role;
|
||||
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.monitoring_view FROM user_role;
|
||||
|
||||
-- Allow read and update on VIEWS
|
||||
-- Web detail view
|
||||
ALTER VIEW api.log_view OWNER TO user_role;
|
||||
REVOKE TRUNCATE, DELETE, TRIGGER, INSERT ON TABLE api.log_view FROM user_role;
|
||||
--ALTER VIEW api.log_view OWNER TO user_role;
|
||||
-- Remove all permissions except select and update
|
||||
--REVOKE TRUNCATE, DELETE, TRIGGER, INSERT ON TABLE api.log_view FROM user_role;
|
||||
|
||||
-- For cron job
|
||||
--GRANT EXECUTE ON function api.run_cron_jobs() TO user_role;
|
||||
ALTER VIEW api.vessels_view OWNER TO user_role;
|
||||
-- Remove all permissions except select and update
|
||||
REVOKE TRUNCATE, DELETE, TRIGGER, INSERT ON TABLE api.vessels_view FROM user_role;
|
||||
|
||||
-- List vessel
|
||||
--TODO add geojson with position
|
||||
CREATE OR REPLACE VIEW api.vessel_view AS
|
||||
SELECT
|
||||
v.name as name,
|
||||
v.mmsi as mmsi,
|
||||
v.created_at as created_at,
|
||||
m.time as last_contact
|
||||
FROM auth.vessels v, api.metadata m
|
||||
WHERE
|
||||
m.mmsi = current_setting('vessel.mmsi')
|
||||
AND lower(v.owner_email) = lower(current_setting('request.jwt.claims', true)::json->>'email');
|
||||
|
||||
ALTER VIEW api.vessel_view OWNER TO user_role;
|
||||
REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.vessel_view FROM user_role;
|
||||
GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view,api.vessel_view TO grafana;
|
||||
|
||||
GRANT EXECUTE ON FUNCTION api.vessel_fn() TO user_role;
|
||||
GRANT EXECUTE ON FUNCTION api.settings_fn() TO user_role;
|
||||
|
||||
|
||||
-- Allow read on VIEWS
|
||||
--GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view,api.vessel_view TO user_role;
|
||||
|
||||
-- Vessel:
|
||||
-- nologin
|
||||
-- insert-update-only for api.metrics,api.logbook,api.moorages,api.stays,api.metadata and sequences and process_queue
|
||||
CREATE ROLE vessel_role WITH NOLOGIN NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION;
|
||||
comment on role vessel_role is
|
||||
'Role that PostgREST will switch to for authenticated web vessels.';
|
||||
GRANT vessel_role to authenticator;
|
||||
GRANT USAGE ON SCHEMA api TO vessel_role;
|
||||
GRANT INSERT, UPDATE, SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO vessel_role;
|
||||
@@ -119,19 +151,31 @@ GRANT INSERT ON TABLE public.process_queue TO vessel_role;
|
||||
GRANT USAGE, SELECT ON SEQUENCE public.process_queue_id_seq TO vessel_role;
|
||||
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
|
||||
GRANT EXECUTE ON FUNCTION public.check_jwt() to vessel_role;
|
||||
-- explicitly limit EXECUTE privileges to api.metrics triggers function
|
||||
GRANT EXECUTE ON FUNCTION public.trip_in_progress_fn(text) to vessel_role;
|
||||
GRANT EXECUTE ON FUNCTION public.stay_in_progress_fn(text) to vessel_role;
|
||||
-- hypertable get_partition_hash ?!?
|
||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA _timescaledb_internal TO vessel_role;
|
||||
|
||||
|
||||
--- Scheduler:
|
||||
-- TODO: currently cron function are run as super user, switch to scheduler role.
|
||||
-- Scheduler read-only all, and write on logbook, stays, moorage, process_queue
|
||||
-- Scheduler read-only all, and write on api.logbook, api.stays, api.moorages, public.process_queue, auth.otp
|
||||
-- Crons
|
||||
CREATE ROLE scheduler WITH NOLOGIN;
|
||||
--CREATE ROLE scheduler WITH NOLOGIN NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION;
|
||||
CREATE ROLE scheduler WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10 LOGIN;
|
||||
comment on role scheduler is
|
||||
'Role that pgcron will use to process logbook,moorages,stays,monitoring and notification.';
|
||||
GRANT scheduler to authenticator;
|
||||
GRANT EXECUTE ON FUNCTION api.run_cron_jobs() to scheduler;
|
||||
GRANT USAGE ON SCHEMA api TO scheduler;
|
||||
GRANT SELECT ON TABLE api.metrics,api.metadata TO scheduler;
|
||||
GRANT INSERT, UPDATE, SELECT ON TABLE api.logbook,api.moorages,api.stays TO scheduler;
|
||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO scheduler;
|
||||
GRANT SELECT ON ALL TABLES IN SCHEMA public TO scheduler;
|
||||
GRANT SELECT,UPDATE ON TABLE process_queue TO scheduler;
|
||||
GRANT SELECT,UPDATE ON TABLE public.process_queue TO scheduler;
|
||||
GRANT USAGE ON SCHEMA auth TO scheduler;
|
||||
GRANT SELECT ON ALL TABLES IN SCHEMA auth TO scheduler;
|
||||
GRANT SELECT,UPDATE,DELETE ON TABLE auth.otp TO scheduler;
|
||||
|
||||
---------------------------------------------------------------------------
|
||||
-- Security policy
|
||||
@@ -144,12 +188,24 @@ CREATE POLICY admin_all ON api.metadata TO current_user
|
||||
WITH CHECK (true);
|
||||
-- Allow vessel_role to insert and select on their own records
|
||||
CREATE POLICY api_vessel_role ON api.metadata TO vessel_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (true);
|
||||
-- Allow user_role to update and select on their own records
|
||||
CREATE POLICY api_user_role ON api.metadata TO user_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
|
||||
USING (vessel_id = current_setting('vessel.id', true))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow scheduler to update and select based on the vessel.id
|
||||
CREATE POLICY api_scheduler_role ON api.metadata TO scheduler
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow grafana to select based on email
|
||||
CREATE POLICY grafana_role ON api.metadata TO grafana
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (false);
|
||||
-- Allow grafana_auth to select
|
||||
CREATE POLICY grafana_proxy_role ON api.metadata TO grafana_auth
|
||||
USING (true)
|
||||
WITH CHECK (false);
|
||||
|
||||
ALTER TABLE api.metrics ENABLE ROW LEVEL SECURITY;
|
||||
-- Administrator can see all rows and add any rows
|
||||
@@ -158,12 +214,20 @@ CREATE POLICY admin_all ON api.metrics TO current_user
|
||||
WITH CHECK (true);
|
||||
-- Allow vessel_role to insert and select on their own records
|
||||
CREATE POLICY api_vessel_role ON api.metrics TO vessel_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (true);
|
||||
-- Allow user_role to update and select on their own records
|
||||
CREATE POLICY api_user_role ON api.metrics TO user_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
|
||||
USING (vessel_id = current_setting('vessel.id', true))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow scheduler to update and select based on the vessel.id
|
||||
CREATE POLICY api_scheduler_role ON api.metrics TO scheduler
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow grafana to select based on the vessel.id
|
||||
CREATE POLICY grafana_role ON api.metrics TO grafana
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (false);
|
||||
|
||||
-- Be sure to enable row level security on the table
|
||||
ALTER TABLE api.logbook ENABLE ROW LEVEL SECURITY;
|
||||
@@ -174,12 +238,20 @@ CREATE POLICY admin_all ON api.logbook TO current_user
|
||||
WITH CHECK (true);
|
||||
-- Allow vessel_role to insert and select on their own records
|
||||
CREATE POLICY api_vessel_role ON api.logbook TO vessel_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (true);
|
||||
-- Allow user_role to update and select on their own records
|
||||
CREATE POLICY api_user_role ON api.logbook TO user_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
|
||||
USING (vessel_id = current_setting('vessel.id', true))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow scheduler to update and select based on the vessel.id
|
||||
CREATE POLICY api_scheduler_role ON api.logbook TO scheduler
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow grafana to select based on the vessel.id
|
||||
CREATE POLICY grafana_role ON api.logbook TO grafana
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (false);
|
||||
|
||||
-- Be sure to enable row level security on the table
|
||||
ALTER TABLE api.stays ENABLE ROW LEVEL SECURITY;
|
||||
@@ -189,12 +261,20 @@ CREATE POLICY admin_all ON api.stays TO current_user
|
||||
WITH CHECK (true);
|
||||
-- Allow vessel_role to insert and select on their own records
|
||||
CREATE POLICY api_vessel_role ON api.stays TO vessel_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (true);
|
||||
-- Allow user_role to update and select on their own records
|
||||
CREATE POLICY api_user_role ON api.stays TO user_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
|
||||
USING (vessel_id = current_setting('vessel.id', true))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow scheduler to update and select based on the vessel_id
|
||||
CREATE POLICY api_scheduler_role ON api.stays TO scheduler
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow grafana to select based on the vessel_id
|
||||
CREATE POLICY grafana_role ON api.stays TO grafana
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (false);
|
||||
|
||||
-- Be sure to enable row level security on the table
|
||||
ALTER TABLE api.moorages ENABLE ROW LEVEL SECURITY;
|
||||
@@ -204,12 +284,20 @@ CREATE POLICY admin_all ON api.moorages TO current_user
|
||||
WITH CHECK (true);
|
||||
-- Allow vessel_role to insert and select on their own records
|
||||
CREATE POLICY api_vessel_role ON api.moorages TO vessel_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (true);
|
||||
-- Allow user_role to update and select on their own records
|
||||
CREATE POLICY api_user_role ON api.moorages TO user_role
|
||||
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
|
||||
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
|
||||
USING (vessel_id = current_setting('vessel.id', true))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow scheduler to update and select based on the vessel_id
|
||||
CREATE POLICY api_scheduler_role ON api.moorages TO scheduler
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
|
||||
-- Allow grafana to select based on the vessel_id
|
||||
CREATE POLICY grafana_role ON api.moorages TO grafana
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (false);
|
||||
|
||||
-- Be sure to enable row level security on the table
|
||||
ALTER TABLE auth.vessels ENABLE ROW LEVEL SECURITY;
|
||||
@@ -219,9 +307,55 @@ CREATE POLICY admin_all ON auth.vessels TO current_user
|
||||
WITH CHECK (true);
|
||||
-- Allow user_role to update and select on their own records
|
||||
CREATE POLICY api_user_role ON auth.vessels TO user_role
|
||||
USING (mmsi = current_setting('vessel.mmsi', false)
|
||||
AND owner_email = current_setting('request.jwt.claims', false)::json->>'email'
|
||||
)
|
||||
WITH CHECK (mmsi = current_setting('vessel.mmsi', false)
|
||||
AND owner_email = current_setting('request.jwt.claims', false)::json->>'email'
|
||||
USING (vessel_id = current_setting('vessel.id', true)
|
||||
AND owner_email = current_setting('user.email', true)
|
||||
)
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', true)
|
||||
AND owner_email = current_setting('user.email', true)
|
||||
);
|
||||
-- Allow grafana to select based on email
|
||||
CREATE POLICY grafana_role ON auth.vessels TO grafana
|
||||
USING (owner_email = current_setting('user.email', true))
|
||||
WITH CHECK (false);
|
||||
-- Allow grafana to select
|
||||
CREATE POLICY grafana_proxy_role ON auth.vessels TO grafana_auth
|
||||
USING (true)
|
||||
WITH CHECK (false);
|
||||
|
||||
-- Be sure to enable row level security on the table
|
||||
ALTER TABLE auth.accounts ENABLE ROW LEVEL SECURITY;
|
||||
-- Administrator can see all rows and add any rows
|
||||
CREATE POLICY admin_all ON auth.accounts TO current_user
|
||||
USING (true)
|
||||
WITH CHECK (true);
|
||||
-- Allow user_role to update and select on their own records
|
||||
CREATE POLICY api_user_role ON auth.accounts TO user_role
|
||||
USING (email = current_setting('user.email', true))
|
||||
WITH CHECK (email = current_setting('user.email', true));
|
||||
-- Allow scheduler see all rows and add any rows
|
||||
CREATE POLICY api_scheduler_role ON auth.accounts TO scheduler
|
||||
USING (email = current_setting('user.email', true))
|
||||
WITH CHECK (email = current_setting('user.email', true));
|
||||
-- Allow grafana_auth to select
|
||||
CREATE POLICY grafana_proxy_role ON auth.accounts TO grafana_auth
|
||||
USING (true)
|
||||
WITH CHECK (false);
|
||||
|
||||
-- Be sure to enable row level security on the table
|
||||
ALTER TABLE public.process_queue ENABLE ROW LEVEL SECURITY;
|
||||
-- Administrator can see all rows and add any rows
|
||||
CREATE POLICY admin_all ON public.process_queue TO current_user
|
||||
USING (true)
|
||||
WITH CHECK (true);
|
||||
-- Allow vessel_role to insert and select on their own records
|
||||
CREATE POLICY api_vessel_role ON public.process_queue TO vessel_role
|
||||
USING (ref_id = current_setting('user.id', true) OR ref_id = current_setting('vessel.id', true))
|
||||
WITH CHECK (true);
|
||||
-- Allow user_role to update and select on their own records
|
||||
CREATE POLICY api_user_role ON public.process_queue TO user_role
|
||||
USING (ref_id = current_setting('user.id', true) OR ref_id = current_setting('vessel.id', true))
|
||||
WITH CHECK (ref_id = current_setting('user.id', true) OR ref_id = current_setting('vessel.id', true));
|
||||
-- Allow scheduler see all rows and updates any rows
|
||||
CREATE POLICY api_scheduler_role ON public.process_queue TO scheduler
|
||||
USING (true)
|
||||
WITH CHECK (false);
|
||||
|
@@ -8,7 +8,7 @@
|
||||
-- List current database
|
||||
select current_database();
|
||||
|
||||
-- connext to the DB
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
CREATE SCHEMA IF NOT EXISTS jwt;
|
||||
|
@@ -8,18 +8,18 @@
|
||||
|
||||
CREATE EXTENSION IF NOT EXISTS pg_cron; -- provides a simple cron-based job scheduler for PostgreSQL
|
||||
-- TRUNCATE table jobs
|
||||
TRUNCATE TABLE cron.job CONTINUE IDENTITY RESTRICT;
|
||||
--TRUNCATE TABLE cron.job CONTINUE IDENTITY RESTRICT;
|
||||
|
||||
-- Create a every 5 minutes or minute job cron_process_new_logbook_fn ??
|
||||
SELECT cron.schedule('cron_new_logbook', '*/5 * * * *', 'select public.cron_process_new_logbook_fn()') ;
|
||||
SELECT cron.schedule('cron_new_logbook', '*/5 * * * *', 'select public.cron_process_new_logbook_fn()');
|
||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_logbook';
|
||||
|
||||
-- Create a every 5 minute job cron_process_new_stay_fn
|
||||
SELECT cron.schedule('cron_new_stay', '*/5 * * * *', 'select public.cron_process_new_stay_fn()');
|
||||
SELECT cron.schedule('cron_new_stay', '*/6 * * * *', 'select public.cron_process_new_stay_fn()');
|
||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_stay';
|
||||
|
||||
-- Create a every 6 minute job cron_process_new_moorage_fn, delay from stay to give time to generate geo reverse location, eg: name
|
||||
SELECT cron.schedule('cron_new_moorage', '*/6 * * * *', 'select public.cron_process_new_moorage_fn()');
|
||||
SELECT cron.schedule('cron_new_moorage', '*/7 * * * *', 'select public.cron_process_new_moorage_fn()');
|
||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_moorage';
|
||||
|
||||
-- Create a every 10 minute job cron_process_monitor_offline_fn
|
||||
@@ -31,20 +31,43 @@ SELECT cron.schedule('cron_monitor_online', '*/10 * * * *', 'select public.cron_
|
||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_monitor_online';
|
||||
|
||||
-- Create a every 5 minute job cron_process_new_account_fn
|
||||
SELECT cron.schedule('cron_new_account', '*/5 * * * *', 'select public.cron_process_new_account_fn()');
|
||||
--SELECT cron.schedule('cron_new_account', '*/5 * * * *', 'select public.cron_process_new_account_fn()');
|
||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_account';
|
||||
|
||||
-- Create a every 5 minute job cron_process_new_vessel_fn
|
||||
SELECT cron.schedule('cron_new_vessel', '*/5 * * * *', 'select public.cron_process_new_vessel_fn()');
|
||||
--SELECT cron.schedule('cron_new_vessel', '*/5 * * * *', 'select public.cron_process_new_vessel_fn()');
|
||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_vessel';
|
||||
|
||||
-- Create a every 6 minute job cron_process_new_account_otp_validation_queue_fn, delay from cron_new_account
|
||||
--SELECT cron.schedule('cron_new_account_otp', '*/6 * * * *', 'select public.cron_process_new_account_otp_validation_fn()');
|
||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_account_otp';
|
||||
|
||||
-- Notification
|
||||
-- Create a every 1 minute job cron_process_new_notification_queue_fn, new_account, new_vessel, _new_account_otp
|
||||
SELECT cron.schedule('cron_new_notification', '*/2 * * * *', 'select public.cron_process_new_notification_fn()');
|
||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_notification';
|
||||
|
||||
-- Maintenance
|
||||
-- Vacuum database at “At 01:01 on Sunday.”
|
||||
SELECT cron.schedule('cron_vacumm', '1 1 * * 0', 'select public.cron_vaccum_fn()');
|
||||
SELECT cron.schedule('cron_vacuum', '1 1 * * 0', 'VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.logbook,api.stays,api.moorages,api.metadata,api.metrics;');
|
||||
-- Remove all jobs log at “At 02:02 on Sunday.”
|
||||
SELECT cron.schedule('job_run_details_cleanup', '2 2 * * 0', 'select public.job_run_details_cleanup_fn()');
|
||||
-- Rebuilding indexes at “first day of each month at 23:01.”
|
||||
SELECT cron.schedule('cron_reindex', '1 23 1 * *', 'REINDEX TABLE api.logbook; REINDEX TABLE api.stays; REINDEX TABLE api.moorages; REINDEX TABLE api.metadata; REINDEX TABLE api.metrics;');
|
||||
-- Any other maintenance require?
|
||||
|
||||
-- OTP
|
||||
-- Create a every 15 minute job cron_process_prune_otp_fn
|
||||
SELECT cron.schedule('cron_prune_otp', '*/15 * * * *', 'select public.cron_process_prune_otp_fn()');
|
||||
|
||||
-- Alerts
|
||||
-- Create a every 11 minute job cron_process_alerts_fn
|
||||
--SELECT cron.schedule('cron_alerts', '*/11 * * * *', 'select public.cron_process_alerts_fn()');
|
||||
|
||||
-- Cron job settings
|
||||
UPDATE cron.job SET database = 'signalk';
|
||||
UPDATE cron.job SET username = 'username'; -- TODO update to scheduler, pending process_queue update
|
||||
--UPDATE cron.job SET username = 'username' where jobname = 'cron_vacuum'; -- TODO Update to superuser for vaccuum permissions
|
||||
UPDATE cron.job SET nodename = '/var/run/postgresql/'; -- VS default localhost ??
|
||||
-- check job lists
|
||||
SELECT * FROM cron.job;
|
||||
@@ -53,6 +76,8 @@ SELECT * FROM cron.job;
|
||||
-- unschedule by job name
|
||||
--SELECT cron.unschedule('cron_new_logbook');
|
||||
-- TRUNCATE TABLE cron.job_run_details
|
||||
TRUNCATE TABLE cron.job_run_details CONTINUE IDENTITY RESTRICT;
|
||||
--TRUNCATE TABLE cron.job_run_details CONTINUE IDENTITY RESTRICT;
|
||||
-- check job log
|
||||
select * from cron.job_run_details ORDER BY end_time DESC LIMIT 10;
|
||||
SELECT * FROM cron.job_run_details ORDER BY end_time DESC;
|
||||
-- DEBUG Disable all
|
||||
UPDATE cron.job SET active = False;
|
76
initdb/07naturalearthdata.sql
Normal file
76
initdb/07naturalearthdata.sql
Normal file
@@ -0,0 +1,76 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- https://www.naturalearthdata.com
|
||||
--
|
||||
-- https://naciscdn.org/naturalearth/10m/physical/ne_10m_geography_marine_polys.zip
|
||||
--
|
||||
-- https://github.com/nvkelso/natural-earth-vector/raw/master/10m_physical/ne_10m_geography_marine_polys.shp
|
||||
--
|
||||
|
||||
-- Import from shapefile
|
||||
-- # shp2pgsql ne_10m_geography_marine_polys.shp public.ne_10m_geography_marine_polys | psql -U ${POSTGRES_USER} signalk
|
||||
--
|
||||
-- PostgSail Customization, add tropics and alaska area.
|
||||
|
||||
-- List current database
|
||||
select current_database();
|
||||
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
CREATE TABLE public.ne_10m_geography_marine_polys (
|
||||
gid serial4 NOT NULL,
|
||||
featurecla TEXT NULL,
|
||||
"name" TEXT NULL,
|
||||
namealt TEXT NULL,
|
||||
changed TEXT NULL,
|
||||
note TEXT NULL,
|
||||
name_fr TEXT NULL,
|
||||
min_label float8 NULL,
|
||||
max_label float8 NULL,
|
||||
scalerank int2 NULL,
|
||||
"label" TEXT NULL,
|
||||
wikidataid TEXT NULL,
|
||||
name_ar TEXT NULL,
|
||||
name_bn TEXT NULL,
|
||||
name_de TEXT NULL,
|
||||
name_en TEXT NULL,
|
||||
name_es TEXT NULL,
|
||||
name_el TEXT NULL,
|
||||
name_hi TEXT NULL,
|
||||
name_hu TEXT NULL,
|
||||
name_id TEXT NULL,
|
||||
name_it TEXT NULL,
|
||||
name_ja TEXT NULL,
|
||||
name_ko TEXT NULL,
|
||||
name_nl TEXT NULL,
|
||||
name_pl TEXT NULL,
|
||||
name_pt TEXT NULL,
|
||||
name_ru TEXT NULL,
|
||||
name_sv TEXT NULL,
|
||||
name_tr TEXT NULL,
|
||||
name_vi TEXT NULL,
|
||||
name_zh TEXT NULL,
|
||||
ne_id int8 NULL,
|
||||
name_fa TEXT NULL,
|
||||
name_he TEXT NULL,
|
||||
name_uk TEXT NULL,
|
||||
name_ur TEXT NULL,
|
||||
name_zht TEXT NULL,
|
||||
geom geometry(multipolygon,4326) NULL,
|
||||
CONSTRAINT ne_10m_geography_marine_polys_pkey PRIMARY KEY (gid)
|
||||
);
|
||||
-- Add GIST index
|
||||
CREATE INDEX ne_10m_geography_marine_polys_geom_idx
|
||||
ON public.ne_10m_geography_marine_polys
|
||||
USING GIST (geom);
|
||||
|
||||
-- Description
|
||||
COMMENT ON TABLE
|
||||
public.ne_10m_geography_marine_polys
|
||||
IS 'imperfect but light weight geographic marine areas from https://www.naturalearthdata.com';
|
||||
|
||||
-- Import data
|
||||
COPY public.ne_10m_geography_marine_polys(gid,featurecla,"name",namealt,changed,note,name_fr,min_label,max_label,scalerank,"label",wikidataid,name_ar,name_bn,name_de,name_en,name_es,name_el,name_hi,name_hu,name_id,name_it,name_ja,name_ko,name_nl,name_pl,name_pt,name_ru,name_sv,name_tr,name_vi,name_zh,ne_id,name_fa,name_he,name_uk,name_ur,name_zht,geom)
|
||||
FROM '/docker-entrypoint-initdb.d/ne_10m_geography_marine_polys.csv'
|
||||
DELIMITER ','
|
||||
CSV HEADER;
|
@@ -14,12 +14,15 @@ INSERT INTO app_settings (name, value) VALUES
|
||||
('app.email_user', '${PGSAIL_EMAIL_USER}'),
|
||||
('app.email_pass', '${PGSAIL_EMAIL_PASS}'),
|
||||
('app.email_from', '${PGSAIL_EMAIL_FROM}'),
|
||||
('app.pushover_token', '${PGSAIL_PUSHOVER_TOKEN}'),
|
||||
('app.pushover_app', '_todo_'),
|
||||
('app.pushover_app_token', '${PGSAIL_PUSHOVER_APP_TOKEN}'),
|
||||
('app.pushover_app_url', '${PGSAIL_PUSHOVER_APP_URL}'),
|
||||
('app.telegram_bot_token', '${PGSAIL_TELEGRAM_BOT_TOKEN}'),
|
||||
('app.url', '${PGSAIL_APP_URL}'),
|
||||
('app.version', '${PGSAIL_VERSION}');
|
||||
-- Update comment with version
|
||||
COMMENT ON DATABASE signalk IS 'version ${PGSAIL_VERSION}';
|
||||
COMMENT ON DATABASE signalk IS 'PostgSail version ${PGSAIL_VERSION}';
|
||||
-- Update password from env
|
||||
ALTER ROLE authenticator WITH PASSWORD '${PGSAIL_AUTHENTICATOR_PASSWORD}';
|
||||
ALTER ROLE grafana WITH PASSWORD '${PGSAIL_GRAFANA_PASSWORD}';
|
||||
ALTER ROLE grafana_auth WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
|
||||
END
|
||||
|
@@ -1 +1 @@
|
||||
0.0.6
|
||||
0.2.2
|
||||
|
310
initdb/ne_10m_geography_marine_polys.csv
Normal file
310
initdb/ne_10m_geography_marine_polys.csv
Normal file
File diff suppressed because one or more lines are too long
13
pgadmin_servers.json
Normal file
13
pgadmin_servers.json
Normal file
@@ -0,0 +1,13 @@
|
||||
{
|
||||
"Servers": {
|
||||
"dev": {
|
||||
"Name": "PostgSail dev db",
|
||||
"Group": "Servers",
|
||||
"Port": 5432,
|
||||
"Host": "db",
|
||||
"SSLMode": "prefer",
|
||||
"MaintenanceDB": "postgres",
|
||||
"Username": "postgres"
|
||||
}
|
||||
}
|
||||
}
|
8
tests/Dockerfile
Normal file
8
tests/Dockerfile
Normal file
@@ -0,0 +1,8 @@
|
||||
FROM node:lts
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
# Install and update the system
|
||||
RUN apt-get -q update && apt-get -qy upgrade && apt-get -qy install postgresql-client
|
||||
# Clean up APT when done.
|
||||
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
|
||||
|
18
tests/README.md
Normal file
18
tests/README.md
Normal file
@@ -0,0 +1,18 @@
|
||||
# PostgSail Unit Tests
|
||||
The Unit Tests allow to automatically validate api workflow.
|
||||
|
||||
## A global overview
|
||||
Based on `mocha` & `psql`
|
||||
|
||||
## get started
|
||||
```bash
|
||||
$ npm i
|
||||
$ alias mocha="./node_modules/.bin/mocha"
|
||||
$ bash tests.sh
|
||||
```
|
||||
|
||||
## docker
|
||||
```bash
|
||||
$ docker-compose up -d db && sleep 15 && docker-compose up -d api && sleep 5
|
||||
$ docker-compose -f docker-compose.dev.yml -f docker-compose.yml up tests
|
||||
```
|
Reference in New Issue
Block a user