Mirror of https://github.com/xbgmsharp/postgsail.git (synced 2025-09-17 03:07:47 +00:00)

Compare commits (71 commits)
Commits compared (SHA1 only; author and date columns did not survive extraction):

27081c32f7, c84cfb9547, 9be725fa24, 40675a467e, f90356c2a7, 5f89f63223, 8a9abf5340, fbf6047b46, 7b17bbcae1, 65455c93af, 48bba3eb99, 0c0071236e, 23d3586a2c, 07a89d1fb8, 2a4b5dbb43, 306b942b42, 59f812c1e1, 798be66c07, ac21b0219c, 05aa73890a, 48d19f656a, 3bbb57e29e, bfc0b3756b, 4936e37f8c, 9071643aa3, 590927481e, 788d811b15, f72d6b9859, 3e30709675, 60e0097540, 5d21cb2e44, ea89c934ee, 20e1b6ad73, 79e195c24b, 156d64d936, c3dccf94de, 3038821353, 051408a307, 2f7439d704, c792bf81d9, 5551376ce2, 069ac31ca0, d10b0cf501, 5dda28db51, 0a80f2e35e, dc79ca2f28, fe950b2d2a, 029e0b3fb6, 62854a95e0, e301e6fedd, 57cf87fbe9, 3a43e57b3c, 95d283b2ac, 18aba507e9, 6045ff46c0, 6e367a0e4c, eec149d411, de2f9c94e8, d65a0b0a54, 59c5142909, e2fe23e58d, eedf5881d9, 3327c5a813, e857440133, 0a13e0a798, 8fe0513c3c, b69a52eacd, 129ee7dcbd, 9800d83463, b80799aa2f, 84f73f2281
.github/workflows/db-lint.yml (vendored, 12 lines changed)

```diff
@@ -27,7 +27,7 @@ jobs:
         run: cp .env.example .env

       - name: Pull Docker images
-        run: docker-compose pull db api
+        run: docker compose pull db api

       - name: Run PostgSail Database & schemalint
         # Environment variables
@@ -41,10 +41,10 @@ jobs:
         run: |
           set -eu
           source .env
-          docker-compose stop || true
-          docker-compose rm || true
-          docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
-          docker-compose ps -a
+          docker compose stop || true
+          docker compose rm || true
+          docker compose up -d db && sleep 30 && docker compose up -d api && sleep 5
+          docker compose ps -a
           echo ${PGSAIL_API_URL}
           curl ${PGSAIL_API_URL}
           npm i -D schemalint
@@ -52,4 +52,4 @@ jobs:
       - name: Show the logs
         if: always()
         run: |
-          docker-compose logs
+          docker compose logs
```
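These workflow changes (and the matching ones in the files below) migrate from the standalone `docker-compose` v1 binary to the `docker compose` v2 CLI plugin. A quick way to check which variant a runner or host actually provides, using nothing beyond the stock Docker CLI:

```bash
# Compose v2 plugin: prints e.g. "Docker Compose version v2.x.y"
docker compose version

# Legacy v1 binary, if still installed, answers to the hyphenated form
docker-compose version
```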
.github/workflows/db-test.yml (vendored, 14 lines changed)

```diff
@@ -29,10 +29,10 @@ jobs:
         run: cp .env.example .env

       - name: Pull Docker images
-        run: docker-compose pull db api
+        run: docker compose pull db api

       - name: Build Docker images
-        run: docker-compose -f docker-compose.dev.yml -f docker-compose.yml build tests
+        run: docker compose -f docker-compose.dev.yml -f docker-compose.yml build tests

       - name: Install psql
         run: sudo apt install postgresql-client
@@ -49,10 +49,10 @@ jobs:
         run: |
           set -eu
           source .env
-          docker-compose stop || true
-          docker-compose rm || true
-          docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
-          docker-compose ps -a
+          docker compose stop || true
+          docker compose rm || true
+          docker compose up -d db && sleep 30 && docker compose up -d api && sleep 5
+          docker compose ps -a
           echo ${PGSAIL_API_URL}
           curl ${PGSAIL_API_URL}
           psql -c "select 1"
@@ -70,4 +70,4 @@ jobs:
       - name: Show the logs
         if: always()
         run: |
-          docker-compose logs
+          docker compose logs
```
.github/workflows/frontend-test.yml (vendored, 10 lines changed)

```diff
@@ -49,10 +49,10 @@ jobs:
         run: |
           set -eu
           source .env
-          docker-compose stop || true
-          docker-compose rm || true
-          docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
-          docker-compose ps -a
+          docker compose stop || true
+          docker compose rm || true
+          docker compose up -d db && sleep 30 && docker compose up -d api && sleep 5
+          docker compose ps -a
           echo "Test PostgSail Web Unit Test"
           docker compose -f docker-compose.dev.yml -f docker-compose.yml up -d web_dev && sleep 100
           docker compose -f docker-compose.dev.yml -f docker-compose.yml logs web_dev
@@ -67,4 +67,4 @@ jobs:
       - name: Show the logs
         if: always()
         run: |
-          docker-compose logs
+          docker compose logs
```
.github/workflows/grafana-test.yml (vendored, 17 lines changed)

```diff
@@ -26,7 +26,7 @@ jobs:
         run: cp .env.example .env

       - name: Pull Docker images
-        run: docker-compose pull db app
+        run: docker compose pull db app

       - name: Run PostgSail Grafana test
         # Environment variables
@@ -40,15 +40,16 @@ jobs:
         run: |
           set -eu
           source .env
-          docker-compose stop || true
-          docker-compose rm || true
-          docker-compose up -d db && sleep 30
-          docker-compose ps -a
+          docker compose stop || true
+          docker compose rm || true
+          docker compose up -d db && sleep 30
+          docker compose ps -a
           echo "Test PostgSail Grafana Unit Test"
-          docker-compose up -d app && sleep 5
-          docker-compose ps -a
+          docker compose up -d app && sleep 5
+          docker compose ps -a
           curl http://localhost:3001/
+          docker compose exec -i db psql -Uusername signalk -c "select public.cron_process_grafana_fn();"
       - name: Show the logs
         if: always()
         run: |
-          docker-compose logs
+          docker compose logs
```
.gitignore (vendored, 1 line changed)

```diff
@@ -1,5 +1,6 @@
 .DS_Store
 .env
+docker-compose.mm.yml
 initdb/*.csv
 initdb/*.no
 initdb/*.jwk
```
docker-compose.yml (file header lost in extraction; PostgREST service settings)

```diff
@@ -47,6 +47,8 @@ services:
       PGRST_OPENAPI_SERVER_PROXY_URI: http://127.0.0.1:3000
       PGRST_DB_PRE_REQUEST: public.check_jwt
       PGRST_DB_POOL: 20
+      PGRST_DB_POOL_MAX_IDLETIME: 60
+      PGRST_DB_POOL_ACQUISITION_TIMEOUT: 20
       PGRST_DB_URI: ${PGRST_DB_URI}
       PGRST_JWT_SECRET: ${PGRST_JWT_SECRET}
       PGRST_SERVER_TIMING_ENABLED: 1
```
README (file header lost in extraction; cron flowchart and anonymous-access notes)

````diff
@@ -74,6 +74,10 @@ flowchart TD
     G --> P
     A --> Q((cron_post_logbook))
     Q --> R{QGIS and notification}
+    A --> S((cron_video))
+    A --> U((cron_alert))
+    S --> T{notification}
+    U --> T{notification}
 ```
 cron job are not process by default because if you don't have the correct settings set (SMTP, PushOver, Telegram), you might enter in a loop with error and you could be blocked or banned from the external services.

@@ -120,5 +124,13 @@ SELECT * from public.process_queue;
 If you just want to use this as a standalone application and don't want people to be able to sign up for an account.

 ```SQL
-revoke execute on function api.signup(text,text,text,text) to api_anonymous;
+REVOKE execute on function api.signup(text,text,text,text) to api_anonymous;
 ```
+
+### How to disable completely anonymous access
+
+If you just want to use this as a standalone application and don't want people to be able to access public account.
+
+```SQL
+REVOKE SELECT ON ALL TABLES IN SCHEMA api TO api_anonymous;
+```
````
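To confirm that such a REVOKE took effect, PostgreSQL's built-in privilege-inspection functions can be queried directly; a small sketch, with the role and function signature taken from the snippet above:

```sql
-- Should return false once EXECUTE has been revoked from api_anonymous
SELECT has_function_privilege('api_anonymous', 'api.signup(text,text,text,text)', 'EXECUTE');
```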
ERD documentation (file header lost in extraction; MobilityDB trip columns and relationships)

````diff
@@ -24,6 +24,14 @@ erDiagram
         geography track_geog "postgis geography type default SRID 4326 Unit: degres"
         jsonb track_geojson "store generated geojson with track metrics data using with LineString and Point features, we can not depend api.metrics table"
         geometry track_geom "postgis geometry type EPSG:4326 Unit: degres"
+        tgeogpoint trip "MobilityDB trajectory"
+        tfloat trip_cog "courseovergroundtrue"
+        ttext trip_notes
+        tfloat trip_sog "speedoverground"
+        ttext trip_status
+        tfloat trip_twa "windspeedapparent"
+        tfloat trip_twd "truewinddirection"
+        tfloat trip_tws "truewindspeed"
         text vessel_id "{NOT_NULL}"
     }

@@ -187,6 +195,13 @@ erDiagram
         numeric id
     }

+    public_mobilitydb_opcache {
+        integer ltypnum
+        oid opid
+        integer opnum
+        integer rtypnum
+    }
+
     public_ne_10m_geography_marine_polys {
         text changed
         text featurecla
@@ -257,5 +272,5 @@ erDiagram
     api_stays }o--|| api_moorages : ""
     api_stays }o--|| api_stays_at : ""
     auth_otp |o--|| auth_accounts : ""
-    auth_vessels |o--|| auth_accounts : ""
+    auth_vessels }o--|| auth_accounts : ""
 ```
````
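The new `trip_*` columns use MobilityDB temporal types: `tgeogpoint` for the trajectory, `tfloat`/`ttext` for values that vary over the trip. As a hedged illustration of how such columns can be queried (`twAvg` and `trajectory` are standard MobilityDB functions; the logbook id is invented):

```sql
-- Time-weighted average speed over ground, and the trip geometry, for one logbook entry
SELECT twAvg(trip_sog) AS avg_sog,
       trajectory(trip) AS track
  FROM api.logbook
 WHERE id = 1;
```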
README install section (file header lost in extraction)

```diff
@@ -124,7 +124,9 @@ This step can take some time as it will first do a build to generate the static

 The frontend should be accessible via port HTTP/8080.

-Users are collaborating on an installation guide, [Self-hosted-installation-guide](https://github.com/xbgmsharp/postgsail/wiki/Self-hosted-installation-guide)
+Users are collaborating on two installation guide:
+- [Self-hosted-installation-guide on AWS EC2](https://github.com/xbgmsharp/postgsail/blob/main/docs/Self%E2%80%90hosted-installation-guide%20on%20AWS.md)
+- [Self-hosted-installation-guide](https://github.com/xbgmsharp/postgsail/blob/main/docs/Self%E2%80%90hosted-installation-guide.md)

 ### SQL Configuration

@@ -190,7 +192,7 @@ Check the [End-to-End (E2E) test sample](https://github.com/xbgmsharp/postgsail/

 ### Docker dependencies

-`docker-compose` is used to start environment dependencies. Dependencies consist of 3 containers:
+`docker compose` is used to start environment dependencies. Dependencies consist of 3 containers:

 - `timescaledb-postgis` alias `db`, PostgreSQL with TimescaleDB extension along with the PostGIS extension.
 - `postgrest` alias `api`, Standalone web server that turns your PostgreSQL database directly into a RESTful API.
```
docs/Self‐hosted-installation-guide on AWS.md (new file, 167 lines)

## Self AWS cloud hosted setup example

In this guide we install, set up, and run a PostgSail project on an AWS instance in the cloud.

## On AWS Console
***Launch an instance on AWS EC2***
With the following settings:
+ Ubuntu
+ Instance type: t2.small
+ Create a new key pair:
  + Key pair type: RSA
  + Private key file format: .pem
  + The key file is stored for later use

+ Allow SSH traffic from: Anywhere
+ Allow HTTPS traffic from the internet
+ Allow HTTP traffic from the internet

Configure storage:
The standard storage of 8 GiB is too small, so change this to 16 GiB.

***Create a new security group***
Go to: EC2 > Security groups > Create security group.
Add inbound rules for the following ports: 443, 8080, 80, 3000, 5432, 22, 5050.
Go to your instances > select your instance > Actions > Security > Change security group,
and add the new security group to the instance.

## Connect to instance with SSH

Copy the key file to your default SSH configuration file location (the one VSCode will use).
In a terminal, go to that folder and run this command to ensure your key is not publicly viewable:
```chmod 600 "privatekey.pem"```

We are using VSCode to connect to the instance:
Install the Remote - SSH extension for VSCode;
Open the Command Palette (Ctrl+Shift+P) and type Remote-SSH: Add New SSH Host:
```ssh -i "privatekey.pem" ubuntu@ec2-111-22-33-44.eu-west-1.compute.amazonaws.com```
When prompted, select the default SSH configuration file location.
Open the config file and add the key location:
```IdentityFile ~/.ssh/privatekey.pem```
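For reference, a complete host entry in `~/.ssh/config` might look like the following; this is a minimal sketch, and the host alias and DNS name are placeholders matching the example above:

```
Host postgsail-aws
    HostName ec2-111-22-33-44.eu-west-1.compute.amazonaws.com
    User ubuntu
    IdentityFile ~/.ssh/privatekey.pem
```

With this entry in place, ```ssh postgsail-aws``` (or the VSCode host picker) connects directly.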
## Install Docker on your instance
To install Docker on your new EC2 Ubuntu instance via SSH, follow these steps:

Update your package list:
```sudo apt-get update```
Install required dependencies:
```sudo apt-get install apt-transport-https ca-certificates curl software-properties-common```
Add Docker's official GPG key:
```curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg```
Add Docker's official repository:
```echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null```
Update the package list again:
```sudo apt-get update```
Install Docker:
```sudo apt-get install docker-ce docker-ce-cli containerd.io```
Verify the Docker installation:
```sudo docker --version```
Add your user to the docker group to run Docker without sudo:
```sudo usermod -aG docker ubuntu```
Then log out and back in, or use the following to apply the change:
```newgrp docker```

## Install Postgsail
+ Git clone the postgsail repo:
```git clone https://github.com/xbgmsharp/postgsail.git```

## Edit environment variables
Copy the .env.example file and edit the environment variables:
```cd postgsail```
```cp .env.example .env```
```nano .env```

***POSTGRES_USER***
Come up with a unique username for the database user. This will be used in the Docker image when it is started up. Nothing beyond creating a unique username and password is required here.
This environment variable is used in conjunction with `POSTGRES_PASSWORD` to set a user and its password. This variable will create the specified user with superuser power and a database with the same name.

https://github.com/docker-library/docs/blob/master/postgres/README.md

***POSTGRES_PASSWORD***
This should be a good password. It will be used for the postgres user above. Again, this is used in the Docker image.
This environment variable is required for you to use the PostgreSQL image. It must not be empty or undefined. This environment variable sets the superuser password for PostgreSQL. The default superuser is defined by the `POSTGRES_USER` environment variable.

***POSTGRES_DB***
This is the name of the database within postgres. You can leave it named postgres, but give it a unique name if you like. The schema will be loaded into this database and all data will be stored within it. Since this is used inside the Docker image, the name really doesn't matter. If you plan to run additional databases within the image, then you might care.
This environment variable can be used to define a different name for the default database that is created when the image is first started. If it is not specified, then the value of `POSTGRES_USER` will be used.

***PGSAIL_APP_URL***
This is the webapp (webui) entrypoint, typically the public DNS or IP:
```PGSAIL_APP_URL=http://localhost:8080```

***PGSAIL_API_URL***
This is the URL to the API on your instance, on port 3000:
```PGSAIL_API_URL=http://localhost:3000```

***PGSAIL_AUTHENTICATOR_PASSWORD***
This password is used as part of the database access configuration. It is used as part of the access URI later on. (Put the same password in both lines.)

***PGSAIL_GRAFANA_PASSWORD***
This password is used for the Grafana service.

***PGSAIL_GRAFANA_AUTH_PASSWORD***
This password appears to be used for user authentication on Grafana (unconfirmed).

***PGSAIL_EMAIL_FROM***
***PGSAIL_EMAIL_SERVER***
***PGSAIL_EMAIL_USER***
***PGSAIL_EMAIL_PASS***
PostgSail does not include a built-in email service, only hooks to send email via an existing server.
We use Gmail as a third-party email service:
PGSAIL_EMAIL_FROM=email@gmail.com
PGSAIL_EMAIL_SERVER=smtp.gmail.com
PGSAIL_EMAIL_USER=email@gmail.com
You need to get PGSAIL_EMAIL_PASS from your Gmail account security settings: it is not the account password; instead you need to create an "App password".

***PGRST_JWT_SECRET***
This secret key must be at least 32 characters long. You can create a random key with the following command:
```cat /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | fold -w 42 | head -n 1```

***Other ENV variables***
```
PGSAIL_PUSHOVER_APP_TOKEN
PGSAIL_PUSHOVER_APP
PGSAIL_TELEGRAM_BOT_TOKEN
PGSAIL_AUTHENTICATOR_PASSWORD=password
PGSAIL_GRAFANA_PASSWORD=password
PGSAIL_GRAFANA_AUTH_PASSWORD=password
#PGSAIL_PUSHOVER_APP_TOKEN= Comment out if not used
#PGSAIL_PUSHOVER_APP_URL= Comment out if not used
#PGSAIL_TELEGRAM_BOT_TOKEN= Comment out if not used
```

## Run the project
If needed, add your user to the docker group to run Docker without sudo:
```sudo usermod -aG docker ubuntu```
Then log out and back in, or use the following to apply the change:
```newgrp docker```

Step 1. Import the SQL schema; execute:
```docker compose up db```
Step 2. Launch the full backend stack (db, api); execute:
```docker compose up db api```
Step 3. Launch the frontend webapp:
```docker compose up web```

Open a browser and navigate to your PGSAIL_APP_URL; you should see the PostgSail login screen now:
http://ec2-11-234-567-890.eu-west-1.compute.amazonaws.com:8080

## Additional SQL setup
Additional setup is required.
There is no user account yet, and the cron jobs need to be activated.
We'll do that using pgAdmin, as set up below.
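The statements to run there look like the following; a minimal sketch, assuming PostgSail's jobs are scheduled with the standard pg_cron extension (verify the job list in your own database before activating everything):

```sql
-- Inspect the scheduled jobs and their current state
SELECT jobid, jobname, schedule, active FROM cron.job;
-- Activate the PostgSail cron jobs
UPDATE cron.job SET active = True;
```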
***Run pgadmin***
First add two more variables to your .env file:
```PGADMIN_DEFAULT_EMAIL=setup@setup.com```
```PGADMIN_DEFAULT_PASSWORD=123456```
And add pgadmin to the docker-compose.yml file under "services":
```

```
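The service block was left empty above; as a hedged sketch of what it could contain, using the stock `dpage/pgadmin4` image and the two variables just added (host port 5050 matches the security-group rule created earlier; adjust to taste):

```yaml
  pgadmin:
    image: dpage/pgadmin4
    restart: unless-stopped
    ports:
      - "5050:80"   # pgAdmin web UI on host port 5050
    environment:
      PGADMIN_DEFAULT_EMAIL: ${PGADMIN_DEFAULT_EMAIL}
      PGADMIN_DEFAULT_PASSWORD: ${PGADMIN_DEFAULT_PASSWORD}
```

Start it with ```docker compose up -d pgadmin``` and log in with the credentials from the .env file.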
docs/Self‐hosted-installation-guide.md (new file, 166 lines)

# Self hosted setup example environment:

Virtual machine with Ubuntu 22.04 LTS minimal server installation.

Install openssh, update, and install docker-ce manually (the Docker packages in the Ubuntu repo are outdated).
The following ports are exposed to the internet, either using a static public IP address or port forwarding via your favorite firewall platform. (Not needed by default: Docker will expose all ports on all IPs.)
The base install uses ports 5432 (db), 3000 (api) and 8080 (web).

We’ll add https using an Apache or Nginx proxy once everything is tested. At that point you’ll want to open 443 or whatever other port you want to use for secure communication.
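If the host runs a local firewall, the same ports have to be opened there as well; a minimal sketch using ufw, with the port list mirroring the base install plus 443 for the later https proxy:

```bash
sudo ufw allow 22/tcp      # SSH
sudo ufw allow 8080/tcp    # web
sudo ufw allow 3000/tcp    # api
sudo ufw allow 5432/tcp    # db (only if remote database access is really wanted)
sudo ufw allow 443/tcp     # https proxy, later
sudo ufw enable
```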
For docker-ce installation, this is a decent guide:
https://www.digitalocean.com/community/tutorials/how-to-install-and-use-docker-on-ubuntu-20-04

Third party services and options:
Emails
For email notifications you may want to install a local email handler like postfix or use a third party service like gmail.

Pushover
Add more here

Telegram Bot
Add more here

Login to your docker host once it’s set up.
Clone the repo to your user directory, copy the example file, and edit the environment variables:

```
$ git clone https://github.com/xbgmsharp/postgsail
cd postgsail
cp .env.example .env
nano .env
```

The example has the following:
```
# POSTGRESQL ENV Settings
POSTGRES_USER=username
POSTGRES_PASSWORD=password
POSTGRES_DB=postgres
# PostgSail ENV Settings
PGSAIL_AUTHENTICATOR_PASSWORD=password
PGSAIL_GRAFANA_PASSWORD=password
PGSAIL_GRAFANA_AUTH_PASSWORD=password
# SMTP server settings
PGSAIL_EMAIL_FROM=root@localhost
PGSAIL_EMAIL_SERVER=localhost
#PGSAIL_EMAIL_USER= Comment out if not used
#PGSAIL_EMAIL_PASS= Comment out if not used
# Pushover settings
#PGSAIL_PUSHOVER_APP_TOKEN= Comment out if not used
#PGSAIL_PUSHOVER_APP_URL= Comment out if not used
# TELEGRAM BOT, ask BotFather
#PGSAIL_TELEGRAM_BOT_TOKEN= Comment out if not used
# webapp entrypoint, typically the public DNS or IP
PGSAIL_APP_URL=http://localhost:8080
# API entrypoint from the webapp, typically the public DNS or IP
PGSAIL_API_URL=http://localhost:3000
# POSTGREST ENV Settings
PGRST_DB_URI=postgres://authenticator:${PGSAIL_AUTHENTICATOR_PASSWORD}@db:5432/signalk
# % cat /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | fold -w 42 | head -n 1
PGRST_JWT_SECRET=_at_least_32__char__long__random
# Grafana ENV Settings
GF_SECURITY_ADMIN_PASSWORD=password
```

All of these need to be configured.

Step by step:

## POSTGRESQL ENV Settings

***POSTGRES_USER***
Come up with a unique username for the database user. This will be used in the Docker image when it is started up. Nothing beyond creating a unique username and password is required here.
This environment variable is used in conjunction with `POSTGRES_PASSWORD` to set a user and its password. This variable will create the specified user with superuser power and a database with the same name.

https://github.com/docker-library/docs/blob/master/postgres/README.md

***POSTGRES_PASSWORD***
This should be a good password. It will be used for the postgres user above. Again, this is used in the Docker image.
This environment variable is required for you to use the PostgreSQL image. It must not be empty or undefined. This environment variable sets the superuser password for PostgreSQL. The default superuser is defined by the `POSTGRES_USER` environment variable.

***POSTGRES_DB***
This is the name of the database within postgres. Give it a unique name if you like. The schema will be loaded into this database and all data will be stored within it. Since this is used inside the Docker image, the name really doesn't matter. If you plan to run additional databases within the image, then you might care.
This environment variable can be used to define a different name for the default database that is created when the image is first started. If it is not specified, then the value of `POSTGRES_USER` will be used.

```
# PostgSail ENV Settings
PGSAIL_AUTHENTICATOR_PASSWORD=password
PGSAIL_GRAFANA_PASSWORD=password
PGSAIL_GRAFANA_AUTH_PASSWORD=password
PGSAIL_EMAIL_FROM=root@localhost
PGSAIL_EMAIL_SERVER=localhost
#PGSAIL_EMAIL_USER= Comment out if not used
#PGSAIL_EMAIL_PASS= Comment out if not used
#PGSAIL_PUSHOVER_APP_TOKEN= Comment out if not used
#PGSAIL_PUSHOVER_APP_URL= Comment out if not used
#PGSAIL_TELEGRAM_BOT_TOKEN= Comment out if not used
PGSAIL_APP_URL=http://localhost:8080
PGSAIL_API_URL=http://localhost:3000
```

PGSAIL_AUTHENTICATOR_PASSWORD
This password is used as part of the database access configuration. It is used as part of the access URI later on. (Put the same password in both lines.)

PGSAIL_GRAFANA_PASSWORD
This password is used for the Grafana service.

PGSAIL_GRAFANA_AUTH_PASSWORD
This password appears to be used for user authentication on Grafana (unconfirmed).

PGSAIL_EMAIL_FROM
PGSAIL_EMAIL_SERVER
PostgSail does not include a built-in email service, only hooks to send email via an existing server.
You can install an email service on the Ubuntu host or use a third party service like gmail. If you choose to use a local service, be aware that some email services will filter it as spam unless you’ve properly configured it.

PGSAIL_PUSHOVER_APP_TOKEN
PGSAIL_PUSHOVER_APP
PGSAIL_TELEGRAM_BOT_TOKEN

Add more info here

PGSAIL_APP_URL
This is the full URL (with domain name or IP) that you access PGSAIL via. Once the nginx ssl proxy is added this may need to be updated. (Service restart required after changing?)

PGSAIL_API_URL
This is the API URL that’s used for boat and user access. Once the apache or nginx ssl proxy is added this may need to be updated. (Same restart?)

Network configuration example:
This is a Docker question, but in general no special network configuration should be needed; Docker creates and assigns a network automatically. All images will be bound to all IPs on the host.
The volume can be on disk, but a Docker volume is preferred.
```
# docker compose -f docker-compose.yml -f docker-compose.dev.yml ps -a
NAME   IMAGE                          COMMAND                          SERVICE   CREATED        STATUS                  PORTS
api    postgrest/postgrest            "/bin/postgrest"                 api       2 months ago   Up 2 months             0.0.0.0:3000->3000/tcp, :::3000->3000/tcp, 0.0.0.0:3003->3003/tcp, :::3003->3003/tcp
app    grafana/grafana:latest         "/run.sh"                        app       3 months ago   Up 12 days              0.0.0.0:3001->3000/tcp, :::3001->3000/tcp
db     xbgmsharp/timescaledb-postgis  "docker-entrypoint.sh postgres"  db        2 months ago   Up 2 months (healthy)   0.0.0.0:5432->5432/tcp, :::5432->5432/tcp
```
All services (db, api, web) will be accessible via localhost and other IPs with the default configuration.

```bash
# telnet localhost 5432
```
and
```bash
# curl localhost:3000
```

```bash
# docker network ls
NETWORK ID     NAME                DRIVER    SCOPE
...
14f30223ebf2   postgsail_default   bridge    local
```

Volumes:
```bash
% docker volume ls
DRIVER    VOLUME NAME
local     postgsail_grafana-data
local     postgsail_postgres-data
```
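To see where Docker keeps these volumes on the host filesystem, a quick check with the standard Docker CLI:

```bash
# Print the host path backing the postgres data volume
docker volume inspect --format '{{ .Mountpoint }}' postgsail_postgres-data
```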
frontend (submodule, 2 lines changed)
Submodule frontend updated: bc4df1dd5e...2fb525adad
SQL schema (file header lost in extraction; public.process_logbook_queue_fn)

```diff
@@ -366,7 +366,7 @@ CREATE OR REPLACE FUNCTION public.process_logbook_queue_fn(IN _id integer) RETUR

         -- Add trip details name as note for the first geometry point entry from the GeoJSON
         SELECT format('{"trip": { "name": "%s", "duration": "%s", "distance": "%s" }}', logbook_rec.name, logbook_rec.duration, logbook_rec.distance) into trip_note;
-        -- Update the properties of the first feature
+        -- Update the properties of the first feature
         UPDATE api.logbook
             SET track_geojson = jsonb_set(
                 track_geojson,
```
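The update above relies on PostgreSQL's `jsonb_set` to patch a single path inside the stored GeoJSON document; a standalone illustration of its semantics (the values are invented):

```sql
-- Replace the "name" property of the first feature in a GeoJSON document
SELECT jsonb_set(
    '{"features": [{"properties": {"name": "old"}}]}'::jsonb,
    '{features,0,properties,name}',
    '"new"'::jsonb
);
-- => {"features": [{"properties": {"name": "new"}}]}
```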
initdb/99_migrations_202407.sql (new file, 755 lines)

```sql
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration July 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Add video error notification message
INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('video_error','PostgSail Video Error',E'Hey,\nSorry we could not generate your video.\nPlease reach out to debug and solve the issue.','PostgSail Video Error!',E'There has been an error with your video.');

-- CRON for new video notification
DROP FUNCTION IF EXISTS public.cron_process_new_video_fn;
CREATE FUNCTION public.cron_process_video_fn() RETURNS void AS $cron_process_video$
DECLARE
    process_rec record;
    metadata_rec record;
    video_settings jsonb;
    user_settings jsonb;
BEGIN
    -- Check for new event notification pending update
    RAISE NOTICE 'cron_process_video_fn';
    FOR process_rec in
        SELECT * FROM process_queue
            WHERE (channel = 'new_video' OR channel = 'error_video')
                AND processed IS NULL
            ORDER BY stored ASC
    LOOP
        RAISE NOTICE '-> cron_process_video_fn for [%]', process_rec.payload;
        SELECT * INTO metadata_rec
            FROM api.metadata
            WHERE vessel_id = process_rec.ref_id::TEXT;

        IF metadata_rec.vessel_id IS NULL OR metadata_rec.vessel_id = '' THEN
            RAISE WARNING '-> cron_process_video_fn invalid metadata record vessel_id %', process_rec.ref_id;
            RAISE EXCEPTION 'Invalid metadata'
                USING HINT = 'Unknown vessel_id';
            RETURN;
        END IF;
        PERFORM set_config('vessel.id', metadata_rec.vessel_id, false);
        RAISE DEBUG '-> DEBUG cron_process_video_fn vessel_id %', current_setting('vessel.id', false);
        -- Prepare notification, gather user settings
        SELECT json_build_object('video_link', CONCAT('https://videos.openplotter.cloud/', process_rec.payload)) into video_settings;
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(metadata_rec.vessel_id::TEXT);
        SELECT user_settings::JSONB || video_settings::JSONB into user_settings;
        RAISE DEBUG '-> DEBUG cron_process_video_fn get_user_settings_from_vesselid_fn [%]', user_settings;
        -- Send notification
        IF process_rec.channel = 'new_video' THEN
            PERFORM send_notification_fn('video_ready'::TEXT, user_settings::JSONB);
        ELSE
            PERFORM send_notification_fn('video_error'::TEXT, user_settings::JSONB);
        END IF;
        -- update process_queue entry as processed
        UPDATE process_queue
            SET processed = NOW()
            WHERE id = process_rec.id;
        RAISE NOTICE '-> cron_process_video_fn updated process_queue table [%]', process_rec.id;
    END LOOP;
END;
$cron_process_video$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_video_fn
    IS 'init by pg_cron to check for new video event pending notifications, if so perform process_notification_queue_fn';
```
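For context, the queue rows this function consumes would look roughly like this; a hedged sketch, since the payload filename is invented and the full column list should be checked against the process_queue table definition:

```sql
-- A worker (e.g. the video pipeline) queues a notification like so;
-- ref_id carries the vessel_id that the cron function resolves back to a user.
INSERT INTO process_queue (channel, payload, stored, ref_id)
    VALUES ('new_video', 'trip_42.mp4', NOW(), current_setting('vessel.id', false));
```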
```sql
-- Fix error when stateOfCharge is null. Assume a null stateOfCharge value means a charge of 1.
DROP FUNCTION IF EXISTS public.cron_alerts_fn();
CREATE OR REPLACE FUNCTION public.cron_alerts_fn() RETURNS void AS $cron_alerts$
DECLARE
    alert_rec record;
    default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
    last_metric TIMESTAMPTZ;
    metric_rec record;
    app_settings JSONB;
    user_settings JSONB;
    alerting JSONB;
    _alarms JSONB;
    alarms TEXT;
    alert_default JSONB := '{
        "low_pressure_threshold": 990,
        "high_wind_speed_threshold": 30,
        "low_water_depth_threshold": 1,
        "min_notification_interval": 6,
        "high_pressure_drop_threshold": 12,
        "low_battery_charge_threshold": 90,
        "low_battery_voltage_threshold": 12.5,
        "low_water_temperature_threshold": 10,
        "low_indoor_temperature_threshold": 7,
        "low_outdoor_temperature_threshold": 3
    }';
BEGIN
    -- Check for new event notification pending update
    RAISE NOTICE 'cron_alerts_fn';
    FOR alert_rec in
        SELECT
            a.user_id,a.email,v.vessel_id,
            COALESCE((a.preferences->'alert_last_metric')::TEXT, default_last_metric::TEXT) as last_metric,
            (alert_default || (a.preferences->'alerting')::JSONB) as alerting,
            (a.preferences->'alarms')::JSONB as alarms
            FROM auth.accounts a
            LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
            LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
            WHERE (a.preferences->'alerting'->'enabled')::boolean = True
                AND m.active = True
    LOOP
        RAISE NOTICE '-> cron_alerts_fn for [%]', alert_rec;
        PERFORM set_config('vessel.id', alert_rec.vessel_id, false);
        PERFORM set_config('user.email', alert_rec.email, false);
        --RAISE WARNING 'public.cron_process_alert_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(alert_rec.vessel_id::TEXT);
        RAISE NOTICE '-> cron_alerts_fn checking user_settings [%]', user_settings;
        -- Get all metrics from the last last_metric avg by 5 minutes
        FOR metric_rec in
            SELECT time_bucket('5 minutes', m.time) AS time_bucket,
                    avg((m.metrics->'environment.inside.temperature')::numeric) AS intemp,
                    avg((m.metrics->'environment.outside.temperature')::numeric) AS outtemp,
                    avg((m.metrics->'environment.water.temperature')::numeric) AS wattemp,
                    avg((m.metrics->'environment.depth.belowTransducer')::numeric) AS watdepth,
                    avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
                    avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
                    avg((m.metrics->'electrical.batteries.House.voltage')::numeric) AS voltage,
                    avg(coalesce((m.metrics->>'electrical.batteries.House.capacity.stateOfCharge')::numeric, 1)) AS charge
                FROM api.metrics m
                WHERE vessel_id = alert_rec.vessel_id
                    AND m.time >= alert_rec.last_metric::TIMESTAMPTZ
                GROUP BY time_bucket
                ORDER BY time_bucket ASC LIMIT 100
        LOOP
            RAISE NOTICE '-> cron_alerts_fn checking metrics [%]', metric_rec;
            RAISE NOTICE '-> cron_alerts_fn checking alerting [%]', alert_rec.alerting;
            --RAISE NOTICE '-> cron_alerts_fn checking debug [%] [%]', kelvinToCel(metric_rec.intemp), (alert_rec.alerting->'low_indoor_temperature_threshold');
            IF kelvinToCel(metric_rec.intemp) < (alert_rec.alerting->'low_indoor_temperature_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_indoor_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"low_indoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.intemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_indoor_temperature_threshold value:'|| kelvinToCel(metric_rec.intemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold';
            END IF;
            IF kelvinToCel(metric_rec.outtemp) < (alert_rec.alerting->'low_outdoor_temperature_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_outdoor_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"low_outdoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.outtemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.outtemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold';
            END IF;
            IF kelvinToCel(metric_rec.wattemp) < (alert_rec.alerting->'low_water_temperature_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_water_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"low_water_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.wattemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_water_temperature_threshold value:'|| kelvinToCel(metric_rec.wattemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold';
            END IF;
            IF metric_rec.watdepth < (alert_rec.alerting->'low_water_depth_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_water_depth_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"low_water_depth_threshold": {"value": '|| metric_rec.watdepth ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_water_depth_threshold value:'|| metric_rec.watdepth ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold';
            END IF;
            IF metric_rec.pressure < (alert_rec.alerting->'high_pressure_drop_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'high_pressure_drop_threshold'->>'date') IS NULL) OR
                    (((_alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"high_pressure_drop_threshold": {"value": '|| metric_rec.pressure ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "high_pressure_drop_threshold value:'|| metric_rec.pressure ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold';
            END IF;
            IF metric_rec.wind > (alert_rec.alerting->'high_wind_speed_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'high_wind_speed_threshold'->>'date') IS NULL) OR
                    (((_alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"high_wind_speed_threshold": {"value": '|| metric_rec.wind ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "high_wind_speed_threshold value:'|| metric_rec.wind ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold';
            END IF;
            IF metric_rec.voltage < (alert_rec.alerting->'low_battery_voltage_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_battery_voltage_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"low_battery_voltage_threshold": {"value": '|| metric_rec.voltage ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_battery_voltage_threshold value:'|| metric_rec.voltage ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold';
            END IF;
            IF (metric_rec.charge*100) < (alert_rec.alerting->'low_battery_charge_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_battery_charge_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"low_battery_charge_threshold": {"value": '|| (metric_rec.charge*100) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_battery_charge_threshold value:'|| (metric_rec.charge*100) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold';
            END IF;
            -- Record last metrics time
            SELECT metric_rec.time_bucket INTO last_metric;
        END LOOP;
        PERFORM api.update_user_preferences_fn('{alert_last_metric}'::TEXT, last_metric::TEXT);
    END LOOP;
END;
$cron_alerts$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_alerts_fn
    IS 'init by pg_cron to check for alerts';
```
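Accounts opt in through their preferences; the loop above only selects rows where preferences->'alerting'->'enabled' is true. A hedged sketch of enabling it for the signed-in user with the same preference helper this migration uses (the key names mirror alert_default; unset thresholds fall back to the defaults):

```sql
-- Enable alerting with one custom threshold; run in a session with the user context set
SELECT api.update_user_preferences_fn('{alerting}'::TEXT,
    '{"enabled": true, "high_wind_speed_threshold": 25}'::TEXT);
```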
```sql
-- Fix error: None of these media types are available: text/xml
DROP FUNCTION IF EXISTS api.export_logbooks_gpx_fn;
CREATE OR REPLACE FUNCTION api.export_logbooks_gpx_fn(
    IN start_log INTEGER DEFAULT NULL,
    IN end_log INTEGER DEFAULT NULL) RETURNS "text/xml"
AS $export_logbooks_gpx$
declare
    merged_jsonb jsonb;
    app_settings jsonb;
BEGIN
    -- Merge GIS track_geom of geometry type Point into a jsonb array format
    IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
        SELECT jsonb_agg(
                jsonb_build_object('coordinates', f->'geometry'->'coordinates', 'time', f->'properties'->>'time')
            ) INTO merged_jsonb
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook
                    WHERE id >= start_log
                        AND id <= end_log
                        AND track_geojson IS NOT NULL
                    ORDER BY _from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'Point';
    ELSE
        SELECT jsonb_agg(
                jsonb_build_object('coordinates', f->'geometry'->'coordinates', 'time', f->'properties'->>'time')
            ) INTO merged_jsonb
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook
                    WHERE track_geojson IS NOT NULL
                    ORDER BY _from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'Point';
    END IF;
    --RAISE WARNING '-> export_logbooks_gpx_fn _jsonb %' , _jsonb;
    -- Gather url from app settings
    app_settings := get_app_url_fn();
    --RAISE WARNING '-> export_logbooks_gpx_fn app_settings %', app_settings;
    -- Generate GPX XML, extract Point features from geojson.
    RETURN xmlelement(name gpx,
                xmlattributes( '1.1' as version,
                    'PostgSAIL' as creator,
                    'http://www.topografix.com/GPX/1/1' as xmlns,
                    'http://www.opencpn.org' as "xmlns:opencpn",
                    app_settings->>'app.url' as "xmlns:postgsail"),
                xmlelement(name metadata,
                    xmlelement(name link, xmlattributes(app_settings->>'app.url' as href),
                        xmlelement(name text, 'PostgSail'))),
                xmlelement(name trk,
                    xmlelement(name name, 'logbook name'),
                    xmlelement(name trkseg, xmlagg(
                        xmlelement(name trkpt,
                            xmlattributes(features->'coordinates'->1 as lat, features->'coordinates'->0 as lon),
                            xmlelement(name time, features->'properties'->>'time')
                        )))))::pg_catalog.xml
        FROM jsonb_array_elements(merged_jsonb) AS features;
END;
$export_logbooks_gpx$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.export_logbooks_gpx_fn
    IS 'Export a logs entries to GPX XML format';
```
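Since the function lives in the api schema and returns the "text/xml" media type, it is meant to be called through PostgREST; a hedged sketch of the HTTP call (the /rpc path follows PostgREST's function-call convention, and the token variable is a placeholder):

```bash
# Export logs 1 through 10 as GPX via the PostgREST RPC endpoint
curl -X POST "${PGSAIL_API_URL}/rpc/export_logbooks_gpx_fn" \
     -H "Authorization: Bearer ${TOKEN}" \
     -H "Accept: text/xml" \
     -H "Content-Type: application/json" \
     -d '{"start_log": 1, "end_log": 10}'
```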
```sql
-- Add export logbooks as png
DROP FUNCTION IF EXISTS public.qgis_bbox_trip_py_fn;
CREATE OR REPLACE FUNCTION public.qgis_bbox_trip_py_fn(IN _str_to_parse TEXT DEFAULT NULL, OUT bbox TEXT)
AS $qgis_bbox_trip_py$
    plpy.notice('qgis_bbox_trip_py_fn _str_to_parse [{}]'.format(_str_to_parse))
    vessel_id, log_id, log_end = _str_to_parse.split('_')
    width = 1080
    height = 566
    scaleout = True
    log_extent = None
    # If we have a vessel_id then it is full logs image map
    if vessel_id and log_end is None:
        # Use the shared cache to avoid preparing the log extent
        if vessel_id in SD:
            plan = SD[vessel_id]
        # A prepared statement from Python
        else:
            plan = plpy.prepare("WITH merged AS ( SELECT ST_Union(track_geom) AS merged_geometry FROM api.logbook WHERE vessel_id = $1 ) SELECT ST_Extent(ST_Transform(merged_geometry, 3857))::TEXT FROM merged;", ["text"])
            SD[vessel_id] = plan
        # Execute the statement with the log extent param and limit to 1 result
        rv = plpy.execute(plan, [vessel_id], 1)
        log_extent = rv[0]['st_extent']
    # If we have a vessel_id and a log_end then it is subset logs image map
    elif vessel_id and log_end:
        # Use the shared cache to avoid preparing the log extent
        shared_cache = vessel_id + str(log_id) + str(log_end)
        if shared_cache in SD:
            plan = SD[shared_cache]
        # A prepared statement from Python
        else:
            plan = plpy.prepare("WITH merged AS ( SELECT ST_Union(track_geom) AS merged_geometry FROM api.logbook WHERE vessel_id = $1 and id >= $2::NUMERIC and id <= $3::NUMERIC) SELECT ST_Extent(ST_Transform(merged_geometry, 3857))::TEXT FROM merged;", ["text","text","text"])
            SD[shared_cache] = plan
        # Execute the statement with the log extent param and limit to 1 result
        rv = plpy.execute(plan, [vessel_id,log_id,log_end], 1)
        log_extent = rv[0]['st_extent']
    # Else we have a log_id then it is single log image map
    else:
        # Use the shared cache to avoid preparing the log extent
        if log_id in SD:
            plan = SD[log_id]
        # A prepared statement from Python
        else:
            plan = plpy.prepare("SELECT ST_Extent(ST_Transform(track_geom, 3857)) FROM api.logbook WHERE id = $1::NUMERIC", ["text"])
            SD[log_id] = plan
        # Execute the statement with the log extent param and limit to 1 result
        rv = plpy.execute(plan, [log_id], 1)
        log_extent = rv[0]['st_extent']

    # Extract extent
    def parse_extent_from_db(extent_raw):
        # Parse the extent_raw to extract coordinates
        extent = extent_raw.replace('BOX(', '').replace(')', '').split(',')
        min_x, min_y = map(float, extent[0].split())
        max_x, max_y = map(float, extent[1].split())
        return min_x, min_y, max_x, max_y

    # ZoomOut from linestring extent
    def apply_scale_factor(extent, scale_factor=1.125):
        min_x, min_y, max_x, max_y = extent
        center_x = (min_x + max_x) / 2
        center_y = (min_y + max_y) / 2
        width = max_x - min_x
        height = max_y - min_y
        new_width = width * scale_factor
        new_height = height * scale_factor
        scaled_extent = (
            round(center_x - new_width / 2),
            round(center_y - new_height / 2),
            round(center_x + new_width / 2),
            round(center_y + new_height / 2),
        )
        return scaled_extent

    def adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height):
        min_x, min_y, max_x, max_y = scaled_extent
        bbox_width = float(max_x - min_x)
        bbox_height = float(max_y - min_y)
        bbox_aspect_ratio = float(bbox_width / bbox_height)
        image_aspect_ratio = float(fixed_width / fixed_height)

        if bbox_aspect_ratio > image_aspect_ratio:
            # Adjust height to match aspect ratio
            new_bbox_height = bbox_width / image_aspect_ratio
            height_diff = new_bbox_height - bbox_height
            min_y -= height_diff / 2
            max_y += height_diff / 2
        else:
            # Adjust width to match aspect ratio
            new_bbox_width = bbox_height * image_aspect_ratio
            width_diff = new_bbox_width - bbox_width
            min_x -= width_diff / 2
            max_x += width_diff / 2

        adjusted_extent = (min_x, min_y, max_x, max_y)
        return adjusted_extent

    if not log_extent:
        plpy.warning('Failed to get sql qgis_bbox_trip_py_fn log_id [{}], extent [{}]'.format(log_id, log_extent))
    #plpy.notice('qgis_bbox_trip_py_fn log_id [{}], extent [{}]'.format(log_id, log_extent))
    # Parse extent and apply ZoomOut scale factor
    if scaleout:
        scaled_extent = apply_scale_factor(parse_extent_from_db(log_extent))
    else:
        scaled_extent = parse_extent_from_db(log_extent)
    #plpy.notice('qgis_bbox_trip_py_fn log_id [{}], scaled_extent [{}]'.format(log_id, scaled_extent))
    fixed_width = width # default 1080
    fixed_height = height # default 566
    adjusted_extent = adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height)
    #plpy.notice('qgis_bbox_trip_py_fn log_id [{}], adjusted_extent [{}]'.format(log_id, adjusted_extent))
    min_x, min_y, max_x, max_y = adjusted_extent
    return f"{min_x},{min_y},{max_x},{max_y}"
$qgis_bbox_trip_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.qgis_bbox_trip_py_fn
    IS 'Generate the BBOX base on trip extent and adapt extent to the image size for QGIS Server';
```
|
||||

DROP FUNCTION IF EXISTS public.grafana_py_fn;
-- Update grafana provisioning, ERROR: KeyError: 'secureJsonFields'
CREATE OR REPLACE FUNCTION public.grafana_py_fn(_v_name text, _v_id text, _u_email text, app jsonb)
    RETURNS void
    TRANSFORM FOR TYPE jsonb
    LANGUAGE plpython3u
AS $function$
"""
https://grafana.com/docs/grafana/latest/developers/http_api/
Create an organization based on the vessel name
Create a user based on the user email
Add the user to the organization
Add the data_source to the organization
Add the dashboards to the organization
Update the organization preferences
"""
import requests
import json
import re

grafana_uri = None
if 'app.grafana_admin_uri' in app and app['app.grafana_admin_uri']:
    grafana_uri = app['app.grafana_admin_uri']
else:
    plpy.error('Error no grafana_admin_uri defined, check app settings')
    return None

b_name = None
if not _v_name:
    b_name = _v_id
else:
    b_name = _v_name

# add vessel org
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com',
           'Accept': 'application/json', 'Content-Type': 'application/json'}
path = 'api/orgs'
url = f'{grafana_uri}/{path}'
data_dict = {'name': b_name}
data = json.dumps(data_dict)
r = requests.post(url, data=data, headers=headers)
#print(r.text)
plpy.notice(r.json())
if r.status_code == 200 and "orgId" in r.json():
    org_id = r.json()['orgId']
else:
    plpy.error('Error grafana add vessel org {req} - {res}'.format(req=data_dict, res=r.json()))
    return None

# add user to vessel org
path = 'api/admin/users'
url = f'{grafana_uri}/{path}'
data_dict = {'orgId': org_id, 'email': _u_email, 'password': 'asupersecretpassword'}
data = json.dumps(data_dict)
r = requests.post(url, data=data, headers=headers)
#print(r.text)
plpy.notice(r.json())
if r.status_code == 200 and "id" in r.json():
    user_id = r.json()['id']
else:
    plpy.error('Error grafana add user to vessel org')
    return

# read data_source
path = 'api/datasources/1'
url = f'{grafana_uri}/{path}'
r = requests.get(url, headers=headers)
#print(r.text)
plpy.notice(r.json())
data_source = r.json()
data_source['id'] = 0
data_source['orgId'] = org_id
data_source['uid'] = "ds_" + _v_id
data_source['name'] = "ds_" + _v_id
data_source['secureJsonData'] = {}
data_source['secureJsonData']['password'] = 'mysecretpassword'
data_source['readOnly'] = True
if "secureJsonFields" in data_source:
    del data_source['secureJsonFields']

# add data_source to vessel org
path = 'api/datasources'
url = f'{grafana_uri}/{path}'
data = json.dumps(data_source)
headers['X-Grafana-Org-Id'] = str(org_id)
r = requests.post(url, data=data, headers=headers)
plpy.notice(r.json())
del headers['X-Grafana-Org-Id']
if r.status_code != 200 or "id" not in r.json():
    plpy.error('Error grafana add data_source to vessel org')
    return

dashboards_tpl = [ 'pgsail_tpl_electrical', 'pgsail_tpl_logbook', 'pgsail_tpl_monitor', 'pgsail_tpl_rpi', 'pgsail_tpl_solar', 'pgsail_tpl_weather', 'pgsail_tpl_home']
for dashboard in dashboards_tpl:
    # read dashboard template by uid
    path = 'api/dashboards/uid'
    url = f'{grafana_uri}/{path}/{dashboard}'
    if 'X-Grafana-Org-Id' in headers:
        del headers['X-Grafana-Org-Id']
    r = requests.get(url, headers=headers)
    plpy.notice(r.json())
    if r.status_code != 200 or "id" not in r.json():
        plpy.error('Error grafana read dashboard template')
        return
    new_dashboard = r.json()
    del new_dashboard['meta']
    new_dashboard['dashboard']['version'] = 0
    new_dashboard['dashboard']['id'] = 0
    new_uid = re.sub(r'pgsail_tpl_(.*)', r'postgsail_\1', new_dashboard['dashboard']['uid'])
    new_dashboard['dashboard']['uid'] = f'{new_uid}_{_v_id}'
    # add dashboard to vessel org
    path = 'api/dashboards/db'
    url = f'{grafana_uri}/{path}'
    data = json.dumps(new_dashboard)
    new_data = data.replace('PCC52D03280B7034C', data_source['uid'])
    headers['X-Grafana-Org-Id'] = str(org_id)
    r = requests.post(url, data=new_data, headers=headers)
    plpy.notice(r.json())
    if r.status_code != 200 or "id" not in r.json():
        plpy.error('Error grafana add dashboard to vessel org')
        return

# Update Org Prefs
path = 'api/org/preferences'
url = f'{grafana_uri}/{path}'
home_dashboard = {}
home_dashboard['timezone'] = 'utc'
home_dashboard['homeDashboardUID'] = f'postgsail_home_{_v_id}'
data = json.dumps(home_dashboard)
headers['X-Grafana-Org-Id'] = str(org_id)
r = requests.patch(url, data=data, headers=headers)
plpy.notice(r.json())
if r.status_code != 200:
    plpy.error('Error grafana update org preferences')
    return

plpy.notice('Done')
$function$
;
COMMENT ON FUNCTION public.grafana_py_fn(text, text, text, jsonb) IS 'Grafana Organization,User,data_source,dashboards provisioning via HTTP API using plpython3u';
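
-- Illustrative usage sketch (all values below are placeholders, not part of this
-- migration); the app jsonb is assumed to carry the 'app.grafana_admin_uri' key
-- checked at the top of the function body above:
-- SELECT public.grafana_py_fn(
--     'My Vessel',                  -- vessel name, becomes the org name
--     'hypothetical_vessel_id',     -- used for the datasource/dashboard uids
--     'demo@example.com',           -- owner email, becomes the org user
--     jsonb_build_object('app.grafana_admin_uri', 'http://admin:password@localhost:3001'));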

-- Add missing comment on function cron_process_no_activity_fn
COMMENT ON FUNCTION
    public.cron_process_no_activity_fn
    IS 'init by pg_cron, check for vessel with no activity for more than 230 days then send notification';

-- Update grafana,qgis,api role SQL connection limit to 30
ALTER ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;
ALTER ROLE api_anonymous WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;
ALTER ROLE qgis_role WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;

-- Create qgis schema for qgis projects
CREATE SCHEMA IF NOT EXISTS qgis;
COMMENT ON SCHEMA qgis IS 'Hold qgis_projects';
GRANT USAGE ON SCHEMA qgis TO qgis_role;
CREATE TABLE qgis.qgis_projects (
    "name" text NOT NULL,
    metadata jsonb NULL,
    "content" bytea NULL,
    CONSTRAINT qgis_projects_pkey PRIMARY KEY (name)
);
-- Description
COMMENT ON TABLE
    qgis.qgis_projects
    IS 'Store qgis projects using QGIS-Server or QGIS-Desktop from https://qgis.org/';
GRANT SELECT,INSERT,UPDATE,DELETE ON TABLE qgis.qgis_projects TO qgis_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO qgis_role;

-- Allow anonymous access to tables and views
GRANT SELECT ON ALL TABLES IN SCHEMA api TO api_anonymous;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO api_anonymous;
-- Allow EXECUTE on all FUNCTIONS in the api and public schema to user_role
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
-- Allow EXECUTE on all FUNCTIONS in the public schema to vessel_role
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO vessel_role;

-- Update version
UPDATE public.app_settings
    SET value='0.7.5'
    WHERE "name"='app.version';

\c postgres

-- Update video cronjob
UPDATE cron.job
    SET command='select public.cron_process_video_fn()'
    WHERE jobname = 'cron_new_video';
UPDATE cron.job
    SET jobname='cron_video'
    WHERE command='select public.cron_process_video_fn()';
1370	initdb/99_migrations_202408.sql (new file; diff suppressed because it is too large)

693	initdb/99_migrations_202409.sql (new file)
@@ -0,0 +1,693 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration September 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Add new email template account_inactivity
INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('inactivity','We Haven''t Seen You in a While!','Hi __RECIPIENT__,

You''re busy. We understand.

You haven''t logged into PostgSail for a considerable period. Since we last saw you, we have continued to add new and exciting features to help you explore your navigation journey.

Meanwhile, we have cleaned up your data. If you wish to maintain an up-to-date overview of your sail journey in PostgSail''s dashboard, kindly log in to your account within the next seven days.

Please note that your account will be permanently deleted if it remains inactive for seven more days.

If you have any questions or concerns or if you believe this to be an error, please do not hesitate to reach out at info@openplotter.cloud.

Sincerely,
Francois','We Haven''t Seen You in a While!','You haven''t logged into PostgSail for a considerable period. Login to check what''s new!');

-- Update HTML email for new logbook
DROP FUNCTION IF EXISTS public.send_email_py_fn;
CREATE OR REPLACE FUNCTION public.send_email_py_fn(IN email_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_email_py$
# Import smtplib for the actual sending function
import smtplib
import requests

# Import the email modules we need
from email.message import EmailMessage
from email.utils import formatdate, make_msgid
from email.mime.text import MIMEText

# Use the shared cache to avoid re-preparing the email metadata statement
if email_type in SD:
    plan = SD[email_type]
# A prepared statement from Python
else:
    plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
    SD[email_type] = plan

# Execute the statement with the email_type param and limit to 1 result
rv = plpy.execute(plan, [email_type], 1)
email_subject = rv[0]['email_subject']
email_content = rv[0]['email_content']

# Replace fields using the input jsonb objects
if not _user or not app:
    plpy.notice('send_email_py_fn Parameters [{}] [{}]'.format(_user, app))
    plpy.error('Error missing parameters')
    return None
if 'logbook_name' in _user and _user['logbook_name']:
    email_content = email_content.replace('__LOGBOOK_NAME__', str(_user['logbook_name']))
if 'logbook_link' in _user and _user['logbook_link']:
    email_content = email_content.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
if 'logbook_img' in _user and _user['logbook_img']:
    email_content = email_content.replace('__LOGBOOK_IMG__', str(_user['logbook_img']))
if 'logbook_stats' in _user and _user['logbook_stats']:
    email_content = email_content.replace('__LOGBOOK_STATS__', str(_user['logbook_stats']))
if 'video_link' in _user and _user['video_link']:
    email_content = email_content.replace('__VIDEO_LINK__', str(_user['video_link']))
if 'recipient' in _user and _user['recipient']:
    email_content = email_content.replace('__RECIPIENT__', _user['recipient'])
if 'boat' in _user and _user['boat']:
    email_content = email_content.replace('__BOAT__', _user['boat'])
if 'badge' in _user and _user['badge']:
    email_content = email_content.replace('__BADGE_NAME__', _user['badge'])
if 'otp_code' in _user and _user['otp_code']:
    email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
if 'reset_qs' in _user and _user['reset_qs']:
    email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])
if 'alert' in _user and _user['alert']:
    email_content = email_content.replace('__ALERT__', _user['alert'])

if 'app.url' in app and app['app.url']:
    email_content = email_content.replace('__APP_URL__', app['app.url'])

email_from = 'root@localhost'
if 'app.email_from' in app and app['app.email_from']:
    email_from = 'PostgSail <' + app['app.email_from'] + '>'
#plpy.notice('Sending email from [{}] [{}]'.format(email_from, app['app.email_from']))

email_to = 'root@localhost'
if 'email' in _user and _user['email']:
    email_to = _user['email']
    #plpy.notice('Sending email to [{}] [{}]'.format(email_to, _user['email']))
else:
    plpy.error('Error email to')
    return None

if email_type == 'logbook':
    msg = EmailMessage()
    msg.set_content(email_content)
else:
    msg = MIMEText(email_content, 'plain', 'utf-8')
msg["Subject"] = email_subject
msg["From"] = email_from
msg["To"] = email_to
msg["Date"] = formatdate()
msg["Message-ID"] = make_msgid()

if email_type == 'logbook' and 'logbook_img' in _user and _user['logbook_img']:
    # Create a Content-ID for the image
    image_cid = make_msgid()
    # Transform to the HTML template, replace plain text by HTML links
    logbook_link = "{__APP_URL__}/log/{__LOGBOOK_LINK__}".format(__APP_URL__=app['app.url'], __LOGBOOK_LINK__=str(_user['logbook_link']))
    timelapse_link = "{__APP_URL__}/timelapse/{__LOGBOOK_LINK__}".format(__APP_URL__=app['app.url'], __LOGBOOK_LINK__=str(_user['logbook_link']))
    email_content = email_content.replace('\n', '<br/>')
    email_content = email_content.replace(logbook_link, '<a href="{logbook_link}">{logbook_link}</a>'.format(logbook_link=str(logbook_link)))
    email_content = email_content.replace(timelapse_link, '<a href="{timelapse_link}">{timelapse_link}</a>'.format(timelapse_link=str(timelapse_link)))
    email_content = email_content.replace(str(_user['logbook_name']), '<a href="{logbook_link}">{logbook_name}</a>'.format(logbook_link=str(logbook_link), logbook_name=str(_user['logbook_name'])))
    # Set an alternative html body
    msg.add_alternative("""\
<html>
    <body>
        <p>{email_content}</p>
        <img src="cid:{image_cid}">
    </body>
</html>
""".format(email_content=email_content, image_cid=image_cid[1:-1]), subtype='html')
    img_url = 'https://gis.openplotter.cloud/{}'.format(str(_user['logbook_img']))
    response = requests.get(img_url, stream=True)
    if response.status_code == 200:
        msg.get_payload()[1].add_related(response.raw.data,
                                         maintype='image',
                                         subtype='png',
                                         cid=image_cid)

server_smtp = 'localhost'
if 'app.email_server' in app and app['app.email_server']:
    server_smtp = app['app.email_server']
    #plpy.notice('Sending server [{}] [{}]'.format(server_smtp, app['app.email_server']))

# Send the message via our own SMTP server.
try:
    # send your message with credentials specified above
    with smtplib.SMTP(server_smtp, 587) as server:
        if 'app.email_user' in app and app['app.email_user'] \
                and 'app.email_pass' in app and app['app.email_pass']:
            server.starttls()
            server.login(app['app.email_user'], app['app.email_pass'])
        #server.send_message(msg)
        server.sendmail(msg["From"], msg["To"], msg.as_string())
        server.quit()
    # tell the script to report if your message was sent or which errors need to be fixed
    plpy.notice('Sent email successfully to [{}] [{}]'.format(msg["To"], msg["Subject"]))
    return None
except OSError as error:
    plpy.error('OS Error occurred: ' + str(error))
except smtplib.SMTPConnectError:
    plpy.error('Failed to connect to the server. Bad connection settings?')
except smtplib.SMTPServerDisconnected:
    plpy.error('Server disconnected unexpectedly. Wrong user/password?')
except smtplib.SMTPException as e:
    plpy.error('SMTP error occurred: ' + str(e))
$send_email_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_email_py_fn
    IS 'Send email notification using plpython3u';
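
-- Illustrative usage sketch (hypothetical values; the _user and app keys mirror the
-- lookups in the function body above):
-- SELECT public.send_email_py_fn(
--     'logbook',
--     jsonb_build_object('email', 'demo@example.com', 'recipient', 'Demo',
--                        'logbook_name', 'Morning sail', 'logbook_link', 1),
--     jsonb_build_object('app.url', 'http://localhost:8080',
--                        'app.email_from', 'postgsail@example.com',
--                        'app.email_server', 'localhost'));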

-- Update stats_logs_fn, update debug
CREATE OR REPLACE FUNCTION api.stats_logs_fn(start_date text DEFAULT NULL::text, end_date text DEFAULT NULL::text, OUT stats jsonb)
    RETURNS jsonb
    LANGUAGE plpgsql
AS $function$
DECLARE
    _start_date TIMESTAMPTZ DEFAULT '1970-01-01';
    _end_date TIMESTAMPTZ DEFAULT NOW();
BEGIN
    IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        RAISE WARNING '--> stats_logs_fn, filter result stats by date [%]', start_date;
        _start_date := start_date::TIMESTAMPTZ;
        _end_date := end_date::TIMESTAMPTZ;
    END IF;
    --RAISE NOTICE '--> stats_logs_fn, _start_date [%], _end_date [%]', _start_date, _end_date;
    WITH
        meta AS (
            SELECT m.name FROM api.metadata m ),
        logs_view AS (
            SELECT *
                FROM api.logbook l
                WHERE _from_time >= _start_date::TIMESTAMPTZ
                    AND _to_time <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
        ),
        first_date AS (
            SELECT _from_time as first_date from logs_view ORDER BY first_date ASC LIMIT 1
        ),
        last_date AS (
            SELECT _to_time as last_date from logs_view ORDER BY _to_time DESC LIMIT 1
        ),
        max_speed_id AS (
            SELECT id FROM logs_view WHERE max_speed = (SELECT max(max_speed) FROM logs_view) ),
        max_wind_speed_id AS (
            SELECT id FROM logs_view WHERE max_wind_speed = (SELECT max(max_wind_speed) FROM logs_view)),
        max_distance_id AS (
            SELECT id FROM logs_view WHERE distance = (SELECT max(distance) FROM logs_view)),
        max_duration_id AS (
            SELECT id FROM logs_view WHERE duration = (SELECT max(duration) FROM logs_view)),
        logs_stats AS (
            SELECT
                count(*) AS count,
                max(max_speed) AS max_speed,
                max(max_wind_speed) AS max_wind_speed,
                max(distance) AS max_distance,
                sum(distance) AS sum_distance,
                max(duration) AS max_duration,
                sum(duration) AS sum_duration
            FROM logs_view l )
    --select * from logbook;
    -- Return a JSON
    SELECT jsonb_build_object(
        'name', meta.name,
        'first_date', first_date.first_date,
        'last_date', last_date.last_date,
        'max_speed_id', max_speed_id.id,
        'max_wind_speed_id', max_wind_speed_id.id,
        'max_duration_id', max_duration_id.id,
        'max_distance_id', max_distance_id.id)::jsonb || to_jsonb(logs_stats.*)::jsonb INTO stats
        FROM max_speed_id, max_wind_speed_id, max_distance_id, max_duration_id,
            logs_stats, meta, logs_view, first_date, last_date;
END;
$function$
;
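
-- Illustrative calls (dates are placeholders): the full history, then a bounded range.
-- SELECT api.stats_logs_fn();
-- SELECT api.stats_logs_fn('2024-01-01', '2024-09-30');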

-- Fix stays and moorage statistics for user by date
CREATE OR REPLACE FUNCTION api.stats_stays_fn(
    IN start_date TEXT DEFAULT NULL,
    IN end_date TEXT DEFAULT NULL,
    OUT stats JSON) RETURNS JSON AS $stats_stays$
DECLARE
    _start_date TIMESTAMPTZ DEFAULT '1970-01-01';
    _end_date TIMESTAMPTZ DEFAULT NOW();
BEGIN
    IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        RAISE NOTICE '--> stats_stays_fn, custom filter result stats by date [%]', start_date;
        _start_date := start_date::TIMESTAMPTZ;
        _end_date := end_date::TIMESTAMPTZ;
    END IF;
    --RAISE NOTICE '--> stats_stays_fn, _start_date [%], _end_date [%]', _start_date, _end_date;
    WITH
        stays as (
            select distinct(moorage_id) as moorage_id, sum(duration) as duration, count(id) as reference_count
                from api.stays s
                WHERE arrived >= _start_date::TIMESTAMPTZ
                    AND departed <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                group by moorage_id
                order by moorage_id
        ),
        moorages AS (
            SELECT m.id, m.home_flag, m.reference_count, m.stay_duration, m.stay_code, m.country, s.duration, s.reference_count
                from api.moorages m, stays s
                where s.moorage_id = m.id
                order by moorage_id
        ),
        home_ports AS (
            select count(*) as home_ports from moorages m where home_flag is true
        ),
        unique_moorages AS (
            select count(*) as unique_moorages from moorages m
        ),
        time_at_home_ports AS (
            select sum(m.stay_duration) as time_at_home_ports from moorages m where home_flag is true
        ),
        sum_stay_duration AS (
            select sum(m.stay_duration) as sum_stay_duration from moorages m where home_flag is false
        ),
        time_spent_away_arr AS (
            select m.stay_code,sum(m.stay_duration) as stay_duration from moorages m where home_flag is false group by m.stay_code order by m.stay_code
        ),
        time_spent_arr as (
            select jsonb_agg(t.*) as time_spent_away_arr from time_spent_away_arr t
        ),
        time_spent_away AS (
            select sum(m.stay_duration) as time_spent_away from moorages m where home_flag is false
        ),
        time_spent as (
            select jsonb_agg(t.*) as time_spent_away from time_spent_away t
        )
    -- Return a JSON
    SELECT jsonb_build_object(
        'home_ports', home_ports.home_ports,
        'unique_moorages', unique_moorages.unique_moorages,
        'time_at_home_ports', time_at_home_ports.time_at_home_ports,
        'sum_stay_duration', sum_stay_duration.sum_stay_duration,
        'time_spent_away', time_spent_away.time_spent_away,
        'time_spent_away_arr', time_spent_arr.time_spent_away_arr) INTO stats
        FROM home_ports, unique_moorages,
            time_at_home_ports, sum_stay_duration, time_spent_away, time_spent_arr;
END;
$stats_stays$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.stats_stays_fn
    IS 'Stays/Moorages stats by date';
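
-- Illustrative call (dates are placeholders):
-- SELECT api.stats_stays_fn('2024-01-01', '2024-09-30');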

-- Update api.stats_moorages_view, fix time_spent_at_home_port
CREATE OR REPLACE VIEW api.stats_moorages_view WITH (security_invoker=true,security_barrier=true) AS
    WITH
        home_ports AS (
            select count(*) as home_ports from api.moorages m where home_flag is true
        ),
        unique_moorage AS (
            select count(*) as unique_moorage from api.moorages m
        ),
        time_at_home_ports AS (
            select sum(m.stay_duration) as time_at_home_ports from api.moorages m where home_flag is true
        ),
        time_spent_away AS (
            select sum(m.stay_duration) as time_spent_away from api.moorages m where home_flag is false
        )
    SELECT
        home_ports.home_ports as "home_ports",
        unique_moorage.unique_moorage as "unique_moorages",
        time_at_home_ports.time_at_home_ports as "time_spent_at_home_port(s)",
        time_spent_away.time_spent_away as "time_spent_away"
        FROM home_ports, unique_moorage, time_at_home_ports, time_spent_away;

-- Add stats_fn, user statistics by date
DROP FUNCTION IF EXISTS api.stats_fn;
CREATE OR REPLACE FUNCTION api.stats_fn(
    IN start_date TEXT DEFAULT NULL,
    IN end_date TEXT DEFAULT NULL,
    OUT stats JSONB) RETURNS JSONB AS $stats_global$
DECLARE
    _start_date TIMESTAMPTZ DEFAULT '1970-01-01';
    _end_date TIMESTAMPTZ DEFAULT NOW();
    stats_logs JSONB;
    stats_moorages JSONB;
    stats_logs_topby JSONB;
    stats_moorages_topby JSONB;
BEGIN
    IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        RAISE WARNING '--> stats_fn, filter result stats by date [%]', start_date;
        _start_date := start_date::TIMESTAMPTZ;
        _end_date := end_date::TIMESTAMPTZ;
    END IF;
    RAISE NOTICE '--> stats_fn, _start_date [%], _end_date [%]', _start_date, _end_date;
    -- Get global logs statistics
    SELECT api.stats_logs_fn(_start_date::TEXT, _end_date::TEXT) INTO stats_logs;
    -- Get global stays/moorages statistics
    SELECT api.stats_stays_fn(_start_date::TEXT, _end_date::TEXT) INTO stats_moorages;
    -- Get Top 5 trips statistics
    WITH
        logs_view AS (
            SELECT id,avg_speed,max_speed,max_wind_speed,distance,duration
                FROM api.logbook l
                WHERE _from_time >= _start_date::TIMESTAMPTZ
                    AND _to_time <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
        ),
        logs_top_avg_speed AS (
            SELECT id,avg_speed FROM logs_view
                GROUP BY id,avg_speed
                ORDER BY avg_speed DESC
                LIMIT 5),
        logs_top_speed AS (
            SELECT id,max_speed FROM logs_view
                WHERE max_speed IS NOT NULL
                GROUP BY id,max_speed
                ORDER BY max_speed DESC
                LIMIT 5),
        logs_top_wind_speed AS (
            SELECT id,max_wind_speed FROM logs_view
                WHERE max_wind_speed IS NOT NULL
                GROUP BY id,max_wind_speed
                ORDER BY max_wind_speed DESC
                LIMIT 5),
        logs_top_distance AS (
            SELECT id FROM logs_view
                GROUP BY id,distance
                ORDER BY distance DESC
                LIMIT 5),
        logs_top_duration AS (
            SELECT id FROM logs_view
                GROUP BY id,duration
                ORDER BY duration DESC
                LIMIT 5)
    -- Stats Top Logs
    SELECT jsonb_build_object(
        'stats_logs', stats_logs,
        'stats_moorages', stats_moorages,
        'logs_top_speed', (SELECT jsonb_agg(logs_top_speed.*) FROM logs_top_speed),
        'logs_top_avg_speed', (SELECT jsonb_agg(logs_top_avg_speed.*) FROM logs_top_avg_speed),
        'logs_top_wind_speed', (SELECT jsonb_agg(logs_top_wind_speed.*) FROM logs_top_wind_speed),
        'logs_top_distance', (SELECT jsonb_agg(logs_top_distance.id) FROM logs_top_distance),
        'logs_top_duration', (SELECT jsonb_agg(logs_top_duration.id) FROM logs_top_duration)
    ) INTO stats;
    -- Stats top 5 moorages statistics
    WITH
        stays as (
            select distinct(moorage_id) as moorage_id, sum(duration) as duration, count(id) as reference_count
                from api.stays s
                WHERE s.arrived >= _start_date::TIMESTAMPTZ
                    AND s.departed <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                group by s.moorage_id
                order by s.moorage_id
        ),
        moorages AS (
            SELECT m.id, m.home_flag, m.reference_count, m.stay_duration, m.stay_code, m.country, s.duration as dur, s.reference_count as ref_count
                from api.moorages m, stays s
                where s.moorage_id = m.id
                order by s.moorage_id
        ),
        moorages_top_arrivals AS (
            SELECT id,ref_count FROM moorages
                GROUP BY id,ref_count
                ORDER BY ref_count DESC
                LIMIT 5),
        moorages_top_duration AS (
            SELECT id,dur FROM moorages
                GROUP BY id,dur
                ORDER BY dur DESC
                LIMIT 5),
        moorages_countries AS (
            SELECT DISTINCT(country) FROM moorages
                WHERE country IS NOT NULL AND country <> 'unknown'
                GROUP BY country
                ORDER BY country DESC
                LIMIT 5)
    SELECT stats || jsonb_build_object(
        'moorages_top_arrivals', (SELECT jsonb_agg(moorages_top_arrivals) FROM moorages_top_arrivals),
        'moorages_top_duration', (SELECT jsonb_agg(moorages_top_duration) FROM moorages_top_duration),
        'moorages_top_countries', (SELECT jsonb_agg(moorages_countries.country) FROM moorages_countries)
    ) INTO stats;
END;
$stats_global$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.stats_fn
    IS 'Stats logbook and moorages by date';
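
-- Illustrative call (dates are placeholders); combines the two stats functions above
-- and appends the Top 5 aggregates:
-- SELECT api.stats_fn('2024-01-01', '2024-09-30');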

-- Add mapgl_fn, generate a geojson with all linestrings
DROP FUNCTION IF EXISTS api.mapgl_fn;
CREATE OR REPLACE FUNCTION api.mapgl_fn(start_log integer DEFAULT NULL::integer, end_log integer DEFAULT NULL::integer, start_date text DEFAULT NULL::text, end_date text DEFAULT NULL::text, OUT geojson jsonb)
    RETURNS jsonb
AS $mapgl$
DECLARE
    _geojson jsonb;
BEGIN
    -- Using a sub query to force the id order by time
    -- Extract GeoJSON LineStrings and merge them into a new GeoJSON
    --raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
    IF start_log IS NOT NULL AND end_log IS NULL THEN
        end_log := start_log;
    END IF;
    IF start_date IS NOT NULL AND end_date IS NULL THEN
        end_date := start_date;
    END IF;
    --raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
    IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l.id >= start_log
                        AND l.id <= end_log
                        AND l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l._from_time >= start_date::TIMESTAMPTZ
                        AND l._to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                        AND l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    ELSE
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    END IF;
    -- Generate the GeoJSON with all moorages
    SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features', _geojson || ( SELECT
            jsonb_agg(ST_AsGeoJSON(m.*)::JSONB) as moorages_geojson
            FROM
            ( SELECT
                id,name,stay_code,
                EXTRACT(DAY FROM justify_hours ( stay_duration )) AS Total_Stay,
                geog
                FROM api.moorages
                WHERE geog IS NOT null
            ) AS m
        ) ) INTO geojson;
END;
$mapgl$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.mapgl_fn
    IS 'Get all logbook LineStrings along with all moorages in a geojson to be processed by DeckGL';
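
-- Illustrative calls (ids and dates are placeholders): by log range, by date range,
-- or with no filter for the full history:
-- SELECT api.mapgl_fn(1, 10);
-- SELECT api.mapgl_fn(start_date => '2024-01-01', end_date => '2024-09-30');
-- SELECT api.mapgl_fn();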

-- Refresh user_role permissions
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;

-- Add cron_inactivity_fn, cleanup all data for inactive users and vessels
CREATE OR REPLACE FUNCTION public.cron_inactivity_fn()
    RETURNS void
    LANGUAGE plpgsql
AS $function$
DECLARE
    no_activity_rec record;
    user_settings jsonb;
    total_metrics INTEGER;
    del_metrics INTEGER;
    out_json JSONB;
BEGIN
    -- List accounts with vessel inactivity for more than 200 DAYS
    -- List accounts with no vessel created for more than 200 DAYS
    -- List accounts with no vessel metadata for more than 200 DAYS
    -- Check for users and vessels with no activity for more than 200 days
    -- remove data and notify user
    RAISE NOTICE 'cron_inactivity_fn';
    FOR no_activity_rec in
        with accounts as (
            SELECT a.email,a.first,a.last,
                (a.updated_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '200 DAYS') as no_account_activity,
                COALESCE((m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '200 DAYS'),true) as no_metadata_activity,
                m.vessel_id IS null as no_metadata_vesssel_id,
                m.time IS null as no_metadata_time,
                v.vessel_id IS null as no_vessel_vesssel_id,
                a.preferences->>'ip' as ip,v.name as user_vesssel,
                m.name as sk_vesssel,v.vessel_id as v_vessel_id,m.vessel_id as m_vessel_id,
                a.created_at as account_created,m.time as metadata_updated_at,
                v.created_at as vessel_created,v.updated_at as vessel_updated_at
                FROM auth.accounts a
                LEFT JOIN auth.vessels v ON v.owner_email = a.email
                LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
                order by a.created_at asc
        )
        select * from accounts a where
            (no_account_activity is true
                or no_vessel_vesssel_id is true
                or no_metadata_activity is true
                or no_metadata_vesssel_id is true
                or no_metadata_time is true )
            ORDER BY a.account_created asc
    LOOP
        RAISE NOTICE '-> cron_inactivity_fn for [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_inactivity_fn user_settings [%]', user_settings;
        IF no_activity_rec.no_vessel_vesssel_id is true then
            PERFORM send_notification_fn('no_vessel'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_metadata_vesssel_id is true then
            PERFORM send_notification_fn('no_metadata'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_metadata_activity is true then
            PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_account_activity is true then
            PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        END IF;
        -- Send notification
        PERFORM send_notification_fn('inactivity'::TEXT, user_settings::JSONB);
        -- Delete vessel metrics
        IF no_activity_rec.v_vessel_id IS NOT NULL THEN
            SELECT count(*) INTO total_metrics from api.metrics where vessel_id = no_activity_rec.v_vessel_id;
            WITH deleted AS (delete from api.metrics m where vessel_id = no_activity_rec.v_vessel_id RETURNING *) SELECT count(*) INTO del_metrics FROM deleted;
            SELECT jsonb_build_object('total_metrics', total_metrics, 'del_metrics', del_metrics) INTO out_json;
            RAISE NOTICE '-> debug cron_inactivity_fn [%]', out_json;
        END IF;
    END LOOP;
END;
$function$
;

COMMENT ON FUNCTION public.cron_inactivity_fn() IS 'init by pg_cron, check for vessels with no activity for more than 200 days then send notification';
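
-- Hypothetical pg_cron registration sketch (job name and schedule are illustrative,
-- not part of this migration):
-- SELECT cron.schedule('cron_inactivity', '0 1 * * *', 'select public.cron_inactivity_fn()');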

-- Add cron_deactivated_fn, delete all data for inactive users and vessels
CREATE OR REPLACE FUNCTION public.cron_deactivated_fn()
    RETURNS void
    LANGUAGE plpgsql
AS $function$
DECLARE
    no_activity_rec record;
    user_settings jsonb;
    del_vessel_data JSONB;
    del_meta INTEGER;
    del_vessel INTEGER;
    del_account INTEGER;
    out_json JSONB;
BEGIN
    RAISE NOTICE 'cron_deactivated_fn';
    -- List accounts with vessel inactivity for more than 230 DAYS
    -- List accounts with no vessel created for more than 230 DAYS
    -- List accounts with no vessel metadata for more than 230 DAYS
    -- Remove data, remove the user and notify the user
    FOR no_activity_rec in
        with accounts as (
            SELECT a.email,a.first,a.last,
                (a.updated_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS') as no_account_activity,
                COALESCE((m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS'),true) as no_metadata_activity,
                m.vessel_id IS null as no_metadata_vesssel_id,
                m.time IS null as no_metadata_time,
                v.vessel_id IS null as no_vessel_vesssel_id,
                a.preferences->>'ip' as ip,v.name as user_vesssel,
                m.name as sk_vesssel,v.vessel_id as v_vessel_id,m.vessel_id as m_vessel_id,
                a.created_at as account_created,m.time as metadata_updated_at,
                v.created_at as vessel_created,v.updated_at as vessel_updated_at
                FROM auth.accounts a
                LEFT JOIN auth.vessels v ON v.owner_email = a.email
                LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
                order by a.created_at asc
        )
        select * from accounts a where
            (no_account_activity is true
                or no_vessel_vesssel_id is true
                or no_metadata_activity is true
                or no_metadata_vesssel_id is true
                or no_metadata_time is true )
            ORDER BY a.account_created asc
    LOOP
        RAISE NOTICE '-> cron_deactivated_fn for [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_deactivated_fn user_settings [%]', user_settings;
        IF no_activity_rec.no_vessel_vesssel_id is true then
            PERFORM send_notification_fn('no_vessel'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_metadata_vesssel_id is true then
            PERFORM send_notification_fn('no_metadata'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_metadata_activity is true then
            PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_account_activity is true then
            PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        END IF;
        -- Send notification
        PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
        -- Delete vessel data
        IF no_activity_rec.v_vessel_id IS NOT NULL THEN
            SELECT public.delete_vessel_fn(no_activity_rec.v_vessel_id) INTO del_vessel_data;
            WITH deleted AS (delete from api.metadata where vessel_id = no_activity_rec.v_vessel_id RETURNING *) SELECT count(*) INTO del_meta FROM deleted;
            SELECT jsonb_build_object('del_metadata', del_meta) || del_vessel_data INTO del_vessel_data;
            RAISE NOTICE '-> debug cron_deactivated_fn [%]', del_vessel_data;
        END IF;
        -- Delete account data
        WITH deleted AS (delete from auth.vessels where owner_email = no_activity_rec.email RETURNING *) SELECT count(*) INTO del_vessel FROM deleted;
        WITH deleted AS (delete from auth.accounts where email = no_activity_rec.email RETURNING *) SELECT count(*) INTO del_account FROM deleted;
        SELECT jsonb_build_object('del_account', del_account, 'del_vessel', del_vessel) || del_vessel_data INTO out_json;
        RAISE NOTICE '-> debug cron_deactivated_fn [%]', out_json;
        -- TODO remove keycloak and grafana provisioning
    END LOOP;
END;
$function$
;

COMMENT ON FUNCTION public.cron_deactivated_fn() IS 'init by pg_cron, check for vessels with no activity for more than 230 days then send notification and delete account and vessel data';

-- Remove unused and duplicate functions
DROP FUNCTION IF EXISTS public.cron_process_no_activity_fn;
DROP FUNCTION IF EXISTS public.cron_process_inactivity_fn;
DROP FUNCTION IF EXISTS public.cron_process_deactivated_fn;

-- Update version
UPDATE public.app_settings
    SET value='0.7.7'
    WHERE "name"='app.version';

\c postgres
253	initdb/99_migrations_202410.sql (new file)
@@ -0,0 +1,253 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration October 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Update moorages map, export more properties (notes,reference_count) from the moorages tbl
CREATE OR REPLACE FUNCTION api.export_moorages_geojson_fn(OUT geojson jsonb)
    RETURNS jsonb
    LANGUAGE plpgsql
AS $function$
DECLARE
BEGIN
    SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features',
            ( SELECT
                json_agg(ST_AsGeoJSON(m.*)::JSON) as moorages_geojson
                FROM
                ( SELECT
                    id,name,stay_code,notes,reference_count,
                    EXTRACT(DAY FROM justify_hours ( stay_duration )) AS Total_Stay,
                    geog
                    FROM api.moorages
                    WHERE geog IS NOT NULL
                ) AS m
            )
        ) INTO geojson;
END;
$function$
;

COMMENT ON FUNCTION api.export_moorages_geojson_fn(out jsonb) IS 'Export moorages as geojson';
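
-- Illustrative call; returns a FeatureCollection of moorage Points:
-- SELECT api.export_moorages_geojson_fn();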

-- Update mapgl_fn, update the moorages map sub query to export more properties (notes,reference_count) from the moorages tbl
DROP FUNCTION IF EXISTS api.mapgl_fn;
CREATE OR REPLACE FUNCTION api.mapgl_fn(start_log integer DEFAULT NULL::integer, end_log integer DEFAULT NULL::integer, start_date text DEFAULT NULL::text, end_date text DEFAULT NULL::text, OUT geojson jsonb)
    RETURNS jsonb
AS $mapgl$
DECLARE
    _geojson jsonb;
BEGIN
    -- Using a sub query to force the id order by time
    -- Extract GeoJSON LineStrings and merge them into a new GeoJSON
    --raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
    IF start_log IS NOT NULL AND end_log IS NULL THEN
        end_log := start_log;
    END IF;
    IF start_date IS NOT NULL AND end_date IS NULL THEN
        end_date := start_date;
    END IF;
    --raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
    IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l.id >= start_log
                        AND l.id <= end_log
                        AND l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l._from_time >= start_date::TIMESTAMPTZ
                        AND l._to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                        AND l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    ELSE
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    END IF;
    -- Generate the GeoJSON with all moorages
    SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features', _geojson || ( SELECT
            jsonb_agg(ST_AsGeoJSON(m.*)::JSONB) as moorages_geojson
            FROM
            ( SELECT
                id,name,stay_code,notes,reference_count,
                EXTRACT(DAY FROM justify_hours ( stay_duration )) AS Total_Stay,
                geog
                FROM api.moorages
                WHERE geog IS NOT null
            ) AS m
        ) ) INTO geojson;
END;
$mapgl$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.mapgl_fn
    IS 'Generate a geojson with all logs as geometry LineString with moorages as geometry Point to be processed by DeckGL';

-- Update logbook_update_geojson_fn, fix corrupt linestring properties
CREATE OR REPLACE FUNCTION public.logbook_update_geojson_fn(_id integer, _start text, _end text, OUT _track_geojson json)
    RETURNS json
    LANGUAGE plpgsql
AS $function$
declare
    log_geojson jsonb;
    metrics_geojson jsonb;
    _map jsonb;
begin
    -- GeoJson Feature Logbook linestring
    SELECT
        ST_AsGeoJSON(log.*) into log_geojson
        FROM
        ( SELECT
            id,name,
            distance,
            duration,
            avg_speed,
            max_speed,
            max_wind_speed,
            _from_time,
            _to_time,
            _from_moorage_id,
            _to_moorage_id,
            notes,
            extra['avg_wind_speed'] as avg_wind_speed,
            track_geom
            FROM api.logbook
            WHERE id = _id
        ) AS log;
    -- GeoJson Feature Metrics point
    SELECT
        json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
        FROM (
            ( SELECT
                time,
                courseovergroundtrue,
                speedoverground,
                windspeedapparent,
                longitude,latitude,
                '' AS notes,
                coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null) as truewindspeed,
                coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null) as truewinddirection,
                coalesce(status, null) as status,
                st_makepoint(longitude,latitude) AS geo_point
                FROM api.metrics m
                WHERE m.latitude IS NOT NULL
                    AND m.longitude IS NOT NULL
                    AND time >= _start::TIMESTAMPTZ
                    AND time <= _end::TIMESTAMPTZ
                    AND vessel_id = current_setting('vessel.id', false)
                ORDER BY m.time ASC
            )
        ) AS t;

    -- Merge jsonb
    SELECT log_geojson::jsonb || metrics_geojson::jsonb into _map;
    -- output
    SELECT
        json_build_object(
            'type', 'FeatureCollection',
            'features', _map
        ) into _track_geojson;
END;
$function$
;
COMMENT ON FUNCTION public.logbook_update_geojson_fn(in int4, in text, in text, out json) IS 'Update log details with geojson';
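
-- Illustrative call (the id and time bounds are placeholders); the metrics sub query
-- above reads the vessel.id session setting, so it must be set first:
-- SET vessel.id = 'hypothetical_vessel_id';
-- SELECT public.logbook_update_geojson_fn(1, '2024-10-01T00:00:00Z', '2024-10-01T06:00:00Z');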

-- Add trigger to update logbook stats from user-edited geojson
DROP FUNCTION IF EXISTS public.update_logbook_with_geojson_trigger_fn;
CREATE OR REPLACE FUNCTION public.update_logbook_with_geojson_trigger_fn() RETURNS TRIGGER AS $$
DECLARE
    geojson JSONB;
    feature JSONB;
BEGIN
    -- Parse the incoming GeoJSON data from the track_geojson column
    geojson := NEW.track_geojson::jsonb;

    -- Extract the first feature (assume it is the LineString)
    feature := geojson->'features'->0;

    IF geojson IS NOT NULL AND feature IS NOT NULL AND (feature->'properties' ? 'x-update') THEN

        -- Get properties from the feature to extract avg_speed, max_speed and max_wind_speed
        NEW.avg_speed := (feature->'properties'->>'avg_speed')::FLOAT;
        NEW.max_speed := (feature->'properties'->>'max_speed')::FLOAT;
        NEW.max_wind_speed := (feature->'properties'->>'max_wind_speed')::FLOAT;
        NEW.extra := jsonb_set( NEW.extra,
                                '{avg_wind_speed}',
                                to_jsonb((feature->'properties'->>'avg_wind_speed')::FLOAT),
                                true -- this flag means it will create the key if it does not exist
                              );

        -- Calculate the LineString's actual spatial distance
        NEW.track_geom := ST_GeomFromGeoJSON(feature->'geometry'::text);
        NEW.distance := TRUNC (ST_Length(NEW.track_geom,false)::INT * 0.0005399568, 4); -- convert meters to NM

    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.update_logbook_with_geojson_trigger_fn
    IS 'Extracts specific properties (avg_speed, max_speed, max_wind_speed, avg_wind_speed) from a geometry LINESTRING part of a GeoJSON FeatureCollection, recalculates the distance, and then updates the corresponding columns of the logbook table';

-- Add trigger on logbook update to update metrics from track_geojson
CREATE TRIGGER update_logbook_with_geojson_trigger_fn
    BEFORE UPDATE OF track_geojson ON api.logbook
    FOR EACH ROW
    WHEN (NEW.track_geojson IS DISTINCT FROM OLD.track_geojson)
    EXECUTE FUNCTION public.update_logbook_with_geojson_trigger_fn();
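
-- Illustrative trigger exercise (the log id and property value are placeholders): an
-- UPDATE that rewrites track_geojson with an 'x-update' property fires the trigger above.
-- UPDATE api.logbook
--     SET track_geojson = jsonb_set(track_geojson, '{features,0,properties,x-update}', 'true')
--     WHERE id = 1;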

-- Refresh user_role permissions
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;

-- Update version
UPDATE public.app_settings
    SET value='0.7.8'
    WHERE "name"='app.version';

\c postgres
1976	initdb/99_migrations_202411.sql (new file; diff suppressed because it is too large)
@@ -19,6 +19,8 @@ INSERT INTO app_settings (name, value) VALUES
    ('app.telegram_bot_token', '${PGSAIL_TELEGRAM_BOT_TOKEN}'),
    ('app.grafana_admin_uri', '${PGSAIL_GRAFANA_ADMIN_URI}'),
    ('app.keycloak_uri', '${PGSAIL_KEYCLOAK_URI}'),
    ('app.gis_url', '${PGSAIL_QGIS_URL}'),
    ('app.videos_url', '${PGSAIL_VIDEOS_URL}'),
    ('app.url', '${PGSAIL_APP_URL}'),
    ('app.version', '${PGSAIL_VERSION}');
-- Update comment with version
@@ -27,6 +29,8 @@ COMMENT ON DATABASE signalk IS 'PostgSail version ${PGSAIL_VERSION}';
ALTER ROLE authenticator WITH PASSWORD '${PGSAIL_AUTHENTICATOR_PASSWORD}';
ALTER ROLE grafana WITH PASSWORD '${PGSAIL_GRAFANA_PASSWORD}';
ALTER ROLE grafana_auth WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
ALTER ROLE qgis_role WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
ALTER ROLE maplapse_role WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
END

curl -s -XPOST -Hx-pgsail:${PGSAIL_VERSION} https://api.openplotter.cloud/rpc/telemetry_fn
@@ -1 +1 @@
0.7.4
0.8.0

File diff suppressed because one or more lines are too long
@@ -13,6 +13,6 @@ $ bash tests.sh

## docker
```bash
$ docker-compose up -d db && sleep 15 && docker-compose up -d api && sleep 5
$ docker-compose -f docker-compose.dev.yml -f docker-compose.yml up tests
$ docker compose up -d db && sleep 15 && docker compose up -d api && sleep 5
$ docker compose -f docker-compose.dev.yml -f docker-compose.yml up tests
```
@@ -600,7 +600,7 @@ request.set('User-Agent', 'PostgSail unit tests');
for (i = 0; i < test.vessel_metrics['metrics'].length; i++) {
    data[i] = test.vessel_metrics['metrics'][i];
    // Override time, -2h to allow new data to be added later without delay.
    data[i]['time'] = moment.utc().subtract(2, 'hours').add(i, 'minutes').format();
    data[i]['time'] = moment.utc().subtract(1, 'day').add(i, 'minutes').format();
    // Override client_id
    data[i]['client_id'] = test.vessel_metadata.client_id;
}
@@ -611,6 +611,11 @@ request.set('User-Agent', 'PostgSail unit tests');
// Force the last valid entry to be back in time from the previous one, it should be ignored silently
data.at(-1).time = moment.utc(data.at(-3).time).subtract(1, 'minutes').format();
//console.log(data[0]);
// Force the -2 entry to be in the future, add 1 year, it should be ignored silently
data.splice(i-2, 1, data.at(-2))
data.at(-3).time = moment.utc(data.at(-3).time).add(1, 'year').format();
//console.log(data.at(-2));
//console.log(data.at(-1));

it('/metrics?select=time', function(done) {
    request = supertest.agent(test.cname);
@@ -629,6 +634,7 @@ request.set('User-Agent', 'PostgSail unit tests');
    res.header['content-type'].should.match(new RegExp('json','g'));
    res.header['server'].should.match(new RegExp('postgrest','g'));
    should.exist(res.body);
    //console.log(res.body);
    res.body.length.should.match(test.vessel_metrics['metrics'].length-3);
    done(err);
});
|
||||
*
|
||||
* npm install supertest should mocha mochawesome moment
|
||||
* alias mocha="./node_modules/mocha/bin/_mocha"
|
||||
* mocha index.js --reporter mochawesome --reporter-options reportDir=/mnt/postgsail/,reportFilename=report_api.html
|
||||
* mocha index2.js --reporter mochawesome --reporter-options reportDir=/mnt/postgsail/,reportFilename=report_api.html
|
||||
*
|
||||
*/
|
||||
|
||||
@@ -432,11 +432,11 @@ request.set('User-Agent', 'PostgSail unit tests');
|
||||
for (i = 0; i < test.vessel_metrics['metrics'].length; i++) {
|
||||
data[i] = test.vessel_metrics['metrics'][i];
|
||||
// Override time, +1h because previous sample include 47 entry.
|
||||
data[i]['time'] = moment().add(1, 'hour').add(i, 'minutes').format();
|
||||
data[i]['time'] = moment.utc().subtract(2, 'hours').add(i, 'minutes').format();
|
||||
// Override client_id
|
||||
data[i]['client_id'] = test.vessel_metadata.client_id;
|
||||
}
|
||||
console.log(data[0]);
|
||||
//console.log(data[0]);
|
||||
|
||||
it('/metrics?select=time', function(done) {
|
||||
request = supertest.agent(test.cname);
|
||||
|
@@ -6,7 +6,7 @@
|
||||
*
|
||||
* npm install supertest should mocha mochawesome moment
|
||||
* alias mocha="./node_modules/mocha/bin/_mocha"
|
||||
* mocha index.js --reporter mochawesome --reporter-options reportDir=/mnt/postgsail/,reportFilename=report_api.html
|
||||
* mocha index3.js --reporter mochawesome --reporter-options reportDir=/mnt/postgsail/,reportFilename=report_api.html
|
||||
*
|
||||
*/
|
||||
|
||||
|
@@ -142,7 +142,7 @@ var moment = require("moment");
.set(test.logs.header.name, test.logs.header.value)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(404);
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
@@ -192,7 +192,7 @@ var moment = require("moment");
.set("Accept", "application/json")
.end(function (err, res) {
console.log(res.text);
res.status.should.equal(404); // return 404 as it is not enabled in user settings.
res.status.should.equal(200); // return 404 as it is not enabled in user settings.
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
203
tests/index6.js
Normal file
@@ -0,0 +1,203 @@
"use strict";
/*
* Unit test #6
* Public/Anonymous access
*
* process.env.PGSAIL_API_URI = from inside the docker
*
* npm install supertest should mocha mochawesome moment
* alias mocha="./node_modules/mocha/bin/_mocha"
* mocha index6.js --reporter mochawesome --reporter-options reportDir=/mnt/postgsail/,reportFilename=report_api.html
*
*/

const sleep = (ms) => new Promise((r) => setTimeout(r, ms));

const supertest = require("supertest");
// Deprecated
const should = require("should");
//const chai = require("chai");
//const should = chai.should();
let request = null;
var moment = require("moment");

// Users Array
[
{
cname: process.env.PGSAIL_API_URI,
name: "PostgSail unit test anonymous, no x-is-public header",
moorages: {
url: "/moorages_view",
payload: null,
res: {},
},
stays: {
url: "/stays_view",
payload: null,
res: {},
},
logs: {
url: "/logs_view",
payload: null,
res: {},
},
log: {
url: "/log_view?id=eq.1",
payload: null,
res: {},
},
monitoring: {
url: "/monitoring_view",
payload: null,
res: {},
},
timelapse: {
url: "/rpc/timelapse_fn",
payload: null,
res: {},
},
timelapse_full: {
url: "/rpc/timelapse_fn",
payload: null,
res: {},
},
stats_logs: {
url: "/rpc/stats_logs_fn",
payload: null,
res: {},
},
stats_stays: {
url: "/rpc/stats_stay_fn",
payload: null,
res: {},
},
export_gpx: {
url: "/rpc/export_logbook_gpx_fn",
payload: null,
res: {},
},
},
].forEach(function (test) {
//console.log(`${test.cname}`);
describe(`${test.name}`, function () {
request = supertest.agent(test.cname);
request.set("User-Agent", "PostgSail unit tests");

describe("With no JWT as api_anonymous, no x-is-public", function () {
it("/stays_view, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get(test.stays.url)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
res.body.length.should.be.equal(0);
done(err);
});
});
it("/moorages_view, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get(test.moorages.url)
.set("Accept", "application/json")
|
||||
.end(function (err, res) {
|
||||
res.status.should.equal(200);
|
||||
should.exist(res.header["content-type"]);
|
||||
should.exist(res.header["server"]);
|
||||
res.header["content-type"].should.match(new RegExp("json", "g"));
|
||||
res.header["server"].should.match(new RegExp("postgrest", "g"));
|
||||
res.body.length.should.be.equal(0);
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
it("/logs_view, api_anonymous no jwt token", function (done) {
|
||||
// Reset agent so we do not save cookies
|
||||
request = supertest.agent(test.cname);
|
||||
request
|
||||
.get(test.logs.url)
|
||||
.set("Accept", "application/json")
|
||||
.end(function (err, res) {
|
||||
res.status.should.equal(200);
|
||||
should.exist(res.header["content-type"]);
|
||||
should.exist(res.header["server"]);
|
||||
res.header["content-type"].should.match(new RegExp("json", "g"));
|
||||
res.header["server"].should.match(new RegExp("postgrest", "g"));
|
||||
res.body.length.should.be.equal(0);
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
it("/log_view, api_anonymous no jwt token", function (done) {
|
||||
// Reset agent so we do not save cookies
|
||||
request = supertest.agent(test.cname);
|
||||
request
|
||||
.get(test.log.url)
|
||||
.set("Accept", "application/json")
|
||||
.end(function (err, res) {
|
||||
res.status.should.equal(200);
|
||||
should.exist(res.header["content-type"]);
|
||||
should.exist(res.header["server"]);
|
||||
res.header["content-type"].should.match(new RegExp("json", "g"));
|
||||
res.header["server"].should.match(new RegExp("postgrest", "g"));
|
||||
res.body.length.should.be.equal(0);
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
it("/monitoring_view, api_anonymous no jwt token", function (done) {
|
||||
// Reset agent so we do not save cookies
|
||||
request = supertest.agent(test.cname);
|
||||
request
|
||||
.get(test.monitoring.url)
|
||||
.set("Accept", "application/json")
|
||||
.end(function (err, res) {
|
||||
console.log(res.text);
|
||||
res.status.should.equal(200);
|
||||
should.exist(res.header["content-type"]);
|
||||
should.exist(res.header["server"]);
|
||||
res.header["content-type"].should.match(new RegExp("json", "g"));
|
||||
res.header["server"].should.match(new RegExp("postgrest", "g"));
|
||||
res.body.length.should.be.equal(0);
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
it("/rpc/timelapse_fn, api_anonymous no jwt token", function (done) {
|
||||
// Reset agent so we do not save cookies
|
||||
request = supertest.agent(test.cname);
|
||||
request
|
||||
.post(test.timelapse.url)
|
||||
.set("Accept", "application/json")
|
||||
.end(function (err, res) {
|
||||
console.log(res.text);
|
||||
res.status.should.equal(200);
|
||||
should.exist(res.header["content-type"]);
|
||||
should.exist(res.header["server"]);
|
||||
res.header["content-type"].should.match(new RegExp("json", "g"));
|
||||
res.header["server"].should.match(new RegExp("postgrest", "g"));
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
it("/rpc/export_logbook_gpx_fn, api_anonymous no jwt token", function (done) {
|
||||
// Reset agent so we do not save cookies
|
||||
request = supertest.agent(test.cname);
|
||||
request
|
||||
.post(test.export_gpx.url)
|
||||
.send({_id: 1})
|
||||
.set("Accept", "application/json")
|
||||
.end(function (err, res) {
|
||||
console.log(res.text)
|
||||
res.status.should.equal(401);
|
||||
should.exist(res.header["content-type"]);
|
||||
should.exist(res.header["server"]);
|
||||
res.header["content-type"].should.match(new RegExp("json", "g"));
|
||||
res.header["server"].should.match(new RegExp("postgrest", "g"));
|
||||
done(err);
|
||||
});
|
||||
});
|
||||
}); // With no JWT as api_anonymous, no x-is-public
}); // describe test.name
}); // Users Array
@@ -180,6 +180,18 @@
"status" : "sailing",
"metrics" : {"navigation.log": 17441766, "navigation.trip.log": 80747, "navigation.headingTrue": 3.5972, "navigation.gnss.satellites": 10, "environment.depth.belowKeel": 20.948999999999998, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.47, "environment.water.temperature": 313.15, "electrical.batteries.1.current": 192.4, "electrical.batteries.1.voltage": 14.56, "navigation.gnss.antennaAltitude": 0.39, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 20.95, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 64, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 3, "network.n2k.ngt-1.130356.ch1.bandwidth": 2, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 4, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 10, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0}
},
{
"time" : "2022-07-31T11:41:28.561Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
"latitude" : 59.7163052,
"longitude" : 25.7325741,
"speedoverground" : 9.5,
"courseovergroundtrue" : 198.8,
"windspeedapparent" : 18.0,
"anglespeedapparent" : 41.0,
"status" : "sailing",
"metrics" : {"navigation.log": 17441766, "navigation.trip.log": 80747, "navigation.headingTrue": 3.5972, "navigation.gnss.satellites": 10, "environment.depth.belowKeel": 20.948999999999998, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.47, "environment.water.temperature": 313.15, "electrical.batteries.1.current": 192.4, "electrical.batteries.1.voltage": 14.56, "navigation.gnss.antennaAltitude": 0.39, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 20.95, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 64, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 3, "network.n2k.ngt-1.130356.ch1.bandwidth": 2, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 4, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 10, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0}
},
{
"time" : "2022-07-31T11:42:28.569Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
@@ -24,7 +24,6 @@ INSERT INTO api.logbook

\echo 'Set config'
SELECT set_config('user.email', 'demo+kapla@openplotter.cloud', false);
--SELECT set_config('vessel.client_id', 'vessels.urn:mrn:imo:mmsi:123456789', false);

\echo 'Process badge'
SELECT badges_logbook_fn(5,NOW()::TEXT);
@@ -32,11 +31,10 @@ SELECT badges_logbook_fn(6,NOW()::TEXT);
SELECT badges_geom_fn(5,NOW()::TEXT);
SELECT badges_geom_fn(6,NOW()::TEXT);

\echo 'Check badges for user'
\echo 'Check badges for all users'
SELECT jsonb_object_keys ( a.preferences->'badges' ) FROM auth.accounts a;

\echo 'Check details from vessel_id kapla'
--SELECT get_user_settings_from_vesselid_fn('vessels.urn:mrn:imo:mmsi:123456789'::TEXT);
SELECT
json_build_object(
'boat', v.name,
@@ -68,6 +66,9 @@ SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
\echo 'Process badge'
SELECT badges_moorages_fn();

\echo 'Check badges for all users'
SELECT jsonb_object_keys ( a.preferences->'badges' ) FROM auth.accounts a;

\echo 'Check details from vessel_id aava'
--SELECT get_user_settings_from_vesselid_fn('vessels.urn:mrn:imo:mmsi:787654321'::TEXT);
SELECT
@@ -27,7 +27,7 @@ badges_geom_fn |
-[ RECORD 1 ]--+-
badges_geom_fn |

Check badges for user
Check badges for all users
-[ RECORD 1 ]-----+------------------
jsonb_object_keys | Helmsman
-[ RECORD 2 ]-----+------------------
@@ -76,6 +76,38 @@ Process badge
-[ RECORD 1 ]------+-
badges_moorages_fn |

Check badges for all users
-[ RECORD 1 ]-----+------------------
jsonb_object_keys | Helmsman
-[ RECORD 2 ]-----+------------------
jsonb_object_keys | Wake Maker
-[ RECORD 3 ]-----+------------------
jsonb_object_keys | Balearic Sea
-[ RECORD 4 ]-----+------------------
jsonb_object_keys | Stormtrooper
-[ RECORD 5 ]-----+------------------
jsonb_object_keys | Gulf of Finland
-[ RECORD 6 ]-----+------------------
jsonb_object_keys | Helmsman
-[ RECORD 7 ]-----+------------------
jsonb_object_keys | Wake Maker
-[ RECORD 8 ]-----+------------------
jsonb_object_keys | Club Alaska
-[ RECORD 9 ]-----+------------------
jsonb_object_keys | Stormtrooper
-[ RECORD 10 ]----+------------------
jsonb_object_keys | Captain Award
-[ RECORD 11 ]----+------------------
jsonb_object_keys | Caribbean Sea
-[ RECORD 12 ]----+------------------
jsonb_object_keys | Gulf of Alaska
-[ RECORD 13 ]----+------------------
jsonb_object_keys | Gulf of Finland
-[ RECORD 14 ]----+------------------
jsonb_object_keys | Navigator Award
-[ RECORD 15 ]----+------------------
jsonb_object_keys | Tropical Traveler

Check details from vessel_id aava
-[ RECORD 1 ]-+--------------------------------------------------------------------------------------------------------------
user_settings | {"boat" : "aava", "recipient" : "first_aava", "email" : "demo+aava@openplotter.cloud", "pushover_key" : null}
@@ -25,7 +25,7 @@ SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
\echo 'logbook'
SELECT count(*) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
\echo 'logbook'
SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, trajectory(trip)::geometry as track_geom, distance,duration,round(avg_speed::NUMERIC,6),max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);

-- Test stays for user
\echo 'stays'
@@ -69,6 +69,12 @@ SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('ve
SELECT api.update_logbook_observations_fn(1, '{"tags": ["tag_name"]}'::TEXT);
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);

\echo 'Check numbers of geojson properties'
SELECT jsonb_object_keys(jsonb_path_query(track_geojson, '$.features[0].properties'))
FROM api.logbook where id = 1 AND vessel_id = current_setting('vessel.id', false);
SELECT jsonb_object_keys(jsonb_path_query(track_geojson, '$.features[1].properties'))
FROM api.logbook where id = 1 AND vessel_id = current_setting('vessel.id', false);

-- Check export
--\echo 'check logbook export fn'
--SELECT api.export_logbook_geojson_fn(1);
@@ -16,28 +16,28 @@ logbook
count | 2

logbook
-[ RECORD 1 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-[ RECORD 1 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Pojoviken to Norra hamnen
_from_time | t
_to_time | t
track_geojson | t
track_geom | 0102000020E61000001C000000B0DEBBE0E68737404DA938FBF0094E40B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
track_geom | 0102000020E61000001A000000B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
distance | 7.6447
duration | PT27M
avg_speed | 3.6357142857142852
round | 3.635714
max_speed | 6.1
max_wind_speed | 22.1
notes |
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 14.549999999999999}
-[ RECORD 2 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-[ RECORD 2 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Norra hamnen to Ekenäs
_from_time | t
_to_time | t
track_geojson | t
track_geom | 0102000020E610000015000000029A081B9E6E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
track_geom | 0102000020E610000013000000029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
distance | 8.8968
duration | PT20M
avg_speed | 5.4523809523809526
round | 5.452381
max_speed | 6.5
max_wind_speed | 37.2
notes |
@@ -66,21 +66,21 @@ stay_code | 4

eventlogs_view
-[ RECORD 1 ]
count | 12
count | 11

stats_logs_fn
SELECT 1
-[ RECORD 1 ]+----------
name | "kapla"
count | 4
max_speed | 7.1
max_distance | 8.8968
max_speed | 9.5
max_distance | 68.8677
max_duration | "PT1H11M"
?column? | 3
?column? | 30.1154
?column? | "PT2H43M"
?column? | 90.6030
?column? | "PT2H44M"
?column? | 44.2
?column? | 2
?column? | 3
?column? | 4
?column? | 4
first_date | t
@@ -110,3 +110,56 @@ update_logbook_observations_fn | t
-[ RECORD 1 ]--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}

Check numbers of geojson properties
-[ RECORD 1 ]-----+-----------------
jsonb_object_keys | id
-[ RECORD 2 ]-----+-----------------
jsonb_object_keys | _to
-[ RECORD 3 ]-----+-----------------
jsonb_object_keys | name
-[ RECORD 4 ]-----+-----------------
jsonb_object_keys | _from
-[ RECORD 5 ]-----+-----------------
jsonb_object_keys | notes
-[ RECORD 6 ]-----+-----------------
jsonb_object_keys | times
-[ RECORD 7 ]-----+-----------------
jsonb_object_keys | _to_time
-[ RECORD 8 ]-----+-----------------
jsonb_object_keys | distance
-[ RECORD 9 ]-----+-----------------
jsonb_object_keys | duration
-[ RECORD 10 ]----+-----------------
jsonb_object_keys | avg_speed
-[ RECORD 11 ]----+-----------------
jsonb_object_keys | max_speed
-[ RECORD 12 ]----+-----------------
jsonb_object_keys | _from_time
-[ RECORD 13 ]----+-----------------
jsonb_object_keys | _to_moorage_id
-[ RECORD 14 ]----+-----------------
jsonb_object_keys | avg_wind_speed
-[ RECORD 15 ]----+-----------------
jsonb_object_keys | max_wind_speed
-[ RECORD 16 ]----+-----------------
jsonb_object_keys | _from_moorage_id

-[ RECORD 1 ]-----+-------
jsonb_object_keys | cog
-[ RECORD 2 ]-----+-------
jsonb_object_keys | sog
-[ RECORD 3 ]-----+-------
jsonb_object_keys | twa
-[ RECORD 4 ]-----+-------
jsonb_object_keys | twd
-[ RECORD 5 ]-----+-------
jsonb_object_keys | tws
-[ RECORD 6 ]-----+-------
jsonb_object_keys | time
-[ RECORD 7 ]-----+-------
jsonb_object_keys | trip
-[ RECORD 8 ]-----+-------
jsonb_object_keys | notes
-[ RECORD 9 ]-----+-------
jsonb_object_keys | status
@@ -13,7 +13,7 @@ select current_database();

-- Check the number of process pending
\echo 'Check the number of process pending'
-- Should be 22
-- Should be 24
SELECT count(*) as jobs from public.process_queue pq where pq.processed is null;
--set role scheduler
SELECT public.run_cron_jobs();
@@ -7,7 +7,7 @@ You are now connected to database "signalk" as user "username".
Expanded display is on.
Check the number of process pending
-[ RECORD 1 ]
jobs | 26
jobs | 24

-[ RECORD 1 ]-+-
run_cron_jobs |
@@ -17,5 +17,5 @@ any_pending_jobs | 2

Check the number of metrics entries
-[ RECORD 1 ]-+----
metrics_count | 172
metrics_count | 173
@@ -15,15 +15,15 @@ select current_database();
-- grafana_auth
SET ROLE grafana_auth;
\echo 'ROLE grafana_auth current_setting'
SELECT current_user, current_setting('user.email', true), current_setting('vessel.client_id', true), current_setting('vessel.id', true);
SELECT current_user, current_setting('user.email', true), current_setting('vessel.id', true);

--SELECT a.pass,v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.email = 'demo+kapla@openplotter.cloud' AND a.role = 'user_role' AND cast(a.preferences->>'email_valid' as Boolean) = True AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id;
--SELECT a.pass,v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.email = 'demo+kapla@openplotter.cloud' AND a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id;
\echo 'link vessel and user based on current_setting'
SELECT v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id;
SELECT v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id ORDER BY a.id DESC;

\echo 'auth.accounts details'
SELECT a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT NULL AS pass, a.role, a.preferences->'telegram'->'chat' AS telegram, a.preferences->'pushover_user_key' AS pushover_user_key FROM auth.accounts AS a;
SELECT a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT NULL AS pass, a.role, a.preferences->'telegram'->'chat' AS telegram, a.preferences->'pushover_user_key' AS pushover_user_key FROM auth.accounts AS a ORDER BY a.id DESC;
\echo 'auth.vessels details'
--SELECT 'SELECT ' || STRING_AGG('v.' || column_name, ', ') || ' FROM auth.vessels AS v' FROM information_schema.columns WHERE table_name = 'vessels' AND table_schema = 'auth' AND column_name NOT IN ('created_at', 'updated_at');
SELECT v.vessel_id IS NOT NULL AS vessel_id, v.owner_email, v.mmsi, v.name, v.role FROM auth.vessels AS v;
@@ -46,7 +46,7 @@ SELECT v.vessel_id as "vessel_id" FROM auth.vessels v WHERE v.owner_email = 'dem
SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;

--SELECT current_user, current_setting('user.email', true), current_setting('vessel.client_id', true), current_setting('vessel.id', true);
SELECT current_user, current_setting('user.email', true), current_setting('vessel.client_id', true);
SELECT current_user, current_setting('user.email', true);

SELECT v.name AS __text, m.client_id AS __value FROM auth.vessels v JOIN api.metadata m ON v.owner_email = 'demo+kapla@openplotter.cloud' and m.vessel_id = v.vessel_id;
@@ -11,7 +11,6 @@ ROLE grafana_auth current_setting
current_user | grafana_auth
current_setting |
current_setting |
current_setting |

link vessel and user based on current_setting
-[ RECORD 1 ]----------------------------------------------------------------
@@ -93,7 +92,6 @@ vessel_id | t
-[ RECORD 1 ]---+-----------------------------
current_user | grafana
current_setting | demo+kapla@openplotter.cloud
current_setting |

-[ RECORD 1 ]--------------------------------------------------------------
__text | kapla
@@ -257,23 +255,20 @@ id | 2
moorage | Norra hamnen
default_stay | Dock
default_stay_id | 4
total_stay | 0
total_duration | PT2M
arrivals_departures | 2
total_duration | PT2M
-[ RECORD 2 ]-------+---------------------
id | 1
moorage | patch moorage name 3
default_stay | Anchor
default_stay_id | 2
total_stay | 0
total_duration | PT1M
arrivals_departures | 1
total_duration | PT1M
-[ RECORD 3 ]-------+---------------------
id | 3
moorage | Ekenäs
default_stay | Unknown
default_stay_id | 1
total_stay | 0
total_duration | PT0S
arrivals_departures | 1
total_duration | PT0S
@@ -17,12 +17,12 @@ logbook
count | 4

logbook
-[ RECORD 1 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-[ RECORD 1 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | patch log name 3
_from_time | t
_to_time | t
track_geojson | t
track_geom | 0102000020E61000001C000000B0DEBBE0E68737404DA938FBF0094E40B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
track_geom | 0102000020E61000001A000000B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
distance | 7.6447
duration | PT27M
avg_speed | 3.6357142857142852
@@ -30,12 +30,12 @@ max_speed | 6.1
max_wind_speed | 22.1
notes | new log note 3
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
-[ RECORD 2 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-[ RECORD 2 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Norra hamnen to Ekenäs
_from_time | t
_to_time | t
track_geojson | t
track_geom | 0102000020E610000015000000029A081B9E6E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
track_geom | 0102000020E610000013000000029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
distance | 8.8968
duration | PT20M
avg_speed | 5.4523809523809526
@@ -43,7 +43,7 @@ max_speed | 6.5
max_wind_speed | 37.2
notes |
extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 10.476190476190478}
-[ RECORD 3 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-[ RECORD 3 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Tropics Zone
_from_time | t
_to_time | t
@@ -56,7 +56,7 @@ max_speed |
max_wind_speed |
notes |
extra |
-[ RECORD 4 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-[ RECORD 4 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Alaska Zone
_from_time | t
_to_time | t
36
tests/sql/maplapse.sql
Normal file
@@ -0,0 +1,36 @@
---------------------------------------------------------------------------
-- Listing
--

-- List current database
select current_database();

-- connect to the DB
\c signalk

-- output display format
\x on

-- Assign vessel_id var
SELECT v.vessel_id as "vessel_id_kapla" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
SELECT v.vessel_id as "vessel_id_aava" FROM auth.vessels v WHERE v.owner_email = 'demo+aava@openplotter.cloud' \gset

-- user_role
SET ROLE user_role;
SELECT set_config('vessel.id', :'vessel_id_kapla', false) IS NOT NULL as vessel_id;
-- insert fake request maplapse
\echo 'Insert fake request maplapse'
SELECT api.maplapse_record_fn('Kapla,?start_log=1&end_log=1&height=100vh');

-- maplapse_role
SET ROLE maplapse_role;

\echo 'GET pending maplapse task'
SELECT id as maplapse_id from process_queue where channel = 'maplapse_video' and processed is null order by stored asc limit 1 \gset
SELECT count(id) from process_queue where channel = 'maplapse_video' and processed is null limit 1;

\echo 'Update process on completion'
UPDATE process_queue SET processed = NOW() WHERE id = :'maplapse_id';

\echo 'Insert video availability notification in process queue'
INSERT INTO process_queue ("channel", "payload", "ref_id", "stored") VALUES ('new_video', CONCAT('video_', :'vessel_id_kapla'::TEXT, '_1', '_1.mp4'), :'vessel_id_kapla'::TEXT, NOW());
24
tests/sql/maplapse.sql.output
Normal file
@@ -0,0 +1,24 @@
current_database
------------------
signalk
(1 row)

You are now connected to database "signalk" as user "username".
Expanded display is on.
SET
-[ RECORD 1 ]
vessel_id | t

Insert fake request maplapse
-[ RECORD 1 ]------+--
maplapse_record_fn | t

SET
GET pending maplapse task
-[ RECORD 1 ]
count | 1

Update process on completion
UPDATE 1
Insert video availability notification in process queue
INSERT 0 1
81
tests/sql/mobilitydb.sql
Normal file
@@ -0,0 +1,81 @@
---------------------------------------------------------------------------
-- Listing
--

-- List current database
select current_database();

-- connect to the DB
\c signalk

-- output display format
\x on

-- Assign vessel_id var
SELECT v.vessel_id as "vessel_id_kapla" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
SELECT v.vessel_id as "vessel_id_aava" FROM auth.vessels v WHERE v.owner_email = 'demo+aava@openplotter.cloud' \gset

-- user_role
SET ROLE user_role;
-- Switch user as aava
SELECT set_config('vessel.id', :'vessel_id_aava', false) IS NOT NULL as vessel_id;

-- Update notes
\echo 'Add a note for an entry from a trip'
-- Get original value, should be empty
SELECT numInstants(trip), valueAtTimestamp(trip_notes,timestampN(trip,14)) from api.logbook where id = 3;
-- Create the string
SELECT concat('["fishing"@', timestampN(trip,14),',""@',timestampN(trip,15),']') as to_be_update FROM api.logbook where id = 3 \gset
--\echo :to_be_update
-- Update the notes
SELECT api.update_trip_notes_fn(3, :'to_be_update');
-- Compare with previous value, should include "fishing"
SELECT valueAtTimestamp(trip_notes,timestampN(trip,14)) from api.logbook where id = 3;

-- Delete notes
\echo 'Delete an entry from a trip'
-- Get original value, should be 45
SELECT numInstants(trip), jsonb_array_length(api.export_logbook_geojson_point_trip_fn(id)->'features') from api.logbook where id = 3;
-- Extract the timestamps of the invalid coords
--SELECT timestampN(trip,14) as "to_be_delete" FROM api.logbook where id = 3 \gset
SELECT concat('[', timestampN(trip,13),',',timestampN(trip,14),')') as to_be_delete FROM api.logbook where id = 3 \gset
--\echo :to_be_delete
-- Delete the entry for all trip sequence
SELECT api.delete_trip_entry_fn(3, :'to_be_delete');
-- Compare with previous value, should be 44
SELECT numInstants(trip), jsonb_array_length(api.export_logbook_geojson_point_trip_fn(id)->'features') from api.logbook where id = 3;

-- Export PostGIS geography from a trip
\echo 'Export PostGIS geography from trajectory'
SELECT ST_IsValid(trajectory(trip)::geometry) IS TRUE FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);

-- Export GeoJSON from a trip
\echo 'Export GeoJSON with properties from a trip'
SELECT jsonb_array_length(api.export_logbook_geojson_point_trip_fn(3)->'features');

-- Export GPX from a trip
\echo 'Export GPX from a trip'
SELECT api.export_logbook_gpx_trip_fn(3) IS NOT NULL;

-- Export KML from a trip
\echo 'Export KML from a trip'
SELECT api.export_logbook_kml_trip_fn(3) IS NOT NULL;

-- Switch user as kapla
SELECT set_config('vessel.id', :'vessel_id_kapla', false) IS NOT NULL as vessel_id;

-- Export timelapse as Geometry LineString from a trip
\echo 'Export timelapse as Geometry LineString from a trip'
SELECT api.export_logbooks_geojson_linestring_trips_fn(1,2) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);

-- Export timelapse as Geometry Point from a trip
\echo 'Export timelapse as Geometry Point from a trip'
SELECT api.export_logbooks_geojson_point_trips_fn(1,2) IS NOT NULL FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);

-- Export GPX from trips
\echo 'Export GPX from trips'
SELECT api.export_logbooks_gpx_trips_fn(1,2) IS NOT NULL FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);

-- Export KML from trips
\echo 'Export KML from trips'
SELECT api.export_logbooks_kml_trips_fn(1,2) IS NOT NULL FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
71
tests/sql/mobilitydb.sql.output
Normal file
@@ -0,0 +1,71 @@
current_database
------------------
signalk
(1 row)

You are now connected to database "signalk" as user "username".
Expanded display is on.
SET
-[ RECORD 1 ]
vessel_id | t

Add a note for an entry from a trip
-[ RECORD 1 ]----+---
numinstants | 45
valueattimestamp |

-[ RECORD 1 ]--------+-
update_trip_notes_fn |

-[ RECORD 1 ]----+--------
valueattimestamp | fishing

Delete an entry from a trip
-[ RECORD 1 ]------+---
numinstants | 45
jsonb_array_length | 45

-[ RECORD 1 ]--------+-
delete_trip_entry_fn |

-[ RECORD 1 ]------+---
numinstants | 44
jsonb_array_length | 44

Export PostGIS geography from trajectory
-[ RECORD 1 ]
?column? | t
-[ RECORD 2 ]
?column? | t

Export GeoJSON with properties from a trip
-[ RECORD 1 ]------+---
jsonb_array_length | 44

Export GPX from a trip
-[ RECORD 1 ]
?column? | t

Export KML from a trip
-[ RECORD 1 ]
?column? | t

-[ RECORD 1 ]
vessel_id | t

Export timelapse as Geometry LineString from a trip
-[ RECORD 1 ]-------------------------------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
export_logbooks_geojson_linestring_trips_fn | {"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": {"type": "LineString", "coordinates": [[23.530866667, 60.077666667], [23.52355, 60.07065], [23.515866667, 60.0637], [23.507866667, 60.056716667], [23.500533333, 60.04915], [23.493, 60.041633333], [23.485466667, 60.033983333], [23.479033333, 60.026216667], [23.47295, 60.01835], [23.46745, 60.01045], [23.461033333, 60.003516667], [23.45415, 59.99755], [23.445683333, 59.99235], [23.438766667, 59.989266667], [23.435116667, 59.987866667], [23.43165, 59.986333333], [23.4292, 59.984833333], [23.432566667, 59.9862], [23.43375, 59.987266667], [23.431566667, 59.98615], [23.4307, 59.98565], [23.429383333, 59.984683333], [23.421066667, 59.978233333], [23.431, 59.977716667], [23.432133333, 59.976883333], [23.4321, 59.976883333]]}, "properties": {}}]}

Export timelapse as Geometry Point from a trip
-[ RECORD 1 ]
?column? | t

Export GPX from trips
-[ RECORD 1 ]
?column? | t

Export KML from trips
-[ RECORD 1 ]
?column? | t
@@ -22,15 +22,15 @@ count | 21

Test monitoring_view3 for user
-[ RECORD 1 ]
count | 3736
count | 3775

Test monitoring_voltage for user
-[ RECORD 1 ]
count | 47
count | 48

Test monitoring_temperatures for user
-[ RECORD 1 ]
count | 120
count | 121

Test monitoring_humidity for user
-[ RECORD 1 ]
52
tests/sql/qgis.sql
Normal file
@@ -0,0 +1,52 @@
---------------------------------------------------------------------------
-- Listing
--

-- List current database
select current_database();

-- connect to the DB
\c signalk

-- output display format
\x on

-- Assign vessel_id var
SELECT v.vessel_id as "vessel_id_kapla" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
SELECT v.vessel_id as "vessel_id_aava" FROM auth.vessels v WHERE v.owner_email = 'demo+aava@openplotter.cloud' \gset

-- qgis
SET ROLE qgis_role;

-- Get BBOX Extent from SQL query for a log:
-- "^/log_(\w+)_(\d+).png$"
-- "^/log_(\w+)_(\d+)_sat.png$
-- require a log_id, optional image width and height, scale_out
\echo 'Get BBOX Extent from SQL query for a log: "^/log_(\w+)_(\d+).png$"'
SELECT public.qgis_bbox_py_fn(null, 1);
SELECT public.qgis_bbox_py_fn(null, 3);
-- "^/log_(\w+)_(\d+)_line.png$"
\echo 'Get BBOX Extent from SQL query for a log as line: "^/log_(\w+)_(\d+)_line.png$"'
SELECT public.qgis_bbox_py_fn(null, 1, 333, 216, False);
SELECT public.qgis_bbox_py_fn(null, 3, 333, 216, False);
-- Get BBOX Extent from SQL query for all logs by vessel_id
-- "^/logs_(\w+)_(\d+).png$"
-- require a vessel_id, optional image width and height, scale_out
\echo 'Get BBOX Extent from SQL query for all logs by vessel_id: "^/logs_(\w+)_(\d+).png$"'
SELECT public.qgis_bbox_py_fn(:'vessel_id_kapla'::TEXT);
SELECT public.qgis_bbox_py_fn(:'vessel_id_aava'::TEXT);
-- Get BBOX Extent from SQL query for all logs by vessel_id
-- "^/logs_(\w+)_(\d+).png$"
-- require a vessel_id, optional image width and height, scale_out
\echo 'Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_(\w+)_(\d+)_(\d+).png$"'
SELECT public.qgis_bbox_py_fn(:'vessel_id_kapla'::TEXT, 1, 2);
SELECT public.qgis_bbox_py_fn(:'vessel_id_aava'::TEXT, 3, 4);
-- require a vessel_id, optional image width and height, scale_out as in Apache
\echo 'Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_((\w+)_(\d+)_(\d+)).png$"'
SELECT public.qgis_bbox_trip_py_fn(CONCAT(:'vessel_id_kapla'::TEXT, '_', 1, '_',2));
SELECT public.qgis_bbox_trip_py_fn(CONCAT(:'vessel_id_aava'::TEXT, '_', 3, '_', 4));

--SELECT set_config('vessel.id', :'vessel_id_kapla', false) IS NOT NULL as vessel_id;
-- SQL request from QGIS to fetch the necessary data base on vessel_id
|
||||
--SELECT id, vessel_id, name as logname, ST_Transform(track_geom, 3857) as track_geom, ROUND(distance, 2) as distance, ROUND(EXTRACT(epoch FROM duration)/3600,2) as duration,_from_time,_to_time FROM api.logbook where track_geom is not null and _to_time is not null ORDER BY _from_time DESC;
|
||||
SELECT count(*) FROM api.logbook where track_geom is not null and _to_time is not null;
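The bbox functions exercised above return a plain 'xmin,ymin,xmax,ymax' string, apparently in EPSG:3857 judging by the ST_Transform(track_geom, 3857) in the commented-out QGIS query. A minimal sketch, not part of the test file, of turning such a string back into a PostGIS envelope for cross-checking:

-- Hypothetical cross-check: parse the comma-separated bbox into an envelope geometry
WITH bbox AS (
    SELECT string_to_array(public.qgis_bbox_py_fn(null, 1), ',')::float8[] AS b
)
SELECT ST_AsText(ST_MakeEnvelope(b[1], b[2], b[3], b[4], 3857)) AS extent FROM bbox;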
46
tests/sql/qgis.sql.output
Normal file
@@ -0,0 +1,46 @@
current_database
------------------
signalk
(1 row)

You are now connected to database "signalk" as user "username".
Expanded display is on.
SET
Get BBOX Extent from SQL query for a log: "^/log_(w+)_(d+).png$"
-[ RECORD 1 ]---+------------------------------------------------------
qgis_bbox_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

-[ RECORD 1 ]---+----------------------------------------------------
qgis_bbox_py_fn | 2745681,8303937.662962963,2871529,8369891.337037037

Get BBOX Extent from SQL query for a log as line: "^/log_(w+)_(d+)_line.png$"
-[ RECORD 1 ]---+-------------------------------------------------------------------------
qgis_bbox_py_fn | 2570800.6277114027,8368634.173700442,2645441.4677270483,8417049.85371059

-[ RECORD 1 ]---+-----------------------------------------------------------------------
qgis_bbox_py_fn | 2752672.6236475753,8300633.73408079,2864537.04561218,8373194.440219993

Get BBOX Extent from SQL query for all logs by vessel_id: "^/logs_(w+)_(d+).png$"
-[ RECORD 1 ]---+------------------------------------------------------
qgis_bbox_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

-[ RECORD 1 ]---+------------------------------------------------------
qgis_bbox_py_fn | -1950837.4558303887,4864146,5068977.455830389,8543049

Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_(w+)_(d+)_(d+).png$"
-[ RECORD 1 ]---+-------------------------------------
qgis_bbox_py_fn | 2595383,4787988.0,2620859,11997696.0

-[ RECORD 1 ]---+---------------------------------------
qgis_bbox_py_fn | 90420,-201110377.5,3027720,214517572.5

Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_((w+)_(d+)_(d+)).png$"
-[ RECORD 1 ]--------+------------------------------------------------------
qgis_bbox_trip_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

-[ RECORD 1 ]--------+------------------------------------------------------
qgis_bbox_trip_py_fn | -1950837.4558303887,4864146,5068977.455830389,8543049

-[ RECORD 1 ]
count | 3
@@ -6,10 +6,10 @@
You are now connected to database "signalk" as user "username".
Expanded display is on.
-[ RECORD 1 ]--+-------------------------------
-server_version | 16.3 (Debian 16.3-1.pgdg120+1)
+server_version | 16.6 (Debian 16.6-1.pgdg120+1)

--[ RECORD 1 ]--------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-postgis_full_version | POSTGIS="3.4.2 c19ce56" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"
+-[ RECORD 1 ]--------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+postgis_full_version | POSTGIS="3.5.0 d2c3ca4" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" (compiled against PROJ 9.11.1) LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"

-[ RECORD 1 ]--------------------------------------------------------------------------------------
Name | citext
@@ -22,41 +22,46 @@ Version | 1.0
Schema | public
Description | transform between jsonb and plpython3u
-[ RECORD 3 ]--------------------------------------------------------------------------------------
+Name | mobilitydb
+Version | 1.2.0
+Schema | public
+Description | MobilityDB geospatial trajectory data management & analysis platform
+-[ RECORD 4 ]--------------------------------------------------------------------------------------
Name | moddatetime
Version | 1.0
Schema | public
Description | functions for tracking last modification time
--[ RECORD 4 ]--------------------------------------------------------------------------------------
+-[ RECORD 5 ]--------------------------------------------------------------------------------------
Name | pg_stat_statements
Version | 1.10
Schema | public
Description | track planning and execution statistics of all SQL statements executed
--[ RECORD 5 ]--------------------------------------------------------------------------------------
+-[ RECORD 6 ]--------------------------------------------------------------------------------------
Name | pgcrypto
Version | 1.3
Schema | public
Description | cryptographic functions
--[ RECORD 6 ]--------------------------------------------------------------------------------------
+-[ RECORD 7 ]--------------------------------------------------------------------------------------
Name | plpgsql
Version | 1.0
Schema | pg_catalog
Description | PL/pgSQL procedural language
--[ RECORD 7 ]--------------------------------------------------------------------------------------
+-[ RECORD 8 ]--------------------------------------------------------------------------------------
Name | plpython3u
Version | 1.0
Schema | pg_catalog
Description | PL/Python3U untrusted procedural language
--[ RECORD 8 ]--------------------------------------------------------------------------------------
+-[ RECORD 9 ]--------------------------------------------------------------------------------------
Name | postgis
-Version | 3.4.2
+Version | 3.5.0
Schema | public
Description | PostGIS geometry and geography spatial types and functions
--[ RECORD 9 ]--------------------------------------------------------------------------------------
+-[ RECORD 10 ]-------------------------------------------------------------------------------------
Name | timescaledb
-Version | 2.15.3
+Version | 2.17.2
Schema | public
Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
--[ RECORD 10 ]-------------------------------------------------------------------------------------
+-[ RECORD 11 ]-------------------------------------------------------------------------------------
Name | uuid-ossp
Version | 1.1
Schema | public
@@ -106,14 +111,14 @@ laninline | 13566
lanvalidator | 13567
lanacl |
-[ RECORD 5 ]-+-----------
-oid | 18168
+oid | 18190
lanname | plpython3u
lanowner | 10
lanispl | t
lanpltrusted | t
-lanplcallfoid | 18165
-laninline | 18166
-lanvalidator | 18167
+lanplcallfoid | 18187
+laninline | 18188
+lanvalidator | 18189
lanacl |

-[ RECORD 1 ]+-----------
@@ -180,25 +185,30 @@ Type | table
Owner | username
-[ RECORD 8 ]---------------------------------
Schema | public
-Name | ne_10m_geography_marine_polys
+Name | mobilitydb_opcache
Type | table
Owner | username
-[ RECORD 9 ]---------------------------------
Schema | public
+Name | ne_10m_geography_marine_polys
+Type | table
+Owner | username
+-[ RECORD 10 ]--------------------------------
+Schema | public
Name | ne_10m_geography_marine_polys_gid_seq
Type | sequence
Owner | username
--[ RECORD 10 ]--------------------------------
+-[ RECORD 11 ]--------------------------------
Schema | public
Name | process_queue
Type | table
Owner | username
--[ RECORD 11 ]--------------------------------
+-[ RECORD 12 ]--------------------------------
Schema | public
Name | process_queue_id_seq
Type | sequence
Owner | username
--[ RECORD 12 ]--------------------------------
+-[ RECORD 13 ]--------------------------------
Schema | public
Name | spatial_ref_sys
Type | table
@@ -232,10 +242,12 @@ schema_public | iso3166
-[ RECORD 7 ]-+------------------------------
schema_public | mid
-[ RECORD 8 ]-+------------------------------
-schema_public | ne_10m_geography_marine_polys
+schema_public | mobilitydb_opcache
-[ RECORD 9 ]-+------------------------------
-schema_public | process_queue
+schema_public | ne_10m_geography_marine_polys
-[ RECORD 10 ]+------------------------------
+schema_public | process_queue
+-[ RECORD 11 ]+------------------------------
schema_public | spatial_ref_sys

-[ RECORD 1 ]---------
@@ -653,22 +665,22 @@ reverse_geocode_py_fn | {"name": "Spain", "country_code": "es"}
Test geoip reverse_geoip_py_fn
Test overpass API overpass_py_fn
--[ RECORD 1 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-overpass_py_fn | {"fee": "yes", "vhf": "09", "name": "Port Olímpic", "phone": "+34 933561016", "leisure": "marina", "website": "https://portolimpic.barcelona/", "wikidata": "Q171204", "wikipedia": "ca:Port Olímpic de Barcelona", "addr:street": "Moll de Xaloc", "power_supply": "yes", "seamark:type": "harbour", "addr:postcode": "08005", "internet_access": "wlan", "wikimedia_commons": "Category:Port Olímpic (Barcelona)", "sanitary_dump_station": "yes", "seamark:harbour:category": "marina"}
+-[ RECORD 1 ]--+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+overpass_py_fn | {"fee": "yes", "vhf": "09", "name": "Port Olímpic", "phone": "+34 933561016", "leisure": "marina", "website": "https://portolimpic.barcelona/", "wikidata": "Q171204", "panoramax": "b637d864-4a5f-4d56-a68e-6c81d2c70292", "wikipedia": "ca:Port Olímpic de Barcelona", "check_date": "2024-09-16", "addr:street": "Moll de Xaloc", "power_supply": "yes", "seamark:type": "harbour", "addr:postcode": "08005", "internet_access": "wlan", "wikimedia_commons": "Category:Port Olímpic (Barcelona)", "sanitary_dump_station": "yes", "seamark:harbour:category": "marina"}

--[ RECORD 1 ]--+----------------------------------------------------------------------------------------------------------------------------------------------------------------------
-overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure": "marina", "name:ca": "Port de la Ginesta", "wikidata": "Q16621038", "wikipedia": "ca:Port Ginesta"}
+-[ RECORD 1 ]--+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure": "marina", "name:ca": "Port de la Ginesta", "wikidata": "Q16621038", "wikipedia": "ca:Port Ginesta", "check_date": "2024-08-23"}

-[ RECORD 1 ]--+----------------------------------------------
overpass_py_fn | {"name": "Norra hamnen", "leisure": "marina"}
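Side note: overpass_py_fn evidently returns the raw OSM tag set as a JSON object, so individual tags can be picked out with the ->> operator. A minimal sketch against a literal copy of the record above (no PostgSail function call assumed, since the function's arguments are not shown in this diff):

-- Hypothetical illustration: extract tags from an overpass_py_fn-style result
SELECT tags->>'name' AS name, tags->>'leisure' AS leisure
FROM (SELECT '{"name": "Norra hamnen", "leisure": "marina"}'::jsonb AS tags) s;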

--[ RECORD 1 ]-----------------------------------------------------------------------------------------------------------------------------------------------
-versions_fn | {"api_version" : "0.7.4", "sys_version" : "PostgreSQL 16.3", "timescaledb" : "2.15.3", "postgis" : "3.4.2", "postgrest" : "PostgREST 12.2.2"}
+-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------------------------------
+versions_fn | {"api_version" : "0.8.0", "sys_version" : "PostgreSQL 16.6", "mobilitydb" : "1.2.0", "timescaledb" : "2.17.2", "postgis" : "3.5.0", "postgrest" : "PostgREST 12.2.3"}

-[ RECORD 1 ]-----------------
-api_version | 0.7.4
-sys_version | PostgreSQL 16.3
-timescaledb | 2.15.3
-postgis | 3.4.2
-postgrest | PostgREST 12.2.2
+api_version | 0.8.0
+sys_version | PostgreSQL 16.6
+timescaledb | 2.17.2
+postgis | 3.5.0
+postgrest | PostgREST 12.2.3

@@ -168,6 +168,14 @@ else
echo mocha index5.js
exit 1
fi
+# Anonymous API unit tests
+$mymocha index6.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report6.html
+if [ $? -eq 0 ]; then
+echo OK
+else
+echo mocha index6.js
+exit 1
+fi

# Anonymous SQL unit tests
psql ${PGSAIL_DB_URI} < sql/anonymous.sql > output/anonymous.sql.output
@@ -195,6 +203,58 @@ else
exit 1
fi

+# Stats SQL unit tests
+#psql ${PGSAIL_DB_URI} < sql/stats.sql > output/stats.sql.output
+#diff sql/stats.sql.output output/stats.sql.output > /dev/null
+#diff -u sql/stats.sql.output output/stats.sql.output | wc -l
+#echo 0
+#if [ $? -eq 0 ]; then
+# echo SQL stats.sql OK
+#else
+# echo SQL stats.sql FAILED
+# diff -u sql/stats.sql.output output/stats.sql.output
+# exit 1
+#fi

+# MobilityDB SQL unit tests
+psql ${PGSAIL_DB_URI} < sql/mobilitydb.sql > output/mobilitydb.sql.output
+diff sql/mobilitydb.sql.output output/mobilitydb.sql.output > /dev/null
+#diff -u sql/mobilitydb.sql.output output/mobilitydb.sql.output | wc -l
+#echo 0
+if [ $? -eq 0 ]; then
+echo SQL mobilitydb.sql OK
+else
+echo SQL mobilitydb.sql FAILED
+diff -u sql/mobilitydb.sql.output output/mobilitydb.sql.output
+exit 1
+fi

+# qgis SQL unit tests
+psql ${PGSAIL_DB_URI} < sql/qgis.sql > output/qgis.sql.output
+diff sql/qgis.sql.output output/qgis.sql.output > /dev/null
+#diff -u sql/qgis.sql.output output/qgis.sql.output | wc -l
+#echo 0
+if [ $? -eq 0 ]; then
+echo SQL qgis.sql OK
+else
+echo SQL qgis.sql FAILED
+diff -u sql/qgis.sql.output output/qgis.sql.output
+exit 1
+fi

+# maplapse SQL unit tests
+psql ${PGSAIL_DB_URI} < sql/maplapse.sql > output/maplapse.sql.output
+diff sql/maplapse.sql.output output/maplapse.sql.output > /dev/null
+#diff -u sql/maplapse.sql.output output/maplapse.sql.output | wc -l
+#echo 0
+if [ $? -eq 0 ]; then
+echo SQL maplapse.sql OK
+else
+echo SQL maplapse.sql FAILED
+diff -u sql/maplapse.sql.output output/maplapse.sql.output
+exit 1
+fi

# Download and update openapi documentation
wget ${PGSAIL_API_URI} -O openapi.json
#echo 0