Mirror of https://github.com/xbgmsharp/postgsail.git (synced 2025-09-17 03:07:47 +00:00)
Compare commits: v0.9.1...f56bb6f538 (58 commits)
Commit SHA1s: f56bb6f538, d1578e3786, 3bda948646, 45b2e3f28f, 7ec74d7b82, 759a51d426, 0b76cb3d0f, b4b0c1d014, ee44420e30, 5f25a57c3c, 883c875e39, 0282823938, 4546b75e0d, 0c28ed6a0f, 57a754cdc0, 46f16fb077, 4c80d041cc, 12e4baf662, faf62ed9a3, 40bdb9620f, 8fe84ea80c, 54eefe582d, d60af8c7b0, b505c98723, 976ea85538, 347af573c2, 60ba821af7, 9759045b0a, a6c351c936, b7efb9636f, 14d19a5394, 66b61f9d65, b50c8f5007, a9e1990184, 59b52515e3, f528456c08, a76c25b19f, 00d2247549, b84ac31da1, 63cf5d24a5, fe7c1dc1e5, 10a26942c7, a5fb08fa42, 25c74fd75a, fa45782553, e237391e8a, dcf4eaca9b, ade15f538d, f2cf604dab, ad2e95bfa8, 02130a9e4f, 29fa3863eb, 7cd06fced4, 564b85f58c, da317dce87, 08ee757fa5, 76ade18d6b, e6852a43f1
README.md (21 lines changed)
@@ -24,6 +24,8 @@
<a href="https://github.com/sponsors/xbgmsharp">Sponsors</a>
.
<a href="https://discord.gg/uuZrwz4dCS">Discord</a>
.
<a href="https://deepwiki.com/xbgmsharp/postgsail/">DeepWiki</a>
</p>
</p>

@@ -32,19 +34,24 @@
[](https://github.com/xbgmsharp/postgsail/issues)
[](http://makeapullrequest.com)

[](https://github.com/xbgmsharp/postgsail/stargazers)
[](https://deepwiki.com/xbgmsharp/postgsail)

[](https://github.com/xbgmsharp/postgsail/actions/workflows/db-test.yml)
[](https://github.com/xbgmsharp/postgsail/actions/workflows/frontend-test.yml)
[](https://github.com/xbgmsharp/postgsail/actions/workflows/grafana-test.yml)

signalk-postgsail:
[](https://github.com/xbgmsharp/signalk-postgsail/releases/latest)
[](https://github.com/xbgmsharp/signalk-postgsail/releases/latest)

postgsail-backend:
[](https://github.com/xbgmsharp/postgsail/releases/latest)

postgsail-frontend:
[](https://github.com/xbgmsharp/vuestic-postgsail/releases/latest)
[](https://github.com/xbgmsharp/vuestic-postgsail/releases/latest)

postgsail-telegram-bot:
[](https://github.com/xbgmsharp/postgsail-telegram-bot/releases/latest)
[](https://github.com/xbgmsharp/postgsail-telegram-bot/releases/latest)

[](https://www.bestpractices.dev/projects/8124)

@@ -55,7 +62,7 @@ postgsail-telegram-bot:
- [About The Project](#about-the-project)
- [Features](#features)
- [Cloud-hosted PostgSail](#cloud-hosted-postgsail)
- [On-Premise (for free)](#on-premise-for-free)
- [On-Premise](#on-premise)
- [Roadmap](#roadmap)
- [Contributing](#contributing)
- [Creating A Pull Request](#creating-a-pull-request)
@@ -106,10 +113,14 @@ Remove the hassle of running PostgSail yourself. Here you can skip the technical

PostgSail Cloud is Open Source and free for personal use with a single vessel. If you wish to manage multiple boats, contact us.

## On-Premise (for free)
PostgSail is free to use, but is not free to make or host. The stability and accuracy of PostgSail depends on its volunteers and donations from its users. Please consider making an annual recurring gift to PostgSail.

## On-Premise

Self-host PostgSail where you want and how you want. There are no restrictions, you’re in full control. [Install Guide](https://github.com/xbgmsharp/postgsail/blob/main/docs/README.md)

PostgSail is free to use, but is not free to make or host. The stability and accuracy of PostgSail depends on its volunteers and donations from its users. Please consider making an annual recurring gift to PostgSail.

## Roadmap

See the [open issues](https://github.com/xbgmsharp/postgsail/issues) for a list of proposed features (and known issues).

@@ -53,9 +53,10 @@ erDiagram
    jsonb configuration
    timestamp_with_time_zone created_at "{NOT_NULL}"
    double_precision height
    integer id "{NOT_NULL}"
    text ip "Store vessel ip address"
    text ip
    double_precision length
    numeric mmsi
    text mmsi
    text name
    text platform
    text plugin_version "{NOT_NULL}"
@@ -67,6 +68,19 @@ erDiagram
    text vessel_id "{NOT_NULL}"
}

api_metadata_ext {
    timestamp_with_time_zone created_at "{NOT_NULL}"
    bytea image "Store user boat image in bytea format"
    text image_b64
    text image_type "Store user boat image type in text format"
    timestamp_with_time_zone image_updated_at
    text image_url
    text make_model "Store user make & model in text format"
    text polar "Store polar data in CSV notation as used on ORC sailboat data"
    timestamp_with_time_zone polar_updated_at
    text vessel_id "{NOT_NULL}"
}

api_metrics {
    double_precision anglespeedapparent
    text client_id "Deprecated client_id to be removed"
@@ -117,6 +131,17 @@ erDiagram
    integer stay_code "{NOT_NULL}"
}

api_stays_ext {
    timestamp_with_time_zone created_at "{NOT_NULL}"
    bytea image "Store stays image in bytea format"
    text image_b64
    text image_type "Store stays image type in text format"
    timestamp_with_time_zone image_updated_at
    text image_url
    integer stay_id "{NOT_NULL}"
    text vessel_id "{NOT_NULL}"
}

auth_accounts {
    timestamp_with_time_zone connected_at "{NOT_NULL}"
    timestamp_with_time_zone created_at "{NOT_NULL}"
@@ -275,12 +300,15 @@ erDiagram
api_logbook }o--|| api_moorages : ""
api_logbook }o--|| api_moorages : ""
api_metadata }o--|| auth_vessels : ""
api_metadata_ext |o--|| api_metadata : ""
api_metrics }o--|| api_metadata : ""
api_moorages }o--|| api_metadata : ""
api_stays }o--|| api_metadata : ""
api_stays_ext }o--|| api_metadata : ""
api_moorages }o--|| api_stays_at : ""
api_stays }o--|| api_moorages : ""
api_stays }o--|| api_stays_at : ""
api_stays_ext |o--|| api_stays : ""
auth_otp |o--|| auth_accounts : ""
auth_vessels }o--|| auth_accounts : ""
```
docs/Self-hosted-update-guide.md (new file, 277 lines)
@@ -0,0 +1,277 @@
# Self-hosted update guide

In this guide we update a self-hosted installation from version 0.7.2 to version 0.9.3. When updating from or to other versions the principles remain the same.

The installation we are upgrading was set up in April 2024 using the installation instructions found on the PostgSail GitHub site. The platform is an Ubuntu 22.04 virtual machine.
Before the upgrade, around 120 trips were logged. Needless to say, we don't want to lose our data.

Unfortunately, there is no automatic update path available. This may change, but for now we had to follow the general update instructions.

## General update instructions

- Make a backup.
- Update the containers.
- Update possible extensions.
- Run database migrations.
- Additional data migration.
- Update the SignalK client.

## Let's go

### Tools used

In addition to the tools that are already installed as part of Ubuntu and PostgSail, I used DBeaver to examine the database from my Windows desktop.

<https://dbeaver.io/download/>

### Make a backup

Start by making a backup of the database, the docker-compose.yml and the .env files. Note that in my case the database was stored in a host folder; later versions use a Docker volume. To copy the database, the containers must be stopped.

```bash
cd postgsail
mkdir backup
docker compose stop
cp .env docker-compose.yml backup/
docker compose cp -a db:/var/lib/postgresql/data backup/db-data
```

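In addition to the file copy, a logical dump can be a useful second backup because it can be restored selectively. This is a minimal sketch, assuming the compose service is named `db` and the application database is `signalk` as elsewhere in this guide; run it while the database container is still running, i.e. before `docker compose stop`:

```bash
# Optional: logical dump of the signalk database into the backup folder.
# -T disables pseudo-TTY allocation so the binary dump is not corrupted.
docker compose exec -T db sh -c \
  'pg_dump --username "$POSTGRES_USER" --format=custom signalk' > backup/signalk.dump
```
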
### Update the containers

Make a note of the last migration in the initdb folder; in my case this was 99_migrations_202404.sql. Because I had used git clone, my migration state was somewhere in between 0.7.1 and 0.7.2, so I decided 99_migrations_202404.sql was the first migration to run.

Remove the containers:

```bash
docker compose down
```

Get the latest PostgSail from GitHub; we check out a specific tag to ensure we have a stable release version. If you installed it from a binary release, just update from the latest binary release.

```bash
git pull origin main
git fetch --all --tags
git checkout tags/v0.9.3
```

```text
Note: switching to 'tags/v0.9.3'.

You are in 'detached HEAD' state. You can look around, make experimental
changes and commit them, and you can discard any commits you make in this
state without impacting any branches by switching back to a branch.

If you want to create a new branch to retain commits you create, you may
do so (now or later) by using -c with the switch command. Example:

  git switch -c <new-branch-name>

Or undo this operation with:

  git switch -

Turn off this advice by setting config variable advice.detachedHead to false

HEAD is now at 12e4baf Release PostgSail 0.9.3
```

**Ensure the new docker-compose.yml file matches your database folder or volume setting; adjust as needed.**

Get the latest containers.

```bash
docker compose pull
```

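Optionally, you can list the images now available locally to confirm which versions were pulled:

```bash
# Show local images with their tags and ages.
docker image ls
```
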
### Update possible extensions

Start the database container.

```bash
docker compose up -d db
```

Open a psql shell in the database container.

```bash
docker compose exec db sh
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB"
\c signalk;
```

Check which extensions can be updated; be sure to run this from the signalk database:

```sql
SELECT name, default_version, installed_version FROM pg_available_extensions where default_version <> installed_version;
```

The PostGIS extension can be upgraded with this SQL query:

```sql
SELECT postgis_extensions_upgrade();
```

Updating TimescaleDB requires running from a new session; use the following commands (note the -X option, which is necessary):

```bash
docker compose exec db sh
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" -X
```

Then run the following SQL commands from the psql shell:

```sql
ALTER EXTENSION timescaledb UPDATE;
CREATE EXTENSION IF NOT EXISTS timescaledb_toolkit;
ALTER EXTENSION timescaledb_toolkit UPDATE;
```

Other extensions may need to be checked as well. In my case, the PostGIS extension was the essential one.

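To verify the upgrades, re-run the earlier check; once every extension has been updated it should return no rows:

```sql
-- Extensions whose installed version still differs from the packaged default.
SELECT name, default_version, installed_version
FROM pg_available_extensions
WHERE default_version <> installed_version;
```
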
### Run database migrations

Then run the migrations; adjust start and end to the first and last migration files to execute.

```bash
start=202404; end=202507
for f in $(ls ./docker-entrypoint-initdb.d/99_migrations_*.sql | sort); do
  s=$(basename "$f" | sed -E 's/^99_migrations_([0-9]{6})\.sql$/\1/')
  if [[ "$s" < "$start" || "$s" > "$end" ]]; then continue; fi
  echo "Running $f"
  psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < "$f"
done
```

Or line by line:

```bash
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202404.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202405.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202406.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202407.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202408.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202409.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202410.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202411.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202412.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202501.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202504.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202505.sql
psql --username "$POSTGRES_USER" --dbname "$POSTGRES_DB" < ./docker-entrypoint-initdb.d/99_migrations_202507.sql
```

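A quick sanity check after the migrations is the application version stored in the database; the 202507 migration sets it to 0.9.3:

```sql
-- Should report 0.9.3 after the 202507 migration has run.
SELECT value FROM public.app_settings WHERE name = 'app.version';
```
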
Now rebuild the web app.

```bash
docker compose build web
```

You may also need to run 99env.sh; check whether it applies to your setup.

Then we can start the other containers.

```bash
docker compose up -d
```

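Before opening the web UI it can help to confirm that the containers are up and to skim the logs for errors:

```bash
# Show container state, then the most recent log lines of all services.
docker compose ps
docker compose logs --tail=100
```
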
After everything has started, the web site should be accessible.

### Additional data migration

Depending on the starting version, additional data migration may be needed.
If the old trips are visible but the routes are not, we need to run an SQL script to recalculate the trip metadata.

```sql
DO $$
declare
    -- Recalculate the trip metadata
    logbook_rec record;
    avg_rec record;
    t_rec record;
    batch_size INTEGER := 20;
    offset_value INTEGER := 0;
    done BOOLEAN := FALSE;
    processed INTEGER := 0;
begin
    WHILE NOT done LOOP
        processed := 0;
        FOR logbook_rec IN
            SELECT *
                FROM api.logbook
                WHERE _from IS NOT NULL
                    AND _to IS NOT NULL
                    AND active IS FALSE
                    AND trip IS NULL
                    --AND trip_heading IS NULL
                    --AND vessel_id = '06b6d311ccfe'
                ORDER BY id DESC
                LIMIT batch_size -- OFFSET offset_value -- don't use OFFSET as it causes entries to be skipped
        LOOP
            processed := processed + 1;
            -- Update logbook entry with the latest metric data and calculate data
            PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);

            -- Calculate trip metadata
            avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
            --UPDATE api.logbook
            --    SET extra = jsonb_recursive_merge(extra, jsonb_build_object('avg_wind_speed', avg_rec.avg_wind_speed))
            --    WHERE id = logbook_rec.id;
            if avg_rec.count_metric IS NULL OR avg_rec.count_metric = 0 then
                -- We don't have the original metrics, we should read the geojson
                continue; -- return current row of SELECT
            end if;

            -- mobilitydb, add spatiotemporal sequence
            -- reduce the number of metrics by skipping rows or aggregating the time series
            -- By default the signalk plugin reports one entry every minute.
            IF avg_rec.count_metric < 30 THEN -- less than a ~20 min trip: keep all the data
                t_rec := logbook_update_metrics_short_fn(avg_rec.count_metric, logbook_rec._from_time, logbook_rec._to_time);
            ELSIF avg_rec.count_metric < 2000 THEN -- less than a ~33 h trip: skip rows
                t_rec := logbook_update_metrics_fn(avg_rec.count_metric, logbook_rec._from_time, logbook_rec._to_time);
            ELSE -- too many rows: aggregate the time series
                t_rec := logbook_update_metrics_timebucket_fn(avg_rec.count_metric, logbook_rec._from_time, logbook_rec._to_time);
            END IF;
            --RAISE NOTICE 'mobilitydb [%]', t_rec;
            IF t_rec.trajectory IS NULL THEN
                RAISE WARNING '-> process_logbook_queue_fn, vessel_id [%], invalid mobilitydb data [%] [%]', logbook_rec.vessel_id, logbook_rec.id, t_rec;
                RETURN;
            END IF;

            RAISE NOTICE '-> process_logbook_queue_fn, vessel_id [%], update entry logbook id:[%] start:[%] end:[%]', logbook_rec.vessel_id, logbook_rec.id, logbook_rec._from_time, logbook_rec._to_time;
            UPDATE api.logbook
                SET
                    trip = t_rec.trajectory,
                    trip_cog = t_rec.courseovergroundtrue,
                    trip_sog = t_rec.speedoverground,
                    trip_twa = t_rec.windspeedapparent,
                    trip_tws = t_rec.truewindspeed,
                    trip_twd = t_rec.truewinddirection,
                    trip_notes = t_rec.notes, -- don't overwrite existing user notes. **** Must set trip_notes otherwise replay is not working.
                    trip_status = t_rec.status,
                    trip_depth = t_rec.depth,
                    trip_batt_charge = t_rec.stateofcharge,
                    trip_batt_voltage = t_rec.voltage,
                    trip_temp_water = t_rec.watertemperature,
                    trip_temp_out = t_rec.outsidetemperature,
                    trip_pres_out = t_rec.outsidepressure,
                    trip_hum_out = t_rec.outsidehumidity,
                    trip_heading = t_rec.heading, -- heading True
                    trip_tank_level = t_rec.tankLevel, -- Tank currentLevel
                    trip_solar_voltage = t_rec.solarVoltage, -- solar voltage
                    trip_solar_power = t_rec.solarPower -- solar powerPanel
                WHERE id = logbook_rec.id;

        END LOOP;

        RAISE NOTICE '-> Processed:[%]', processed;
        IF processed = 0 THEN
            done := TRUE;
        ELSE
            offset_value := offset_value + batch_size;
        END IF;
    END LOOP;

END $$;
```

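To follow the progress of the script, you can count the logbook entries that still lack recalculated trip data; the script processes exactly these rows, so the count should drop towards zero (entries without usable metrics are skipped and will remain):

```sql
-- Logbook entries still missing recalculated trip data.
SELECT count(*) FROM api.logbook
WHERE _from IS NOT NULL AND _to IS NOT NULL
  AND active IS FALSE AND trip IS NULL;
```
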
### Update the SignalK client

The SignalK client can be updated from the SignalK Web UI. After the migration we updated it to version v0.5.0.

### Troubleshooting

During this migration several issues came up; they eventually boiled down to an extension that had not been updated and to permission issues.
frontend (2 lines changed)
Submodule frontend updated: 41e5f0d1b1...44c270ea8b
initdb/99_migrations_202505.sql (new file, 3454 lines)
File diff suppressed because it is too large.
initdb/99_migrations_202507.sql (new file, 734 lines)
@@ -0,0 +1,734 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- Copyright 2021-2025 Francois Lacroix <xbgmsharp@gmail.com>
|
||||
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
|
||||
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
|
||||
--
|
||||
-- Migration June/July 2025
|
||||
--
|
||||
-- List current database
|
||||
select current_database();
|
||||
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
\echo 'Timing mode is enabled'
|
||||
\timing
|
||||
|
||||
\echo 'Force timezone, just in case'
|
||||
set timezone to 'UTC';
|
||||
|
||||
-- Update plugin upgrade message
|
||||
UPDATE public.email_templates
|
||||
SET email_content='Hello __RECIPIENT__,
|
||||
Please upgrade your postgsail signalk plugin. Make sure you restart your Signalk instance after upgrading. Be sure to contact me if you encounter any issue.'
|
||||
WHERE "name"='skplugin_upgrade';
|
||||
|
||||
-- DROP FUNCTION api.login(text, text);
|
||||
-- Update api.login, update the connected_at field to the current time
|
||||
CREATE OR REPLACE FUNCTION api.login(email text, pass text)
|
||||
RETURNS auth.jwt_token
|
||||
LANGUAGE plpgsql
|
||||
SECURITY DEFINER
|
||||
AS $function$
|
||||
declare
|
||||
_role name;
|
||||
result auth.jwt_token;
|
||||
app_jwt_secret text;
|
||||
_email_valid boolean := false;
|
||||
_email text := email;
|
||||
_user_id text := null;
|
||||
_user_disable boolean := false;
|
||||
headers json := current_setting('request.headers', true)::json;
|
||||
client_ip text := coalesce(headers->>'x-client-ip', NULL);
|
||||
begin
|
||||
-- check email and password
|
||||
select auth.user_role(email, pass) into _role;
|
||||
if _role is null then
|
||||
-- HTTP/403
|
||||
--raise invalid_password using message = 'invalid user or password';
|
||||
-- HTTP/401
|
||||
raise insufficient_privilege using message = 'invalid user or password';
|
||||
end if;
|
||||
|
||||
-- Check if user is disabled due to abuse
|
||||
SELECT preferences['disable'],user_id INTO _user_disable,_user_id
|
||||
FROM auth.accounts a
|
||||
WHERE a.email = _email;
|
||||
IF _user_disable is True then
|
||||
-- due to the raise, the insert is never committed.
|
||||
--INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
-- VALUES ('account_disable', _email, now(), _user_id);
|
||||
RAISE sqlstate 'PT402' using message = 'Account disable, contact us',
|
||||
detail = 'Quota exceeded',
|
||||
hint = 'Upgrade your plan';
|
||||
END IF;
|
||||
|
||||
-- Check email_valid and generate OTP
|
||||
SELECT preferences['email_valid'],user_id INTO _email_valid,_user_id
|
||||
FROM auth.accounts a
|
||||
WHERE a.email = _email;
|
||||
IF _email_valid is null or _email_valid is False THEN
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('email_otp', _email, now(), _user_id);
|
||||
END IF;
|
||||
|
||||
-- Track IP per user to avoid abuse
|
||||
--RAISE WARNING 'api.login debug: [%],[%]', client_ip, login.email;
|
||||
IF client_ip IS NOT NULL THEN
|
||||
UPDATE auth.accounts a SET
|
||||
preferences = jsonb_recursive_merge(a.preferences, jsonb_build_object('ip', client_ip)),
|
||||
connected_at = NOW()
|
||||
WHERE a.email = login.email;
|
||||
END IF;
|
||||
|
||||
-- Get app_jwt_secret
|
||||
SELECT value INTO app_jwt_secret
|
||||
FROM app_settings
|
||||
WHERE name = 'app.jwt_secret';
|
||||
|
||||
--RAISE WARNING 'api.login debug: [%],[%],[%]', app_jwt_secret, _role, login.email;
|
||||
-- Generate jwt
|
||||
select jwt.sign(
|
||||
-- row_to_json(r), ''
|
||||
-- row_to_json(r)::json, current_setting('app.jwt_secret')::text
|
||||
row_to_json(r)::json, app_jwt_secret
|
||||
) as token
|
||||
from (
|
||||
select _role as role, login.email as email, -- TODO replace with user_id
|
||||
-- select _role as role, user_id as uid, -- add support in check_jwt
|
||||
extract(epoch from now())::integer + 60*60 as exp
|
||||
) r
|
||||
into result;
|
||||
return result;
|
||||
end;
|
||||
$function$
|
||||
;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION api.login IS 'Handle user login, returns a JWT token with user role and email.';
|
||||
|
||||
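For reference, a quick way to exercise the updated login function after this migration has been applied is to call it from psql; the credentials below are placeholders, not real accounts:

```sql
-- Returns an auth.jwt_token on success (placeholder credentials shown).
SELECT api.login('user@example.com', 'your-password');
```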
-- DROP FUNCTION api.monitoring_history_fn(in text, out jsonb);
|
||||
-- Update monitoring_history_fn to use custom user settings for metrics
|
||||
CREATE OR REPLACE FUNCTION api.monitoring_history_fn(time_interval text DEFAULT '24'::text, OUT history_metrics jsonb)
|
||||
RETURNS jsonb
|
||||
LANGUAGE plpgsql
|
||||
AS $function$
|
||||
DECLARE
|
||||
bucket_interval interval := '5 minutes';
|
||||
BEGIN
|
||||
RAISE NOTICE '-> monitoring_history_fn';
|
||||
SELECT CASE time_interval
|
||||
WHEN '24' THEN '5 minutes'
|
||||
WHEN '48' THEN '2 hours'
|
||||
WHEN '72' THEN '4 hours'
|
||||
WHEN '168' THEN '7 hours'
|
||||
ELSE '5 minutes'
|
||||
END bucket INTO bucket_interval;
|
||||
RAISE NOTICE '-> monitoring_history_fn % %', time_interval, bucket_interval;
|
||||
WITH history_table AS (
|
||||
SELECT time_bucket(bucket_interval::INTERVAL, mt.time) AS time_bucket,
|
||||
avg(-- Water Temperature
|
||||
COALESCE(
|
||||
mt.metrics->'water'->>'temperature',
|
||||
mt.metrics->>(md.configuration->>'waterTemperatureKey'),
|
||||
mt.metrics->>'environment.water.temperature'
|
||||
)::FLOAT) AS waterTemperature,
|
||||
avg(-- Inside Temperature
|
||||
COALESCE(
|
||||
mt.metrics->'temperature'->>'inside',
|
||||
mt.metrics->>(md.configuration->>'insideTemperatureKey'),
|
||||
mt.metrics->>'environment.inside.temperature'
|
||||
)::FLOAT) AS insideTemperature,
|
||||
avg(-- Outside Temperature
|
||||
COALESCE(
|
||||
mt.metrics->'temperature'->>'outside',
|
||||
mt.metrics->>(md.configuration->>'outsideTemperatureKey'),
|
||||
mt.metrics->>'environment.outside.temperature'
|
||||
)::FLOAT) AS outsideTemperature,
|
||||
avg(-- Wind Speed True
|
||||
COALESCE(
|
||||
mt.metrics->'wind'->>'speed',
|
||||
mt.metrics->>(md.configuration->>'windSpeedKey'),
|
||||
mt.metrics->>'environment.wind.speedTrue'
|
||||
)::FLOAT) AS windSpeedOverGround,
|
||||
avg(-- Inside Humidity
|
||||
COALESCE(
|
||||
mt.metrics->'humidity'->>'inside',
|
||||
mt.metrics->>(md.configuration->>'insideHumidityKey'),
|
||||
mt.metrics->>'environment.inside.relativeHumidity',
|
||||
mt.metrics->>'environment.inside.humidity'
|
||||
)::FLOAT) AS insideHumidity,
|
||||
avg(-- Outside Humidity
|
||||
COALESCE(
|
||||
mt.metrics->'humidity'->>'outside',
|
||||
mt.metrics->>(md.configuration->>'outsideHumidityKey'),
|
||||
mt.metrics->>'environment.outside.relativeHumidity',
|
||||
mt.metrics->>'environment.outside.humidity'
|
||||
)::FLOAT) AS outsideHumidity,
|
||||
avg(-- Outside Pressure
|
||||
COALESCE(
|
||||
mt.metrics->'pressure'->>'outside',
|
||||
mt.metrics->>(md.configuration->>'outsidePressureKey'),
|
||||
mt.metrics->>'environment.outside.pressure'
|
||||
)::FLOAT) AS outsidePressure,
|
||||
avg(--Inside Pressure
|
||||
COALESCE(
|
||||
mt.metrics->'pressure'->>'inside',
|
||||
mt.metrics->>(md.configuration->>'insidePressureKey'),
|
||||
mt.metrics->>'environment.inside.pressure'
|
||||
)::FLOAT) AS insidePressure,
|
||||
avg(-- Battery Charge (State of Charge)
|
||||
COALESCE(
|
||||
mt.metrics->'battery'->>'charge',
|
||||
mt.metrics->>(md.configuration->>'stateOfChargeKey'),
|
||||
mt.metrics->>'electrical.batteries.House.capacity.stateOfCharge'
|
||||
)::FLOAT) AS batteryCharge,
|
||||
avg(-- Battery Voltage
|
||||
COALESCE(
|
||||
mt.metrics->'battery'->>'voltage',
|
||||
mt.metrics->>(md.configuration->>'voltageKey'),
|
||||
mt.metrics->>'electrical.batteries.House.voltage'
|
||||
)::FLOAT) AS batteryVoltage,
|
||||
avg(-- Water Depth
|
||||
COALESCE(
|
||||
mt.metrics->'water'->>'depth',
|
||||
mt.metrics->>(md.configuration->>'depthKey'),
|
||||
mt.metrics->>'environment.depth.belowTransducer'
|
||||
)::FLOAT) AS depth
|
||||
FROM api.metrics mt
|
||||
JOIN api.metadata md ON md.vessel_id = mt.vessel_id
|
||||
WHERE mt.time > (NOW() AT TIME ZONE 'UTC' - INTERVAL '1 hours' * time_interval::NUMERIC)
|
||||
GROUP BY time_bucket
|
||||
ORDER BY time_bucket asc
|
||||
)
|
||||
SELECT jsonb_agg(history_table) INTO history_metrics FROM history_table;
|
||||
END
|
||||
$function$
|
||||
;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION api.monitoring_history_fn(in text, out jsonb) IS 'Export metrics from a time period 24h, 48h, 72h, 7d';
|
||||
|
||||
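For reference, once this migration is applied the function can be called directly to export the aggregated history; '24' is the default window and '48', '72' and '168' are the other documented values:

```sql
-- Aggregated monitoring history for the last 24 hours as JSONB.
SELECT api.monitoring_history_fn('24');
```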
-- DROP FUNCTION public.cron_alerts_fn();
|
||||
-- Update cron_alerts_fn to check for alerts, filters out empty strings (""), so they are not included in the result.
|
||||
CREATE OR REPLACE FUNCTION public.cron_alerts_fn()
|
||||
RETURNS void
|
||||
LANGUAGE plpgsql
|
||||
AS $function$
|
||||
DECLARE
|
||||
alert_rec record;
|
||||
default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
|
||||
last_metric TIMESTAMPTZ;
|
||||
metric_rec record;
|
||||
app_settings JSONB;
|
||||
user_settings JSONB;
|
||||
alerting JSONB;
|
||||
_alarms JSONB;
|
||||
alarms TEXT;
|
||||
alert_default JSONB := '{
|
||||
"low_pressure_threshold": 990,
|
||||
"high_wind_speed_threshold": 30,
|
||||
"low_water_depth_threshold": 1,
|
||||
"min_notification_interval": 6,
|
||||
"high_pressure_drop_threshold": 12,
|
||||
"low_battery_charge_threshold": 90,
|
||||
"low_battery_voltage_threshold": 12.5,
|
||||
"low_water_temperature_threshold": 10,
|
||||
"low_indoor_temperature_threshold": 7,
|
||||
"low_outdoor_temperature_threshold": 3
|
||||
}';
|
||||
BEGIN
|
||||
-- Check for new event notification pending update
|
||||
RAISE NOTICE 'cron_alerts_fn';
|
||||
FOR alert_rec in
|
||||
SELECT
|
||||
a.user_id,a.email,v.vessel_id,
|
||||
COALESCE((a.preferences->'alert_last_metric')::TEXT, default_last_metric::TEXT) as last_metric,
|
||||
(alert_default || ( -- Filters out empty strings (""), so they are not included in the result.
|
||||
SELECT jsonb_object_agg(key, value)
|
||||
FROM jsonb_each(a.preferences->'alerting')
|
||||
WHERE value <> '""'
|
||||
)) as alerting,
|
||||
(a.preferences->'alarms')::JSONB as alarms,
|
||||
m.configuration as config
|
||||
FROM auth.accounts a
|
||||
LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
|
||||
LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
|
||||
WHERE (a.preferences->'alerting'->'enabled')::boolean = True
|
||||
AND m.active = True
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_alerts_fn for [%]', alert_rec;
|
||||
PERFORM set_config('vessel.id', alert_rec.vessel_id, false);
|
||||
PERFORM set_config('user.email', alert_rec.email, false);
|
||||
--RAISE WARNING 'public.cron_process_alert_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(alert_rec.vessel_id::TEXT);
|
||||
RAISE NOTICE '-> cron_alerts_fn checking user_settings [%]', user_settings;
|
||||
-- Get all metrics from the last last_metric avg by 5 minutes
|
||||
FOR metric_rec in
|
||||
SELECT time_bucket('5 minutes', m.time) AS time_bucket,
|
||||
avg(-- Inside Temperature
|
||||
COALESCE(
|
||||
mt.metrics->'temperature'->>'inside',
|
||||
mt.metrics->>(md.configuration->>'insideTemperatureKey'),
|
||||
mt.metrics->>'environment.inside.temperature'
|
||||
)::FLOAT) AS intemp,
|
||||
avg(-- Wind Speed True
|
||||
COALESCE(
|
||||
mt.metrics->'wind'->>'speed',
|
||||
mt.metrics->>(md.configuration->>'windSpeedKey'),
|
||||
mt.metrics->>'environment.wind.speedTrue'
|
||||
)::FLOAT) AS wind,
|
||||
avg(-- Water Depth
|
||||
COALESCE(
|
||||
mt.metrics->'water'->>'depth',
|
||||
mt.metrics->>(md.configuration->>'depthKey'),
|
||||
mt.metrics->>'environment.depth.belowTransducer'
|
||||
)::FLOAT) AS watdepth,
|
||||
avg(-- Outside Temperature
|
||||
COALESCE(
|
||||
m.metrics->'temperature'->>'outside',
|
||||
m.metrics->>(alert_rec.config->>'outsideTemperatureKey'),
|
||||
m.metrics->>'environment.outside.temperature'
|
||||
)::NUMERIC) AS outtemp,
|
||||
avg(-- Water Temperature
|
||||
COALESCE(
|
||||
m.metrics->'water'->>'temperature',
|
||||
m.metrics->>(alert_rec.config->>'waterTemperatureKey'),
|
||||
m.metrics->>'environment.water.temperature'
|
||||
)::NUMERIC) AS wattemp,
|
||||
avg(-- Outside Pressure
|
||||
COALESCE(
|
||||
m.metrics->'pressure'->>'outside',
|
||||
m.metrics->>(alert_rec.config->>'outsidePressureKey'),
|
||||
m.metrics->>'environment.outside.pressure'
|
||||
)::NUMERIC) AS pressure,
|
||||
avg(-- Battery Voltage
|
||||
COALESCE(
|
||||
m.metrics->'battery'->>'voltage',
|
||||
m.metrics->>(alert_rec.config->>'voltageKey'),
|
||||
m.metrics->>'electrical.batteries.House.voltage'
|
||||
)::NUMERIC) AS voltage,
|
||||
avg(-- Battery Charge (State of Charge)
|
||||
COALESCE(
|
||||
m.metrics->'battery'->>'charge',
|
||||
m.metrics->>(alert_rec.config->>'stateOfChargeKey'),
|
||||
m.metrics->>'electrical.batteries.House.capacity.stateOfCharge'
|
||||
)::NUMERIC) AS charge
|
||||
FROM api.metrics m
|
||||
WHERE vessel_id = alert_rec.vessel_id
|
||||
AND m.time >= alert_rec.last_metric::TIMESTAMPTZ
|
||||
GROUP BY time_bucket
|
||||
ORDER BY time_bucket ASC LIMIT 100
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_alerts_fn checking metrics [%]', metric_rec;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking alerting [%]', alert_rec.alerting;
|
||||
--RAISE NOTICE '-> cron_alerts_fn checking debug [%] [%]', kelvinToCel(metric_rec.intemp), (alert_rec.alerting->'low_indoor_temperature_threshold');
|
||||
IF metric_rec.intemp IS NOT NULL AND public.kelvintocel(metric_rec.intemp::NUMERIC) < (alert_rec.alerting->'low_indoor_temperature_threshold')::NUMERIC then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug indoor_temp [%]', (alert_rec.alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug indoor_temp [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_indoor_temperature_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_indoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.intemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.intemp) ||' date:'|| metric_rec.time_bucket ||' "}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold';
|
||||
END IF;
|
||||
IF metric_rec.outtemp IS NOT NULL AND public.kelvintocel(metric_rec.outtemp::NUMERIC) < (alert_rec.alerting->>'low_outdoor_temperature_threshold')::NUMERIC then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug outdoor_temp [%]', (alert_rec.alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug outdoor_temp [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_outdoor_temperature_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_outdoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.outtemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.outtemp) ||' date:'|| metric_rec.time_bucket ||' "}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold';
|
||||
END IF;
|
||||
IF metric_rec.wattemp IS NOT NULL AND public.kelvintocel(metric_rec.wattemp::NUMERIC) < (alert_rec.alerting->>'low_water_temperature_threshold')::NUMERIC then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug water_temp [%]', (alert_rec.alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug water_temp [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_water_temperature_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_water_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.wattemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_water_temperature_threshold value:'|| kelvinToCel(metric_rec.wattemp) ||' date:'|| metric_rec.time_bucket ||' "}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold';
|
||||
END IF;
|
||||
IF metric_rec.watdepth IS NOT NULL AND metric_rec.watdepth::NUMERIC < (alert_rec.alerting->'low_water_depth_threshold')::NUMERIC then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug water_depth [%]', (alert_rec.alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug water_depth [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_water_depth_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_water_depth_threshold": {"value": '|| metric_rec.watdepth ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_water_depth_threshold value:'|| ROUND(metric_rec.watdepth,2) ||' date:'|| metric_rec.time_bucket ||' "}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold';
|
||||
END IF;
|
||||
if metric_rec.pressure IS NOT NULL AND metric_rec.pressure::NUMERIC < (alert_rec.alerting->'high_pressure_drop_threshold')::NUMERIC then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug pressure [%]', (alert_rec.alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug pressure [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'high_pressure_drop_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"high_pressure_drop_threshold": {"value": '|| metric_rec.pressure ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "high_pressure_drop_threshold value:'|| ROUND(metric_rec.pressure,2) ||' date:'|| metric_rec.time_bucket ||' "}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold';
|
||||
END IF;
|
||||
IF metric_rec.wind IS NOT NULL AND metric_rec.wind::NUMERIC > (alert_rec.alerting->'high_wind_speed_threshold')::NUMERIC then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug wind [%]', (alert_rec.alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug wind [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'high_wind_speed_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"high_wind_speed_threshold": {"value": '|| metric_rec.wind ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "high_wind_speed_threshold value:'|| ROUND(metric_rec.wind,2) ||' date:'|| metric_rec.time_bucket ||' "}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold';
|
||||
END IF;
|
||||
IF metric_rec.voltage IS NOT NULL AND metric_rec.voltage::NUMERIC < (alert_rec.alerting->'low_battery_voltage_threshold')::NUMERIC then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug voltage [%]', (alert_rec.alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug voltage [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_battery_voltage_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_battery_voltage_threshold": {"value": '|| metric_rec.voltage ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_battery_voltage_threshold value:'|| ROUND(metric_rec.voltage,2) ||' date:'|| metric_rec.time_bucket ||' "}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold';
|
||||
END IF;
|
||||
IF metric_rec.charge IS NOT NULL AND (metric_rec.charge::NUMERIC*100) < (alert_rec.alerting->'low_battery_charge_threshold')::NUMERIC then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_battery_charge_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_battery_charge_threshold": {"value": '|| (metric_rec.charge*100) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_battery_charge_threshold value:'|| ROUND(metric_rec.charge::NUMERIC*100,2) ||' date:'|| metric_rec.time_bucket ||' "}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold';
|
||||
END IF;
|
||||
-- Record last metrics time
|
||||
SELECT metric_rec.time_bucket INTO last_metric;
|
||||
END LOOP;
|
||||
PERFORM api.update_user_preferences_fn('{alert_last_metric}'::TEXT, last_metric::TEXT);
|
||||
END LOOP;
|
||||
END;
|
||||
$function$
|
||||
;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION public.cron_alerts_fn() IS 'init by pg_cron to check for alerts';
|
||||
|
||||
-- DROP FUNCTION public.process_pre_logbook_fn(int4);
|
||||
-- Update process_pre_logbook_fn to detect and avoid logbooks with more than 1000NM in less than 15h
|
||||
CREATE OR REPLACE FUNCTION public.process_pre_logbook_fn(_id integer)
|
||||
RETURNS void
|
||||
LANGUAGE plpgsql
|
||||
AS $function$
|
||||
DECLARE
|
||||
logbook_rec record;
|
||||
avg_rec record;
|
||||
geo_rec record;
|
||||
_invalid_time boolean;
|
||||
_invalid_interval boolean;
|
||||
_invalid_distance boolean;
|
||||
_invalid_ratio boolean;
|
||||
count_metric numeric;
|
||||
previous_stays_id numeric;
|
||||
current_stays_departed text;
|
||||
current_stays_id numeric;
|
||||
current_stays_active boolean;
|
||||
timebucket boolean;
|
||||
BEGIN
|
||||
-- If _id is not NULL
|
||||
IF _id IS NULL OR _id < 1 THEN
|
||||
RAISE WARNING '-> process_pre_logbook_fn invalid input %', _id;
|
||||
RETURN;
|
||||
END IF;
|
||||
-- Get the logbook record with all necessary fields exist
|
||||
SELECT * INTO logbook_rec
|
||||
FROM api.logbook
|
||||
WHERE active IS false
|
||||
AND id = _id
|
||||
AND _from_lng IS NOT NULL
|
||||
AND _from_lat IS NOT NULL
|
||||
AND _to_lng IS NOT NULL
|
||||
AND _to_lat IS NOT NULL;
|
||||
-- Ensure the query is successful
|
||||
IF logbook_rec.vessel_id IS NULL THEN
|
||||
RAISE WARNING '-> process_pre_logbook_fn invalid logbook %', _id;
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
|
||||
--RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
||||
|
||||
-- Check if all metrics are within 50meters base on geo loc
|
||||
count_metric := logbook_metrics_dwithin_fn(logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT, logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
|
||||
RAISE NOTICE '-> process_pre_logbook_fn logbook_metrics_dwithin_fn count:[%]', count_metric;
|
||||
|
||||
-- Calculate logbook data average and geo
|
||||
-- Update logbook entry with the latest metric data and calculate data
|
||||
avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
||||
geo_rec := logbook_update_geom_distance_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
||||
|
||||
-- Avoid/ignore/delete logbook stationary movement or time sync issue
|
||||
-- Check time start vs end
|
||||
SELECT logbook_rec._to_time::TIMESTAMPTZ < logbook_rec._from_time::TIMESTAMPTZ INTO _invalid_time;
|
||||
-- Is distance is less than 0.010
|
||||
SELECT geo_rec._track_distance < 0.010 INTO _invalid_distance;
|
||||
-- Is duration is less than 100sec
|
||||
SELECT (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ) < (100::text||' secs')::interval INTO _invalid_interval;
|
||||
-- If we have more than 800NM in less than 15h
|
||||
IF geo_rec._track_distance >= 800 AND (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ) < (15::text||' hours')::interval THEN
|
||||
_invalid_distance := True;
|
||||
_invalid_interval := True;
|
||||
--RAISE NOTICE '-> process_pre_logbook_fn invalid logbook data id [%], _invalid_distance [%], _invalid_interval [%]', logbook_rec.id, _invalid_distance, _invalid_interval;
|
||||
END IF;
|
||||
-- If we have less than 20 metrics or less than 0.5NM or less than avg 0.5knts
|
||||
-- Is within metrics represent more or equal than 60% of the total entry
|
||||
IF count_metric::NUMERIC <= 20 OR geo_rec._track_distance < 0.5 OR avg_rec.avg_speed < 0.5 THEN
|
||||
SELECT (count_metric::NUMERIC / avg_rec.count_metric::NUMERIC) >= 0.60 INTO _invalid_ratio;
|
||||
END IF;
|
||||
-- if stationary fix data metrics,logbook,stays,moorage
|
||||
IF _invalid_time IS True OR _invalid_distance IS True
|
||||
OR _invalid_interval IS True OR count_metric = avg_rec.count_metric
|
||||
OR _invalid_ratio IS True
|
||||
OR avg_rec.count_metric <= 3 THEN
|
||||
RAISE NOTICE '-> process_pre_logbook_fn invalid logbook data id [%], _invalid_time [%], _invalid_distance [%], _invalid_interval [%], count_metric_in_zone [%], count_metric_log [%], _invalid_ratio [%]',
|
||||
logbook_rec.id, _invalid_time, _invalid_distance, _invalid_interval, count_metric, avg_rec.count_metric, _invalid_ratio;
|
||||
-- Update metrics status to moored
|
||||
UPDATE api.metrics
|
||||
SET status = 'moored'
|
||||
WHERE time >= logbook_rec._from_time::TIMESTAMPTZ
|
||||
AND time <= logbook_rec._to_time::TIMESTAMPTZ
|
||||
AND vessel_id = current_setting('vessel.id', false);
|
||||
-- Update logbook
|
||||
UPDATE api.logbook
|
||||
SET notes = 'invalid logbook data, stationary need to fix metrics?'
|
||||
WHERE vessel_id = current_setting('vessel.id', false)
|
||||
AND id = logbook_rec.id;
|
||||
-- Get related stays
|
||||
SELECT id,departed,active INTO current_stays_id,current_stays_departed,current_stays_active
|
||||
FROM api.stays s
|
||||
WHERE s.vessel_id = current_setting('vessel.id', false)
|
||||
AND s.arrived = logbook_rec._to_time::TIMESTAMPTZ;
|
||||
-- Update related stays
|
||||
UPDATE api.stays s
|
||||
SET notes = 'invalid stays data, stationary need to fix metrics?'
|
||||
WHERE vessel_id = current_setting('vessel.id', false)
|
||||
AND arrived = logbook_rec._to_time::TIMESTAMPTZ;
|
||||
-- Find previous stays
|
||||
SELECT id INTO previous_stays_id
|
||||
FROM api.stays s
|
||||
WHERE s.vessel_id = current_setting('vessel.id', false)
|
||||
AND s.arrived < logbook_rec._to_time::TIMESTAMPTZ
|
||||
ORDER BY s.arrived DESC LIMIT 1;
|
||||
-- Update previous stays with the departed time from current stays
|
||||
-- and set the active state from current stays
|
||||
UPDATE api.stays
|
||||
SET departed = current_stays_departed::TIMESTAMPTZ,
|
||||
active = current_stays_active
|
||||
WHERE vessel_id = current_setting('vessel.id', false)
|
||||
AND id = previous_stays_id;
|
||||
-- Clean up, remove invalid logbook and stay entry
|
||||
DELETE FROM api.logbook WHERE id = logbook_rec.id;
|
||||
RAISE WARNING '-> process_pre_logbook_fn delete invalid logbook [%]', logbook_rec.id;
|
||||
DELETE FROM api.stays WHERE id = current_stays_id;
|
||||
RAISE WARNING '-> process_pre_logbook_fn delete invalid stays [%]', current_stays_id;
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
--IF (logbook_rec.notes IS NULL) THEN -- run one time only
|
||||
-- -- If duration is over 24h or number of entry is over 400, check for stays and potential multiple logs with stationary location
|
||||
-- IF (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ) > INTERVAL '24 hours'
|
||||
-- OR avg_rec.count_metric > 400 THEN
|
||||
-- timebucket := public.logbook_metrics_timebucket_fn('15 minutes'::TEXT, logbook_rec.id, logbook_rec._from_time::TIMESTAMPTZ, logbook_rec._to_time::TIMESTAMPTZ);
|
||||
-- -- If true exit current process as the current logbook need to be re-process.
|
||||
-- IF timebucket IS True THEN
|
||||
-- RETURN;
|
||||
-- END IF;
|
||||
-- ELSE
|
||||
-- timebucket := public.logbook_metrics_timebucket_fn('5 minutes'::TEXT, logbook_rec.id, logbook_rec._from_time::TIMESTAMPTZ, logbook_rec._to_time::TIMESTAMPTZ);
|
||||
-- -- If true exit current process as the current logbook need to be re-process.
|
||||
-- IF timebucket IS True THEN
|
||||
-- RETURN;
|
||||
-- END IF;
|
||||
-- END IF;
|
||||
--END IF;
|
||||
|
||||
-- Add logbook entry to process queue for later processing
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('new_logbook', logbook_rec.id, NOW(), current_setting('vessel.id', true));
|
||||
|
||||
END;
|
||||
$function$
|
||||
;
|
||||
|
||||
COMMENT ON FUNCTION public.process_pre_logbook_fn(int4) IS 'Detect/Avoid/ignore/delete logbook stationary movement or time sync issue';
|
||||
|
||||
-- Revoke security definer
|
||||
--ALTER FUNCTION api.update_logbook_observations_fn(_id integer, observations text) SECURITY INVOKER;
|
||||
--ALTER FUNCTION api.delete_logbook_fn(_id integer) SECURITY INVOKER;
|
||||
ALTER FUNCTION api.merge_logbook_fn(integer, integer) SECURITY INVOKER;
|
||||
|
||||
GRANT DELETE ON TABLE public.process_queue TO user_role;
|
||||
GRANT SELECT ON ALL TABLES IN SCHEMA api TO user_role;
|
||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
|
||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
|
||||
|
||||
GRANT UPDATE (status) ON api.metrics TO user_role;
|
||||
GRANT UPDATE ON api.logbook TO user_role;
|
||||
|
||||
DROP POLICY IF EXISTS api_user_role ON api.metrics;
|
||||
CREATE POLICY api_user_role ON api.metrics TO user_role
|
||||
USING (vessel_id = current_setting('vessel.id', false))
|
||||
WITH CHECK (vessel_id = current_setting('vessel.id', false));
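-- Behaviour sketch of the policy above (hypothetical vessel id, for illustration only):
-- once vessel.id is set, user_role can only read and write its own api.metrics rows.
--SET ROLE user_role;
--SELECT set_config('vessel.id', 'xxxx-hypothetical-vessel-id', false);
--SELECT count(*) FROM api.metrics; -- only rows matching the configured vessel_id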
|
||||
|
||||
-- Update version
|
||||
UPDATE public.app_settings
|
||||
SET value='0.9.3'
|
||||
WHERE "name"='app.version';
|
||||
|
||||
--\c postgres
|
||||
--UPDATE cron.job SET username = 'scheduler'; -- Update to scheduler
|
||||
--UPDATE cron.job SET username = current_user WHERE jobname = 'cron_vacuum'; -- Update to superuser for vacuum permissions
|
||||
--UPDATE cron.job SET username = current_user WHERE jobname = 'job_run_details_cleanup';
|
578
initdb/99_migrations_202508.sql
Normal file
@@ -0,0 +1,578 @@
|
||||
---------------------------------------------------------------------------
|
||||
-- Copyright 2021-2025 Francois Lacroix <xbgmsharp@gmail.com>
|
||||
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
|
||||
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
|
||||
--
|
||||
-- Migration August 2025
|
||||
--
|
||||
-- List current database
|
||||
select current_database();
|
||||
|
||||
-- connect to the DB
|
||||
\c signalk
|
||||
|
||||
\echo 'Timing mode is enabled'
|
||||
\timing
|
||||
|
||||
\echo 'Force timezone, just in case'
|
||||
set timezone to 'UTC';
|
||||
|
||||
-- Lint fix
|
||||
CREATE INDEX ON api.stays_ext (vessel_id);
|
||||
ALTER TABLE api.stays_ext FORCE ROW LEVEL SECURITY;
|
||||
ALTER TABLE api.metadata_ext FORCE ROW LEVEL SECURITY;
|
||||
ALTER TABLE api.metadata ADD PRIMARY KEY (vessel_id);
|
||||
COMMENT ON CONSTRAINT metadata_vessel_id_fkey ON api.metadata IS 'Link api.metadata with auth.vessels via vessel_id using FOREIGN KEY and REFERENCES';
COMMENT ON CONSTRAINT metrics_vessel_id_fkey ON api.metrics IS 'Link api.metrics with api.metadata via vessel_id using FOREIGN KEY and REFERENCES';
COMMENT ON CONSTRAINT logbook_vessel_id_fkey ON api.logbook IS 'Link api.logbook with api.metadata via vessel_id using FOREIGN KEY and REFERENCES';
COMMENT ON CONSTRAINT moorages_vessel_id_fkey ON api.moorages IS 'Link api.moorages with api.metadata via vessel_id using FOREIGN KEY and REFERENCES';
COMMENT ON CONSTRAINT stays_vessel_id_fkey ON api.stays IS 'Link api.stays with api.metadata via vessel_id using FOREIGN KEY and REFERENCES';
|
||||
COMMENT ON COLUMN api.logbook._from IS 'Name of the location where the log started, usually a moorage name';
|
||||
COMMENT ON COLUMN api.logbook._to IS 'Name of the location where the log ended, usually a moorage name';
|
||||
COMMENT ON COLUMN api.logbook.vessel_id IS 'Unique identifier for the vessel associated with the api.metadata entry';
|
||||
COMMENT ON COLUMN api.metrics.vessel_id IS 'Unique identifier for the vessel associated with the api.metadata entry';
|
||||
COMMENT ON COLUMN api.moorages.vessel_id IS 'Unique identifier for the vessel associated with the api.metadata entry';
|
||||
COMMENT ON COLUMN api.moorages.nominatim IS 'Output of the nominatim reverse geocoding service, see https://nominatim.org/release-docs/develop/api/Reverse/';
|
||||
COMMENT ON COLUMN api.moorages.overpass IS 'Output of the overpass API, see https://wiki.openstreetmap.org/wiki/Overpass_API';
|
||||
COMMENT ON COLUMN api.stays.vessel_id IS 'Unique identifier for the vessel associated with the api.metadata entry';
|
||||
COMMENT ON COLUMN api.stays_ext.vessel_id IS 'Unique identifier for the vessel associated with the api.metadata entry';
|
||||
COMMENT ON COLUMN api.metadata_ext.vessel_id IS 'Unique identifier for the vessel associated with the api.metadata entry';
|
||||
COMMENT ON COLUMN api.metadata.mmsi IS 'Maritime Mobile Service Identity (MMSI) number associated with the vessel, link to public.mid';
|
||||
COMMENT ON COLUMN api.metadata.ship_type IS 'Type of ship associated with the vessel, link to public.aistypes';
|
||||
COMMENT ON TRIGGER ts_insert_blocker ON api.metrics IS 'manage by timescaledb, prevent direct insert on hypertable api.metrics';
|
||||
COMMENT ON TRIGGER ensure_vessel_role_exists ON auth.vessels IS 'ensure vessel role exists';
|
||||
COMMENT ON TRIGGER encrypt_pass ON auth.accounts IS 'execute function auth.encrypt_pass()';
|
||||
|
||||
-- Fix typo in comment
|
||||
COMMENT ON FUNCTION public.new_account_entry_fn() IS 'trigger process_queue on INSERT for new account';
|
||||
-- Update missing comment on trigger
|
||||
COMMENT ON TRIGGER encrypt_pass ON auth.accounts IS 'execute function auth.encrypt_pass()';
|
||||
|
||||
-- Update new account email subject
|
||||
UPDATE public.email_templates
|
||||
SET email_subject='Welcome aboard!',
|
||||
email_content='Welcome aboard __RECIPIENT__,
|
||||
Congratulations!
|
||||
You successfully created an account.
|
||||
Keep in mind to register your vessel.
|
||||
Happy sailing!'
|
||||
WHERE "name"='new_account';
|
||||
|
||||
-- Update deactivated email subject
|
||||
UPDATE public.email_templates
|
||||
SET email_subject='We hate to see you go'
|
||||
WHERE "name"='deactivated';
|
||||
|
||||
-- Update first badge message
|
||||
UPDATE public.badges
|
||||
SET description='Nice work logging your first sail! You’re officially a helmsman now!
|
||||
While you’re at it, why not spread the word about Postgsail? ⭐
|
||||
If you found it useful, consider starring the project on GitHub, contributing, or even sponsoring the project to help steer it forward.
|
||||
Happy sailing! 🌊
|
||||
https://github.com/xbgmsharp/postgsail
|
||||
https://github.com/sponsors/xbgmsharp/'
|
||||
WHERE "name"='Helmsman';
|
||||
|
||||
-- DROP FUNCTION public.stays_delete_trigger_fn();
|
||||
-- Add public.stay_delete_trigger_fn trigger function to delete stays_ext and process_queue entries
|
||||
CREATE OR REPLACE FUNCTION public.stay_delete_trigger_fn()
|
||||
RETURNS trigger
|
||||
LANGUAGE plpgsql
|
||||
AS $function$
|
||||
BEGIN
|
||||
RAISE NOTICE 'stay_delete_trigger_fn [%]', OLD;
|
||||
-- If an api.stays row is deleted, delete the matching entry in the api.stays_ext table as well.
|
||||
IF EXISTS (SELECT FROM information_schema.tables
|
||||
WHERE table_schema = 'api'
|
||||
AND table_name = 'stays_ext') THEN
|
||||
-- Delete stays_ext
|
||||
DELETE FROM api.stays_ext s
|
||||
WHERE s.stay_id = OLD.id;
|
||||
END IF;
|
||||
-- Delete process_queue references
|
||||
DELETE FROM public.process_queue p
|
||||
WHERE p.payload = OLD.id::TEXT
|
||||
AND p.ref_id = OLD.vessel_id
|
||||
AND p.channel LIKE '%_stays';
|
||||
RETURN OLD;
|
||||
END;
|
||||
$function$
|
||||
;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION public.stay_delete_trigger_fn() IS 'When a stay is deleted, the related stays_ext entry needs to be deleted as well.';
|
||||
|
||||
-- Create trigger to delete stays_ext and process_queue entries
|
||||
CREATE TRIGGER stay_delete_trigger
    BEFORE DELETE ON api.stays
    FOR EACH ROW EXECUTE FUNCTION stay_delete_trigger_fn();
|
||||
|
||||
COMMENT ON TRIGGER stay_delete_trigger ON api.stays IS 'BEFORE DELETE ON api.stays, run public.stay_delete_trigger_fn to delete the process_queue reference and the related stays_ext entry.';
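-- Effect sketch (hypothetical id, for illustration only): deleting a stay also removes
-- the matching api.stays_ext row and any '%_stays' process_queue entries referencing it.
--DELETE FROM api.stays WHERE id = 42;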
|
||||
|
||||
-- Remove the trigger that duplicates the OTP validation entry on insert for a new account; it is handled by api.login
|
||||
DROP TRIGGER new_account_otp_validation_entry ON auth.accounts;
|
||||
|
||||
-- DEBUG
|
||||
DROP TRIGGER IF EXISTS debug_trigger ON public.process_queue;
|
||||
DROP FUNCTION IF EXISTS debug_trigger_fn;
|
||||
CREATE FUNCTION debug_trigger_fn() RETURNS trigger AS $debug$
|
||||
DECLARE
|
||||
BEGIN
|
||||
--RAISE NOTICE 'debug_trigger_fn [%]', NEW;
|
||||
IF NEW.channel = 'email_otp' THEN
|
||||
RAISE WARNING 'debug_trigger_fn: channel is email_otp [%]', NEW;
|
||||
END IF;
|
||||
RETURN NEW;
|
||||
END;
|
||||
$debug$ LANGUAGE plpgsql;
|
||||
CREATE TRIGGER debug_trigger AFTER INSERT ON public.process_queue
|
||||
FOR EACH ROW EXECUTE FUNCTION debug_trigger_fn();
|
||||
-- Description
|
||||
COMMENT ON TRIGGER debug_trigger ON public.process_queue IS 'Log debug information.';
|
||||
DROP TRIGGER debug_trigger ON public.process_queue;
|
||||
|
||||
-- DROP FUNCTION api.login(text, text);
|
||||
-- Update the api.login function to handle disabled users and email verification; update the error message to invalid_email_or_password
|
||||
CREATE OR REPLACE FUNCTION api.login(email text, pass text)
|
||||
RETURNS auth.jwt_token
|
||||
LANGUAGE plpgsql
|
||||
SECURITY DEFINER
|
||||
AS $function$
|
||||
declare
|
||||
_role name;
|
||||
result auth.jwt_token;
|
||||
app_jwt_secret text;
|
||||
_email_valid boolean := false;
|
||||
_email text := email;
|
||||
_user_id text := null;
|
||||
_user_disable boolean := false;
|
||||
headers json := current_setting('request.headers', true)::json;
|
||||
client_ip text := coalesce(headers->>'x-client-ip', NULL);
|
||||
begin
|
||||
-- check email and password
|
||||
select auth.user_role(email, pass) into _role;
|
||||
if _role is null then
|
||||
-- HTTP/403
|
||||
--raise invalid_password using message = 'invalid user or password';
|
||||
-- HTTP/401
|
||||
--raise insufficient_privilege using message = 'invalid user or password';
|
||||
-- HTTP/402 - to distinguish with JWT Expiration token
|
||||
RAISE sqlstate 'PT402' using message = 'invalid email or password',
|
||||
detail = 'invalid auth specification',
|
||||
hint = 'Use a valid email and password';
|
||||
end if;
|
||||
|
||||
-- Gather user information
|
||||
SELECT preferences['disable'], preferences['email_valid'], user_id
|
||||
INTO _user_disable,_email_valid,_user_id
|
||||
FROM auth.accounts a
|
||||
WHERE a.email = _email;
|
||||
|
||||
-- Check if the user is disabled due to abuse
|
||||
IF _user_disable::BOOLEAN IS TRUE THEN
|
||||
-- due to the raise, the insert is never committed.
|
||||
--INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
-- VALUES ('account_disable', _email, now(), _user_id);
|
||||
RAISE sqlstate 'PT402' using message = 'Account disabled, contact us',
|
||||
detail = 'Quota exceeded',
|
||||
hint = 'Upgrade your plan';
|
||||
END IF;
|
||||
|
||||
-- Check if email has been verified, if not generate OTP
|
||||
IF _email_valid::BOOLEAN IS NOT True THEN
|
||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||
VALUES ('email_otp', _email, now(), _user_id);
|
||||
END IF;
|
||||
|
||||
-- Track IP per user to avoid abuse
|
||||
--RAISE WARNING 'api.login debug: [%],[%]', client_ip, login.email;
|
||||
IF client_ip IS NOT NULL THEN
|
||||
UPDATE auth.accounts a SET
|
||||
preferences = jsonb_recursive_merge(a.preferences, jsonb_build_object('ip', client_ip)),
|
||||
connected_at = NOW()
|
||||
WHERE a.email = login.email;
|
||||
END IF;
|
||||
|
||||
-- Get app_jwt_secret
|
||||
SELECT value INTO app_jwt_secret
|
||||
FROM app_settings
|
||||
WHERE name = 'app.jwt_secret';
|
||||
|
||||
--RAISE WARNING 'api.login debug: [%],[%],[%]', app_jwt_secret, _role, login.email;
|
||||
-- Generate jwt
|
||||
select jwt.sign(
|
||||
-- row_to_json(r), ''
|
||||
-- row_to_json(r)::json, current_setting('app.jwt_secret')::text
|
||||
row_to_json(r)::json, app_jwt_secret
|
||||
) as token
|
||||
from (
|
||||
select _role as role, login.email as email, -- TODO replace with user_id
|
||||
-- select _role as role, user_id as uid, -- add support in check_jwt
|
||||
extract(epoch from now())::integer + 60*60 as exp
|
||||
) r
|
||||
into result;
|
||||
return result;
|
||||
end;
|
||||
$function$
|
||||
;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION api.login(text, text) IS 'Handle user login, returns a JWT token with user role and email.';
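-- Usage sketch (hypothetical credentials), typically exposed by PostgREST as POST /rpc/login:
--SELECT api.login('user@example.com', 'secret');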
|
||||
|
||||
-- DROP FUNCTION public.cron_windy_fn();
|
||||
-- Update cron_windy_fn to support custom user metrics
|
||||
CREATE OR REPLACE FUNCTION public.cron_windy_fn()
|
||||
RETURNS void
|
||||
LANGUAGE plpgsql
|
||||
AS $function$
|
||||
DECLARE
|
||||
windy_rec record;
|
||||
default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
|
||||
last_metric TIMESTAMPTZ := NOW();
|
||||
metric_rec record;
|
||||
windy_metric jsonb;
|
||||
app_settings jsonb;
|
||||
user_settings jsonb;
|
||||
windy_pws jsonb;
|
||||
BEGIN
|
||||
-- Check for new observations pending update
|
||||
RAISE NOTICE 'cron_process_windy_fn';
|
||||
-- Gather url from app settings
|
||||
app_settings := get_app_settings_fn();
|
||||
-- Find users with Windy active and with an active vessel
|
||||
-- Map account id to Windy Station ID
|
||||
FOR windy_rec in
|
||||
SELECT
|
||||
a.id,a.email,v.vessel_id,v.name,
|
||||
COALESCE((a.preferences->'windy_last_metric')::TEXT, default_last_metric::TEXT) as last_metric
|
||||
FROM auth.accounts a
|
||||
LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
|
||||
LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
|
||||
WHERE (a.preferences->'public_windy')::boolean = True
|
||||
AND m.active = True
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_windy_fn for [%]', windy_rec;
|
||||
PERFORM set_config('vessel.id', windy_rec.vessel_id, false);
|
||||
--RAISE WARNING 'public.cron_process_windy_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
|
||||
RAISE NOTICE '-> cron_process_windy_fn checking user_settings [%]', user_settings;
|
||||
-- Get all metrics from the last windy_last_metric avg by 5 minutes
|
||||
-- TODO: use json_agg to send all the data at once, but there is an issue with the Python jsonb decimal conversion.
|
||||
FOR metric_rec in
|
||||
SELECT time_bucket('5 minutes', mt.time) AS time_bucket,
|
||||
avg(-- Outside Temperature
|
||||
COALESCE(
|
||||
mt.metrics->'temperature'->>'outside',
|
||||
mt.metrics->>(md.configuration->>'outsideTemperatureKey'),
|
||||
mt.metrics->>'environment.outside.temperature'
|
||||
)::FLOAT) AS temperature,
|
||||
avg(-- Outside Pressure
|
||||
COALESCE(
|
||||
mt.metrics->'pressure'->>'outside',
|
||||
mt.metrics->>(md.configuration->>'outsidePressureKey'),
|
||||
mt.metrics->>'environment.outside.pressure'
|
||||
)::FLOAT) AS pressure,
|
||||
avg(-- Outside Humidity
|
||||
COALESCE(
|
||||
mt.metrics->'humidity'->>'outside',
|
||||
mt.metrics->>(md.configuration->>'outsideHumidityKey'),
|
||||
mt.metrics->>'environment.outside.relativeHumidity',
|
||||
mt.metrics->>'environment.outside.humidity'
|
||||
)::FLOAT) AS rh,
|
||||
avg(-- Wind Direction True
|
||||
COALESCE(
|
||||
mt.metrics->'wind'->>'direction',
|
||||
mt.metrics->>(md.configuration->>'windDirectionKey'),
|
||||
mt.metrics->>'environment.wind.directionTrue'
|
||||
)::FLOAT) AS winddir,
|
||||
avg(-- Wind Speed True
|
||||
COALESCE(
|
||||
mt.metrics->'wind'->>'speed',
|
||||
mt.metrics->>(md.configuration->>'windSpeedKey'),
|
||||
mt.metrics->>'environment.wind.speedTrue',
|
||||
mt.metrics->>'environment.wind.speedApparent'
|
||||
)::FLOAT) AS wind,
|
||||
max(-- Max Wind Speed True
|
||||
COALESCE(
|
||||
mt.metrics->'wind'->>'speed',
|
||||
mt.metrics->>(md.configuration->>'windSpeedKey'),
|
||||
mt.metrics->>'environment.wind.speedTrue',
|
||||
mt.metrics->>'environment.wind.speedApparent'
|
||||
)::FLOAT) AS gust,
|
||||
last(latitude, mt.time) AS lat,
|
||||
last(longitude, mt.time) AS lng
|
||||
FROM api.metrics mt
|
||||
JOIN api.metadata md ON md.vessel_id = mt.vessel_id
|
||||
WHERE md.vessel_id = windy_rec.vessel_id
|
||||
AND mt.time >= windy_rec.last_metric::TIMESTAMPTZ
|
||||
GROUP BY time_bucket
|
||||
ORDER BY time_bucket ASC LIMIT 100
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_windy_fn checking metrics [%]', metric_rec;
|
||||
if metric_rec.wind is null or metric_rec.temperature is null
|
||||
or metric_rec.pressure is null or metric_rec.rh is null then
|
||||
-- Ignore when there are no metrics.
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('windy_error'::TEXT, user_settings::JSONB);
|
||||
-- Disable windy
|
||||
PERFORM api.update_user_preferences_fn('{public_windy}'::TEXT, 'false'::TEXT);
|
||||
RETURN;
|
||||
end if;
|
||||
-- https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
|
||||
-- temp from Kelvin to Celsius
-- winddir from radians to degrees
-- rh from ratio to percentage
|
||||
SELECT jsonb_build_object(
|
||||
'dateutc', metric_rec.time_bucket,
|
||||
'station', windy_rec.id,
|
||||
'name', windy_rec.name,
|
||||
'lat', metric_rec.lat,
|
||||
'lon', metric_rec.lng,
|
||||
'wind', metric_rec.wind,
|
||||
'gust', metric_rec.gust,
|
||||
'pressure', metric_rec.pressure,
|
||||
'winddir', radiantToDegrees(metric_rec.winddir::numeric),
|
||||
'temp', kelvinToCel(metric_rec.temperature::numeric),
|
||||
'rh', valToPercent(metric_rec.rh::numeric)
|
||||
) INTO windy_metric;
|
||||
RAISE NOTICE '-> cron_process_windy_fn checking windy_metrics [%]', windy_metric;
|
||||
SELECT windy_pws_py_fn(windy_metric, user_settings, app_settings) into windy_pws;
|
||||
RAISE NOTICE '-> cron_process_windy_fn Windy PWS [%]', ((windy_pws->'header')::JSONB ? 'id');
|
||||
IF NOT((user_settings->'settings')::JSONB ? 'windy') and ((windy_pws->'header')::JSONB ? 'id') then
|
||||
RAISE NOTICE '-> cron_process_windy_fn new Windy PWS [%]', (windy_pws->'header')::JSONB->>'id';
|
||||
-- Send metrics to Windy
|
||||
PERFORM api.update_user_preferences_fn('{windy}'::TEXT, ((windy_pws->'header')::JSONB->>'id')::TEXT);
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('windy'::TEXT, user_settings::JSONB);
|
||||
-- Refresh user settings after first success
|
||||
user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
|
||||
END IF;
|
||||
-- Record last metrics time
|
||||
SELECT metric_rec.time_bucket INTO last_metric;
|
||||
END LOOP;
|
||||
PERFORM api.update_user_preferences_fn('{windy_last_metric}'::TEXT, last_metric::TEXT);
|
||||
END LOOP;
|
||||
END;
|
||||
$function$
|
||||
;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION public.cron_windy_fn() IS 'Run by pg_cron to create (or update) the station and upload observations to a Windy Personal Weather Station';
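-- Manual run sketch; in production the function is scheduled via pg_cron:
--SELECT public.cron_windy_fn();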
|
||||
|
||||
-- DROP FUNCTION api.merge_logbook_fn(int4, int4);
|
||||
-- Update merge_logbook_fn to handle more metrics and limit moorage deletion
|
||||
CREATE OR REPLACE FUNCTION api.merge_logbook_fn(id_start integer, id_end integer)
|
||||
RETURNS void
|
||||
LANGUAGE plpgsql
|
||||
AS $function$
|
||||
DECLARE
|
||||
logbook_rec_start record;
|
||||
logbook_rec_end record;
|
||||
log_name text;
|
||||
avg_rec record;
|
||||
geo_rec record;
|
||||
geojson jsonb;
|
||||
extra_json jsonb;
|
||||
t_rec record;
|
||||
BEGIN
|
||||
-- Ensure id_start and id_end are valid (not NULL and >= 1)
|
||||
IF (id_start IS NULL OR id_start < 1) OR (id_end IS NULL OR id_end < 1) THEN
|
||||
RAISE WARNING '-> merge_logbook_fn invalid input % %', id_start, id_end;
|
||||
RETURN;
|
||||
END IF;
|
||||
-- Ensure id_end is greater than id_start
|
||||
IF id_end <= id_start THEN
|
||||
RAISE WARNING '-> merge_logbook_fn invalid input % < %', id_end, id_start;
|
||||
RETURN;
|
||||
END IF;
|
||||
-- Get the start logbook record, ensuring all necessary fields exist
|
||||
SELECT * INTO logbook_rec_start
|
||||
FROM api.logbook
|
||||
WHERE active IS false
|
||||
AND id = id_start
|
||||
AND _from_lng IS NOT NULL
|
||||
AND _from_lat IS NOT NULL
|
||||
AND _to_lng IS NOT NULL
|
||||
AND _to_lat IS NOT NULL;
|
||||
-- Ensure the query is successful
|
||||
IF logbook_rec_start.vessel_id IS NULL THEN
|
||||
RAISE WARNING '-> merge_logbook_fn invalid logbook %', id_start;
|
||||
RETURN;
|
||||
END IF;
|
||||
-- Get the end logbook record, ensuring all necessary fields exist
|
||||
SELECT * INTO logbook_rec_end
|
||||
FROM api.logbook
|
||||
WHERE active IS false
|
||||
AND id = id_end
|
||||
AND _from_lng IS NOT NULL
|
||||
AND _from_lat IS NOT NULL
|
||||
AND _to_lng IS NOT NULL
|
||||
AND _to_lat IS NOT NULL;
|
||||
-- Ensure the query is successful
|
||||
IF logbook_rec_end.vessel_id IS NULL THEN
|
||||
RAISE WARNING '-> merge_logbook_fn invalid logbook %', id_end;
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
RAISE WARNING '-> merge_logbook_fn logbook start:% end:%', id_start, id_end;
|
||||
PERFORM set_config('vessel.id', logbook_rec_start.vessel_id, false);
|
||||
|
||||
-- Calculate logbook data average and geo
|
||||
-- Update logbook entry with the latest metric data and calculate data
|
||||
avg_rec := logbook_update_avg_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
|
||||
geo_rec := logbook_update_geom_distance_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
|
||||
|
||||
-- Process `propulsion.*.runTime` and `navigation.log`
|
||||
-- Calculate extra json
|
||||
extra_json := logbook_update_extra_json_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
|
||||
-- add the avg_wind_speed
|
||||
extra_json := extra_json || jsonb_build_object('avg_wind_speed', avg_rec.avg_wind_speed);
|
||||
|
||||
-- generate logbook name, concat _from_location and _to_location from moorage name
|
||||
SELECT CONCAT(logbook_rec_start._from, ' to ', logbook_rec_end._to) INTO log_name;
|
||||
|
||||
-- mobilitydb, add spatiotemporal sequence
-- reduce the number of metrics by skipping rows or aggregating the time-series
-- By default the signalk PostgSail plugin reports one entry every minute.
IF avg_rec.count_metric < 30 THEN -- for a short trip (less than ~20 min) keep all the data
t_rec := public.logbook_update_metrics_short_fn(avg_rec.count_metric, logbook_rec_start._from_time, logbook_rec_end._to_time);
ELSIF avg_rec.count_metric < 2000 THEN -- for a trip below ~33h, downsample by skipping rows
t_rec := public.logbook_update_metrics_fn(avg_rec.count_metric, logbook_rec_start._from_time, logbook_rec_end._to_time);
ELSE -- too much data, aggregate the time-series
t_rec := public.logbook_update_metrics_timebucket_fn(avg_rec.count_metric, logbook_rec_start._from_time, logbook_rec_end._to_time);
END IF;
|
||||
--RAISE NOTICE 'mobilitydb [%]', t_rec;
|
||||
IF t_rec.trajectory IS NULL THEN
|
||||
RAISE WARNING '-> process_logbook_queue_fn, vessel_id [%], invalid mobilitydb data [%] [%]', logbook_rec_start.vessel_id, logbook_rec_start.id, t_rec;
|
||||
RETURN;
|
||||
END IF;
|
||||
|
||||
RAISE NOTICE 'Updating valid logbook entry logbook id:[%] start:[%] end:[%]', logbook_rec_start.id, logbook_rec_start._from_time, logbook_rec_end._to_time;
|
||||
UPDATE api.logbook
|
||||
SET
|
||||
duration = (logbook_rec_end._to_time::TIMESTAMPTZ - logbook_rec_start._from_time::TIMESTAMPTZ),
|
||||
avg_speed = avg_rec.avg_speed,
|
||||
max_speed = avg_rec.max_speed,
|
||||
max_wind_speed = avg_rec.max_wind_speed,
|
||||
-- Set _to metrics from end logbook
|
||||
_to = logbook_rec_end._to,
|
||||
_to_moorage_id = logbook_rec_end._to_moorage_id,
|
||||
_to_lat = logbook_rec_end._to_lat,
|
||||
_to_lng = logbook_rec_end._to_lng,
|
||||
_to_time = logbook_rec_end._to_time,
|
||||
name = log_name,
|
||||
distance = geo_rec._track_distance,
|
||||
extra = extra_json,
|
||||
notes = NULL, -- reset pre_log process
|
||||
trip = t_rec.trajectory,
|
||||
trip_cog = t_rec.courseovergroundtrue,
|
||||
trip_sog = t_rec.speedoverground,
|
||||
trip_twa = t_rec.windspeedapparent,
|
||||
trip_tws = t_rec.truewindspeed,
|
||||
trip_twd = t_rec.truewinddirection,
|
||||
trip_notes = t_rec.notes,
|
||||
trip_status = t_rec.status,
|
||||
trip_depth = t_rec.depth,
|
||||
trip_batt_charge = t_rec.stateofcharge,
|
||||
trip_batt_voltage = t_rec.voltage,
|
||||
trip_temp_water = t_rec.watertemperature,
|
||||
trip_temp_out = t_rec.outsidetemperature,
|
||||
trip_pres_out = t_rec.outsidepressure,
|
||||
trip_hum_out = t_rec.outsidehumidity,
|
||||
trip_tank_level = t_rec.tankLevel,
|
||||
trip_solar_voltage = t_rec.solarVoltage,
|
||||
trip_solar_power = t_rec.solarPower,
|
||||
trip_heading = t_rec.heading
|
||||
WHERE id = logbook_rec_start.id;
|
||||
|
||||
/*** Deprecated removed column
|
||||
-- GeoJSON require track_geom field geometry linestring
|
||||
--geojson := logbook_update_geojson_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
||||
-- GeoJSON require trip* columns
|
||||
geojson := api.logbook_update_geojson_trip_fn(logbook_rec_start.id);
|
||||
UPDATE api.logbook
|
||||
SET -- Update the data column, it should be generate dynamically on request
|
||||
-- However there is a lot of dependencies to consider for a larger cleanup
|
||||
-- badges, qgis etc... depends on track_geom
|
||||
-- many export and others functions depends on track_geojson
|
||||
track_geojson = geojson,
|
||||
track_geog = trajectory(t_rec.trajectory),
|
||||
track_geom = trajectory(t_rec.trajectory)::geometry
|
||||
-- embedding = NULL,
|
||||
-- spatial_embedding = NULL
|
||||
WHERE id = logbook_rec_start.id;
|
||||
|
||||
-- GeoJSON Timelapse require track_geojson geometry point
|
||||
-- Add properties to the geojson for timelapse purpose
|
||||
PERFORM public.logbook_timelapse_geojson_fn(logbook_rec_start.id);
|
||||
***/
|
||||
-- Update logbook mark for deletion
|
||||
UPDATE api.logbook
|
||||
SET notes = 'mark for deletion'
|
||||
WHERE id = logbook_rec_end.id;
|
||||
-- Update related stays mark for deletion
|
||||
UPDATE api.stays
|
||||
SET notes = 'mark for deletion'
|
||||
WHERE arrived = logbook_rec_start._to_time;
|
||||
-- Update related moorages mark for deletion
|
||||
-- We can't blindly delete the stays and moorages as they might be referenced by other logs and stays
|
||||
--UPDATE api.moorages
|
||||
-- SET notes = 'mark for deletion'
|
||||
-- WHERE id = logbook_rec_start._to_moorage_id;
|
||||
|
||||
-- Clean up, remove invalid logbook and stay, moorage entry
|
||||
DELETE FROM api.logbook WHERE id = logbook_rec_end.id;
|
||||
RAISE WARNING '-> merge_logbook_fn delete logbook id [%]', logbook_rec_end.id;
|
||||
DELETE FROM api.stays WHERE arrived = logbook_rec_start._to_time;
|
||||
RAISE WARNING '-> merge_logbook_fn delete stay arrived [%]', logbook_rec_start._to_time;
|
||||
-- We can't blindly delete the stays and moorages as they might be referenced by other logs and stays
|
||||
-- Delete the moorage only if exactly one record exists with that id.
|
||||
DELETE FROM api.moorages
|
||||
WHERE id = logbook_rec_start._to_moorage_id
|
||||
AND (
|
||||
SELECT COUNT(*)
|
||||
FROM api.logbook
|
||||
WHERE _from_moorage_id = logbook_rec_start._to_moorage_id
|
||||
OR _to_moorage_id = logbook_rec_start._to_moorage_id
|
||||
) = 1;
|
||||
RAISE WARNING '-> merge_logbook_fn delete moorage id [%]', logbook_rec_start._to_moorage_id;
|
||||
END;
|
||||
$function$
|
||||
;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION api.merge_logbook_fn(int4, int4) IS 'Merge 2 logbooks by id, using the start of the lower log id and the end of the higher log id; update the calculated data as well (avg, geojson)';
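-- Usage sketch (hypothetical ids): merge logbook 2 into logbook 1, assuming both belong
-- to the same vessel and are consecutive trips; logbook 2 is deleted afterwards:
--SELECT api.merge_logbook_fn(1, 2);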
|
||||
|
||||
-- Add api.counts_fn to count logbook, moorages and stays entries
|
||||
CREATE OR REPLACE FUNCTION api.counts_fn()
|
||||
RETURNS jsonb
|
||||
LANGUAGE sql
|
||||
AS $function$
|
||||
SELECT jsonb_build_object(
|
||||
'logs', (SELECT COUNT(*) FROM api.logbook),
|
||||
'moorages', (SELECT COUNT(*) FROM api.moorages),
|
||||
'stays', (SELECT COUNT(*) FROM api.stays)
|
||||
);
|
||||
$function$;
|
||||
-- Description
|
||||
COMMENT ON FUNCTION api.counts_fn() IS 'count logbook, moorages and stays entries';
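-- Usage sketch, read-only: returns a jsonb object with the keys logs, moorages and stays:
--SELECT api.counts_fn();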
|
||||
|
||||
-- allow user_role to delete on api.stays_ext
|
||||
GRANT DELETE ON TABLE api.stays_ext TO user_role;
|
||||
|
||||
-- refresh permissions
|
||||
GRANT SELECT ON ALL TABLES IN SCHEMA api TO user_role;
|
||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
|
||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
|
||||
|
||||
-- Update version
|
||||
UPDATE public.app_settings
|
||||
SET value='0.9.4'
|
||||
WHERE "name"='app.version';
|
||||
|
||||
\c postgres
|
||||
-- Add a cron job to vacuum and clean up the public tables
|
||||
INSERT INTO cron.job (schedule,command,nodename,nodeport,database,username,active,jobname)
|
||||
VALUES ('1 1 * * 0','VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) public.process_queue,public.app_settings,public.email_templates;','/var/run/postgresql/',5432,'signalk','username',false,'cron_vacuum_public');
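-- The job is created disabled (active = false); a sketch to enable it once the
-- username has been adjusted for vacuum permissions:
--UPDATE cron.job SET active = true WHERE jobname = 'cron_vacuum_public';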
|
||||
|
||||
--UPDATE cron.job SET username = 'scheduler'; -- Update to scheduler
|
||||
--UPDATE cron.job SET username = current_user WHERE jobname = 'cron_vacuum'; -- Update to superuser for vacuum permissions
|
||||
--UPDATE cron.job SET username = current_user WHERE jobname = 'job_run_details_cleanup';
|
@@ -1 +1 @@
|
||||
0.9.1
|
||||
0.9.4
|
||||
|
File diff suppressed because one or more lines are too long
@@ -1,4 +1,5 @@
|
||||
FROM node:lts
|
||||
#FROM node:lts
|
||||
FROM mcr.microsoft.com/devcontainers/javascript-node:22
|
||||
|
||||
ENV DEBIAN_FRONTEND=noninteractive
|
||||
# Install and update the system
|
||||
|
@@ -1,5 +1,5 @@
|
||||
# PostgSail Unit Tests
|
||||
The Unit Tests allow to automatically validate api workflow.
|
||||
The Unit Tests automatically validate the SQL and API workflow.
|
||||
|
||||
## A global overview
|
||||
Based on `mocha` & `psql`
|
||||
|
@@ -199,7 +199,7 @@ let configtime = new Date().toISOString();
|
||||
preferences: { key: '{email_notifications}', value: false }, /* Disable email_notifications */
|
||||
vessel_metadata: {
|
||||
name: "aava",
|
||||
mmsi: "787654321",
|
||||
mmsi: "n/a",
|
||||
//client_id: "vessels.urn:mrn:imo:mmsi:787654321",
|
||||
length: "12",
|
||||
beam: "10",
|
||||
@@ -343,6 +343,18 @@ let configtime = new Date().toISOString();
|
||||
obj_name: 'settings'
|
||||
}
|
||||
},
|
||||
],
|
||||
meta_ext_fn: [
|
||||
{ url: '/metadata_ext?',
|
||||
res: {
|
||||
obj_name: 'configuration'
|
||||
}
|
||||
},
|
||||
{ url: `/metadata_ext?`,
|
||||
res: {
|
||||
obj_name: 'image'
|
||||
}
|
||||
},
|
||||
]
|
||||
}
|
||||
].forEach( function(test){
|
||||
@@ -621,7 +633,7 @@ request.set('User-Agent', 'PostgSail unit tests');
|
||||
.set('Authorization', `Bearer ${vessel_jwt}`)
|
||||
.set('Accept', 'application/json')
|
||||
.set('Content-Type', 'application/json')
|
||||
.set('Prefer', 'return=headers-only,resolution=merge-duplicates')
|
||||
.set('Prefer', 'missing=default,return=headers-only,resolution=merge-duplicates')
|
||||
.end(function(err,res){
|
||||
res.status.should.equal(201);
|
||||
//console.log(res.header);
|
||||
|
@@ -413,19 +413,19 @@ request.set('User-Agent', 'PostgSail unit tests');
|
||||
|
||||
describe("Vessel POST metadata, JWT vessel_role", function(){
|
||||
|
||||
it('/metadata', function(done) {
|
||||
it('/metadata?on_conflict=vessel_id', function(done) {
|
||||
request = supertest.agent(test.cname);
|
||||
request
|
||||
.post('/metadata')
|
||||
.post('/metadata?on_conflict=vessel_id')
|
||||
.send(test.vessel_metadata)
|
||||
.set('Authorization', `Bearer ${vessel_jwt}`)
|
||||
.set('Accept', 'application/json')
|
||||
.set('Content-Type', 'application/json')
|
||||
.set('Prefer', 'return=headers-only')
|
||||
.set('Prefer', 'missing=default,return=headers-only,resolution=merge-duplicates')
|
||||
.end(function(err,res){
|
||||
//console.log(res.body);
|
||||
//console.log(res.header);
|
||||
res.status.should.equal(201);
|
||||
res.status.should.equal(200);
|
||||
should.exist(res.header['server']);
|
||||
res.header['server'].should.match(new RegExp('postgrest','g'));
|
||||
done(err);
|
||||
|
@@ -688,7 +688,7 @@ var moment = require("moment");
|
||||
should.exist(res.body);
|
||||
let event = res.body;
|
||||
//console.log(event);
|
||||
// minimum events log for kapla & aava 13 + 4 email_otp = 17
|
||||
// minimum events log per users 6 + 4 logs + OTP one per login
|
||||
event.length.should.be.aboveOrEqual(11);
|
||||
done(err);
|
||||
});
|
||||
|
@@ -8,8 +8,5 @@
|
||||
"moment": "^2.29.4",
|
||||
"should": "^13.2.3",
|
||||
"supertest": "^6.3.3"
|
||||
},
|
||||
"devDependencies": {
|
||||
"schemalint": "^2.0.5"
|
||||
}
|
||||
}
|
||||
|
@@ -21,6 +21,9 @@ SELECT v.vessel_id as "vessel_id" FROM auth.vessels v WHERE v.owner_email = 'dem
|
||||
--\echo :"vessel_id"
|
||||
SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
|
||||
|
||||
-- user_role
|
||||
SET ROLE user_role;
|
||||
|
||||
-- Test logbook for user
|
||||
\echo 'logbook'
|
||||
SELECT count(*) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
|
||||
@@ -38,7 +41,7 @@ SELECT active,name IS NOT NULL AS name,geog,stay_code FROM api.stays WHERE vesse
|
||||
\echo 'eventlogs_view'
|
||||
SELECT count(*) from api.eventlogs_view;
|
||||
|
||||
-- Test event logs view for user
|
||||
-- Test stats logs view for user
|
||||
\echo 'stats_logs_fn'
|
||||
SELECT api.stats_logs_fn(null, null) INTO stats_jsonb;
|
||||
SELECT stats_logs_fn->'name' AS name,
|
||||
|
@@ -11,13 +11,14 @@ user_id | t
|
||||
-[ RECORD 1 ]
|
||||
vessel_id | t
|
||||
|
||||
SET
|
||||
logbook
|
||||
-[ RECORD 1 ]
|
||||
count | 2
|
||||
|
||||
logbook
|
||||
-[ RECORD 1 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Pojoviken to Formanshagen
|
||||
name | Pojoviken to Norra hamnen
|
||||
_from_time | t
|
||||
_to_time | t
|
||||
track_geojson | t
|
||||
@@ -30,7 +31,7 @@ max_wind_speed | 22.1
|
||||
notes |
|
||||
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 14.549999999999999}
|
||||
-[ RECORD 2 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Formanshagen to Ekenäs
|
||||
name | Norra hamnen to Ekenäs
|
||||
_from_time | t
|
||||
_to_time | t
|
||||
track_geojson | t
|
||||
@@ -70,19 +71,19 @@ count | 11
|
||||
|
||||
stats_logs_fn
|
||||
SELECT 1
|
||||
-[ RECORD 1 ]+----------
|
||||
name | "aava"
|
||||
count | 4
|
||||
max_speed | 9.5
|
||||
max_distance | 68.8677
|
||||
max_duration | "PT1H11M"
|
||||
?column? | 3
|
||||
?column? | 90.6030
|
||||
?column? | "PT2H44M"
|
||||
?column? | 44.2
|
||||
?column? | 3
|
||||
?column? | 4
|
||||
?column? | 4
|
||||
-[ RECORD 1 ]+--------
|
||||
name | "kapla"
|
||||
count | 2
|
||||
max_speed | 6.5
|
||||
max_distance | 8.8968
|
||||
max_duration | "PT27M"
|
||||
?column? | 2
|
||||
?column? | 16.5415
|
||||
?column? | "PT47M"
|
||||
?column? | 37.2
|
||||
?column? | 2
|
||||
?column? | 1
|
||||
?column? | 2
|
||||
first_date | t
|
||||
last_date | t
|
||||
|
||||
|
@@ -15,7 +15,12 @@ select current_database();
|
||||
\echo 'Check the number of process pending'
|
||||
-- Should be 24
|
||||
SELECT count(*) as jobs from public.process_queue pq where pq.processed is null;
|
||||
--set role scheduler
|
||||
-- Switch to the scheduler role
|
||||
--\echo 'Switch to the scheduler role'
|
||||
--SET ROLE scheduler;
|
||||
-- Should be 24
|
||||
SELECT count(*) as jobs from public.process_queue pq where pq.processed is null;
|
||||
-- Run the cron jobs
|
||||
SELECT public.run_cron_jobs();
|
||||
-- Check any pending job
|
||||
SELECT count(*) as any_pending_jobs from public.process_queue pq where pq.processed is null;
|
||||
|
@@ -9,6 +9,9 @@ Check the number of process pending
|
||||
-[ RECORD 1 ]
|
||||
jobs | 24
|
||||
|
||||
-[ RECORD 1 ]
|
||||
jobs | 24
|
||||
|
||||
-[ RECORD 1 ]-+-
|
||||
run_cron_jobs |
|
||||
|
||||
|
@@ -29,7 +29,7 @@ SELECT a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT
|
||||
SELECT v.vessel_id IS NOT NULL AS vessel_id, v.owner_email, v.mmsi, v.name, v.role FROM auth.vessels AS v;
|
||||
\echo 'api.metadata details'
|
||||
--
|
||||
SELECT m.id, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration IS NOT NULL AS configuration, available_keys FROM api.metadata AS m ORDER BY m.name DESC;
|
||||
SELECT vessel_id IS NOT NULL AS vessel_id_not_null, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration IS NOT NULL AS configuration_not_null, available_keys FROM api.metadata AS m ORDER BY m.name DESC;
|
||||
|
||||
--
|
||||
-- grafana
|
||||
@@ -55,7 +55,7 @@ SELECT v.name AS __text, m.vessel_id IS NOT NULL AS __value FROM auth.vessels v
|
||||
SELECT v.vessel_id IS NOT NULL AS vessel_id, v.owner_email, v.mmsi, v.name, v.role FROM auth.vessels AS v;
|
||||
--SELECT * FROM api.metadata m;
|
||||
\echo 'api.metadata details'
|
||||
SELECT m.id, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration IS NOT NULL AS configuration, available_keys FROM api.metadata AS m;
|
||||
SELECT vessel_id IS NOT NULL AS vessel_id_not_null, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration IS NOT NULL AS configuration_not_null, available_keys FROM api.metadata AS m;
|
||||
|
||||
\echo 'api.logs_view'
|
||||
--SELECT * FROM api.logbook l;
|
||||
|
@@ -55,34 +55,34 @@ name | aava
|
||||
role | vessel_role
|
||||
|
||||
api.metadata details
|
||||
-[ RECORD 1 ]---+----------------
|
||||
id | 1
|
||||
name | kapla
|
||||
mmsi | 123456789
|
||||
length | 12
|
||||
beam | 10
|
||||
height | 24
|
||||
ship_type | 36
|
||||
plugin_version | 0.0.1
|
||||
signalk_version | signalk_version
|
||||
time | t
|
||||
active | t
|
||||
configuration | t
|
||||
available_keys |
|
||||
-[ RECORD 2 ]---+----------------
|
||||
id | 2
|
||||
name | aava
|
||||
mmsi | 787654321
|
||||
length | 12
|
||||
beam | 10
|
||||
height | 24
|
||||
ship_type | 37
|
||||
plugin_version | 1.0.2
|
||||
signalk_version | 1.20.0
|
||||
time | t
|
||||
active | t
|
||||
configuration | f
|
||||
available_keys | []
|
||||
-[ RECORD 1 ]----------+----------------
|
||||
vessel_id_not_null | t
|
||||
name | kapla
|
||||
mmsi | 123456789
|
||||
length | 12
|
||||
beam | 10
|
||||
height | 24
|
||||
ship_type | 36
|
||||
plugin_version | 0.0.1
|
||||
signalk_version | signalk_version
|
||||
time | t
|
||||
active | t
|
||||
configuration_not_null | t
|
||||
available_keys |
|
||||
-[ RECORD 2 ]----------+----------------
|
||||
vessel_id_not_null | t
|
||||
name | aava
|
||||
mmsi | 787654321
|
||||
length | 12
|
||||
beam | 10
|
||||
height | 24
|
||||
ship_type | 37
|
||||
plugin_version | 1.0.2
|
||||
signalk_version | 1.20.0
|
||||
time | t
|
||||
active | t
|
||||
configuration_not_null | t
|
||||
available_keys | []
|
||||
|
||||
SET
|
||||
ROLE grafana current_setting
|
||||
@@ -108,26 +108,26 @@ name | kapla
|
||||
role | vessel_role
|
||||
|
||||
api.metadata details
|
||||
-[ RECORD 1 ]---+----------------
|
||||
id | 1
|
||||
name | kapla
|
||||
mmsi | 123456789
|
||||
length | 12
|
||||
beam | 10
|
||||
height | 24
|
||||
ship_type | 36
|
||||
plugin_version | 0.0.1
|
||||
signalk_version | signalk_version
|
||||
time | t
|
||||
active | t
|
||||
configuration | t
|
||||
available_keys |
|
||||
-[ RECORD 1 ]----------+----------------
|
||||
vessel_id_not_null | t
|
||||
name | kapla
|
||||
mmsi | 123456789
|
||||
length | 12
|
||||
beam | 10
|
||||
height | 24
|
||||
ship_type | 36
|
||||
plugin_version | 0.0.1
|
||||
signalk_version | signalk_version
|
||||
time | t
|
||||
active | t
|
||||
configuration_not_null | t
|
||||
available_keys |
|
||||
|
||||
api.logs_view
|
||||
-[ RECORD 1 ]----+-----------------------
|
||||
id | 2
|
||||
name | Formanshagen to Ekenäs
|
||||
from | Formanshagen
|
||||
name | Norra hamnen to Ekenäs
|
||||
from | Norra hamnen
|
||||
to | Ekenäs
|
||||
distance | 8.8968
|
||||
duration | PT20M
|
||||
@@ -137,7 +137,7 @@ _to_moorage_id | 3
|
||||
id | 1
|
||||
name | patch log name 3
|
||||
from | patch moorage name 3
|
||||
to | Formanshagen
|
||||
to | Norra hamnen
|
||||
distance | 7.6447
|
||||
duration | PT27M
|
||||
_from_moorage_id | 1
|
||||
@@ -191,7 +191,7 @@ api.stays_view
|
||||
-[ RECORD 1 ]+---------------------
|
||||
id | 2
|
||||
name | t
|
||||
moorage | Formanshagen
|
||||
moorage | Norra hamnen
|
||||
moorage_id | 2
|
||||
duration | PT2M
|
||||
stayed_at | Dock
|
||||
@@ -226,7 +226,7 @@ notes | new moorage note 3
|
||||
-[ RECORD 2 ]-------------------------------------------------
|
||||
id | 2
|
||||
vessel_id | t
|
||||
name | Formanshagen
|
||||
name | Norra hamnen
|
||||
country | fi
|
||||
stay_code | 4
|
||||
latitude | 59.9768833333333
|
||||
@@ -249,7 +249,7 @@ notes |
|
||||
api.moorages_view
|
||||
-[ RECORD 1 ]-------+---------------------
|
||||
id | 2
|
||||
moorage | Formanshagen
|
||||
moorage | Norra hamnen
|
||||
default_stay | Dock
|
||||
default_stay_id | 4
|
||||
arrivals_departures | 2
|
||||
@@ -287,7 +287,7 @@ stay_first_seen | f
|
||||
stay_last_seen | f
|
||||
-[ RECORD 2 ]------+---------------------------------------------------
|
||||
id | 2
|
||||
name | Formanshagen
|
||||
name | Norra hamnen
|
||||
default_stay | Dock
|
||||
latitude | 59.9768833333333
|
||||
longitude | 23.4321
|
||||
|
@@ -22,12 +22,26 @@ SELECT v.vessel_id as "vessel_id" FROM auth.vessels v WHERE v.owner_email = 'dem
|
||||
--\echo :"vessel_id"
|
||||
SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
|
||||
|
||||
-- Delete logbook for user
|
||||
-- Count logbook for user
|
||||
\echo 'logbook'
|
||||
SELECT count(*) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
|
||||
\echo 'logbook'
|
||||
-- track_geom and track_geojson are now dynamic from mobilitydb
|
||||
SELECT name,_from_time IS NOT NULL AS _from_time, _to_time IS NOT NULL AS _to_time, trajectory(trip) AS track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false) ORDER BY id ASC;
|
||||
SELECT name,_from_time IS NOT NULL AS _from_time_not_null, _to_time IS NOT NULL AS _to_time_not_null, trajectory(trip) AS track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false) ORDER BY id ASC;
|
||||
|
||||
--
|
||||
-- user_role
|
||||
SET ROLE user_role;
|
||||
\echo 'ROLE user_role current_setting'
|
||||
|
||||
SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
|
||||
|
||||
-- Count logbook for user
|
||||
\echo 'logbook'
|
||||
SELECT count(*) FROM api.logbook;
|
||||
\echo 'logbook'
|
||||
-- track_geom and track_geojson are now dynamic from mobilitydb
|
||||
SELECT name,_from_time IS NOT NULL AS _from_time_not_null, _to_time IS NOT NULL AS _to_time_not_null, trajectory(trip) AS track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook ORDER BY id ASC;
|
||||
|
||||
-- Delete logbook for user
|
||||
\echo 'Delete logbook for user kapla'
|
||||
|
@@ -17,54 +17,113 @@ logbook
|
||||
count | 4
|
||||
|
||||
logbook
|
||||
-[ RECORD 1 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | patch log name 3
|
||||
_from_time | t
|
||||
_to_time | t
|
||||
track_geom | 0102000020E61000001A000000B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
|
||||
distance | 7.6447
|
||||
duration | PT27M
|
||||
avg_speed | 3.6357142857142852
|
||||
max_speed | 6.1
|
||||
max_wind_speed | 22.1
|
||||
notes | new log note 3
|
||||
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
|
||||
-[ RECORD 2 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Formanshagen to Ekenäs
|
||||
_from_time | t
|
||||
_to_time | t
|
||||
track_geom | 0102000020E610000013000000029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
|
||||
distance | 8.8968
|
||||
duration | PT20M
|
||||
avg_speed | 5.4523809523809526
|
||||
max_speed | 6.5
|
||||
max_wind_speed | 37.2
|
||||
notes |
|
||||
extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 10.476190476190478}
|
||||
-[ RECORD 3 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Tropics Zone
|
||||
_from_time | t
|
||||
_to_time | t
|
||||
track_geom | 0102000020E610000002000000A4E85E0D58934FC000DC509B80052C40BC069B43D64553C090510727F3BD2940
|
||||
distance | 123
|
||||
duration |
|
||||
avg_speed |
|
||||
max_speed |
|
||||
max_wind_speed |
|
||||
notes |
|
||||
extra |
|
||||
-[ RECORD 4 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Alaska Zone
|
||||
_from_time | t
|
||||
_to_time | t
|
||||
track_geom | 0102000020E610000002000000FDB11ED079F261C090C47F1861B84D40D3505124540B63C09C091C1C8D4A4C40
|
||||
distance | 1234
|
||||
duration |
|
||||
avg_speed |
|
||||
max_speed |
|
||||
max_wind_speed |
|
||||
notes |
|
||||
extra |
|
||||
-[ RECORD 1 ]-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | patch log name 3
|
||||
_from_time_not_null | t
|
||||
_to_time_not_null | t
|
||||
track_geom | 0102000020E61000001A000000B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
|
||||
distance | 7.6447
|
||||
duration | PT27M
|
||||
avg_speed | 3.6357142857142852
|
||||
max_speed | 6.1
|
||||
max_wind_speed | 22.1
|
||||
notes | new log note 3
|
||||
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
|
||||
-[ RECORD 2 ]-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Norra hamnen to Ekenäs
|
||||
_from_time_not_null | t
|
||||
_to_time_not_null | t
|
||||
track_geom | 0102000020E610000013000000029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
|
||||
distance | 8.8968
|
||||
duration | PT20M
|
||||
avg_speed | 5.4523809523809526
|
||||
max_speed | 6.5
|
||||
max_wind_speed | 37.2
|
||||
notes |
|
||||
extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 10.476190476190478}
|
||||
-[ RECORD 3 ]-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Tropics Zone
|
||||
_from_time_not_null | t
|
||||
_to_time_not_null | t
|
||||
track_geom | 0102000020E610000002000000A4E85E0D58934FC000DC509B80052C40BC069B43D64553C090510727F3BD2940
|
||||
distance | 123
|
||||
duration |
|
||||
avg_speed |
|
||||
max_speed |
|
||||
max_wind_speed |
|
||||
notes |
|
||||
extra |
|
||||
-[ RECORD 4 ]-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Alaska Zone
|
||||
_from_time_not_null | t
|
||||
_to_time_not_null | t
|
||||
track_geom | 0102000020E610000002000000FDB11ED079F261C090C47F1861B84D40D3505124540B63C09C091C1C8D4A4C40
|
||||
distance | 1234
|
||||
duration |
|
||||
avg_speed |
|
||||
max_speed |
|
||||
max_wind_speed |
|
||||
notes |
|
||||
extra |
|
||||
|
||||
SET
|
||||
ROLE user_role current_setting
|
||||
-[ RECORD 1 ]
|
||||
vessel_id | t
|
||||
|
||||
logbook
|
||||
-[ RECORD 1 ]
|
||||
count | 4
|
||||
|
||||
logbook
|
||||
-[ RECORD 1 ]-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | patch log name 3
|
||||
_from_time_not_null | t
|
||||
_to_time_not_null | t
|
||||
track_geom | 0102000020E61000001A000000B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
|
||||
distance | 7.6447
|
||||
duration | PT27M
|
||||
avg_speed | 3.6357142857142852
|
||||
max_speed | 6.1
|
||||
max_wind_speed | 22.1
|
||||
notes | new log note 3
|
||||
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
|
||||
-[ RECORD 2 ]-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Norra hamnen to Ekenäs
|
||||
_from_time_not_null | t
|
||||
_to_time_not_null | t
|
||||
track_geom | 0102000020E610000013000000029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
|
||||
distance | 8.8968
|
||||
duration | PT20M
|
||||
avg_speed | 5.4523809523809526
|
||||
max_speed | 6.5
|
||||
max_wind_speed | 37.2
|
||||
notes |
|
||||
extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 10.476190476190478}
|
||||
-[ RECORD 3 ]-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Tropics Zone
|
||||
_from_time_not_null | t
|
||||
_to_time_not_null | t
|
||||
track_geom | 0102000020E610000002000000A4E85E0D58934FC000DC509B80052C40BC069B43D64553C090510727F3BD2940
|
||||
distance | 123
|
||||
duration |
|
||||
avg_speed |
|
||||
max_speed |
|
||||
max_wind_speed |
|
||||
notes |
|
||||
extra |
|
||||
-[ RECORD 4 ]-------+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||
name | Alaska Zone
|
||||
_from_time_not_null | t
|
||||
_to_time_not_null | t
|
||||
track_geom | 0102000020E610000002000000FDB11ED079F261C090C47F1861B84D40D3505124540B63C09C091C1C8D4A4C40
|
||||
distance | 1234
|
||||
duration |
|
||||
avg_speed |
|
||||
max_speed |
|
||||
max_wind_speed |
|
||||
notes |
|
||||
extra |
|
||||
|
||||
Delete logbook for user kapla
|
||||
-[ RECORD 1 ]-----+--
|
||||
|
@@ -11,22 +11,74 @@ select current_database();

-- output display format
\x on

SELECT count(*) as count_eq_2 FROM api.metadata m;

SELECT v.vessel_id as "vessel_id" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
--\echo :"vessel_id"
SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;

--SELECT * FROM api.metadata m;
-- user_role
SET ROLE user_role;

\echo 'api.metadata details'
SELECT m.id, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration, available_keys FROM api.metadata AS m ORDER BY m.name ASC;
SELECT vessel_id IS NOT NULL AS vessel_id_not_null, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration, available_keys FROM api.metadata AS m ORDER BY m.name ASC;

\echo 'api.metadata get configuration'
select configuration from api.metadata WHERE vessel_id = current_setting('vessel.id', false);
select configuration from api.metadata; --WHERE vessel_id = current_setting('vessel.id', false);

\echo 'api.metadata update configuration'
UPDATE api.metadata SET configuration = '{ "depthKey": "environment.depth.belowTransducer" }' WHERE vessel_id = current_setting('vessel.id', false);
UPDATE api.metadata SET configuration = '{ "depthKey": "environment.depth.belowTransducer" }'; --WHERE vessel_id = current_setting('vessel.id', false);

\echo 'api.metadata get configuration with new value'
select configuration->'depthKey' AS depthKey, configuration->'update_at' IS NOT NULL AS update_at from api.metadata WHERE vessel_id = current_setting('vessel.id', false);
select configuration->'depthKey' AS depthKey, configuration->'update_at' IS NOT NULL AS update_at_not_null from api.metadata; --WHERE vessel_id = current_setting('vessel.id', false);

\echo 'api.metadata get configuration base on update_at value'
select configuration->'depthKey' AS depthKey, configuration->'update_at' IS NOT NULL AS update_at from api.metadata WHERE vessel_id = current_setting('vessel.id', false) AND configuration->>'update_at' <= to_char(NOW(), 'YYYY-MM-DD"T"HH24:MI:SS"Z"');
select configuration->'depthKey' AS depthKey, configuration->'update_at' IS NOT NULL AS update_at_not_null from api.metadata WHERE vessel_id = current_setting('vessel.id', false) AND configuration->>'update_at' <= to_char(NOW(), 'YYYY-MM-DD"T"HH24:MI:SS"Z"');

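The UPDATE statements above replace the whole configuration document. A minimal sketch of a partial update that merges a single key instead, assuming configuration is stored as jsonb (which the -> accessors above suggest); this is an editor's illustration, not part of the test file:

-- Sketch only (not part of the test file): merge one key into the existing
-- configuration instead of replacing the whole document (assumes jsonb).
UPDATE api.metadata
SET configuration = COALESCE(configuration, '{}'::jsonb)
    || '{ "depthKey": "environment.depth.belowTransducer" }'::jsonb
WHERE vessel_id = current_setting('vessel.id', false);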
-- Upsert make_model on metadata_ext table
\echo 'api.metadata_ext set make_model'
INSERT INTO api.metadata_ext (vessel_id, make_model)
VALUES (current_setting('vessel.id', false), 'my super yacht')
ON CONFLICT (vessel_id) DO UPDATE
SET make_model = EXCLUDED.make_model;

-- Upsert polar on metadata_ext table
\echo 'api.metadata_ext set polar'
INSERT INTO api.metadata_ext (vessel_id, polar)
VALUES (current_setting('vessel.id', false), 'twa/tws;4;6;8;10;12;14;16;20;24\n0;0;0;0;0;0;0;0;0;0')
ON CONFLICT (vessel_id) DO UPDATE
SET polar = EXCLUDED.polar;

-- Upsert image on metadata_ext table
\echo 'api.metadata_ext set image/image_b64'
INSERT INTO api.metadata_ext (vessel_id, image_b64)
VALUES (current_setting('vessel.id', false), 'iVBORw0KGgoAAAANSUhEUgAAAMgAAAAyCAIAAACWMwO2AAABNklEQVR4nO3bwY6CMBiF0XYy7//KzIKk6VBjiMMNk59zVljRIH6WsrBv29bgal93HwA1CYsIYREhLCKERYSwiBAWEcIiQlhECIsIYREhLCKERYSwiBAWEcIiQlhECIsIYREhLCK+7z6A/6j33lq75G8m')
ON CONFLICT (vessel_id) DO UPDATE
SET image_b64 = EXCLUDED.image_b64;

-- Ensure make_model on metadata_ext table is updated
\echo 'api.metadata_ext get make_model'
SELECT make_model FROM api.metadata_ext; --WHERE vessel_id = current_setting('vessel.id', false);

-- Ensure polar_updated_at on metadata_ext table is updated by trigger
\echo 'api.metadata_ext get polar_updated_at'
SELECT polar,polar_updated_at IS NOT NULL AS polar_updated_at_not_null FROM api.metadata_ext; --WHERE vessel_id = current_setting('vessel.id', false);

-- Ensure image_updated_at on metadata_ext table is updated by trigger
\echo 'api.metadata_ext get image_updated_at'
SELECT image_b64 IS NULL AS image_b64_is_null,image IS NOT NULL AS image_not_null,image_updated_at IS NOT NULL AS image_updated_at_not_null FROM api.metadata_ext; --WHERE vessel_id = current_setting('vessel.id', false);

-- vessel_role
SET ROLE vessel_role;

\echo 'api.metadata get configuration with new value as vessel'
select configuration->'depthKey' AS depthKey, configuration->'update_at' IS NOT NULL AS update_at_not_null from api.metadata; -- WHERE vessel_id = current_setting('vessel.id', false);

\echo 'api.metadata get configuration base on update_at value as vessel'
select configuration->'depthKey' AS depthKey, configuration->'update_at' IS NOT NULL AS update_at_not_null from api.metadata WHERE vessel_id = current_setting('vessel.id', false) AND configuration->>'update_at' <= to_char(NOW(), 'YYYY-MM-DD"T"HH24:MI:SS"Z"');

-- api_anonymous
SET ROLE api_anonymous;

\echo 'api_anonymous get vessel image'
SELECT api.vessel_image(current_setting('vessel.id', false)) IS NOT NULL AS vessel_image_not_null;

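The polar value inserted above is a single delimited string. A hedged sketch of expanding it into one row per polar line for inspection, assuming polar is a plain text column and standard_conforming_strings is on (so the '\n' literal below is the same two-character separator written by the INSERT above); run as user_role with vessel.id set:

-- Sketch only (not part of the test file): one row per polar line, cells split on ';'.
SELECT line_no, string_to_array(line, ';') AS cells
FROM unnest(string_to_array(
        (SELECT polar FROM api.metadata_ext WHERE vessel_id = current_setting('vessel.id', false)),
        '\n')) WITH ORDINALITY AS t(line, line_no);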
@@ -5,38 +5,28 @@

You are now connected to database "signalk" as user "username".
Expanded display is on.
-[ RECORD 1 ]-
count_eq_2 | 2

-[ RECORD 1 ]
vessel_id | t

SET
api.metadata details
-[ RECORD 1 ]---+----------------
id | 2
name | aava
mmsi | 787654321
length | 12
beam | 10
height | 24
ship_type | 37
plugin_version | 1.0.2
signalk_version | 1.20.0
time | t
active | t
configuration |
available_keys | []
-[ RECORD 2 ]---+----------------
id | 1
name | kapla
mmsi | 123456789
length | 12
beam | 10
height | 24
ship_type | 36
plugin_version | 0.0.1
signalk_version | signalk_version
time | t
active | t
configuration |
available_keys |
-[ RECORD 1 ]------+----------------
vessel_id_not_null | t
name | kapla
mmsi | 123456789
length | 12
beam | 10
height | 24
ship_type | 36
plugin_version | 0.0.1
signalk_version | signalk_version
time | t
active | t
configuration |
available_keys |

api.metadata get configuration
-[ RECORD 1 ]-+-
@@ -45,12 +35,49 @@ configuration |
api.metadata update configuration
UPDATE 1
api.metadata get configuration with new value
-[ RECORD 1 ]----------------------------------
depthkey | "environment.depth.belowTransducer"
update_at | t
-[ RECORD 1 ]------+------------------------------------
depthkey | "environment.depth.belowTransducer"
update_at_not_null | t

api.metadata get configuration base on update_at value
-[ RECORD 1 ]----------------------------------
depthkey | "environment.depth.belowTransducer"
update_at | t
-[ RECORD 1 ]------+------------------------------------
depthkey | "environment.depth.belowTransducer"
update_at_not_null | t

api.metadata_ext set make_model
INSERT 0 1
api.metadata_ext set polar
INSERT 0 1
api.metadata_ext set image/image_b64
INSERT 0 1
api.metadata_ext get make_model
-[ RECORD 1 ]--------------
make_model | my super yacht

api.metadata_ext get polar_updated_at
-[ RECORD 1 ]-------------+-----------------------------------------------------
polar | twa/tws;4;6;8;10;12;14;16;20;24\n0;0;0;0;0;0;0;0;0;0
polar_updated_at_not_null | t

api.metadata_ext get image_updated_at
-[ RECORD 1 ]-------------+--
image_b64_is_null | f
image_not_null | t
image_updated_at_not_null | t

SET
api.metadata get configuration with new value as vessel
-[ RECORD 1 ]------+------------------------------------
depthkey | "environment.depth.belowTransducer"
update_at_not_null | t

api.metadata get configuration base on update_at value as vessel
-[ RECORD 1 ]------+------------------------------------
depthkey | "environment.depth.belowTransducer"
update_at_not_null | t

SET
api_anonymous get vessel image
-[ RECORD 1 ]---------+--
vessel_image_not_null | t


@@ -54,7 +54,7 @@ vessel_id | t
Export timelapse as Geometry LineString from a trip
-[ RECORD 1 ]--+-----------
geometry_type | LineString
num_properties | 26
num_properties | 35

Export timelapse as Geometry Point from a trip
-[ RECORD 1 ]

50 tests/sql/stats.sql (Normal file)
@@ -0,0 +1,50 @@
---------------------------------------------------------------------------
-- Listing
--

-- List current database
select current_database();

-- connect to the DB
\c signalk

-- output display format
\x on

\echo 'Validate Stats operation'
-- Assign vessel_id var
SELECT v.vessel_id as "vessel_id_kapla" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
SELECT v.vessel_id as "vessel_id_aava" FROM auth.vessels v WHERE v.owner_email = 'demo+aava@openplotter.cloud' \gset

-- user_role
SET ROLE user_role;
\echo 'ROLE user_role current_setting'

SELECT set_config('vessel.id', :'vessel_id_kapla', false) IS NOT NULL as vessel_id;

-- Stats logbook and moorages for user
\echo 'Stats logbook and moorages for user kapla'
--SELECT api.stats_fn();
WITH tbl as (SELECT api.stats_fn() as stats)
SELECT tbl.stats->'stats_logs'->>'name' = 'kapla' AS boat_name,
(tbl.stats->'stats_logs'->>'count')::int = 1 AS logs_count,
(tbl.stats->'stats_logs'->>'max_speed')::numeric = 6.5 AS max_speed,
(tbl.stats->'stats_moorages'->>'home_ports')::int = 1 AS home_ports,
(tbl.stats->'stats_moorages'->>'unique_moorages')::numeric = 5 AS unique_moorages,
(tbl.stats->'moorages_top_countries') = '["fi"]' AS moorages_top_countries
FROM tbl;

SELECT set_config('vessel.id', :'vessel_id_aava', false) IS NOT NULL as vessel_id;

-- Stats logbook and moorages for user
\echo 'Stats logbook and moorages for user aava'
--SELECT api.stats_fn();
WITH tbl as (SELECT api.stats_fn() as stats)
SELECT tbl.stats->'stats_logs'->>'name' = 'aava' AS boat_name,
(tbl.stats->'stats_logs'->>'count')::int = 2 AS logs_count,
(tbl.stats->'stats_logs'->>'max_speed')::numeric = 9.5 AS max_speed,
(tbl.stats->'stats_moorages'->>'home_ports')::int = 1 AS home_ports,
(tbl.stats->'stats_moorages'->>'unique_moorages')::numeric = 4 AS unique_moorages,
(tbl.stats->'moorages_top_countries') = '["ee"]' AS moorages_top_countries
FROM tbl;

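When one of the boolean assertions above returns false, it can help to inspect the whole stats document rather than individual fields. A minimal sketch, assuming api.stats_fn() returns a JSON document as exercised above; this is an editor's illustration, not part of the test file:

-- Sketch only (not part of the test file): pretty-print the full stats
-- document for the vessel currently set in vessel.id.
SELECT jsonb_pretty(api.stats_fn()::jsonb);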
34 tests/sql/stats.sql.output (Normal file)
@@ -0,0 +1,34 @@
current_database
------------------
signalk
(1 row)

You are now connected to database "signalk" as user "username".
Expanded display is on.
Validate Stats operation
SET
ROLE user_role current_setting
-[ RECORD 1 ]
vessel_id | t

Stats logbook and moorages for user kapla
-[ RECORD 1 ]----------+--
boat_name | t
logs_count | t
max_speed | t
home_ports | t
unique_moorages | t
moorages_top_countries | t

-[ RECORD 1 ]
vessel_id | t

Stats logbook and moorages for user aava
-[ RECORD 1 ]----------+--
boat_name | t
logs_count | t
max_speed | t
home_ports | t
unique_moorages | t
moorages_top_countries | t

47 tests/sql/stays_ext.sql (Normal file)
@@ -0,0 +1,47 @@
---------------------------------------------------------------------------
-- Listing
--

-- List current database
select current_database();

-- connect to the DB
\c signalk

-- output display format
\x on

SELECT count(*) as count_eq_0 FROM api.stays_ext m;

SELECT v.vessel_id as "vessel_id" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
--\echo :"vessel_id"
SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;

-- user_role
SET ROLE user_role;

\echo 'api.stays details'
SELECT vessel_id IS NOT NULL AS vessel_id_not_null, m.name IS NOT NULL AS name_not_null FROM api.stays AS m WHERE active IS False ORDER BY m.name ASC;

-- Upsert image on stays_ext table
\echo 'api.stays_ext set image/image_b64'
INSERT INTO api.stays_ext (vessel_id, stay_id, image_b64)
VALUES (current_setting('vessel.id', false), 1, 'iVBORw0KGgoAAAANSUhEUgAAAMgAAAAyCAIAAACWMwO2AAABNklEQVR4nO3bwY6CMBiF0XYy7//KzIKk6VBjiMMNk59zVljRIH6WsrBv29bgal93HwA1CYsIYREhLCKERYSwiBAWEcIiQlhECIsIYREhLCKERYSwiBAWEcIiQlhECIsIYREhLCK+7z6A/6j33lq75G8m')
ON CONFLICT (stay_id) DO UPDATE
SET image_b64 = EXCLUDED.image_b64;

-- Ensure image_updated_at on metadata_ext table is updated by trigger
\echo 'api.stays_ext get image_updated_at'
SELECT image_b64 IS NULL AS image_b64_is_null,image IS NOT NULL AS image_not_null,image_updated_at IS NOT NULL AS image_updated_at_not_null FROM api.metadata_ext; --WHERE vessel_id = current_setting('vessel.id', false);

-- vessel_role
SET ROLE vessel_role;

\echo 'api.stays_ext'
SELECT vessel_id IS NOT NULL AS vessel_id_not_null, stay_id FROM api.stays_ext;

-- api_anonymous
SET ROLE api_anonymous;

\echo 'api_anonymous get stays image'
SELECT api.stays_image(current_setting('vessel.id', false), 1) IS NOT NULL AS stays_image_not_null;
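For manual debugging it can also help to confirm that the stored payload really is decodable base64. A hedged sketch, assuming image_b64 is plain base64 text with no data-URI prefix (as inserted above) and run as user_role with vessel.id set; this is an editor's illustration, not part of the test file:

-- Sketch only (not part of the test file): report the decoded image size in bytes.
SELECT stay_id, octet_length(decode(image_b64, 'base64')) AS image_bytes
FROM api.stays_ext
WHERE stay_id = 1;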
37 tests/sql/stays_ext.sql.output (Normal file)
@@ -0,0 +1,37 @@
current_database
------------------
signalk
(1 row)

You are now connected to database "signalk" as user "username".
Expanded display is on.
-[ RECORD 1 ]-
count_eq_0 | 0

-[ RECORD 1 ]
vessel_id | t

SET
api.stays details
-[ RECORD 1 ]------+--
vessel_id_not_null | t
name_not_null | t
-[ RECORD 2 ]------+--
vessel_id_not_null | t
name_not_null | t

api.stays_ext set image/image_b64
INSERT 0 1
api.stays_ext get image_updated_at
-[ RECORD 1 ]-------------+--
image_b64_is_null | f
image_not_null | t
image_updated_at_not_null | t

SET
api.stays_ext
SET
api_anonymous get stays image
-[ RECORD 1 ]--------+--
stays_image_not_null | t

@@ -5,11 +5,11 @@

You are now connected to database "signalk" as user "username".
Expanded display is on.
-[ RECORD 1 ]--+-------------------------------
server_version | 16.8 (Debian 16.8-1.pgdg120+1)
-[ RECORD 1 ]--+--------------------------------
server_version | 16.10 (Debian 16.10-1.pgdg12+1)

-[ RECORD 1 ]--------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
postgis_full_version | POSTGIS="3.5.2 dea6d0a" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" (compiled against PROJ 9.1.1) LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"
postgis_full_version | POSTGIS="3.5.3 aab5f55" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" (compiled against PROJ 9.1.1) LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"

-[ RECORD 1 ]--------------------------------------------------------------------------------------
Name | citext
@@ -53,12 +53,12 @@ Schema | pg_catalog
Description | PL/Python3U untrusted procedural language
-[ RECORD 9 ]--------------------------------------------------------------------------------------
Name | postgis
Version | 3.5.2
Version | 3.5.3
Schema | public
Description | PostGIS geometry and geography spatial types and functions
-[ RECORD 10 ]-------------------------------------------------------------------------------------
Name | timescaledb
Version | 2.19.3
Version | 2.21.3
Schema | public
Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
-[ RECORD 11 ]-------------------------------------------------------------------------------------
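The expected extension versions above can also be read directly from the catalog when a test environment drifts; a minimal sketch (editor's illustration, not part of the expected output):

-- Sketch only: list installed extension versions straight from the catalog.
SELECT extname, extversion FROM pg_extension ORDER BY extname;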
@@ -116,14 +116,14 @@ laninline | 13566
|
||||
lanvalidator | 13567
|
||||
lanacl |
|
||||
-[ RECORD 5 ]-+-----------
|
||||
oid | 18225
|
||||
oid | 18251
|
||||
lanname | plpython3u
|
||||
lanowner | 10
|
||||
lanispl | t
|
||||
lanpltrusted | t
|
||||
lanplcallfoid | 18222
|
||||
laninline | 18223
|
||||
lanvalidator | 18224
|
||||
lanplcallfoid | 18248
|
||||
laninline | 18249
|
||||
lanvalidator | 18250
|
||||
lanacl |
|
||||
|
||||
-[ RECORD 1 ]+-----------
|
||||
@@ -219,18 +219,22 @@ Name | spatial_ref_sys
|
||||
Type | table
|
||||
Owner | username
|
||||
|
||||
-[ RECORD 1 ]--------
|
||||
-[ RECORD 1 ]------------
|
||||
schema_api | logbook
|
||||
-[ RECORD 2 ]--------
|
||||
-[ RECORD 2 ]------------
|
||||
schema_api | metadata
|
||||
-[ RECORD 3 ]--------
|
||||
-[ RECORD 3 ]------------
|
||||
schema_api | metadata_ext
|
||||
-[ RECORD 4 ]------------
|
||||
schema_api | metrics
|
||||
-[ RECORD 4 ]--------
|
||||
-[ RECORD 5 ]------------
|
||||
schema_api | moorages
|
||||
-[ RECORD 5 ]--------
|
||||
-[ RECORD 6 ]------------
|
||||
schema_api | stays
|
||||
-[ RECORD 6 ]--------
|
||||
-[ RECORD 7 ]------------
|
||||
schema_api | stays_at
|
||||
-[ RECORD 8 ]------------
|
||||
schema_api | stays_ext
|
||||
|
||||
-[ RECORD 1 ]-+------------------------------
|
||||
schema_public | aistypes
|
||||
@@ -279,31 +283,13 @@ with_check | true
|
||||
-[ RECORD 2 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata
|
||||
policyname | api_vessel_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | true
|
||||
-[ RECORD 3 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata
|
||||
policyname | api_user_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 4 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata
|
||||
policyname | api_scheduler_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {scheduler}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 5 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 3 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata
|
||||
policyname | grafana_role
|
||||
@@ -312,7 +298,7 @@ roles | {grafana}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 6 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 4 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata
|
||||
policyname | grafana_proxy_role
|
||||
@@ -321,52 +307,34 @@ roles | {grafana_auth}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | false
|
||||
-[ RECORD 5 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | admin_all
|
||||
permissive | PERMISSIVE
|
||||
roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 6 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | vessels
|
||||
policyname | grafana_proxy_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {grafana_auth}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | false
|
||||
-[ RECORD 7 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | admin_all
|
||||
permissive | PERMISSIVE
|
||||
roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 8 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | api_vessel_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | true
|
||||
-[ RECORD 9 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | vessels
|
||||
policyname | grafana_proxy_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {grafana_auth}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | false
|
||||
-[ RECORD 10 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | api_user_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 11 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | api_scheduler_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {scheduler}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 12 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 8 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | grafana_role
|
||||
@@ -375,7 +343,7 @@ roles | {grafana}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 13 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 9 ]------------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | api_anonymous_role
|
||||
@@ -384,7 +352,7 @@ roles | {api_anonymous}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 14 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 10 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | logbook
|
||||
policyname | admin_all
|
||||
@@ -393,7 +361,7 @@ roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 15 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 11 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | logbook
|
||||
policyname | api_vessel_role
|
||||
@@ -402,7 +370,7 @@ roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | true
|
||||
-[ RECORD 16 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 12 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | accounts
|
||||
policyname | admin_all
|
||||
@@ -411,7 +379,7 @@ roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 17 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 13 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | logbook
|
||||
policyname | api_user_role
|
||||
@@ -420,7 +388,7 @@ roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 18 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 14 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | logbook
|
||||
policyname | api_scheduler_role
|
||||
@@ -429,7 +397,7 @@ roles | {scheduler}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 19 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 15 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | logbook
|
||||
policyname | grafana_role
|
||||
@@ -438,7 +406,7 @@ roles | {grafana}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 20 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 16 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | logbook
|
||||
policyname | api_anonymous_role
|
||||
@@ -447,7 +415,7 @@ roles | {api_anonymous}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 21 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 17 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays
|
||||
policyname | admin_all
|
||||
@@ -456,7 +424,7 @@ roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 22 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 18 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays
|
||||
policyname | api_vessel_role
|
||||
@@ -465,7 +433,7 @@ roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | true
|
||||
-[ RECORD 23 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 19 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | logbook
|
||||
policyname | logbook_qgis_role
|
||||
@@ -474,7 +442,7 @@ roles | {qgis_role}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | false
|
||||
-[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 20 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | public
|
||||
tablename | process_queue
|
||||
policyname | public_maplapse_role
|
||||
@@ -483,7 +451,7 @@ roles | {maplapse_role}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 21 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays
|
||||
policyname | api_user_role
|
||||
@@ -492,7 +460,7 @@ roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 22 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays
|
||||
policyname | api_scheduler_role
|
||||
@@ -501,7 +469,7 @@ roles | {scheduler}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 23 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays
|
||||
policyname | grafana_role
|
||||
@@ -510,7 +478,7 @@ roles | {grafana}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays
|
||||
policyname | api_anonymous_role
|
||||
@@ -519,7 +487,7 @@ roles | {api_anonymous}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | moorages
|
||||
policyname | admin_all
|
||||
@@ -528,7 +496,7 @@ roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | moorages
|
||||
policyname | api_vessel_role
|
||||
@@ -537,7 +505,16 @@ roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | true
|
||||
-[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays_ext
|
||||
policyname | admin_all
|
||||
permissive | PERMISSIVE
|
||||
roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | moorages
|
||||
policyname | api_user_role
|
||||
@@ -546,7 +523,7 @@ roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | moorages
|
||||
policyname | api_scheduler_role
|
||||
@@ -555,7 +532,7 @@ roles | {scheduler}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | moorages
|
||||
policyname | grafana_role
|
||||
@@ -564,7 +541,7 @@ roles | {grafana}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | moorages
|
||||
policyname | api_anonymous_role
|
||||
@@ -573,7 +550,7 @@ roles | {api_anonymous}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | false
|
||||
-[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | vessels
|
||||
policyname | admin_all
|
||||
@@ -582,7 +559,7 @@ roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | vessels
|
||||
policyname | api_user_role
|
||||
@@ -591,7 +568,7 @@ roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
|
||||
with_check | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
|
||||
-[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | vessels
|
||||
policyname | grafana_role
|
||||
@@ -600,7 +577,7 @@ roles | {grafana}
|
||||
cmd | ALL
|
||||
qual | ((owner_email)::text = current_setting('user.email'::text, true))
|
||||
with_check | false
|
||||
-[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | accounts
|
||||
policyname | api_user_role
|
||||
@@ -609,7 +586,7 @@ roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | ((email)::text = current_setting('user.email'::text, true))
|
||||
with_check | ((email)::text = current_setting('user.email'::text, true))
|
||||
-[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | accounts
|
||||
policyname | api_scheduler_role
|
||||
@@ -618,7 +595,7 @@ roles | {scheduler}
|
||||
cmd | ALL
|
||||
qual | ((email)::text = current_setting('user.email'::text, true))
|
||||
with_check | ((email)::text = current_setting('user.email'::text, true))
|
||||
-[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | auth
|
||||
tablename | accounts
|
||||
policyname | grafana_proxy_role
|
||||
@@ -627,7 +604,7 @@ roles | {grafana_auth}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | false
|
||||
-[ RECORD 41 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | public
|
||||
tablename | process_queue
|
||||
policyname | admin_all
|
||||
@@ -636,7 +613,7 @@ roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 42 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | public
|
||||
tablename | process_queue
|
||||
policyname | api_vessel_role
|
||||
@@ -645,7 +622,7 @@ roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
||||
with_check | true
|
||||
-[ RECORD 43 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | public
|
||||
tablename | process_queue
|
||||
policyname | api_user_role
|
||||
@@ -654,7 +631,7 @@ roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
||||
with_check | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
||||
-[ RECORD 44 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
-[ RECORD 41 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | public
|
||||
tablename | process_queue
|
||||
policyname | api_scheduler_role
|
||||
@@ -663,6 +640,105 @@ roles | {scheduler}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | false
|
||||
-[ RECORD 42 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays_ext
|
||||
policyname | api_user_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 43 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays_ext
|
||||
policyname | api_anonymous_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {api_anonymous}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | false
|
||||
-[ RECORD 44 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | stays_ext
|
||||
policyname | api_vessel_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | false
|
||||
with_check | false
|
||||
-[ RECORD 45 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata_ext
|
||||
policyname | admin_all
|
||||
permissive | PERMISSIVE
|
||||
roles | {username}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | true
|
||||
-[ RECORD 46 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata_ext
|
||||
policyname | api_user_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 47 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata_ext
|
||||
policyname | api_anonymous_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {api_anonymous}
|
||||
cmd | ALL
|
||||
qual | true
|
||||
with_check | false
|
||||
-[ RECORD 48 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata_ext
|
||||
policyname | api_vessel_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | false
|
||||
with_check | false
|
||||
-[ RECORD 49 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | api_vessel_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 50 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata
|
||||
policyname | api_vessel_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {vessel_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 51 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metadata
|
||||
policyname | api_user_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
-[ RECORD 52 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||
schemaname | api
|
||||
tablename | metrics
|
||||
policyname | api_user_role
|
||||
permissive | PERMISSIVE
|
||||
roles | {user_role}
|
||||
cmd | ALL
|
||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||
|
||||
Test nominatim reverse_geocode_py_fn
-[ RECORD 1 ]---------+----------------------------------------
@@ -676,16 +752,16 @@ overpass_py_fn | {"fee": "yes", "vhf": "09", "name": "Port Olímpic", "phone": "
-[ RECORD 1 ]--+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure": "marina", "name:ca": "Port de la Ginesta", "wikidata": "Q16621038", "wikipedia": "ca:Port Ginesta", "check_date": "2024-08-23"}

-[ RECORD 1 ]--+---------------------------------------------------------------------------------------
overpass_py_fn | {"leisure": "marina", "seamark:type": "harbour", "seamark:harbour:category": "marina"}
-[ RECORD 1 ]--+---------------------------------------------------------------------------------------------------------------
overpass_py_fn | {"name": "Norra hamnen", "leisure": "marina", "seamark:type": "harbour", "seamark:harbour:category": "marina"}

-[ RECORD 1 ]-----------------------------------------------------------------------------------------------------------------------------------------------------------------------
versions_fn | {"api_version" : "0.9.1", "sys_version" : "PostgreSQL 16.8", "mobilitydb" : "1.2.0", "timescaledb" : "2.19.3", "postgis" : "3.5.2", "postgrest" : "PostgREST 12.2.12"}
versions_fn | {"api_version" : "0.9.4", "sys_version" : "PostgreSQL 16.10", "mobilitydb" : "1.2.0", "timescaledb" : "2.21.3", "postgis" : "3.5.3", "postgrest" : "PostgREST 13.0.6"}

-[ RECORD 1 ]------------------
api_version | 0.9.1
sys_version | PostgreSQL 16.8
timescaledb | 2.19.3
postgis | 3.5.2
postgrest | PostgREST 12.2.12
-[ RECORD 1 ]-----------------
api_version | 0.9.4
sys_version | PostgreSQL 16.10
timescaledb | 2.21.3
postgis | 3.5.3
postgrest | PostgREST 13.0.6
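
The expected version report changes above reflect the component bumps in this range (API 0.9.1 → 0.9.4, PostgreSQL 16.8 → 16.10, TimescaleDB 2.19.3 → 2.21.3, PostGIS 3.5.2 → 3.5.3, PostgREST 12.2.12 → 13.0.6). A minimal sketch of how the same snapshot could be pulled locally; the `api.` schema qualification is an assumption:

```bash
# Sketch only: fetch the component version report shown above.
# The api schema qualification is an assumption.
psql "${PGSAIL_DB_URI}" -x -c "SELECT api.versions_fn();"
```
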
@@ -14,15 +14,6 @@ if [[ ! -x "/usr/bin/psql" ]]; then
apt update && apt -y install postgresql-client
fi

# go install
if [[ ! -x "/usr/bin/go" || ! -x "/root/go/bin/mermerd" ]]; then
#wget -q https://go.dev/dl/go1.21.4.linux-arm64.tar.gz && \
#rm -rf /usr/local/go && tar -C /usr/local -xzf go1.21.4.linux-arm64.tar.gz && \
apt update && apt -y install golang-go && \
#go install github.com/KarnerTh/mermerd@latest require latest go version
go install github.com/KarnerTh/mermerd@v0.11.0
fi

# pnpm install
if [[ ! -x "/usr/local/bin/pnpm" ]]; then
npm install -g pnpm
@@ -139,6 +130,19 @@ else
exit
fi

# Stays extended unit tests
psql ${PGSAIL_DB_URI} < sql/stays_ext.sql > output/stays_ext.sql.output
diff sql/stays_ext.sql.output output/stays_ext.sql.output > /dev/null
#diff -u sql/stays_ext.sql.output output/stays_ext.sql.output | wc -l
#echo 0
if [ $? -eq 0 ]; then
echo OK
else
echo SQL stays_ext.sql FAILED
diff -u sql/stays_ext.sql.output output/stays_ext.sql.output
exit 1
fi

# Summary unit tests
psql ${PGSAIL_DB_URI} < sql/summary.sql > output/summary.sql.output
diff sql/summary.sql.output output/summary.sql.output > /dev/null
@@ -218,17 +222,17 @@ else
fi

# Stats SQL unit tests
#psql ${PGSAIL_DB_URI} < sql/stats.sql > output/stats.sql.output
#diff sql/stats.sql.output output/stats.sql.output > /dev/null
psql ${PGSAIL_DB_URI} < sql/stats.sql > output/stats.sql.output
diff sql/stats.sql.output output/stats.sql.output > /dev/null
#diff -u sql/stats.sql.output output/stats.sql.output | wc -l
#echo 0
#if [ $? -eq 0 ]; then
# echo SQL stats.sql OK
#else
# echo SQL stats.sql FAILED
# diff -u sql/stats.sql.output output/stats.sql.output
# exit 1
#fi
if [ $? -eq 0 ]; then
echo SQL stats.sql OK
else
echo SQL stats.sql FAILED
diff -u sql/stats.sql.output output/stats.sql.output
exit 1
fi

# MobilityDB SQL unit tests
psql ${PGSAIL_DB_URI} < sql/mobilitydb.sql > output/mobilitydb.sql.output
@@ -279,17 +283,3 @@ else
echo openapi.json FAILED
exit 1
fi

# Generate and update mermaid schema documentation
/root/go/bin/mermerd --runConfig ../docs/ERD/mermerdConfig.yaml
#echo $?
echo 0 # not working in github-actions
if [ $? -eq 0 ]; then
cp postgsail.md ../docs/ERD/postgsail.md
echo postgsail.md OK
else
echo postgsail.md FAILED
exit 1
fi

#npm i -D schemalint && npx schemalint
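
Each SQL suite in the script above follows the same run-and-diff pattern: execute the .sql file with psql, capture the output, and compare it against the committed expected output. A minimal sketch of that pattern as a reusable helper; the `run_sql_test` name is mine and not part of the repository's script:

```bash
#!/usr/bin/env bash
# Sketch only: the run-and-diff pattern used for each SQL suite above.
# The run_sql_test helper is an assumption, not the repo's actual script.

run_sql_test() {
  local name="$1"
  # Run the suite and capture its output.
  psql "${PGSAIL_DB_URI}" < "sql/${name}.sql" > "output/${name}.sql.output"
  # Compare against the committed expected output; fail fast on any difference.
  if diff "sql/${name}.sql.output" "output/${name}.sql.output" > /dev/null; then
    echo "SQL ${name}.sql OK"
  else
    echo "SQL ${name}.sql FAILED"
    diff -u "sql/${name}.sql.output" "output/${name}.sql.output"
    exit 1
  fi
}

run_sql_test stays_ext
run_sql_test summary
run_sql_test stats
```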