Compare commits
116 Commits
Author | SHA1 | Date | |
---|---|---|---|
![]() |
b150d9706f | ||
![]() |
813460da7b | ||
![]() |
813b8088f3 | ||
![]() |
f90911c523 | ||
![]() |
790bbb671c | ||
![]() |
5455d8246f | ||
![]() |
95d24c538d | ||
![]() |
57799c9ee4 | ||
![]() |
437bfd0252 | ||
![]() |
dcceab2551 | ||
![]() |
cdc2e4e55c | ||
![]() |
76bbe29567 | ||
![]() |
36b8eece52 | ||
![]() |
a5436479cf | ||
![]() |
23ea3bd0d8 | ||
![]() |
826566e097 | ||
![]() |
f942076cc2 | ||
![]() |
8dba0c21b6 | ||
![]() |
a96160ef15 | ||
![]() |
ccf91bb832 | ||
![]() |
b9993ed28f | ||
![]() |
0e5e619625 | ||
![]() |
294a60d13a | ||
![]() |
b6587b1287 | ||
![]() |
74512d0bf3 | ||
![]() |
322a479b4f | ||
![]() |
c5cba6a59f | ||
![]() |
22466430ac | ||
![]() |
643c16ad3f | ||
![]() |
59a3c41b4a | ||
![]() |
371eb6c720 | ||
![]() |
c2ffe9777c | ||
![]() |
c0261791f5 | ||
![]() |
e727954f83 | ||
![]() |
22b04334f8 | ||
![]() |
a5ec4c0039 | ||
![]() |
6a63f7d02f | ||
![]() |
2fec2e650c | ||
![]() |
ed8514bfb1 | ||
![]() |
f25e735674 | ||
![]() |
c1b71cabd8 | ||
![]() |
495e25b838 | ||
![]() |
a547271496 | ||
![]() |
3a1d0baef8 | ||
![]() |
628de57b5f | ||
![]() |
9a5f27d21e | ||
![]() |
7892b615e0 | ||
![]() |
2c2f5d8605 | ||
![]() |
47eda3dcaf | ||
![]() |
0c0279767f | ||
![]() |
98e28aacea | ||
![]() |
b04d336c0d | ||
![]() |
288c458c5a | ||
![]() |
d3dd46c834 | ||
![]() |
d0bc468ce7 | ||
![]() |
3f51e89303 | ||
![]() |
000c5651e2 | ||
![]() |
4bec738826 | ||
![]() |
012812c898 | ||
![]() |
1c04822cf8 | ||
![]() |
50f018100b | ||
![]() |
13c461a038 | ||
![]() |
8763448523 | ||
![]() |
e02aaf3676 | ||
![]() |
ae61072ba4 | ||
![]() |
242a5554ea | ||
![]() |
39888e1957 | ||
![]() |
666f69c42a | ||
![]() |
77f41251c5 | ||
![]() |
40a1e0fa39 | ||
![]() |
5cf2d10757 | ||
![]() |
0dd6410589 | ||
![]() |
682c68a108 | ||
![]() |
5d1db984b8 | ||
![]() |
0a09d7bbfc | ||
![]() |
14cc4f5ed2 | ||
![]() |
ff23f5c2ad | ||
![]() |
489fb9562b | ||
![]() |
8c32345342 | ||
![]() |
480417917d | ||
![]() |
e557ed49a5 | ||
![]() |
a3475dfe99 | ||
![]() |
e670e11cd5 | ||
![]() |
88de5003c2 | ||
![]() |
c46a428fc2 | ||
![]() |
db3bd6b06f | ||
![]() |
4ea7a1b019 | ||
![]() |
ce106074dc | ||
![]() |
e7d8229e83 | ||
![]() |
f14342bb07 | ||
![]() |
c4fbf7682d | ||
![]() |
f8c1f43f48 | ||
![]() |
0d5089af2d | ||
![]() |
da1952ed31 | ||
![]() |
a5d5585366 | ||
![]() |
5f9a889a44 | ||
![]() |
f9719bd174 | ||
![]() |
8d1b8cb389 | ||
![]() |
acfd058d3b | ||
![]() |
eeae7c40c6 | ||
![]() |
2bbf27f3ad | ||
![]() |
2ba81a935f | ||
![]() |
0fbac67895 | ||
![]() |
228b234582 | ||
![]() |
75c8a9506a | ||
![]() |
2b48a66cd2 | ||
![]() |
e642049e93 | ||
![]() |
94e123c95e | ||
![]() |
9787328990 | ||
![]() |
de62d936d5 | ||
![]() |
293a33da08 | ||
![]() |
2b105db5c7 | ||
![]() |
af003d5a62 | ||
![]() |
ecc9fd6d9f | ||
![]() |
df5f667b41 | ||
![]() |
1bfa04a057 |
@@ -5,6 +5,7 @@ Effortless cloud based solution for storing and sharing your SignalK data. Allow
|
|||||||
[](https://github.com/xbgmsharp/postgsail/releases/latest)
|
[](https://github.com/xbgmsharp/postgsail/releases/latest)
|
||||||
[](#license)
|
[](#license)
|
||||||
[](https://github.com/xbgmsharp/postgsail/issues)
|
[](https://github.com/xbgmsharp/postgsail/issues)
|
||||||
|
[](http://makeapullrequest.com)
|
||||||
|
|
||||||
[](https://github.com/xbgmsharp/postgsail/actions/workflows/db-test.yml)
|
[](https://github.com/xbgmsharp/postgsail/actions/workflows/db-test.yml)
|
||||||
[](https://github.com/xbgmsharp/postgsail/actions/workflows/frontend-test.yml)
|
[](https://github.com/xbgmsharp/postgsail/actions/workflows/frontend-test.yml)
|
||||||
@@ -19,6 +20,8 @@ postgsail-frontend:
|
|||||||
postgsail-telegram-bot:
|
postgsail-telegram-bot:
|
||||||
[](https://github.com/xbgmsharp/postgsail-telegram-bot/releases/latest)
|
[](https://github.com/xbgmsharp/postgsail-telegram-bot/releases/latest)
|
||||||
|
|
||||||
|
[](https://www.bestpractices.dev/projects/8124)
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
- Automatically log your voyages without manually starting or stopping a trip.
|
- Automatically log your voyages without manually starting or stopping a trip.
|
||||||
@@ -35,6 +38,8 @@ postgsail-telegram-bot:
|
|||||||
- Offline mode.
|
- Offline mode.
|
||||||
- Low Bandwidth mode.
|
- Low Bandwidth mode.
|
||||||
- Awesome statistics and graphs.
|
- Awesome statistics and graphs.
|
||||||
|
- Create and manage your own dashboards.
|
||||||
|
- Windy PWS (Personal Weather Station).
|
||||||
- Anything missing? just ask!
|
- Anything missing? just ask!
|
||||||
|
|
||||||
## Context
|
## Context
|
||||||
|
@@ -48,6 +48,9 @@ services:
|
|||||||
PGRST_DB_POOL: 20
|
PGRST_DB_POOL: 20
|
||||||
PGRST_DB_URI: ${PGRST_DB_URI}
|
PGRST_DB_URI: ${PGRST_DB_URI}
|
||||||
PGRST_JWT_SECRET: ${PGRST_JWT_SECRET}
|
PGRST_JWT_SECRET: ${PGRST_JWT_SECRET}
|
||||||
|
PGRST_SERVER_TIMING_ENABLED: 1
|
||||||
|
PGRST_DB_MAX_ROWS: 500
|
||||||
|
PGRST_JWT_CACHE_MAX_LIFETIME: 3600
|
||||||
depends_on:
|
depends_on:
|
||||||
- db
|
- db
|
||||||
logging:
|
logging:
|
||||||
@@ -75,10 +78,9 @@ services:
|
|||||||
env_file: .env
|
env_file: .env
|
||||||
environment:
|
environment:
|
||||||
- GF_INSTALL_PLUGINS=pr0ps-trackmap-panel,fatcloud-windrose-panel
|
- GF_INSTALL_PLUGINS=pr0ps-trackmap-panel,fatcloud-windrose-panel
|
||||||
|
- GF_SECURITY_ADMIN_PASSWORD=${PGSAIL_GRAFANA_PASSWORD}
|
||||||
- GF_USERS_ALLOW_SIGN_UP=false
|
- GF_USERS_ALLOW_SIGN_UP=false
|
||||||
- GF_SMTP_ENABLED=false
|
- GF_SMTP_ENABLED=false
|
||||||
- PGSAIL_GRAFANA_URI=db:5432
|
|
||||||
- PGSAIL_GRAFANA_PASSWORD=${PGSAIL_GRAFANA_PASSWORD}
|
|
||||||
depends_on:
|
depends_on:
|
||||||
- db
|
- db
|
||||||
logging:
|
logging:
|
||||||
|
@@ -4,7 +4,7 @@ The Entity-Relationship Diagram (ERD) provides a graphical representation of dat
|
|||||||
## A global overview
|
## A global overview
|
||||||
Auto generated Mermaid diagram using [mermerd](https://github.com/KarnerTh/mermerd) and [MermaidJs](https://github.com/mermaid-js/mermaid).
|
Auto generated Mermaid diagram using [mermerd](https://github.com/KarnerTh/mermerd) and [MermaidJs](https://github.com/mermaid-js/mermaid).
|
||||||
|
|
||||||
[PostgSail SQL Schema](https://github.com/xbgmsharp/postgsail/tree/main/ERD/postgsail.md "PostgSail SQL Schema")
|
[PostgSail SQL Schema](https://github.com/xbgmsharp/postgsail/tree/main/docs/ERD/postgsail.md "PostgSail SQL Schema")
|
||||||
|
|
||||||
## Further
|
## Further
|
||||||
There is 3 main schemas:
|
There is 3 main schemas:
|
@@ -32,12 +32,14 @@ erDiagram
|
|||||||
boolean active
|
boolean active
|
||||||
double_precision beam
|
double_precision beam
|
||||||
text client_id
|
text client_id
|
||||||
|
text configuration
|
||||||
timestamp_with_time_zone created_at "{NOT_NULL}"
|
timestamp_with_time_zone created_at "{NOT_NULL}"
|
||||||
double_precision height
|
double_precision height
|
||||||
integer id "{NOT_NULL}"
|
integer id "{NOT_NULL}"
|
||||||
double_precision length
|
double_precision length
|
||||||
numeric mmsi
|
numeric mmsi
|
||||||
text name
|
text name
|
||||||
|
text platform
|
||||||
text plugin_version "{NOT_NULL}"
|
text plugin_version "{NOT_NULL}"
|
||||||
numeric ship_type
|
numeric ship_type
|
||||||
text signalk_version "{NOT_NULL}"
|
text signalk_version "{NOT_NULL}"
|
||||||
@@ -55,7 +57,7 @@ erDiagram
|
|||||||
double_precision longitude "With CONSTRAINT but allow NULL value to be ignored silently by trigger"
|
double_precision longitude "With CONSTRAINT but allow NULL value to be ignored silently by trigger"
|
||||||
jsonb metrics
|
jsonb metrics
|
||||||
double_precision speedoverground
|
double_precision speedoverground
|
||||||
status status "<sailing,motoring,moored,anchored>"
|
text status
|
||||||
timestamp_with_time_zone time "{NOT_NULL}"
|
timestamp_with_time_zone time "{NOT_NULL}"
|
||||||
text vessel_id "{NOT_NULL}"
|
text vessel_id "{NOT_NULL}"
|
||||||
double_precision windspeedapparent
|
double_precision windspeedapparent
|
||||||
@@ -104,10 +106,10 @@ erDiagram
|
|||||||
timestamp_with_time_zone created_at "{NOT_NULL}"
|
timestamp_with_time_zone created_at "{NOT_NULL}"
|
||||||
citext email "{NOT_NULL}"
|
citext email "{NOT_NULL}"
|
||||||
text first "User first name with CONSTRAINT CHECK {NOT_NULL}"
|
text first "User first name with CONSTRAINT CHECK {NOT_NULL}"
|
||||||
|
integer id "{NOT_NULL}"
|
||||||
text last "User last name with CONSTRAINT CHECK {NOT_NULL}"
|
text last "User last name with CONSTRAINT CHECK {NOT_NULL}"
|
||||||
text pass "{NOT_NULL}"
|
text pass "{NOT_NULL}"
|
||||||
jsonb preferences
|
jsonb preferences
|
||||||
integer public_id "{NOT_NULL}"
|
|
||||||
name role "{NOT_NULL}"
|
name role "{NOT_NULL}"
|
||||||
timestamp_with_time_zone updated_at "{NOT_NULL}"
|
timestamp_with_time_zone updated_at "{NOT_NULL}"
|
||||||
text user_id "{NOT_NULL}"
|
text user_id "{NOT_NULL}"
|
||||||
@@ -120,9 +122,22 @@ erDiagram
|
|||||||
citext user_email "{NOT_NULL}"
|
citext user_email "{NOT_NULL}"
|
||||||
}
|
}
|
||||||
|
|
||||||
|
auth_users {
|
||||||
|
timestamp_with_time_zone connected_at "{NOT_NULL}"
|
||||||
|
timestamp_with_time_zone created_at "{NOT_NULL}"
|
||||||
|
name email "{NOT_NULL}"
|
||||||
|
text first "{NOT_NULL}"
|
||||||
|
name id "{NOT_NULL}"
|
||||||
|
text last "{NOT_NULL}"
|
||||||
|
jsonb preferences
|
||||||
|
name role "{NOT_NULL}"
|
||||||
|
timestamp_with_time_zone updated_at "{NOT_NULL}"
|
||||||
|
text user_id "{NOT_NULL}"
|
||||||
|
}
|
||||||
|
|
||||||
auth_vessels {
|
auth_vessels {
|
||||||
timestamp_with_time_zone created_at "{NOT_NULL}"
|
timestamp_with_time_zone created_at "{NOT_NULL}"
|
||||||
numeric mmsi
|
numeric mmsi "MMSI can be optional but if present must be a valid one and unique but must be in numeric range between 100000000 and 800000000"
|
||||||
text name "{NOT_NULL}"
|
text name "{NOT_NULL}"
|
||||||
citext owner_email "{NOT_NULL}"
|
citext owner_email "{NOT_NULL}"
|
||||||
name role "{NOT_NULL}"
|
name role "{NOT_NULL}"
|
Before Width: | Height: | Size: 360 KiB After Width: | Height: | Size: 360 KiB |
Before Width: | Height: | Size: 222 KiB After Width: | Height: | Size: 222 KiB |
Before Width: | Height: | Size: 18 KiB After Width: | Height: | Size: 18 KiB |
Before Width: | Height: | Size: 195 KiB After Width: | Height: | Size: 195 KiB |
2
docs/README.md
Normal file
@@ -0,0 +1,2 @@
|
|||||||
|
|
||||||
|
Simple and scalable architecture.
|
2
frontend
@@ -1689,7 +1689,7 @@
|
|||||||
},
|
},
|
||||||
"timezone": "utc",
|
"timezone": "utc",
|
||||||
"title": "Electrical System",
|
"title": "Electrical System",
|
||||||
"uid": "rk0FTiIMk",
|
"uid": "pgsail_tpl_electrical",
|
||||||
"version": 11,
|
"version": 11,
|
||||||
"weekStart": ""
|
"weekStart": ""
|
||||||
}
|
}
|
@@ -466,7 +466,7 @@
|
|||||||
"timepicker": {},
|
"timepicker": {},
|
||||||
"timezone": "utc",
|
"timezone": "utc",
|
||||||
"title": "Logbook",
|
"title": "Logbook",
|
||||||
"uid": "E_FUkx9nk",
|
"uid": "pgsail_tpl_logbook",
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"weekStart": ""
|
"weekStart": ""
|
||||||
}
|
}
|
@@ -732,7 +732,7 @@
|
|||||||
},
|
},
|
||||||
"timezone": "utc",
|
"timezone": "utc",
|
||||||
"title": "Monitor",
|
"title": "Monitor",
|
||||||
"uid": "apqDcPjMz",
|
"uid": "pgsail_tpl_monitor",
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"weekStart": ""
|
"weekStart": ""
|
||||||
}
|
}
|
@@ -1335,7 +1335,7 @@
|
|||||||
},
|
},
|
||||||
"timezone": "",
|
"timezone": "",
|
||||||
"title": "RPI System",
|
"title": "RPI System",
|
||||||
"uid": "4kxYm6j7k",
|
"uid": "pgsail_tpl_rpi",
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"weekStart": ""
|
"weekStart": ""
|
||||||
}
|
}
|
@@ -629,7 +629,7 @@
|
|||||||
},
|
},
|
||||||
"timezone": "utc",
|
"timezone": "utc",
|
||||||
"title": "Solar System",
|
"title": "Solar System",
|
||||||
"uid": "62bzzlr7z",
|
"uid": "pgsail_tpl_solar",
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"weekStart": ""
|
"weekStart": ""
|
||||||
}
|
}
|
@@ -1981,7 +1981,7 @@
|
|||||||
},
|
},
|
||||||
"timezone": "utc",
|
"timezone": "utc",
|
||||||
"title": "Weather",
|
"title": "Weather",
|
||||||
"uid": "631a97c2e",
|
"uid": "pgsail_tpl_weather",
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"weekStart": ""
|
"weekStart": ""
|
||||||
}
|
}
|
@@ -204,7 +204,7 @@
|
|||||||
"editorMode": "code",
|
"editorMode": "code",
|
||||||
"format": "table",
|
"format": "table",
|
||||||
"rawQuery": true,
|
"rawQuery": true,
|
||||||
"rawSql": "SELECT latitude, longitude FROM api.metrics WHERE vessel_id = '${boat}' ORDER BY time ASC LIMIT 1;",
|
"rawSql": "SELECT latitude, longitude FROM api.metrics WHERE vessel_id = '${boat}' ORDER BY time DESC LIMIT 1;",
|
||||||
"refId": "A",
|
"refId": "A",
|
||||||
"sql": {
|
"sql": {
|
||||||
"columns": [
|
"columns": [
|
||||||
@@ -291,7 +291,7 @@
|
|||||||
},
|
},
|
||||||
"timezone": "browser",
|
"timezone": "browser",
|
||||||
"title": "Home",
|
"title": "Home",
|
||||||
"uid": "d81aa15b",
|
"uid": "pgsail_tpl_home",
|
||||||
"version": 1,
|
"version": 1,
|
||||||
"weekStart": ""
|
"weekStart": ""
|
||||||
}
|
}
|
@@ -3,19 +3,22 @@ allow_sign_up = false
|
|||||||
auto_assign_org = true
|
auto_assign_org = true
|
||||||
auto_assign_org_role = Editor
|
auto_assign_org_role = Editor
|
||||||
|
|
||||||
[auth.proxy]
|
|
||||||
enabled = true
|
|
||||||
header_name = X-WEBAUTH-USER
|
|
||||||
header_property = email
|
|
||||||
headers = Login:X-WEBAUTH-LOGIN
|
|
||||||
auto_sign_up = true
|
|
||||||
enable_login_token = true
|
|
||||||
login_maximum_inactive_lifetime_duration = 12h
|
|
||||||
login_maximum_lifetime_duration = 1d
|
|
||||||
|
|
||||||
[dashboards]
|
[dashboards]
|
||||||
default_home_dashboard_path = /etc/grafana/dashboards/home.json
|
default_home_dashboard_path = /etc/grafana/dashboards/tpl/home.json
|
||||||
|
min_refresh_interval = 1m
|
||||||
|
|
||||||
|
[alerting]
|
||||||
|
enabled = false
|
||||||
|
|
||||||
|
[unified_alerting]
|
||||||
|
enabled = false
|
||||||
|
|
||||||
[analytics]
|
[analytics]
|
||||||
feedback_links_enabled = false
|
feedback_links_enabled = false
|
||||||
reporting_enabled = false
|
reporting_enabled = false
|
||||||
|
|
||||||
|
[news]
|
||||||
|
news_feed_enabled = false
|
||||||
|
|
||||||
|
[help]
|
||||||
|
enabled = false
|
||||||
|
@@ -20,6 +20,6 @@ providers:
|
|||||||
allowUiUpdates: true
|
allowUiUpdates: true
|
||||||
options:
|
options:
|
||||||
# <string, required> path to dashboard files on disk. Required when using the 'file' type
|
# <string, required> path to dashboard files on disk. Required when using the 'file' type
|
||||||
path: /etc/grafana/dashboards/
|
path: /etc/grafana/dashboards/tpl/
|
||||||
# <bool> use folder names from filesystem to create folders in Grafana
|
# <bool> use folder names from filesystem to create folders in Grafana
|
||||||
foldersFromFilesStructure: true
|
foldersFromFilesStructure: true
|
||||||
|
@@ -21,6 +21,8 @@ CREATE TABLE IF NOT EXISTS api.metadata(
|
|||||||
plugin_version TEXT NOT NULL,
|
plugin_version TEXT NOT NULL,
|
||||||
signalk_version TEXT NOT NULL,
|
signalk_version TEXT NOT NULL,
|
||||||
time TIMESTAMPTZ NOT NULL, -- should be rename to last_update !?
|
time TIMESTAMPTZ NOT NULL, -- should be rename to last_update !?
|
||||||
|
platform TEXT NULL,
|
||||||
|
configuration TEXT NULL,
|
||||||
active BOOLEAN DEFAULT True, -- trigger monitor online/offline
|
active BOOLEAN DEFAULT True, -- trigger monitor online/offline
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
@@ -28,18 +30,16 @@ CREATE TABLE IF NOT EXISTS api.metadata(
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON TABLE
|
COMMENT ON TABLE
|
||||||
api.metadata
|
api.metadata
|
||||||
IS 'Stores metadata from vessel';
|
IS 'Stores metadata received from vessel, aka signalk plugin';
|
||||||
COMMENT ON COLUMN api.metadata.active IS 'trigger monitor online/offline';
|
COMMENT ON COLUMN api.metadata.active IS 'trigger monitor online/offline';
|
||||||
-- Index
|
COMMENT ON COLUMN api.metadata.vessel_id IS 'vessel_id link auth.vessels with api.metadata';
|
||||||
CREATE INDEX metadata_vessel_id_idx ON api.metadata (vessel_id);
|
-- Duplicate Indexes
|
||||||
--CREATE INDEX metadata_mmsi_idx ON api.metadata (mmsi);
|
--CREATE INDEX metadata_vessel_id_idx ON api.metadata (vessel_id);
|
||||||
-- is unused index ?
|
|
||||||
CREATE INDEX metadata_name_idx ON api.metadata (name);
|
|
||||||
|
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- Metrics from signalk
|
-- Metrics from signalk
|
||||||
-- Create vessel status enum
|
-- Create vessel status enum
|
||||||
CREATE TYPE status AS ENUM ('sailing', 'motoring', 'moored', 'anchored');
|
CREATE TYPE status_type AS ENUM ('sailing', 'motoring', 'moored', 'anchored');
|
||||||
-- Table api.metrics
|
-- Table api.metrics
|
||||||
CREATE TABLE IF NOT EXISTS api.metrics (
|
CREATE TABLE IF NOT EXISTS api.metrics (
|
||||||
time TIMESTAMPTZ NOT NULL,
|
time TIMESTAMPTZ NOT NULL,
|
||||||
@@ -51,8 +51,8 @@ CREATE TABLE IF NOT EXISTS api.metrics (
|
|||||||
courseOverGroundTrue DOUBLE PRECISION NULL,
|
courseOverGroundTrue DOUBLE PRECISION NULL,
|
||||||
windSpeedApparent DOUBLE PRECISION NULL,
|
windSpeedApparent DOUBLE PRECISION NULL,
|
||||||
angleSpeedApparent DOUBLE PRECISION NULL,
|
angleSpeedApparent DOUBLE PRECISION NULL,
|
||||||
status status NULL,
|
status TEXT NULL,
|
||||||
metrics jsonb NULL,
|
metrics JSONB NULL,
|
||||||
--CONSTRAINT valid_client_id CHECK (length(client_id) > 10),
|
--CONSTRAINT valid_client_id CHECK (length(client_id) > 10),
|
||||||
--CONSTRAINT valid_latitude CHECK (latitude >= -90 and latitude <= 90),
|
--CONSTRAINT valid_latitude CHECK (latitude >= -90 and latitude <= 90),
|
||||||
--CONSTRAINT valid_longitude CHECK (longitude >= -180 and longitude <= 180),
|
--CONSTRAINT valid_longitude CHECK (longitude >= -180 and longitude <= 180),
|
||||||
@@ -131,6 +131,8 @@ COMMENT ON COLUMN api.logbook.duration IS 'Best to use standard ISO 8601';
|
|||||||
|
|
||||||
-- Index todo!
|
-- Index todo!
|
||||||
CREATE INDEX logbook_vessel_id_idx ON api.logbook (vessel_id);
|
CREATE INDEX logbook_vessel_id_idx ON api.logbook (vessel_id);
|
||||||
|
CREATE INDEX logbook_from_time_idx ON api.logbook (_from_time);
|
||||||
|
CREATE INDEX logbook_to_time_idx ON api.logbook (_to_time);
|
||||||
CREATE INDEX logbook_from_moorage_id_idx ON api.logbook (_from_moorage_id);
|
CREATE INDEX logbook_from_moorage_id_idx ON api.logbook (_from_moorage_id);
|
||||||
CREATE INDEX logbook_to_moorage_id_idx ON api.logbook (_to_moorage_id);
|
CREATE INDEX logbook_to_moorage_id_idx ON api.logbook (_to_moorage_id);
|
||||||
CREATE INDEX ON api.logbook USING GIST ( track_geom );
|
CREATE INDEX ON api.logbook USING GIST ( track_geom );
|
||||||
@@ -162,6 +164,7 @@ CREATE TABLE IF NOT EXISTS api.stays(
|
|||||||
COMMENT ON TABLE
|
COMMENT ON TABLE
|
||||||
api.stays
|
api.stays
|
||||||
IS 'Stores generated stays';
|
IS 'Stores generated stays';
|
||||||
|
COMMENT ON COLUMN api.stays.duration IS 'Best to use standard ISO 8601';
|
||||||
|
|
||||||
-- Index
|
-- Index
|
||||||
CREATE INDEX stays_vessel_id_idx ON api.stays (vessel_id);
|
CREATE INDEX stays_vessel_id_idx ON api.stays (vessel_id);
|
||||||
@@ -169,7 +172,6 @@ CREATE INDEX stays_moorage_id_idx ON api.stays (moorage_id);
|
|||||||
CREATE INDEX ON api.stays USING GIST ( geog );
|
CREATE INDEX ON api.stays USING GIST ( geog );
|
||||||
COMMENT ON COLUMN api.stays.geog IS 'postgis geography type default SRID 4326 Unit: degres';
|
COMMENT ON COLUMN api.stays.geog IS 'postgis geography type default SRID 4326 Unit: degres';
|
||||||
-- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography.
|
-- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography.
|
||||||
COMMENT ON COLUMN api.stays.duration IS 'Best to use standard ISO 8601';
|
|
||||||
|
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- Moorages
|
-- Moorages
|
||||||
@@ -256,6 +258,8 @@ CREATE FUNCTION metadata_upsert_trigger_fn() RETURNS trigger AS $metadata_upsert
|
|||||||
ship_type = NEW.ship_type,
|
ship_type = NEW.ship_type,
|
||||||
plugin_version = NEW.plugin_version,
|
plugin_version = NEW.plugin_version,
|
||||||
signalk_version = NEW.signalk_version,
|
signalk_version = NEW.signalk_version,
|
||||||
|
platform = NEW.platform,
|
||||||
|
configuration = NEW.configuration,
|
||||||
-- time = NEW.time, ignore the time sent by the vessel as it is out of sync sometimes.
|
-- time = NEW.time, ignore the time sent by the vessel as it is out of sync sometimes.
|
||||||
time = NOW(), -- overwrite the time sent by the vessel
|
time = NOW(), -- overwrite the time sent by the vessel
|
||||||
active = true
|
active = true
|
||||||
@@ -303,6 +307,22 @@ COMMENT ON FUNCTION
|
|||||||
public.metadata_notification_trigger_fn
|
public.metadata_notification_trigger_fn
|
||||||
IS 'process metadata notification from vessel, monitoring_online';
|
IS 'process metadata notification from vessel, monitoring_online';
|
||||||
|
|
||||||
|
-- FUNCTION Metadata grafana provisioning for new vessel after insert
|
||||||
|
DROP FUNCTION IF EXISTS metadata_grafana_trigger_fn;
|
||||||
|
CREATE FUNCTION metadata_grafana_trigger_fn() RETURNS trigger AS $metadata_grafana$
|
||||||
|
DECLARE
|
||||||
|
BEGIN
|
||||||
|
RAISE NOTICE 'metadata_grafana_trigger_fn [%]', NEW;
|
||||||
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
|
VALUES ('grafana', NEW.id, now(), NEW.vessel_id);
|
||||||
|
RETURN NULL;
|
||||||
|
END;
|
||||||
|
$metadata_grafana$ LANGUAGE plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.metadata_grafana_trigger_fn
|
||||||
|
IS 'process metadata grafana provisioning from vessel';
|
||||||
|
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- Trigger metadata table
|
-- Trigger metadata table
|
||||||
--
|
--
|
||||||
@@ -320,7 +340,15 @@ CREATE TRIGGER metadata_notification_trigger AFTER INSERT ON api.metadata
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON TRIGGER
|
COMMENT ON TRIGGER
|
||||||
metadata_notification_trigger ON api.metadata
|
metadata_notification_trigger ON api.metadata
|
||||||
IS 'AFTER INSERT ON api.metadata run function metadata_update_trigger_fn for notification on new vessel';
|
IS 'AFTER INSERT ON api.metadata run function metadata_notification_trigger_fn for later notification on new vessel';
|
||||||
|
|
||||||
|
-- Metadata trigger AFTER INSERT
|
||||||
|
CREATE TRIGGER metadata_grafana_trigger AFTER INSERT ON api.metadata
|
||||||
|
FOR EACH ROW EXECUTE FUNCTION metadata_grafana_trigger_fn();
|
||||||
|
-- Description
|
||||||
|
COMMENT ON TRIGGER
|
||||||
|
metadata_grafana_trigger ON api.metadata
|
||||||
|
IS 'AFTER INSERT ON api.metadata run function metadata_grafana_trigger_fn for later grafana provisioning on new vessel';
|
||||||
|
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- Trigger Functions metrics table
|
-- Trigger Functions metrics table
|
||||||
@@ -430,10 +458,10 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
|
|||||||
RAISE WARNING 'Metrics Insert first stay as no previous metrics exist, stay_id stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
|
RAISE WARNING 'Metrics Insert first stay as no previous metrics exist, stay_id stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
|
||||||
END IF;
|
END IF;
|
||||||
-- Check if status is valid enum
|
-- Check if status is valid enum
|
||||||
SELECT NEW.status::name = any(enum_range(null::status)::name[]) INTO valid_status;
|
SELECT NEW.status::name = any(enum_range(null::status_type)::name[]) INTO valid_status;
|
||||||
IF valid_status IS False THEN
|
IF valid_status IS False THEN
|
||||||
-- Ignore entry if status is invalid
|
-- Ignore entry if status is invalid
|
||||||
RAISE WARNING 'Metrics Ignoring metric, invalid status [%]', NEW.status;
|
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], invalid status [%]', NEW.vessel_id, NEW.status;
|
||||||
RETURN NULL;
|
RETURN NULL;
|
||||||
END IF;
|
END IF;
|
||||||
-- Check if speedOverGround is valid value
|
-- Check if speedOverGround is valid value
|
||||||
@@ -478,7 +506,7 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
|
|||||||
WHERE id = stay_id;
|
WHERE id = stay_id;
|
||||||
-- Add stay entry to process queue for further processing
|
-- Add stay entry to process queue for further processing
|
||||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
VALUES ('new_stay', stay_id, now(), current_setting('vessel.id', true));
|
VALUES ('new_stay', stay_id, NOW(), current_setting('vessel.id', true));
|
||||||
RAISE WARNING 'Metrics Updating Stay end current stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
|
RAISE WARNING 'Metrics Updating Stay end current stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
|
||||||
ELSE
|
ELSE
|
||||||
RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time;
|
RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time;
|
||||||
@@ -529,7 +557,7 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
|
|||||||
WHERE id = logbook_id;
|
WHERE id = logbook_id;
|
||||||
-- Add logbook entry to process queue for later processing
|
-- Add logbook entry to process queue for later processing
|
||||||
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
VALUES ('new_logbook', logbook_id, now(), current_setting('vessel.id', true));
|
VALUES ('pre_logbook', logbook_id, NOW(), current_setting('vessel.id', true));
|
||||||
ELSE
|
ELSE
|
||||||
RAISE WARNING 'Metrics Invalid logbook_id [%] [%] [%]', logbook_id, NEW.status, NEW.time;
|
RAISE WARNING 'Metrics Invalid logbook_id [%] [%] [%]', logbook_id, NEW.status, NEW.time;
|
||||||
END IF;
|
END IF;
|
||||||
@@ -540,7 +568,7 @@ $metrics$ LANGUAGE plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.metrics_trigger_fn
|
public.metrics_trigger_fn
|
||||||
IS 'process metrics from vessel, generate new_logbook and new_stay.';
|
IS 'process metrics from vessel, generate pre_logbook and new_stay.';
|
||||||
|
|
||||||
--
|
--
|
||||||
-- Triggers logbook update on metrics insert
|
-- Triggers logbook update on metrics insert
|
||||||
@@ -603,4 +631,64 @@ CREATE TRIGGER moorage_delete_trigger BEFORE DELETE ON api.moorages
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON TRIGGER moorage_delete_trigger
|
COMMENT ON TRIGGER moorage_delete_trigger
|
||||||
ON api.moorages
|
ON api.moorages
|
||||||
IS 'Automatic update of name and stay_code on logbook and stays reference';
|
IS 'Automatic delete logbook and stays reference when delete a moorage';
|
||||||
|
|
||||||
|
-- Function process_new on completed logbook
|
||||||
|
DROP FUNCTION IF EXISTS logbook_completed_trigger_fn;
|
||||||
|
CREATE FUNCTION logbook_completed_trigger_fn() RETURNS trigger AS $logbook_completed$
|
||||||
|
DECLARE
|
||||||
|
BEGIN
|
||||||
|
RAISE NOTICE 'logbook_completed_trigger_fn [%]', OLD;
|
||||||
|
RAISE NOTICE 'logbook_completed_trigger_fn [%] [%]', OLD._to_time, NEW._to_time;
|
||||||
|
-- Add logbook entry to process queue for later processing
|
||||||
|
--IF ( OLD._to_time <> NEW._to_time ) THEN
|
||||||
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
|
VALUES ('new_logbook', NEW.id, NOW(), current_setting('vessel.id', true));
|
||||||
|
--END IF;
|
||||||
|
RETURN OLD; -- result is ignored since this is an AFTER trigger
|
||||||
|
END;
|
||||||
|
$logbook_completed$ LANGUAGE plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.logbook_completed_trigger_fn
|
||||||
|
IS 'Automatic process_queue for completed logbook._to_time';
|
||||||
|
|
||||||
|
-- Triggers logbook completed
|
||||||
|
--CREATE TRIGGER logbook_completed_trigger AFTER UPDATE ON api.logbook
|
||||||
|
-- FOR EACH ROW
|
||||||
|
-- WHEN (OLD._to_time IS DISTINCT FROM NEW._to_time)
|
||||||
|
-- EXECUTE FUNCTION logbook_completed_trigger_fn();
|
||||||
|
-- Description
|
||||||
|
--COMMENT ON TRIGGER logbook_completed_trigger
|
||||||
|
-- ON api.logbook
|
||||||
|
-- IS 'Automatic process_queue for completed logbook';
|
||||||
|
|
||||||
|
-- Function process_new on completed Stay
|
||||||
|
DROP FUNCTION IF EXISTS stay_completed_trigger_fn;
|
||||||
|
CREATE FUNCTION stay_completed_trigger_fn() RETURNS trigger AS $stay_completed$
|
||||||
|
DECLARE
|
||||||
|
BEGIN
|
||||||
|
RAISE NOTICE 'stay_completed_trigger_fn [%]', OLD;
|
||||||
|
RAISE NOTICE 'stay_completed_trigger_fn [%] [%]', OLD.departed, NEW.departed;
|
||||||
|
-- Add stay entry to process queue for later processing
|
||||||
|
--IF ( OLD.departed <> NEW.departed ) THEN
|
||||||
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
|
VALUES ('new_stay', NEW.id, NOW(), current_setting('vessel.id', true));
|
||||||
|
--END IF;
|
||||||
|
RETURN OLD; -- result is ignored since this is an AFTER trigger
|
||||||
|
END;
|
||||||
|
$stay_completed$ LANGUAGE plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.stay_completed_trigger_fn
|
||||||
|
IS 'Automatic process_queue for completed stay.departed';
|
||||||
|
|
||||||
|
-- Triggers stay completed
|
||||||
|
--CREATE TRIGGER stay_completed_trigger AFTER UPDATE ON api.stays
|
||||||
|
-- FOR EACH ROW
|
||||||
|
-- WHEN (OLD.departed IS DISTINCT FROM NEW.departed)
|
||||||
|
-- EXECUTE FUNCTION stay_completed_trigger_fn();
|
||||||
|
-- Description
|
||||||
|
--COMMENT ON TRIGGER stay_completed_trigger
|
||||||
|
-- ON api.stays
|
||||||
|
-- IS 'Automatic process_queue for completed stay';
|
||||||
|
@@ -7,6 +7,12 @@
|
|||||||
--
|
--
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
-- PostgREST Media Type Handlers
|
||||||
|
CREATE DOMAIN "text/xml" AS xml;
|
||||||
|
CREATE DOMAIN "application/geo+json" AS jsonb;
|
||||||
|
CREATE DOMAIN "application/gpx+xml" AS xml;
|
||||||
|
CREATE DOMAIN "application/vnd.google-earth.kml+xml" AS xml;
|
||||||
|
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- Functions API schema
|
-- Functions API schema
|
||||||
-- Timelapse - replay logs
|
-- Timelapse - replay logs
|
||||||
@@ -41,7 +47,7 @@ CREATE OR REPLACE FUNCTION api.timelapse_fn(
|
|||||||
WITH logbook as (
|
WITH logbook as (
|
||||||
SELECT track_geom
|
SELECT track_geom
|
||||||
FROM api.logbook
|
FROM api.logbook
|
||||||
WHERE _from_time >= start_log::TIMESTAMPTZ
|
WHERE _from_time >= start_date::TIMESTAMPTZ
|
||||||
AND _to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
|
AND _to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
|
||||||
AND track_geom IS NOT NULL
|
AND track_geom IS NOT NULL
|
||||||
ORDER BY _from_time ASC
|
ORDER BY _from_time ASC
|
||||||
@@ -69,7 +75,7 @@ CREATE OR REPLACE FUNCTION api.timelapse_fn(
|
|||||||
-- Return a GeoJSON MultiLineString
|
-- Return a GeoJSON MultiLineString
|
||||||
-- result _geojson [null, null]
|
-- result _geojson [null, null]
|
||||||
--raise WARNING 'result _geojson %' , _geojson;
|
--raise WARNING 'result _geojson %' , _geojson;
|
||||||
SELECT json_build_object(
|
SELECT jsonb_build_object(
|
||||||
'type', 'FeatureCollection',
|
'type', 'FeatureCollection',
|
||||||
'features', ARRAY[_geojson] ) INTO geojson;
|
'features', ARRAY[_geojson] ) INTO geojson;
|
||||||
END;
|
END;
|
||||||
@@ -79,6 +85,75 @@ COMMENT ON FUNCTION
|
|||||||
api.timelapse_fn
|
api.timelapse_fn
|
||||||
IS 'Export all selected logs geometry `track_geom` to a geojson as MultiLineString with empty properties';
|
IS 'Export all selected logs geometry `track_geom` to a geojson as MultiLineString with empty properties';
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS api.timelapse2_fn;
|
||||||
|
CREATE OR REPLACE FUNCTION api.timelapse2_fn(
|
||||||
|
IN start_log INTEGER DEFAULT NULL,
|
||||||
|
IN end_log INTEGER DEFAULT NULL,
|
||||||
|
IN start_date TEXT DEFAULT NULL,
|
||||||
|
IN end_date TEXT DEFAULT NULL,
|
||||||
|
OUT geojson JSONB) RETURNS JSONB AS $timelapse2$
|
||||||
|
DECLARE
|
||||||
|
_geojson jsonb;
|
||||||
|
BEGIN
|
||||||
|
-- Using sub query to force id order by
|
||||||
|
-- Merge GIS track_geom into a GeoJSON Points
|
||||||
|
IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
|
||||||
|
SELECT jsonb_agg(
|
||||||
|
jsonb_build_object('type', 'Feature',
|
||||||
|
'properties', jsonb_build_object( 'notes', f->'properties'->>'notes'),
|
||||||
|
'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
|
||||||
|
) INTO _geojson
|
||||||
|
FROM (
|
||||||
|
SELECT jsonb_array_elements(track_geojson->'features') AS f
|
||||||
|
FROM api.logbook
|
||||||
|
WHERE id >= start_log
|
||||||
|
AND id <= end_log
|
||||||
|
AND track_geojson IS NOT NULL
|
||||||
|
ORDER BY _from_time ASC
|
||||||
|
) AS sub
|
||||||
|
WHERE (f->'geometry'->>'type') = 'Point';
|
||||||
|
ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
|
||||||
|
SELECT jsonb_agg(
|
||||||
|
jsonb_build_object('type', 'Feature',
|
||||||
|
'properties', jsonb_build_object( 'notes', f->'properties'->>'notes'),
|
||||||
|
'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
|
||||||
|
) INTO _geojson
|
||||||
|
FROM (
|
||||||
|
SELECT jsonb_array_elements(track_geojson->'features') AS f
|
||||||
|
FROM api.logbook
|
||||||
|
WHERE _from_time >= start_date::TIMESTAMPTZ
|
||||||
|
AND _to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
|
||||||
|
AND track_geojson IS NOT NULL
|
||||||
|
ORDER BY _from_time ASC
|
||||||
|
) AS sub
|
||||||
|
WHERE (f->'geometry'->>'type') = 'Point';
|
||||||
|
ELSE
|
||||||
|
SELECT jsonb_agg(
|
||||||
|
jsonb_build_object('type', 'Feature',
|
||||||
|
'properties', jsonb_build_object( 'notes', f->'properties'->>'notes'),
|
||||||
|
'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
|
||||||
|
) INTO _geojson
|
||||||
|
FROM (
|
||||||
|
SELECT jsonb_array_elements(track_geojson->'features') AS f
|
||||||
|
FROM api.logbook
|
||||||
|
WHERE track_geojson IS NOT NULL
|
||||||
|
ORDER BY _from_time ASC
|
||||||
|
) AS sub
|
||||||
|
WHERE (f->'geometry'->>'type') = 'Point';
|
||||||
|
END IF;
|
||||||
|
-- Return a GeoJSON MultiLineString
|
||||||
|
-- result _geojson [null, null]
|
||||||
|
raise WARNING 'result _geojson %' , _geojson;
|
||||||
|
SELECT jsonb_build_object(
|
||||||
|
'type', 'FeatureCollection',
|
||||||
|
'features', _geojson ) INTO geojson;
|
||||||
|
END;
|
||||||
|
$timelapse2$ LANGUAGE plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
api.timelapse2_fn
|
||||||
|
IS 'Export all selected logs geometry `track_geom` to a geojson as points with notes properties';
|
||||||
|
|
||||||
-- export_logbook_geojson_fn
|
-- export_logbook_geojson_fn
|
||||||
DROP FUNCTION IF EXISTS api.export_logbook_geojson_fn;
|
DROP FUNCTION IF EXISTS api.export_logbook_geojson_fn;
|
||||||
CREATE FUNCTION api.export_logbook_geojson_fn(IN _id integer, OUT geojson JSONB) RETURNS JSONB AS $export_logbook_geojson$
|
CREATE FUNCTION api.export_logbook_geojson_fn(IN _id integer, OUT geojson JSONB) RETURNS JSONB AS $export_logbook_geojson$
|
||||||
@@ -111,7 +186,7 @@ COMMENT ON FUNCTION
|
|||||||
-- https://opencpn.org/OpenCPN/info/gpxvalidation.html
|
-- https://opencpn.org/OpenCPN/info/gpxvalidation.html
|
||||||
--
|
--
|
||||||
DROP FUNCTION IF EXISTS api.export_logbook_gpx_fn;
|
DROP FUNCTION IF EXISTS api.export_logbook_gpx_fn;
|
||||||
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_fn(IN _id INTEGER) RETURNS pg_catalog.xml
|
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_fn(IN _id INTEGER) RETURNS "text/xml"
|
||||||
AS $export_logbook_gpx$
|
AS $export_logbook_gpx$
|
||||||
DECLARE
|
DECLARE
|
||||||
app_settings jsonb;
|
app_settings jsonb;
|
||||||
@@ -169,7 +244,7 @@ COMMENT ON FUNCTION
|
|||||||
-- https://developers.google.com/kml/documentation/kml_tut
|
-- https://developers.google.com/kml/documentation/kml_tut
|
||||||
-- TODO https://developers.google.com/kml/documentation/time#timespans
|
-- TODO https://developers.google.com/kml/documentation/time#timespans
|
||||||
DROP FUNCTION IF EXISTS api.export_logbook_kml_fn;
|
DROP FUNCTION IF EXISTS api.export_logbook_kml_fn;
|
||||||
CREATE OR REPLACE FUNCTION api.export_logbook_kml_fn(IN _id INTEGER) RETURNS pg_catalog.xml
|
CREATE OR REPLACE FUNCTION api.export_logbook_kml_fn(IN _id INTEGER) RETURNS "text/xml"
|
||||||
AS $export_logbook_kml$
|
AS $export_logbook_kml$
|
||||||
DECLARE
|
DECLARE
|
||||||
logbook_rec record;
|
logbook_rec record;
|
||||||
@@ -212,7 +287,7 @@ COMMENT ON FUNCTION
|
|||||||
DROP FUNCTION IF EXISTS api.export_logbooks_gpx_fn;
|
DROP FUNCTION IF EXISTS api.export_logbooks_gpx_fn;
|
||||||
CREATE OR REPLACE FUNCTION api.export_logbooks_gpx_fn(
|
CREATE OR REPLACE FUNCTION api.export_logbooks_gpx_fn(
|
||||||
IN start_log INTEGER DEFAULT NULL,
|
IN start_log INTEGER DEFAULT NULL,
|
||||||
IN end_log INTEGER DEFAULT NULL) RETURNS pg_catalog.xml
|
IN end_log INTEGER DEFAULT NULL) RETURNS "application/gpx+xml"
|
||||||
AS $export_logbooks_gpx$
|
AS $export_logbooks_gpx$
|
||||||
declare
|
declare
|
||||||
merged_jsonb jsonb;
|
merged_jsonb jsonb;
|
||||||
@@ -276,7 +351,7 @@ COMMENT ON FUNCTION
|
|||||||
DROP FUNCTION IF EXISTS api.export_logbooks_kml_fn;
|
DROP FUNCTION IF EXISTS api.export_logbooks_kml_fn;
|
||||||
CREATE OR REPLACE FUNCTION api.export_logbooks_kml_fn(
|
CREATE OR REPLACE FUNCTION api.export_logbooks_kml_fn(
|
||||||
IN start_log INTEGER DEFAULT NULL,
|
IN start_log INTEGER DEFAULT NULL,
|
||||||
IN end_log INTEGER DEFAULT NULL) RETURNS pg_catalog.xml
|
IN end_log INTEGER DEFAULT NULL) RETURNS "text/xml"
|
||||||
AS $export_logbooks_kml$
|
AS $export_logbooks_kml$
|
||||||
DECLARE
|
DECLARE
|
||||||
_geom geometry;
|
_geom geometry;
|
||||||
@@ -334,7 +409,7 @@ COMMENT ON FUNCTION
|
|||||||
|
|
||||||
-- Find all log from and to moorage geopoint within 100m
|
-- Find all log from and to moorage geopoint within 100m
|
||||||
DROP FUNCTION IF EXISTS api.find_log_from_moorage_fn;
|
DROP FUNCTION IF EXISTS api.find_log_from_moorage_fn;
|
||||||
CREATE OR REPLACE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER, OUT geojson JSON) RETURNS JSON AS $find_log_from_moorage$
|
CREATE OR REPLACE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER, OUT geojson JSONB) RETURNS JSONB AS $find_log_from_moorage$
|
||||||
DECLARE
|
DECLARE
|
||||||
moorage_rec record;
|
moorage_rec record;
|
||||||
_geojson jsonb;
|
_geojson jsonb;
|
||||||
@@ -357,7 +432,7 @@ CREATE OR REPLACE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER, OUT geoj
|
|||||||
1000 -- in meters ?
|
1000 -- in meters ?
|
||||||
);
|
);
|
||||||
-- Return a GeoJSON filter on LineString
|
-- Return a GeoJSON filter on LineString
|
||||||
SELECT json_build_object(
|
SELECT jsonb_build_object(
|
||||||
'type', 'FeatureCollection',
|
'type', 'FeatureCollection',
|
||||||
'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson;
|
'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson;
|
||||||
END;
|
END;
|
||||||
@@ -368,7 +443,7 @@ COMMENT ON FUNCTION
|
|||||||
IS 'Find all log from moorage geopoint within 100m';
|
IS 'Find all log from moorage geopoint within 100m';
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS api.find_log_to_moorage_fn;
|
DROP FUNCTION IF EXISTS api.find_log_to_moorage_fn;
|
||||||
CREATE OR REPLACE FUNCTION api.find_log_to_moorage_fn(IN _id INTEGER, OUT geojson JSON) RETURNS JSON AS $find_log_to_moorage$
|
CREATE OR REPLACE FUNCTION api.find_log_to_moorage_fn(IN _id INTEGER, OUT geojson JSONB) RETURNS JSONB AS $find_log_to_moorage$
|
||||||
DECLARE
|
DECLARE
|
||||||
moorage_rec record;
|
moorage_rec record;
|
||||||
_geojson jsonb;
|
_geojson jsonb;
|
||||||
@@ -391,7 +466,7 @@ CREATE OR REPLACE FUNCTION api.find_log_to_moorage_fn(IN _id INTEGER, OUT geojso
|
|||||||
1000 -- in meters ?
|
1000 -- in meters ?
|
||||||
);
|
);
|
||||||
-- Return a GeoJSON filter on LineString
|
-- Return a GeoJSON filter on LineString
|
||||||
SELECT json_build_object(
|
SELECT jsonb_build_object(
|
||||||
'type', 'FeatureCollection',
|
'type', 'FeatureCollection',
|
||||||
'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson;
|
'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson;
|
||||||
END;
|
END;
|
||||||
@@ -529,7 +604,7 @@ DROP FUNCTION IF EXISTS api.export_moorages_geojson_fn;
|
|||||||
CREATE FUNCTION api.export_moorages_geojson_fn(OUT geojson JSONB) RETURNS JSONB AS $export_moorages_geojson$
|
CREATE FUNCTION api.export_moorages_geojson_fn(OUT geojson JSONB) RETURNS JSONB AS $export_moorages_geojson$
|
||||||
DECLARE
|
DECLARE
|
||||||
BEGIN
|
BEGIN
|
||||||
SELECT json_build_object(
|
SELECT jsonb_build_object(
|
||||||
'type', 'FeatureCollection',
|
'type', 'FeatureCollection',
|
||||||
'features',
|
'features',
|
||||||
( SELECT
|
( SELECT
|
||||||
@@ -552,7 +627,7 @@ COMMENT ON FUNCTION
|
|||||||
IS 'Export moorages as geojson';
|
IS 'Export moorages as geojson';
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS api.export_moorages_gpx_fn;
|
DROP FUNCTION IF EXISTS api.export_moorages_gpx_fn;
|
||||||
CREATE FUNCTION api.export_moorages_gpx_fn() RETURNS pg_catalog.xml AS $export_moorages_gpx$
|
CREATE FUNCTION api.export_moorages_gpx_fn() RETURNS "text/xml" AS $export_moorages_gpx$
|
||||||
DECLARE
|
DECLARE
|
||||||
app_settings jsonb;
|
app_settings jsonb;
|
||||||
BEGIN
|
BEGIN
|
||||||
@@ -582,8 +657,8 @@ CREATE FUNCTION api.export_moorages_gpx_fn() RETURNS pg_catalog.xml AS $export_m
|
|||||||
xmlelement(name type, 'WPT'),
|
xmlelement(name type, 'WPT'),
|
||||||
xmlelement(name link, xmlattributes(concat(app_settings->>'app.url','moorage/', m.id) as href),
|
xmlelement(name link, xmlattributes(concat(app_settings->>'app.url','moorage/', m.id) as href),
|
||||||
xmlelement(name text, m.name)),
|
xmlelement(name text, m.name)),
|
||||||
xmlelement(name extensions, xmlelement(name "postgsail:mooorage_id", 1),
|
xmlelement(name extensions, xmlelement(name "postgsail:mooorage_id", m.id),
|
||||||
xmlelement(name "postgsail:link", concat(app_settings->>'app.url','moorage/', m.id)),
|
xmlelement(name "postgsail:link", concat(app_settings->>'app.url','/moorage/', m.id)),
|
||||||
xmlelement(name "opencpn:guid", uuid_generate_v4()),
|
xmlelement(name "opencpn:guid", uuid_generate_v4()),
|
||||||
xmlelement(name "opencpn:viz", '1'),
|
xmlelement(name "opencpn:viz", '1'),
|
||||||
xmlelement(name "opencpn:scale_min_max", xmlattributes(true as UseScale, 30000 as ScaleMin, 0 as ScaleMax)
|
xmlelement(name "opencpn:scale_min_max", xmlattributes(true as UseScale, 30000 as ScaleMin, 0 as ScaleMax)
|
||||||
@@ -604,7 +679,7 @@ DROP FUNCTION IF EXISTS api.stats_logs_fn;
|
|||||||
CREATE OR REPLACE FUNCTION api.stats_logs_fn(
|
CREATE OR REPLACE FUNCTION api.stats_logs_fn(
|
||||||
IN start_date TEXT DEFAULT NULL,
|
IN start_date TEXT DEFAULT NULL,
|
||||||
IN end_date TEXT DEFAULT NULL,
|
IN end_date TEXT DEFAULT NULL,
|
||||||
OUT stats JSON) RETURNS JSON AS $stats_logs$
|
OUT stats JSONB) RETURNS JSONB AS $stats_logs$
|
||||||
DECLARE
|
DECLARE
|
||||||
_start_date TIMESTAMPTZ DEFAULT '1970-01-01';
|
_start_date TIMESTAMPTZ DEFAULT '1970-01-01';
|
||||||
_end_date TIMESTAMPTZ DEFAULT NOW();
|
_end_date TIMESTAMPTZ DEFAULT NOW();
|
||||||
@@ -748,6 +823,12 @@ CREATE OR REPLACE FUNCTION api.delete_logbook_fn(IN _id integer) RETURNS BOOLEAN
|
|||||||
SET notes = 'mark for deletion'
|
SET notes = 'mark for deletion'
|
||||||
WHERE l.vessel_id = current_setting('vessel.id', false)
|
WHERE l.vessel_id = current_setting('vessel.id', false)
|
||||||
AND id = logbook_rec.id;
|
AND id = logbook_rec.id;
|
||||||
|
-- Update metrics status to moored
|
||||||
|
UPDATE api.metrics
|
||||||
|
SET status = 'moored'
|
||||||
|
WHERE time >= logbook_rec._from_time::TIMESTAMPTZ
|
||||||
|
AND time <= logbook_rec._to_time::TIMESTAMPTZ
|
||||||
|
AND vessel_id = current_setting('vessel.id', false);
|
||||||
-- Get related stays
|
-- Get related stays
|
||||||
SELECT id,departed,active INTO current_stays_id,current_stays_departed,current_stays_active
|
SELECT id,departed,active INTO current_stays_id,current_stays_departed,current_stays_active
|
||||||
FROM api.stays s
|
FROM api.stays s
|
||||||
@@ -776,7 +857,11 @@ CREATE OR REPLACE FUNCTION api.delete_logbook_fn(IN _id integer) RETURNS BOOLEAN
|
|||||||
RAISE WARNING '-> delete_logbook_fn delete logbook [%]', logbook_rec.id;
|
RAISE WARNING '-> delete_logbook_fn delete logbook [%]', logbook_rec.id;
|
||||||
DELETE FROM api.stays WHERE id = current_stays_id;
|
DELETE FROM api.stays WHERE id = current_stays_id;
|
||||||
RAISE WARNING '-> delete_logbook_fn delete stays [%]', current_stays_id;
|
RAISE WARNING '-> delete_logbook_fn delete stays [%]', current_stays_id;
|
||||||
-- TODO should we subtract (-1) moorages ref count or reprocess it?!?
|
-- Clean up, Subtract (-1) moorages ref count
|
||||||
|
UPDATE api.moorages
|
||||||
|
SET reference_count = reference_count - 1
|
||||||
|
WHERE vessel_id = current_setting('vessel.id', false)
|
||||||
|
AND id = previous_stays_id;
|
||||||
RETURN TRUE;
|
RETURN TRUE;
|
||||||
END;
|
END;
|
||||||
$delete_logbook$ LANGUAGE plpgsql;
|
$delete_logbook$ LANGUAGE plpgsql;
|
||||||
@@ -784,3 +869,70 @@ $delete_logbook$ LANGUAGE plpgsql;
|
|||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
api.delete_logbook_fn
|
api.delete_logbook_fn
|
||||||
IS 'Delete a logbook and dependency stay';
|
IS 'Delete a logbook and dependency stay';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION api.monitoring_history_fn(IN time_interval TEXT DEFAULT '24', OUT history_metrics JSONB) RETURNS JSONB AS $monitoring_history$
|
||||||
|
DECLARE
|
||||||
|
bucket_interval interval := '5 minutes';
|
||||||
|
BEGIN
|
||||||
|
RAISE NOTICE '-> monitoring_history_fn';
|
||||||
|
SELECT CASE time_interval
|
||||||
|
WHEN '24' THEN '5 minutes'
|
||||||
|
WHEN '48' THEN '2 hours'
|
||||||
|
WHEN '72' THEN '4 hours'
|
||||||
|
WHEN '168' THEN '7 hours'
|
||||||
|
ELSE '5 minutes'
|
||||||
|
END bucket INTO bucket_interval;
|
||||||
|
RAISE NOTICE '-> monitoring_history_fn % %', time_interval, bucket_interval;
|
||||||
|
WITH history_table AS (
|
||||||
|
SELECT time_bucket(bucket_interval::INTERVAL, time) AS time_bucket,
|
||||||
|
avg((metrics->'environment.water.temperature')::numeric) AS waterTemperature,
|
||||||
|
avg((metrics->'environment.inside.temperature')::numeric) AS insideTemperature,
|
||||||
|
avg((metrics->'environment.outside.temperature')::numeric) AS outsideTemperature,
|
||||||
|
avg((metrics->'environment.wind.speedOverGround')::numeric) AS windSpeedOverGround,
|
||||||
|
avg((metrics->'environment.inside.relativeHumidity')::numeric) AS insideHumidity,
|
||||||
|
avg((metrics->'environment.outside.relativeHumidity')::numeric) AS outsideHumidity,
|
||||||
|
avg((metrics->'environment.outside.pressure')::numeric) AS outsidePressure,
|
||||||
|
avg((metrics->'environment.inside.pressure')::numeric) AS insidePressure,
|
||||||
|
avg((metrics->'electrical.batteries.House.capacity.stateOfCharge')::numeric) AS batteryCharge,
|
||||||
|
avg((metrics->'electrical.batteries.House.voltage')::numeric) AS batteryVoltage,
|
||||||
|
avg((metrics->'environment.depth.belowTransducer')::numeric) AS depth
|
||||||
|
FROM api.metrics
|
||||||
|
WHERE time > (NOW() AT TIME ZONE 'UTC' - INTERVAL '1 hours' * time_interval::NUMERIC)
|
||||||
|
GROUP BY time_bucket
|
||||||
|
ORDER BY time_bucket asc
|
||||||
|
)
|
||||||
|
SELECT jsonb_agg(history_table) INTO history_metrics FROM history_table;
|
||||||
|
END
|
||||||
|
$monitoring_history$ LANGUAGE plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
api.monitoring_history_fn
|
||||||
|
IS 'Export metrics from a time period 24h, 48h, 72h, 7d';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION api.status_fn(out status jsonb) RETURNS JSONB AS $status_fn$
|
||||||
|
DECLARE
|
||||||
|
in_route BOOLEAN := False;
|
||||||
|
BEGIN
|
||||||
|
RAISE NOTICE '-> status_fn';
|
||||||
|
SELECT EXISTS ( SELECT id
|
||||||
|
FROM api.logbook l
|
||||||
|
WHERE active IS True
|
||||||
|
LIMIT 1
|
||||||
|
) INTO in_route;
|
||||||
|
IF in_route IS True THEN
|
||||||
|
-- In route from <logbook.from_name> arrived at <>
|
||||||
|
SELECT jsonb_build_object('status', sa.description, 'location', m.name, 'departed', l._from_time) INTO status
|
||||||
|
from api.logbook l, api.stays_at sa, api.moorages m
|
||||||
|
where s.stay_code = sa.stay_code AND l._from_moorage_id = m.id AND l.active IS True;
|
||||||
|
ELSE
|
||||||
|
-- At <Stat_at.Desc> in <Moorage.name> departed at <>
|
||||||
|
SELECT jsonb_build_object('status', sa.description, 'location', m.name, 'arrived', s.arrived) INTO status
|
||||||
|
from api.stays s, api.stays_at sa, api.moorages m
|
||||||
|
where s.stay_code = sa.stay_code AND s.moorage_id = m.id AND s.active IS True;
|
||||||
|
END IF;
|
||||||
|
END
|
||||||
|
$status_fn$ LANGUAGE plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
api.status_fn
|
||||||
|
IS 'generate vessel status';
|
@@ -15,12 +15,12 @@
|
|||||||
-- security_invoker=true,security_barrier=true
|
-- security_invoker=true,security_barrier=true
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
|
|
||||||
CREATE VIEW first_metric AS
|
CREATE VIEW public.first_metric AS
|
||||||
SELECT *
|
SELECT *
|
||||||
FROM api.metrics
|
FROM api.metrics
|
||||||
ORDER BY time ASC LIMIT 1;
|
ORDER BY time ASC LIMIT 1;
|
||||||
|
|
||||||
CREATE VIEW last_metric AS
|
CREATE VIEW public.last_metric AS
|
||||||
SELECT *
|
SELECT *
|
||||||
FROM api.metrics
|
FROM api.metrics
|
||||||
ORDER BY time DESC LIMIT 1;
|
ORDER BY time DESC LIMIT 1;
|
||||||
@@ -392,9 +392,13 @@ CREATE VIEW api.monitoring_view WITH (security_invoker=true,security_barrier=tru
|
|||||||
'properties', jsonb_build_object(
|
'properties', jsonb_build_object(
|
||||||
'name', current_setting('vessel.name', false),
|
'name', current_setting('vessel.name', false),
|
||||||
'latitude', m.latitude,
|
'latitude', m.latitude,
|
||||||
'longitude', m.longitude
|
'longitude', m.longitude,
|
||||||
|
'time', m.time,
|
||||||
|
'speedoverground', m.speedoverground,
|
||||||
|
'windspeedapparent', m.windspeedapparent
|
||||||
)::jsonb ) AS geojson,
|
)::jsonb ) AS geojson,
|
||||||
current_setting('vessel.name', false) AS name
|
current_setting('vessel.name', false) AS name
|
||||||
|
--( SELECT api.status_fn() ) AS status
|
||||||
FROM api.metrics m
|
FROM api.metrics m
|
||||||
ORDER BY time DESC LIMIT 1;
|
ORDER BY time DESC LIMIT 1;
|
||||||
COMMENT ON VIEW
|
COMMENT ON VIEW
|
||||||
|
@@ -8,6 +8,36 @@ select current_database();
|
|||||||
-- connect to the DB
|
-- connect to the DB
|
||||||
\c signalk
|
\c signalk
|
||||||
|
|
||||||
|
-- Check for new logbook pending validation
|
||||||
|
CREATE FUNCTION cron_process_pre_logbook_fn() RETURNS void AS $$
|
||||||
|
DECLARE
|
||||||
|
process_rec record;
|
||||||
|
BEGIN
|
||||||
|
-- Check for new logbook pending update
|
||||||
|
RAISE NOTICE 'cron_process_pre_logbook_fn init loop';
|
||||||
|
FOR process_rec in
|
||||||
|
SELECT * FROM process_queue
|
||||||
|
WHERE channel = 'pre_logbook' AND processed IS NULL
|
||||||
|
ORDER BY stored ASC LIMIT 100
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE 'cron_process_pre_logbook_fn processing queue [%] for logbook id [%]', process_rec.id, process_rec.payload;
|
||||||
|
-- update logbook
|
||||||
|
PERFORM process_pre_logbook_fn(process_rec.payload::INTEGER);
|
||||||
|
-- update process_queue table , processed
|
||||||
|
UPDATE process_queue
|
||||||
|
SET
|
||||||
|
processed = NOW()
|
||||||
|
WHERE id = process_rec.id;
|
||||||
|
RAISE NOTICE 'cron_process_pre_logbook_fn processed queue [%] for logbook id [%]', process_rec.id, process_rec.payload;
|
||||||
|
END LOOP;
|
||||||
|
END;
|
||||||
|
$$ language plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.cron_process_pre_logbook_fn
|
||||||
|
IS 'init by pg_cron to check for new logbook pending update, if so perform process_logbook_valid_fn';
|
||||||
|
|
||||||
|
|
||||||
-- Check for new logbook pending update
|
-- Check for new logbook pending update
|
||||||
CREATE FUNCTION cron_process_new_logbook_fn() RETURNS void AS $$
|
CREATE FUNCTION cron_process_new_logbook_fn() RETURNS void AS $$
|
||||||
declare
|
declare
|
||||||
@@ -94,7 +124,7 @@ $$ language plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.cron_process_new_moorage_fn
|
public.cron_process_new_moorage_fn
|
||||||
IS 'init by pg_cron to check for new moorage pending update, if so perform process_moorage_queue_fn';
|
IS 'Deprecated, init by pg_cron to check for new moorage pending update, if so perform process_moorage_queue_fn';
|
||||||
|
|
||||||
-- CRON Monitor offline pending notification
|
-- CRON Monitor offline pending notification
|
||||||
create function cron_process_monitor_offline_fn() RETURNS void AS $$
|
create function cron_process_monitor_offline_fn() RETURNS void AS $$
|
||||||
@@ -329,6 +359,144 @@ COMMENT ON FUNCTION
|
|||||||
public.cron_process_new_notification_fn
|
public.cron_process_new_notification_fn
|
||||||
IS 'init by pg_cron to check for new event pending notifications, if so perform process_notification_queue_fn';
|
IS 'init by pg_cron to check for new event pending notifications, if so perform process_notification_queue_fn';
|
||||||
|
|
||||||
|
-- CRON for new vessel metadata pending grafana provisioning
|
||||||
|
CREATE FUNCTION cron_process_grafana_fn() RETURNS void AS $$
|
||||||
|
DECLARE
|
||||||
|
process_rec record;
|
||||||
|
data_rec record;
|
||||||
|
app_settings jsonb;
|
||||||
|
user_settings jsonb;
|
||||||
|
BEGIN
|
||||||
|
-- We run grafana provisioning only after the first received vessel metadata
|
||||||
|
-- Check for new vessel metadata pending grafana provisioning
|
||||||
|
RAISE NOTICE 'cron_process_grafana_fn';
|
||||||
|
FOR process_rec in
|
||||||
|
SELECT * from process_queue
|
||||||
|
where channel = 'grafana' and processed is null
|
||||||
|
order by stored asc
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_process_grafana_fn [%]', process_rec.payload;
|
||||||
|
-- Gather url from app settings
|
||||||
|
app_settings := get_app_settings_fn();
|
||||||
|
-- Get vessel details base on metadata id
|
||||||
|
SELECT * INTO data_rec
|
||||||
|
FROM api.metadata m, auth.vessels v
|
||||||
|
WHERE m.id = process_rec.payload::INTEGER
|
||||||
|
AND m.vessel_id = v.vessel_id;
|
||||||
|
-- as we got data from the vessel we can do the grafana provisioning.
|
||||||
|
PERFORM grafana_py_fn(data_rec.name, data_rec.vessel_id, data_rec.owner_email, app_settings);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(data_rec.vessel_id::TEXT);
|
||||||
|
--RAISE DEBUG '-> DEBUG cron_process_grafana_fn get_user_settings_from_vesselid_fn [%]', user_settings;
|
||||||
|
-- add user in keycloak
|
||||||
|
PERFORM keycloak_auth_py_fn(data_rec.vessel_id, user_settings, app_settings);
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('grafana'::TEXT, user_settings::JSONB);
|
||||||
|
-- update process_queue entry as processed
|
||||||
|
UPDATE process_queue
|
||||||
|
SET
|
||||||
|
processed = NOW()
|
||||||
|
WHERE id = process_rec.id;
|
||||||
|
RAISE NOTICE '-> cron_process_grafana_fn updated process_queue table [%]', process_rec.id;
|
||||||
|
END LOOP;
|
||||||
|
END;
|
||||||
|
$$ language plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.cron_process_grafana_fn
|
||||||
|
IS 'init by pg_cron to check for new vessel pending grafana provisioning, if so perform grafana_py_fn';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION public.cron_process_windy_fn() RETURNS void AS $$
|
||||||
|
DECLARE
|
||||||
|
windy_rec record;
|
||||||
|
default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
|
||||||
|
last_metric TIMESTAMPTZ;
|
||||||
|
metric_rec record;
|
||||||
|
windy_metric jsonb;
|
||||||
|
app_settings jsonb;
|
||||||
|
user_settings jsonb;
|
||||||
|
windy_pws jsonb;
|
||||||
|
BEGIN
|
||||||
|
-- Check for new observations pending update
|
||||||
|
RAISE NOTICE 'cron_windy_fn';
|
||||||
|
-- Gather url from app settings
|
||||||
|
app_settings := get_app_settings_fn();
|
||||||
|
-- Find users with Windy active and with an active vessel
|
||||||
|
-- Map account id to Windy Station ID
|
||||||
|
FOR windy_rec in
|
||||||
|
SELECT
|
||||||
|
a.id,a.email,v.vessel_id,v.name,
|
||||||
|
COALESCE((a.preferences->'windy_last_metric')::TEXT, default_last_metric::TEXT) as last_metric
|
||||||
|
FROM auth.accounts a
|
||||||
|
LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
|
||||||
|
LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
|
||||||
|
WHERE (a.preferences->'public_windy')::boolean = True
|
||||||
|
AND m.active = True
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_windy_fn for [%]', windy_rec;
|
||||||
|
PERFORM set_config('vessel.id', windy_rec.vessel_id, false);
|
||||||
|
--RAISE WARNING 'public.cron_process_windy_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
|
||||||
|
RAISE NOTICE '-> cron_windy_fn checking user_settings [%]', user_settings;
|
||||||
|
-- Get all metrics from the last windy_last_metric avg by 5 minutes
|
||||||
|
-- TODO json_agg to send all data in once, but issue with py jsonb transformation decimal.
|
||||||
|
FOR metric_rec in
|
||||||
|
SELECT time_bucket('5 minutes', m.time) AS time_bucket,
|
||||||
|
avg((m.metrics->'environment.outside.temperature')::numeric) AS temperature,
|
||||||
|
avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
|
||||||
|
avg((m.metrics->'environment.outside.relativeHumidity')::numeric) AS rh,
|
||||||
|
avg((m.metrics->'environment.wind.directionTrue')::numeric) AS winddir,
|
||||||
|
avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
|
||||||
|
max((m.metrics->'environment.wind.speedTrue')::numeric) AS gust,
|
||||||
|
last(latitude, time) AS lat,
|
||||||
|
last(longitude, time) AS lng
|
||||||
|
FROM api.metrics m
|
||||||
|
WHERE vessel_id = windy_rec.vessel_id
|
||||||
|
AND m.time >= windy_rec.last_metric::TIMESTAMPTZ
|
||||||
|
GROUP BY time_bucket
|
||||||
|
ORDER BY time_bucket ASC LIMIT 100
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_windy_fn checking metrics [%]', metric_rec;
|
||||||
|
-- https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
|
||||||
|
-- temp from kelvin to celcuis
|
||||||
|
-- winddir from radiant to degres
|
||||||
|
-- rh from ratio to percentage
|
||||||
|
SELECT jsonb_build_object(
|
||||||
|
'dateutc', metric_rec.time_bucket,
|
||||||
|
'station', windy_rec.id,
|
||||||
|
'name', windy_rec.name,
|
||||||
|
'lat', metric_rec.lat,
|
||||||
|
'lon', metric_rec.lng,
|
||||||
|
'wind', metric_rec.wind,
|
||||||
|
'gust', metric_rec.gust,
|
||||||
|
'pressure', metric_rec.pressure,
|
||||||
|
'winddir', radiantToDegrees(metric_rec.winddir::numeric),
|
||||||
|
'temp', kelvinToCel(metric_rec.temperature::numeric),
|
||||||
|
'rh', valToPercent(metric_rec.rh::numeric)
|
||||||
|
) INTO windy_metric;
|
||||||
|
RAISE NOTICE '-> cron_windy_fn checking windy_metrics [%]', windy_metric;
|
||||||
|
SELECT windy_pws_py_fn(windy_metric, user_settings, app_settings) into windy_pws;
|
||||||
|
RAISE NOTICE '-> cron_windy_fn Windy PWS [%]', ((windy_pws->'header')::JSONB ? 'id');
|
||||||
|
IF NOT((user_settings->'settings')::JSONB ? 'windy') and ((windy_pws->'header')::JSONB ? 'id') then
|
||||||
|
RAISE NOTICE '-> cron_windy_fn new Windy PWS [%]', (windy_pws->'header')::JSONB->>'id';
|
||||||
|
-- Send metrics to Windy
|
||||||
|
PERFORM api.update_user_preferences_fn('{windy}'::TEXT, ((windy_pws->'header')::JSONB->>'id')::TEXT);
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('windy'::TEXT, user_settings::JSONB);
|
||||||
|
END IF;
|
||||||
|
-- Record last metrics time
|
||||||
|
SELECT metric_rec.time_bucket INTO last_metric;
|
||||||
|
END LOOP;
|
||||||
|
PERFORM api.update_user_preferences_fn('{windy_last_metric}'::TEXT, last_metric::TEXT);
|
||||||
|
END LOOP;
|
||||||
|
END;
|
||||||
|
$$ language plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.cron_process_windy_fn
|
||||||
|
IS 'init by pg_cron to create (or update) station and uploading observations to Windy Personal Weather Station observations';
|
||||||
|
|
||||||
-- CRON for Vacuum database
|
-- CRON for Vacuum database
|
||||||
CREATE FUNCTION cron_vacuum_fn() RETURNS void AS $$
|
CREATE FUNCTION cron_vacuum_fn() RETURNS void AS $$
|
||||||
-- ERROR: VACUUM cannot be executed from a function
|
-- ERROR: VACUUM cannot be executed from a function
|
||||||
@@ -349,27 +517,305 @@ COMMENT ON FUNCTION
|
|||||||
IS 'init by pg_cron to full vacuum tables on schema api';
|
IS 'init by pg_cron to full vacuum tables on schema api';
|
||||||
|
|
||||||
-- CRON for alerts notification
|
-- CRON for alerts notification
|
||||||
CREATE FUNCTION cron_process_alerts_fn() RETURNS void AS $$
|
CREATE OR REPLACE FUNCTION public.cron_alerts_fn() RETURNS void AS $$
|
||||||
DECLARE
|
DECLARE
|
||||||
alert_rec record;
|
alert_rec record;
|
||||||
|
default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
|
||||||
|
last_metric TIMESTAMPTZ;
|
||||||
|
metric_rec record;
|
||||||
|
app_settings JSONB;
|
||||||
|
user_settings JSONB;
|
||||||
|
alerting JSONB;
|
||||||
|
_alarms JSONB;
|
||||||
|
alarms TEXT;
|
||||||
|
alert_default JSONB := '{
|
||||||
|
"low_pressure_threshold": 990,
|
||||||
|
"high_wind_speed_threshold": 30,
|
||||||
|
"low_water_depth_threshold": 1,
|
||||||
|
"min_notification_interval": 6,
|
||||||
|
"high_pressure_drop_threshold": 12,
|
||||||
|
"low_battery_charge_threshold": 90,
|
||||||
|
"low_battery_voltage_threshold": 12.5,
|
||||||
|
"low_water_temperature_threshold": 10,
|
||||||
|
"low_indoor_temperature_threshold": 7,
|
||||||
|
"low_outdoor_temperature_threshold": 3
|
||||||
|
}';
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Check for new event notification pending update
|
-- Check for new event notification pending update
|
||||||
RAISE NOTICE 'cron_process_alerts_fn';
|
RAISE NOTICE 'cron_alerts_fn';
|
||||||
FOR alert_rec in
|
FOR alert_rec in
|
||||||
SELECT
|
SELECT
|
||||||
a.user_id,a.email,v.vessel_id
|
a.user_id,a.email,v.vessel_id,
|
||||||
FROM auth.accounts a, auth.vessels v, api.metadata m
|
COALESCE((a.preferences->'alert_last_metric')::TEXT, default_last_metric::TEXT) as last_metric,
|
||||||
WHERE m.vessel_id = v.vessel_id
|
(alert_default || (a.preferences->'alerting')::JSONB) as alerting,
|
||||||
AND a.email = v.owner_email
|
(a.preferences->'alarms')::JSONB as alarms
|
||||||
AND (preferences->'alerting'->'enabled')::boolean = false
|
FROM auth.accounts a
|
||||||
|
LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
|
||||||
|
LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
|
||||||
|
WHERE (a.preferences->'alerting'->'enabled')::boolean = True
|
||||||
|
AND m.active = True
|
||||||
LOOP
|
LOOP
|
||||||
RAISE NOTICE '-> cron_process_alert_rec_fn for [%]', alert_rec;
|
RAISE NOTICE '-> cron_alerts_fn for [%]', alert_rec;
|
||||||
|
PERFORM set_config('vessel.id', alert_rec.vessel_id, false);
|
||||||
|
PERFORM set_config('user.email', alert_rec.email, false);
|
||||||
|
--RAISE WARNING 'public.cron_process_alert_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(alert_rec.vessel_id::TEXT);
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking user_settings [%]', user_settings;
|
||||||
|
-- Get all metrics from the last last_metric avg by 5 minutes
|
||||||
|
FOR metric_rec in
|
||||||
|
SELECT time_bucket('5 minutes', m.time) AS time_bucket,
|
||||||
|
avg((m.metrics->'environment.inside.temperature')::numeric) AS intemp,
|
||||||
|
avg((m.metrics->'environment.outside.temperature')::numeric) AS outtemp,
|
||||||
|
avg((m.metrics->'environment.water.temperature')::numeric) AS wattemp,
|
||||||
|
avg((m.metrics->'environment.depth.belowTransducer')::numeric) AS watdepth,
|
||||||
|
avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
|
||||||
|
avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
|
||||||
|
avg((m.metrics->'electrical.batteries.House.voltage')::numeric) AS voltage,
|
||||||
|
avg((m.metrics->'electrical.batteries.House.capacity.stateOfCharge')::numeric) AS charge
|
||||||
|
FROM api.metrics m
|
||||||
|
WHERE vessel_id = alert_rec.vessel_id
|
||||||
|
AND m.time >= alert_rec.last_metric::TIMESTAMPTZ
|
||||||
|
GROUP BY time_bucket
|
||||||
|
ORDER BY time_bucket ASC LIMIT 100
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking metrics [%]', metric_rec;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking alerting [%]', alert_rec.alerting;
|
||||||
|
--RAISE NOTICE '-> cron_alerts_fn checking debug [%] [%]', kelvinToCel(metric_rec.intemp), (alert_rec.alerting->'low_indoor_temperature_threshold');
|
||||||
|
IF kelvinToCel(metric_rec.intemp) < (alert_rec.alerting->'low_indoor_temperature_threshold')::numeric then
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||||
|
-- Get latest alarms
|
||||||
|
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
|
-- Is alarm in the min_notification_interval time frame
|
||||||
|
IF (
|
||||||
|
((_alarms->'low_indoor_temperature_threshold'->>'date') IS NULL) OR
|
||||||
|
(((_alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||||
|
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||||
|
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||||
|
) THEN
|
||||||
|
-- Add alarm
|
||||||
|
alarms := '{"low_indoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.intemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||||
|
-- Merge alarms
|
||||||
|
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||||
|
-- Update alarms for user
|
||||||
|
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||||
|
SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.intemp) ||'"}'::text)::JSONB into user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||||
|
-- DEBUG
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold +interval';
|
||||||
|
END IF;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold';
|
||||||
|
END IF;
|
||||||
|
IF kelvinToCel(metric_rec.outtemp) < (alert_rec.alerting->'low_outdoor_temperature_threshold')::numeric then
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||||
|
-- Get latest alarms
|
||||||
|
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
|
-- Is alarm in the min_notification_interval time frame
|
||||||
|
IF (
|
||||||
|
((_alarms->'low_outdoor_temperature_threshold'->>'date') IS NULL) OR
|
||||||
|
(((_alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||||
|
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||||
|
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||||
|
) THEN
|
||||||
|
-- Add alarm
|
||||||
|
alarms := '{"low_outdoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.outtemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||||
|
-- Merge alarms
|
||||||
|
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||||
|
-- Update alarms for user
|
||||||
|
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||||
|
SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.outtemp) ||'"}'::text)::JSONB into user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||||
|
-- DEBUG
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold +interval';
|
||||||
|
END IF;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold';
|
||||||
|
END IF;
|
||||||
|
IF kelvinToCel(metric_rec.wattemp) < (alert_rec.alerting->'low_water_temperature_threshold')::numeric then
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||||
|
-- Get latest alarms
|
||||||
|
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
|
-- Is alarm in the min_notification_interval time frame
|
||||||
|
IF (
|
||||||
|
((_alarms->'low_water_temperature_threshold'->>'date') IS NULL) OR
|
||||||
|
(((_alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||||
|
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||||
|
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||||
|
) THEN
|
||||||
|
-- Add alarm
|
||||||
|
alarms := '{"low_water_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.wattemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||||
|
-- Merge alarms
|
||||||
|
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||||
|
-- Update alarms for user
|
||||||
|
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||||
|
SELECT user_settings::JSONB || ('{"alert": "low_water_temperature_threshold value:'|| kelvinToCel(metric_rec.wattemp) ||'"}'::text)::JSONB into user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||||
|
-- DEBUG
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold +interval';
|
||||||
|
END IF;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold';
|
||||||
|
END IF;
|
||||||
|
IF metric_rec.watdepth < (alert_rec.alerting->'low_water_depth_threshold')::numeric then
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||||
|
-- Get latest alarms
|
||||||
|
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
|
-- Is alarm in the min_notification_interval time frame
|
||||||
|
IF (
|
||||||
|
((_alarms->'low_water_depth_threshold'->>'date') IS NULL) OR
|
||||||
|
(((_alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ
|
||||||
|
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||||
|
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||||
|
) THEN
|
||||||
|
-- Add alarm
|
||||||
|
alarms := '{"low_water_depth_threshold": {"value": '|| metric_rec.watdepth ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||||
|
-- Merge alarms
|
||||||
|
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||||
|
-- Update alarms for user
|
||||||
|
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||||
|
SELECT user_settings::JSONB || ('{"alert": "low_water_depth_threshold value:'|| metric_rec.watdepth ||'"}'::text)::JSONB into user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||||
|
-- DEBUG
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold +interval';
|
||||||
|
END IF;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold';
|
||||||
|
END IF;
|
||||||
|
if metric_rec.pressure < (alert_rec.alerting->'high_pressure_drop_threshold')::numeric then
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||||
|
-- Get latest alarms
|
||||||
|
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
|
-- Is alarm in the min_notification_interval time frame
|
||||||
|
IF (
|
||||||
|
((_alarms->'high_pressure_drop_threshold'->>'date') IS NULL) OR
|
||||||
|
(((_alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ
|
||||||
|
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||||
|
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||||
|
) THEN
|
||||||
|
-- Add alarm
|
||||||
|
alarms := '{"high_pressure_drop_threshold": {"value": '|| metric_rec.pressure ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||||
|
-- Merge alarms
|
||||||
|
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||||
|
-- Update alarms for user
|
||||||
|
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||||
|
SELECT user_settings::JSONB || ('{"alert": "high_pressure_drop_threshold value:'|| metric_rec.pressure ||'"}'::text)::JSONB into user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||||
|
-- DEBUG
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold +interval';
|
||||||
|
END IF;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold';
|
||||||
|
END IF;
|
||||||
|
IF metric_rec.wind > (alert_rec.alerting->'high_wind_speed_threshold')::numeric then
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||||
|
-- Get latest alarms
|
||||||
|
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
|
-- Is alarm in the min_notification_interval time frame
|
||||||
|
IF (
|
||||||
|
((_alarms->'high_wind_speed_threshold'->>'date') IS NULL) OR
|
||||||
|
(((_alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ
|
||||||
|
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||||
|
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||||
|
) THEN
|
||||||
|
-- Add alarm
|
||||||
|
alarms := '{"high_wind_speed_threshold": {"value": '|| metric_rec.wind ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||||
|
-- Merge alarms
|
||||||
|
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||||
|
-- Update alarms for user
|
||||||
|
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||||
|
SELECT user_settings::JSONB || ('{"alert": "high_wind_speed_threshold value:'|| metric_rec.wind ||'"}'::text)::JSONB into user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||||
|
-- DEBUG
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold +interval';
|
||||||
|
END IF;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold';
|
||||||
|
END IF;
|
||||||
|
if metric_rec.voltage < (alert_rec.alerting->'low_battery_voltage_threshold')::numeric then
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||||
|
-- Get latest alarms
|
||||||
|
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = 'lacroix.francois@gmail.com';
|
||||||
|
-- Is alarm in the min_notification_interval time frame
|
||||||
|
IF (
|
||||||
|
((_alarms->'low_battery_voltage_threshold'->>'date') IS NULL) OR
|
||||||
|
(((_alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ
|
||||||
|
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||||
|
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||||
|
) THEN
|
||||||
|
-- Add alarm
|
||||||
|
alarms := '{"low_battery_voltage_threshold": {"value": '|| metric_rec.voltage ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||||
|
-- Merge alarms
|
||||||
|
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||||
|
-- Update alarms for user
|
||||||
|
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||||
|
SELECT user_settings::JSONB || ('{"alert": "low_battery_voltage_threshold value:'|| metric_rec.voltage ||'"}'::text)::JSONB into user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||||
|
-- DEBUG
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold +interval';
|
||||||
|
END IF;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold';
|
||||||
|
END IF;
|
||||||
|
if (metric_rec.charge*100) < (alert_rec.alerting->'low_battery_charge_threshold')::numeric then
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||||
|
-- Get latest alarms
|
||||||
|
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
|
-- Is alarm in the min_notification_interval time frame
|
||||||
|
IF (
|
||||||
|
((_alarms->'low_battery_charge_threshold'->>'date') IS NULL) OR
|
||||||
|
(((_alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ
|
||||||
|
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||||
|
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||||
|
) THEN
|
||||||
|
-- Add alarm
|
||||||
|
alarms := '{"low_battery_charge_threshold": {"value": '|| (metric_rec.charge*100) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||||
|
-- Merge alarms
|
||||||
|
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||||
|
-- Update alarms for user
|
||||||
|
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||||
|
SELECT user_settings::JSONB || ('{"alert": "low_battery_charge_threshold value:'|| (metric_rec.charge*100) ||'"}'::text)::JSONB into user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||||
|
-- DEBUG
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold +interval';
|
||||||
|
END IF;
|
||||||
|
RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold';
|
||||||
|
END IF;
|
||||||
|
-- Record last metrics time
|
||||||
|
SELECT metric_rec.time_bucket INTO last_metric;
|
||||||
|
END LOOP;
|
||||||
|
PERFORM api.update_user_preferences_fn('{alert_last_metric}'::TEXT, last_metric::TEXT);
|
||||||
END LOOP;
|
END LOOP;
|
||||||
END;
|
END;
|
||||||
$$ language plpgsql;
|
$$ language plpgsql;
|
||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.cron_process_alerts_fn
|
public.cron_alerts_fn
|
||||||
IS 'init by pg_cron to check for alerts';
|
IS 'init by pg_cron to check for alerts';
|
||||||
|
|
||||||
-- CRON for no vessel notification
|
-- CRON for no vessel notification
|
||||||
@@ -437,7 +883,7 @@ DECLARE
|
|||||||
no_activity_rec record;
|
no_activity_rec record;
|
||||||
user_settings jsonb;
|
user_settings jsonb;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Check for vessel with no activity for more than 200 days
|
-- Check for vessel with no activity for more than 230 days
|
||||||
RAISE NOTICE 'cron_process_no_activity_fn';
|
RAISE NOTICE 'cron_process_no_activity_fn';
|
||||||
FOR no_activity_rec in
|
FOR no_activity_rec in
|
||||||
SELECT
|
SELECT
|
||||||
@@ -445,7 +891,7 @@ BEGIN
|
|||||||
FROM auth.accounts a
|
FROM auth.accounts a
|
||||||
LEFT JOIN auth.vessels v ON v.owner_email = a.email
|
LEFT JOIN auth.vessels v ON v.owner_email = a.email
|
||||||
LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
|
LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
|
||||||
WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '200 DAYS'
|
WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS'
|
||||||
LOOP
|
LOOP
|
||||||
RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_activity_rec;
|
RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_activity_rec;
|
||||||
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
|
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
|
||||||
@@ -458,7 +904,7 @@ $no_activity$ language plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.cron_process_no_activity_fn
|
public.cron_process_no_activity_fn
|
||||||
IS 'init by pg_cron, check for vessel with no activity for more than 200 days then send notification';
|
IS 'init by pg_cron, check for vessel with no activity for more than 230 days then send notification';
|
||||||
|
|
||||||
-- CRON for deactivated/deletion
|
-- CRON for deactivated/deletion
|
||||||
CREATE FUNCTION cron_process_deactivated_fn() RETURNS void AS $deactivated$
|
CREATE FUNCTION cron_process_deactivated_fn() RETURNS void AS $deactivated$
|
||||||
@@ -531,7 +977,7 @@ COMMENT ON FUNCTION
|
|||||||
-- Need to be in the postgres database.
|
-- Need to be in the postgres database.
|
||||||
\c postgres
|
\c postgres
|
||||||
-- CRON for clean up job details logs
|
-- CRON for clean up job details logs
|
||||||
CREATE FUNCTION job_run_details_cleanup_fn() RETURNS void AS $$
|
CREATE FUNCTION public.job_run_details_cleanup_fn() RETURNS void AS $$
|
||||||
DECLARE
|
DECLARE
|
||||||
BEGIN
|
BEGIN
|
||||||
-- Remove job run log older than 3 months
|
-- Remove job run log older than 3 months
|
||||||
|
@@ -105,34 +105,49 @@ INSERT INTO public.email_templates VALUES
|
|||||||
E'You requested a password recovery. Check your email!\n'),
|
E'You requested a password recovery. Check your email!\n'),
|
||||||
('telegram_otp',
|
('telegram_otp',
|
||||||
'Telegram bot',
|
'Telegram bot',
|
||||||
E'Hello,\nTo connect your account to a @postgsail_bot. Please type this verification code __OTP_CODE__ back to the bot.\nThe code is valid 15 minutes.\nThe PostgSail Team',
|
E'Hello,\nTo connect your account to a @postgsail_bot. Please type this verification code __OTP_CODE__ back to the bot.\nThe code is valid 15 minutes.\nFrancois',
|
||||||
'Telegram bot',
|
'Telegram bot',
|
||||||
E'Hello,\nTo connect your account to a @postgsail_bot. Check your email!\n'),
|
E'Hello,\nTo connect your account to a @postgsail_bot. Check your email!\n'),
|
||||||
('telegram_valid',
|
('telegram_valid',
|
||||||
'Telegram bot',
|
'Telegram bot',
|
||||||
E'Hello __RECIPIENT__,\nCongratulations! You have just connect your account to your vessel, @postgsail_bot.\n\nThe PostgSail Team',
|
E'Hello __RECIPIENT__,\nCongratulations! You have just connect your account to your vessel, @postgsail_bot.\nFrancois',
|
||||||
'Telegram bot!',
|
'Telegram bot!',
|
||||||
E'Congratulations!\nYou have just connect your account to your vessel, @postgsail_bot.\n'),
|
E'Congratulations!\nYou have just connect your account to your vessel, @postgsail_bot.\n'),
|
||||||
('no_vessel',
|
('no_vessel',
|
||||||
'PostgSail add your boat',
|
'PostgSail add your boat',
|
||||||
E'Hello __RECIPIENT__,\nYou have created an account on PostgSail but you have not created your boat yet.\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
|
E'Hello __RECIPIENT__,\nYou created an account on PostgSail but you have not added your boat yet.\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nFrancois',
|
||||||
'PostgSail next step',
|
'PostgSail next step',
|
||||||
E'Hello,\nYou should create your vessel. Check your email!\n'),
|
E'Hello,\nYou should create your vessel. Check your email!\n'),
|
||||||
('no_metadata',
|
('no_metadata',
|
||||||
'PostgSail connect your boat',
|
'PostgSail connect your boat',
|
||||||
E'Hello __RECIPIENT__,\nYou have created an account on PostgSail but you have not connected your boat yet.\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
|
E'Hello __RECIPIENT__,\nYou created an account on PostgSail but you have not connected your boat yet.\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nFrancois',
|
||||||
'PostgSail next step',
|
'PostgSail next step',
|
||||||
E'Hello,\nYou should connect your vessel. Check your email!\n'),
|
E'Hello,\nYou should connect your vessel. Check your email!\n'),
|
||||||
('no_activity',
|
('no_activity',
|
||||||
'PostgSail boat inactivity',
|
'PostgSail boat inactivity',
|
||||||
E'Hello __RECIPIENT__,\nWe don\'t see any activity on your account, do you need any assistance?\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
|
E'Hello __RECIPIENT__,\nWe don\'t see any activity on your account, do you need any assistance?\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nFrancois.',
|
||||||
'PostgSail inactivity!',
|
'PostgSail inactivity!',
|
||||||
E'We detected inactivity. Check your email!\n'),
|
E'We detected inactivity. Check your email!\n'),
|
||||||
('deactivated',
|
('deactivated',
|
||||||
'PostgSail account deactivated',
|
'PostgSail account deactivated',
|
||||||
E'Hello __RECIPIENT__,\nYour account has been deactivated and all your data has been removed from PostgSail system.',
|
E'Hello __RECIPIENT__,\nYour account has been deactivated and all your data has been removed from PostgSail system.',
|
||||||
'PostgSail deactivated!',
|
'PostgSail deactivated!',
|
||||||
E'We removed your account. Check your email!\n');
|
E'We removed your account. Check your email!\n'),
|
||||||
|
('grafana',
|
||||||
|
'PostgSail Grafana integration',
|
||||||
|
E'Hello __RECIPIENT__,\nCongratulations! You unlocked Grafana dashboard.\nSee more details at https://app.openplotter.cloud\nHappy sailing!\nFrancois',
|
||||||
|
'PostgSail Grafana!',
|
||||||
|
E'Congratulations!\nYou unlocked Grafana dashboard.\nSee more details at https://app.openplotter.cloud\n'),
|
||||||
|
('windy',
|
||||||
|
'PostgSail Windy Weather station',
|
||||||
|
E'Hello __RECIPIENT__,\nCongratulations! Your boat is now a Windy Weather station.\nSee more details at __APP_URL__/windy\nHappy sailing!\nFrancois',
|
||||||
|
'PostgSail Windy!',
|
||||||
|
E'Congratulations!\nYour boat is now a Windy Weather station.\nSee more details at __APP_URL__/windy\n'),
|
||||||
|
('alert',
|
||||||
|
'PostgSail Alert',
|
||||||
|
E'Hello __RECIPIENT__,\nWe detected an alert __ALERT__.\nSee more details at __APP_URL__\nStay safe.\nFrancois',
|
||||||
|
'PostgSail Alert!',
|
||||||
|
E'We detected an alert __ALERT__.\n');
|
||||||
|
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- Queue handling
|
-- Queue handling
|
||||||
@@ -178,7 +193,10 @@ $new_account_entry$ language plpgsql;
|
|||||||
|
|
||||||
create function new_account_otp_validation_entry_fn() returns trigger as $new_account_otp_validation_entry$
|
create function new_account_otp_validation_entry_fn() returns trigger as $new_account_otp_validation_entry$
|
||||||
begin
|
begin
|
||||||
|
-- Add email_otp check only if not from oauth server
|
||||||
|
if (NEW.preferences->>'email_verified')::boolean IS NOT True then
|
||||||
insert into process_queue (channel, payload, stored, ref_id) values ('email_otp', NEW.email, now(), NEW.user_id);
|
insert into process_queue (channel, payload, stored, ref_id) values ('email_otp', NEW.email, now(), NEW.user_id);
|
||||||
|
end if;
|
||||||
return NEW;
|
return NEW;
|
||||||
END;
|
END;
|
||||||
$new_account_otp_validation_entry$ language plpgsql;
|
$new_account_otp_validation_entry$ language plpgsql;
|
||||||
|
@@ -14,7 +14,6 @@ CREATE SCHEMA IF NOT EXISTS public;
|
|||||||
-- Functions public schema
|
-- Functions public schema
|
||||||
-- process single cron event, process_[logbook|stay|moorage]_queue_fn()
|
-- process single cron event, process_[logbook|stay|moorage]_queue_fn()
|
||||||
--
|
--
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION public.logbook_metrics_dwithin_fn(
|
CREATE OR REPLACE FUNCTION public.logbook_metrics_dwithin_fn(
|
||||||
IN _start text,
|
IN _start text,
|
||||||
IN _end text,
|
IN _end text,
|
||||||
@@ -40,7 +39,7 @@ $logbook_metrics_dwithin$ LANGUAGE plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.logbook_metrics_dwithin_fn
|
public.logbook_metrics_dwithin_fn
|
||||||
IS 'Check if all entries for a logbook are in stationary movement with 15 meters';
|
IS 'Check if all entries for a logbook are in stationary movement with 50 meters';
|
||||||
|
|
||||||
-- Update a logbook with avg data
|
-- Update a logbook with avg data
|
||||||
-- TODO using timescale function
|
-- TODO using timescale function
|
||||||
@@ -145,8 +144,9 @@ CREATE FUNCTION public.logbook_update_geojson_fn(IN _id integer, IN _start text,
|
|||||||
time,
|
time,
|
||||||
courseovergroundtrue,
|
courseovergroundtrue,
|
||||||
speedoverground,
|
speedoverground,
|
||||||
anglespeedapparent,
|
windspeedapparent,
|
||||||
longitude,latitude,
|
longitude,latitude,
|
||||||
|
'' AS notes,
|
||||||
st_makepoint(longitude,latitude) AS geo_point
|
st_makepoint(longitude,latitude) AS geo_point
|
||||||
FROM api.metrics m
|
FROM api.metrics m
|
||||||
WHERE m.latitude IS NOT NULL
|
WHERE m.latitude IS NOT NULL
|
||||||
@@ -380,16 +380,7 @@ CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void
|
|||||||
log_settings jsonb;
|
log_settings jsonb;
|
||||||
user_settings jsonb;
|
user_settings jsonb;
|
||||||
geojson jsonb;
|
geojson jsonb;
|
||||||
_invalid_time boolean;
|
|
||||||
_invalid_interval boolean;
|
|
||||||
_invalid_distance boolean;
|
|
||||||
count_metric numeric;
|
|
||||||
previous_stays_id numeric;
|
|
||||||
current_stays_departed text;
|
|
||||||
current_stays_id numeric;
|
|
||||||
current_stays_active boolean;
|
|
||||||
extra_json jsonb;
|
extra_json jsonb;
|
||||||
geo jsonb;
|
|
||||||
BEGIN
|
BEGIN
|
||||||
-- If _id is not NULL
|
-- If _id is not NULL
|
||||||
IF _id IS NULL OR _id < 1 THEN
|
IF _id IS NULL OR _id < 1 THEN
|
||||||
@@ -414,89 +405,22 @@ CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void
|
|||||||
PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
|
PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
|
||||||
--RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
--RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
||||||
|
|
||||||
-- Check if all metrics are within 50meters base on geo loc
|
|
||||||
count_metric := logbook_metrics_dwithin_fn(logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT, logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
|
|
||||||
RAISE NOTICE '-> process_logbook_queue_fn logbook_metrics_dwithin_fn count:[%]', count_metric;
|
|
||||||
|
|
||||||
-- Calculate logbook data average and geo
|
-- Calculate logbook data average and geo
|
||||||
-- Update logbook entry with the latest metric data and calculate data
|
-- Update logbook entry with the latest metric data and calculate data
|
||||||
avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
||||||
geo_rec := logbook_update_geom_distance_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
geo_rec := logbook_update_geom_distance_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
||||||
|
|
||||||
-- Avoid/ignore/delete logbook stationary movement or time sync issue
|
|
||||||
-- Check time start vs end
|
|
||||||
SELECT logbook_rec._to_time::TIMESTAMPTZ < logbook_rec._from_time::TIMESTAMPTZ INTO _invalid_time;
|
|
||||||
-- Is distance is less than 0.010
|
|
||||||
SELECT geo_rec._track_distance < 0.010 INTO _invalid_distance;
|
|
||||||
-- Is duration is less than 100sec
|
|
||||||
SELECT (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ) < (100::text||' secs')::interval INTO _invalid_interval;
|
|
||||||
-- if stationary fix data metrics,logbook,stays,moorage
|
|
||||||
IF _invalid_time IS True OR _invalid_distance IS True
|
|
||||||
OR _invalid_interval IS True OR count_metric = avg_rec.count_metric
|
|
||||||
OR avg_rec.count_metric <= 2 THEN
|
|
||||||
RAISE NOTICE '-> process_logbook_queue_fn invalid logbook data id [%], _invalid_time [%], _invalid_distance [%], _invalid_interval [%], count_metric_in_zone [%], count_metric_log [%]',
|
|
||||||
logbook_rec.id, _invalid_time, _invalid_distance, _invalid_interval, count_metric, avg_rec.count_metric;
|
|
||||||
-- Update metrics status to moored
|
|
||||||
UPDATE api.metrics
|
|
||||||
SET status = 'moored'
|
|
||||||
WHERE time >= logbook_rec._from_time::TIMESTAMPTZ
|
|
||||||
AND time <= logbook_rec._to_time::TIMESTAMPTZ
|
|
||||||
AND vessel_id = current_setting('vessel.id', false);
|
|
||||||
-- Update logbook
|
|
||||||
UPDATE api.logbook
|
|
||||||
SET notes = 'invalid logbook data, stationary need to fix metrics?'
|
|
||||||
WHERE id = logbook_rec.id;
|
|
||||||
-- Get related stays
|
|
||||||
SELECT id,departed,active INTO current_stays_id,current_stays_departed,current_stays_active
|
|
||||||
FROM api.stays s
|
|
||||||
WHERE s.vessel_id = current_setting('vessel.id', false)
|
|
||||||
AND s.arrived = logbook_rec._to_time;
|
|
||||||
-- Update related stays
|
|
||||||
UPDATE api.stays
|
|
||||||
SET notes = 'invalid stays data, stationary need to fix metrics?'
|
|
||||||
WHERE vessel_id = current_setting('vessel.id', false)
|
|
||||||
AND arrived = logbook_rec._to_time;
|
|
||||||
-- Find previous stays
|
|
||||||
SELECT id INTO previous_stays_id
|
|
||||||
FROM api.stays s
|
|
||||||
WHERE s.vessel_id = current_setting('vessel.id', false)
|
|
||||||
AND s.arrived < logbook_rec._to_time
|
|
||||||
ORDER BY s.arrived DESC LIMIT 1;
|
|
||||||
-- Update previous stays with the departed time from current stays
|
|
||||||
-- and set the active state from current stays
|
|
||||||
UPDATE api.stays
|
|
||||||
SET departed = current_stays_departed::TIMESTAMPTZ,
|
|
||||||
active = current_stays_active
|
|
||||||
WHERE vessel_id = current_setting('vessel.id', false)
|
|
||||||
AND id = previous_stays_id;
|
|
||||||
-- Clean up, remove invalid logbook and stay entry
|
|
||||||
DELETE FROM api.logbook WHERE id = logbook_rec.id;
|
|
||||||
RAISE WARNING '-> process_logbook_queue_fn delete invalid logbook [%]', logbook_rec.id;
|
|
||||||
DELETE FROM api.stays WHERE id = current_stays_id;
|
|
||||||
RAISE WARNING '-> process_logbook_queue_fn delete invalid stays [%]', current_stays_id;
|
|
||||||
-- TODO should we subtract (-1) moorages ref count or reprocess it?!?
|
|
||||||
RETURN;
|
|
||||||
END IF;
|
|
||||||
|
|
||||||
-- Do we have an existing moorage within 300m of the new log
|
-- Do we have an existing moorage within 300m of the new log
|
||||||
-- generate logbook name, concat _from_location and _to_location from moorage name
|
-- generate logbook name, concat _from_location and _to_location from moorage name
|
||||||
from_moorage := process_lat_lon_fn(logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
|
from_moorage := process_lat_lon_fn(logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
|
||||||
to_moorage := process_lat_lon_fn(logbook_rec._to_lng::NUMERIC, logbook_rec._to_lat::NUMERIC);
|
to_moorage := process_lat_lon_fn(logbook_rec._to_lng::NUMERIC, logbook_rec._to_lat::NUMERIC);
|
||||||
SELECT CONCAT(from_moorage.moorage_name, ' to ' , to_moorage.moorage_name) INTO log_name;
|
SELECT CONCAT(from_moorage.moorage_name, ' to ' , to_moorage.moorage_name) INTO log_name;
|
||||||
-- Generate logbook name, concat _from_location and _to_location
|
|
||||||
-- geo reverse _from_lng _from_lat
|
|
||||||
-- geo reverse _to_lng _to_lat
|
|
||||||
--geo := reverse_geocode_py_fn('nominatim', logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
|
|
||||||
--from_name := geo->>'name';
|
|
||||||
--geo := reverse_geocode_py_fn('nominatim', logbook_rec._to_lng::NUMERIC, logbook_rec._to_lat::NUMERIC);
|
|
||||||
--to_name := geo->>'name';
|
|
||||||
--SELECT CONCAT(from_name, ' to ' , to_name) INTO log_name;
|
|
||||||
|
|
||||||
-- Process `propulsion.*.runTime` and `navigation.log`
|
-- Process `propulsion.*.runTime` and `navigation.log`
|
||||||
-- Calculate extra json
|
-- Calculate extra json
|
||||||
extra_json := logbook_update_extra_json_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
extra_json := logbook_update_extra_json_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
|
||||||
|
|
||||||
RAISE NOTICE 'Updating valid logbook entry [%] [%] [%]', logbook_rec.id, logbook_rec._from_time, logbook_rec._to_time;
|
RAISE NOTICE 'Updating valid logbook entry logbook id:[%] start:[%] end:[%]', logbook_rec.id, logbook_rec._from_time, logbook_rec._to_time;
|
||||||
UPDATE api.logbook
|
UPDATE api.logbook
|
||||||
SET
|
SET
|
||||||
duration = (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ),
|
duration = (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ),
|
||||||
@@ -510,7 +434,8 @@ CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void
|
|||||||
name = log_name,
|
name = log_name,
|
||||||
track_geom = geo_rec._track_geom,
|
track_geom = geo_rec._track_geom,
|
||||||
distance = geo_rec._track_distance,
|
distance = geo_rec._track_distance,
|
||||||
extra = extra_json
|
extra = extra_json,
|
||||||
|
notes = NULL -- reset pre_log process
|
||||||
WHERE id = logbook_rec.id;
|
WHERE id = logbook_rec.id;
|
||||||
|
|
||||||
-- GeoJSON require track_geom field
|
-- GeoJSON require track_geom field
|
||||||
@@ -531,8 +456,8 @@ CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void
|
|||||||
-- Process badges
|
-- Process badges
|
||||||
RAISE NOTICE '-> debug process_logbook_queue_fn user_settings [%]', user_settings->>'email'::TEXT;
|
RAISE NOTICE '-> debug process_logbook_queue_fn user_settings [%]', user_settings->>'email'::TEXT;
|
||||||
PERFORM set_config('user.email', user_settings->>'email'::TEXT, false);
|
PERFORM set_config('user.email', user_settings->>'email'::TEXT, false);
|
||||||
PERFORM badges_logbook_fn(logbook_rec.id);
|
PERFORM badges_logbook_fn(logbook_rec.id, logbook_rec._to_time::TEXT);
|
||||||
PERFORM badges_geom_fn(logbook_rec.id);
|
PERFORM badges_geom_fn(logbook_rec.id, logbook_rec._to_time::TEXT);
|
||||||
END;
|
END;
|
||||||
$process_logbook_queue$ LANGUAGE plpgsql;
|
$process_logbook_queue$ LANGUAGE plpgsql;
|
||||||
-- Description
|
-- Description
|
||||||
@@ -603,6 +528,9 @@ CREATE OR REPLACE FUNCTION process_stay_queue_fn(IN _id integer) RETURNS void AS
|
|||||||
select sum(departed-arrived) from api.stays where moorage_id = moorage.moorage_id
|
select sum(departed-arrived) from api.stays where moorage_id = moorage.moorage_id
|
||||||
)
|
)
|
||||||
WHERE id = moorage.moorage_id;
|
WHERE id = moorage.moorage_id;
|
||||||
|
|
||||||
|
-- Process badges
|
||||||
|
PERFORM badges_moorages_fn();
|
||||||
END;
|
END;
|
||||||
$process_stay_queue$ LANGUAGE plpgsql;
|
$process_stay_queue$ LANGUAGE plpgsql;
|
||||||
-- Description
|
-- Description
|
||||||
@@ -712,7 +640,7 @@ $process_moorage_queue$ LANGUAGE plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.process_moorage_queue_fn
|
public.process_moorage_queue_fn
|
||||||
IS 'Handle moorage insert or update from stays';
|
IS 'Handle moorage insert or update from stays, deprecated';
|
||||||
|
|
||||||
-- process new account notification
|
-- process new account notification
|
||||||
DROP FUNCTION IF EXISTS process_account_queue_fn;
|
DROP FUNCTION IF EXISTS process_account_queue_fn;
|
||||||
@@ -750,7 +678,7 @@ $process_account_queue$ LANGUAGE plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.process_account_queue_fn
|
public.process_account_queue_fn
|
||||||
IS 'process new account notification';
|
IS 'process new account notification, deprecated';
|
||||||
|
|
||||||
-- process new account otp validation notification
|
-- process new account otp validation notification
|
||||||
DROP FUNCTION IF EXISTS process_account_otp_validation_queue_fn;
|
DROP FUNCTION IF EXISTS process_account_otp_validation_queue_fn;
|
||||||
@@ -790,7 +718,7 @@ $process_account_otp_validation_queue$ LANGUAGE plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.process_account_otp_validation_queue_fn
|
public.process_account_otp_validation_queue_fn
|
||||||
IS 'process new account otp validation notification';
|
IS 'process new account otp validation notification, deprecated';
|
||||||
|
|
||||||
-- process new event notification
|
-- process new event notification
|
||||||
DROP FUNCTION IF EXISTS process_notification_queue_fn;
|
DROP FUNCTION IF EXISTS process_notification_queue_fn;
|
||||||
@@ -843,7 +771,7 @@ $process_notification_queue$ LANGUAGE plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.process_notification_queue_fn
|
public.process_notification_queue_fn
|
||||||
IS 'process new event type notification';
|
IS 'process new event type notification, new_account, new_vessel, email_otp';
|
||||||
|
|
||||||
-- process new vessel notification
|
-- process new vessel notification
|
||||||
DROP FUNCTION IF EXISTS process_vessel_queue_fn;
|
DROP FUNCTION IF EXISTS process_vessel_queue_fn;
|
||||||
@@ -882,7 +810,7 @@ $process_vessel_queue$ LANGUAGE plpgsql;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.process_vessel_queue_fn
|
public.process_vessel_queue_fn
|
||||||
IS 'process new vessel notification';
|
IS 'process new vessel notification, deprecated';
|
||||||
|
|
||||||
-- Get application settings details from a log entry
|
-- Get application settings details from a log entry
|
||||||
DROP FUNCTION IF EXISTS get_app_settings_fn;
|
DROP FUNCTION IF EXISTS get_app_settings_fn;
|
||||||
@@ -899,14 +827,17 @@ BEGIN
|
|||||||
name LIKE 'app.email%'
|
name LIKE 'app.email%'
|
||||||
OR name LIKE 'app.pushover%'
|
OR name LIKE 'app.pushover%'
|
||||||
OR name LIKE 'app.url'
|
OR name LIKE 'app.url'
|
||||||
OR name LIKE 'app.telegram%';
|
OR name LIKE 'app.telegram%'
|
||||||
|
OR name LIKE 'app.grafana_admin_uri'
|
||||||
|
OR name LIKE 'app.keycloak_uri'
|
||||||
|
OR name LIKE 'app.windy_apikey';
|
||||||
END;
|
END;
|
||||||
$get_app_settings$
|
$get_app_settings$
|
||||||
LANGUAGE plpgsql;
|
LANGUAGE plpgsql;
|
||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.get_app_settings_fn
|
public.get_app_settings_fn
|
||||||
IS 'get application settings details, email, pushover, telegram';
|
IS 'get application settings details, email, pushover, telegram, grafana_admin_uri';
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS get_app_url_fn;
|
DROP FUNCTION IF EXISTS get_app_url_fn;
|
||||||
CREATE OR REPLACE FUNCTION get_app_url_fn(OUT app_settings jsonb)
|
CREATE OR REPLACE FUNCTION get_app_url_fn(OUT app_settings jsonb)
|
||||||
@@ -1012,9 +943,7 @@ AS $get_user_settings_from_vesselid$
|
|||||||
'boat' , v.name,
|
'boat' , v.name,
|
||||||
'recipient', a.first,
|
'recipient', a.first,
|
||||||
'email', v.owner_email,
|
'email', v.owner_email,
|
||||||
'settings', a.preferences,
|
'settings', a.preferences
|
||||||
'pushover_key', a.preferences->'pushover_key'
|
|
||||||
--'badges', a.preferences->'badges'
|
|
||||||
) INTO user_settings
|
) INTO user_settings
|
||||||
FROM auth.accounts a, auth.vessels v, api.metadata m
|
FROM auth.accounts a, auth.vessels v, api.metadata m
|
||||||
WHERE m.vessel_id = v.vessel_id
|
WHERE m.vessel_id = v.vessel_id
|
||||||
@@ -1063,7 +992,7 @@ COMMENT ON FUNCTION
|
|||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- Badges
|
-- Badges
|
||||||
--
|
--
|
||||||
CREATE OR REPLACE FUNCTION public.badges_logbook_fn(IN logbook_id integer) RETURNS VOID AS $badges_logbook$
|
CREATE OR REPLACE FUNCTION public.badges_logbook_fn(IN logbook_id INTEGER, IN logbook_time TEXT) RETURNS VOID AS $badges_logbook$
|
||||||
DECLARE
|
DECLARE
|
||||||
_badges jsonb;
|
_badges jsonb;
|
||||||
_exist BOOLEAN := null;
|
_exist BOOLEAN := null;
|
||||||
@@ -1081,7 +1010,7 @@ CREATE OR REPLACE FUNCTION public.badges_logbook_fn(IN logbook_id integer) RETUR
|
|||||||
select count(*) into total from api.logbook l where vessel_id = current_setting('vessel.id', false);
|
select count(*) into total from api.logbook l where vessel_id = current_setting('vessel.id', false);
|
||||||
if total >= 1 then
|
if total >= 1 then
|
||||||
-- Add badge
|
-- Add badge
|
||||||
badge := '{"Helmsman": {"log": '|| logbook_id ||', "date":"' || NOW()::timestamp || '"}}';
|
badge := '{"Helmsman": {"log": '|| logbook_id ||', "date":"' || logbook_time || '"}}';
|
||||||
-- Get existing badges
|
-- Get existing badges
|
||||||
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
-- Merge badges
|
-- Merge badges
|
||||||
@@ -1105,7 +1034,7 @@ CREATE OR REPLACE FUNCTION public.badges_logbook_fn(IN logbook_id integer) RETUR
|
|||||||
--RAISE WARNING '-> Wake Maker max_wind_speed %', max_wind_speed;
|
--RAISE WARNING '-> Wake Maker max_wind_speed %', max_wind_speed;
|
||||||
if max_wind_speed >= 15 then
|
if max_wind_speed >= 15 then
|
||||||
-- Create badge
|
-- Create badge
|
||||||
badge := '{"Wake Maker": {"log": '|| logbook_id ||', "date":"' || NOW()::timestamp || '"}}';
|
badge := '{"Wake Maker": {"log": '|| logbook_id ||', "date":"' || logbook_time || '"}}';
|
||||||
--RAISE WARNING '-> Wake Maker max_wind_speed badge %', badge;
|
--RAISE WARNING '-> Wake Maker max_wind_speed badge %', badge;
|
||||||
-- Get existing badges
|
-- Get existing badges
|
||||||
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
@@ -1130,7 +1059,7 @@ CREATE OR REPLACE FUNCTION public.badges_logbook_fn(IN logbook_id integer) RETUR
|
|||||||
--RAISE WARNING '-> Stormtrooper max_wind_speed %', max_wind_speed;
|
--RAISE WARNING '-> Stormtrooper max_wind_speed %', max_wind_speed;
|
||||||
if max_wind_speed >= 30 then
|
if max_wind_speed >= 30 then
|
||||||
-- Create badge
|
-- Create badge
|
||||||
badge := '{"Stormtrooper": {"log": '|| logbook_id ||', "date":"' || NOW()::timestamp || '"}}';
|
badge := '{"Stormtrooper": {"log": '|| logbook_id ||', "date":"' || logbook_time || '"}}';
|
||||||
--RAISE WARNING '-> Stormtrooper max_wind_speed badge %', badge;
|
--RAISE WARNING '-> Stormtrooper max_wind_speed badge %', badge;
|
||||||
-- Get existing badges
|
-- Get existing badges
|
||||||
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
@@ -1153,7 +1082,7 @@ CREATE OR REPLACE FUNCTION public.badges_logbook_fn(IN logbook_id integer) RETUR
|
|||||||
select l.distance into distance from api.logbook l where l.id = logbook_id AND l.distance >= 100 and vessel_id = current_setting('vessel.id', false);
|
select l.distance into distance from api.logbook l where l.id = logbook_id AND l.distance >= 100 and vessel_id = current_setting('vessel.id', false);
|
||||||
if distance >= 100 then
|
if distance >= 100 then
|
||||||
-- Create badge
|
-- Create badge
|
||||||
badge := '{"Navigator Award": {"log": '|| logbook_id ||', "date":"' || NOW()::timestamp || '"}}';
|
badge := '{"Navigator Award": {"log": '|| logbook_id ||', "date":"' || logbook_time || '"}}';
|
||||||
-- Get existing badges
|
-- Get existing badges
|
||||||
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
-- Merge badges
|
-- Merge badges
|
||||||
@@ -1174,7 +1103,7 @@ CREATE OR REPLACE FUNCTION public.badges_logbook_fn(IN logbook_id integer) RETUR
|
|||||||
select sum(l.distance) into distance from api.logbook l where vessel_id = current_setting('vessel.id', false);
|
select sum(l.distance) into distance from api.logbook l where vessel_id = current_setting('vessel.id', false);
|
||||||
if distance >= 1000 then
|
if distance >= 1000 then
|
||||||
-- Create badge
|
-- Create badge
|
||||||
badge := '{"Captain Award": {"log": '|| logbook_id ||', "date":"' || NOW()::timestamp || '"}}';
|
badge := '{"Captain Award": {"log": '|| logbook_id ||', "date":"' || logbook_time || '"}}';
|
||||||
-- Get existing badges
|
-- Get existing badges
|
||||||
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
-- Merge badges
|
-- Merge badges
|
||||||
@@ -1281,7 +1210,7 @@ COMMENT ON FUNCTION
|
|||||||
public.badges_moorages_fn
|
public.badges_moorages_fn
|
||||||
IS 'check moorages for new badges, eg: Explorer, Mooring Pro, Anchormaster';
|
IS 'check moorages for new badges, eg: Explorer, Mooring Pro, Anchormaster';
|
||||||
|
|
||||||
CREATE OR REPLACE FUNCTION public.badges_geom_fn(IN logbook_id integer) RETURNS VOID AS $badges_geom$
|
CREATE OR REPLACE FUNCTION public.badges_geom_fn(IN logbook_id INTEGER, IN logbook_time TEXT) RETURNS VOID AS $badges_geom$
|
||||||
DECLARE
|
DECLARE
|
||||||
_badges jsonb;
|
_badges jsonb;
|
||||||
_exist BOOLEAN := false;
|
_exist BOOLEAN := false;
|
||||||
@@ -1310,7 +1239,7 @@ CREATE OR REPLACE FUNCTION public.badges_geom_fn(IN logbook_id integer) RETURNS
|
|||||||
--RAISE WARNING 'geography_marine [%]', _exist;
|
--RAISE WARNING 'geography_marine [%]', _exist;
|
||||||
if _exist is false then
|
if _exist is false then
|
||||||
-- Create badge
|
-- Create badge
|
||||||
badge := '{"' || marine_rec.name || '": {"log": '|| logbook_id ||', "date":"' || NOW()::timestamp || '"}}';
|
badge := '{"' || marine_rec.name || '": {"log": '|| logbook_id ||', "date":"' || logbook_time || '"}}';
|
||||||
-- Get existing badges
|
-- Get existing badges
|
||||||
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
SELECT preferences->'badges' INTO _badges FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||||
-- Merge badges
|
-- Merge badges
|
||||||
@@ -1333,8 +1262,8 @@ COMMENT ON FUNCTION
|
|||||||
public.badges_geom_fn
|
public.badges_geom_fn
|
||||||
IS 'check geometry logbook for new badges, eg: Tropic, Alaska, Geographic zone';
|
IS 'check geometry logbook for new badges, eg: Tropic, Alaska, Geographic zone';
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS public.process_logbook_valid_fn;
|
DROP FUNCTION IF EXISTS public.process_pre_logbook_fn;
|
||||||
CREATE OR REPLACE FUNCTION public.process_logbook_valid_fn(IN _id integer) RETURNS void AS $process_logbook_valid$
|
CREATE OR REPLACE FUNCTION public.process_pre_logbook_fn(IN _id integer) RETURNS void AS $process_pre_logbook$
|
||||||
DECLARE
|
DECLARE
|
||||||
logbook_rec record;
|
logbook_rec record;
|
||||||
avg_rec record;
|
avg_rec record;
|
||||||
@@ -1342,15 +1271,17 @@ CREATE OR REPLACE FUNCTION public.process_logbook_valid_fn(IN _id integer) RETUR
|
|||||||
_invalid_time boolean;
|
_invalid_time boolean;
|
||||||
_invalid_interval boolean;
|
_invalid_interval boolean;
|
||||||
_invalid_distance boolean;
|
_invalid_distance boolean;
|
||||||
|
_invalid_ratio boolean;
|
||||||
count_metric numeric;
|
count_metric numeric;
|
||||||
previous_stays_id numeric;
|
previous_stays_id numeric;
|
||||||
current_stays_departed text;
|
current_stays_departed text;
|
||||||
current_stays_id numeric;
|
current_stays_id numeric;
|
||||||
current_stays_active boolean;
|
current_stays_active boolean;
|
||||||
|
timebucket boolean;
|
||||||
BEGIN
|
BEGIN
|
||||||
-- If _id is not NULL
|
-- If _id is not NULL
|
||||||
IF _id IS NULL OR _id < 1 THEN
|
IF _id IS NULL OR _id < 1 THEN
|
||||||
RAISE WARNING '-> process_logbook_valid_fn invalid input %', _id;
|
RAISE WARNING '-> process_pre_logbook_fn invalid input %', _id;
|
||||||
RETURN;
|
RETURN;
|
||||||
END IF;
|
END IF;
|
||||||
-- Get the logbook record with all necessary fields exist
|
-- Get the logbook record with all necessary fields exist
|
||||||
@@ -1364,16 +1295,16 @@ CREATE OR REPLACE FUNCTION public.process_logbook_valid_fn(IN _id integer) RETUR
|
|||||||
AND _to_lat IS NOT NULL;
|
AND _to_lat IS NOT NULL;
|
||||||
-- Ensure the query is successful
|
-- Ensure the query is successful
|
||||||
IF logbook_rec.vessel_id IS NULL THEN
|
IF logbook_rec.vessel_id IS NULL THEN
|
||||||
RAISE WARNING '-> process_logbook_valid_fn invalid logbook %', _id;
|
RAISE WARNING '-> process_pre_logbook_fn invalid logbook %', _id;
|
||||||
RETURN;
|
RETURN;
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
|
PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
|
||||||
--RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
--RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
||||||
|
|
||||||
-- Check if all metrics are within 10meters base on geo loc
|
-- Check if all metrics are within 50meters base on geo loc
|
||||||
count_metric := logbook_metrics_dwithin_fn(logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT, logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
|
count_metric := logbook_metrics_dwithin_fn(logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT, logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
|
||||||
RAISE NOTICE '-> process_logbook_valid_fn logbook_metrics_dwithin_fn count:[%]', count_metric;
|
RAISE NOTICE '-> process_pre_logbook_fn logbook_metrics_dwithin_fn count:[%]', count_metric;
|
||||||
|
|
||||||
-- Calculate logbook data average and geo
|
-- Calculate logbook data average and geo
|
||||||
-- Update logbook entry with the latest metric data and calculate data
|
-- Update logbook entry with the latest metric data and calculate data
|
||||||
@@ -1387,11 +1318,18 @@ CREATE OR REPLACE FUNCTION public.process_logbook_valid_fn(IN _id integer) RETUR
|
|||||||
SELECT geo_rec._track_distance < 0.010 INTO _invalid_distance;
|
SELECT geo_rec._track_distance < 0.010 INTO _invalid_distance;
|
||||||
-- Is duration is less than 100sec
|
-- Is duration is less than 100sec
|
||||||
SELECT (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ) < (100::text||' secs')::interval INTO _invalid_interval;
|
SELECT (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ) < (100::text||' secs')::interval INTO _invalid_interval;
|
||||||
|
-- If we have less than 15 metrics
|
||||||
|
-- Is within metrics represent more or equal than 60% of the total entry
|
||||||
|
IF count_metric::NUMERIC <= 15 THEN
|
||||||
|
SELECT (count_metric::NUMERIC / avg_rec.count_metric::NUMERIC) >= 0.60 INTO _invalid_ratio;
|
||||||
|
END IF;
|
||||||
-- if stationary fix data metrics,logbook,stays,moorage
|
-- if stationary fix data metrics,logbook,stays,moorage
|
||||||
IF _invalid_time IS True OR _invalid_distance IS True
|
IF _invalid_time IS True OR _invalid_distance IS True
|
||||||
OR _invalid_interval IS True OR count_metric = avg_rec.count_metric THEN
|
OR _invalid_interval IS True OR count_metric = avg_rec.count_metric
|
||||||
RAISE NOTICE '-> process_logbook_queue_fn invalid logbook data id [%], _invalid_time [%], _invalid_distance [%], _invalid_interval [%], within count_metric == total count_metric [%]',
|
OR _invalid_ratio IS True
|
||||||
logbook_rec.id, _invalid_time, _invalid_distance, _invalid_interval, count_metric;
|
OR avg_rec.count_metric <= 3 THEN
|
||||||
|
RAISE NOTICE '-> process_pre_logbook_fn invalid logbook data id [%], _invalid_time [%], _invalid_distance [%], _invalid_interval [%], count_metric_in_zone [%], count_metric_log [%], _invalid_ratio [%]',
|
||||||
|
logbook_rec.id, _invalid_time, _invalid_distance, _invalid_interval, count_metric, avg_rec.count_metric, _invalid_ratio;
|
||||||
-- Update metrics status to moored
|
-- Update metrics status to moored
|
||||||
UPDATE api.metrics
|
UPDATE api.metrics
|
||||||
SET status = 'moored'
|
SET status = 'moored'
|
||||||
@@ -1428,19 +1366,40 @@ CREATE OR REPLACE FUNCTION public.process_logbook_valid_fn(IN _id integer) RETUR
|
|||||||
AND id = previous_stays_id;
|
AND id = previous_stays_id;
|
||||||
-- Clean up, remove invalid logbook and stay entry
|
-- Clean up, remove invalid logbook and stay entry
|
||||||
DELETE FROM api.logbook WHERE id = logbook_rec.id;
|
DELETE FROM api.logbook WHERE id = logbook_rec.id;
|
||||||
RAISE WARNING '-> process_logbook_queue_fn delete invalid logbook [%]', logbook_rec.id;
|
RAISE WARNING '-> process_pre_logbook_fn delete invalid logbook [%]', logbook_rec.id;
|
||||||
DELETE FROM api.stays WHERE id = current_stays_id;
|
DELETE FROM api.stays WHERE id = current_stays_id;
|
||||||
RAISE WARNING '-> process_logbook_queue_fn delete invalid stays [%]', current_stays_id;
|
RAISE WARNING '-> process_pre_logbook_fn delete invalid stays [%]', current_stays_id;
|
||||||
-- TODO should we subtract (-1) moorages ref count or reprocess it?!?
|
|
||||||
RETURN;
|
RETURN;
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
|
--IF (logbook_rec.notes IS NULL) THEN -- run one time only
|
||||||
|
-- -- If duration is over 24h or number of entry is over 400, check for stays and potential multiple logs with stationary location
|
||||||
|
-- IF (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ) > INTERVAL '24 hours'
|
||||||
|
-- OR avg_rec.count_metric > 400 THEN
|
||||||
|
-- timebucket := public.logbook_metrics_timebucket_fn('15 minutes'::TEXT, logbook_rec.id, logbook_rec._from_time::TIMESTAMPTZ, logbook_rec._to_time::TIMESTAMPTZ);
|
||||||
|
-- -- If true exit current process as the current logbook need to be re-process.
|
||||||
|
-- IF timebucket IS True THEN
|
||||||
|
-- RETURN;
|
||||||
|
-- END IF;
|
||||||
|
-- ELSE
|
||||||
|
-- timebucket := public.logbook_metrics_timebucket_fn('5 minutes'::TEXT, logbook_rec.id, logbook_rec._from_time::TIMESTAMPTZ, logbook_rec._to_time::TIMESTAMPTZ);
|
||||||
|
-- -- If true exit current process as the current logbook need to be re-process.
|
||||||
|
-- IF timebucket IS True THEN
|
||||||
|
-- RETURN;
|
||||||
|
-- END IF;
|
||||||
|
-- END IF;
|
||||||
|
--END IF;
|
||||||
|
|
||||||
|
-- Add logbook entry to process queue for later processing
|
||||||
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
|
VALUES ('new_logbook', logbook_rec.id, NOW(), current_setting('vessel.id', true));
|
||||||
|
|
||||||
END;
|
END;
|
||||||
$process_logbook_valid$ LANGUAGE plpgsql;
|
$process_pre_logbook$ LANGUAGE plpgsql;
|
||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.process_logbook_queue_fn
|
public.process_pre_logbook_fn
|
||||||
IS 'Avoid/ignore/delete logbook stationary movement or time sync issue';
|
IS 'Detect/Avoid/ignore/delete logbook stationary movement or time sync issue';
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS process_lat_lon_fn;
|
DROP FUNCTION IF EXISTS process_lat_lon_fn;
|
||||||
CREATE OR REPLACE FUNCTION process_lat_lon_fn(IN lon NUMERIC, IN lat NUMERIC,
|
CREATE OR REPLACE FUNCTION process_lat_lon_fn(IN lon NUMERIC, IN lat NUMERIC,
|
||||||
@@ -1459,10 +1418,9 @@ CREATE OR REPLACE FUNCTION process_lat_lon_fn(IN lon NUMERIC, IN lat NUMERIC,
|
|||||||
geo jsonb;
|
geo jsonb;
|
||||||
overpass jsonb;
|
overpass jsonb;
|
||||||
BEGIN
|
BEGIN
|
||||||
RAISE NOTICE 'process_lat_lon_fn';
|
RAISE NOTICE '-> process_lat_lon_fn';
|
||||||
-- If _id is valid, not NULL
|
|
||||||
IF lon IS NULL OR lat IS NULL THEN
|
IF lon IS NULL OR lat IS NULL THEN
|
||||||
RAISE WARNING '-> process_lat_lon_fn invalid input lon,lat %', _id;
|
RAISE WARNING '-> process_lat_lon_fn invalid input lon %, lat %', lon, lat;
|
||||||
--return NULL;
|
--return NULL;
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
@@ -1490,7 +1448,7 @@ CREATE OR REPLACE FUNCTION process_lat_lon_fn(IN lon NUMERIC, IN lat NUMERIC,
|
|||||||
END IF;
|
END IF;
|
||||||
END LOOP;
|
END LOOP;
|
||||||
|
|
||||||
-- if with in 200m use existing name and stay_code
|
-- if with in 300m use existing name and stay_code
|
||||||
-- else insert new entry
|
-- else insert new entry
|
||||||
IF existing_rec.id IS NOT NULL AND existing_rec.id > 0 THEN
|
IF existing_rec.id IS NOT NULL AND existing_rec.id > 0 THEN
|
||||||
RAISE NOTICE '-> process_lat_lon_fn found close by moorage using existing name and stay_code %', existing_rec;
|
RAISE NOTICE '-> process_lat_lon_fn found close by moorage using existing name and stay_code %', existing_rec;
|
||||||
@@ -1501,7 +1459,7 @@ CREATE OR REPLACE FUNCTION process_lat_lon_fn(IN lon NUMERIC, IN lat NUMERIC,
|
|||||||
RAISE NOTICE '-> process_lat_lon_fn create new moorage';
|
RAISE NOTICE '-> process_lat_lon_fn create new moorage';
|
||||||
-- query overpass api to guess moorage type
|
-- query overpass api to guess moorage type
|
||||||
overpass := overpass_py_fn(lon::NUMERIC, lat::NUMERIC);
|
overpass := overpass_py_fn(lon::NUMERIC, lat::NUMERIC);
|
||||||
RAISE NOTICE '-> process_lat_lon_fn overpass name:[%] type:[%]', overpass->'name', overpass->'seamark:type';
|
RAISE NOTICE '-> process_lat_lon_fn overpass name:[%] seamark:type:[%]', overpass->'name', overpass->'seamark:type';
|
||||||
moorage_type = 1; -- Unknown
|
moorage_type = 1; -- Unknown
|
||||||
IF overpass->>'seamark:type' = 'harbour' AND overpass->>'seamark:harbour:category' = 'marina' then
|
IF overpass->>'seamark:type' = 'harbour' AND overpass->>'seamark:harbour:category' = 'marina' then
|
||||||
moorage_type = 4; -- Dock
|
moorage_type = 4; -- Dock
|
||||||
@@ -1557,6 +1515,207 @@ COMMENT ON FUNCTION
|
|||||||
public.process_lat_lon_fn
|
public.process_lat_lon_fn
|
||||||
IS 'Add or Update moorage base on lat/lon';
|
IS 'Add or Update moorage base on lat/lon';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION public.logbook_metrics_timebucket_fn(
|
||||||
|
IN bucket_interval TEXT,
|
||||||
|
IN _id INTEGER,
|
||||||
|
IN _start TIMESTAMPTZ,
|
||||||
|
IN _end TIMESTAMPTZ,
|
||||||
|
OUT timebucket boolean) AS $logbook_metrics_timebucket$
|
||||||
|
DECLARE
|
||||||
|
time_rec record;
|
||||||
|
stay_rec record;
|
||||||
|
log_rec record;
|
||||||
|
geo_rec record;
|
||||||
|
ref_time timestamptz;
|
||||||
|
stay_id integer;
|
||||||
|
stay_lat DOUBLE PRECISION;
|
||||||
|
stay_lng DOUBLE PRECISION;
|
||||||
|
stay_arv timestamptz;
|
||||||
|
in_interval boolean := False;
|
||||||
|
log_id integer;
|
||||||
|
log_lat DOUBLE PRECISION;
|
||||||
|
log_lng DOUBLE PRECISION;
|
||||||
|
log_start timestamptz;
|
||||||
|
in_log boolean := False;
|
||||||
|
BEGIN
|
||||||
|
timebucket := False;
|
||||||
|
-- Agg metrics over a bucket_interval
|
||||||
|
RAISE NOTICE '-> logbook_metrics_timebucket_fn Starting loop by [%], _start[%], _end[%]', bucket_interval, _start, _end;
|
||||||
|
for time_rec in
|
||||||
|
WITH tbl_bucket AS (
|
||||||
|
SELECT time_bucket(bucket_interval::INTERVAL, time) AS time_bucket,
|
||||||
|
avg(speedoverground) AS speed,
|
||||||
|
last(latitude, time) AS lat,
|
||||||
|
last(longitude, time) AS lng,
|
||||||
|
st_makepoint(avg(longitude),avg(latitude)) AS geo_point
|
||||||
|
FROM api.metrics m
|
||||||
|
WHERE
|
||||||
|
m.latitude IS NOT NULL
|
||||||
|
AND m.longitude IS NOT NULL
|
||||||
|
AND m.time >= _start::TIMESTAMPTZ
|
||||||
|
AND m.time <= _end::TIMESTAMPTZ
|
||||||
|
AND m.vessel_id = current_setting('vessel.id', false)
|
||||||
|
GROUP BY time_bucket
|
||||||
|
ORDER BY time_bucket asc
|
||||||
|
),
|
||||||
|
tbl_bucket2 AS (
|
||||||
|
SELECT time_bucket,
|
||||||
|
speed,
|
||||||
|
geo_point,lat,lng,
|
||||||
|
LEAD(time_bucket,1) OVER (
|
||||||
|
ORDER BY time_bucket asc
|
||||||
|
) time_interval,
|
||||||
|
LEAD(geo_point,1) OVER (
|
||||||
|
ORDER BY time_bucket asc
|
||||||
|
) geo_interval
|
||||||
|
FROM tbl_bucket
|
||||||
|
WHERE speed <= 0.5
|
||||||
|
)
|
||||||
|
SELECT time_bucket,
|
||||||
|
speed,
|
||||||
|
geo_point,lat,lng,
|
||||||
|
time_interval,
|
||||||
|
bucket_interval,
|
||||||
|
(bucket_interval::interval * 2) AS min_interval,
|
||||||
|
(time_bucket - time_interval) AS diff_interval,
|
||||||
|
(time_bucket - time_interval)::INTERVAL < (bucket_interval::interval * 2)::INTERVAL AS to_be_process
|
||||||
|
FROM tbl_bucket2
|
||||||
|
WHERE (time_bucket - time_interval)::INTERVAL < (bucket_interval::interval * 2)::INTERVAL
|
||||||
|
loop
|
||||||
|
RAISE NOTICE '-> logbook_metrics_timebucket_fn ref_time [%] interval [%] bucket_interval[%]', ref_time, time_rec.time_bucket, bucket_interval;
|
||||||
|
select ref_time + bucket_interval::interval * 1 >= time_rec.time_bucket into in_interval;
|
||||||
|
RAISE NOTICE '-> logbook_metrics_timebucket_fn ref_time+inverval[%] interval [%], in_interval [%]', ref_time + bucket_interval::interval * 1, time_rec.time_bucket, in_interval;
|
||||||
|
if ST_DWithin(Geography(ST_MakePoint(stay_lng, stay_lat)), Geography(ST_MakePoint(time_rec.lng, time_rec.lat)), 50) IS True then
|
||||||
|
in_interval := True;
|
||||||
|
end if;
|
||||||
|
if ST_DWithin(Geography(ST_MakePoint(log_lng, log_lat)), Geography(ST_MakePoint(time_rec.lng, time_rec.lat)), 50) IS False then
|
||||||
|
in_interval := False;
|
||||||
|
end if;
|
||||||
|
if in_interval is true then
|
||||||
|
ref_time := time_rec.time_bucket;
|
||||||
|
end if;
|
||||||
|
RAISE NOTICE '-> logbook_metrics_timebucket_fn ref_time is stay within of next point %', ST_DWithin(Geography(ST_MakePoint(stay_lng, stay_lat)), Geography(ST_MakePoint(time_rec.lng, time_rec.lat)), 50);
|
||||||
|
RAISE NOTICE '-> logbook_metrics_timebucket_fn ref_time is NOT log within of next point %', ST_DWithin(Geography(ST_MakePoint(log_lng, log_lat)), Geography(ST_MakePoint(time_rec.lng, time_rec.lat)), 50);
|
||||||
|
if time_rec.time_bucket::TIMESTAMPTZ < _start::TIMESTAMPTZ + bucket_interval::interval * 1 then
|
||||||
|
in_interval := True;
|
||||||
|
end if;
|
||||||
|
RAISE NOTICE '-> logbook_metrics_timebucket_fn ref_time is NOT before start[%] or +interval[%]', (time_rec.time_bucket::TIMESTAMPTZ < _start::TIMESTAMPTZ), (time_rec.time_bucket::TIMESTAMPTZ < _start::TIMESTAMPTZ + bucket_interval::interval * 1);
|
||||||
|
continue when in_interval is True;
|
||||||
|
|
||||||
|
RAISE NOTICE '-> logbook_metrics_timebucket_fn after continue stay_id[%], in_log[%]', stay_id, in_log;
|
||||||
|
if stay_id is null THEN
|
||||||
|
RAISE NOTICE '-> Close current logbook logbook_id ref_time [%] time_rec.time_bucket [%]', ref_time, time_rec.time_bucket;
|
||||||
|
-- Close current logbook
|
||||||
|
geo_rec := logbook_update_geom_distance_fn(_id, _start::TEXT, time_rec.time_bucket::TEXT);
|
||||||
|
UPDATE api.logbook
|
||||||
|
SET
|
||||||
|
active = false,
|
||||||
|
_to_time = time_rec.time_bucket,
|
||||||
|
_to_lat = time_rec.lat,
|
||||||
|
_to_lng = time_rec.lng,
|
||||||
|
track_geom = geo_rec._track_geom,
|
||||||
|
notes = 'updated time_bucket'
|
||||||
|
WHERE id = _id;
|
||||||
|
-- Add logbook entry to process queue for later processing
|
||||||
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
|
VALUES ('pre_logbook', _id, NOW(), current_setting('vessel.id', true));
|
||||||
|
RAISE WARNING '-> Updated existing logbook logbook_id [%] [%] and add to process_queue', _id, time_rec.time_bucket;
|
||||||
|
-- Add new stay
|
||||||
|
INSERT INTO api.stays
|
||||||
|
(vessel_id, active, arrived, latitude, longitude, notes)
|
||||||
|
VALUES (current_setting('vessel.id', false), false, time_rec.time_bucket, time_rec.lat, time_rec.lng, 'autogenerated time_bucket')
|
||||||
|
RETURNING id, latitude, longitude, arrived INTO stay_id, stay_lat, stay_lng, stay_arv;
|
||||||
|
RAISE WARNING '-> Add new stay stay_id [%] [%]', stay_id, time_rec.time_bucket;
|
||||||
|
timebucket := True;
|
||||||
|
elsif in_log is false THEN
|
||||||
|
-- Close current stays
|
||||||
|
UPDATE api.stays
|
||||||
|
SET
|
||||||
|
active = false,
|
||||||
|
departed = ref_time,
|
||||||
|
notes = 'autogenerated time_bucket'
|
||||||
|
WHERE id = stay_id;
|
||||||
|
-- Add stay entry to process queue for further processing
|
||||||
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
|
VALUES ('new_stay', stay_id, now(), current_setting('vessel.id', true));
|
||||||
|
RAISE WARNING '-> Updated existing stays stay_id [%] departed [%] and add to process_queue', stay_id, ref_time;
|
||||||
|
-- Add new logbook
|
||||||
|
INSERT INTO api.logbook
|
||||||
|
(vessel_id, active, _from_time, _from_lat, _from_lng, notes)
|
||||||
|
VALUES (current_setting('vessel.id', false), false, ref_time, stay_lat, stay_lng, 'autogenerated time_bucket')
|
||||||
|
RETURNING id, _from_lat, _from_lng, _from_time INTO log_id, log_lat, log_lng, log_start;
|
||||||
|
RAISE WARNING '-> Add new logbook, logbook_id [%] [%]', log_id, ref_time;
|
||||||
|
in_log := true;
|
||||||
|
stay_id := 0;
|
||||||
|
stay_lat := null;
|
||||||
|
stay_lng := null;
|
||||||
|
timebucket := True;
|
||||||
|
elsif in_log is true THEN
|
||||||
|
RAISE NOTICE '-> Close current logbook logbook_id [%], ref_time [%], time_rec.time_bucket [%]', log_id, ref_time, time_rec.time_bucket;
|
||||||
|
-- Close current logbook
|
||||||
|
geo_rec := logbook_update_geom_distance_fn(_id, log_start::TEXT, time_rec.time_bucket::TEXT);
|
||||||
|
UPDATE api.logbook
|
||||||
|
SET
|
||||||
|
active = false,
|
||||||
|
_to_time = time_rec.time_bucket,
|
||||||
|
_to_lat = time_rec.lat,
|
||||||
|
_to_lng = time_rec.lng,
|
||||||
|
track_geom = geo_rec._track_geom,
|
||||||
|
notes = 'autogenerated time_bucket'
|
||||||
|
WHERE id = log_id;
|
||||||
|
-- Add logbook entry to process queue for later processing
|
||||||
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
|
VALUES ('pre_logbook', log_id, NOW(), current_setting('vessel.id', true));
|
||||||
|
RAISE WARNING '-> Update Existing logbook logbook_id [%] [%] and add to process_queue', log_id, time_rec.time_bucket;
|
||||||
|
-- Add new stay
|
||||||
|
INSERT INTO api.stays
|
||||||
|
(vessel_id, active, arrived, latitude, longitude, notes)
|
||||||
|
VALUES (current_setting('vessel.id', false), false, time_rec.time_bucket, time_rec.lat, time_rec.lng, 'autogenerated time_bucket')
|
||||||
|
RETURNING id, latitude, longitude, arrived INTO stay_id, stay_lat, stay_lng, stay_arv;
|
||||||
|
RAISE WARNING '-> Add new stay stay_id [%] [%]', stay_id, time_rec.time_bucket;
|
||||||
|
in_log := false;
|
||||||
|
log_id := null;
|
||||||
|
log_lat := null;
|
||||||
|
log_lng := null;
|
||||||
|
timebucket := True;
|
||||||
|
end if;
|
||||||
|
RAISE WARNING '-> Update new ref_time [%]', ref_time;
|
||||||
|
ref_time := time_rec.time_bucket;
|
||||||
|
end loop;
|
||||||
|
|
||||||
|
RAISE NOTICE '-> logbook_metrics_timebucket_fn Ending loop stay_id[%], in_log[%]', stay_id, in_log;
|
||||||
|
if in_log is true then
|
||||||
|
RAISE NOTICE '-> Ending log ref_time [%] interval [%]', ref_time, time_rec.time_bucket;
|
||||||
|
end if;
|
||||||
|
if stay_id > 0 then
|
||||||
|
RAISE NOTICE '-> Ending stay ref_time [%] interval [%]', ref_time, time_rec.time_bucket;
|
||||||
|
select * into stay_rec from api.stays s where arrived = _end;
|
||||||
|
-- Close current stays
|
||||||
|
UPDATE api.stays
|
||||||
|
SET
|
||||||
|
active = false,
|
||||||
|
arrived = stay_arv,
|
||||||
|
notes = 'updated time_bucket'
|
||||||
|
WHERE id = stay_rec.id;
|
||||||
|
-- Add stay entry to process queue for further processing
|
||||||
|
INSERT INTO process_queue (channel, payload, stored, ref_id)
|
||||||
|
VALUES ('new_stay', stay_rec.id, now(), current_setting('vessel.id', true));
|
||||||
|
RAISE WARNING '-> Ending Update Existing stays stay_id [%] arrived [%] and add to process_queue', stay_rec.id, stay_arv;
|
||||||
|
delete from api.stays where id = stay_id;
|
||||||
|
RAISE WARNING '-> Ending Delete Existing stays stay_id [%]', stay_id;
|
||||||
|
stay_arv := null;
|
||||||
|
stay_id := null;
|
||||||
|
stay_lat := null;
|
||||||
|
stay_lng := null;
|
||||||
|
timebucket := True;
|
||||||
|
end if;
|
||||||
|
END;
|
||||||
|
$logbook_metrics_timebucket$ LANGUAGE plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.logbook_metrics_timebucket_fn
|
||||||
|
IS 'Check if all entries for a logbook are in stationary movement per time bucket of 15 or 5 min, speed < 0.6knot, d_within 50m of the stay point';
|
||||||
|
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- TODO add alert monitoring for Battery
|
-- TODO add alert monitoring for Battery
|
||||||
|
|
||||||
@@ -1649,6 +1808,12 @@ BEGIN
|
|||||||
--RAISE WARNING 'public.check_jwt() user_role vessel.id [%]', current_setting('vessel.id', false);
|
--RAISE WARNING 'public.check_jwt() user_role vessel.id [%]', current_setting('vessel.id', false);
|
||||||
--RAISE WARNING 'public.check_jwt() user_role vessel.name [%]', current_setting('vessel.name', false);
|
--RAISE WARNING 'public.check_jwt() user_role vessel.name [%]', current_setting('vessel.name', false);
|
||||||
ELSIF _role = 'vessel_role' THEN
|
ELSIF _role = 'vessel_role' THEN
|
||||||
|
SELECT current_setting('request.path', true) into _path;
|
||||||
|
--RAISE WARNING 'req path %', current_setting('request.path', true);
|
||||||
|
-- Function allow without defined vessel like for anonymous role
|
||||||
|
IF _path ~ '^\/rpc\/(oauth_\w+)$' THEN
|
||||||
|
RETURN;
|
||||||
|
END IF;
|
||||||
-- Extract vessel_id from jwt token
|
-- Extract vessel_id from jwt token
|
||||||
SELECT current_setting('request.jwt.claims', true)::json->>'vid' INTO _vid;
|
SELECT current_setting('request.jwt.claims', true)::json->>'vid' INTO _vid;
|
||||||
-- Check the vessel and user exist
|
-- Check the vessel and user exist
|
||||||
@@ -1666,7 +1831,7 @@ BEGIN
|
|||||||
--RAISE WARNING 'public.check_jwt() user_role vessel.name %', current_setting('vessel.name', false);
|
--RAISE WARNING 'public.check_jwt() user_role vessel.name %', current_setting('vessel.name', false);
|
||||||
--RAISE WARNING 'public.check_jwt() user_role vessel.id %', current_setting('vessel.id', false);
|
--RAISE WARNING 'public.check_jwt() user_role vessel.id %', current_setting('vessel.id', false);
|
||||||
ELSIF _role = 'api_anonymous' THEN
|
ELSIF _role = 'api_anonymous' THEN
|
||||||
RAISE WARNING 'public.check_jwt() api_anonymous';
|
--RAISE WARNING 'public.check_jwt() api_anonymous';
|
||||||
-- Check if path is the a valid allow anonymous path
|
-- Check if path is the a valid allow anonymous path
|
||||||
SELECT current_setting('request.path', true) ~ '^/(logs_view|log_view|rpc/timelapse_fn|monitoring_view|stats_logs_view|stats_moorages_view|rpc/stats_logs_fn)$' INTO _ppath;
|
SELECT current_setting('request.path', true) ~ '^/(logs_view|log_view|rpc/timelapse_fn|monitoring_view|stats_logs_view|stats_moorages_view|rpc/stats_logs_fn)$' INTO _ppath;
|
||||||
if _ppath is True then
|
if _ppath is True then
|
||||||
@@ -1703,7 +1868,7 @@ BEGIN
|
|||||||
END IF;
|
END IF;
|
||||||
-- Check if boat name match public_vessel name
|
-- Check if boat name match public_vessel name
|
||||||
boat := '^' || _pvessel || '$';
|
boat := '^' || _pvessel || '$';
|
||||||
IF _ptype ~ '^public_(logs|timelapse)$' AND _pid IS NOT NULL THEN
|
IF _ptype ~ '^public_(logs|timelapse)$' AND _pid > 0 THEN
|
||||||
WITH log as (
|
WITH log as (
|
||||||
SELECT vessel_id from api.logbook l where l.id = _pid
|
SELECT vessel_id from api.logbook l where l.id = _pid
|
||||||
)
|
)
|
||||||
@@ -1757,6 +1922,8 @@ BEGIN
|
|||||||
-- In correct order
|
-- In correct order
|
||||||
perform public.cron_process_new_notification_fn();
|
perform public.cron_process_new_notification_fn();
|
||||||
perform public.cron_process_monitor_online_fn();
|
perform public.cron_process_monitor_online_fn();
|
||||||
|
--perform public.cron_process_grafana_fn();
|
||||||
|
perform public.cron_process_pre_logbook_fn();
|
||||||
perform public.cron_process_new_logbook_fn();
|
perform public.cron_process_new_logbook_fn();
|
||||||
perform public.cron_process_new_stay_fn();
|
perform public.cron_process_new_stay_fn();
|
||||||
--perform public.cron_process_new_moorage_fn();
|
--perform public.cron_process_new_moorage_fn();
|
||||||
@@ -1769,16 +1936,16 @@ $$ language plpgsql;
|
|||||||
CREATE OR REPLACE FUNCTION public.delete_account_fn(IN _email TEXT, IN _vessel_id TEXT) RETURNS BOOLEAN
|
CREATE OR REPLACE FUNCTION public.delete_account_fn(IN _email TEXT, IN _vessel_id TEXT) RETURNS BOOLEAN
|
||||||
AS $delete_account$
|
AS $delete_account$
|
||||||
BEGIN
|
BEGIN
|
||||||
select count(*) from api.metrics m where vessel_id = _vessel_id;
|
--select count(*) from api.metrics m where vessel_id = _vessel_id;
|
||||||
delete from api.metrics m where vessel_id = _vessel_id;
|
delete from api.metrics m where vessel_id = _vessel_id;
|
||||||
select * from api.metadata m where vessel_id = _vessel_id;
|
--select * from api.metadata m where vessel_id = _vessel_id;
|
||||||
delete from api.logbook l where vessel_id = _vessel_id;
|
|
||||||
delete from api.moorages m where vessel_id = _vessel_id;
|
delete from api.moorages m where vessel_id = _vessel_id;
|
||||||
|
delete from api.logbook l where vessel_id = _vessel_id;
|
||||||
delete from api.stays s where vessel_id = _vessel_id;
|
delete from api.stays s where vessel_id = _vessel_id;
|
||||||
delete from api.metadata m where vessel_id = _vessel_id;
|
delete from api.metadata m where vessel_id = _vessel_id;
|
||||||
select * from auth.vessels v where vessel_id = _vessel_id;
|
--select * from auth.vessels v where vessel_id = _vessel_id;
|
||||||
delete from auth.vessels v where vessel_id = _vessel_id;
|
delete from auth.vessels v where vessel_id = _vessel_id;
|
||||||
select * from auth.accounts a where email = _email;
|
--select * from auth.accounts a where email = _email;
|
||||||
delete from auth.accounts a where email = _email;
|
delete from auth.accounts a where email = _email;
|
||||||
RETURN True;
|
RETURN True;
|
||||||
END
|
END
|
||||||
|
@@ -151,3 +151,90 @@ $jsonb_diff_val$ LANGUAGE plpgsql;
|
|||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.jsonb_diff_val
|
public.jsonb_diff_val
|
||||||
IS 'Compare two jsonb objects';
|
IS 'Compare two jsonb objects';
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------
|
||||||
|
-- uuid v7 helpers
|
||||||
|
--
|
||||||
|
-- https://gist.github.com/kjmph/5bd772b2c2df145aa645b837da7eca74
|
||||||
|
CREATE OR REPLACE FUNCTION public.timestamp_from_uuid_v7(_uuid uuid)
|
||||||
|
RETURNS timestamp without time zone
|
||||||
|
LANGUAGE sql
|
||||||
|
-- Based off IETF draft, https://datatracker.ietf.org/doc/draft-peabody-dispatch-new-uuid-format/
|
||||||
|
IMMUTABLE PARALLEL SAFE STRICT LEAKPROOF
|
||||||
|
AS $$
|
||||||
|
SELECT to_timestamp(('x0000' || substr(_uuid::text, 1, 8) || substr(_uuid::text, 10, 4))::bit(64)::bigint::numeric / 1000);
|
||||||
|
$$
|
||||||
|
;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.timestamp_from_uuid_v7
|
||||||
|
IS 'extract the timestamp from the uuid.';
|
||||||
|
|
||||||
|
create or replace function public.uuid_generate_v7()
|
||||||
|
returns uuid
|
||||||
|
as $$
|
||||||
|
begin
|
||||||
|
-- use random v4 uuid as starting point (which has the same variant we need)
|
||||||
|
-- then overlay timestamp
|
||||||
|
-- then set version 7 by flipping the 2 and 1 bit in the version 4 string
|
||||||
|
return encode(
|
||||||
|
set_bit(
|
||||||
|
set_bit(
|
||||||
|
overlay(uuid_send(gen_random_uuid())
|
||||||
|
placing substring(int8send(floor(extract(epoch from clock_timestamp()) * 1000)::bigint) from 3)
|
||||||
|
from 1 for 6
|
||||||
|
),
|
||||||
|
52, 1
|
||||||
|
),
|
||||||
|
53, 1
|
||||||
|
),
|
||||||
|
'hex')::uuid;
|
||||||
|
end
|
||||||
|
$$
|
||||||
|
language plpgsql volatile;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.uuid_generate_v7
|
||||||
|
IS 'Generate UUID v7, Based off IETF draft, https://datatracker.ietf.org/doc/draft-peabody-dispatch-new-uuid-format/';
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------
|
||||||
|
-- Conversion helpers
|
||||||
|
--
|
||||||
|
CREATE OR REPLACE FUNCTION public.kelvinToCel(IN temperature NUMERIC)
|
||||||
|
RETURNS NUMERIC
|
||||||
|
AS $$
|
||||||
|
BEGIN
|
||||||
|
RETURN ROUND((((temperature)::numeric - 273.15) * 10) / 10);
|
||||||
|
END
|
||||||
|
$$
|
||||||
|
LANGUAGE plpgsql IMMUTABLE;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.kelvinToCel
|
||||||
|
IS 'convert kelvin To Celsius';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION public.radiantToDegrees(IN angle NUMERIC)
|
||||||
|
RETURNS NUMERIC
|
||||||
|
AS $$
|
||||||
|
BEGIN
|
||||||
|
RETURN ROUND((((angle)::numeric * 57.2958) * 10) / 10);
|
||||||
|
END
|
||||||
|
$$
|
||||||
|
LANGUAGE plpgsql IMMUTABLE;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.radiantToDegrees
|
||||||
|
IS 'convert radiant To Degrees';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION public.valToPercent(IN val NUMERIC)
|
||||||
|
RETURNS NUMERIC
|
||||||
|
AS $$
|
||||||
|
BEGIN
|
||||||
|
RETURN (val * 100);
|
||||||
|
END
|
||||||
|
$$
|
||||||
|
LANGUAGE plpgsql IMMUTABLE;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.valToPercent
|
||||||
|
IS 'convert radiant To Degrees';
|
@@ -142,6 +142,8 @@ AS $send_email_py$
|
|||||||
email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
|
email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
|
||||||
if 'reset_qs' in _user and _user['reset_qs']:
|
if 'reset_qs' in _user and _user['reset_qs']:
|
||||||
email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])
|
email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])
|
||||||
|
if 'alert' in _user and _user['alert']:
|
||||||
|
email_content = email_content.replace('__ALERT__', _user['alert'])
|
||||||
|
|
||||||
if 'app.url' in app and app['app.url']:
|
if 'app.url' in app and app['app.url']:
|
||||||
email_content = email_content.replace('__APP_URL__', app['app.url'])
|
email_content = email_content.replace('__APP_URL__', app['app.url'])
|
||||||
@@ -231,6 +233,8 @@ AS $send_pushover_py$
|
|||||||
pushover_message = pushover_message.replace('__BOAT__', _user['boat'])
|
pushover_message = pushover_message.replace('__BOAT__', _user['boat'])
|
||||||
if 'badge' in _user and _user['badge']:
|
if 'badge' in _user and _user['badge']:
|
||||||
pushover_message = pushover_message.replace('__BADGE_NAME__', _user['badge'])
|
pushover_message = pushover_message.replace('__BADGE_NAME__', _user['badge'])
|
||||||
|
if 'alert' in _user and _user['alert']:
|
||||||
|
pushover_message = pushover_message.replace('__ALERT__', _user['alert'])
|
||||||
|
|
||||||
if 'app.url' in app and app['app.url']:
|
if 'app.url' in app and app['app.url']:
|
||||||
pushover_message = pushover_message.replace('__APP_URL__', app['app.url'])
|
pushover_message = pushover_message.replace('__APP_URL__', app['app.url'])
|
||||||
@@ -307,6 +311,8 @@ AS $send_telegram_py$
|
|||||||
telegram_message = telegram_message.replace('__BOAT__', _user['boat'])
|
telegram_message = telegram_message.replace('__BOAT__', _user['boat'])
|
||||||
if 'badge' in _user and _user['badge']:
|
if 'badge' in _user and _user['badge']:
|
||||||
telegram_message = telegram_message.replace('__BADGE_NAME__', _user['badge'])
|
telegram_message = telegram_message.replace('__BADGE_NAME__', _user['badge'])
|
||||||
|
if 'alert' in _user and _user['alert']:
|
||||||
|
telegram_message = telegram_message.replace('__ALERT__', _user['alert'])
|
||||||
|
|
||||||
if 'app.url' in app and app['app.url']:
|
if 'app.url' in app and app['app.url']:
|
||||||
telegram_message = telegram_message.replace('__APP_URL__', app['app.url'])
|
telegram_message = telegram_message.replace('__APP_URL__', app['app.url'])
|
||||||
@@ -381,11 +387,11 @@ AS $reverse_geoip_py$
|
|||||||
#plpy.notice('IP [{}] [{}]'.format(_ip, r.status_code))
|
#plpy.notice('IP [{}] [{}]'.format(_ip, r.status_code))
|
||||||
if r.status_code == 200:
|
if r.status_code == 200:
|
||||||
#plpy.notice('Got [{}] [{}]'.format(r.text, r.status_code))
|
#plpy.notice('Got [{}] [{}]'.format(r.text, r.status_code))
|
||||||
return r.json();
|
return r.json()
|
||||||
else:
|
else:
|
||||||
plpy.error('Failed to get ip details')
|
plpy.error('Failed to get ip details')
|
||||||
return '{}'
|
return {}
|
||||||
$reverse_geoip_py$ LANGUAGE plpython3u;
|
$reverse_geoip_py$ IMMUTABLE strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.reverse_geoip_py_fn
|
public.reverse_geoip_py_fn
|
||||||
@@ -434,7 +440,7 @@ IMMUTABLE STRICT;
|
|||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.geojson_py_fn
|
public.geojson_py_fn
|
||||||
IS 'Parse geojson using plpython3u (should be done in PGSQL)';
|
IS 'Parse geojson using plpython3u (should be done in PGSQL), deprecated';
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS overpass_py_fn;
|
DROP FUNCTION IF EXISTS overpass_py_fn;
|
||||||
CREATE OR REPLACE FUNCTION overpass_py_fn(IN lon NUMERIC, IN lat NUMERIC,
|
CREATE OR REPLACE FUNCTION overpass_py_fn(IN lon NUMERIC, IN lat NUMERIC,
|
||||||
@@ -442,7 +448,7 @@ CREATE OR REPLACE FUNCTION overpass_py_fn(IN lon NUMERIC, IN lat NUMERIC,
|
|||||||
AS $overpass_py$
|
AS $overpass_py$
|
||||||
"""
|
"""
|
||||||
Return https://overpass-turbo.eu seamark details within 400m
|
Return https://overpass-turbo.eu seamark details within 400m
|
||||||
https://overpass-turbo.eu/s/1D91
|
https://overpass-turbo.eu/s/1EaG
|
||||||
https://wiki.openstreetmap.org/wiki/Key:seamark:type
|
https://wiki.openstreetmap.org/wiki/Key:seamark:type
|
||||||
"""
|
"""
|
||||||
import requests
|
import requests
|
||||||
@@ -452,6 +458,12 @@ AS $overpass_py$
|
|||||||
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
|
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
|
||||||
payload = """
|
payload = """
|
||||||
[out:json][timeout:20];
|
[out:json][timeout:20];
|
||||||
|
is_in({0},{1})->.result_areas;
|
||||||
|
(
|
||||||
|
area.result_areas["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
|
||||||
|
area.result_areas["leisure"="marina"][~"name"~"."];
|
||||||
|
);
|
||||||
|
out tags;
|
||||||
nwr(around:400.0,{0},{1})->.all;
|
nwr(around:400.0,{0},{1})->.all;
|
||||||
(
|
(
|
||||||
nwr.all["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
|
nwr.all["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
|
||||||
@@ -459,7 +471,7 @@ AS $overpass_py$
|
|||||||
nwr.all["leisure"="marina"];
|
nwr.all["leisure"="marina"];
|
||||||
nwr.all["natural"~"(bay|beach)"];
|
nwr.all["natural"~"(bay|beach)"];
|
||||||
);
|
);
|
||||||
out tags qt;
|
out tags;
|
||||||
""".format(lat, lon)
|
""".format(lat, lon)
|
||||||
data = urllib.parse.quote(payload, safe="");
|
data = urllib.parse.quote(payload, safe="");
|
||||||
url = f'https://overpass-api.de/api/interpreter?data={data}'.format(data)
|
url = f'https://overpass-api.de/api/interpreter?data={data}'.format(data)
|
||||||
@@ -473,12 +485,375 @@ AS $overpass_py$
|
|||||||
if r_dict["elements"]:
|
if r_dict["elements"]:
|
||||||
if "tags" in r_dict["elements"][0] and r_dict["elements"][0]["tags"]:
|
if "tags" in r_dict["elements"][0] and r_dict["elements"][0]["tags"]:
|
||||||
return r_dict["elements"][0]["tags"]; # return the first element
|
return r_dict["elements"][0]["tags"]; # return the first element
|
||||||
return '{}'
|
return {}
|
||||||
else:
|
else:
|
||||||
plpy.notice('overpass-api Failed to get overpass-api details')
|
plpy.notice('overpass-api Failed to get overpass-api details')
|
||||||
return '{}'
|
return {}
|
||||||
$overpass_py$ IMMUTABLE strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
$overpass_py$ IMMUTABLE strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
-- Description
|
-- Description
|
||||||
COMMENT ON FUNCTION
|
COMMENT ON FUNCTION
|
||||||
public.overpass_py_fn
|
public.overpass_py_fn
|
||||||
IS 'Return https://overpass-turbo.eu seamark details within 400m using plpython3u';
|
IS 'Return https://overpass-turbo.eu seamark details within 400m using plpython3u';
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------
|
||||||
|
-- Provision Grafana SQL
|
||||||
|
--
|
||||||
|
CREATE OR REPLACE FUNCTION grafana_py_fn(IN _v_name TEXT, IN _v_id TEXT,
|
||||||
|
IN _u_email TEXT, IN app JSONB) RETURNS VOID
|
||||||
|
AS $grafana_py$
|
||||||
|
"""
|
||||||
|
https://grafana.com/docs/grafana/latest/developers/http_api/
|
||||||
|
Create organization base on vessel name
|
||||||
|
Create user base on user email
|
||||||
|
Add user to organization
|
||||||
|
Add data_source to organization
|
||||||
|
Add dashboard to organization
|
||||||
|
Update organization preferences
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
|
||||||
|
grafana_uri = None
|
||||||
|
if 'app.grafana_admin_uri' in app and app['app.grafana_admin_uri']:
|
||||||
|
grafana_uri = app['app.grafana_admin_uri']
|
||||||
|
else:
|
||||||
|
plpy.error('Error no grafana_admin_uri defined, check app settings')
|
||||||
|
return None
|
||||||
|
|
||||||
|
b_name = None
|
||||||
|
if not _v_name:
|
||||||
|
b_name = _v_id
|
||||||
|
else:
|
||||||
|
b_name = _v_name
|
||||||
|
|
||||||
|
# add vessel org
|
||||||
|
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com',
|
||||||
|
'Accept': 'application/json', 'Content-Type': 'application/json'}
|
||||||
|
path = 'api/orgs'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
data_dict = {'name':b_name}
|
||||||
|
data = json.dumps(data_dict)
|
||||||
|
r = requests.post(url, data=data, headers=headers)
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(r.json())
|
||||||
|
if r.status_code == 200 and "orgId" in r.json():
|
||||||
|
org_id = r.json()['orgId']
|
||||||
|
else:
|
||||||
|
plpy.error('Error grafana add vessel org %', r.json())
|
||||||
|
return None
|
||||||
|
|
||||||
|
# add user to vessel org
|
||||||
|
path = 'api/admin/users'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
data_dict = {'orgId':org_id, 'email':_u_email, 'password':'asupersecretpassword'}
|
||||||
|
data = json.dumps(data_dict)
|
||||||
|
r = requests.post(url, data=data, headers=headers)
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(r.json())
|
||||||
|
if r.status_code == 200 and "id" in r.json():
|
||||||
|
user_id = r.json()['id']
|
||||||
|
else:
|
||||||
|
plpy.error('Error grafana add user to vessel org')
|
||||||
|
return
|
||||||
|
|
||||||
|
# read data_source
|
||||||
|
path = 'api/datasources/1'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
r = requests.get(url, headers=headers)
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(r.json())
|
||||||
|
data_source = r.json()
|
||||||
|
data_source['id'] = 0
|
||||||
|
data_source['orgId'] = org_id
|
||||||
|
data_source['uid'] = "ds_" + _v_id
|
||||||
|
data_source['name'] = "ds_" + _v_id
|
||||||
|
data_source['secureJsonData'] = {}
|
||||||
|
data_source['secureJsonData']['password'] = 'password'
|
||||||
|
data_source['readOnly'] = True
|
||||||
|
del data_source['secureJsonFields']
|
||||||
|
|
||||||
|
# add data_source to vessel org
|
||||||
|
path = 'api/datasources'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
data = json.dumps(data_source)
|
||||||
|
headers['X-Grafana-Org-Id'] = str(org_id)
|
||||||
|
r = requests.post(url, data=data, headers=headers)
|
||||||
|
#plpy.notice(r.json())
|
||||||
|
del headers['X-Grafana-Org-Id']
|
||||||
|
if r.status_code != 200 and "id" not in r.json():
|
||||||
|
plpy.error('Error grafana add data_source to vessel org')
|
||||||
|
return
|
||||||
|
|
||||||
|
dashboards_tpl = [ 'pgsail_tpl_electrical', 'pgsail_tpl_logbook', 'pgsail_tpl_monitor', 'pgsail_tpl_rpi', 'pgsail_tpl_solar', 'pgsail_tpl_weather', 'pgsail_tpl_home']
|
||||||
|
for dashboard in dashboards_tpl:
|
||||||
|
# read dashboard template by uid
|
||||||
|
path = 'api/dashboards/uid'
|
||||||
|
url = f'{grafana_uri}/{path}/{dashboard}'.format(grafana_uri,path,dashboard)
|
||||||
|
if 'X-Grafana-Org-Id' in headers:
|
||||||
|
del headers['X-Grafana-Org-Id']
|
||||||
|
r = requests.get(url, headers=headers)
|
||||||
|
#plpy.notice(r.json())
|
||||||
|
if r.status_code != 200 and "id" not in r.json():
|
||||||
|
plpy.error('Error grafana read dashboard template')
|
||||||
|
return
|
||||||
|
new_dashboard = r.json()
|
||||||
|
del new_dashboard['meta']
|
||||||
|
new_dashboard['dashboard']['version'] = 0
|
||||||
|
new_dashboard['dashboard']['id'] = 0
|
||||||
|
new_uid = re.sub(r'pgsail_tpl_(.*)', r'postgsail_\1', new_dashboard['dashboard']['uid'])
|
||||||
|
new_dashboard['dashboard']['uid'] = f'{new_uid}_{_v_id}'.format(new_uid,_v_id)
|
||||||
|
# add dashboard to vessel org
|
||||||
|
path = 'api/dashboards/db'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
data = json.dumps(new_dashboard)
|
||||||
|
new_data = data.replace('PCC52D03280B7034C', data_source['uid'])
|
||||||
|
headers['X-Grafana-Org-Id'] = str(org_id)
|
||||||
|
r = requests.post(url, data=new_data, headers=headers)
|
||||||
|
#plpy.notice(r.json())
|
||||||
|
if r.status_code != 200 and "id" not in r.json():
|
||||||
|
plpy.error('Error grafana add dashboard to vessel org')
|
||||||
|
return
|
||||||
|
|
||||||
|
# Update Org Prefs
|
||||||
|
path = 'api/org/preferences'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
home_dashboard = {}
|
||||||
|
home_dashboard['timezone'] = 'utc'
|
||||||
|
home_dashboard['homeDashboardUID'] = f'postgsail_home_{_v_id}'.format(_v_id)
|
||||||
|
data = json.dumps(home_dashboard)
|
||||||
|
headers['X-Grafana-Org-Id'] = str(org_id)
|
||||||
|
r = requests.patch(url, data=data, headers=headers)
|
||||||
|
#plpy.notice(r.json())
|
||||||
|
if r.status_code != 200:
|
||||||
|
plpy.error('Error grafana update org preferences')
|
||||||
|
return
|
||||||
|
|
||||||
|
plpy.notice('Done')
|
||||||
|
$grafana_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.grafana_py_fn
|
||||||
|
IS 'Grafana Organization,User,data_source,dashboards provisioning via HTTP API using plpython3u';
|
||||||
|
|
||||||
|
-- https://stackoverflow.com/questions/65517230/how-to-set-user-attribute-value-in-keycloak-using-api
|
||||||
|
DROP FUNCTION IF EXISTS keycloak_py_fn;
|
||||||
|
CREATE OR REPLACE FUNCTION keycloak_py_fn(IN user_id TEXT, IN vessel_id TEXT,
|
||||||
|
IN app JSONB) RETURNS JSONB
|
||||||
|
AS $keycloak_py$
|
||||||
|
"""
|
||||||
|
Add vessel_id user attribute to keycloak user {user_id}
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
safe_uri = host = user = pwd = None
|
||||||
|
if 'app.keycloak_uri' in app and app['app.keycloak_uri']:
|
||||||
|
#safe_uri = urllib.parse.quote(app['app.keycloak_uri'], safe=':/?&=')
|
||||||
|
_ = urllib.parse.urlparse(app['app.keycloak_uri'])
|
||||||
|
host = _.netloc.split('@')[-1]
|
||||||
|
user = _.netloc.split(':')[0]
|
||||||
|
pwd = _.netloc.split(':')[1].split('@')[0]
|
||||||
|
else:
|
||||||
|
plpy.error('Error no keycloak_uri defined, check app settings')
|
||||||
|
return None
|
||||||
|
|
||||||
|
if not host or not user or not pwd:
|
||||||
|
plpy.error('Error parsing keycloak_uri, check app settings')
|
||||||
|
return None
|
||||||
|
|
||||||
|
_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
|
||||||
|
_payload = {'client_id':'admin-cli','grant_type':'password','username':user,'password':pwd}
|
||||||
|
url = f'{_.scheme}://{host}/realms/master/protocol/openid-connect/token'.format(_.scheme, host)
|
||||||
|
r = requests.post(url, headers=_headers, data=_payload, timeout=(5, 60))
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(url)
|
||||||
|
if r.status_code == 200 and 'access_token' in r.json():
|
||||||
|
response = r.json()
|
||||||
|
plpy.notice(response)
|
||||||
|
_headers['Authorization'] = 'Bearer '+ response['access_token']
|
||||||
|
_headers['Content-Type'] = 'application/json'
|
||||||
|
_payload = { 'attributes': {'vessel_id': vessel_id} }
|
||||||
|
url = f'{keycloak_uri}/admin/realms/postgsail/users/{user_id}'.format(keycloak_uri,user_id)
|
||||||
|
#plpy.notice(url)
|
||||||
|
#plpy.notice(_payload)
|
||||||
|
data = json.dumps(_payload)
|
||||||
|
r = requests.put(url, headers=_headers, data=data, timeout=(5, 60))
|
||||||
|
if r.status_code != 204:
|
||||||
|
plpy.notice("Error updating user: {status} [{text}]".format(
|
||||||
|
status=r.status_code, text=r.text))
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
plpy.notice("Updated user : {user} [{text}]".format(user=user_id, text=r.text))
|
||||||
|
else:
|
||||||
|
plpy.notice(f'Error getting admin access_token: {status} [{text}]'.format(
|
||||||
|
status=r.status_code, text=r.text))
|
||||||
|
return None
|
||||||
|
$keycloak_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.keycloak_py_fn
|
||||||
|
IS 'Set oauth user attribute into keycloak using plpython3u';
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS keycloak_auth_py_fn;
|
||||||
|
CREATE OR REPLACE FUNCTION keycloak_auth_py_fn(IN _v_id TEXT,
|
||||||
|
IN _user JSONB, IN app JSONB) RETURNS JSONB
|
||||||
|
AS $keycloak_auth_py$
|
||||||
|
"""
|
||||||
|
Add keycloak user
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
safe_uri = host = user = pwd = None
|
||||||
|
if 'app.keycloak_uri' in app and app['app.keycloak_uri']:
|
||||||
|
#safe_uri = urllib.parse.quote(app['app.keycloak_uri'], safe=':/?&=')
|
||||||
|
_ = urllib.parse.urlparse(app['app.keycloak_uri'])
|
||||||
|
host = _.netloc.split('@')[-1]
|
||||||
|
user = _.netloc.split(':')[0]
|
||||||
|
pwd = _.netloc.split(':')[1].split('@')[0]
|
||||||
|
else:
|
||||||
|
plpy.error('Error no keycloak_uri defined, check app settings')
|
||||||
|
return none
|
||||||
|
|
||||||
|
if not host or not user or not pwd:
|
||||||
|
plpy.error('Error parsing keycloak_uri, check app settings')
|
||||||
|
return None
|
||||||
|
|
||||||
|
if not 'email' in _user and _user['email']:
|
||||||
|
plpy.error('Error parsing user email, check user settings')
|
||||||
|
return none
|
||||||
|
|
||||||
|
if not _v_id:
|
||||||
|
plpy.error('Error parsing vessel_id')
|
||||||
|
return none
|
||||||
|
|
||||||
|
_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
|
||||||
|
_payload = {'client_id':'admin-cli','grant_type':'password','username':user,'password':pwd}
|
||||||
|
url = f'{_.scheme}://{host}/realms/master/protocol/openid-connect/token'.format(_.scheme, host)
|
||||||
|
r = requests.post(url, headers=_headers, data=_payload, timeout=(5, 60))
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(url)
|
||||||
|
if r.status_code == 200 and 'access_token' in r.json():
|
||||||
|
response = r.json()
|
||||||
|
#plpy.notice(response)
|
||||||
|
_headers['Authorization'] = 'Bearer '+ response['access_token']
|
||||||
|
_headers['Content-Type'] = 'application/json'
|
||||||
|
url = f'{_.scheme}://{host}/admin/realms/postgsail/users'.format(_.scheme, host)
|
||||||
|
_payload = {
|
||||||
|
"enabled": "true",
|
||||||
|
"email": _user['email'],
|
||||||
|
"firstName": _user['recipient'],
|
||||||
|
"attributes": {"vessel_id": _v_id},
|
||||||
|
"emailVerified": True,
|
||||||
|
"requiredActions":["UPDATE_PROFILE", "UPDATE_PASSWORD"]
|
||||||
|
}
|
||||||
|
#plpy.notice(_payload)
|
||||||
|
data = json.dumps(_payload)
|
||||||
|
r = requests.post(url, headers=_headers, data=data, timeout=(5, 60))
|
||||||
|
if r.status_code != 201:
|
||||||
|
#print("Error creating user: {status}".format(status=r.status_code))
|
||||||
|
plpy.error(f'Error creating user: {user} {status}'.format(user=_payload['email'], status=r.status_code))
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
#print("Created user : {u}]".format(u=_payload['email']))
|
||||||
|
plpy.notice('Created user : {u} {t}, {l}'.format(u=_payload['email'], t=r.text, l=r.headers['location']))
|
||||||
|
user_url = "{user_url}/execute-actions-email".format(user_url=r.headers['location'])
|
||||||
|
_payload = ["UPDATE_PASSWORD"]
|
||||||
|
#plpy.notice(_payload)
|
||||||
|
data = json.dumps(_payload)
|
||||||
|
r = requests.put(user_url, headers=_headers, data=data, timeout=(5, 60))
|
||||||
|
if r.status_code != 204:
|
||||||
|
plpy.error('Error execute-actions-email: {u} {s}'.format(u=_user['email'], s=r.status_code))
|
||||||
|
else:
|
||||||
|
plpy.notice('execute-actions-email: {u} {s}'.format(u=_user['email'], s=r.status_code))
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
plpy.error(f'Error getting admin access_token: {status}'.format(status=r.status_code))
|
||||||
|
return None
|
||||||
|
$keycloak_auth_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.keycloak_auth_py_fn
|
||||||
|
IS 'Create an oauth user into keycloak using plpython3u';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION windy_pws_py_fn(IN metric JSONB,
|
||||||
|
IN _user JSONB, IN app JSONB) RETURNS JSONB
|
||||||
|
AS $windy_pws_py$
|
||||||
|
"""
|
||||||
|
Send environment data from boat instruments to Windy as a Personal Weather Station (PWS)
|
||||||
|
https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import decimal
|
||||||
|
|
||||||
|
if not 'app.windy_apikey' in app and not app['app.windy_apikey']:
|
||||||
|
plpy.error('Error no windy_apikey defined, check app settings')
|
||||||
|
return none
|
||||||
|
if not 'station' in metric and not metric['station']:
|
||||||
|
plpy.error('Error no metrics defined')
|
||||||
|
return none
|
||||||
|
if not 'temp' in metric and not metric['temp']:
|
||||||
|
plpy.error('Error no metrics defined')
|
||||||
|
return none
|
||||||
|
if not _user:
|
||||||
|
plpy.error('Error no user defined, check user settings')
|
||||||
|
return none
|
||||||
|
|
||||||
|
_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com', 'Content-Type': 'application/json'}
|
||||||
|
_payload = {
|
||||||
|
'stations': [
|
||||||
|
{ 'station': int(decimal.Decimal(metric['station'])),
|
||||||
|
'name': metric['name'],
|
||||||
|
'shareOption': 'Open',
|
||||||
|
'type': 'SignalK PostgSail Plugin',
|
||||||
|
'provider': 'PostgSail',
|
||||||
|
'url': 'https://iot.openplotter.cloud/{name}/monitoring'.format(name=metric['name']),
|
||||||
|
'lat': float(decimal.Decimal(metric['lat'])),
|
||||||
|
'lon': float(decimal.Decimal(metric['lon'])),
|
||||||
|
'elevation': 1 }
|
||||||
|
],
|
||||||
|
'observations': [
|
||||||
|
{ 'station': int(decimal.Decimal(metric['station'])),
|
||||||
|
'temp': float(decimal.Decimal(metric['temp'])),
|
||||||
|
'wind': round(float(decimal.Decimal(metric['wind']))),
|
||||||
|
'gust': round(float(decimal.Decimal(metric['wind']))),
|
||||||
|
'winddir': int(decimal.Decimal(metric['winddir'])),
|
||||||
|
'pressure': int(decimal.Decimal(metric['pressure'])),
|
||||||
|
'rh': float(decimal.Decimal(metric['rh'])) }
|
||||||
|
]}
|
||||||
|
#print(_payload)
|
||||||
|
#plpy.notice(_payload)
|
||||||
|
data = json.dumps(_payload)
|
||||||
|
api_url = 'https://stations.windy.com/pws/update/{api_key}'.format(api_key=app['app.windy_apikey'])
|
||||||
|
r = requests.post(api_url, data=data, headers=_headers, timeout=(5, 60))
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(api_url)
|
||||||
|
if r.status_code == 200:
|
||||||
|
#print('Data sent successfully!')
|
||||||
|
plpy.notice('Data sent successfully to Windy!')
|
||||||
|
#plpy.notice(api_url)
|
||||||
|
if not 'windy' in _user['settings']:
|
||||||
|
api_url = 'https://stations.windy.com/pws/station/{api_key}/{station}'.format(api_key=app['app.windy_apikey'], station=metric['station'])
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(api_url)
|
||||||
|
r = requests.get(api_url, timeout=(5, 60))
|
||||||
|
if r.status_code == 200:
|
||||||
|
#print('Windy Personal Weather Station created successfully in Windy Stations!')
|
||||||
|
plpy.notice('Windy Personal Weather Station created successfully in Windy Stations!')
|
||||||
|
return r.json()
|
||||||
|
else:
|
||||||
|
plpy.error(f'Failed to gather PWS details. Status code: {r.status_code}')
|
||||||
|
else:
|
||||||
|
plpy.error(f'Failed to send data. Status code: {r.status_code}')
|
||||||
|
#print(f'Failed to send data. Status code: {r.status_code}')
|
||||||
|
#print(r.text)
|
||||||
|
return {}
|
||||||
|
$windy_pws_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.windy_pws_py_fn
|
||||||
|
IS 'Forward vessel data to Windy as a Personal Weather Station using plpython3u';
|
||||||
|
@@ -21,7 +21,8 @@ CREATE EXTENSION IF NOT EXISTS "pgcrypto"; -- provides cryptographic functions
|
|||||||
|
|
||||||
DROP TABLE IF EXISTS auth.accounts CASCADE;
|
DROP TABLE IF EXISTS auth.accounts CASCADE;
|
||||||
CREATE TABLE IF NOT EXISTS auth.accounts (
|
CREATE TABLE IF NOT EXISTS auth.accounts (
|
||||||
public_id INT UNIQUE NOT NULL GENERATED ALWAYS AS IDENTITY,
|
id INT UNIQUE GENERATED ALWAYS AS IDENTITY,
|
||||||
|
--id TEXT NOT NULL UNIQUE DEFAULT uuid_generate_v7(),
|
||||||
user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
|
user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
|
||||||
email CITEXT PRIMARY KEY CHECK ( email ~* '^.+@.+\..+$' ),
|
email CITEXT PRIMARY KEY CHECK ( email ~* '^.+@.+\..+$' ),
|
||||||
first TEXT NOT NULL CHECK (length(pass) < 512),
|
first TEXT NOT NULL CHECK (length(pass) < 512),
|
||||||
@@ -60,22 +61,20 @@ CREATE TABLE IF NOT EXISTS auth.vessels (
|
|||||||
vessel_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
|
vessel_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
|
||||||
-- user_id TEXT NOT NULL REFERENCES auth.accounts(user_id) ON DELETE RESTRICT,
|
-- user_id TEXT NOT NULL REFERENCES auth.accounts(user_id) ON DELETE RESTRICT,
|
||||||
owner_email CITEXT PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
|
owner_email CITEXT PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
|
||||||
-- mmsi TEXT UNIQUE, -- Should be a numeric range between 100000000 and 800000000.
|
|
||||||
mmsi NUMERIC UNIQUE, -- MMSI can be optional but if present must be a valid one and unique
|
mmsi NUMERIC UNIQUE, -- MMSI can be optional but if present must be a valid one and unique
|
||||||
name TEXT NOT NULL CHECK (length(name) >= 3 AND length(name) < 512),
|
name TEXT NOT NULL CHECK (length(name) >= 3 AND length(name) < 512),
|
||||||
-- pass text not null check (length(pass) < 512), -- unused
|
|
||||||
role name not null check (length(role) < 512),
|
role name not null check (length(role) < 512),
|
||||||
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
-- CONSTRAINT valid_length_mmsi CHECK (length(mmsi) < 10 OR length(mmsi) = 0)
|
|
||||||
CONSTRAINT valid_range_mmsi CHECK (mmsi > 100000000 AND mmsi < 800000000)
|
CONSTRAINT valid_range_mmsi CHECK (mmsi > 100000000 AND mmsi < 800000000)
|
||||||
);
|
);
|
||||||
-- Description
|
-- Description
|
||||||
COMMENT ON TABLE
|
COMMENT ON TABLE
|
||||||
auth.vessels
|
auth.vessels
|
||||||
IS 'vessels table link to accounts email user_id column';
|
IS 'vessels table link to accounts email user_id column';
|
||||||
-- Indexes
|
COMMENT ON COLUMN
|
||||||
CREATE INDEX vessels_vesselid_idx ON auth.vessels (vessel_id);
|
auth.vessels.mmsi
|
||||||
|
IS 'MMSI can be optional but if present must be a valid one and unique but must be in numeric range between 100000000 and 800000000';
|
||||||
|
|
||||||
CREATE TRIGGER vessels_moddatetime
|
CREATE TRIGGER vessels_moddatetime
|
||||||
BEFORE UPDATE ON auth.vessels
|
BEFORE UPDATE ON auth.vessels
|
||||||
@@ -86,6 +85,32 @@ COMMENT ON TRIGGER vessels_moddatetime
|
|||||||
ON auth.vessels
|
ON auth.vessels
|
||||||
IS 'Automatic update of updated_at on table modification';
|
IS 'Automatic update of updated_at on table modification';
|
||||||
|
|
||||||
|
CREATE TABLE auth.users (
|
||||||
|
id NAME PRIMARY KEY DEFAULT current_setting('request.jwt.claims', true)::json->>'sub',
|
||||||
|
email NAME NOT NULL DEFAULT current_setting('request.jwt.claims', true)::json->>'email',
|
||||||
|
user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
|
||||||
|
first TEXT NOT NULL DEFAULT current_setting('request.jwt.claims', true)::json->>'given_name',
|
||||||
|
last TEXT NOT NULL DEFAULT current_setting('request.jwt.claims', true)::json->>'family_name',
|
||||||
|
role NAME NOT NULL DEFAULT 'user_role' CHECK (length(role) < 512),
|
||||||
|
preferences JSONB NULL DEFAULT '{"email_notifications":true, "email_valid": true, "email_verified": true}',
|
||||||
|
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
|
||||||
|
connected_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
|
||||||
|
);
|
||||||
|
-- Description
|
||||||
|
COMMENT ON TABLE
|
||||||
|
auth.users
|
||||||
|
IS 'Keycloak Oauth user, map user details from access token';
|
||||||
|
|
||||||
|
CREATE TRIGGER user_moddatetime
|
||||||
|
BEFORE UPDATE ON auth.users
|
||||||
|
FOR EACH ROW
|
||||||
|
EXECUTE PROCEDURE moddatetime (updated_at);
|
||||||
|
-- Description
|
||||||
|
COMMENT ON TRIGGER user_moddatetime
|
||||||
|
ON auth.users
|
||||||
|
IS 'Automatic update of updated_at on table modification';
|
||||||
|
|
||||||
create or replace function
|
create or replace function
|
||||||
auth.check_role_exists() returns trigger as $$
|
auth.check_role_exists() returns trigger as $$
|
||||||
begin
|
begin
|
||||||
@@ -263,6 +288,96 @@ begin
|
|||||||
end;
|
end;
|
||||||
$$ language plpgsql security definer;
|
$$ language plpgsql security definer;
|
||||||
|
|
||||||
|
---------------------------------------------------------------------------
|
||||||
|
-- API account Oauth functions
|
||||||
|
--
|
||||||
|
-- oauth is on your exposed schema
|
||||||
|
create or replace function
|
||||||
|
api.oauth() returns void as $$
|
||||||
|
declare
|
||||||
|
_exist boolean;
|
||||||
|
begin
|
||||||
|
-- Ensure we have the required key/value in the access token
|
||||||
|
if current_setting('request.jwt.claims', true)::json->>'sub' is null OR
|
||||||
|
current_setting('request.jwt.claims', true)::json->>'email' is null THEN
|
||||||
|
return;
|
||||||
|
end if;
|
||||||
|
-- check email exist
|
||||||
|
select exists( select email from auth.users
|
||||||
|
where id = current_setting('request.jwt.claims', true)::json->>'sub'
|
||||||
|
) INTO _exist;
|
||||||
|
if NOT FOUND then
|
||||||
|
RAISE WARNING 'Register new oauth user email:[%]', current_setting('request.jwt.claims', true)::json->>'email';
|
||||||
|
-- insert new user, default value from the oauth access token
|
||||||
|
INSERT INTO auth.users (role, preferences)
|
||||||
|
VALUES ('user_role', '{"email_notifications":true, "email_valid": true, "email_verified": true}');
|
||||||
|
end if;
|
||||||
|
end;
|
||||||
|
$$ language plpgsql security definer;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
api.oauth
|
||||||
|
IS 'openid/oauth user register entry point';
|
||||||
|
|
||||||
|
create or replace function
|
||||||
|
api.oauth_vessel(in _mmsi text, in _name text) returns void as $$
|
||||||
|
declare
|
||||||
|
_exist boolean;
|
||||||
|
vessel_name text := _name;
|
||||||
|
vessel_mmsi text := _mmsi;
|
||||||
|
_vessel_id text := null;
|
||||||
|
vessel_rec record;
|
||||||
|
app_settings jsonb;
|
||||||
|
_user_id text := null;
|
||||||
|
begin
|
||||||
|
RAISE WARNING 'oauth_vessel:[%]', current_setting('user.email', true);
|
||||||
|
RAISE WARNING 'oauth_vessel:[%]', current_setting('request.jwt.claims', true)::json->>'email';
|
||||||
|
-- Ensure we have the required key/value in the access token
|
||||||
|
if current_setting('request.jwt.claims', true)::json->>'sub' is null OR
|
||||||
|
current_setting('request.jwt.claims', true)::json->>'email' is null THEN
|
||||||
|
return;
|
||||||
|
end if;
|
||||||
|
|
||||||
|
-- check email exist
|
||||||
|
select exists( select email from auth.accounts
|
||||||
|
where email = current_setting('request.jwt.claims', true)::json->>'email'
|
||||||
|
) INTO _exist;
|
||||||
|
if _exist is False then
|
||||||
|
RAISE WARNING 'Register new oauth user email:[%]', current_setting('request.jwt.claims', true)::json->>'email';
|
||||||
|
-- insert new user, default value from the oauth access token
|
||||||
|
INSERT INTO auth.users VALUES(DEFAULT) RETURNING user_id INTO _user_id;
|
||||||
|
-- insert new user to account table from the oauth access token
|
||||||
|
INSERT INTO auth.accounts (email, first, last, pass, user_id, role, preferences)
|
||||||
|
VALUES (current_setting('request.jwt.claims', true)::json->>'email',
|
||||||
|
current_setting('request.jwt.claims', true)::json->>'given_name',
|
||||||
|
current_setting('request.jwt.claims', true)::json->>'family_name',
|
||||||
|
current_setting('request.jwt.claims', true)::json->>'sub',
|
||||||
|
_user_id, 'user_role', '{"email_notifications":true, "email_valid": true, "email_verified": true}');
|
||||||
|
end if;
|
||||||
|
|
||||||
|
IF public.isnumeric(vessel_mmsi) IS False THEN
|
||||||
|
vessel_mmsi = NULL;
|
||||||
|
END IF;
|
||||||
|
-- check vessel exist
|
||||||
|
SELECT * INTO vessel_rec
|
||||||
|
FROM auth.vessels vessel
|
||||||
|
WHERE vessel.owner_email = current_setting('request.jwt.claims', true)::json->>'email';
|
||||||
|
IF vessel_rec IS NULL THEN
|
||||||
|
RAISE WARNING 'Register new vessel name:[%] mmsi:[%] for [%]', vessel_name, vessel_mmsi, current_setting('request.jwt.claims', true)::json->>'email';
|
||||||
|
INSERT INTO auth.vessels (owner_email, mmsi, name, role)
|
||||||
|
VALUES (current_setting('request.jwt.claims', true)::json->>'email', vessel_mmsi::NUMERIC, vessel_name, 'vessel_role') RETURNING vessel_id INTO _vessel_id;
|
||||||
|
-- Gather url from app settings
|
||||||
|
app_settings := get_app_settings_fn();
|
||||||
|
-- set oauth user vessel_id attributes for token generation
|
||||||
|
PERFORM keycloak_py_fn(current_setting('request.jwt.claims', true)::json->>'sub'::TEXT, _vessel_id::TEXT, app_settings);
|
||||||
|
END IF;
|
||||||
|
end;
|
||||||
|
$$ language plpgsql security definer;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
api.oauth_vessel
|
||||||
|
IS 'user and vessel register entry point from signalk plugin';
|
||||||
|
|
||||||
---------------------------------------------------------------------------
|
---------------------------------------------------------------------------
|
||||||
-- API vessel helper functions
|
-- API vessel helper functions
|
||||||
-- register_vessel should be on your exposed schema
|
-- register_vessel should be on your exposed schema
|
||||||
|
@@ -242,16 +242,17 @@ $vessel_details$
|
|||||||
DECLARE
|
DECLARE
|
||||||
BEGIN
|
BEGIN
|
||||||
RETURN ( WITH tbl AS (
|
RETURN ( WITH tbl AS (
|
||||||
SELECT mmsi,ship_type,length,beam,height,plugin_version FROM api.metadata WHERE vessel_id = current_setting('vessel.id', false)
|
SELECT mmsi,ship_type,length,beam,height,plugin_version,platform FROM api.metadata WHERE vessel_id = current_setting('vessel.id', false)
|
||||||
)
|
)
|
||||||
SELECT json_build_object(
|
SELECT json_build_object(
|
||||||
'ship_type', (SELECT ais.description FROM aistypes ais, tbl t WHERE t.ship_type = ais.id),
|
'ship_type', (SELECT ais.description FROM aistypes ais, tbl t WHERE t.ship_type = ais.id),
|
||||||
'country', (SELECT mid.country FROM mid, tbl t WHERE LEFT(cast(t.mmsi as text), 3)::NUMERIC = mid.id),
|
'country', (SELECT mid.country FROM mid, tbl t WHERE LEFT(cast(t.mmsi as text), 3)::NUMERIC = mid.id),
|
||||||
'alpha_2', (SELECT o.alpha_2 FROM mid m, iso3166 o, tbl t WHERE LEFT(cast(t.mmsi as text), 3)::NUMERIC = m.id AND m.country_id = o.id),
|
'alpha_2', (SELECT o.alpha_2 FROM mid m, iso3166 o, tbl t WHERE LEFT(cast(t.mmsi as text), 3)::NUMERIC = m.id AND m.country_id = o.id),
|
||||||
'length', t.ship_type,
|
'length', t.length,
|
||||||
'beam', t.beam,
|
'beam', t.beam,
|
||||||
'height', t.height,
|
'height', t.height,
|
||||||
'plugin_version', t.plugin_version)
|
'plugin_version', t.plugin_version,
|
||||||
|
'platform', t.platform)
|
||||||
FROM tbl t
|
FROM tbl t
|
||||||
);
|
);
|
||||||
END;
|
END;
|
||||||
@@ -265,8 +266,8 @@ DROP VIEW IF EXISTS api.eventlogs_view;
|
|||||||
CREATE VIEW api.eventlogs_view WITH (security_invoker=true,security_barrier=true) AS
|
CREATE VIEW api.eventlogs_view WITH (security_invoker=true,security_barrier=true) AS
|
||||||
SELECT pq.*
|
SELECT pq.*
|
||||||
FROM public.process_queue pq
|
FROM public.process_queue pq
|
||||||
WHERE ref_id = current_setting('user.id', true)
|
WHERE channel <> 'pre_logbook' AND (ref_id = current_setting('user.id', true)
|
||||||
OR ref_id = current_setting('vessel.id', true)
|
OR ref_id = current_setting('vessel.id', true))
|
||||||
ORDER BY id ASC;
|
ORDER BY id ASC;
|
||||||
-- Description
|
-- Description
|
||||||
COMMENT ON VIEW
|
COMMENT ON VIEW
|
||||||
@@ -315,7 +316,8 @@ BEGIN
|
|||||||
RETURN False;
|
RETURN False;
|
||||||
END IF;
|
END IF;
|
||||||
|
|
||||||
IF _type ~ '^public_(logs|timelapse)$' AND _id IS NOT NULL THEN
|
RAISE WARNING '-> ispublic_fn _type [%], _id [%]', _type, _id;
|
||||||
|
IF _type ~ '^public_(logs|timelapse)$' AND _id > 0 THEN
|
||||||
WITH log as (
|
WITH log as (
|
||||||
SELECT vessel_id from api.logbook l where l.id = _id
|
SELECT vessel_id from api.logbook l where l.id = _id
|
||||||
)
|
)
|
||||||
|
@@ -22,7 +22,8 @@ COMMENT ON TABLE
|
|||||||
IS 'Stores temporal otp code for up to 15 minutes';
|
IS 'Stores temporal otp code for up to 15 minutes';
|
||||||
-- Indexes
|
-- Indexes
|
||||||
CREATE INDEX otp_pass_idx ON auth.otp (otp_pass);
|
CREATE INDEX otp_pass_idx ON auth.otp (otp_pass);
|
||||||
CREATE INDEX otp_user_email_idx ON auth.otp (user_email);
|
-- Duplicate Indexes
|
||||||
|
--CREATE INDEX otp_user_email_idx ON auth.otp (user_email);
|
||||||
|
|
||||||
DROP FUNCTION IF EXISTS public.generate_uid_fn;
|
DROP FUNCTION IF EXISTS public.generate_uid_fn;
|
||||||
CREATE OR REPLACE FUNCTION public.generate_uid_fn(size INT) RETURNS TEXT
|
CREATE OR REPLACE FUNCTION public.generate_uid_fn(size INT) RETURNS TEXT
|
||||||
|
@@ -40,6 +40,9 @@ grant execute on function api.telegram_otp_fn(text) to api_anonymous;
|
|||||||
--grant execute on function api.generate_otp_fn(text) to api_anonymous;
|
--grant execute on function api.generate_otp_fn(text) to api_anonymous;
|
||||||
grant execute on function api.ispublic_fn(text,text,integer) to api_anonymous;
|
grant execute on function api.ispublic_fn(text,text,integer) to api_anonymous;
|
||||||
grant execute on function api.timelapse_fn to api_anonymous;
|
grant execute on function api.timelapse_fn to api_anonymous;
|
||||||
|
grant execute on function api.stats_logs_fn to api_anonymous;
|
||||||
|
grant execute on function api.stats_stays_fn to api_anonymous;
|
||||||
|
grant execute on function api.status_fn to api_anonymous;
|
||||||
-- Allow read on TABLES on API schema
|
-- Allow read on TABLES on API schema
|
||||||
--GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO api_anonymous;
|
--GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO api_anonymous;
|
||||||
-- Allow read on VIEWS on API schema
|
-- Allow read on VIEWS on API schema
|
||||||
@@ -90,6 +93,7 @@ GRANT SELECT ON TABLE auth.accounts TO grafana_auth;
|
|||||||
GRANT SELECT ON TABLE auth.vessels TO grafana_auth;
|
GRANT SELECT ON TABLE auth.vessels TO grafana_auth;
|
||||||
-- GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana_auth;
|
-- GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana_auth;
|
||||||
GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana_auth;
|
GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana_auth;
|
||||||
|
GRANT ALL ON SCHEMA public TO grafana_auth; -- Important if grafana database in pg
|
||||||
|
|
||||||
-- User:
|
-- User:
|
||||||
-- nologin, web api only
|
-- nologin, web api only
|
||||||
@@ -152,6 +156,9 @@ GRANT EXECUTE ON FUNCTION public.stay_in_progress_fn(text) to vessel_role;
|
|||||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA _timescaledb_internal TO vessel_role;
|
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA _timescaledb_internal TO vessel_role;
|
||||||
-- on metrics st_makepoint
|
-- on metrics st_makepoint
|
||||||
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO vessel_role;
|
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO vessel_role;
|
||||||
|
-- Oauth registration
|
||||||
|
GRANT EXECUTE ON FUNCTION api.oauth() TO vessel_role;
|
||||||
|
GRANT EXECUTE ON FUNCTION api.oauth_vessel(text,text) TO vessel_role;
|
||||||
|
|
||||||
--- Scheduler:
|
--- Scheduler:
|
||||||
-- TODO: currently cron function are run as super user, switch to scheduler role.
|
-- TODO: currently cron function are run as super user, switch to scheduler role.
|
||||||
@@ -278,6 +285,10 @@ CREATE POLICY api_scheduler_role ON api.stays TO scheduler
|
|||||||
CREATE POLICY grafana_role ON api.stays TO grafana
|
CREATE POLICY grafana_role ON api.stays TO grafana
|
||||||
USING (vessel_id = current_setting('vessel.id', false))
|
USING (vessel_id = current_setting('vessel.id', false))
|
||||||
WITH CHECK (false);
|
WITH CHECK (false);
|
||||||
|
-- Allow anonymous to select based on the vessel.id
|
||||||
|
CREATE POLICY api_anonymous_role ON api.stays TO api_anonymous
|
||||||
|
USING (vessel_id = current_setting('vessel.id', false))
|
||||||
|
WITH CHECK (false);
|
||||||
|
|
||||||
-- Be sure to enable row level security on the table
|
-- Be sure to enable row level security on the table
|
||||||
ALTER TABLE api.moorages ENABLE ROW LEVEL SECURITY;
|
ALTER TABLE api.moorages ENABLE ROW LEVEL SECURITY;
|
||||||
@@ -301,6 +312,10 @@ CREATE POLICY api_scheduler_role ON api.moorages TO scheduler
|
|||||||
CREATE POLICY grafana_role ON api.moorages TO grafana
|
CREATE POLICY grafana_role ON api.moorages TO grafana
|
||||||
USING (vessel_id = current_setting('vessel.id', false))
|
USING (vessel_id = current_setting('vessel.id', false))
|
||||||
WITH CHECK (false);
|
WITH CHECK (false);
|
||||||
|
-- Allow anonymous to select based on the vessel.id
|
||||||
|
CREATE POLICY api_anonymous_role ON api.moorages TO api_anonymous
|
||||||
|
USING (vessel_id = current_setting('vessel.id', false))
|
||||||
|
WITH CHECK (false);
|
||||||
|
|
||||||
-- Be sure to enable row level security on the table
|
-- Be sure to enable row level security on the table
|
||||||
ALTER TABLE auth.vessels ENABLE ROW LEVEL SECURITY;
|
ALTER TABLE auth.vessels ENABLE ROW LEVEL SECURITY;
|
||||||
|
@@ -10,19 +10,23 @@ CREATE EXTENSION IF NOT EXISTS pg_cron; -- provides a simple cron-based job sche
|
|||||||
-- TRUNCATE table jobs
|
-- TRUNCATE table jobs
|
||||||
--TRUNCATE TABLE cron.job CONTINUE IDENTITY RESTRICT;
|
--TRUNCATE TABLE cron.job CONTINUE IDENTITY RESTRICT;
|
||||||
|
|
||||||
-- Create a every 5 minutes or minute job cron_process_new_logbook_fn ??
|
-- Create a every 5 minutes or minute job cron_process_pre_logbook_fn ??
|
||||||
SELECT cron.schedule('cron_new_logbook', '*/5 * * * *', 'select public.cron_process_new_logbook_fn()');
|
SELECT cron.schedule('cron_pre_logbook', '*/5 * * * *', 'select public.cron_process_pre_logbook_fn()');
|
||||||
|
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_pre_logbook';
|
||||||
|
|
||||||
|
-- Create a every 6 minutes or minute job cron_process_new_logbook_fn ??
|
||||||
|
SELECT cron.schedule('cron_new_logbook', '*/6 * * * *', 'select public.cron_process_new_logbook_fn()');
|
||||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_logbook';
|
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_logbook';
|
||||||
|
|
||||||
-- Create a every 5 minute job cron_process_new_stay_fn
|
-- Create a every 7 minute job cron_process_new_stay_fn
|
||||||
SELECT cron.schedule('cron_new_stay', '*/6 * * * *', 'select public.cron_process_new_stay_fn()');
|
SELECT cron.schedule('cron_new_stay', '*/7 * * * *', 'select public.cron_process_new_stay_fn()');
|
||||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_stay';
|
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_stay';
|
||||||
|
|
||||||
-- Create a every 6 minute job cron_process_new_moorage_fn, delay from stay to give time to generate geo reverse location, eg: name
|
-- Create a every 6 minute job cron_process_new_moorage_fn, delay from stay to give time to generate geo reverse location, eg: name
|
||||||
--SELECT cron.schedule('cron_new_moorage', '*/7 * * * *', 'select public.cron_process_new_moorage_fn()');
|
--SELECT cron.schedule('cron_new_moorage', '*/7 * * * *', 'select public.cron_process_new_moorage_fn()');
|
||||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_moorage';
|
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_moorage';
|
||||||
|
|
||||||
-- Create a every 10 minute job cron_process_monitor_offline_fn
|
-- Create a every 11 minute job cron_process_monitor_offline_fn
|
||||||
SELECT cron.schedule('cron_monitor_offline', '*/11 * * * *', 'select public.cron_process_monitor_offline_fn()');
|
SELECT cron.schedule('cron_monitor_offline', '*/11 * * * *', 'select public.cron_process_monitor_offline_fn()');
|
||||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_monitor_offline';
|
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_monitor_offline';
|
||||||
|
|
||||||
@@ -42,41 +46,51 @@ SELECT cron.schedule('cron_monitor_online', '*/10 * * * *', 'select public.cron_
|
|||||||
--SELECT cron.schedule('cron_new_account_otp', '*/6 * * * *', 'select public.cron_process_new_account_otp_validation_fn()');
|
--SELECT cron.schedule('cron_new_account_otp', '*/6 * * * *', 'select public.cron_process_new_account_otp_validation_fn()');
|
||||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_account_otp';
|
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_account_otp';
|
||||||
|
|
||||||
|
-- Create a every 5 minute job cron_process_grafana_fn
|
||||||
|
SELECT cron.schedule('cron_grafana', '*/5 * * * *', 'select public.cron_process_grafana_fn()');
|
||||||
|
|
||||||
|
-- Create a every 5 minute job cron_process_windy_fn
|
||||||
|
SELECT cron.schedule('cron_windy', '*/5 * * * *', 'select public.cron_windy_fn()');
|
||||||
|
|
||||||
-- Notification
|
-- Notification
|
||||||
-- Create a every 1 minute job cron_process_new_notification_queue_fn, new_account, new_vessel, _new_account_otp
|
-- Create a every 1 minute job cron_process_new_notification_queue_fn, new_account, new_vessel, _new_account_otp
|
||||||
SELECT cron.schedule('cron_new_notification', '*/2 * * * *', 'select public.cron_process_new_notification_fn()');
|
SELECT cron.schedule('cron_new_notification', '*/1 * * * *', 'select public.cron_process_new_notification_fn()');
|
||||||
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_notification';
|
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_notification';
|
||||||
|
|
||||||
-- Maintenance
|
-- Maintenance
|
||||||
-- Vacuum database at “At 01:01 on Sunday.”
|
-- Vacuum database schema api at "At 01:31 on Sunday."
|
||||||
SELECT cron.schedule('cron_vacuum', '1 1 * * 0', 'VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.logbook,api.stays,api.moorages,api.metadata,api.metrics;');
|
SELECT cron.schedule('cron_vacuum_api', '31 1 * * 0', 'VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.logbook,api.stays,api.moorages,api.metadata,api.metrics;');
|
||||||
-- Remove all jobs log at “At 02:02 on Sunday.”
|
-- Vacuum database schema auth at "At 01:01 on Sunday."
|
||||||
|
SELECT cron.schedule('cron_vacuum_auth', '1 1 * * 0', 'VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) auth.accounts,auth.vessels,auth.otp;');
|
||||||
|
-- Remove old jobs log at "At 02:02 on Sunday."
|
||||||
SELECT cron.schedule('job_run_details_cleanup', '2 2 * * 0', 'select public.job_run_details_cleanup_fn()');
|
SELECT cron.schedule('job_run_details_cleanup', '2 2 * * 0', 'select public.job_run_details_cleanup_fn()');
|
||||||
-- Rebuilding indexes at “first day of each month at 23:01.”
|
-- Rebuilding indexes schema api at "first day of each month at 23:15."
|
||||||
SELECT cron.schedule('cron_reindex', '1 23 1 * *', 'REINDEX TABLE api.logbook; REINDEX TABLE api.stays; REINDEX TABLE api.moorages; REINDEX TABLE api.metadata; REINDEX TABLE api.metrics;');
|
SELECT cron.schedule('cron_reindex_api', '15 23 1 * *', 'REINDEX TABLE CONCURRENTLY api.logbook; REINDEX TABLE CONCURRENTLY api.stays; REINDEX TABLE CONCURRENTLY api.moorages; REINDEX TABLE CONCURRENTLY api.metadata;');
|
||||||
|
-- Rebuilding indexes schema auth at "first day of each month at 23:01."
|
||||||
|
SELECT cron.schedule('cron_reindex_auth', '1 23 1 * *', 'REINDEX TABLE CONCURRENTLY auth.accounts; REINDEX TABLE CONCURRENTLY auth.vessels; REINDEX TABLE CONCURRENTLY auth.otp;');
|
||||||
-- Any other maintenance require?
|
-- Any other maintenance require?
|
||||||
|
|
||||||
-- OTP
|
-- OTP
|
||||||
-- Create a every 15 minute job cron_process_prune_otp_fn
|
-- Create a every 15 minute job cron_prune_otp_fn
|
||||||
SELECT cron.schedule('cron_prune_otp', '*/15 * * * *', 'select public.cron_process_prune_otp_fn()');
|
SELECT cron.schedule('cron_prune_otp', '*/15 * * * *', 'select public.cron_prune_otp_fn()');
|
||||||
|
|
||||||
-- Alerts
|
-- Alerts
|
||||||
-- Create a every 11 minute job cron_process_alerts_fn
|
-- Create a every 11 minute job cron_alerts_fn
|
||||||
--SELECT cron.schedule('cron_alerts', '*/11 * * * *', 'select public.cron_process_alerts_fn()');
|
SELECT cron.schedule('cron_alerts', '*/11 * * * *', 'select public.cron_alerts_fn()');
|
||||||
|
|
||||||
-- Notifications/Reminders of no vessel & no metadata & no activity
|
-- Notifications/Reminders of no vessel & no metadata & no activity
|
||||||
-- At 08:05 on Sunday.
|
-- At 08:05 on Sunday.
|
||||||
-- At 08:05 on every 4th day-of-month if it's on Sunday.
|
-- At 08:05 on every 4th day-of-month if it's on Sunday.
|
||||||
SELECT cron.schedule('cron_no_vessel', '5 8 */4 * 0', 'select public.cron_process_no_vessel_fn()');
|
SELECT cron.schedule('cron_no_vessel', '5 8 */4 * 0', 'select public.cron_no_vessel_fn()');
|
||||||
SELECT cron.schedule('cron_no_metadata', '5 8 */4 * 0', 'select public.cron_process_no_metadata_fn()');
|
SELECT cron.schedule('cron_no_metadata', '5 8 */4 * 0', 'select public.cron_no_metadata_fn()');
|
||||||
SELECT cron.schedule('cron_no_activity', '5 8 */4 * 0', 'select public.cron_process_no_activity_fn()');
|
SELECT cron.schedule('cron_no_activity', '5 8 */4 * 0', 'select public.cron_no_activity_fn()');
|
||||||
|
|
||||||
-- Cron job settings
|
-- Cron job settings
|
||||||
UPDATE cron.job SET database = 'signalk';
|
UPDATE cron.job SET database = 'signalk';
|
||||||
UPDATE cron.job SET username = 'username'; -- TODO update to scheduler, pending process_queue update
|
UPDATE cron.job SET username = current_user; -- TODO update to scheduler, pending process_queue update
|
||||||
--UPDATE cron.job SET username = 'username' where jobname = 'cron_vacuum'; -- TODO Update to superuser for vaccuum permissions
|
--UPDATE cron.job SET username = 'username' where jobname = 'cron_vacuum'; -- TODO Update to superuser for vacuum permissions
|
||||||
UPDATE cron.job SET nodename = '/var/run/postgresql/'; -- VS default localhost ??
|
UPDATE cron.job SET nodename = '/var/run/postgresql/'; -- VS default localhost ??
|
||||||
UPDATE cron.job SET database = 'postgresql' WHERE jobname = 'job_run_details_cleanup_fn';
|
UPDATE cron.job SET database = 'postgres' WHERE jobname = 'job_run_details_cleanup';
|
||||||
-- check job lists
|
-- check job lists
|
||||||
SELECT * FROM cron.job;
|
SELECT * FROM cron.job;
|
||||||
-- unschedule by job id
|
-- unschedule by job id
|
||||||
|
457
initdb/99_migrations_202401.sql
Normal file
@@ -0,0 +1,457 @@
|
|||||||
|
---------------------------------------------------------------------------
|
||||||
|
-- TODO
|
||||||
|
--
|
||||||
|
----------------------------------------
|
||||||
|
----- TODO --------------
|
||||||
|
----------------------------------------
|
||||||
|
|
||||||
|
-- List current database
|
||||||
|
select current_database();
|
||||||
|
|
||||||
|
-- connect to the DB
|
||||||
|
\c signalk
|
||||||
|
|
||||||
|
\echo 'Force timezone, just in case'
|
||||||
|
set timezone to 'UTC';
|
||||||
|
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.cron_process_new_moorage_fn
|
||||||
|
IS 'Deprecated, init by pg_cron to check for new moorage pending update, if so perform process_moorage_queue_fn';
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS reverse_geoip_py_fn;
|
||||||
|
CREATE OR REPLACE FUNCTION reverse_geoip_py_fn(IN _ip TEXT) RETURNS JSONB
|
||||||
|
AS $reverse_geoip_py$
|
||||||
|
"""
|
||||||
|
Return ipapi.co ip details
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
|
||||||
|
# requests
|
||||||
|
url = f'https://ipapi.co/{_ip}/json/'
|
||||||
|
r = requests.get(url)
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice('IP [{}] [{}]'.format(_ip, r.status_code))
|
||||||
|
if r.status_code == 200:
|
||||||
|
#plpy.notice('Got [{}] [{}]'.format(r.text, r.status_code))
|
||||||
|
return r.json()
|
||||||
|
else:
|
||||||
|
plpy.error('Failed to get ip details')
|
||||||
|
return {}
|
||||||
|
$reverse_geoip_py$ IMMUTABLE strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.reverse_geoip_py_fn
|
||||||
|
IS 'Retrieve reverse geo IP location via ipapi.co using plpython3u';
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS overpass_py_fn;
|
||||||
|
CREATE OR REPLACE FUNCTION overpass_py_fn(IN lon NUMERIC, IN lat NUMERIC,
|
||||||
|
OUT geo JSONB) RETURNS JSONB
|
||||||
|
AS $overpass_py$
|
||||||
|
"""
|
||||||
|
Return https://overpass-turbo.eu seamark details within 400m
|
||||||
|
https://overpass-turbo.eu/s/1EaG
|
||||||
|
https://wiki.openstreetmap.org/wiki/Key:seamark:type
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
|
||||||
|
payload = """
|
||||||
|
[out:json][timeout:20];
|
||||||
|
is_in({0},{1})->.result_areas;
|
||||||
|
(
|
||||||
|
area.result_areas["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
|
||||||
|
area.result_areas["leisure"="marina"][~"name"~"."];
|
||||||
|
);
|
||||||
|
out tags;
|
||||||
|
nwr(around:400.0,{0},{1})->.all;
|
||||||
|
(
|
||||||
|
nwr.all["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
|
||||||
|
nwr.all["seamark:type"~"(anchorage|anchor_berth|berth)"];
|
||||||
|
nwr.all["leisure"="marina"];
|
||||||
|
nwr.all["natural"~"(bay|beach)"];
|
||||||
|
);
|
||||||
|
out tags;
|
||||||
|
""".format(lat, lon)
|
||||||
|
data = urllib.parse.quote(payload, safe="");
|
||||||
|
url = f'https://overpass-api.de/api/interpreter?data={data}'.format(data)
|
||||||
|
r = requests.get(url, headers)
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(url)
|
||||||
|
plpy.notice('overpass-api coord lon[{}] lat[{}] [{}]'.format(lon, lat, r.status_code))
|
||||||
|
if r.status_code == 200 and "elements" in r.json():
|
||||||
|
r_dict = r.json()
|
||||||
|
plpy.notice('overpass-api Got [{}]'.format(r_dict["elements"]))
|
||||||
|
if r_dict["elements"]:
|
||||||
|
if "tags" in r_dict["elements"][0] and r_dict["elements"][0]["tags"]:
|
||||||
|
return r_dict["elements"][0]["tags"]; # return the first element
|
||||||
|
return {}
|
||||||
|
else:
|
||||||
|
plpy.notice('overpass-api Failed to get overpass-api details')
|
||||||
|
return {}
|
||||||
|
$overpass_py$ IMMUTABLE strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.overpass_py_fn
|
||||||
|
IS 'Return https://overpass-turbo.eu seamark details within 400m using plpython3u';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION get_app_settings_fn(OUT app_settings jsonb)
|
||||||
|
RETURNS jsonb
|
||||||
|
AS $get_app_settings$
|
||||||
|
DECLARE
|
||||||
|
BEGIN
|
||||||
|
SELECT
|
||||||
|
jsonb_object_agg(name, value) INTO app_settings
|
||||||
|
FROM
|
||||||
|
public.app_settings
|
||||||
|
WHERE
|
||||||
|
name LIKE 'app.email%'
|
||||||
|
OR name LIKE 'app.pushover%'
|
||||||
|
OR name LIKE 'app.url'
|
||||||
|
OR name LIKE 'app.telegram%'
|
||||||
|
OR name LIKE 'app.grafana_admin_uri'
|
||||||
|
OR name LIKE 'app.keycloak_uri';
|
||||||
|
END;
|
||||||
|
$get_app_settings$
|
||||||
|
LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION keycloak_auth_py_fn(IN _v_id TEXT,
|
||||||
|
IN _user JSONB, IN app JSONB) RETURNS JSONB
|
||||||
|
AS $keycloak_auth_py$
|
||||||
|
"""
|
||||||
|
Addkeycloak user
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
safe_uri = host = user = pwd = None
|
||||||
|
if 'app.keycloak_uri' in app and app['app.keycloak_uri']:
|
||||||
|
#safe_uri = urllib.parse.quote(app['app.keycloak_uri'], safe=':/?&=')
|
||||||
|
_ = urllib.parse.urlparse(app['app.keycloak_uri'])
|
||||||
|
host = _.netloc.split('@')[-1]
|
||||||
|
user = _.netloc.split(':')[0]
|
||||||
|
pwd = _.netloc.split(':')[1].split('@')[0]
|
||||||
|
else:
|
||||||
|
plpy.error('Error no keycloak_uri defined, check app settings')
|
||||||
|
return none
|
||||||
|
|
||||||
|
if not host or not user or not pwd:
|
||||||
|
plpy.error('Error parsing keycloak_uri, check app settings')
|
||||||
|
return None
|
||||||
|
|
||||||
|
if not 'email' in _user and _user['email']:
|
||||||
|
plpy.error('Error parsing user email, check user settings')
|
||||||
|
return none
|
||||||
|
|
||||||
|
if not _v_id:
|
||||||
|
plpy.error('Error parsing vessel_id')
|
||||||
|
return none
|
||||||
|
|
||||||
|
_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
|
||||||
|
_payload = {'client_id':'admin-cli','grant_type':'password','username':user,'password':pwd}
|
||||||
|
url = f'{_.scheme}://{host}/realms/master/protocol/openid-connect/token'.format(_.scheme, host)
|
||||||
|
r = requests.post(url, headers=_headers, data=_payload, timeout=(5, 60))
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(url)
|
||||||
|
if r.status_code == 200 and 'access_token' in r.json():
|
||||||
|
response = r.json()
|
||||||
|
plpy.notice(response)
|
||||||
|
_headers['Authorization'] = 'Bearer '+ response['access_token']
|
||||||
|
_headers['Content-Type'] = 'application/json'
|
||||||
|
url = f'{_.scheme}://{host}/admin/realms/postgsail/users'.format(_.scheme, host)
|
||||||
|
_payload = {
|
||||||
|
"enabled": "true",
|
||||||
|
"email": _user['email'],
|
||||||
|
"firstName": _user['recipient'],
|
||||||
|
"attributes": {"vessel_id": _v_id},
|
||||||
|
"emailVerified": True,
|
||||||
|
"requiredActions":["UPDATE_PROFILE", "UPDATE_PASSWORD"]
|
||||||
|
}
|
||||||
|
plpy.notice(_payload)
|
||||||
|
data = json.dumps(_payload)
|
||||||
|
r = requests.post(url, headers=_headers, data=data, timeout=(5, 60))
|
||||||
|
if r.status_code != 201:
|
||||||
|
#print("Error creating user: {status}".format(status=r.status_code))
|
||||||
|
plpy.error(f'Error creating user: {user} {status}'.format(user=_payload['email'], status=r.status_code))
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
#print("Created user : {u}]".format(u=_payload['email']))
|
||||||
|
plpy.notice('Created user : {u} {t}, {l}'.format(u=_payload['email'], t=r.text, l=r.headers['location']))
|
||||||
|
user_url = "{user_url}/execute-actions-email".format(user_url=r.headers['location'])
|
||||||
|
_payload = ["UPDATE_PASSWORD"]
|
||||||
|
plpy.notice(_payload)
|
||||||
|
data = json.dumps(_payload)
|
||||||
|
r = requests.put(user_url, headers=_headers, data=data, timeout=(5, 60))
|
||||||
|
if r.status_code != 204:
|
||||||
|
plpy.error('Error execute-actions-email: {u} {s}'.format(u=_user['email'], s=r.status_code))
|
||||||
|
else:
|
||||||
|
plpy.notice('execute-actions-email: {u} {s}'.format(u=_user['email'], s=r.status_code))
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
plpy.error(f'Error getting admin access_token: {status}'.format(status=r.status_code))
|
||||||
|
return None
|
||||||
|
$keycloak_auth_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.keycloak_auth_py_fn
|
||||||
|
IS 'Return set oauth user attribute into keycloak using plpython3u';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION keycloak_py_fn(IN user_id TEXT, IN vessel_id TEXT,
|
||||||
|
IN app JSONB) RETURNS JSONB
|
||||||
|
AS $keycloak_py$
|
||||||
|
"""
|
||||||
|
Add vessel_id user attribute to keycloak user {user_id}
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import urllib.parse
|
||||||
|
|
||||||
|
safe_uri = host = user = pwd = None
|
||||||
|
if 'app.keycloak_uri' in app and app['app.keycloak_uri']:
|
||||||
|
#safe_uri = urllib.parse.quote(app['app.keycloak_uri'], safe=':/?&=')
|
||||||
|
_ = urllib.parse.urlparse(app['app.keycloak_uri'])
|
||||||
|
host = _.netloc.split('@')[-1]
|
||||||
|
user = _.netloc.split(':')[0]
|
||||||
|
pwd = _.netloc.split(':')[1].split('@')[0]
|
||||||
|
else:
|
||||||
|
plpy.error('Error no keycloak_uri defined, check app settings')
|
||||||
|
return None
|
||||||
|
|
||||||
|
if not host or not user or not pwd:
|
||||||
|
plpy.error('Error parsing keycloak_uri, check app settings')
|
||||||
|
return None
|
||||||
|
|
||||||
|
_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
|
||||||
|
_payload = {'client_id':'admin-cli','grant_type':'password','username':user,'password':pwd}
|
||||||
|
url = f'{_.scheme}://{host}/realms/master/protocol/openid-connect/token'.format(_.scheme, host)
|
||||||
|
r = requests.post(url, headers=_headers, data=_payload, timeout=(5, 60))
|
||||||
|
#print(r.text)
|
||||||
|
#plpy.notice(url)
|
||||||
|
if r.status_code == 200 and 'access_token' in r.json():
|
||||||
|
response = r.json()
|
||||||
|
plpy.notice(response)
|
||||||
|
_headers['Authorization'] = 'Bearer '+ response['access_token']
|
||||||
|
_headers['Content-Type'] = 'application/json'
|
||||||
|
_payload = { 'attributes': {'vessel_id': vessel_id} }
|
||||||
|
url = f'{keycloak_uri}/admin/realms/postgsail/users/{user_id}'.format(keycloak_uri,user_id)
|
||||||
|
#plpy.notice(url)
|
||||||
|
#plpy.notice(_payload)
|
||||||
|
data = json.dumps(_payload)
|
||||||
|
r = requests.put(url, headers=_headers, data=data, timeout=(5, 60))
|
||||||
|
if r.status_code != 204:
|
||||||
|
plpy.notice("Error updating user: {status} [{text}]".format(
|
||||||
|
status=r.status_code, text=r.text))
|
||||||
|
return None
|
||||||
|
else:
|
||||||
|
plpy.notice("Updated user : {user} [{text}]".format(user=user_id, text=r.text))
|
||||||
|
else:
|
||||||
|
plpy.notice(f'Error getting admin access_token: {status} [{text}]'.format(
|
||||||
|
status=r.status_code, text=r.text))
|
||||||
|
return None
|
||||||
|
$keycloak_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
|
||||||
|
|
||||||
|
UPDATE public.email_templates
|
||||||
|
SET pushover_message='Congratulations!
|
||||||
|
You unlocked Grafana dashboard.
|
||||||
|
See more details at https://app.openplotter.cloud
|
||||||
|
',email_content='Hello __RECIPIENT__,
|
||||||
|
Congratulations! You unlocked Grafana dashboard.
|
||||||
|
See more details at https://app.openplotter.cloud
|
||||||
|
Happy sailing!
|
||||||
|
Francois'
|
||||||
|
WHERE "name"='grafana';
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION public.cron_process_grafana_fn()
|
||||||
|
RETURNS void
|
||||||
|
LANGUAGE plpgsql
|
||||||
|
AS $function$
|
||||||
|
DECLARE
|
||||||
|
process_rec record;
|
||||||
|
data_rec record;
|
||||||
|
app_settings jsonb;
|
||||||
|
user_settings jsonb;
|
||||||
|
BEGIN
|
||||||
|
-- We run grafana provisioning only after the first received vessel metadata
|
||||||
|
-- Check for new vessel metadata pending grafana provisioning
|
||||||
|
RAISE NOTICE 'cron_process_grafana_fn';
|
||||||
|
FOR process_rec in
|
||||||
|
SELECT * from process_queue
|
||||||
|
where channel = 'grafana' and processed is null
|
||||||
|
order by stored asc
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_process_grafana_fn [%]', process_rec.payload;
|
||||||
|
-- Gather url from app settings
|
||||||
|
app_settings := get_app_settings_fn();
|
||||||
|
-- Get vessel details base on metadata id
|
||||||
|
SELECT * INTO data_rec
|
||||||
|
FROM api.metadata m, auth.vessels v
|
||||||
|
WHERE m.id = process_rec.payload::INTEGER
|
||||||
|
AND m.vessel_id = v.vessel_id;
|
||||||
|
-- as we got data from the vessel we can do the grafana provisioning.
|
||||||
|
PERFORM grafana_py_fn(data_rec.name, data_rec.vessel_id, data_rec.owner_email, app_settings);
|
||||||
|
-- Gather user settings
|
||||||
|
user_settings := get_user_settings_from_vesselid_fn(data_rec.vessel_id::TEXT);
|
||||||
|
RAISE DEBUG '-> DEBUG cron_process_grafana_fn get_user_settings_from_vesselid_fn [%]', user_settings;
|
||||||
|
-- add user in keycloak
|
||||||
|
PERFORM keycloak_auth_py_fn(data_rec.vessel_id, user_settings, app_settings);
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('grafana'::TEXT, user_settings::JSONB);
|
||||||
|
-- update process_queue entry as processed
|
||||||
|
UPDATE process_queue
|
||||||
|
SET
|
||||||
|
processed = NOW()
|
||||||
|
WHERE id = process_rec.id;
|
||||||
|
RAISE NOTICE '-> cron_process_grafana_fn updated process_queue table [%]', process_rec.id;
|
||||||
|
END LOOP;
|
||||||
|
END;
|
||||||
|
$function$
|
||||||
|
;
|
||||||
|
COMMENT ON FUNCTION public.cron_process_grafana_fn() IS 'init by pg_cron to check for new vessel pending grafana provisioning, if so perform grafana_py_fn';
|
||||||
|
|
||||||
|
-- DROP FUNCTION public.grafana_py_fn(text, text, text, jsonb);
|
||||||
|
|
||||||
|
CREATE OR REPLACE FUNCTION public.grafana_py_fn(_v_name text, _v_id text, _u_email text, app jsonb)
|
||||||
|
RETURNS void
|
||||||
|
TRANSFORM FOR TYPE jsonb
|
||||||
|
LANGUAGE plpython3u
|
||||||
|
AS $function$
|
||||||
|
"""
|
||||||
|
https://grafana.com/docs/grafana/latest/developers/http_api/
|
||||||
|
Create organization base on vessel name
|
||||||
|
Create user base on user email
|
||||||
|
Add user to organization
|
||||||
|
Add data_source to organization
|
||||||
|
Add dashboard to organization
|
||||||
|
Update organization preferences
|
||||||
|
"""
|
||||||
|
import requests
|
||||||
|
import json
|
||||||
|
import re
|
||||||
|
|
||||||
|
grafana_uri = None
|
||||||
|
if 'app.grafana_admin_uri' in app and app['app.grafana_admin_uri']:
|
||||||
|
grafana_uri = app['app.grafana_admin_uri']
|
||||||
|
else:
|
||||||
|
plpy.error('Error no grafana_admin_uri defined, check app settings')
|
||||||
|
return None
|
||||||
|
|
||||||
|
b_name = None
|
||||||
|
if not _v_name:
|
||||||
|
b_name = _v_id
|
||||||
|
else:
|
||||||
|
b_name = _v_name
|
||||||
|
|
||||||
|
# add vessel org
|
||||||
|
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com',
|
||||||
|
'Accept': 'application/json', 'Content-Type': 'application/json'}
|
||||||
|
path = 'api/orgs'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
data_dict = {'name':b_name}
|
||||||
|
data = json.dumps(data_dict)
|
||||||
|
r = requests.post(url, data=data, headers=headers)
|
||||||
|
#print(r.text)
|
||||||
|
plpy.notice(r.json())
|
||||||
|
if r.status_code == 200 and "orgId" in r.json():
|
||||||
|
org_id = r.json()['orgId']
|
||||||
|
else:
|
||||||
|
plpy.error('Error grafana add vessel org %', r.json())
|
||||||
|
return none
|
||||||
|
|
||||||
|
# add user to vessel org
|
||||||
|
path = 'api/admin/users'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
data_dict = {'orgId':org_id, 'email':_u_email, 'password':'asupersecretpassword'}
|
||||||
|
data = json.dumps(data_dict)
|
||||||
|
r = requests.post(url, data=data, headers=headers)
|
||||||
|
#print(r.text)
|
||||||
|
plpy.notice(r.json())
|
||||||
|
if r.status_code == 200 and "id" in r.json():
|
||||||
|
user_id = r.json()['id']
|
||||||
|
else:
|
||||||
|
plpy.error('Error grafana add user to vessel org')
|
||||||
|
return
|
||||||
|
|
||||||
|
# read data_source
|
||||||
|
path = 'api/datasources/1'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
r = requests.get(url, headers=headers)
|
||||||
|
#print(r.text)
|
||||||
|
plpy.notice(r.json())
|
||||||
|
data_source = r.json()
|
||||||
|
data_source['id'] = 0
|
||||||
|
data_source['orgId'] = org_id
|
||||||
|
data_source['uid'] = "ds_" + _v_id
|
||||||
|
data_source['name'] = "ds_" + _v_id
|
||||||
|
data_source['secureJsonData'] = {}
|
||||||
|
data_source['secureJsonData']['password'] = 'mysecretpassword'
|
||||||
|
data_source['readOnly'] = True
|
||||||
|
del data_source['secureJsonFields']
|
||||||
|
|
||||||
|
# add data_source to vessel org
|
||||||
|
path = 'api/datasources'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
data = json.dumps(data_source)
|
||||||
|
headers['X-Grafana-Org-Id'] = str(org_id)
|
||||||
|
r = requests.post(url, data=data, headers=headers)
|
||||||
|
plpy.notice(r.json())
|
||||||
|
del headers['X-Grafana-Org-Id']
|
||||||
|
if r.status_code != 200 and "id" not in r.json():
|
||||||
|
plpy.error('Error grafana add data_source to vessel org')
|
||||||
|
return
|
||||||
|
|
||||||
|
dashboards_tpl = [ 'pgsail_tpl_electrical', 'pgsail_tpl_logbook', 'pgsail_tpl_monitor', 'pgsail_tpl_rpi', 'pgsail_tpl_solar', 'pgsail_tpl_weather', 'pgsail_tpl_home']
|
||||||
|
for dashboard in dashboards_tpl:
|
||||||
|
# read dashboard template by uid
|
||||||
|
path = 'api/dashboards/uid'
|
||||||
|
url = f'{grafana_uri}/{path}/{dashboard}'.format(grafana_uri,path,dashboard)
|
||||||
|
if 'X-Grafana-Org-Id' in headers:
|
||||||
|
del headers['X-Grafana-Org-Id']
|
||||||
|
r = requests.get(url, headers=headers)
|
||||||
|
plpy.notice(r.json())
|
||||||
|
if r.status_code != 200 and "id" not in r.json():
|
||||||
|
plpy.error('Error grafana read dashboard template')
|
||||||
|
return
|
||||||
|
new_dashboard = r.json()
|
||||||
|
del new_dashboard['meta']
|
||||||
|
new_dashboard['dashboard']['version'] = 0
|
||||||
|
new_dashboard['dashboard']['id'] = 0
|
||||||
|
new_uid = re.sub(r'pgsail_tpl_(.*)', r'postgsail_\1', new_dashboard['dashboard']['uid'])
|
||||||
|
new_dashboard['dashboard']['uid'] = f'{new_uid}_{_v_id}'.format(new_uid,_v_id)
|
||||||
|
# add dashboard to vessel org
|
||||||
|
path = 'api/dashboards/db'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
data = json.dumps(new_dashboard)
|
||||||
|
new_data = data.replace('PCC52D03280B7034C', data_source['uid'])
|
||||||
|
headers['X-Grafana-Org-Id'] = str(org_id)
|
||||||
|
r = requests.post(url, data=new_data, headers=headers)
|
||||||
|
plpy.notice(r.json())
|
||||||
|
if r.status_code != 200 and "id" not in r.json():
|
||||||
|
plpy.error('Error grafana add dashboard to vessel org')
|
||||||
|
return
|
||||||
|
|
||||||
|
# Update Org Prefs
|
||||||
|
path = 'api/org/preferences'
|
||||||
|
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
|
||||||
|
home_dashboard = {}
|
||||||
|
home_dashboard['timezone'] = 'utc'
|
||||||
|
home_dashboard['homeDashboardUID'] = f'postgsail_home_{_v_id}'.format(_v_id)
|
||||||
|
data = json.dumps(home_dashboard)
|
||||||
|
headers['X-Grafana-Org-Id'] = str(org_id)
|
||||||
|
r = requests.patch(url, data=data, headers=headers)
|
||||||
|
plpy.notice(r.json())
|
||||||
|
if r.status_code != 200:
|
||||||
|
plpy.error('Error grafana update org preferences')
|
||||||
|
return
|
||||||
|
|
||||||
|
plpy.notice('Done')
|
||||||
|
$function$
|
||||||
|
;
|
||||||
|
|
||||||
|
COMMENT ON FUNCTION public.grafana_py_fn(text, text, text, jsonb) IS 'Grafana Organization,User,data_source,dashboards provisioning via HTTP API using plpython3u';
|
||||||
|
|
||||||
|
UPDATE public.app_settings
|
||||||
|
SET value='0.6.1'
|
||||||
|
WHERE "name"='app.version';
|
740
initdb/99_migrations_202402.sql
Normal file
@@ -0,0 +1,740 @@
|
|||||||
|
---------------------------------------------------------------------------
-- TODO
--
----------------------------------------
----- TODO --------------
----------------------------------------

-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Update email_templates
--INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
-- VALUES ('windy','PostgSail Windy Weather station',E'Hello __RECIPIENT__,\nCongratulations! Your boat is now a Windy Weather station.\nSee more details at __APP_URL__/windy\nHappy sailing!\nFrancois','PostgSail Windy!',E'Congratulations!\nYour boat is now a Windy Weather station.\nSee more details at __APP_URL__/windy\n');
--INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
--VALUES ('alert','PostgSail Alert',E'Hello __RECIPIENT__,\nWe detected an alert __ALERT__.\nSee more details at __APP_URL__\nStay safe.\nFrancois','PostgSail Alert!',E'Congratulations!\nWe detected an alert __ALERT__.\n');

-- Use E'...' strings so the \n sequences become real newlines, matching the
-- other templates above (plain '...' would store a literal backslash-n).
INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('windy_error','PostgSail Windy Weather station Error',E'Hello __RECIPIENT__,\nSorry! We could not convert your boat to a Windy Personal Weather Station.\nWindy Personal Weather Station is now disabled.','PostgSail Windy error!',E'Sorry!\nWe could not convert your boat to a Windy Personal Weather Station.');
|
||||||
|
|
||||||
|
-- Update app_settings
|
||||||
|
-- Update app_settings
-- Collect every application setting relevant to notification and
-- integration jobs into a single JSONB object.
CREATE OR REPLACE FUNCTION public.get_app_settings_fn(OUT app_settings jsonb)
RETURNS jsonb
AS $get_app_settings$
DECLARE
BEGIN
    -- Aggregate the matching key/value rows into one JSONB document.
    SELECT jsonb_object_agg(name, value)
        INTO app_settings
        FROM public.app_settings
        WHERE name LIKE ANY (ARRAY[
            'app.email%',
            'app.pushover%',
            'app.url',
            'app.telegram%',
            'app.grafana_admin_uri',
            'app.keycloak_uri',
            'app.windy_apikey'
        ]);
END;
$get_app_settings$
LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
-- Resolve the owner account and vessel details for a vessel_id and expose
-- them as the JSONB settings document consumed by the notification helpers.
CREATE OR REPLACE FUNCTION public.get_user_settings_from_vesselid_fn(
    IN vesselid TEXT,
    OUT user_settings JSONB
) RETURNS JSONB
AS $get_user_settings_from_vesselid$
DECLARE
BEGIN
    -- Warn (but keep going) on missing input; the SELECT simply matches nothing.
    IF vesselid IS NULL OR vesselid = '' THEN
        RAISE WARNING '-> get_user_settings_from_vesselid_fn invalid input %', vesselid;
    END IF;
    -- metadata -> vessel -> owner account, written as explicit joins.
    SELECT json_build_object(
            'boat', v.name,
            'recipient', a.first,
            'email', v.owner_email,
            'settings', a.preferences
        ) INTO user_settings
        FROM api.metadata m
        JOIN auth.vessels v ON v.vessel_id = m.vessel_id
        JOIN auth.accounts a ON a.email = v.owner_email
        WHERE m.vessel_id = vesselid;
    -- Cache recipient details in session settings for downstream helpers.
    PERFORM set_config('user.email', user_settings->>'email'::TEXT, false);
    PERFORM set_config('user.recipient', user_settings->>'recipient'::TEXT, false);
END;
$get_user_settings_from_vesselid$ LANGUAGE plpgsql;
|
||||||
|
|
||||||
|
-- Create Windy PWS integration
|
||||||
|
-- Create Windy PWS integration
CREATE OR REPLACE FUNCTION public.windy_pws_py_fn(IN metric JSONB,
    IN _user JSONB, IN app JSONB) RETURNS JSONB
AS $windy_pws_py$
    """
    Send environment data from boat instruments to Windy as a Personal Weather Station (PWS)
    https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy

    Returns the Windy station details (JSON) on first successful registration,
    otherwise an empty object.
    """
    import requests
    import json
    import decimal

    # Guard clauses: abort when mandatory settings or metrics are missing.
    # Previous form `not 'k' in d and not d['k']` raised KeyError when the key
    # was absent and never fired when it was present; the intent is clearly
    # "missing OR empty". Also `none` was a NameError (Python's null is None).
    if 'app.windy_apikey' not in app or not app['app.windy_apikey']:
        plpy.error('Error no windy_apikey defined, check app settings')
        return None
    if 'station' not in metric or not metric['station']:
        plpy.error('Error no metrics defined')
        return None
    if 'temp' not in metric or not metric['temp']:
        plpy.error('Error no metrics defined')
        return None
    if not _user:
        plpy.error('Error no user defined, check user settings')
        return None

    _headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com', 'Content-Type': 'application/json'}
    # Station metadata plus a single observation, per the Windy PWS API.
    # jsonb numerics arrive as decimal.Decimal, hence the explicit conversions.
    _payload = {
        'stations': [
            { 'station': int(decimal.Decimal(metric['station'])),
                'name': metric['name'],
                'shareOption': 'Open',
                'type': 'SignalK PostgSail Plugin',
                'provider': 'PostgSail',
                'url': 'https://iot.openplotter.cloud/{name}/monitoring'.format(name=metric['name']),
                'lat': float(decimal.Decimal(metric['lat'])),
                'lon': float(decimal.Decimal(metric['lon'])),
                'elevation': 1 }
        ],
        'observations': [
            { 'station': int(decimal.Decimal(metric['station'])),
                'temp': float(decimal.Decimal(metric['temp'])),
                'wind': round(float(decimal.Decimal(metric['wind']))),
                'gust': round(float(decimal.Decimal(metric['gust']))),
                'winddir': int(decimal.Decimal(metric['winddir'])),
                'pressure': int(decimal.Decimal(metric['pressure'])),
                'rh': float(decimal.Decimal(metric['rh'])) }
        ]}
    data = json.dumps(_payload)
    api_url = 'https://stations.windy.com/pws/update/{api_key}'.format(api_key=app['app.windy_apikey'])
    r = requests.post(api_url, data=data, headers=_headers, timeout=(5, 60))
    if r.status_code == 200:
        plpy.notice('Data sent successfully to Windy!')
        # First successful upload for this user: fetch the station details so
        # the caller can persist the Windy station id in the user preferences.
        if 'windy' not in _user['settings']:
            api_url = 'https://stations.windy.com/pws/station/{api_key}/{station}'.format(api_key=app['app.windy_apikey'], station=metric['station'])
            r = requests.get(api_url, timeout=(5, 60))
            if r.status_code == 200:
                plpy.notice('Windy Personal Weather Station created successfully in Windy Stations!')
                return r.json()
            else:
                plpy.error(f'Failed to gather PWS details. Status code: {r.status_code}')
    else:
        plpy.error(f'Failed to send data. Status code: {r.status_code}')
    return {}
$windy_pws_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.windy_pws_py_fn
    IS 'Forward vessel data to Windy as a Personal Weather Station using plpython3u';
|
||||||
|
|
||||||
|
-- Scheduled job: push recent environment metrics to Windy for every account
-- that enabled the Windy PWS integration and has an active vessel.
CREATE OR REPLACE FUNCTION public.cron_windy_fn() RETURNS void AS $$
DECLARE
    windy_rec record;
    default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
    last_metric TIMESTAMPTZ;
    metric_rec record;
    windy_metric jsonb;
    app_settings jsonb;
    user_settings jsonb;
    windy_pws jsonb;
BEGIN
    -- Check for new observations pending update
    RAISE NOTICE 'cron_windy_fn';
    -- Gather url from app settings
    app_settings := get_app_settings_fn();
    -- Find users with Windy active and with an active vessel
    -- Map account id to Windy Station ID
    FOR windy_rec in
        SELECT
            a.id,a.email,v.vessel_id,v.name,
            -- ->> unwraps the jsonb string; the previous (->)::TEXT kept the
            -- surrounding quotes and broke the later ::TIMESTAMPTZ cast.
            COALESCE(a.preferences->>'windy_last_metric', default_last_metric::TEXT) as last_metric
        FROM auth.accounts a
        LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
        LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
        WHERE (a.preferences->'public_windy')::boolean = True
            AND m.active = True
    LOOP
        RAISE NOTICE '-> cron_windy_fn for [%]', windy_rec;
        PERFORM set_config('vessel.id', windy_rec.vessel_id, false);
        --RAISE WARNING 'public.cron_process_windy_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
        RAISE NOTICE '-> cron_windy_fn checking user_settings [%]', user_settings;
        -- Get all metrics from the last windy_last_metric avg by 5 minutes
        -- TODO json_agg to send all data in once, but issue with py jsonb transformation decimal.
        FOR metric_rec in
            SELECT time_bucket('5 minutes', m.time) AS time_bucket,
                    avg((m.metrics->'environment.outside.temperature')::numeric) AS temperature,
                    avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
                    avg((m.metrics->'environment.outside.relativeHumidity')::numeric) AS rh,
                    avg((m.metrics->'environment.wind.directionTrue')::numeric) AS winddir,
                    avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
                    max((m.metrics->'environment.wind.speedTrue')::numeric) AS gust,
                    last(latitude, time) AS lat,
                    last(longitude, time) AS lng
                FROM api.metrics m
                WHERE vessel_id = windy_rec.vessel_id
                    AND m.time >= windy_rec.last_metric::TIMESTAMPTZ
                GROUP BY time_bucket
                ORDER BY time_bucket ASC LIMIT 100
        LOOP
            RAISE NOTICE '-> cron_windy_fn checking metrics [%]', metric_rec;
            -- https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
            -- temp from kelvin to celcuis
            -- winddir from radiant to degres
            -- rh from ratio to percentage
            SELECT jsonb_build_object(
                'dateutc', metric_rec.time_bucket,
                'station', windy_rec.id,
                'name', windy_rec.name,
                'lat', metric_rec.lat,
                'lon', metric_rec.lng,
                'wind', metric_rec.wind,
                'gust', metric_rec.gust,
                'pressure', metric_rec.pressure,
                'winddir', radiantToDegrees(metric_rec.winddir::numeric),
                'temp', kelvinToCel(metric_rec.temperature::numeric),
                'rh', valToPercent(metric_rec.rh::numeric)
                ) INTO windy_metric;
            RAISE NOTICE '-> cron_windy_fn checking windy_metrics [%]', windy_metric;
            SELECT windy_pws_py_fn(windy_metric, user_settings, app_settings) into windy_pws;
            RAISE NOTICE '-> cron_windy_fn Windy PWS [%]', ((windy_pws->'header')::JSONB ? 'id');
            IF NOT((user_settings->'settings')::JSONB ? 'windy') and ((windy_pws->'header')::JSONB ? 'id') then
                RAISE NOTICE '-> cron_windy_fn new Windy PWS [%]', (windy_pws->'header')::JSONB->>'id';
                -- Store the newly created Windy station id in the user preferences
                PERFORM api.update_user_preferences_fn('{windy}'::TEXT, ((windy_pws->'header')::JSONB->>'id')::TEXT);
                -- Send notification
                PERFORM send_notification_fn('windy'::TEXT, user_settings::JSONB);
            END IF;
            -- Record last metrics time
            SELECT metric_rec.time_bucket INTO last_metric;
        END LOOP;
        -- Only persist the bookmark when at least one metric was processed;
        -- otherwise we would overwrite the preference with NULL.
        IF last_metric IS NOT NULL THEN
            PERFORM api.update_user_preferences_fn('{windy_last_metric}'::TEXT, last_metric::TEXT);
        END IF;
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_windy_fn
    IS 'init by pg_cron to create (or update) station and uploading observations to Windy Personal Weather Station observations';
|
||||||
|
|
||||||
|
-- Scheduled job: evaluate recent metrics against each user's alerting
-- thresholds and send a notification when a threshold is crossed, rate
-- limited by min_notification_interval hours per alarm type.
CREATE OR REPLACE FUNCTION public.cron_alerts_fn() RETURNS void AS $$
DECLARE
    alert_rec record;
    default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
    last_metric TIMESTAMPTZ;
    metric_rec record;
    user_settings JSONB;
    _alarms JSONB;
    alarms TEXT;
    -- Default thresholds; user preferences override per key via `||` below.
    alert_default JSONB := '{
        "low_pressure_threshold": 990,
        "high_wind_speed_threshold": 30,
        "low_water_depth_threshold": 1,
        "min_notification_interval": 6,
        "high_pressure_drop_threshold": 12,
        "low_battery_charge_threshold": 90,
        "low_battery_voltage_threshold": 12.5,
        "low_water_temperature_threshold": 10,
        "low_indoor_temperature_threshold": 7,
        "low_outdoor_temperature_threshold": 3
    }';
BEGIN
    -- Check for new event notification pending update
    RAISE NOTICE 'cron_alerts_fn';
    FOR alert_rec in
        SELECT
            a.user_id,a.email,v.vessel_id,
            -- ->> unwraps the jsonb string; the previous (->)::TEXT kept the
            -- surrounding quotes and broke the later ::TIMESTAMPTZ cast.
            COALESCE(a.preferences->>'alert_last_metric', default_last_metric::TEXT) as last_metric,
            (alert_default || (a.preferences->'alerting')::JSONB) as alerting,
            (a.preferences->'alarms')::JSONB as alarms
        FROM auth.accounts a
        LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
        LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
        WHERE (a.preferences->'alerting'->'enabled')::boolean = True
            AND m.active = True
    LOOP
        RAISE NOTICE '-> cron_alerts_fn for [%]', alert_rec;
        PERFORM set_config('vessel.id', alert_rec.vessel_id, false);
        PERFORM set_config('user.email', alert_rec.email, false);
        --RAISE WARNING 'public.cron_process_alert_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(alert_rec.vessel_id::TEXT);
        RAISE NOTICE '-> cron_alerts_fn checking user_settings [%]', user_settings;
        -- Get all metrics from the last last_metric avg by 5 minutes
        FOR metric_rec in
            SELECT time_bucket('5 minutes', m.time) AS time_bucket,
                    avg((m.metrics->'environment.inside.temperature')::numeric) AS intemp,
                    avg((m.metrics->'environment.outside.temperature')::numeric) AS outtemp,
                    avg((m.metrics->'environment.water.temperature')::numeric) AS wattemp,
                    avg((m.metrics->'environment.depth.belowTransducer')::numeric) AS watdepth,
                    avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
                    avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
                    avg((m.metrics->'electrical.batteries.House.voltage')::numeric) AS voltage,
                    avg((m.metrics->'electrical.batteries.House.capacity.stateOfCharge')::numeric) AS charge
                FROM api.metrics m
                WHERE vessel_id = alert_rec.vessel_id
                    AND m.time >= alert_rec.last_metric::TIMESTAMPTZ
                GROUP BY time_bucket
                ORDER BY time_bucket ASC LIMIT 100
        LOOP
            RAISE NOTICE '-> cron_alerts_fn checking metrics [%]', metric_rec;
            RAISE NOTICE '-> cron_alerts_fn checking alerting [%]', alert_rec.alerting;
            --RAISE NOTICE '-> cron_alerts_fn checking debug [%] [%]', kelvinToCel(metric_rec.intemp), (alert_rec.alerting->'low_indoor_temperature_threshold');
            IF kelvinToCel(metric_rec.intemp) < (alert_rec.alerting->'low_indoor_temperature_threshold')::numeric then
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_indoor_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_indoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.intemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    -- Fixed: this branch previously labelled the alert low_outdoor_temperature_threshold.
                    SELECT user_settings::JSONB || ('{"alert": "low_indoor_temperature_threshold value:'|| kelvinToCel(metric_rec.intemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold';
            END IF;
            IF kelvinToCel(metric_rec.outtemp) < (alert_rec.alerting->'low_outdoor_temperature_threshold')::numeric then
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_outdoor_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_outdoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.outtemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.outtemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold';
            END IF;
            IF kelvinToCel(metric_rec.wattemp) < (alert_rec.alerting->'low_water_temperature_threshold')::numeric then
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_water_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_water_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.wattemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_water_temperature_threshold value:'|| kelvinToCel(metric_rec.wattemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold';
            END IF;
            IF metric_rec.watdepth < (alert_rec.alerting->'low_water_depth_threshold')::numeric then
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_water_depth_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_water_depth_threshold": {"value": '|| metric_rec.watdepth ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_water_depth_threshold value:'|| metric_rec.watdepth ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold';
            END IF;
            -- NOTE(review): this compares the absolute pressure against a
            -- "drop" threshold (default 12) — looks suspicious, confirm intent.
            IF metric_rec.pressure < (alert_rec.alerting->'high_pressure_drop_threshold')::numeric then
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'high_pressure_drop_threshold'->>'date') IS NULL) OR
                    (((_alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"high_pressure_drop_threshold": {"value": '|| metric_rec.pressure ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "high_pressure_drop_threshold value:'|| metric_rec.pressure ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold';
            END IF;
            IF metric_rec.wind > (alert_rec.alerting->'high_wind_speed_threshold')::numeric then
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'high_wind_speed_threshold'->>'date') IS NULL) OR
                    (((_alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"high_wind_speed_threshold": {"value": '|| metric_rec.wind ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "high_wind_speed_threshold value:'|| metric_rec.wind ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold';
            END IF;
            IF metric_rec.voltage < (alert_rec.alerting->'low_battery_voltage_threshold')::numeric then
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                -- Fixed: this branch previously queried a hard-coded developer email.
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_battery_voltage_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_battery_voltage_threshold": {"value": '|| metric_rec.voltage ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_battery_voltage_threshold value:'|| metric_rec.voltage ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold';
            END IF;
            -- stateOfCharge is a 0..1 ratio; threshold is a percentage.
            IF (metric_rec.charge*100) < (alert_rec.alerting->'low_battery_charge_threshold')::numeric then
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_battery_charge_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_battery_charge_threshold": {"value": '|| (metric_rec.charge*100) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_battery_charge_threshold value:'|| (metric_rec.charge*100) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold';
            END IF;
            -- Record last metrics time
            SELECT metric_rec.time_bucket INTO last_metric;
        END LOOP;
        -- Only persist the bookmark when at least one metric was processed;
        -- otherwise we would overwrite the preference with NULL.
        IF last_metric IS NOT NULL THEN
            PERFORM api.update_user_preferences_fn('{alert_last_metric}'::TEXT, last_metric::TEXT);
        END IF;
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_alerts_fn
    IS 'init by pg_cron to check for alerts';
|
||||||
|
|
||||||
|
-- CRON for no vessel notification
|
||||||
|
CREATE FUNCTION public.cron_no_vessel_fn() RETURNS void AS $no_vessel$
|
||||||
|
DECLARE
|
||||||
|
no_vessel record;
|
||||||
|
user_settings jsonb;
|
||||||
|
BEGIN
|
||||||
|
-- Check for user with no vessel register
|
||||||
|
RAISE NOTICE 'cron_no_vessel_fn';
|
||||||
|
FOR no_vessel in
|
||||||
|
SELECT a.user_id,a.email,a.first
|
||||||
|
FROM auth.accounts a
|
||||||
|
WHERE NOT EXISTS (
|
||||||
|
SELECT *
|
||||||
|
FROM auth.vessels v
|
||||||
|
WHERE v.owner_email = a.email)
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_no_vessel_rec_fn for [%]', no_vessel;
|
||||||
|
SELECT json_build_object('email', no_vessel.email, 'recipient', no_vessel.first) into user_settings;
|
||||||
|
RAISE NOTICE '-> debug cron_no_vessel_rec_fn [%]', user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('no_vessel'::TEXT, user_settings::JSONB);
|
||||||
|
END LOOP;
|
||||||
|
END;
|
||||||
|
$no_vessel$ language plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.cron_no_vessel_fn
|
||||||
|
IS 'init by pg_cron, check for user with no vessel register then send notification';
|
||||||
|
|
||||||
|
CREATE FUNCTION public.cron_no_metadata_fn() RETURNS void AS $no_metadata$
|
||||||
|
DECLARE
|
||||||
|
no_metadata_rec record;
|
||||||
|
user_settings jsonb;
|
||||||
|
BEGIN
|
||||||
|
-- Check for vessel register but with no metadata
|
||||||
|
RAISE NOTICE 'cron_no_metadata_fn';
|
||||||
|
FOR no_metadata_rec in
|
||||||
|
SELECT
|
||||||
|
a.user_id,a.email,a.first
|
||||||
|
FROM auth.accounts a, auth.vessels v
|
||||||
|
WHERE NOT EXISTS (
|
||||||
|
SELECT *
|
||||||
|
FROM api.metadata m
|
||||||
|
WHERE v.vessel_id = m.vessel_id) AND v.owner_email = a.email
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_process_no_metadata_rec_fn for [%]', no_metadata_rec;
|
||||||
|
SELECT json_build_object('email', no_metadata_rec.email, 'recipient', no_metadata_rec.first) into user_settings;
|
||||||
|
RAISE NOTICE '-> debug cron_process_no_metadata_rec_fn [%]', user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('no_metadata'::TEXT, user_settings::JSONB);
|
||||||
|
END LOOP;
|
||||||
|
END;
|
||||||
|
$no_metadata$ language plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.cron_no_metadata_fn
|
||||||
|
IS 'init by pg_cron, check for vessel with no metadata then send notification';
|
||||||
|
|
||||||
|
CREATE FUNCTION public.cron_no_activity_fn() RETURNS void AS $no_activity$
|
||||||
|
DECLARE
|
||||||
|
no_activity_rec record;
|
||||||
|
user_settings jsonb;
|
||||||
|
BEGIN
|
||||||
|
-- Check for vessel with no activity for more than 230 days
|
||||||
|
RAISE NOTICE 'cron_no_activity_fn';
|
||||||
|
FOR no_activity_rec in
|
||||||
|
SELECT
|
||||||
|
v.owner_email,m.name,m.vessel_id,m.time,a.first
|
||||||
|
FROM auth.accounts a
|
||||||
|
LEFT JOIN auth.vessels v ON v.owner_email = a.email
|
||||||
|
LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
|
||||||
|
WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS'
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_activity_rec;
|
||||||
|
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
|
||||||
|
RAISE NOTICE '-> debug cron_process_no_activity_rec_fn [%]', user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
|
||||||
|
END LOOP;
|
||||||
|
END;
|
||||||
|
$no_activity$ language plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.cron_no_activity_fn
|
||||||
|
IS 'init by pg_cron, check for vessel with no activity for more than 230 days then send notification';
|
||||||
|
|
||||||
|
CREATE FUNCTION public.cron_deactivated_fn() RETURNS void AS $deactivated$
|
||||||
|
DECLARE
|
||||||
|
no_activity_rec record;
|
||||||
|
user_settings jsonb;
|
||||||
|
BEGIN
|
||||||
|
RAISE NOTICE 'cron_deactivated_fn';
|
||||||
|
|
||||||
|
-- List accounts with vessel inactivity for more than 1 YEAR
|
||||||
|
FOR no_activity_rec in
|
||||||
|
SELECT
|
||||||
|
v.owner_email,m.name,m.vessel_id,m.time,a.first
|
||||||
|
FROM auth.accounts a
|
||||||
|
LEFT JOIN auth.vessels v ON v.owner_email = a.email
|
||||||
|
LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
|
||||||
|
WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_process_deactivated_rec_fn for inactivity [%]', no_activity_rec;
|
||||||
|
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
|
||||||
|
RAISE NOTICE '-> debug cron_process_deactivated_rec_fn inactivity [%]', user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
|
||||||
|
--PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
-- List accounts with no vessel metadata for more than 1 YEAR
|
||||||
|
FOR no_activity_rec in
|
||||||
|
SELECT
|
||||||
|
a.user_id,a.email,a.first,a.created_at
|
||||||
|
FROM auth.accounts a, auth.vessels v
|
||||||
|
WHERE NOT EXISTS (
|
||||||
|
SELECT *
|
||||||
|
FROM api.metadata m
|
||||||
|
WHERE v.vessel_id = m.vessel_id) AND v.owner_email = a.email
|
||||||
|
AND v.created_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_process_deactivated_rec_fn for no metadata [%]', no_activity_rec;
|
||||||
|
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
|
||||||
|
RAISE NOTICE '-> debug cron_process_deactivated_rec_fn no metadata [%]', user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
|
||||||
|
--PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
|
||||||
|
END LOOP;
|
||||||
|
|
||||||
|
-- List accounts with no vessel created for more than 1 YEAR
|
||||||
|
FOR no_activity_rec in
|
||||||
|
SELECT a.user_id,a.email,a.first,a.created_at
|
||||||
|
FROM auth.accounts a
|
||||||
|
WHERE NOT EXISTS (
|
||||||
|
SELECT *
|
||||||
|
FROM auth.vessels v
|
||||||
|
WHERE v.owner_email = a.email)
|
||||||
|
AND a.created_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
|
||||||
|
LOOP
|
||||||
|
RAISE NOTICE '-> cron_process_deactivated_rec_fn for no vessel [%]', no_activity_rec;
|
||||||
|
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
|
||||||
|
RAISE NOTICE '-> debug cron_process_deactivated_rec_fn no vessel [%]', user_settings;
|
||||||
|
-- Send notification
|
||||||
|
PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
|
||||||
|
--PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
|
||||||
|
END LOOP;
|
||||||
|
END;
|
||||||
|
$deactivated$ language plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
public.cron_deactivated_fn
|
||||||
|
IS 'init by pg_cron, check for vessel with no activity for more than 1 year then send notification and delete data';
|
||||||
|
|
||||||
|
DROP FUNCTION IF EXISTS public.cron_process_prune_otp_fn();
|
||||||
|
DROP FUNCTION IF EXISTS public.cron_process_no_vessel_fn();
|
||||||
|
DROP FUNCTION IF EXISTS public.cron_process_no_metadata_fn();
|
||||||
|
DROP FUNCTION IF EXISTS public.cron_process_no_activity_fn();
|
||||||
|
DROP FUNCTION IF EXISTS public.cron_process_deactivated_fn();
|
||||||
|
DROP FUNCTION IF EXISTS public.cron_process_windy_fn();
|
||||||
|
DROP FUNCTION IF EXISTS public.cron_process_alerts_fn();
|
||||||
|
|
||||||
|
-- Remove deprecated fn
|
||||||
|
DROP FUNCTION public.cron_process_new_account_fn();
|
||||||
|
DROP FUNCTION public.cron_process_new_account_otp_validation_fn();
|
||||||
|
DROP FUNCTION public.cron_process_new_moorage_fn();
|
||||||
|
DROP FUNCTION public.cron_process_new_vessel_fn();
|
||||||
|
|
||||||
|
-- Update version
|
||||||
|
UPDATE public.app_settings
|
||||||
|
SET value='0.7.0'
|
||||||
|
WHERE "name"='app.version';
|
||||||
|
|
||||||
|
-- Create a cron job
|
||||||
|
\c postgres
|
||||||
|
|
||||||
|
UPDATE cron.job
|
||||||
|
SET command='select public.cron_prune_otp_fn()'
|
||||||
|
WHERE jobname = 'cron_prune_otp';
|
||||||
|
UPDATE cron.job
|
||||||
|
SET command='select public.cron_no_vessel_fn()'
|
||||||
|
WHERE jobname = 'cron_no_vessel';
|
||||||
|
UPDATE cron.job
|
||||||
|
SET command='select public.cron_no_metadata_fn()'
|
||||||
|
WHERE jobname = 'cron_no_metadata';
|
||||||
|
UPDATE cron.job
|
||||||
|
SET command='select public.cron_no_activity_fn()'
|
||||||
|
WHERE jobname = 'cron_no_activity';
|
||||||
|
UPDATE cron.job
|
||||||
|
SET command='select public.cron_windy_fn()'
|
||||||
|
WHERE jobname = 'cron_windy';
|
||||||
|
UPDATE cron.job
|
||||||
|
SET command='select public.cron_alerts_fn()'
|
||||||
|
WHERE jobname = 'cron_alerts';
|
||||||
|
|
@@ -17,6 +17,8 @@ INSERT INTO app_settings (name, value) VALUES
|
|||||||
('app.pushover_app_token', '${PGSAIL_PUSHOVER_APP_TOKEN}'),
|
('app.pushover_app_token', '${PGSAIL_PUSHOVER_APP_TOKEN}'),
|
||||||
('app.pushover_app_url', '${PGSAIL_PUSHOVER_APP_URL}'),
|
('app.pushover_app_url', '${PGSAIL_PUSHOVER_APP_URL}'),
|
||||||
('app.telegram_bot_token', '${PGSAIL_TELEGRAM_BOT_TOKEN}'),
|
('app.telegram_bot_token', '${PGSAIL_TELEGRAM_BOT_TOKEN}'),
|
||||||
|
('app.grafana_admin_uri', '${PGSAIL_GRAFANA_ADMIN_URI}'),
|
||||||
|
('app.keycloak_uri', '${PGSAIL_KEYCLOAK_URI}'),
|
||||||
('app.url', '${PGSAIL_APP_URL}'),
|
('app.url', '${PGSAIL_APP_URL}'),
|
||||||
('app.version', '${PGSAIL_VERSION}');
|
('app.version', '${PGSAIL_VERSION}');
|
||||||
-- Update comment with version
|
-- Update comment with version
|
||||||
|
@@ -1 +1 @@
|
|||||||
0.5.1
|
0.7.0
|
||||||
|
@@ -604,8 +604,12 @@ request.set('User-Agent', 'PostgSail unit tests');
|
|||||||
// Override client_id
|
// Override client_id
|
||||||
data[i]['client_id'] = test.vessel_metadata.client_id;
|
data[i]['client_id'] = test.vessel_metadata.client_id;
|
||||||
}
|
}
|
||||||
// Force last entry to be back in time from previous, it should be ignore silently
|
// The last entry are invalid and should be ignore.
|
||||||
data.at(-1).time = moment.utc(data.at(-2).time).subtract(1, 'minutes').format();
|
// - Invalid status
|
||||||
|
// - Invalid speedoverground
|
||||||
|
// - Invalid time previous time is duplicate
|
||||||
|
// Force last valid entry to be back in time from previous, it should be ignore silently
|
||||||
|
data.at(-1).time = moment.utc(data.at(-3).time).subtract(1, 'minutes').format();
|
||||||
//console.log(data[0]);
|
//console.log(data[0]);
|
||||||
|
|
||||||
it('/metrics?select=time', function(done) {
|
it('/metrics?select=time', function(done) {
|
||||||
@@ -625,7 +629,7 @@ request.set('User-Agent', 'PostgSail unit tests');
|
|||||||
res.header['content-type'].should.match(new RegExp('json','g'));
|
res.header['content-type'].should.match(new RegExp('json','g'));
|
||||||
res.header['server'].should.match(new RegExp('postgrest','g'));
|
res.header['server'].should.match(new RegExp('postgrest','g'));
|
||||||
should.exist(res.body);
|
should.exist(res.body);
|
||||||
res.body.length.should.match(test.vessel_metrics['metrics'].length-1);
|
res.body.length.should.match(test.vessel_metrics['metrics'].length-3);
|
||||||
done(err);
|
done(err);
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
@@ -50,6 +50,24 @@ var moment = require("moment");
|
|||||||
payload: null,
|
payload: null,
|
||||||
res: {},
|
res: {},
|
||||||
},
|
},
|
||||||
|
timelapse_full: {
|
||||||
|
url: "/rpc/timelapse_fn",
|
||||||
|
header: { name: "x-is-public", value: btoa("kapla,public_timelapse,0") },
|
||||||
|
payload: null,
|
||||||
|
res: {},
|
||||||
|
},
|
||||||
|
stats_logs: {
|
||||||
|
url: "/rpc/stats_logs_fn",
|
||||||
|
header: { name: "x-is-public", value: btoa("kapla,public_stats,0") },
|
||||||
|
payload: null,
|
||||||
|
res: {},
|
||||||
|
},
|
||||||
|
stats_stays: {
|
||||||
|
url: "/rpc/stats_stay_fn",
|
||||||
|
header: { name: "x-is-public", value: btoa("kapla,public_stats,0") },
|
||||||
|
payload: null,
|
||||||
|
res: {},
|
||||||
|
},
|
||||||
export_gpx: {
|
export_gpx: {
|
||||||
url: "/rpc/export_logbook_gpx_fn",
|
url: "/rpc/export_logbook_gpx_fn",
|
||||||
header: { name: "x-is-public", value: btoa("kapla,public_logs,0") },
|
header: { name: "x-is-public", value: btoa("kapla,public_logs,0") },
|
||||||
@@ -79,11 +97,29 @@ var moment = require("moment");
|
|||||||
res: {},
|
res: {},
|
||||||
},
|
},
|
||||||
timelapse: {
|
timelapse: {
|
||||||
|
url: "/rpc/timelapse_fn",
|
||||||
|
header: { name: "x-is-public", value: btoa("aava,public_timelapse,3") },
|
||||||
|
payload: null,
|
||||||
|
res: {},
|
||||||
|
},
|
||||||
|
timelapse_full: {
|
||||||
url: "/rpc/timelapse_fn",
|
url: "/rpc/timelapse_fn",
|
||||||
header: { name: "x-is-public", value: btoa("aava,public_timelapse,0") },
|
header: { name: "x-is-public", value: btoa("aava,public_timelapse,0") },
|
||||||
payload: null,
|
payload: null,
|
||||||
res: {},
|
res: {},
|
||||||
},
|
},
|
||||||
|
stats_logs: {
|
||||||
|
url: "/rpc/stats_logs_fn",
|
||||||
|
header: { name: "x-is-public", value: btoa("aava,public_stats,0") },
|
||||||
|
payload: null,
|
||||||
|
res: {},
|
||||||
|
},
|
||||||
|
stats_stays: {
|
||||||
|
url: "/rpc/stats_stay_fn",
|
||||||
|
header: { name: "x-is-public", value: btoa("kapla,public_stats,0") },
|
||||||
|
payload: null,
|
||||||
|
res: {},
|
||||||
|
},
|
||||||
export_gpx: {
|
export_gpx: {
|
||||||
url: "/rpc/export_logbook_gpx_fn",
|
url: "/rpc/export_logbook_gpx_fn",
|
||||||
header: { name: "x-is-public", value: btoa("aava,public_logs,0") },
|
header: { name: "x-is-public", value: btoa("aava,public_logs,0") },
|
||||||
@@ -97,7 +133,7 @@ var moment = require("moment");
|
|||||||
request = supertest.agent(test.cname);
|
request = supertest.agent(test.cname);
|
||||||
request.set("User-Agent", "PostgSail unit tests");
|
request.set("User-Agent", "PostgSail unit tests");
|
||||||
|
|
||||||
describe("Get JWT api_anonymous", function () {
|
describe("With no JWT as api_anonymous", function () {
|
||||||
it("/logs_view, api_anonymous no jwt token", function (done) {
|
it("/logs_view, api_anonymous no jwt token", function (done) {
|
||||||
// Reset agent so we do not save cookies
|
// Reset agent so we do not save cookies
|
||||||
request = supertest.agent(test.cname);
|
request = supertest.agent(test.cname);
|
||||||
@@ -156,7 +192,7 @@ var moment = require("moment");
|
|||||||
.set("Accept", "application/json")
|
.set("Accept", "application/json")
|
||||||
.end(function (err, res) {
|
.end(function (err, res) {
|
||||||
console.log(res.text);
|
console.log(res.text);
|
||||||
res.status.should.equal(404);
|
res.status.should.equal(404); // return 404 as it is not enable in user settings.
|
||||||
should.exist(res.header["content-type"]);
|
should.exist(res.header["content-type"]);
|
||||||
should.exist(res.header["server"]);
|
should.exist(res.header["server"]);
|
||||||
res.header["content-type"].should.match(new RegExp("json", "g"));
|
res.header["content-type"].should.match(new RegExp("json", "g"));
|
||||||
|
@@ -573,7 +573,31 @@
|
|||||||
"courseovergroundtrue" : 197.6,
|
"courseovergroundtrue" : 197.6,
|
||||||
"windspeedapparent" : 15.9,
|
"windspeedapparent" : 15.9,
|
||||||
"anglespeedapparent" : 31.0,
|
"anglespeedapparent" : 31.0,
|
||||||
|
"status" : "ais-sart",
|
||||||
|
"metrics" : {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"time" : "2022-07-31T12:14:29.168Z",
|
||||||
|
"client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
|
||||||
|
"latitude" : 59.7124174,
|
||||||
|
"longitude" : 24.7289112,
|
||||||
|
"speedoverground" : 55.7,
|
||||||
|
"courseovergroundtrue" : 197.6,
|
||||||
|
"windspeedapparent" : 15.9,
|
||||||
|
"anglespeedapparent" : 31.0,
|
||||||
"status" : "anchored",
|
"status" : "anchored",
|
||||||
"metrics" : {"navigation.log": 17442506, "navigation.trip.log": 81321, "navigation.headingTrue": 3.571, "navigation.gnss.satellites": 10, "environment.depth.belowKeel": 13.749, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.07, "environment.water.temperature": 313.15, "electrical.batteries.1.current": 43.9, "electrical.batteries.1.voltage": 14.54, "navigation.gnss.antennaAltitude": 2.05, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 13.75, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 40, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 4, "network.n2k.ngt-1.130356.ch1.bandwidth": 3, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 5, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 10, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0}
|
"metrics" : {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"time" : "2022-07-31T12:14:29.168Z",
|
||||||
|
"client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
|
||||||
|
"latitude" : 59.7124174,
|
||||||
|
"longitude" : 24.7289112,
|
||||||
|
"speedoverground" : 5.7,
|
||||||
|
"courseovergroundtrue" : 197.6,
|
||||||
|
"windspeedapparent" : 15.9,
|
||||||
|
"anglespeedapparent" : 31.0,
|
||||||
|
"status" : "anchored",
|
||||||
|
"metrics" : {}
|
||||||
}
|
}
|
||||||
]}
|
]}
|
||||||
|
@@ -633,7 +633,31 @@
|
|||||||
"courseovergroundtrue" : 122.0,
|
"courseovergroundtrue" : 122.0,
|
||||||
"windspeedapparent" : 7.2,
|
"windspeedapparent" : 7.2,
|
||||||
"anglespeedapparent" : 10.0,
|
"anglespeedapparent" : 10.0,
|
||||||
|
"status" : "unknown",
|
||||||
|
"metrics" : {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"time" : "2022-07-30T15:41:28.867Z",
|
||||||
|
"client_id" : "vessels.urn:mrn:imo:mmsi:123456789",
|
||||||
|
"latitude" : 59.86,
|
||||||
|
"longitude" : 23.365766666666666,
|
||||||
|
"speedoverground" : 60.0,
|
||||||
|
"courseovergroundtrue" : 122.0,
|
||||||
|
"windspeedapparent" : 7.2,
|
||||||
|
"anglespeedapparent" : 10.0,
|
||||||
"status" : "anchored",
|
"status" : "anchored",
|
||||||
"metrics" : {"environment.wind.speedTrue": 0.63, "navigation.speedThroughWater": 3.2255674838104293, "performance.velocityMadeGood": -2.242978345998959, "environment.wind.angleTrueWater": 2.3038346131585485, "environment.depth.belowTransducer": 17.73, "navigation.courseOverGroundMagnetic": 2.129127154994025, "navigation.courseRhumbline.crossTrackError": 0}
|
"metrics" : {}
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"time" : "2022-07-30T15:41:28.867Z",
|
||||||
|
"client_id" : "vessels.urn:mrn:imo:mmsi:123456789",
|
||||||
|
"latitude" : 59.86,
|
||||||
|
"longitude" : 23.365766666666666,
|
||||||
|
"speedoverground" : 0.0,
|
||||||
|
"courseovergroundtrue" : 122.0,
|
||||||
|
"windspeedapparent" : 7.2,
|
||||||
|
"anglespeedapparent" : 10.0,
|
||||||
|
"status" : "anchored",
|
||||||
|
"metrics" : {}
|
||||||
}
|
}
|
||||||
]}
|
]}
|
||||||
|
@@ -15,6 +15,9 @@ ispublic_fn | f
|
|||||||
-[ RECORD 1 ]--
|
-[ RECORD 1 ]--
|
||||||
ispublic_fn | t
|
ispublic_fn | t
|
||||||
|
|
||||||
|
-[ RECORD 1 ]--
|
||||||
|
ispublic_fn | f
|
||||||
|
|
||||||
-[ RECORD 1 ]--
|
-[ RECORD 1 ]--
|
||||||
ispublic_fn | t
|
ispublic_fn | t
|
||||||
|
|
||||||
|
@@ -27,10 +27,10 @@ SELECT set_config('user.email', 'demo+kapla@openplotter.cloud', false);
|
|||||||
--SELECT set_config('vessel.client_id', 'vessels.urn:mrn:imo:mmsi:123456789', false);
|
--SELECT set_config('vessel.client_id', 'vessels.urn:mrn:imo:mmsi:123456789', false);
|
||||||
|
|
||||||
\echo 'Process badge'
|
\echo 'Process badge'
|
||||||
SELECT badges_logbook_fn(5);
|
SELECT badges_logbook_fn(5,NOW()::TEXT);
|
||||||
SELECT badges_logbook_fn(6);
|
SELECT badges_logbook_fn(6,NOW()::TEXT);
|
||||||
SELECT badges_geom_fn(5);
|
SELECT badges_geom_fn(5,NOW()::TEXT);
|
||||||
SELECT badges_geom_fn(6);
|
SELECT badges_geom_fn(6,NOW()::TEXT);
|
||||||
|
|
||||||
\echo 'Check badges for user'
|
\echo 'Check badges for user'
|
||||||
SELECT jsonb_object_keys ( a.preferences->'badges' ) FROM auth.accounts a;
|
SELECT jsonb_object_keys ( a.preferences->'badges' ) FROM auth.accounts a;
|
||||||
|
@@ -31,7 +31,7 @@ SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_tim
|
|||||||
\echo 'stays'
|
\echo 'stays'
|
||||||
SELECT count(*) FROM api.stays WHERE vessel_id = current_setting('vessel.id', false);
|
SELECT count(*) FROM api.stays WHERE vessel_id = current_setting('vessel.id', false);
|
||||||
\echo 'stays'
|
\echo 'stays'
|
||||||
SELECT active,name,geog,stay_code FROM api.stays WHERE vessel_id = current_setting('vessel.id', false);
|
SELECT active,name IS NOT NULL AS name,geog,stay_code FROM api.stays WHERE vessel_id = current_setting('vessel.id', false);
|
||||||
|
|
||||||
-- Test event logs view for user
|
-- Test event logs view for user
|
||||||
\echo 'eventlogs_view'
|
\echo 'eventlogs_view'
|
||||||
@@ -69,3 +69,8 @@ SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('ve
|
|||||||
--SELECT api.export_logbook_geojson_fn(1);
|
--SELECT api.export_logbook_geojson_fn(1);
|
||||||
--SELECT api.export_logbook_gpx_fn(1);
|
--SELECT api.export_logbook_gpx_fn(1);
|
||||||
--SELECT api.export_logbook_kml_fn(1);
|
--SELECT api.export_logbook_kml_fn(1);
|
||||||
|
|
||||||
|
-- Check history
|
||||||
|
--\echo 'monitoring history fn'
|
||||||
|
--select api.monitoring_history_fn();
|
||||||
|
--select api.monitoring_history_fn('24');
|
||||||
|
@@ -27,7 +27,7 @@ duration | PT27M
|
|||||||
avg_speed | 3.6357142857142852
|
avg_speed | 3.6357142857142852
|
||||||
max_speed | 6.1
|
max_speed | 6.1
|
||||||
max_wind_speed | 22.1
|
max_wind_speed | 22.1
|
||||||
notes | new log note
|
notes |
|
||||||
extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
|
extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
|
||||||
-[ RECORD 2 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 2 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||||
name | Norra hamnen to Ekenäs
|
name | Norra hamnen to Ekenäs
|
||||||
@@ -50,23 +50,23 @@ count | 3
|
|||||||
stays
|
stays
|
||||||
-[ RECORD 1 ]-------------------------------------------------
|
-[ RECORD 1 ]-------------------------------------------------
|
||||||
active | t
|
active | t
|
||||||
name |
|
name | f
|
||||||
geog |
|
geog |
|
||||||
stay_code | 2
|
stay_code | 2
|
||||||
-[ RECORD 2 ]-------------------------------------------------
|
-[ RECORD 2 ]-------------------------------------------------
|
||||||
active | f
|
active | f
|
||||||
name | 0 days stay at Pojoviken in November 2023
|
name | t
|
||||||
geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40
|
geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40
|
||||||
stay_code | 2
|
stay_code | 2
|
||||||
-[ RECORD 3 ]-------------------------------------------------
|
-[ RECORD 3 ]-------------------------------------------------
|
||||||
active | f
|
active | f
|
||||||
name | 0 days stay at Norra hamnen in November 2023
|
name | t
|
||||||
geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40
|
geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40
|
||||||
stay_code | 4
|
stay_code | 4
|
||||||
|
|
||||||
eventlogs_view
|
eventlogs_view
|
||||||
-[ RECORD 1 ]
|
-[ RECORD 1 ]
|
||||||
count | 11
|
count | 12
|
||||||
|
|
||||||
stats_logs_fn
|
stats_logs_fn
|
||||||
SELECT 1
|
SELECT 1
|
||||||
|
@@ -12,9 +12,14 @@ select current_database();
|
|||||||
\x on
|
\x on
|
||||||
|
|
||||||
-- Check the number of process pending
|
-- Check the number of process pending
|
||||||
|
\echo 'Check the number of process pending'
|
||||||
-- Should be 22
|
-- Should be 22
|
||||||
SELECT count(*) as jobs from public.process_queue pq where pq.processed is null;
|
SELECT count(*) as jobs from public.process_queue pq where pq.processed is null;
|
||||||
--set role scheduler
|
--set role scheduler
|
||||||
SELECT public.run_cron_jobs();
|
SELECT public.run_cron_jobs();
|
||||||
-- Check any pending job
|
-- Check any pending job
|
||||||
SELECT count(*) as any_pending_jobs from public.process_queue pq where pq.processed is null;
|
SELECT count(*) as any_pending_jobs from public.process_queue pq where pq.processed is null;
|
||||||
|
|
||||||
|
-- Check the number of metrics entries
|
||||||
|
\echo 'Check the number of metrics entries'
|
||||||
|
SELECT count(*) as metrics_count from api.metrics;
|
||||||
|
@@ -5,12 +5,17 @@
|
|||||||
|
|
||||||
You are now connected to database "signalk" as user "username".
|
You are now connected to database "signalk" as user "username".
|
||||||
Expanded display is on.
|
Expanded display is on.
|
||||||
|
Check the number of process pending
|
||||||
-[ RECORD 1 ]
|
-[ RECORD 1 ]
|
||||||
jobs | 24
|
jobs | 26
|
||||||
|
|
||||||
-[ RECORD 1 ]-+-
|
-[ RECORD 1 ]-+-
|
||||||
run_cron_jobs |
|
run_cron_jobs |
|
||||||
|
|
||||||
-[ RECORD 1 ]----+--
|
-[ RECORD 1 ]----+--
|
||||||
any_pending_jobs | 0
|
any_pending_jobs | 2
|
||||||
|
|
||||||
|
Check the number of metrics entries
|
||||||
|
-[ RECORD 1 ]-+----
|
||||||
|
metrics_count | 172
|
||||||
|
|
||||||
|
@@ -65,7 +65,7 @@ SELECT l.id, l.name, l.from, l.to, l.distance, l.duration, l._from_moorage_id, l
|
|||||||
|
|
||||||
\echo 'api.stays'
|
\echo 'api.stays'
|
||||||
--SELECT * FROM api.stays s;
|
--SELECT * FROM api.stays s;
|
||||||
SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.moorage_id, m.active, m.name, m.latitude, m.longitude, m.geog, m.arrived IS NOT NULL AS arrived, m.departed IS NOT NULL AS departed, m.duration, m.stay_code, m.notes FROM api.stays AS m;
|
SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.moorage_id, m.active, m.name IS NOT NULL AS name, m.latitude, m.longitude, m.geog, m.arrived IS NOT NULL AS arrived, m.departed IS NOT NULL AS departed, m.duration, m.stay_code, m.notes FROM api.stays AS m;
|
||||||
|
|
||||||
\echo 'stays_view'
|
\echo 'stays_view'
|
||||||
--SELECT * FROM api.stays_view s;
|
--SELECT * FROM api.stays_view s;
|
||||||
|
@@ -148,7 +148,7 @@ id | 3
|
|||||||
vessel_id | t
|
vessel_id | t
|
||||||
moorage_id |
|
moorage_id |
|
||||||
active | t
|
active | t
|
||||||
name |
|
name | f
|
||||||
latitude | 59.86
|
latitude | 59.86
|
||||||
longitude | 23.365766666666666
|
longitude | 23.365766666666666
|
||||||
geog |
|
geog |
|
||||||
@@ -162,7 +162,7 @@ id | 1
|
|||||||
vessel_id | t
|
vessel_id | t
|
||||||
moorage_id | 1
|
moorage_id | 1
|
||||||
active | f
|
active | f
|
||||||
name | patch stay name 3
|
name | t
|
||||||
latitude | 60.077666666666666
|
latitude | 60.077666666666666
|
||||||
longitude | 23.530866666666668
|
longitude | 23.530866666666668
|
||||||
geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40
|
geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40
|
||||||
@@ -176,7 +176,7 @@ id | 2
|
|||||||
vessel_id | t
|
vessel_id | t
|
||||||
moorage_id | 2
|
moorage_id | 2
|
||||||
active | f
|
active | f
|
||||||
name | 0 days stay at Norra hamnen in November 2023
|
name | t
|
||||||
latitude | 59.97688333333333
|
latitude | 59.97688333333333
|
||||||
longitude | 23.4321
|
longitude | 23.4321
|
||||||
geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40
|
geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40
|
||||||
|
@@ -81,6 +81,10 @@ select * from pg_policies;
|
|||||||
SELECT public.reverse_geocode_py_fn('nominatim', 1.4440116666666667, 38.82985166666667);
|
SELECT public.reverse_geocode_py_fn('nominatim', 1.4440116666666667, 38.82985166666667);
|
||||||
\echo 'Test geoip reverse_geoip_py_fn'
|
\echo 'Test geoip reverse_geoip_py_fn'
|
||||||
--SELECT reverse_geoip_py_fn('62.74.13.231');
|
--SELECT reverse_geoip_py_fn('62.74.13.231');
|
||||||
|
\echo 'Test opverpass API overpass_py_fn'
|
||||||
|
SELECT public.overpass_py_fn(2.19917, 41.386873333333334); -- Port Olimpic
|
||||||
|
SELECT public.overpass_py_fn(1.92574333333, 41.258915); -- Port de la Ginesta
|
||||||
|
SELECT public.overpass_py_fn(23.4321, 59.9768833333333); -- Norra hamnen
|
||||||
|
|
||||||
-- List details product versions
|
-- List details product versions
|
||||||
SELECT api.versions_fn();
|
SELECT api.versions_fn();
|
||||||
|
@@ -6,10 +6,10 @@
|
|||||||
You are now connected to database "signalk" as user "username".
|
You are now connected to database "signalk" as user "username".
|
||||||
Expanded display is on.
|
Expanded display is on.
|
||||||
-[ RECORD 1 ]--+-------------------------------
|
-[ RECORD 1 ]--+-------------------------------
|
||||||
server_version | 15.5 (Debian 15.5-1.pgdg110+1)
|
server_version | 16.2 (Debian 16.2-1.pgdg110+2)
|
||||||
|
|
||||||
-[ RECORD 1 ]--------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 1 ]--------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||||
postgis_full_version | POSTGIS="3.4.0 0874ea3" [EXTENSION] PGSQL="150" GEOS="3.9.0-CAPI-1.16.2" PROJ="7.2.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.10" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)"
|
postgis_full_version | POSTGIS="3.4.2 c19ce56" [EXTENSION] PGSQL="160" GEOS="3.9.0-CAPI-1.16.2" PROJ="7.2.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.10" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)"
|
||||||
|
|
||||||
-[ RECORD 1 ]--------------------------------------------------------------------------------------
|
-[ RECORD 1 ]--------------------------------------------------------------------------------------
|
||||||
Name | citext
|
Name | citext
|
||||||
@@ -48,12 +48,12 @@ Schema | pg_catalog
|
|||||||
Description | PL/Python3U untrusted procedural language
|
Description | PL/Python3U untrusted procedural language
|
||||||
-[ RECORD 8 ]--------------------------------------------------------------------------------------
|
-[ RECORD 8 ]--------------------------------------------------------------------------------------
|
||||||
Name | postgis
|
Name | postgis
|
||||||
Version | 3.4.0
|
Version | 3.4.2
|
||||||
Schema | public
|
Schema | public
|
||||||
Description | PostGIS geometry and geography spatial types and functions
|
Description | PostGIS geometry and geography spatial types and functions
|
||||||
-[ RECORD 9 ]--------------------------------------------------------------------------------------
|
-[ RECORD 9 ]--------------------------------------------------------------------------------------
|
||||||
Name | timescaledb
|
Name | timescaledb
|
||||||
Version | 2.12.2
|
Version | 2.14.1
|
||||||
Schema | public
|
Schema | public
|
||||||
Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
|
Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
|
||||||
-[ RECORD 10 ]-------------------------------------------------------------------------------------
|
-[ RECORD 10 ]-------------------------------------------------------------------------------------
|
||||||
@@ -96,24 +96,24 @@ laninline | 0
|
|||||||
lanvalidator | 2248
|
lanvalidator | 2248
|
||||||
lanacl |
|
lanacl |
|
||||||
-[ RECORD 4 ]-+-----------
|
-[ RECORD 4 ]-+-----------
|
||||||
oid | 13542
|
oid | 13545
|
||||||
lanname | plpgsql
|
lanname | plpgsql
|
||||||
lanowner | 10
|
lanowner | 10
|
||||||
lanispl | t
|
lanispl | t
|
||||||
lanpltrusted | t
|
lanpltrusted | t
|
||||||
lanplcallfoid | 13539
|
lanplcallfoid | 13542
|
||||||
laninline | 13540
|
laninline | 13543
|
||||||
lanvalidator | 13541
|
lanvalidator | 13544
|
||||||
lanacl |
|
lanacl |
|
||||||
-[ RECORD 5 ]-+-----------
|
-[ RECORD 5 ]-+-----------
|
||||||
oid | 18283
|
oid | 18175
|
||||||
lanname | plpython3u
|
lanname | plpython3u
|
||||||
lanowner | 10
|
lanowner | 10
|
||||||
lanispl | t
|
lanispl | t
|
||||||
lanpltrusted | t
|
lanpltrusted | t
|
||||||
lanplcallfoid | 18280
|
lanplcallfoid | 18172
|
||||||
laninline | 18281
|
laninline | 18173
|
||||||
lanvalidator | 18282
|
lanvalidator | 18174
|
||||||
lanacl |
|
lanacl |
|
||||||
|
|
||||||
-[ RECORD 1 ]+-----------
|
-[ RECORD 1 ]+-----------
|
||||||
@@ -243,6 +243,8 @@ schema_auth | accounts
|
|||||||
-[ RECORD 2 ]---------
|
-[ RECORD 2 ]---------
|
||||||
schema_auth | otp
|
schema_auth | otp
|
||||||
-[ RECORD 3 ]---------
|
-[ RECORD 3 ]---------
|
||||||
|
schema_auth | users
|
||||||
|
-[ RECORD 4 ]---------
|
||||||
schema_auth | vessels
|
schema_auth | vessels
|
||||||
|
|
||||||
(0 rows)
|
(0 rows)
|
||||||
@@ -321,213 +323,6 @@ cmd | ALL
|
|||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
with_check | true
|
with_check | true
|
||||||
-[ RECORD 9 ]------------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 9 ]------------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | api
|
|
||||||
tablename | metrics
|
|
||||||
policyname | api_user_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {user_role}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
|
||||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
-[ RECORD 10 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | metrics
|
|
||||||
policyname | api_scheduler_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {scheduler}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
-[ RECORD 11 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | metrics
|
|
||||||
policyname | grafana_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {grafana}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | false
|
|
||||||
-[ RECORD 12 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | metrics
|
|
||||||
policyname | api_anonymous_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {api_anonymous}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | false
|
|
||||||
-[ RECORD 13 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | logbook
|
|
||||||
policyname | admin_all
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {username}
|
|
||||||
cmd | ALL
|
|
||||||
qual | true
|
|
||||||
with_check | true
|
|
||||||
-[ RECORD 14 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | logbook
|
|
||||||
policyname | api_vessel_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {vessel_role}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | true
|
|
||||||
-[ RECORD 15 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | logbook
|
|
||||||
policyname | api_user_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {user_role}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
|
||||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
-[ RECORD 16 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | logbook
|
|
||||||
policyname | api_scheduler_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {scheduler}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
-[ RECORD 17 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | logbook
|
|
||||||
policyname | grafana_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {grafana}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | false
|
|
||||||
-[ RECORD 18 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | logbook
|
|
||||||
policyname | api_anonymous_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {api_anonymous}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | false
|
|
||||||
-[ RECORD 19 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | stays
|
|
||||||
policyname | admin_all
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {username}
|
|
||||||
cmd | ALL
|
|
||||||
qual | true
|
|
||||||
with_check | true
|
|
||||||
-[ RECORD 20 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | stays
|
|
||||||
policyname | api_vessel_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {vessel_role}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | true
|
|
||||||
-[ RECORD 21 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | stays
|
|
||||||
policyname | api_user_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {user_role}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
|
||||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
-[ RECORD 22 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | stays
|
|
||||||
policyname | api_scheduler_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {scheduler}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
-[ RECORD 23 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | stays
|
|
||||||
policyname | grafana_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {grafana}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | false
|
|
||||||
-[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | moorages
|
|
||||||
policyname | admin_all
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {username}
|
|
||||||
cmd | ALL
|
|
||||||
qual | true
|
|
||||||
with_check | true
|
|
||||||
-[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | moorages
|
|
||||||
policyname | api_vessel_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {vessel_role}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | true
|
|
||||||
-[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | moorages
|
|
||||||
policyname | api_user_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {user_role}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
|
||||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
-[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | moorages
|
|
||||||
policyname | api_scheduler_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {scheduler}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
-[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | api
|
|
||||||
tablename | moorages
|
|
||||||
policyname | grafana_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {grafana}
|
|
||||||
cmd | ALL
|
|
||||||
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
|
||||||
with_check | false
|
|
||||||
-[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | auth
|
|
||||||
tablename | vessels
|
|
||||||
policyname | admin_all
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {username}
|
|
||||||
cmd | ALL
|
|
||||||
qual | true
|
|
||||||
with_check | true
|
|
||||||
-[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | auth
|
|
||||||
tablename | vessels
|
|
||||||
policyname | api_user_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {user_role}
|
|
||||||
cmd | ALL
|
|
||||||
qual | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
|
|
||||||
with_check | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
|
|
||||||
-[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | auth
|
|
||||||
tablename | vessels
|
|
||||||
policyname | grafana_role
|
|
||||||
permissive | PERMISSIVE
|
|
||||||
roles | {grafana}
|
|
||||||
cmd | ALL
|
|
||||||
qual | ((owner_email)::text = current_setting('user.email'::text, true))
|
|
||||||
with_check | false
|
|
||||||
-[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
|
|
||||||
schemaname | auth
|
schemaname | auth
|
||||||
tablename | vessels
|
tablename | vessels
|
||||||
policyname | grafana_proxy_role
|
policyname | grafana_proxy_role
|
||||||
@@ -536,7 +331,61 @@ roles | {grafana_auth}
|
|||||||
cmd | ALL
|
cmd | ALL
|
||||||
qual | true
|
qual | true
|
||||||
with_check | false
|
with_check | false
|
||||||
-[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 10 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | metrics
|
||||||
|
policyname | api_user_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {user_role}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||||
|
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
-[ RECORD 11 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | metrics
|
||||||
|
policyname | api_scheduler_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {scheduler}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
-[ RECORD 12 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | metrics
|
||||||
|
policyname | grafana_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {grafana}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 13 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | metrics
|
||||||
|
policyname | api_anonymous_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {api_anonymous}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 14 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | logbook
|
||||||
|
policyname | admin_all
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {username}
|
||||||
|
cmd | ALL
|
||||||
|
qual | true
|
||||||
|
with_check | true
|
||||||
|
-[ RECORD 15 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | logbook
|
||||||
|
policyname | api_vessel_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {vessel_role}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | true
|
||||||
|
-[ RECORD 16 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | auth
|
schemaname | auth
|
||||||
tablename | accounts
|
tablename | accounts
|
||||||
policyname | admin_all
|
policyname | admin_all
|
||||||
@@ -545,8 +394,179 @@ roles | {username}
|
|||||||
cmd | ALL
|
cmd | ALL
|
||||||
qual | true
|
qual | true
|
||||||
with_check | true
|
with_check | true
|
||||||
|
-[ RECORD 17 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | logbook
|
||||||
|
policyname | api_user_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {user_role}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||||
|
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
-[ RECORD 18 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | logbook
|
||||||
|
policyname | api_scheduler_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {scheduler}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
-[ RECORD 19 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | logbook
|
||||||
|
policyname | grafana_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {grafana}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 20 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | logbook
|
||||||
|
policyname | api_anonymous_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {api_anonymous}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 21 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | stays
|
||||||
|
policyname | admin_all
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {username}
|
||||||
|
cmd | ALL
|
||||||
|
qual | true
|
||||||
|
with_check | true
|
||||||
|
-[ RECORD 22 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | stays
|
||||||
|
policyname | api_vessel_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {vessel_role}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | true
|
||||||
|
-[ RECORD 23 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | stays
|
||||||
|
policyname | api_user_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {user_role}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||||
|
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
-[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | stays
|
||||||
|
policyname | api_scheduler_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {scheduler}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
-[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | stays
|
||||||
|
policyname | grafana_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {grafana}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | stays
|
||||||
|
policyname | api_anonymous_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {api_anonymous}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | moorages
|
||||||
|
policyname | admin_all
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {username}
|
||||||
|
cmd | ALL
|
||||||
|
qual | true
|
||||||
|
with_check | true
|
||||||
|
-[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | moorages
|
||||||
|
policyname | api_vessel_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {vessel_role}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | true
|
||||||
|
-[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | moorages
|
||||||
|
policyname | api_user_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {user_role}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, true))
|
||||||
|
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
-[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | moorages
|
||||||
|
policyname | api_scheduler_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {scheduler}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
-[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | moorages
|
||||||
|
policyname | grafana_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {grafana}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | api
|
||||||
|
tablename | moorages
|
||||||
|
policyname | api_anonymous_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {api_anonymous}
|
||||||
|
cmd | ALL
|
||||||
|
qual | (vessel_id = current_setting('vessel.id'::text, false))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | auth
|
||||||
|
tablename | vessels
|
||||||
|
policyname | admin_all
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {username}
|
||||||
|
cmd | ALL
|
||||||
|
qual | true
|
||||||
|
with_check | true
|
||||||
-[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | auth
|
schemaname | auth
|
||||||
|
tablename | vessels
|
||||||
|
policyname | api_user_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {user_role}
|
||||||
|
cmd | ALL
|
||||||
|
qual | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
|
||||||
|
with_check | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
|
||||||
|
-[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | auth
|
||||||
|
tablename | vessels
|
||||||
|
policyname | grafana_role
|
||||||
|
permissive | PERMISSIVE
|
||||||
|
roles | {grafana}
|
||||||
|
cmd | ALL
|
||||||
|
qual | ((owner_email)::text = current_setting('user.email'::text, true))
|
||||||
|
with_check | false
|
||||||
|
-[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
|
schemaname | auth
|
||||||
tablename | accounts
|
tablename | accounts
|
||||||
policyname | api_user_role
|
policyname | api_user_role
|
||||||
permissive | PERMISSIVE
|
permissive | PERMISSIVE
|
||||||
@@ -554,7 +574,7 @@ roles | {user_role}
|
|||||||
cmd | ALL
|
cmd | ALL
|
||||||
qual | ((email)::text = current_setting('user.email'::text, true))
|
qual | ((email)::text = current_setting('user.email'::text, true))
|
||||||
with_check | ((email)::text = current_setting('user.email'::text, true))
|
with_check | ((email)::text = current_setting('user.email'::text, true))
|
||||||
-[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | auth
|
schemaname | auth
|
||||||
tablename | accounts
|
tablename | accounts
|
||||||
policyname | api_scheduler_role
|
policyname | api_scheduler_role
|
||||||
@@ -563,7 +583,7 @@ roles | {scheduler}
|
|||||||
cmd | ALL
|
cmd | ALL
|
||||||
qual | ((email)::text = current_setting('user.email'::text, true))
|
qual | ((email)::text = current_setting('user.email'::text, true))
|
||||||
with_check | ((email)::text = current_setting('user.email'::text, true))
|
with_check | ((email)::text = current_setting('user.email'::text, true))
|
||||||
-[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | auth
|
schemaname | auth
|
||||||
tablename | accounts
|
tablename | accounts
|
||||||
policyname | grafana_proxy_role
|
policyname | grafana_proxy_role
|
||||||
@@ -572,7 +592,7 @@ roles | {grafana_auth}
|
|||||||
cmd | ALL
|
cmd | ALL
|
||||||
qual | true
|
qual | true
|
||||||
with_check | false
|
with_check | false
|
||||||
-[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | public
|
schemaname | public
|
||||||
tablename | process_queue
|
tablename | process_queue
|
||||||
policyname | admin_all
|
policyname | admin_all
|
||||||
@@ -581,7 +601,7 @@ roles | {username}
|
|||||||
cmd | ALL
|
cmd | ALL
|
||||||
qual | true
|
qual | true
|
||||||
with_check | true
|
with_check | true
|
||||||
-[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | public
|
schemaname | public
|
||||||
tablename | process_queue
|
tablename | process_queue
|
||||||
policyname | api_vessel_role
|
policyname | api_vessel_role
|
||||||
@@ -590,7 +610,7 @@ roles | {vessel_role}
|
|||||||
cmd | ALL
|
cmd | ALL
|
||||||
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
||||||
with_check | true
|
with_check | true
|
||||||
-[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 41 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | public
|
schemaname | public
|
||||||
tablename | process_queue
|
tablename | process_queue
|
||||||
policyname | api_user_role
|
policyname | api_user_role
|
||||||
@@ -599,7 +619,7 @@ roles | {user_role}
|
|||||||
cmd | ALL
|
cmd | ALL
|
||||||
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
||||||
with_check | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
with_check | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
|
||||||
-[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 42 ]-----------------------------------------------------------------------------------------------------------------------------
|
||||||
schemaname | public
|
schemaname | public
|
||||||
tablename | process_queue
|
tablename | process_queue
|
||||||
policyname | api_scheduler_role
|
policyname | api_scheduler_role
|
||||||
@@ -614,13 +634,23 @@ Test nominatim reverse_geocode_py_fn
|
|||||||
reverse_geocode_py_fn | {"name": "Spain", "country_code": "es"}
|
reverse_geocode_py_fn | {"name": "Spain", "country_code": "es"}
|
||||||
|
|
||||||
Test geoip reverse_geoip_py_fn
|
Test geoip reverse_geoip_py_fn
|
||||||
|
Test opverpass API overpass_py_fn
|
||||||
|
-[ RECORD 1 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||||
|
overpass_py_fn | {"fee": "yes", "vhf": "09", "name": "Port Olímpic", "phone": "+34 933561016", "leisure": "marina", "website": "https://portolimpic.barcelona/", "wikidata": "Q171204", "wikipedia": "ca:Port Olímpic de Barcelona", "addr:street": "Moll de Xaloc", "power_supply": "yes", "seamark:type": "harbour", "addr:postcode": "08005", "internet_access": "wlan", "wikimedia_commons": "Category:Port Olímpic (Barcelona)", "sanitary_dump_station": "yes", "seamark:harbour:category": "marina"}
|
||||||
|
|
||||||
|
-[ RECORD 1 ]--+----------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||||
|
overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure": "marina", "name:ca": "Port de la Ginesta", "wikidata": "Q16621038", "wikipedia": "ca:Port Ginesta"}
|
||||||
|
|
||||||
|
-[ RECORD 1 ]--+----------------------------------------------
|
||||||
|
overpass_py_fn | {"name": "Norra hamnen", "leisure": "marina"}
|
||||||
|
|
||||||
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------
|
||||||
versions_fn | {"api_version" : "0.5.1", "sys_version" : "PostgreSQL 15.5", "timescaledb" : "2.12.2", "postgis" : "3.4.0", "postgrest" : "PostgREST 11.2.2"}
|
versions_fn | {"api_version" : "0.6.1", "sys_version" : "PostgreSQL 16.2", "timescaledb" : "2.14.1", "postgis" : "3.4.2", "postgrest" : "PostgREST 12.0.2"}
|
||||||
|
|
||||||
-[ RECORD 1 ]-----------------
|
-[ RECORD 1 ]-----------------
|
||||||
api_version | 0.5.1
|
api_version | 0.7.0
|
||||||
sys_version | PostgreSQL 15.5
|
sys_version | PostgreSQL 16.2
|
||||||
timescaledb | 2.12.2
|
timescaledb | 2.14.1
|
||||||
postgis | 3.4.0
|
postgis | 3.4.2
|
||||||
postgrest | PostgREST 11.2.2
|
postgrest | PostgREST 12.0.2
|
||||||
|
|
||||||
|
@@ -36,7 +36,7 @@ SET vessel.name = 'kapla';
|
|||||||
--SET vessel.client_id = 'vessels.urn:mrn:imo:mmsi:123456789';
|
--SET vessel.client_id = 'vessels.urn:mrn:imo:mmsi:123456789';
|
||||||
--SELECT * FROM api.vessels_view v;
|
--SELECT * FROM api.vessels_view v;
|
||||||
SELECT name, mmsi, created_at IS NOT NULL as created_at, last_contact IS NOT NULL as last_contact FROM api.vessels_view v;
|
SELECT name, mmsi, created_at IS NOT NULL as created_at, last_contact IS NOT NULL as last_contact FROM api.vessels_view v;
|
||||||
SELECT name,geojson,watertemperature,insidetemperature,outsidetemperature FROM api.monitoring_view m;
|
SELECT name,geojson->'geometry' as geometry,watertemperature,insidetemperature,outsidetemperature FROM api.monitoring_view m;
|
||||||
|
|
||||||
SET "user.email" = 'demo+aava@openplotter.cloud';
|
SET "user.email" = 'demo+aava@openplotter.cloud';
|
||||||
SELECT set_config('vessel.id', :'vessel_id_aava', false) IS NOT NULL as vessel_id;
|
SELECT set_config('vessel.id', :'vessel_id_aava', false) IS NOT NULL as vessel_id;
|
||||||
@@ -45,4 +45,4 @@ SET vessel.name = 'aava';
|
|||||||
--SET vessel.client_id = 'vessels.urn:mrn:imo:mmsi:787654321';
|
--SET vessel.client_id = 'vessels.urn:mrn:imo:mmsi:787654321';
|
||||||
--SELECT * FROM api.vessels_view v;
|
--SELECT * FROM api.vessels_view v;
|
||||||
SELECT name, mmsi, created_at IS NOT NULL as created_at, last_contact IS NOT NULL as last_contact FROM api.vessels_view v;
|
SELECT name, mmsi, created_at IS NOT NULL as created_at, last_contact IS NOT NULL as last_contact FROM api.vessels_view v;
|
||||||
SELECT name,geojson,watertemperature,insidetemperature,outsidetemperature FROM api.monitoring_view m;
|
SELECT name,geojson->'geometry' as geometry,watertemperature,insidetemperature,outsidetemperature FROM api.monitoring_view m;
|
||||||
|
@@ -37,9 +37,9 @@ mmsi |
|
|||||||
created_at | t
|
created_at | t
|
||||||
last_contact | t
|
last_contact | t
|
||||||
|
|
||||||
-[ RECORD 1 ]------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 1 ]------+--------------------------------------------------------
|
||||||
name | kapla
|
name | kapla
|
||||||
geojson | {"type": "Feature", "geometry": {"type": "Point", "coordinates": [23.365766667, 59.86]}, "properties": {"name": "kapla", "latitude": 59.86, "longitude": 23.365766666666666}}
|
geometry | {"type": "Point", "coordinates": [23.365766667, 59.86]}
|
||||||
watertemperature |
|
watertemperature |
|
||||||
insidetemperature |
|
insidetemperature |
|
||||||
outsidetemperature |
|
outsidetemperature |
|
||||||
@@ -55,9 +55,9 @@ mmsi | 787654321
|
|||||||
created_at | t
|
created_at | t
|
||||||
last_contact | t
|
last_contact | t
|
||||||
|
|
||||||
-[ RECORD 1 ]------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 1 ]------+------------------------------------------------------------
|
||||||
name | aava
|
name | aava
|
||||||
geojson | {"type": "Feature", "geometry": {"type": "Point", "coordinates": [2.2934791, 41.465333283]}, "properties": {"name": "aava", "latitude": 41.46533328333334, "longitude": 2.2934791}}
|
geometry | {"type": "Point", "coordinates": [2.2934791, 41.465333283]}
|
||||||
watertemperature | 280.25
|
watertemperature | 280.25
|
||||||
insidetemperature |
|
insidetemperature |
|
||||||
outsidetemperature |
|
outsidetemperature |
|
||||||
|
@@ -9,6 +9,11 @@ if [[ -z "${PGSAIL_API_URI}" ]]; then
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# psql
|
||||||
|
if [[ ! -x "/usr/bin/psql" ]]; then
|
||||||
|
apt update && apt -y install postgresql-client
|
||||||
|
fi
|
||||||
|
|
||||||
# go install
|
# go install
|
||||||
if [[ ! -x "/usr/bin/go" || ! -x "/root/go/bin/mermerd" ]]; then
|
if [[ ! -x "/usr/bin/go" || ! -x "/root/go/bin/mermerd" ]]; then
|
||||||
#wget -q https://go.dev/dl/go1.21.4.linux-arm64.tar.gz && \
|
#wget -q https://go.dev/dl/go1.21.4.linux-arm64.tar.gz && \
|
||||||
@@ -133,6 +138,7 @@ else
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Monitoring API unit tests
|
||||||
$mymocha index4.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report4.html
|
$mymocha index4.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report4.html
|
||||||
if [ $? -eq 0 ]; then
|
if [ $? -eq 0 ]; then
|
||||||
echo OK
|
echo OK
|
||||||
@@ -141,15 +147,7 @@ else
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
$mymocha index5.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report5.html
|
# Monitoring SQL unit tests
|
||||||
if [ $? -eq 0 ]; then
|
|
||||||
echo OK
|
|
||||||
else
|
|
||||||
echo mocha index5.js
|
|
||||||
exit 1
|
|
||||||
fi
|
|
||||||
|
|
||||||
# Monitoring unit tests
|
|
||||||
psql ${PGSAIL_DB_URI} < sql/monitoring.sql > output/monitoring.sql.output
|
psql ${PGSAIL_DB_URI} < sql/monitoring.sql > output/monitoring.sql.output
|
||||||
diff sql/monitoring.sql.output output/monitoring.sql.output > /dev/null
|
diff sql/monitoring.sql.output output/monitoring.sql.output > /dev/null
|
||||||
#diff -u sql/monitoring.sql.output output/monitoring.sql.output | wc -l
|
#diff -u sql/monitoring.sql.output output/monitoring.sql.output | wc -l
|
||||||
@@ -162,6 +160,28 @@ else
|
|||||||
exit 1
|
exit 1
|
||||||
fi
|
fi
|
||||||
|
|
||||||
|
# Anonymous API unit tests
|
||||||
|
$mymocha index5.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report5.html
|
||||||
|
if [ $? -eq 0 ]; then
|
||||||
|
echo OK
|
||||||
|
else
|
||||||
|
echo mocha index5.js
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Anonymous SQL unit tests
|
||||||
|
psql ${PGSAIL_DB_URI} < sql/anonymous.sql > output/anonymous.sql.output
|
||||||
|
diff sql/anonymous.sql.output output/anonymous.sql.output > /dev/null
|
||||||
|
#diff -u sql/anonymous.sql.output output/anonymous.sql.output | wc -l
|
||||||
|
#echo 0
|
||||||
|
if [ $? -eq 0 ]; then
|
||||||
|
echo SQL anonymous.sql OK
|
||||||
|
else
|
||||||
|
echo SQL anonymous.sql FAILED
|
||||||
|
diff -u sql/anonymous.sql.output output/anonymous.sql.output
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
# Download and update openapi documentation
|
# Download and update openapi documentation
|
||||||
wget ${PGSAIL_API_URI} -O openapi.json
|
wget ${PGSAIL_API_URI} -O openapi.json
|
||||||
#echo 0
|
#echo 0
|
||||||
@@ -174,11 +194,11 @@ else
|
|||||||
fi
|
fi
|
||||||
|
|
||||||
# Generate and update mermaid schema documentation
|
# Generate and update mermaid schema documentation
|
||||||
/root/go/bin/mermerd --runConfig ../ERD/mermerdConfig.yaml
|
/root/go/bin/mermerd --runConfig ../docs/ERD/mermerdConfig.yaml
|
||||||
echo $?
|
echo $?
|
||||||
echo 0
|
echo 0
|
||||||
if [ $? -eq 0 ]; then
|
if [ $? -eq 0 ]; then
|
||||||
cp postgsail.md ../ERD/postgsail.md
|
cp postgsail.md ../docs/ERD/postgsail.md
|
||||||
echo postgsail.md OK
|
echo postgsail.md OK
|
||||||
else
|
else
|
||||||
echo postgsail.md FAILED
|
echo postgsail.md FAILED
|
||||||
|