135 Commits

Author SHA1 Message Date
xbgmsharp
852d2ff583 Release v0.0.10 2023-03-03 16:09:05 +01:00
xbgmsharp
7cf7905694 Update pushover link to work in prod env 2023-03-03 08:35:08 +01:00
xbgmsharp
0f8107a672 Update api.pushover_subscribe_link_fn and fix api.generate_otp_fn 2023-02-26 23:23:07 +01:00
xbgmsharp
77dec463d1 Add urlescape_py_fn to url encode using python 2023-02-26 22:57:44 +01:00
xbgmsharp
8ff1d0a8ed Allow user_role to access new api view total_info_view, stats_logs_view, stats_moorages_view 2023-02-26 21:09:13 +01:00
xbgmsharp
859788d98d Update api.export_logbook_gpx api.export_logbook_geojson
Update api.moorage_view
Add Create api.total_info_view
Add Comment on missing api view
Add security_invoker on stats view
2023-02-25 23:11:32 +01:00
xbgmsharp
62642ffbd6 Enforce OTP verification on login 2023-02-24 15:59:08 +01:00
xbgmsharp
c3760c8689 Allow UPSERT of otp code in generate_otp_fn 2023-02-24 15:58:36 +01:00
xbgmsharp
763c9ae802 Update versions fn and view
Add new fn public.has_vessel_fn()
Deprecated unused and bad api.vessels2_view,api.vessel_p_view
2023-02-24 15:57:32 +01:00
xbgmsharp
37abb3ae1f Minimum valid distance is less than 0.010.
Exclude new function from vessel registration.
2023-02-24 15:55:55 +01:00
xbgmsharp
a6da3cab0a Fix vessel_fn to use the latest location rather than the first known location 2023-02-15 16:24:11 +01:00
xbgmsharp
22f756b3a9 Update permissions to views 2023-02-14 19:04:38 +01:00
xbgmsharp
cb3e9d8e57 Update moorages_view and moorage_view with security invoker 2023-02-14 19:04:13 +01:00
xbgmsharp
1997fe5a81 Update logbook_update_geojson_fn, expose less properties in geojson 2023-02-14 12:22:58 +01:00
xbgmsharp
5a1451ff69 Improve process_logbook_queue_fn. Detect and remove stationary movement.
Add logbook_metrics_dwithin_fn function.
2023-02-13 23:56:39 +01:00
xbgmsharp
a18abec1f1 Update views owner permission using security_invoker and security_barrier 2023-02-09 16:47:02 +01:00
xbgmsharp
322c3ed4fb Update messages templates for email,pushover, telegram 2023-02-09 16:46:23 +01:00
xbgmsharp
d648d119cc Update API expose views with the latest pg15 feature security_invoker 2023-02-09 16:31:06 +01:00
xbgmsharp
9109474e8a Fix permission issue when vessel is not connected in public.check_jwt() 2023-02-07 14:49:32 +01:00
xbgmsharp
ca92a15eba boat-listing, make last_contact return null rather than an empty string 2023-02-07 11:19:30 +01:00
xbgmsharp
d745048a9c Update reverse_geocode_py to fall back based on more fields
Don't exit with error so we don't stop the cron process
2023-02-07 11:18:25 +01:00
xbgmsharp
6a0c15d23c process_logbook_queue_fn add more debug and disable unused function 2023-02-07 11:17:24 +01:00
xbgmsharp
fc01374441 Release v0.0.9 2023-02-06 21:54:26 +01:00
xbgmsharp
0ec3f7fe02 Sending null to no-value entry when no metadata is available 2023-02-06 21:38:02 +01:00
xbgmsharp
2bae8bd861 Fix and Update parameters check for auth functions 2023-02-06 21:37:19 +01:00
xbgmsharp
38d185d058 Add api.recover comment function 2023-02-05 01:18:24 +01:00
xbgmsharp
4342e29c69 Update vessel check, remove mmsi dependency to vessel_id 2023-02-05 01:16:26 +01:00
xbgmsharp
13d8ad9b3d Allow api_anonymous to execute api.recover and api.reset functions 2023-02-04 23:45:31 +01:00
xbgmsharp
caec91b7f2 Add api.recover and api.reset function to allow password reset 2023-02-04 23:44:47 +01:00
xbgmsharp
665a9d30e6 Fix/update vessel_fn sub query because of function owner 2023-02-04 23:42:38 +01:00
xbgmsharp
eb3a14bee4 Add reset query-string support to send_email_py 2023-02-04 23:41:10 +01:00
xbgmsharp
ba935d7520 Update new logbook entry template message
Add new email reset template message
2023-02-04 23:37:40 +01:00
xbgmsharp
11d136214c Disable health check for api (postgrest) and app (grafana) containers
Missing wget or curl to run the test locally
2023-02-02 00:45:14 +01:00
xbgmsharp
ddbeff7d7e Add docker healthy check for PostgreSQL 2023-02-01 23:59:22 +01:00
xbgmsharp
569700e1b3 Update README 2023-01-31 21:28:50 +01:00
xbgmsharp
93f8476d26 Update main process function to return a result.
Ensure the query is successful for process_logbook_queue_fn and process_stay_queue_fn
2023-01-31 21:17:49 +01:00
xbgmsharp
4eef5595bc Fix typo 2023-01-31 21:16:09 +01:00
xbgmsharp
cf9c67bb64 Update README 2023-01-29 22:29:47 +01:00
xbgmsharp
1968f86448 Add debug in send_email_py 2023-01-29 22:20:33 +01:00
xbgmsharp
552faa0a16 Add docker healthy check for Grafana 2023-01-28 21:47:05 +01:00
xbgmsharp
c0b6f17488 Add Grafana ENV Settings 2023-01-28 21:34:21 +01:00
xbgmsharp
1ab6501aad Update README 2023-01-28 21:33:30 +01:00
xbgmsharp
07280f1f67 pg_cron async job
Update Notification cron job
Fix cron_vacuum cron job
Disable all cron job for debug
2023-01-28 21:32:11 +01:00
xbgmsharp
d419a582b9 Remove debug helper for OTP 2023-01-28 21:28:02 +01:00
xbgmsharp
69c8ec17f9 Add connected_at on auth.accounts table 2023-01-28 21:25:12 +01:00
xbgmsharp
89d50b7a6a Update postgreSQL conf for PG15 2023-01-28 21:24:33 +01:00
xbgmsharp
976fc52e9a Update telegram obj queries to support group chat id 2022-12-28 22:06:10 +01:00
xbgmsharp
cb0b89c8f3 vessel function and view, remove the millisecond from the timestamp result 2022-12-28 22:04:50 +01:00
xbgmsharp
bcbcfa040d Update telegram object queries 2022-12-28 22:04:04 +01:00
xbgmsharp
b7857e0be6 Update email_otp message, remove recipient firstname 2022-12-28 22:03:40 +01:00
xbgmsharp
089876b62a Update api.metrics status to enum type and allow vessel to switch from sailing to motoring 2022-12-25 17:54:48 +01:00
xbgmsharp
fc9fb8769a Update monitoring offline and online to use send_notification_fn 2022-12-17 23:33:19 +01:00
xbgmsharp
3432d358d3 Add reverse_geoip_py_fn python3 function 2022-12-17 23:32:23 +01:00
xbgmsharp
340bda704e Update public.email_templates table 2022-12-17 23:17:15 +01:00
xbgmsharp
54156ae7c9 Update api.metrics to support a 2-dimension hypertable. Update api.metadata table with updated_at handled by the moddatetime extension 2022-12-17 23:15:22 +01:00
xbgmsharp
4c4f0bbd37 Update permissions for roles grafana_auth and grafana 2022-12-17 23:13:18 +01:00
xbgmsharp
b58fce186a Rename grafana container to app 2022-12-12 23:52:24 +01:00
xbgmsharp
c6c78ecffc Update pgsql/timescale grafana source 2022-12-12 23:51:57 +01:00
xbgmsharp
db0e493900 Add grafana config,dashboards,provisioning 2022-12-12 22:28:01 +01:00
xbgmsharp
dea5b8ddf7 Add grafana 2022-12-12 22:12:22 +01:00
xbgmsharp
e9e63fad50 Add ERD description 2022-12-12 22:11:52 +01:00
xbgmsharp
8b45a171e8 Update vessel_role permission for new api.metrics trigger 2022-12-12 16:21:42 +01:00
xbgmsharp
a0216dad6a Refactor metrics_trigger_fn on api.metrics trigger to avoid multiple stay or logbook active 2022-12-09 12:35:18 +01:00
xbgmsharp
ca5bffd88f Add tables MMSI MID Code Filtered by Flag of Registration 2022-12-09 12:34:23 +01:00
xbgmsharp
1dbf71064e Update reverse_geocode_py_fn to always return data; if name is null then fall back to address fields road,neighbourhood,suburb 2022-12-09 12:33:00 +01:00
xbgmsharp
6888953cbb Add OTP notification, Add OTP DELETE, Add api.pushover_subscribe_link_fn, Add auth.telegram_session_exists_fn 2022-12-06 21:41:14 +01:00
xbgmsharp
105d6b9113 Add AIS Ship Types tables 2022-12-06 21:40:09 +01:00
xbgmsharp
0c2e4b1d83 cron_vaccum_fn does not work 2022-12-05 23:31:45 +01:00
xbgmsharp
f8b1fb472a Remove mmsi dependency, add check for valid longitude,latitude, silently ignore NULL longitude,latitude 2022-12-05 23:29:44 +01:00
xbgmsharp
613ac5e29a Remove mmsi dependency and ensure longitude,latitude exist prior to post-processing 2022-12-05 23:23:51 +01:00
xbgmsharp
5ce5b606e9 Remove mmsi dependency, update to use vessel_id instead 2022-12-05 23:19:58 +01:00
xbgmsharp
0f59a31cdc Fix SQL query IMMUTABLE STRICT 2022-12-05 23:17:45 +01:00
xbgmsharp
58407a84e9 Update auth.vessel.mmsi type to NUMERIC, add constraint and remove dependency 2022-12-05 23:15:44 +01:00
xbgmsharp
9ae9553254 Allow to set password from env for role grafana_auth 2022-12-05 14:56:53 +01:00
xbgmsharp
494cc9a571 Add new function urlencode_py_fn()
Update debug for send_pushover and send_telegram
2022-12-02 14:36:01 +01:00
xbgmsharp
dbd29ca58a Add new variable PGSAIL_PUSHOVER_APP_URL 2022-12-02 11:50:33 +01:00
xbgmsharp
00cdd7ca18 Refactor web user notification
Add new cron job cron_new_notification
Add public.cron_process_new_notification_fn function
Add public.process_notification_queue_fn function
Update messages template table, align cron name with template notification name
2022-12-02 11:22:28 +01:00
xbgmsharp
34fe0898b2 Split public schema into files by type: tables, functions and functions in python 2022-11-30 21:15:47 +01:00
xbgmsharp
3522d3b9d7 Update email type to CITEXT, https://www.postgresql.org/docs/current/citext.html 2022-11-30 21:12:49 +01:00
xbgmsharp
d4f79e7f71 A large commit with new features (pushover, telegram, otp) and fixes
Update reverse_geocode_py_fn validate input
Update email_templates add new message type for pushover, telegram and otp
Update send_email_py_fn, make email From field human friendly
Add send_pushover_py_fn python send pushover message
Add send_telegram_py_fn python send telegram message
Add process_account_otp_validation_queue_fn process handle for email validation
Add send_notification_fn refactor notification system to support email,pushover,telegram
Update public.process_queue table
Add new_account_otp_validation_entry_fn trigger
Update postgrest pre db check_jwt to support row security level
2022-11-29 23:50:59 +01:00
xbgmsharp
4df4fa993a U 2022-11-29 23:25:51 +01:00
xbgmsharp
94f79080aa Add new app parameters for pushover and telegram
PGSAIL_PUSHOVER_APP_TOKEN -> app.pushover_app_token
PGSAIL_TELEGRAM_BOT_TOKEN -> app.telegram_bot_token
2022-11-29 23:24:39 +01:00
xbgmsharp
1a5c0f10c3 Fixed typo in vesselid for auth.vessels table 2022-11-29 23:20:01 +01:00
xbgmsharp
e6309875fb Limit connection to database to 100 2022-11-29 23:19:31 +01:00
xbgmsharp
2e269b9424 Update RLS to 'user.email' settings
Remove dependency to jwt for auth tables
2022-11-29 22:51:07 +01:00
xbgmsharp
40e25b1f8c Add Email OTP validation API endpoint 2022-11-29 22:49:50 +01:00
xbgmsharp
9eec9ad355 Add description to accounts and vessels triggers with moddatetime 2022-11-29 10:39:26 +01:00
xbgmsharp
90d2c3b3a0 Update OTP Code to a random 6 digit 2022-11-29 10:38:45 +01:00
xbgmsharp
d25f31ce0b Add a every 6 minute job cron_process_new_account_otp_validation_queue_fn, delay from cron_new_account 2022-11-28 22:30:55 +01:00
xbgmsharp
c8e722283c Fixed ERD images link 2022-11-28 22:18:55 +01:00
xbgmsharp
2095e9b561 Update images links 2022-11-28 22:13:53 +01:00
xbgmsharp
73addfa928 publish initial release of Entity-Relationship Diagram (ERD) 2022-11-28 22:11:13 +01:00
xbgmsharp
345f190f4e Add CRON for new account pending otp validation notification 2022-11-28 21:51:27 +01:00
xbgmsharp
0682f06ae9 Update update_user_preferences_fn to allow email or telegram user auth (to be improved) 2022-11-28 21:49:57 +01:00
xbgmsharp
8bc0fdaf17 Fixed auth.vessels table creation 2022-11-28 21:48:30 +01:00
xbgmsharp
ab1afeee42 Enable telegram bot auth 2022-11-28 21:47:28 +01:00
xbgmsharp
b6d60dd0d5 Update grafana_auth role description 2022-11-28 18:17:07 +01:00
xbgmsharp
295d0a0a5e Fix SQL query 2022-11-28 15:26:43 +01:00
xbgmsharp
a68a0ee3e3 Refactor auth tables (accounts,vessels)
Add unique userid column for jwt auth
Add unique vesselid column for jwt auth
Add new extensions citext,moddatetime
Update email column to citext type for fast queries
Add updated_at column to track changes, managed by the moddatetime extension
Update index tables (accounts,vessels)
2022-11-25 23:14:30 +01:00
xbgmsharp
ea7301e1ed Add primary support for new stays,moorages,stats,timelapse views 2022-11-25 22:37:08 +01:00
xbgmsharp
98f5d75429 Add grafana_auth apache proxy auth role 2022-11-25 22:33:54 +01:00
xbgmsharp
adc6799c93 Add comment on role 2022-11-25 22:24:40 +01:00
xbgmsharp
a865e91ce7 Allow anonymous role to execute telegram and pushover registration function 2022-11-25 22:22:29 +01:00
xbgmsharp
a64425b13f Add new CONSTRAINT on auth.vessels and auth.accounts tables
Add new index on auth.accounts table
2022-11-20 23:27:34 +01:00
xbgmsharp
0586d30381 Improve Row Level Security
Add Row Level Security for table auth.accounts
2022-11-20 23:25:06 +01:00
xbgmsharp
db1d7c63e2 Add function api.update_user_preferences_fn to allow user update their preferences via api 2022-11-20 23:22:29 +01:00
xbgmsharp
4acb4de539 Improve debug output 2022-11-20 23:20:19 +01:00
xbgmsharp
07043ddf08 Add new metadata index
Improve metrics debug output
2022-11-20 23:19:12 +01:00
xbgmsharp
bd05591205 Add a every 15 minute job cron_process_prune_otp_fn 2022-11-20 23:18:15 +01:00
xbgmsharp
95ff1d8ff2 Add missing foreign keys between api.metrics and api.metadata 2022-10-25 16:25:45 +02:00
xbgmsharp
e92515ba66 Fixed email notification,
Update conversion to Nautical Mile
2022-10-24 16:56:05 +02:00
xbgmsharp
8b8087e56d Release v0.0.8 2022-10-19 13:28:31 +02:00
xbgmsharp
7b7aae7dfe Limit scope of cron job using vessel.client_id
Ensure all queries to api.metrics are limited to a specific vessel using client_id
2022-10-19 13:25:49 +02:00
xbgmsharp
be27618dac Update scheduler role permissions 2022-10-19 13:19:03 +02:00
xbgmsharp
7fb24d8cae Add new parameter PGSAIL_APP_URL 2022-10-18 21:39:58 +02:00
xbgmsharp
07c7628973 Limit scope of process functions 2022-09-24 23:53:47 +02:00
xbgmsharp
e42e52eaf0 Update permissions for new API endpoint 2022-09-24 23:52:42 +02:00
xbgmsharp
97e739ffe9 Refactor user_settings 2022-09-24 23:32:17 +02:00
xbgmsharp
3fb2534263 Update README, fix typo in links 2022-09-24 23:16:24 +02:00
xbgmsharp
9e8009a764 Release v0.0.7 2022-09-21 11:06:23 +02:00
xbgmsharp
dca77c3293 Update permissions for new API endpoint 2022-09-21 09:58:38 +02:00
xbgmsharp
8af527f574 Allow user_role to execute some functions without a defined vessel 2022-09-21 09:57:53 +02:00
xbgmsharp
0f399293eb Add API endpoint for versions and vessels 2022-09-21 09:52:36 +02:00
xbgmsharp
57dfaf2158 Add new API endpoint to export logbook in GPX or GeoJSON format 2022-09-21 09:51:43 +02:00
xbgmsharp
3a2e091744 Update README 2022-09-21 08:58:09 +02:00
xbgmsharp
7c5bd21e80 Release v0.0.6 2022-09-01 19:33:00 +02:00
xbgmsharp
33af7bec1b Update permissions files 2022-09-01 19:31:25 +02:00
xbgmsharp
023ad56926 Add more api endpoint with dependencies 2022-09-01 19:27:11 +02:00
xbgmsharp
91cf679876 Update comment 2022-09-01 19:25:50 +02:00
xbgmsharp
1b81900036 Update permissions for log details view 2022-09-01 19:25:27 +02:00
xbgmsharp
91d4127405 Release v0.0.5 2022-08-28 23:10:39 +02:00
xbgmsharp
6315dca4b9 Update grafana ROLE permissions
Update vessel_role ROLE permissions
Update user_role ROLE permissions
Add Row Level Security on auth.vessels
2022-08-28 22:58:29 +02:00
xbgmsharp
c35f353329 Update app url in messages table
Add logbook_update_geojson_fn
Update process_logbook_queue_fn with geojson field
Update check_jwt() include 'vessel.mmsi' and 'vessel.name' in user_role and vessel_role current session
2022-08-28 22:58:07 +02:00
xbgmsharp
54942a7558 Add api.logs_view and api.log_view 2022-08-28 22:23:28 +02:00
xbgmsharp
9a86c9f4f5 Fix typo 2022-08-28 22:21:11 +02:00
31 changed files with 8239 additions and 1342 deletions

View File

@@ -1,15 +1,21 @@
# POSTGRESQL ENV Settings
POSTGRES_USER=username
POSTGRES_PASSWORD=password
POSTGRES_DB=postgres
# PostgSail ENV Settings
PGSAIL_AUTHENTICATOR_PASSWORD=password
PGSAIL_GRAFANA_PASSWORD=password
PGSAIL_GRAFANA_AUTH_PASSWORD=password
PGSAIL_EMAIL_FROM=root@localhost
PGSAIL_EMAIL_SERVER=localhost
#PGSAIL_EMAIL_USER= Comment if not use
#PGSAIL_EMAIL_PASS= Comment if not use
#PGSAIL_PUSHOVER_TOKEN= Comment if not use
#PGSAIL_PUSHOVER_APP= Comment if not use
#PGSAIL_PUSHOVER_APP_TOKEN= Comment if not use
#PGSAIL_PUSHOVER_APP_URL= Comment if not use
#PGSAIL_PGSAIL_TELEGRAM_BOT_TOKEN= Comment if not use
PGSAIL_APP_URL=http://localhost
# POSTGREST ENV Settings
PGRST_DB_URI=postgres://authenticator:${PGSAIL_AUTHENTICATOR_PASSWORD}@127.0.0.1:5432/signalk
PGRST_JWT_SECRET=_at_least_32__char__long__random
# Grafana ENV Settings
GF_SECURITY_ADMIN_PASSWORD=password

ERD/ERD_schema_api.png: new binary file, 104 KiB (not shown)
ERD/ERD_schema_auth.png: new binary file, 24 KiB (not shown)
ERD/ERD_schema_public.png: new binary file, 30 KiB (not shown)

ERD/README.md: new file, 35 lines

@@ -0,0 +1,35 @@
# PostgSail ERD
The Entity-Relationship Diagram (ERD) provides a graphical representation of database tables, columns, and inter-relationships. ERD can give sufficient information for the database administrator to follow when developing and maintaining the database.
## A global overview
![API Schema](https://raw.githubusercontent.com/xbgmsharp/postgsail/main/ERD/postgsail.pgerd.png "API Schema")
## Further
There are 3 main schemas:
- API Schema ERD
- tables
- metrics
- logbook
- ...
- functions
- ...
![API Schema](https://raw.githubusercontent.com/xbgmsharp/postgsail/main/ERD/ERD_schema_api.png "API Schema")
- Auth Schema ERD
- tables
- accounts
- vessels
- ...
- functions
- ...
![Auth Schema](https://raw.githubusercontent.com/xbgmsharp/postgsail/main/ERD/ERD_schema_auth.png "Auth Schema")
- Public Schema ERD
- tables
- app_settings
- tpl_messages
- ...
- functions
- ...
![Public Schema](https://raw.githubusercontent.com/xbgmsharp/postgsail/main/ERD/ERD_schema_public.png "Public Schema")

ERD/postgsail.pgerd.png: new binary file, 360 KiB (not shown)

View File

@@ -1,10 +1,7 @@
# PostgSail
Effortless cloud based solution for storing and sharing your SignalK data. Allow to effortlessly log your sails and monitor your boat.
Effortless cloud based solution for storing and sharing your SignalK data. Allow you to effortlessly log your sails and monitor your boat with historical data.
### Context
It is all about SQL, object-relational, time-series, spatial database with a bit python.
### Features
## Features
- Automatically log your voyages without manually starting or stopping a trip.
- Automatically capture the details of your voyages (boat speed, heading, wind speed, etc).
- Timelapse video your trips!
@@ -15,13 +12,29 @@ It is all about SQL, object-relational, time-series, spatial database with a bit
- Monitor your boat (position, depth, wind, temperature, battery charge status, etc.) remotely.
- History: view trends.
- Alert monitoring: get notification on low voltage or low fuel remotely.
- Notification via email or PushOver.
- Notification via email or PushOver, Telegram
- Offline mode
- Low Bandwidth mode
## Context
It is all about SQL, object-relational, time-series, spatial databases with a bit of python.
PostgSail is an open-source alternative to traditional vessel data management.
It is based on a well-known open-source technology stack: Signalk, PostgreSQL, TimescaleDB, PostGIS, PostgREST. It integrates perfectly with a standard monitoring stack like Grafana.
To understand the why and how, you might want to read [Why.md](https://github.com/xbgmsharp/postgsail/tree/main/Why.md)
## Architecture
For more clarity and visibility the complete [Entity-Relationship Diagram (ERD)](https://github.com/xbgmsharp/postgsail/tree/main/ERD/README.md) is exported as PNG and SVG files.
### Cloud
If you prefer not to install or administer your instance of PostgSail, hosted versions of PostgSail are available in the cloud of your choice.
The cloud advantage.
Hosted and fully managed options for PostgSail, designed for all your deployment and business needs. Register and try for free at https://iot.openplotter.cloud/.
## Using PostgSail
### pre-deploy configuration
To get these running, copy `.env.example` and rename to `.env` then set the values accordingly.
@@ -34,19 +47,19 @@ Notice, that `PGRST_JWT_SECRET` must be at least 32 characters long.
By default there is no network set and the postgresql data is stored in a docker volume.
You can update the default settings by editing `docker-compose.yml` to your needs.
Then simply execute:
```
```bash
$ docker-compose up
```
### PostgSail Configuration
### SQL Configuration
Check and update your postgsail settings via SQL in the table `app_settings`:
```
select * from app_settings;
```sql
SELECT * FROM app_settings;
```
```
```sql
UPDATE app_settings
SET
value = 'new_value'
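-- (The hunk is truncated here; in the full file the statement continues with a
--  WHERE clause selecting the setting to change. As an illustrative sketch only,
--  assuming a "name" column and a setting named 'app.email_server':)
--      WHERE name = 'app.email_server';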
@@ -58,7 +71,7 @@ Next, to ingest data from signalk, you need to install [signalk-postgsail](https
Also, if you like, you can import saillogger data using the postgsail helpers, [postgsail-helpers](https://github.com/xbgmsharp/postgsail-helpers).
You might want to import your influxdb1 data as weel, [outflux](https://github.com/timescale/outflux).
You might want to import your influxdb1 data as well, [outflux](https://github.com/timescale/outflux).
Any taker on influxdb2 to PostgSail? It is definitely possible.
Last, if you like, you can import the sample data from Signalk NMEA Plaka by running the tests.
@@ -87,20 +100,20 @@ $ curl http://localhost:3000/ -H 'Authorization: Bearer my_token_from_register_v
#### API main workflow
Check the [unit test sample](https://github.com/xbgmsharp/PostgSail/blob/main/tests/index.js).
Check the [unit test sample](https://github.com/xbgmsharp/postgsail/blob/main/tests/index.js).
### Docker dependencies
`docker-compose` is used to start environment dependencies. Dependencies consist of 2 containers:
`docker-compose` is used to start environment dependencies. Dependencies consist of 3 containers:
- `timescaledb-postgis` alias `db`, PostgreSQL with TimescaleDB extension along with the PostGIS extension.
- `postgrest` alias `api`, Standalone web server that turns your PostgreSQL database directly into a RESTful API.
- `grafana` alias `app`, visualize and monitor your data
### Optional docker images
- [Grafana](https://hub.docker.com/r/grafana/grafana), visualize and monitor your data
- [pgAdmin](https://hub.docker.com/r/dpage/pgadmin4), web UI to monitor and manage multiple PostgreSQL
- [Swagger](https://hub.docker.com/r/swaggerapi/swagger-ui), web UI to visualize documentation from PostgREST
```
docker-compose -f docker-compose-optional.yml up
```
@@ -113,9 +126,12 @@ Out of the box iot platform using docker with the following software:
- [PostGIS, a spatial database extender for PostgreSQL object-relational database.](https://postgis.net/)
- [Grafana, open observability platform | Grafana Labs](https://grafana.com)
### Releases & updates
PostgSail Release Notes & Future Plans: see planned and in-progress updates and detailed information about current and past releases. [PostgSail project](https://github.com/xbgmsharp?tab=projects)
### Support
To get support, please create new [issue](https://github.com/xbgmsharp/PostgSail/issues).
To get support, please create new [issue](https://github.com/xbgmsharp/postgsail/issues).
There are most likely security flaws and bugs.

View File

@@ -9,6 +9,8 @@ services:
- POSTGRES_DB=postgres
- TIMESCALEDB_TELEMETRY=off
- PGDATA=/var/lib/postgresql/data/pgdata
- TZ=UTC
network_mode: "host"
ports:
- "5432:5432"
volumes:
@@ -17,6 +19,12 @@ services:
logging:
options:
max-size: 10m
healthcheck:
test: ["CMD-SHELL", "sh -c 'pg_isready -U ${POSTGRES_USER} -d signalk'"]
interval: 60s
timeout: 10s
retries: 5
start_period: 100s
api:
image: postgrest/postgrest
@@ -36,6 +44,40 @@ services:
logging:
options:
max-size: 10m
#healthcheck:
# test: ["CMD-SHELL", "sh -c 'curl --fail http://localhost:3003/live || exit 1'"]
# interval: 60s
# timeout: 10s
# retries: 5
# start_period: 100s
app:
image: grafana/grafana:latest
container_name: app
restart: unless-stopped
volumes:
- data:/var/lib/grafana
- data:/var/log/grafana
- $PWD/grafana:/etc/grafana
ports:
- "3001:3000"
network_mode: "host"
env_file: .env
environment:
- GF_INSTALL_PLUGINS=pr0ps-trackmap-panel,fatcloud-windrose-panel
- GF_USERS_ALLOW_SIGN_UP=false
- GF_SMTP_ENABLED=false
depends_on:
- db
logging:
options:
max-size: 10m
#healthcheck:
# test: ["CMD-SHELL", "sh -c 'curl --fail http://localhost:3000/healthz || exit 1'"]
# interval: 60s
# timeout: 10s
# retries: 5
# start_period: 100s
volumes:
data: {}

View File

@@ -0,0 +1,461 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "Logs,Moorages,Stays",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 1,
"links": [
{
"asDropdown": false,
"icon": "external link",
"includeVars": true,
"keepTime": false,
"tags": [],
"targetBlank": true,
"title": "New link",
"tooltip": "",
"type": "dashboards",
"url": ""
}
],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "auto",
"displayMode": "auto",
"filterable": false,
"inspect": false
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "id"
},
"properties": [
{
"id": "custom.width",
"value": 41
}
]
},
{
"matcher": {
"id": "byName",
"options": "distance"
},
"properties": [
{
"id": "custom.width",
"value": 104
}
]
}
]
},
"gridPos": {
"h": 10,
"w": 24,
"x": 0,
"y": 0
},
"id": 2,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": []
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"editorMode": "code",
"format": "table",
"group": [],
"metricColumn": "none",
"rawQuery": true,
"rawSql": "with config as ( select set_config('vessel.id', '${boat}', false) )\nSELECT * from api.logs_view",
"refId": "A",
"select": [
[
{
"params": [
"_from_lat"
],
"type": "column"
}
]
],
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "logs",
"timeColumn": "_from_time",
"timeColumnType": "timestamp",
"where": []
}
],
"title": "Logbook",
"type": "table"
},
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "auto",
"displayMode": "auto",
"filterable": false,
"inspect": false
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "id"
},
"properties": [
{
"id": "custom.width",
"value": 41
}
]
},
{
"matcher": {
"id": "byName",
"options": "distance"
},
"properties": [
{
"id": "custom.width",
"value": 104
}
]
}
]
},
"gridPos": {
"h": 10,
"w": 24,
"x": 0,
"y": 10
},
"id": 5,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": []
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"editorMode": "code",
"format": "table",
"group": [],
"metricColumn": "none",
"rawQuery": true,
"rawSql": "with config as ( select set_config('vessel.id', '${boat}', false) )\nSELECT * from api.stays_view",
"refId": "A",
"select": [
[
{
"params": [
"_from_lat"
],
"type": "column"
}
]
],
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "logs",
"timeColumn": "_from_time",
"timeColumnType": "timestamp",
"where": []
}
],
"title": "Stays",
"type": "table"
},
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"custom": {
"align": "auto",
"displayMode": "auto",
"filterable": false,
"inspect": false
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
}
},
"overrides": [
{
"matcher": {
"id": "byName",
"options": "id"
},
"properties": [
{
"id": "custom.width",
"value": 41
}
]
},
{
"matcher": {
"id": "byName",
"options": "distance"
},
"properties": [
{
"id": "custom.width",
"value": 104
}
]
}
]
},
"gridPos": {
"h": 10,
"w": 24,
"x": 0,
"y": 20
},
"id": 6,
"options": {
"footer": {
"fields": "",
"reducer": [
"sum"
],
"show": false
},
"showHeader": true,
"sortBy": []
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"editorMode": "code",
"format": "table",
"group": [],
"metricColumn": "none",
"rawQuery": true,
"rawSql": "with config as ( select set_config('vessel.id', '${boat}', false) )\nselect * from api.moorages_view",
"refId": "A",
"select": [
[
{
"params": [
"_from_lat"
],
"type": "column"
}
]
],
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "logs",
"timeColumn": "_from_time",
"timeColumnType": "timestamp",
"where": []
}
],
"title": "Moorages",
"type": "table"
}
],
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"definition": "SELECT\n v.name AS __text,\n m.client_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
"description": "Vessel Name",
"hide": 0,
"includeAll": false,
"label": "Boat",
"multi": false,
"name": "boat",
"options": [],
"query": "SELECT\n v.name AS __text,\n m.client_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"type": "query"
}
]
},
"time": {
"from": "now-15d",
"to": "now"
},
"timepicker": {},
"timezone": "utc",
"title": "Logbook",
"uid": "E_FUkx9nk",
"version": 1,
"weekStart": ""
}
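The three panels above all use the same query pattern: a `set_config('vessel.id', ...)` CTE scopes the session to one boat, then a plain `SELECT` reads the matching per-vessel API view. As a minimal sketch, the same pattern can be reproduced outside Grafana (the vessel id below is a placeholder for the value Grafana injects via `${boat}`):

```sql
-- Scope the session to a single vessel, then read its view,
-- exactly as the dashboard's rawSql does.
with config as (
    select set_config('vessel.id', 'my-vessel-id', false)  -- placeholder vessel id
)
SELECT * FROM api.logs_view;
```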

View File

@@ -0,0 +1,734 @@
{
"annotations": {
"list": [
{
"builtIn": 1,
"datasource": {
"type": "datasource",
"uid": "grafana"
},
"enable": true,
"hide": true,
"iconColor": "rgba(0, 211, 255, 1)",
"name": "Annotations & Alerts",
"target": {
"limit": 100,
"matchAny": false,
"tags": [],
"type": "dashboard"
},
"type": "dashboard"
}
]
},
"description": "Monitoring view",
"editable": true,
"fiscalYearStartMonth": 0,
"graphTooltip": 0,
"id": 2,
"links": [
{
"asDropdown": false,
"icon": "external link",
"includeVars": true,
"keepTime": false,
"tags": [],
"targetBlank": true,
"title": "New link",
"tooltip": "",
"type": "dashboards",
"url": ""
}
],
"liveNow": false,
"panels": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "volt"
},
"overrides": []
},
"gridPos": {
"h": 4,
"w": 12,
"x": 0,
"y": 0
},
"id": 8,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"editorMode": "code",
"format": "time_series",
"group": [],
"metricColumn": "none",
"rawQuery": true,
"rawSql": "SELECT\n time AS \"time\",\n cast(metrics-> 'electrical.batteries.AUX2.voltage' AS numeric) AS AUX2Voltage\nFROM api.metrics\nWHERE\n $__timeFilter(time)\n AND client_id = '${boat}'\nORDER BY 1",
"refId": "A",
"select": [
[
{
"params": [
"_from_lat"
],
"type": "column"
}
]
],
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "trip_in_progress",
"timeColumn": "_from_time",
"timeColumnType": "timestamp",
"where": [
{
"name": "$__timeFilter",
"params": [],
"type": "macro"
}
]
}
],
"title": "AUX2 Voltage",
"type": "stat"
},
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "thresholds"
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "celsius"
},
"overrides": []
},
"gridPos": {
"h": 4,
"w": 12,
"x": 12,
"y": 0
},
"id": 7,
"options": {
"colorMode": "value",
"graphMode": "area",
"justifyMode": "auto",
"orientation": "auto",
"reduceOptions": {
"calcs": [
"lastNotNull"
],
"fields": "",
"values": false
},
"text": {},
"textMode": "auto"
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"editorMode": "code",
"format": "time_series",
"group": [],
"metricColumn": "none",
"rawQuery": true,
"rawSql": "SELECT\n time AS \"time\",\n cast(metrics-> 'environment.outside.temperature' AS numeric) - 273.15 AS OutsideTemperature\nFROM api.metrics\nWHERE\n $__timeFilter(time)\n AND client_id = '${boat}'\nORDER BY 1",
"refId": "A",
"select": [
[
{
"params": [
"_from_lat"
],
"type": "column"
}
]
],
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "trip_in_progress",
"timeColumn": "_from_time",
"timeColumnType": "timestamp",
"where": [
{
"name": "$__timeFilter",
"params": [],
"type": "macro"
}
]
}
],
"title": "Temperature",
"type": "stat"
},
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"graph": false,
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": true,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": [
{
"__systemRef": "hideSeriesFrom",
"matcher": {
"id": "byNames",
"options": {
"mode": "exclude",
"names": [
"aux2"
],
"prefix": "All except:",
"readOnly": true
}
},
"properties": [
{
"id": "custom.hideFrom",
"value": {
"graph": true,
"legend": false,
"tooltip": false
}
}
]
}
]
},
"gridPos": {
"h": 9,
"w": 24,
"x": 0,
"y": 4
},
"id": 4,
"options": {
"graph": {},
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "7.5.4",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"editorMode": "code",
"format": "time_series",
"group": [],
"metricColumn": "none",
"rawQuery": true,
"rawSql": "SELECT\n time AS \"time\",\n cast(metrics-> 'electrical.batteries.AUX2.voltage' AS numeric) AS AUX2,\n\tcast(metrics-> 'electrical.batteries.House.voltage' AS numeric) AS House,\n\tcast(metrics-> 'environment.rpi.pijuice.gpioVoltage' AS numeric) AS gpioVoltage,\n\tcast(metrics-> 'electrical.batteries.Seatalk.voltage' AS numeric) AS SeatalkVoltage,\n\tcast(metrics-> 'electrical.batteries.Starter.voltage' AS numeric) AS StarterVoltage,\n\tcast(metrics-> 'environment.rpi.pijuice.batteryVoltage' AS numeric) AS RPIBatteryVoltage,\n\tcast(metrics-> 'electrical.batteries.victronDevice.voltage' AS numeric) AS victronDeviceVoltage\nFROM api.metrics\nWHERE\n $__timeFilter(time)\n\tAND client_id = '${boat}'\nORDER BY 1",
"refId": "A",
"select": [
[
{
"params": [
"_from_lat"
],
"type": "column"
}
]
],
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "trip_in_progress",
"timeColumn": "_from_time",
"timeColumnType": "timestamp",
"where": [
{
"name": "$__timeFilter",
"params": [],
"type": "macro"
}
]
}
],
"title": "Voltage",
"type": "timeseries"
},
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"graph": false,
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": true,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 24,
"x": 0,
"y": 13
},
"id": 2,
"options": {
"graph": {},
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "single",
"sort": "none"
}
},
"pluginVersion": "7.5.4",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"editorMode": "code",
"format": "table",
"group": [],
"metricColumn": "none",
"rawQuery": true,
"rawSql": "SELECT\n time AS \"time\",\n cast(metrics-> 'environment.water.temperature' AS numeric) - 273.15 AS waterTemperature,\n\tcast(metrics-> 'environment.inside.temperature' AS numeric) - 273.15 AS insideTemperature,\n\tcast(metrics-> 'environment.outside.temperature' AS numeric) - 273.15 AS outsideTemperature\nFROM api.metrics\nWHERE\n $__timeFilter(time)\n AND client_id = '${boat}'\nORDER BY 1",
"refId": "A",
"select": [
[
{
"params": [
"_from_lat"
],
"type": "column"
}
]
],
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "trip_in_progress",
"timeColumn": "_from_time",
"timeColumnType": "timestamp",
"where": [
{
"name": "$__timeFilter",
"params": [],
"type": "macro"
}
]
}
],
"title": "Temperatures",
"type": "timeseries"
},
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"fieldConfig": {
"defaults": {
"color": {
"mode": "palette-classic"
},
"custom": {
"axisCenteredZero": false,
"axisColorMode": "text",
"axisLabel": "",
"axisPlacement": "auto",
"barAlignment": 0,
"drawStyle": "line",
"fillOpacity": 10,
"gradientMode": "none",
"hideFrom": {
"legend": false,
"tooltip": false,
"viz": false
},
"lineInterpolation": "linear",
"lineWidth": 1,
"pointSize": 5,
"scaleDistribution": {
"type": "linear"
},
"showPoints": "never",
"spanNulls": false,
"stacking": {
"group": "A",
"mode": "none"
},
"thresholdsStyle": {
"mode": "off"
}
},
"mappings": [],
"thresholds": {
"mode": "absolute",
"steps": [
{
"color": "green",
"value": null
},
{
"color": "red",
"value": 80
}
]
},
"unit": "short"
},
"overrides": []
},
"gridPos": {
"h": 9,
"w": 24,
"x": 0,
"y": 22
},
"id": 5,
"options": {
"legend": {
"calcs": [],
"displayMode": "list",
"placement": "bottom",
"showLegend": true
},
"tooltip": {
"mode": "multi",
"sort": "none"
}
},
"pluginVersion": "9.3.1",
"targets": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"editorMode": "code",
"format": "table",
"group": [],
"metricColumn": "none",
"rawQuery": true,
"rawSql": "with config as (select set_config('vessel.id', '${boat}', false) ) select * from api.monitoring_view",
"refId": "A",
"select": [
[
{
"params": [
"_from_lat"
],
"type": "column"
}
]
],
"sql": {
"columns": [
{
"parameters": [],
"type": "function"
}
],
"groupBy": [
{
"property": {
"type": "string"
},
"type": "groupBy"
}
],
"limit": 50
},
"table": "trip_in_progress",
"timeColumn": "_from_time",
"timeColumnType": "timestamp",
"where": [
{
"name": "$__timeFilter",
"params": [],
"type": "macro"
}
]
}
],
"title": "Title",
"type": "timeseries"
}
],
"refresh": false,
"schemaVersion": 37,
"style": "dark",
"tags": [],
"templating": {
"list": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"definition": " SELECT\n v.name AS __text,\n m.client_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
"description": "Vessel name",
"hide": 0,
"includeAll": false,
"label": "Boat",
"multi": false,
"name": "boat",
"options": [],
"query": " SELECT\n v.name AS __text,\n m.client_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
"refresh": 1,
"regex": "",
"skipUrlSync": false,
"sort": 0,
"type": "query"
}
]
},
"time": {
"from": "now-12h",
"to": "now"
},
"timepicker": {
"refresh_intervals": [
"1m",
"5m",
"15m",
"30m",
"1h",
"2h",
"1d"
]
},
"timezone": "utc",
"title": "Monitor",
"uid": "apqDcPjMz",
"version": 1,
"weekStart": ""
}
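The monitoring panels follow a second pattern: extract a Signal K path from the `metrics` jsonb column, cast it to numeric, and filter on the boat's `client_id`. A standalone sketch of the outside-temperature query (the `client_id` value is a placeholder; the `- 273.15` converts Kelvin to Celsius, as in the panel above):

```sql
-- Pull a single Signal K metric out of the jsonb column for one vessel.
SELECT time,
       cast(metrics->'environment.outside.temperature' AS numeric) - 273.15 AS outside_temp_c
FROM api.metrics
WHERE client_id = 'my-client-id'   -- placeholder; dashboards substitute '${boat}'
ORDER BY time DESC
LIMIT 10;
```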

File diff suppressed because it is too large.

File diff suppressed because it is too large.

grafana/grafana.ini: new file, 13 lines

@@ -0,0 +1,13 @@
[users]
allow_sign_up = false
auto_assign_org = true
auto_assign_org_role = Editor
[auth.proxy]
enabled = true
header_name = X-WEBAUTH-USER
header_property = email
auto_sign_up = true
enable_login_token = true
login_maximum_inactive_lifetime_duration = 12h
login_maximum_lifetime_duration = 1d

View File

@@ -0,0 +1,25 @@
apiVersion: 1
providers:
# <string> an unique provider name. Required
- name: 'PostgSail'
# <int> Org id. Default to 1
orgId: 1
# <string> name of the dashboard folder.
folder: 'PostgSail'
# <string> folder UID. will be automatically generated if not specified
#folderUid: ''
# <string> provider type. Default to 'file'
type: file
# <bool> disable dashboard deletion
disableDeletion: false
# <int> how often Grafana will scan for changed dashboards
updateIntervalSeconds: 10
# <bool> allow updating provisioned dashboards from the UI
allowUiUpdates: true
options:
# <string, required> path to dashboard files on disk. Required when using the 'file' type
path: /etc/grafana/dashboards/
# <bool> use folder names from filesystem to create folders in Grafana
foldersFromFilesStructure: true

View File

@@ -0,0 +1,18 @@
apiVersion: 1
datasources:
- name: PostgreSQL
isDefault: true
type: postgres
url: 172.30.0.1:5432
database: signalk
user: grafana
secureJsonData:
password: '${PGSAIL_GRAFANA_PASSWORD}'
jsonData:
sslmode: 'disable' # disable/require/verify-ca/verify-full
maxOpenConns: 10 # Grafana v5.4+
maxIdleConns: 2 # Grafana v5.4+
connMaxLifetime: 14400 # Grafana v5.4+
postgresVersion: 1400 # 903=9.3, 904=9.4, 905=9.5, 906=9.6, 1000=10
timescaledb: true

View File

@@ -7,20 +7,30 @@ echo $PGDATA
echo "${PGDATA}/postgresql.conf"
cat << 'EOF' >> ${PGDATA}/postgresql.conf
# PostgSail pg15
# Add settings for extensions here
shared_preload_libraries = 'timescaledb,pg_stat_statements,pg_cron'
# TimescaleDB - time series database
# Disable timescaleDB telemetry
timescaledb.telemetry_level=off
# pg_cron - Run periodic jobs in PostgreSQL
# pg_cron database
#cron.database_name = 'signalk'
# pg_cron connect via a unix domain socket
cron.host = '/var/run/postgresql/'
# Increase the number of available background workers from the default of 8
#max_worker_processes = 8
# monitoring https://www.postgresql.org/docs/current/runtime-config-statistics.html#GUC-TRACK-IO-TIMING
track_io_timing = on
stats_temp_directory = '/tmp'
track_functions = all
# Remove in pg-15, does not exist anymore
#stats_temp_directory = '/tmp'
# Postgrest
# PostgREST - turns your PostgreSQL database directly into a RESTful API
# send logs where the collector can access them
#log_destination = 'stderr'
log_destination = 'stderr'
# collect stderr output to log files
#logging_collector = on
# save logs in pg_log/ under the pg data directory
@@ -29,5 +39,19 @@ stats_temp_directory = '/tmp'
#log_filename = 'postgresql-%Y-%m-%d.log'
# log every kind of SQL statement
#log_statement = 'all'
# Do not enable log_statement as its log format will not be parsed by pgBadger.
# pgBadger - a fast PostgreSQL log analysis report
# log all the queries that are taking more than 1 second:
#log_min_duration_statement = 1000
#log_checkpoints = on
#log_connections = on
#log_disconnections = on
#log_lock_waits = on
#log_temp_files = 0
#log_autovacuum_min_duration = 0
#log_error_verbosity = default
# Francois
log_min_messages = NOTICE
EOF
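Because pg_cron and pg_stat_statements are loaded through `shared_preload_libraries`, these settings only take effect after a server restart. As a quick sanity check afterwards (a sketch, assuming the pg_cron extension has been created in the target database):

```sql
-- Confirm the preloaded libraries and list the scheduled pg_cron jobs.
SHOW shared_preload_libraries;
SELECT jobid, schedule, command FROM cron.job;
```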

View File

@@ -1,5 +1,5 @@
---------------------------------------------------------------------------
-- PostSail => Postgres + TimescaleDB + PostGIS + PostgREST
-- PostgSail => Postgres + TimescaleDB + PostGIS + PostgREST
--
-- Inspired from:
-- https://groups.google.com/g/signalk/c/W2H15ODCic4
@@ -15,7 +15,7 @@
-- Always store time in UTC
---------------------------------------------------------------------------
-- vessels signalk -(POST)-> metadata -> metadata_upsert -(trigger)-> metadata_upsert_fn (INSERT or UPDATE)
-- vessels signalk -(POST)-> metadata -> metadata_upsert -(trigger)-> metadata_upsert_trigger_fn (INSERT or UPDATE)
-- vessels signalk -(POST)-> metrics -> metrics -(trigger)-> metrics_fn new log,stay,moorage
---------------------------------------------------------------------------
@@ -47,8 +47,12 @@ select version();
-- Database
CREATE DATABASE signalk;
-- Limit connection to 100
ALTER DATABASE signalk WITH CONNECTION LIMIT = 100;
-- Set timezone to UTC
ALTER DATABASE signalk SET TIMEZONE='UTC';
-- connext to the DB
-- connect to the DB
\c signalk
-- Schema
@@ -66,6 +70,7 @@ CREATE EXTENSION IF NOT EXISTS plpgsql; -- PL/pgSQL procedural language
CREATE EXTENSION IF NOT EXISTS plpython3u; -- implements PL/Python based on the Python 3 language variant.
CREATE EXTENSION IF NOT EXISTS jsonb_plpython3u CASCADE; -- transform jsonb to python json type.
CREATE EXTENSION IF NOT EXISTS pg_stat_statements; -- provides a means for tracking planning and execution statistics of all SQL statements executed
CREATE EXTENSION IF NOT EXISTS "moddatetime"; -- provides functions for tracking last modification time
-- Trust plpython3u language by default
UPDATE pg_language SET lanpltrusted = true WHERE lanname = 'plpython3u';
@@ -73,55 +78,74 @@ UPDATE pg_language SET lanpltrusted = true WHERE lanname = 'plpython3u';
---------------------------------------------------------------------------
-- Tables
--
---------------------------------------------------------------------------
-- Metadata from signalk
CREATE TABLE IF NOT EXISTS api.metadata(
id SERIAL PRIMARY KEY,
name VARCHAR(150) NULL,
mmsi NUMERIC NULL,
client_id VARCHAR(255) UNIQUE NOT NULL,
length DOUBLE PRECISION NULL,
beam DOUBLE PRECISION NULL,
height DOUBLE PRECISION NULL,
ship_type NUMERIC NULL,
plugin_version VARCHAR(10) NOT NULL,
signalk_version VARCHAR(10) NOT NULL,
time TIMESTAMP WITHOUT TIME ZONE NOT NULL, -- should be rename to last_update !?
active BOOLEAN DEFAULT True, -- trigger monitor online/offline
-- vessel_id link auth.vessels with api.metadata
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW()
);
-- Description
COMMENT ON TABLE
api.metadata
IS 'Stores metadata from vessel';
COMMENT ON COLUMN api.metadata.active IS 'trigger monitor online/offline';
-- Index
CREATE INDEX metadata_client_id_idx ON api.metadata (client_id);
CREATE INDEX metadata_mmsi_idx ON api.metadata (mmsi);
CREATE INDEX metadata_name_idx ON api.metadata (name);
---------------------------------------------------------------------------
-- Metrics from signalk
-- Create vessel status enum
CREATE TYPE status AS ENUM ('sailing', 'motoring', 'moored', 'anchored');
-- Table api.metrics
CREATE TABLE IF NOT EXISTS api.metrics (
time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
client_id VARCHAR(255) NOT NULL,
client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
latitude DOUBLE PRECISION NULL,
longitude DOUBLE PRECISION NULL,
speedOverGround DOUBLE PRECISION NULL,
courseOverGroundTrue DOUBLE PRECISION NULL,
windSpeedApparent DOUBLE PRECISION NULL,
angleSpeedApparent DOUBLE PRECISION NULL,
status VARCHAR(100) NULL,
metrics jsonb NULL
status status NULL,
metrics jsonb NULL,
CONSTRAINT valid_client_id CHECK (length(client_id) > 10),
CONSTRAINT valid_latitude CHECK (latitude >= -90 and latitude <= 90),
CONSTRAINT valid_longitude CHECK (longitude >= -180 and longitude <= 180)
);
-- Description
COMMENT ON TABLE
api.metrics
IS 'Stores metrics from vessel';
COMMENT ON COLUMN api.metrics.latitude IS 'With CONSTRAINT but allow NULL value to be ignored silently by trigger';
COMMENT ON COLUMN api.metrics.longitude IS 'With CONSTRAINT but allow NULL value to be ignored silently by trigger';
-- Index todo!
-- Index
CREATE INDEX ON api.metrics (client_id, time DESC);
CREATE INDEX ON api.metrics (status, time DESC);
-- json index??
CREATE INDEX ON api.metrics using GIN (metrics);
-- timescaledb hypertable
SELECT create_hypertable('api.metrics', 'time');
---------------------------------------------------------------------------
-- Metadata from signalk
CREATE TABLE IF NOT EXISTS api.metadata(
id SERIAL PRIMARY KEY,
name VARCHAR(150) NULL,
mmsi VARCHAR(10) NULL,
client_id VARCHAR(255) UNIQUE NOT NULL,
length DOUBLE PRECISION NULL,
beam DOUBLE PRECISION NULL,
height DOUBLE PRECISION NULL,
ship_type VARCHAR(255) NULL,
plugin_version VARCHAR(10) NOT NULL,
signalk_version VARCHAR(10) NOT NULL,
time TIMESTAMP WITHOUT TIME ZONE NOT NULL, -- last_update
active BOOLEAN DEFAULT True -- monitor online/offline
);
-- Description
COMMENT ON TABLE
api.metadata
IS 'Stores metadata from vessel';
-- Index todo!
CREATE INDEX metadata_client_id_idx ON api.metadata (client_id);
--SELECT create_hypertable('api.metrics', 'time');
-- timescaledb hypertable with space partitions
SELECT create_hypertable('api.metrics', 'time', 'client_id',
number_partitions => 2,
chunk_time_interval => INTERVAL '7 day',
if_not_exists => true);
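A quick way to confirm the two-dimension (time plus client_id) hypertable was created as intended is to query the TimescaleDB information views; a sketch, assuming TimescaleDB 2.x:

```sql
-- Inspect the hypertable's dimensions and its chunks.
SELECT hypertable_name, column_name, num_partitions, time_interval
  FROM timescaledb_information.dimensions
 WHERE hypertable_name = 'metrics';
SELECT show_chunks('api.metrics');
```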
---------------------------------------------------------------------------
-- Logbook
@@ -151,6 +175,7 @@ CREATE TABLE IF NOT EXISTS api.logbook(
--track_geom Geometry(LINESTRING)
track_geom geometry(LINESTRING,4326) NULL,
track_geog geography(LINESTRING) NULL,
track_geojson JSON NULL,
_from_time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
_to_time TIMESTAMP WITHOUT TIME ZONE NULL,
distance NUMERIC, -- meters?
@@ -239,8 +264,8 @@ COMMENT ON COLUMN api.moorages.geog IS 'postgis geography type default SRID 4326
---------------------------------------------------------------------------
-- Stay Type
CREATE TABLE IF NOT EXISTS api.stays_at(
stay_code INTEGER,
description TEXT
stay_code INTEGER NOT NULL,
description TEXT NOT NULL
);
-- Description
COMMENT ON TABLE api.stays_at IS 'Stay Type';
@@ -261,11 +286,13 @@ CREATE FUNCTION metadata_upsert_trigger_fn() RETURNS trigger AS $metadata_upsert
metadata_id integer;
metadata_active boolean;
BEGIN
-- Set client_id to new value to allow RLS
PERFORM set_config('vessel.client_id', NEW.client_id, false);
-- UPSERT - Insert vs Update for Metadata
RAISE NOTICE 'metadata_upsert_trigger_fn';
SELECT m.id,m.active INTO metadata_id,metadata_active
SELECT m.id,m.active INTO metadata_id, metadata_active
FROM api.metadata m
WHERE (m.mmsi IS NOT NULL AND m.mmsi = NEW.mmsi)
WHERE (m.vessel_id IS NOT NULL AND m.vessel_id = current_setting('vessel.id', true))
OR (m.client_id IS NOT NULL AND m.client_id = NEW.client_id);
RAISE NOTICE 'metadata_id %', metadata_id;
IF metadata_id IS NOT NULL THEN
@@ -292,7 +319,11 @@ CREATE FUNCTION metadata_upsert_trigger_fn() RETURNS trigger AS $metadata_upsert
WHERE id = metadata_id;
RETURN NULL; -- Ignore insert
ELSE
-- Insert new vessel metadata
IF NEW.vessel_id IS NULL THEN
-- set vessel_id from jwt if not present in INSERT query
NEW.vessel_id = current_setting('vessel.id');
END IF;
-- Insert new vessel metadata and
RETURN NEW; -- Insert new vessel metadata
END IF;
END;
@@ -302,7 +333,16 @@ COMMENT ON FUNCTION
public.metadata_upsert_trigger_fn
IS 'process metadata from vessel, upsert';
-- Metadata notification for new vessel after insert
CREATE TRIGGER metadata_moddatetime
BEFORE UPDATE ON api.metadata
FOR EACH ROW
EXECUTE PROCEDURE moddatetime (updated_at);
-- Description
COMMENT ON TRIGGER metadata_moddatetime
ON api.metadata
IS 'Automatic update of updated_at on table modification';
-- FUNCTION Metadata notification for new vessel after insert
DROP FUNCTION IF EXISTS metadata_notification_trigger_fn;
CREATE FUNCTION metadata_notification_trigger_fn() RETURNS trigger AS $metadata_notification$
DECLARE
@@ -318,6 +358,9 @@ COMMENT ON FUNCTION
public.metadata_notification_trigger_fn
IS 'process metadata notification from vessel, monitoring_online';
---------------------------------------------------------------------------
-- Trigger metadata table
--
-- Metadata trigger BEFORE INSERT
CREATE TRIGGER metadata_upsert_trigger BEFORE INSERT ON api.metadata
FOR EACH ROW EXECUTE FUNCTION metadata_upsert_trigger_fn();
@@ -348,96 +391,144 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
stay_code integer;
logbook_id integer;
stay_id integer;
valid_status BOOLEAN;
BEGIN
RAISE NOTICE 'metrics_trigger_fn';
-- todo: Check we have the boat metadata?
-- Do we have a log in progress?
-- Do we have a stay in progress?
-- Set client_id to new value to allow RLS
PERFORM set_config('vessel.client_id', NEW.client_id, false);
--RAISE NOTICE 'metrics_trigger_fn client_id [%]', NEW.client_id;
-- Boat metadata are check using api.metrics REFERENCES to api.metadata
-- Fetch the latest entry to compare status against the new status to be insert
SELECT coalesce(m.status, 'moored'), m.time INTO previous_status, previous_time
FROM api.metrics m
WHERE m.client_id IS NOT NULL
AND m.client_id = NEW.client_id
ORDER BY m.time DESC LIMIT 1;
RAISE NOTICE 'Metrics Status, New:[%] Previous:[%]', NEW.status, previous_status;
--RAISE NOTICE 'Metrics Status, New:[%] Previous:[%]', NEW.status, previous_status;
IF previous_time = NEW.time THEN
-- Ignore entry if same time
RAISE WARNING 'Metrics Ignoring metric, duplicate time [%] = [%]', previous_time, NEW.time;
RETURN NULL;
END IF;
IF previous_time > NEW.time THEN
-- Ignore entry if new time is later than previous time
RAISE WARNING 'Metrics Ignoring metric, new time is older [%] > [%]', previous_time, NEW.time;
RETURN NULL;
END IF;
-- Check if latitude or longitude are null
IF NEW.latitude IS NULL OR NEW.longitude IS NULL THEN
-- Ignore entry if null latitude,longitude
RAISE WARNING 'Metrics Ignoring metric, null latitude,longitude [%] [%]', NEW.latitude, NEW.longitude;
RETURN NULL;
END IF;
-- Check if status is null
IF NEW.status IS NULL THEN
RAISE WARNING 'Invalid new status [%], update to default moored', NEW.status;
RAISE WARNING 'Metrics Unknow NEW.status from vessel [%], set to default moored', NEW.status;
NEW.status := 'moored';
END IF;
IF previous_status IS NULL THEN
RAISE WARNING 'Invalid previous status [%], update to default moored', previous_status;
previous_status := 'moored';
IF NEW.status = 'anchored' THEN
RAISE WARNING 'Metrics Unknow previous_status from vessel [%], set to default current status [%]', previous_status, NEW.status;
previous_status := NEW.status;
ELSE
RAISE WARNING 'Metrics Unknow previous_status from vessel [%], set to default status moored vs [%]', previous_status, NEW.status;
previous_status := 'moored';
END IF;
-- Add new stay as no previous entry exist
INSERT INTO api.stays
(client_id, active, arrived, latitude, longitude, stay_code)
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude, stay_code)
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude, 1)
RETURNING id INTO stay_id;
-- Add stay entry to process queue for further processing
INSERT INTO process_queue (channel, payload, stored) values ('new_stay', stay_id, now());
RAISE WARNING 'Insert first stay as no previous metrics exist, stay_id %', stay_id;
INSERT INTO process_queue (channel, payload, stored)
VALUES ('new_stay', stay_id, now());
RAISE WARNING 'Metrics Insert first stay as no previous metrics exist, stay_id %', stay_id;
END IF;
IF previous_time = NEW.time THEN
-- Ignore entry if same time
RAISE WARNING 'Ignoring metric, duplicate time [%] = [%]', previous_time, NEW.time;
-- Check if status is valid enum
SELECT NEW.status::name = any(enum_range(null::status)::name[]) INTO valid_status;
IF valid_status IS False THEN
-- Ignore entry if status is invalid
RAISE WARNING 'Metrics Ignoring metric, invalid status [%]', NEW.status;
RETURN NULL;
END IF;
--
-- Check the state and if any previous/current entry
IF previous_status <> NEW.status AND (NEW.status = 'sailing' OR NEW.status = 'motoring') THEN
-- If new status is sailing or motoring
IF previous_status::TEXT <> NEW.status::TEXT AND
( (NEW.status::TEXT = 'sailing' AND previous_status::TEXT <> 'motoring')
OR (NEW.status::TEXT = 'motoring' AND previous_status::TEXT <> 'sailing') ) THEN
RAISE WARNING 'Metrics Update status, try new logbook, New:[%] Previous:[%]', NEW.status, previous_status;
-- Start new log
RAISE WARNING 'Start new log, New:[%] Previous:[%]', NEW.status, previous_status;
RAISE NOTICE 'Inserting new trip [%]', NEW.status;
INSERT INTO api.logbook
(client_id, active, _from_time, _from_lat, _from_lng)
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude);
logbook_id := public.trip_in_progress_fn(NEW.client_id::TEXT);
IF logbook_id IS NULL THEN
INSERT INTO api.logbook
(client_id, active, _from_time, _from_lat, _from_lng)
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude)
RETURNING id INTO logbook_id;
RAISE WARNING 'Metrics Insert new logbook, logbook_id %', logbook_id;
ELSE
UPDATE api.logbook
SET
active = false,
_to_time = NEW.time,
_to_lat = NEW.latitude,
_to_lng = NEW.longitude
WHERE id = logbook_id;
RAISE WARNING 'Metrics Existing Logbook logbook_id [%] [%] [%]', logbook_id, NEW.status, NEW.time;
END IF;
-- End current stay
-- Fetch stay_id by client_id
SELECT id INTO stay_id
FROM api.stays s
WHERE s.client_id IS NOT NULL
AND s.client_id = NEW.client_id
AND active IS true
LIMIT 1;
RAISE NOTICE 'Updating stay status [%] [%] [%]', stay_id, NEW.status, NEW.time;
stay_id := public.stay_in_progress_fn(NEW.client_id::TEXT);
IF stay_id IS NOT NULL THEN
UPDATE api.stays
SET
active = false,
departed = NEW.time
WHERE id = stay_id;
RAISE WARNING 'Metrics Updating Stay end current stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
-- Add moorage entry to process queue for further processing
INSERT INTO process_queue (channel, payload, stored) values ('new_moorage', stay_id, now());
INSERT INTO process_queue (channel, payload, stored)
VALUES ('new_moorage', stay_id, now());
ELSE
RAISE WARNING 'Invalid stay_id [%] [%]', stay_id, NEW.time;
RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time;
END IF;
ELSIF previous_status <> NEW.status AND (NEW.status = 'moored' OR NEW.status = 'anchored') THEN
-- If new status is moored or anchored
ELSIF previous_status::TEXT <> NEW.status::TEXT AND
( (NEW.status::TEXT = 'moored' AND previous_status::TEXT <> 'anchored')
OR (NEW.status::TEXT = 'anchored' AND previous_status::TEXT <> 'moored') ) THEN
-- Start new stays
RAISE WARNING 'Start new stay, New:[%] Previous:[%]', NEW.status, previous_status;
RAISE NOTICE 'Inserting new stay [%]', NEW.status;
-- if metric status is anchored set stay_code accordingly
stay_code = 1;
IF NEW.status = 'anchored' THEN
stay_code = 2;
RAISE WARNING 'Metrics Update status, try new stay, New:[%] Previous:[%]', NEW.status, previous_status;
stay_id := public.stay_in_progress_fn(NEW.client_id::TEXT);
IF stay_id IS NULL THEN
RAISE WARNING 'Metrics Inserting new stay [%]', NEW.status;
-- If metric status is anchored set stay_code accordingly
stay_code = 1;
IF NEW.status = 'anchored' THEN
stay_code = 2;
END IF;
-- Add new stay
INSERT INTO api.stays
(client_id, active, arrived, latitude, longitude, stay_code)
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude, stay_code)
RETURNING id INTO stay_id;
-- Add stay entry to process queue for further processing
INSERT INTO process_queue (channel, payload, stored)
VALUES ('new_stay', stay_id, now());
ELSE
RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time;
UPDATE api.stays
SET
active = false,
departed = NEW.time
WHERE id = stay_id;
END IF;
-- Add new stay
INSERT INTO api.stays
(client_id, active, arrived, latitude, longitude, stay_code)
VALUES (NEW.client_id, true, NEW.time, NEW.latitude, NEW.longitude, stay_code)
RETURNING id INTO stay_id;
-- Add stay entry to process queue for further processing
INSERT INTO process_queue (channel, payload, stored) values ('new_stay', stay_id, now());
-- End current log/trip
-- Fetch logbook_id by client_id
SELECT id INTO logbook_id
FROM api.logbook l
WHERE l.client_id IS NOT NULL
AND l.client_id = NEW.client_id
AND active IS true
LIMIT 1;
logbook_id := public.trip_in_progress_fn(NEW.client_id::TEXT);
IF logbook_id IS NOT NULL THEN
-- todo check on time start vs end
RAISE NOTICE 'Updating trip status [%] [%] [%]', logbook_id, NEW.status, NEW.time;
RAISE WARNING 'Metrics Updating logbook status [%] [%] [%]', logbook_id, NEW.status, NEW.time;
UPDATE api.logbook
SET
active = false,
@@ -446,9 +537,10 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
_to_lng = NEW.longitude
WHERE id = logbook_id;
-- Add logbook entry to process queue for later processing
INSERT INTO process_queue (channel, payload, stored) values ('new_logbook', logbook_id, now());
INSERT INTO process_queue (channel, payload, stored)
VALUES ('new_logbook', logbook_id, now());
ELSE
RAISE WARNING 'Invalid logbook_id [%] [%]', logbook_id, NEW.time;
RAISE WARNING 'Metrics Invalid logbook_id [%] [%]', logbook_id, NEW.time;
END IF;
END IF;
RETURN NEW; -- Finally insert the actual new metric
@@ -468,10 +560,13 @@ COMMENT ON TRIGGER
metrics_trigger ON api.metrics
IS 'BEFORE INSERT ON api.metrics run function metrics_trigger_fn';
---------------------------------------------------------------------------
-- API helper functions
--
---------------------------------------------------------------------------
---------------------------------------------------------------------------
-- Functions API schema
-- Export a log entry to geojson
DROP FUNCTION IF EXISTS api.export_logbook_geojson_point_fn;
CREATE OR REPLACE FUNCTION api.export_logbook_geojson_point_fn(IN _id INTEGER, OUT geojson JSON) RETURNS JSON AS $export_logbook_geojson_point$
@@ -527,7 +622,7 @@ CREATE FUNCTION api.export_logbook_geojson_linestring_fn(IN _id INTEGER) RETURNS
geojson json;
BEGIN
-- If _id is not NULL and > 0
SELECT ST_AsGeoJSON(l.track_geom) INTO geojson
SELECT ST_AsGeoJSON(l.*) INTO geojson
FROM api.logbook l
WHERE l.id = _id;
RETURN geojson;
@@ -538,9 +633,100 @@ COMMENT ON FUNCTION
api.export_logbook_geojson_linestring_fn
IS 'Export a log entry to geojson feature linestring';
-- export_logbook_geojson_fn
DROP FUNCTION IF EXISTS api.export_logbook_geojson_fn;
CREATE FUNCTION api.export_logbook_geojson_fn(IN _id integer, OUT geojson JSON) RETURNS JSON AS $export_logbook_geojson$
-- validate with geojson.io
DECLARE
logbook_rec record;
BEGIN
-- If _id is not NULL and > 0
IF _id IS NULL OR _id < 1 THEN
RAISE WARNING '-> export_logbook_geojson_fn invalid input %', _id;
RETURN;
END IF;
-- Gather log details
SELECT * INTO logbook_rec
FROM api.logbook WHERE id = _id;
-- Ensure the query is successful
IF logbook_rec.client_id IS NULL THEN
RAISE WARNING '-> export_logbook_geojson_fn invalid logbook %', _id;
RETURN;
END IF;
geojson := logbook_rec.track_geojson;
END;
$export_logbook_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.export_logbook_geojson_fn
IS 'Export a log entry to geojson feature linestring and multipoint';
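-- Example usage (illustrative, assumes a logbook with id 1 exists):
--   SELECT api.export_logbook_geojson_fn(1);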
-- Generate GPX XML file output
-- https://opencpn.org/OpenCPN/info/gpxvalidation.html
--
DROP FUNCTION IF EXISTS api.export_logbook_gpx_fn;
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_fn(IN _id INTEGER) RETURNS pg_catalog.xml
AS $export_logbook_gpx$
DECLARE
log_rec record;
BEGIN
-- If _id is not NULL and > 0
IF _id IS NULL OR _id < 1 THEN
RAISE WARNING '-> export_logbook_gpx_fn invalid input %', _id;
RETURN '';
END IF;
-- Gather log details _from_time and _to_time
SELECT * INTO log_rec
FROM
api.logbook l
WHERE l.id = _id;
-- Ensure the query is successful
IF log_rec.client_id IS NULL THEN
RAISE WARNING '-> export_logbook_gpx_fn invalid logbook %', _id;
RETURN '';
END IF;
-- Generate XML
RETURN xmlelement(name gpx,
xmlattributes( '1.1' as version,
'PostgSAIL' as creator,
'http://www.topografix.com/GPX/1/1' as xmlns,
'http://www.opencpn.org' as "xmlns:opencpn",
'http://www.w3.org/2001/XMLSchema-instance' as "xmlns:xsi",
'http://www.garmin.com/xmlschemas/GpxExtensions/v3' as "xmlns:gpxx",
'http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd http://www.garmin.com/xmlschemas/GpxExtensions/v3 http://www8.garmin.com/xmlschemas/GpxExtensionsv3.xsd' as "xsi:schemaLocation"),
xmlelement(name trk,
xmlelement(name name, 'Track Name'),
xmlelement(name desc, 'Track Description'),
xmlelement(name link, xmlattributes(('https://openplotter.cloud/log/' || _id) as href),
xmlelement(name text, 'Link name')),
xmlelement(name extensions, xmlelement(name "opencpn:guid", uuid_generate_v4()),
xmlelement(name "opencpn:viz", '1'),
xmlelement(name "opencpn:start", log_rec._from_time),
xmlelement(name "opencpn:end", log_rec._to_time)
),
xmlelement(name trkseg, xmlagg(
xmlelement(name trkpt,
xmlattributes(latitude as lat, longitude as lon),
xmlelement(name time, time)
)))))::pg_catalog.xml
FROM api.metrics m
WHERE m.latitude IS NOT NULL
AND m.longitude IS NOT NULL
AND m.time >= log_rec._from_time::TIMESTAMP WITHOUT TIME ZONE
AND m.time <= log_rec._to_time::TIMESTAMP WITHOUT TIME ZONE
AND client_id = log_rec.client_id;
-- ERROR: column "m.time" must appear in the GROUP BY clause or be used in an aggregate function at character 2304
--ORDER BY m.time ASC;
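-- Note (suggested alternative, not part of this commit): the ordering can be pushed
-- into the aggregate itself, which avoids the GROUP BY error above, e.g.
--   xmlagg(xmlelement(name trkpt,
--       xmlattributes(latitude as lat, longitude as lon),
--       xmlelement(name time, time)
--   ) ORDER BY m.time ASC)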
END;
$export_logbook_gpx$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.export_logbook_gpx_fn
IS 'Export a log entry to GPX XML format';
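-- Example usage (illustrative, assumes a logbook with id 1 exists):
--   SELECT api.export_logbook_gpx_fn(1);
-- The output can be checked against the GPX validation notes linked above.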
-- Find all log from and to moorage geopoint within 100m
DROP FUNCTION IF EXISTS api.find_log_from_moorage_fn;
CREATE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER) RETURNS void AS $find_log_from_moorage$
CREATE OR REPLACE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER) RETURNS void AS $find_log_from_moorage$
DECLARE
moorage_rec record;
logbook_rec record;
@@ -573,7 +759,7 @@ COMMENT ON FUNCTION
-- Find all stay within 100m of moorage geopoint
DROP FUNCTION IF EXISTS api.find_stay_from_moorage_fn;
CREATE FUNCTION api.find_stay_from_moorage_fn(IN _id INTEGER) RETURNS void AS $find_stay_from_moorage$
CREATE OR REPLACE FUNCTION api.find_stay_from_moorage_fn(IN _id INTEGER) RETURNS void AS $find_stay_from_moorage$
DECLARE
moorage_rec record;
stay_rec record;
@@ -602,9 +788,82 @@ COMMENT ON FUNCTION
api.find_stay_from_moorage_fn
IS 'Find all stay within 100m of moorage geopoint';
-- trip_in_progress_fn
DROP FUNCTION IF EXISTS public.trip_in_progress_fn;
CREATE FUNCTION public.trip_in_progress_fn(IN _client_id TEXT) RETURNS INT AS $trip_in_progress$
DECLARE
logbook_id INT := NULL;
BEGIN
SELECT id INTO logbook_id
FROM api.logbook l
WHERE l.client_id IS NOT NULL
AND l.client_id = _client_id
AND active IS true
LIMIT 1;
RETURN logbook_id;
END;
$trip_in_progress$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.trip_in_progress_fn
IS 'trip_in_progress';
-- stay_in_progress_fn
DROP FUNCTION IF EXISTS public.stay_in_progress_fn;
CREATE FUNCTION public.stay_in_progress_fn(IN _client_id TEXT) RETURNS INT AS $stay_in_progress$
DECLARE
stay_id INT := NULL;
BEGIN
SELECT id INTO stay_id
FROM api.stays s
WHERE s.client_id IS NOT NULL
AND s.client_id = _client_id
AND active IS true
LIMIT 1;
RETURN stay_id;
END;
$stay_in_progress$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.stay_in_progress_fn
IS 'stay_in_progress';
-- logs_by_month_fn
DROP FUNCTION IF EXISTS api.logs_by_month_fn;
CREATE FUNCTION api.logs_by_month_fn(OUT charts JSONB) RETURNS JSONB AS $logs_by_month$
DECLARE
data JSONB;
BEGIN
-- Query logs by month
SELECT json_object_agg(month,count) INTO data
FROM (
SELECT
to_char(date_trunc('month', _from_time), 'MM') as month,
count(*) as count
FROM api.logbook
GROUP BY month
ORDER BY month
) AS t;
-- Merge jsonb to get all 12 months
SELECT '{"01": 0, "02": 0, "03": 0, "04": 0, "05": 0, "06": 0, "07": 0, "08": 0, "09": 0, "10": 0, "11": 0,"12": 0}'::jsonb ||
data::jsonb INTO charts;
END;
$logs_by_month$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.logs_by_month_fn
IS 'logbook by month for web charts';
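-- Example usage (illustrative):
--   SELECT api.logs_by_month_fn();
-- Returns a JSONB object keyed by month, e.g. {"01": 0, "02": 3, ..., "12": 0},
-- with the zero defaults above filled in for months without logs.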
---------------------------------------------------------------------------
-- API helper views
--
---------------------------------------------------------------------------
---------------------------------------------------------------------------
-- Views
--
-- Views are invoked with the privileges of the view owner,
-- make the user_role the views owner.
---------------------------------------------------------------------------
CREATE VIEW first_metric AS
SELECT *
FROM api.metrics
@@ -625,24 +884,71 @@ CREATE VIEW stay_in_progress AS
FROM api.stays
WHERE active IS true;
-- list all json keys from api.metrics.metric jsonb
--select m.time,jsonb_object_keys(m.metrics) from last_metric m where m.client_id = 'vessels.urn:mrn:imo:mmsi:787654321';
-- TODO: Use materialized views instead as it is not live data
-- Logs web view
DROP VIEW IF EXISTS api.logs_view;
CREATE VIEW api.logs_view AS
SELECT id,name,_from,_to,_from_time,_to_time,distance,duration
FROM api.logbook
WHERE _to_time IS NOT NULL
ORDER BY _from_time DESC;
CREATE OR REPLACE VIEW api.logs_view WITH (security_invoker=true,security_barrier=true) AS
SELECT id,
name as "Name",
_from as "From",
_from_time as "Started",
_to as "To",
_to_time as "Ended",
distance as "Distance",
duration as "Duration"
FROM api.logbook l
WHERE _to_time IS NOT NULL
ORDER BY _from_time DESC;
-- Description
COMMENT ON VIEW
api.logs_view
IS 'Logs web view';
-- Initial try of MATERIALIZED VIEW
CREATE MATERIALIZED VIEW api.logs_mat_view AS
SELECT id,
name as "Name",
_from as "From",
_from_time as "Started",
_to as "To",
_to_time as "Ended",
distance as "Distance",
duration as "Duration"
FROM api.logbook l
WHERE _to_time IS NOT NULL
ORDER BY _from_time DESC;
DROP VIEW IF EXISTS api.log_view;
CREATE OR REPLACE VIEW api.log_view WITH (security_invoker=true,security_barrier=true) AS
SELECT id,
name as "Name",
_from as "From",
_from_time as "Started",
_to as "To",
_to_time as "Ended",
distance as "Distance",
duration as "Duration",
notes as "Notes",
track_geojson as geojson,
avg_speed as avg_speed,
max_speed as max_speed,
max_wind_speed as max_wind_speed
FROM api.logbook l
WHERE _to_time IS NOT NULL
ORDER BY _from_time DESC;
-- Description
COMMENT ON VIEW
api.log_view
IS 'Log web view';
-- Stays web view
-- TODO group by month
DROP VIEW IF EXISTS api.stays_view;
CREATE VIEW api.stays_view AS
SELECT
CREATE VIEW api.stays_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
SELECT id,
concat(
extract(DAYS FROM (s.departed-s.arrived)::interval),
' days',
@@ -657,7 +963,7 @@ CREATE VIEW api.stays_view AS
s.name AS Moorage,
s.arrived AS Arrived,
s.departed AS Departed,
sa.description AS "Stayed at",
sa.description AS Stayed_at,
(s.departed-s.arrived) AS Duration
FROM api.stays s, api.stays_at sa
WHERE departed is not null
@@ -669,6 +975,36 @@ COMMENT ON VIEW
api.stays_view
IS 'Stays web view';
DROP VIEW IF EXISTS api.stay_view;
CREATE VIEW api.stay_view WITH (security_invoker=true,security_barrier=true) AS -- TODO missing arrival/departure from
SELECT id,
concat(
extract(DAYS FROM (s.departed-s.arrived)::interval),
' days',
--DATE_TRUNC('day', s.departed-s.arrived),
' stay at ',
s.name,
' in ',
RTRIM(TO_CHAR(s.departed, 'Month')),
' ',
TO_CHAR(s.departed, 'YYYY')
) as Name,
s.name AS Moorage,
(s.departed-s.arrived) AS Duration,
sa.description AS "Stayed at",
s.arrived AS "Arrival Time",
s.departed AS "Departure Time",
s.notes AS "Notes"
FROM api.stays s, api.stays_at sa
WHERE departed is not null
AND s.name is not null
AND s.stay_code = sa.stay_code
ORDER BY s.arrived DESC;
-- Description
COMMENT ON VIEW
api.stay_view
IS 'Stay web view';
-- Moorages web view
-- TODO: this is wrong, using distinct(m.name); should use the PostGIS geog feature instead
--DROP VIEW IF EXISTS api.moorages_view_old;
@@ -687,25 +1023,42 @@ COMMENT ON VIEW
-- the good way?
DROP VIEW IF EXISTS api.moorages_view;
CREATE OR REPLACE VIEW api.moorages_view AS
SELECT
CREATE OR REPLACE VIEW api.moorages_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
SELECT m.id,
m.name AS Moorage,
sa.description AS "Default Stay",
EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS "Total Stay",
m.reference_count AS "Arrivals & Departures",
sa.description AS Default_Stay,
EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS Total_Stay, -- in days
m.reference_count AS Arrivals_Departures,
m.geog
-- m.stay_duration,
-- justify_hours ( m.stay_duration )
FROM api.moorages m, api.stays_at sa
WHERE m.name is not null
AND m.stay_code = sa.stay_code
GROUP BY m.name,sa.description,m.stay_duration,m.reference_count,m.geog
GROUP BY m.id,m.name,sa.description,m.stay_duration,m.reference_count,m.geog
-- ORDER BY 4 DESC;
ORDER BY m.reference_count DESC;
-- Description
COMMENT ON VIEW
api.moorages_view
IS 'Moorages web view';
IS 'Moorages listing web view';
DROP VIEW IF EXISTS api.moorage_view;
CREATE OR REPLACE VIEW api.moorage_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
SELECT id,
m.name AS Name,
m.stay_code AS Default_Stay,
m.home_flag AS Home,
EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS Total_Stay,
m.reference_count AS Arrivals_Departures,
m.notes,
m.geog
FROM api.moorages m
WHERE m.name is not null;
-- Description
COMMENT ON VIEW
api.moorage_view
IS 'Moorage details web view';
-- All moorage in 100 meters from the start of a logbook.
-- ST_DistanceSphere Returns minimum distance in meters between two lon/lat points.
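-- Example (illustrative sketch, coordinates are placeholders):
--   SELECT m.id, m.name FROM api.moorages m
--   WHERE ST_DistanceSphere(m.geog::geometry, ST_SetSRID(ST_MakePoint(14.3040, 48.3058), 4326)) <= 100;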
@@ -731,7 +1084,7 @@ COMMENT ON VIEW
----> select sum(l.duration) as "Total Time Underway" from api.logbook l;
-- Longest Nonstop Sail from logbook, eg longest trip duration and distance
----> select max(l.duration),max(l.distance) from api.logbook l;
CREATE VIEW api.stats_logs_view AS -- todo
CREATE VIEW api.stats_logs_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
WITH
meta AS (
SELECT m.name FROM api.metadata m ),
@@ -754,6 +1107,9 @@ CREATE VIEW api.stats_logs_view AS -- todo
lm.time AS last,
l.*
FROM first_metric fm, last_metric lm, logbook l, meta m;
COMMENT ON VIEW
api.stats_logs_view
IS 'Statistics Logs web view';
-- Home Ports / Unique Moorages
----> select count(*) as "Home Ports" from api.moorages m where home_flag is true;
@@ -767,9 +1123,40 @@ CREATE VIEW api.stats_logs_view AS -- todo
----> select sum(m.stay_duration) as "Time Spent Away" from api.moorages m where home_flag is false;
-- Time Spent Away order by, group by stay_code (Dock, Anchor, Mooring Buoys, Unclassified)
----> select sa.description,sum(m.stay_duration) as "Time Spent Away" from api.moorages m, api.stays_at sa where home_flag is false AND m.stay_code = sa.stay_code group by m.stay_code,sa.description order by m.stay_code;
CREATE VIEW api.stats_moorages_view AS -- todo
select *
from api.moorages;
CREATE VIEW api.stats_moorages_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
WITH
home_ports AS (
select count(*) as home_ports from api.moorages m where home_flag is true
),
unique_moorage AS (
select count(*) as unique_moorage from api.moorages m
),
time_at_home_ports AS (
select sum(m.stay_duration) as time_at_home_ports from api.moorages m where home_flag is true
),
time_spent_away AS (
select sum(m.stay_duration) as time_spent_away from api.moorages m where home_flag is false
)
SELECT
home_ports.home_ports as "Home Ports",
unique_moorage.unique_moorage as "Unique Moorages",
time_at_home_ports.time_at_home_ports "Time Spent at Home Port(s)",
time_spent_away.time_spent_away as "Time Spent Away"
FROM home_ports, unique_moorage, time_at_home_ports, time_spent_away;
COMMENT ON VIEW
api.stats_moorages_view
IS 'Statistics Moorages web view';
CREATE VIEW api.stats_moorages_away_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
SELECT sa.description,sum(m.stay_duration) as time_spent_away_by
FROM api.moorages m, api.stays_at sa
WHERE home_flag IS false
AND m.stay_code = sa.stay_code
GROUP BY m.stay_code,sa.description
ORDER BY m.stay_code;
COMMENT ON VIEW
api.stats_moorages_away_view
IS 'Statistics Moorages Time Spent Away web view';
--CREATE VIEW api.stats_view AS -- todo
-- WITH
@@ -780,18 +1167,21 @@ CREATE VIEW api.stats_moorages_view AS -- todo
-- SELECT
-- l.*,
-- m.*
-- FROM logs l, moorages m;
-- FROM logs l, moorages m;
--COMMENT ON VIEW
-- api.stats_moorages_away_view
-- IS 'Statistics Moorages Time Spent Away web view';
-- global timelapse
-- TODO
CREATE VIEW timelapse AS -- todo
CREATE VIEW timelapse AS -- TODO
SELECT latitude, longitude from api.metrics;
-- View main monitoring for grafana
-- LAST Monitoring data from json!
CREATE VIEW api.monitoring AS
-- View main monitoring for web app
CREATE VIEW api.monitoring_view WITH (security_invoker=true,security_barrier=true) AS
SELECT
time AS "time",
(NOW() AT TIME ZONE 'UTC' - time) > INTERVAL '70 MINUTES' as offline,
metrics-> 'environment.water.temperature' AS waterTemperature,
metrics-> 'environment.inside.temperature' AS insideTemperature,
metrics-> 'environment.outside.temperature' AS outsideTemperature,
@@ -800,9 +1190,21 @@ CREATE VIEW api.monitoring AS
metrics-> 'environment.inside.humidity' AS insideHumidity,
metrics-> 'environment.outside.humidity' AS outsideHumidity,
metrics-> 'environment.outside.pressure' AS outsidePressure,
metrics-> 'environment.inside.pressure' AS insidePressure
metrics-> 'environment.inside.pressure' AS insidePressure,
jsonb_build_object(
'type', 'Feature',
'geometry', ST_AsGeoJSON(st_makepoint(longitude,latitude))::jsonb,
'properties', jsonb_build_object(
'name', current_setting('vessel.name', false),
'latitude', m.latitude,
'longitude', m.longitude
)::jsonb ) AS geojson,
current_setting('vessel.name', false) AS name
FROM api.metrics m
ORDER BY time DESC LIMIT 1;
COMMENT ON VIEW
api.monitoring_view
IS 'Monitoring web view';
CREATE VIEW api.monitoring_humidity AS
SELECT
@@ -817,7 +1219,7 @@ CREATE VIEW api.monitoring_humidity AS
-- View main monitoring for grafana
-- LAST Monitoring data from json!
CREATE VIEW api.monitorin_temperatures AS
CREATE VIEW api.monitoring_temperatures AS
SELECT
time AS "time",
metrics-> 'environment.water.temperature' AS waterTemperature,
@@ -829,7 +1231,7 @@ CREATE VIEW api.monitorin_temperatures AS
-- json key regexp
-- https://stackoverflow.com/questions/38204467/selecting-for-a-jsonb-array-contains-regex-match
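-- Example (illustrative): list metric keys matching a battery voltage pattern
--   SELECT DISTINCT k FROM api.metrics m, jsonb_object_keys(m.metrics) AS k
--   WHERE k ~ '^electrical\.batteries\..+\.voltage$';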
-- Last voltage data from json!
CREATE VIEW api.voltage AS
CREATE VIEW api.monitoring_voltage AS
SELECT
time AS "time",
cast(metrics-> 'electrical.batteries.AUX2.voltage' AS numeric) AS AUX2,
@@ -842,55 +1244,14 @@ CREATE VIEW api.voltage AS
FROM api.metrics m
ORDER BY time DESC LIMIT 1;
---------------------------------------------------------------------------
-- API helper functions
--
DROP FUNCTION IF EXISTS api.export_logbook_gpx_py_fn;
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_py_fn(IN _id INTEGER) RETURNS XML
AS $export_logbook_gpx_py$
import uuid
# BEGIN GPX XML format
gpx_data = f"""<?xml version="1.0"?>
<gpx version="1.1" creator="PostgSAIL" xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.topografix.com/GPX/1/1" xmlns:gpxx="http://www.garmin.com/xmlschemas/GpxExtensions/v3" xsi:schemaLocation="http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd" xmlns:opencpn="http://www.opencpn.org">
<trk>
<link href="https://openplotter.cloud/log/{_id}">
<text>openplotter trip log todo</text>
</link>
<extensions>
<opencpn:guid>{uuid.uuid4()}</opencpn:guid>
<opencpn:viz>1</opencpn:viz>
<opencpn:start>{mytrack[0]['time']}</opencpn:start>
<opencpn:end>{mytrack[-1]['time']}</opencpn:end>
</extensions>
<trkseg>\n""";
##print(gpx_data)
# LOOP through log entry
for entry in mytrack:
##print(entry['time'])
gpx_data += f""" <trkpt lat="{entry['lat']}" lon="{entry['lng']}">
<time>{entry['time']}</time>
</trkpt>\n""";
# END GPX XML format
gpx_data += """ </trkseg>
</trk>
</gpx>""";
return gpx_data
$export_logbook_gpx_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
api.export_logbook_gpx_py_fn
IS 'TODO, Export a log entry to GPX XML format using plpython3u';
--DROP FUNCTION IF EXISTS api.export_logbook_csv_fn;
--CREATE OR REPLACE FUNCTION api.export_logbook_csv_fn(IN _id INTEGER) RETURNS void
--AS $export_logbook_csv$
-- TODO
--$export_logbook_csv$ language plpgsql;
-- Description
--COMMENT ON FUNCTION
-- api.export_logbook_csv_fn
-- IS 'TODO, ...';
-- Infotiles web app
CREATE OR REPLACE VIEW api.total_info_view WITH (security_invoker=true,security_barrier=true) AS
-- Infotiles web app; not used, calculated client side
WITH
l as (SELECT count(*) as logs FROM api.logbook),
s as (SELECT count(*) as stays FROM api.stays),
m as (SELECT count(*) as moorages FROM api.moorages)
SELECT * FROM l,s,m;
COMMENT ON VIEW
api.total_info_view
IS 'Infotiles web view';

View File

@@ -28,7 +28,7 @@ begin
SET
processed = NOW()
WHERE id = process_rec.id;
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
RAISE NOTICE '-> cron_process_new_logbook_fn updated process_queue table [%]', process_rec.id;
END LOOP;
END;
$$ language plpgsql;
@@ -57,7 +57,7 @@ begin
SET
processed = NOW()
WHERE id = process_rec.id;
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
RAISE NOTICE '-> cron_process_new_stay_fn updated process_queue table [%]', process_rec.id;
END LOOP;
END;
$$ language plpgsql;
@@ -87,7 +87,7 @@ begin
SET
processed = NOW()
WHERE id = process_rec.id;
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
RAISE NOTICE '-> cron_process_new_moorage_fn updated process_queue table [%]', process_rec.id;
END LOOP;
END;
$$ language plpgsql;
@@ -123,16 +123,25 @@ begin
SET
active = False
WHERE id = metadata_rec.id;
RAISE NOTICE '-> updated api.metadata table to inactive for [%]', metadata_rec.id;
IF metadata_rec.client_id IS NULL OR metadata_rec.client_id = '' THEN
RAISE WARNING '-> cron_process_monitor_offline_fn invalid metadata record client_id %', metadata_rec.client_id;
RAISE EXCEPTION 'Invalid metadata'
USING HINT = 'Unknown client_id';
RETURN;
END IF;
PERFORM set_config('vessel.client_id', metadata_rec.client_id, false);
RAISE DEBUG '-> DEBUG cron_process_monitor_offline_fn vessel.client_id %', current_setting('vessel.client_id', false);
RAISE NOTICE '-> cron_process_monitor_offline_fn updated api.metadata table to inactive for [%] [%]', metadata_rec.id, metadata_rec.client_id;
-- Gather email and pushover app settings
app_settings = get_app_settings_fn();
--app_settings = get_app_settings_fn();
-- Gather user settings
user_settings := get_user_settings_from_metadata_fn(metadata_rec.id::INTEGER);
--user_settings := get_user_settings_from_clientid_fn(metadata_rec.id::INTEGER);
RAISE DEBUG '-> debug monitor_offline get_user_settings_from_metadata_fn [%]', user_settings;
user_settings := get_user_settings_from_clientid_fn(metadata_rec.client_id::TEXT);
RAISE DEBUG '-> cron_process_monitor_offline_fn get_user_settings_from_clientid_fn [%]', user_settings;
-- Send notification
--PERFORM send_notification_fn('monitor_offline'::TEXT, metadata_rec::RECORD);
PERFORM send_email_py_fn('monitor_offline'::TEXT, user_settings::JSONB, app_settings::JSONB);
PERFORM send_notification_fn('monitor_offline'::TEXT, user_settings::JSONB);
--PERFORM send_email_py_fn('monitor_offline'::TEXT, user_settings::JSONB, app_settings::JSONB);
--PERFORM send_pushover_py_fn('monitor_offline'::TEXT, user_settings::JSONB, app_settings::JSONB);
-- log/insert/update process_queue table with processed
INSERT INTO process_queue
@@ -140,7 +149,7 @@ begin
VALUES
('monitoring_offline', metadata_rec.id, metadata_rec.interval, now())
RETURNING id INTO process_id;
RAISE NOTICE '-> updated process_queue table [%]', process_id;
RAISE NOTICE '-> cron_process_monitor_offline_fn updated process_queue table [%]', process_id;
END LOOP;
END;
$$ language plpgsql;
@@ -165,26 +174,35 @@ begin
where channel = 'monitoring_online' and processed is null
order by stored asc
LOOP
RAISE NOTICE '-> cron_process_monitor_online_fn metadata_id [%]', process_rec.payload;
RAISE NOTICE '-> cron_process_monitor_online_fn metadata_id [%]', process_rec.payload;
SELECT * INTO metadata_rec
FROM api.metadata
WHERE id = process_rec.payload::INTEGER;
IF metadata_rec.client_id IS NULL OR metadata_rec.client_id = '' THEN
RAISE WARNING '-> cron_process_monitor_online_fn invalid metadata record client_id %', metadata_rec.client_id;
RAISE EXCEPTION 'Invalid metadata'
USING HINT = 'Unknown client_id';
RETURN;
END IF;
PERFORM set_config('vessel.client_id', metadata_rec.client_id, false);
RAISE DEBUG '-> DEBUG cron_process_monitor_online_fn vessel.client_id %', current_setting('vessel.client_id', false);
-- Gather email and pushover app settings
app_settings = get_app_settings_fn();
--app_settings = get_app_settings_fn();
-- Gather user settings
user_settings := get_user_settings_from_metadata_fn(metadata_rec.id::INTEGER);
--user_settings := get_user_settings_from_clientid_fn((metadata_rec.client_id::INTEGER, );
RAISE NOTICE '-> debug monitor_online get_user_settings_from_metadata_fn [%]', user_settings;
user_settings := get_user_settings_from_clientid_fn(metadata_rec.client_id::TEXT);
RAISE DEBUG '-> DEBUG cron_process_monitor_online_fn get_user_settings_from_clientid_fn [%]', user_settings;
-- Send notification
--PERFORM send_notification_fn('monitor_online'::TEXT, metadata_rec::RECORD);
PERFORM send_email_py_fn('monitor_online'::TEXT, user_settings::JSONB, app_settings::JSONB);
PERFORM send_notification_fn('monitor_online'::TEXT, user_settings::JSONB);
--PERFORM send_email_py_fn('monitor_online'::TEXT, user_settings::JSONB, app_settings::JSONB);
--PERFORM send_pushover_py_fn('monitor_online'::TEXT, user_settings::JSONB, app_settings::JSONB);
-- update process_queue entry as processed
UPDATE process_queue
SET
processed = NOW()
WHERE id = process_rec.id;
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
RAISE NOTICE '-> cron_process_monitor_online_fn updated process_queue table [%]', process_rec.id;
END LOOP;
END;
$$ language plpgsql;
@@ -213,7 +231,7 @@ begin
SET
processed = NOW()
WHERE id = process_rec.id;
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
RAISE NOTICE '-> cron_process_new_account_fn updated process_queue table [%]', process_rec.id;
END LOOP;
END;
$$ language plpgsql;
@@ -222,6 +240,35 @@ COMMENT ON FUNCTION
public.cron_process_new_account_fn
IS 'init by pg_cron to check for new account pending update, if so perform process_account_queue_fn';
-- CRON for new account pending otp validation notification
CREATE FUNCTION cron_process_new_account_otp_validation_fn() RETURNS void AS $$
declare
process_rec record;
begin
-- Check for new account pending update
RAISE NOTICE 'cron_process_new_account_otp_validation_fn';
FOR process_rec in
SELECT * from process_queue
where channel = 'new_account_otp' and processed is null
order by stored asc
LOOP
RAISE NOTICE '-> cron_process_new_account_otp_validation_fn [%]', process_rec.payload;
-- update account
PERFORM process_account_otp_validation_queue_fn(process_rec.payload::TEXT);
-- update process_queue entry as processed
UPDATE process_queue
SET
processed = NOW()
WHERE id = process_rec.id;
RAISE NOTICE '-> cron_process_new_account_otp_validation_fn updated process_queue table [%]', process_rec.id;
END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
public.cron_process_new_account_otp_validation_fn
IS 'init by pg_cron to check for new account otp pending update, if so perform process_account_otp_validation_queue_fn';
-- CRON for new vessel pending notification
CREATE FUNCTION cron_process_new_vessel_fn() RETURNS void AS $$
declare
@@ -242,7 +289,7 @@ begin
SET
processed = NOW()
WHERE id = process_rec.id;
RAISE NOTICE '-> updated process_queue table [%]', process_rec.id;
RAISE NOTICE '-> cron_process_new_vessel_fn updated process_queue table [%]', process_rec.id;
END LOOP;
END;
$$ language plpgsql;
@@ -251,8 +298,40 @@ COMMENT ON FUNCTION
public.cron_process_new_vessel_fn
IS 'init by pg_cron to check for new vessel pending update, if so perform process_vessel_queue_fn';
-- CRON for new event notification
CREATE FUNCTION cron_process_new_notification_fn() RETURNS void AS $$
declare
process_rec record;
begin
-- Check for new event notification pending update
RAISE NOTICE 'cron_process_new_notification_fn';
FOR process_rec in
SELECT * FROM process_queue
WHERE
(channel = 'new_account' OR channel = 'new_vessel' OR channel = 'email_otp')
and processed is null
order by stored asc
LOOP
RAISE NOTICE '-> cron_process_new_notification_fn for [%]', process_rec.payload;
-- process_notification_queue
PERFORM process_notification_queue_fn(process_rec.payload::TEXT, process_rec.channel::TEXT);
-- update process_queue entry as processed
UPDATE process_queue
SET
processed = NOW()
WHERE id = process_rec.id;
RAISE NOTICE '-> cron_process_new_notification_fn updated process_queue table [%]', process_rec.id;
END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
public.cron_process_new_notification_fn
IS 'init by pg_cron to check for new event pending notifications, if so perform process_notification_queue_fn';
-- CRON for Vacuum database
CREATE FUNCTION cron_vaccum_fn() RETURNS void AS $$
-- ERROR: VACUUM cannot be executed from a function
declare
begin
-- Vacuum

View File

@@ -0,0 +1,620 @@
---------------------------------------------------------------------------
-- signalk db public schema tables
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
CREATE SCHEMA IF NOT EXISTS public;
COMMENT ON SCHEMA public IS 'backend functions';
---------------------------------------------------------------------------
-- Table geocoders
--
-- https://github.com/CartoDB/labs-postgresql/blob/master/workshop/plpython.md
--
CREATE TABLE IF NOT EXISTS geocoders(
name TEXT UNIQUE,
url TEXT,
reverse_url TEXT
);
-- Description
COMMENT ON TABLE
public.geocoders
IS 'geo service nominatim url';
INSERT INTO geocoders VALUES
('nominatim',
NULL,
'https://nominatim.openstreetmap.org/reverse');
-- https://photon.komoot.io/reverse?lat=48.30587233333333&lon=14.3040525
-- https://docs.mapbox.com/playground/geocoding/?search_text=-3.1457869856990897,51.35921326434686&limit=1
---------------------------------------------------------------------------
-- Tables for message template email/pushover/telegram
--
CREATE TABLE IF NOT EXISTS email_templates(
name TEXT UNIQUE,
email_subject TEXT,
email_content TEXT,
pushover_title TEXT,
pushover_message TEXT
);
-- Description
COMMENT ON TABLE
public.email_templates
IS 'email/message templates for notifications';
-- with escape value, eg: E'A\nB\r\nC'
-- https://stackoverflow.com/questions/26638615/insert-line-break-in-postgresql-when-updating-text-field
-- TODO Update notification subject for log entry to 'logbook #NB ...'
INSERT INTO email_templates VALUES
('logbook',
'New Logbook Entry',
E'Hello __RECIPIENT__,\n\nWe just wanted to let you know that you have a new entry on openplotter.cloud: "__LOGBOOK_NAME__"\r\n\r\nSee more details at __APP_URL__/log/__LOGBOOK_LINK__\n\nHappy sailing!\nThe PostgSail Team',
'New Logbook Entry',
E'New entry on openplotter.cloud: "__LOGBOOK_NAME__"\r\nSee more details at __APP_URL__/log/__LOGBOOK_LINK__\n'),
('new_account',
'Welcome',
E'Hello __RECIPIENT__,\nCongratulations!\nYou successfully created an account.\nKeep in mind to register your vessel.\nHappy sailing!',
'Welcome',
E'Hi!\nYou successfully created an account\nKeep in mind to register your vessel.\n'),
('new_vessel',
'New vessel',
E'Hi!\nHow are you?\n__BOAT__ is now linked to your account.\n',
'New vessel',
E'Hi!\nHow are you?\n__BOAT__ is now linked to your account.\n'),
('monitor_offline',
'Vessel Offline',
E'__BOAT__ has been offline for more than an hour\r\nFind more details at __APP_URL__/boats\n',
'Vessel Offline',
E'__BOAT__ has been offline for more than an hour\r\nFind more details at __APP_URL__/boats\n'),
('monitor_online',
'Vessel Online',
E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n',
'Vessel Online',
E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n'),
('new_badge',
'New Badge!',
E'Hello __RECIPIENT__,\nCongratulations! You have just unlocked a new badge: __BADGE_NAME__\nSee more details at __APP_URL__/badges\nHappy sailing!\nThe PostgSail Team',
'New Badge!',
E'Congratulations!\nYou have just unlocked a new badge: __BADGE_NAME__\nSee more details at __APP_URL__/badges\n'),
('pushover_valid',
'Pushover integration',
E'Hello __RECIPIENT__,\nCongratulations! You have just connected your account to Pushover.\n\nThe PostgSail Team',
'Pushover integration!',
E'Congratulations!\nYou have just connected your account to Pushover.\n'),
('email_otp',
'Email verification',
E'Hello,\nPlease activate your account using the following code: __OTP_CODE__.\nThe code is valid for 15 minutes.\nThe PostgSail Team',
'Email verification',
E'Congratulations!\nPlease validate your account. Check your email!'),
('email_valid',
'Email verified',
E'Hello __RECIPIENT__,\nCongratulations!\nYou successfully validated your account.\nThe PostgSail Team',
'Email verified',
E'Hi!\nYou successfully validated your account.\n'),
('email_reset',
'Password reset',
E'Hello,\nYou requested a password reset. To reset your password, visit __APP_URL__/reset?__RESET_QS__.\nThe PostgSail Team',
'Password reset',
E'You requested a password recovery. Check your email!\n'),
('telegram_otp',
'Telegram bot',
E'Hello __RECIPIENT__,\nTo connect your account to @postgsail_bot, please type this verification code __OTP_CODE__ back to the bot.\nThe code is valid for 15 minutes.\nThe PostgSail Team',
'Telegram bot',
E'Congratulations!\nTo connect your account to @postgsail_bot, check your email!\n'),
('telegram_valid',
'Telegram bot',
E'Hello __RECIPIENT__,\nCongratulations! You have just connected your account to your vessel, @postgsail_bot.\n\nThe PostgSail Team',
'Telegram bot!',
E'Congratulations!\nYou have just connected your account to your vessel, @postgsail_bot.\n');
---------------------------------------------------------------------------
-- Queue handling
--
-- https://gist.github.com/kissgyorgy/beccba1291de962702ea9c237a900c79
-- https://www.depesz.com/2012/06/13/how-to-send-mail-from-database/
-- Listen/Notify way
--create function new_logbook_entry() returns trigger as $$
--begin
-- perform pg_notify('new_logbook_entry', NEW.id::text);
-- return NEW;
--END;
--$$ language plpgsql;
-- table way
CREATE TABLE IF NOT EXISTS public.process_queue (
id SERIAL PRIMARY KEY,
channel TEXT NOT NULL,
payload TEXT NOT NULL,
stored TIMESTAMP WITHOUT TIME ZONE NOT NULL,
processed TIMESTAMP WITHOUT TIME ZONE DEFAULT NULL
);
-- Description
COMMENT ON TABLE
public.process_queue
IS 'process queue for async job';
-- Index
CREATE INDEX ON public.process_queue (channel);
CREATE INDEX ON public.process_queue (stored);
CREATE INDEX ON public.process_queue (processed);
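-- Example (illustrative): how a worker claims pending entries, mirroring the
-- cron_process_*_fn loops defined in the public schema
--   SELECT * FROM public.process_queue
--   WHERE channel = 'new_logbook' AND processed IS NULL
--   ORDER BY stored ASC;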
-- Function process_queue helpers
create function new_account_entry_fn() returns trigger as $new_account_entry$
begin
insert into process_queue (channel, payload, stored) values ('new_account', NEW.email, now());
return NEW;
END;
$new_account_entry$ language plpgsql;
create function new_account_otp_validation_entry_fn() returns trigger as $new_account_otp_validation_entry$
begin
insert into process_queue (channel, payload, stored) values ('email_otp', NEW.email, now());
return NEW;
END;
$new_account_otp_validation_entry$ language plpgsql;
create function new_vessel_entry_fn() returns trigger as $new_vessel_entry$
begin
insert into process_queue (channel, payload, stored) values ('new_vessel', NEW.owner_email, now());
return NEW;
END;
$new_vessel_entry$ language plpgsql;
---------------------------------------------------------------------------
-- Tables Application Settings
-- https://dba.stackexchange.com/questions/27296/storing-application-settings-with-different-datatypes#27297
-- https://stackoverflow.com/questions/6893780/how-to-store-site-wide-settings-in-a-database
-- http://cvs.savannah.gnu.org/viewvc/*checkout*/gnumed/gnumed/gnumed/server/sql/gmconfiguration.sql
CREATE TABLE IF NOT EXISTS public.app_settings (
name TEXT NOT NULL UNIQUE,
value TEXT NOT NULL
);
-- Description
COMMENT ON TABLE public.app_settings IS 'application settings';
COMMENT ON COLUMN public.app_settings.name IS 'application settings name key';
COMMENT ON COLUMN public.app_settings.value IS 'application settings value';
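-- Example (illustrative, the setting name below is hypothetical):
--   INSERT INTO public.app_settings (name, value) VALUES ('app.url', 'https://openplotter.cloud');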
---------------------------------------------------------------------------
-- Badges description
-- TODO add conditions
--
CREATE TABLE IF NOT EXISTS badges(
name TEXT UNIQUE,
description TEXT
);
-- Description
COMMENT ON TABLE
public.badges
IS 'Badges descriptions';
INSERT INTO badges VALUES
('Helmsman',
'Nice work logging your first sail! You are officially a helmsman now!'),
('Wake Maker',
'Yowzers! Welcome to the 15 knot+ club ya speed demon skipper!'),
('Explorer',
'It looks like home is where the helm is. Cheers to 10 days away from home port!'),
('Mooring Pro',
'It takes a lot of skill to "thread that floating needle" but seems like you have mastered mooring with 10 nights on buoy!'),
('Anchormaster',
'Hook, line and sinker, you have this anchoring thing down! 25 days on the hook for you!'),
('Traveler',
'Who needs to fly when one can sail! You are an international sailor. À votre santé!'),
('Stormtrooper',
'Just like the elite defenders of the Empire, here you are, out braving your own hydro-empire in windspeeds above 30kts. Nice work trooper!'),
('Club Alaska',
'Home to the bears, glaciers, midnight sun and high adventure. Welcome to the Club Alaska Captain!'),
('Tropical Traveler',
'Look at you with your suntan, tropical drink and southern latitude!'),
('Aloha Award',
'Ticking off over 2300 NM across the great blue Pacific makes you the rare recipient of the Aloha Award. Well done and Aloha sailor!'),
('Tyee',
'You made it to the Tyee Outstation, the friendliest dock in Pacific Northwest!'),
-- TODO the sea is big and the world is not limited to the US
('Mediterranean Traveler',
'You made it through the Mediterranean!');
---------------------------------------------------------------------------
-- aistypes description
--
CREATE TABLE IF NOT EXISTS aistypes(
id NUMERIC UNIQUE,
description TEXT
);
-- Description
COMMENT ON TABLE
public.aistypes
IS 'aistypes AIS Ship Types, https://api.vesselfinder.com/docs/ref-aistypes.html';
INSERT INTO aistypes VALUES
(0, 'Not available (default)'),
(20, 'Wing in ground (WIG), all ships of this type'),
(21, 'Wing in ground (WIG), Hazardous category A'),
(22, 'Wing in ground (WIG), Hazardous category B'),
(23, 'Wing in ground (WIG), Hazardous category C'),
(24, 'Wing in ground (WIG), Hazardous category D'),
(25, 'Wing in ground (WIG), Reserved for future use'),
(26, 'Wing in ground (WIG), Reserved for future use'),
(27, 'Wing in ground (WIG), Reserved for future use'),
(28, 'Wing in ground (WIG), Reserved for future use'),
(29, 'Wing in ground (WIG), Reserved for future use'),
(30, 'Fishing'),
(31, 'Towing'),
(32, 'Towing: length exceeds 200m or breadth exceeds 25m'),
(33, 'Dredging or underwater ops'),
(34, 'Diving ops'),
(35, 'Military ops'),
(36, 'Sailing'),
(37, 'Pleasure Craft'),
(38, 'Reserved'),
(39, 'Reserved'),
(40, 'High speed craft (HSC), all ships of this type'),
(41, 'High speed craft (HSC), Hazardous category A'),
(42, 'High speed craft (HSC), Hazardous category B'),
(43, 'High speed craft (HSC), Hazardous category C'),
(44, 'High speed craft (HSC), Hazardous category D'),
(45, 'High speed craft (HSC), Reserved for future use'),
(46, 'High speed craft (HSC), Reserved for future use'),
(47, 'High speed craft (HSC), Reserved for future use'),
(48, 'High speed craft (HSC), Reserved for future use'),
(49, 'High speed craft (HSC), No additional information'),
(50, 'Pilot Vessel'),
(51, 'Search and Rescue vessel'),
(52, 'Tug'),
(53, 'Port Tender'),
(54, 'Anti-pollution equipment'),
(55, 'Law Enforcement'),
(56, 'Spare - Local Vessel'),
(57, 'Spare - Local Vessel'),
(58, 'Medical Transport'),
(59, 'Noncombatant ship according to RR Resolution No. 18'),
(60, 'Passenger, all ships of this type'),
(61, 'Passenger, Hazardous category A'),
(62, 'Passenger, Hazardous category B'),
(63, 'Passenger, Hazardous category C'),
(64, 'Passenger, Hazardous category D'),
(65, 'Passenger, Reserved for future use'),
(66, 'Passenger, Reserved for future use'),
(67, 'Passenger, Reserved for future use'),
(68, 'Passenger, Reserved for future use'),
(69, 'Passenger, No additional information'),
(70, 'Cargo, all ships of this type'),
(71, 'Cargo, Hazardous category A'),
(72, 'Cargo, Hazardous category B'),
(73, 'Cargo, Hazardous category C'),
(74, 'Cargo, Hazardous category D'),
(75, 'Cargo, Reserved for future use'),
(76, 'Cargo, Reserved for future use'),
(77, 'Cargo, Reserved for future use'),
(78, 'Cargo, Reserved for future use'),
(79, 'Cargo, No additional information'),
(80, 'Tanker, all ships of this type'),
(81, 'Tanker, Hazardous category A'),
(82, 'Tanker, Hazardous category B'),
(83, 'Tanker, Hazardous category C'),
(84, 'Tanker, Hazardous category D'),
(85, 'Tanker, Reserved for future use'),
(86, 'Tanker, Reserved for future use'),
(87, 'Tanker, Reserved for future use'),
(88, 'Tanker, Reserved for future use'),
(89, 'Tanker, No additional information'),
(90, 'Other Type, all ships of this type'),
(91, 'Other Type, Hazardous category A'),
(92, 'Other Type, Hazardous category B'),
(93, 'Other Type, Hazardous category C'),
(94, 'Other Type, Hazardous category D'),
(95, 'Other Type, Reserved for future use'),
(96, 'Other Type, Reserved for future use'),
(97, 'Other Type, Reserved for future use'),
(98, 'Other Type, Reserved for future use'),
(99, 'Other Type, no additional information');
---------------------------------------------------------------------------
-- MMSI MID Codes
--
CREATE TABLE IF NOT EXISTS mid(
country TEXT,
id NUMERIC UNIQUE
);
-- Description
COMMENT ON TABLE
public.mid
IS 'MMSI MID Codes (Maritime Mobile Service Identity) Filtered by Flag of Registration, https://www.marinevesseltraffic.com/2013/11/mmsi-mid-codes-by-flag.html';
INSERT INTO mid VALUES
('Adelie Land', 501),
('Afghanistan', 401),
('Alaska', 303),
('Albania', 201),
('Algeria', 605),
('American Samoa', 559),
('Andorra', 202),
('Angola', 603),
('Anguilla', 301),
('Antigua and Barbuda', 304),
('Antigua and Barbuda', 305),
('Argentina', 701),
('Armenia', 216),
('Aruba', 307),
('Ascension Island', 608),
('Australia', 503),
('Austria', 203),
('Azerbaijan', 423),
('Azores', 204),
('Bahamas', 308),
('Bahamas', 309),
('Bahamas', 311),
('Bahrain', 408),
('Bangladesh', 405),
('Barbados', 314),
('Belarus', 206),
('Belgium', 205),
('Belize', 312),
('Benin', 610),
('Bermuda', 310),
('Bhutan', 410),
('Bolivia', 720),
('Bosnia and Herzegovina', 478),
('Botswana', 611),
('Brazil', 710),
('British Virgin Islands', 378),
('Brunei Darussalam', 508),
('Bulgaria', 207),
('Burkina Faso', 633),
('Burundi', 609),
('Cambodia', 514),
('Cambodia', 515),
('Cameroon', 613),
('Canada', 316),
('Cape Verde', 617),
('Cayman Islands', 319),
('Central African Republic', 612),
('Chad', 670),
('Chile', 725),
('China', 412),
('China', 413),
('China', 414),
('Christmas Island', 516),
('Cocos Islands', 523),
('Colombia', 730),
('Comoros', 616),
('Comoros', 620),
('Congo', 615),
('Cook Islands', 518),
('Costa Rica', 321),
(E'Côte d\'Ivoire', 619),
('Croatia', 238),
('Crozet Archipelago', 618),
('Cuba', 323),
('Cyprus', 209),
('Cyprus', 210),
('Cyprus', 212),
('Czech Republic', 270),
('Denmark', 219),
('Denmark', 220),
('Djibouti', 621),
('Dominica', 325),
('Dominican Republic', 327),
('DR Congo', 676),
('Ecuador', 735),
('Egypt', 622),
('El Salvador', 359),
('Equatorial Guinea', 631),
('Eritrea', 625),
('Estonia', 276),
('Ethiopia', 624),
('Falkland Islands', 740),
('Faroe Islands', 231),
('Fiji', 520),
('Finland', 230),
('France', 226),
('France', 227),
('France', 228),
('French Polynesia', 546),
('Gabonese Republic', 626),
('Gambia', 629),
('Georgia', 213),
('Germany', 211),
('Germany', 218),
('Ghana', 627),
('Gibraltar', 236),
('Greece', 237),
('Greece', 239),
('Greece', 240),
('Greece', 241),
('Greenland', 331),
('Grenada', 330),
('Guadeloupe', 329),
('Guatemala', 332),
('Guiana', 745),
('Guinea', 632),
('Guinea-Bissau', 630),
('Guyana', 750),
('Haiti', 336),
('Honduras', 334),
('Hong Kong', 477),
('Hungary', 243),
('Iceland', 251),
('India', 419),
('Indonesia', 525),
('Iran', 422),
('Iraq', 425),
('Ireland', 250),
('Israel', 428),
('Italy', 247),
('Jamaica', 339),
('Japan', 431),
('Japan', 432),
('Jordan', 438),
('Kazakhstan', 436),
('Kenya', 634),
('Kerguelen Islands', 635),
('Kiribati', 529),
('Kuwait', 447),
('Kyrgyzstan', 451),
('Lao', 531),
('Latvia', 275),
('Lebanon', 450),
('Lesotho', 644),
('Liberia', 636),
('Liberia', 637),
('Libya', 642),
('Liechtenstein', 252),
('Lithuania', 277),
('Luxembourg', 253),
('Macao', 453),
('Madagascar', 647),
('Madeira', 255),
('Makedonia', 274),
('Malawi', 655),
('Malaysia', 533),
('Maldives', 455),
('Mali', 649),
('Malta', 215),
('Malta', 229),
('Malta', 248),
('Malta', 249),
('Malta', 256),
('Marshall Islands', 538),
('Martinique', 347),
('Mauritania', 654),
('Mauritius', 645),
('Mexico', 345),
('Micronesia', 510),
('Moldova', 214),
('Monaco', 254),
('Mongolia', 457),
('Montenegro', 262),
('Montserrat', 348),
('Morocco', 242),
('Mozambique', 650),
('Myanmar', 506),
('Namibia', 659),
('Nauru', 544),
('Nepal', 459),
('Netherlands', 244),
('Netherlands', 245),
('Netherlands', 246),
('Netherlands Antilles', 306),
('New Caledonia', 540),
('New Zealand', 512),
('Nicaragua', 350),
('Niger', 656),
('Nigeria', 657),
('Niue', 542),
('North Korea', 445),
('Northern Mariana Islands', 536),
('Norway', 257),
('Norway', 258),
('Norway', 259),
('Oman', 461),
('Pakistan', 463),
('Palau', 511),
('Palestine', 443),
('Panama', 351),
('Panama', 352),
('Panama', 353),
('Panama', 354),
('Panama', 355),
('Panama', 356),
('Panama', 357),
('Panama', 370),
('Panama', 371),
('Panama', 372),
('Panama', 373),
('Papua New Guinea', 553),
('Paraguay', 755),
('Peru', 760),
('Philippines', 548),
('Pitcairn Island', 555),
('Poland', 261),
('Portugal', 263),
('Puerto Rico', 358),
('Qatar', 466),
('Reunion', 660),
('Romania', 264),
('Russian Federation', 273),
('Rwanda', 661),
('Saint Helena', 665),
('Saint Kitts and Nevis', 341),
('Saint Lucia', 343),
('Saint Paul and Amsterdam Islands', 607),
('Saint Pierre and Miquelon', 361),
('Samoa', 561),
('San Marino', 268),
('Sao Tome and Principe', 668),
('Saudi Arabia', 403),
('Senegal', 663),
('Serbia', 279),
('Seychelles', 664),
('Sierra Leone', 667),
('Singapore', 563),
('Singapore', 564),
('Singapore', 565),
('Singapore', 566),
('Slovakia', 267),
('Slovenia', 278),
('Solomon Islands', 557),
('Somalia', 666),
('South Africa', 601),
('South Korea', 440),
('South Korea', 441),
('South Sudan', 638),
('Spain', 224),
('Spain', 225),
('Sri Lanka', 417),
('St Vincent and the Grenadines', 375),
('St Vincent and the Grenadines', 376),
('St Vincent and the Grenadines', 377),
('Sudan', 662),
('Suriname', 765),
('Swaziland', 669),
('Sweden', 265),
('Sweden', 266),
('Switzerland', 269),
('Syria', 468),
('Taiwan', 416),
('Tajikistan', 472),
('Tanzania', 674),
('Tanzania', 677),
('Thailand', 567),
('Togolese', 671),
('Tonga', 570),
('Trinidad and Tobago', 362),
('Tunisia', 672),
('Turkey', 271),
('Turkmenistan', 434),
('Turks and Caicos Islands', 364),
('Tuvalu', 572),
('Uganda', 675),
('Ukraine', 272),
('United Arab Emirates', 470),
('United Kingdom', 232),
('United Kingdom', 233),
('United Kingdom', 234),
('United Kingdom', 235),
('Uruguay', 770),
('US Virgin Islands', 379),
('USA', 338),
('USA', 366),
('USA', 367),
('USA', 368),
('USA', 369),
('Uzbekistan', 437),
('Vanuatu', 576),
('Vanuatu', 577),
('Vatican City', 208),
('Venezuela', 775),
('Vietnam', 574),
('Wallis and Futuna Islands', 578),
('Yemen', 473),
('Yemen', 475),
('Zambia', 678),
('Zimbabwe', 679);

View File

@@ -0,0 +1,957 @@
---------------------------------------------------------------------------
-- signalk db public schema
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
CREATE SCHEMA IF NOT EXISTS public;
---------------------------------------------------------------------------
-- Functions public schema
-- process single cron event, process_[logbook|stay|moorage|badge]_queue_fn()
--
CREATE OR REPLACE FUNCTION logbook_metrics_dwithin_fn(
IN _start text,
IN _end text,
IN lgn float,
IN lat float,
OUT count_metric numeric) AS $logbook_metrics_dwithin$
BEGIN
SELECT count(*) INTO count_metric
FROM api.metrics m
WHERE
m.latitude IS NOT NULL
AND m.longitude IS NOT NULL
AND m.time >= _start::TIMESTAMP WITHOUT TIME ZONE
AND m.time <= _end::TIMESTAMP WITHOUT TIME ZONE
AND client_id = current_setting('vessel.client_id', false)
AND ST_DWithin(
Geography(ST_MakePoint(m.longitude, m.latitude)),
Geography(ST_MakePoint(lgn, lat)),
10
);
END;
$logbook_metrics_dwithin$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.logbook_metrics_dwithin_fn
IS 'Check if all entries for a logbook are in stationary movement within 10 meters';
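-- Example usage (illustrative, timestamps and coordinates are placeholders; relies on
-- the vessel.client_id setting being set, as in process_logbook_queue_fn):
--   SELECT public.logbook_metrics_dwithin_fn('2023-02-13 10:00:00', '2023-02-13 12:00:00', 14.3040, 48.3058);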
-- Update a logbook with avg data
-- TODO using timescale function
CREATE OR REPLACE FUNCTION logbook_update_avg_fn(
IN _id integer,
IN _start TEXT,
IN _end TEXT,
OUT avg_speed double precision,
OUT max_speed double precision,
OUT max_wind_speed double precision,
OUT count_metric double precision
) AS $logbook_update_avg$
BEGIN
RAISE NOTICE '-> Updating avg for logbook id=%, start:"%", end:"%"', _id, _start, _end;
SELECT AVG(speedoverground), MAX(speedoverground), MAX(windspeedapparent), COUNT(*) INTO
avg_speed, max_speed, max_wind_speed, count_metric
FROM api.metrics m
WHERE m.latitude IS NOT NULL
AND m.longitude IS NOT NULL
AND m.time >= _start::TIMESTAMP WITHOUT TIME ZONE
AND m.time <= _end::TIMESTAMP WITHOUT TIME ZONE
AND client_id = current_setting('vessel.client_id', false);
RAISE NOTICE '-> Updated avg for logbook id=%, avg_speed:%, max_speed:%, max_wind_speed:%, count:%', _id, avg_speed, max_speed, max_wind_speed, count_metric;
END;
$logbook_update_avg$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.logbook_update_avg_fn
IS 'Update logbook details with calculated average and max data, AVG(speedOverGround), MAX(speedOverGround), MAX(windspeedapparent), count_metric';
-- Create a LINESTRING for Geometry
-- Todo validate st_length unit?
-- https://postgis.net/docs/ST_Length.html
DROP FUNCTION IF EXISTS logbook_update_geom_distance_fn;
CREATE FUNCTION logbook_update_geom_distance_fn(IN _id integer, IN _start text, IN _end text,
OUT _track_geom Geometry(LINESTRING),
OUT _track_distance double precision
) AS $logbook_geo_distance$
BEGIN
SELECT ST_MakeLine(
ARRAY(
--SELECT ST_SetSRID(ST_MakePoint(longitude,latitude),4326) as geo_point
SELECT st_makepoint(longitude,latitude) AS geo_point
FROM api.metrics m
WHERE m.latitude IS NOT NULL
AND m.longitude IS NOT NULL
AND m.time >= _start::TIMESTAMP WITHOUT TIME ZONE
AND m.time <= _end::TIMESTAMP WITHOUT TIME ZONE
AND client_id = current_setting('vessel.client_id', false)
ORDER BY m.time ASC
)
) INTO _track_geom;
RAISE NOTICE '-> GIS LINESTRING %', _track_geom;
-- SELECT ST_Length(_track_geom,false) INTO _track_distance;
-- Meter to Nautical Mile (international) Conversion
-- SELECT TRUNC (st_length(st_transform(track_geom,4326)::geography)::INT / 1.852) from logbook where id = 209; -- in NM
-- SELECT (st_length(st_transform(track_geom,4326)::geography)::INT * 0.0005399568) from api.logbook where id = 1; -- in NM
--SELECT TRUNC (ST_Length(_track_geom,false)::INT / 1.852) INTO _track_distance; -- in NM
SELECT TRUNC (ST_Length(_track_geom,false)::INT * 0.0005399568, 4) INTO _track_distance; -- in NM
RAISE NOTICE '-> GIS Length %', _track_distance;
END;
$logbook_geo_distance$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.logbook_update_geom_distance_fn
IS 'Update logbook details with geometry data and distance, ST_Length in Nautical Mile (international)';
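-- Note: 1 nautical mile = 1852 m, hence the factor 1/1852 (about 0.0005399568) used above
-- to convert the ST_Length result from meters to NM.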
-- Create GeoJSON for API consumption.
CREATE FUNCTION logbook_update_geojson_fn(IN _id integer, IN _start text, IN _end text,
OUT _track_geojson JSON
) AS $logbook_geojson$
declare
log_geojson jsonb;
metrics_geojson jsonb;
_map jsonb;
begin
-- GeoJson Feature Logbook linestring
SELECT
ST_AsGeoJSON(log.*) into log_geojson
FROM
( select
name,
distance,
duration,
avg_speed,
max_speed,
max_wind_speed,
notes,
track_geom
FROM api.logbook
WHERE id = _id
) AS log;
-- GeoJson Feature Metrics point
SELECT
json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
FROM (
( select
time,
courseovergroundtrue,
speedoverground,
anglespeedapparent,
longitude,latitude,
st_makepoint(longitude,latitude) AS geo_point
FROM api.metrics m
WHERE m.latitude IS NOT NULL
AND m.longitude IS NOT NULL
AND time >= _start::TIMESTAMP WITHOUT TIME ZONE
AND time <= _end::TIMESTAMP WITHOUT TIME ZONE
AND client_id = current_setting('vessel.client_id', false)
ORDER BY m.time ASC
)
) AS t;
-- Merge jsonb
select log_geojson::jsonb || metrics_geojson::jsonb into _map;
-- output
SELECT
json_build_object(
'type', 'FeatureCollection',
'features', _map
) into _track_geojson;
END;
$logbook_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.logbook_update_geojson_fn
IS 'Update log details with geojson';
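-- Example usage (hypothetical values), requires the logbook row and its metrics for the time window:
-- SELECT _track_geojson FROM logbook_update_geojson_fn(1, '2023-03-01T10:00:00', '2023-03-01T12:30:00');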
-- Update pending new logbook from process queue
DROP FUNCTION IF EXISTS process_logbook_queue_fn;
CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void AS $process_logbook_queue$
DECLARE
logbook_rec record;
from_name varchar;
to_name varchar;
log_name varchar;
avg_rec record;
geo_rec record;
log_settings jsonb;
user_settings jsonb;
geojson jsonb;
_invalid_time boolean;
_invalid_interval boolean;
_invalid_distance boolean;
count_metric numeric;
previous_stays_id numeric;
current_stays_departed text;
current_stays_id numeric;
current_stays_active boolean;
BEGIN
-- If _id is not NULL
IF _id IS NULL OR _id < 1 THEN
RAISE WARNING '-> process_logbook_queue_fn invalid input %', _id;
RETURN;
END IF;
-- Get the logbook record where all necessary fields exist
SELECT * INTO logbook_rec
FROM api.logbook
WHERE active IS false
AND id = _id
AND _from_lng IS NOT NULL
AND _from_lat IS NOT NULL
AND _to_lng IS NOT NULL
AND _to_lat IS NOT NULL;
-- Ensure the query is successful
IF logbook_rec.client_id IS NULL THEN
RAISE WARNING '-> process_logbook_queue_fn invalid logbook %', _id;
RETURN;
END IF;
PERFORM set_config('vessel.client_id', logbook_rec.client_id, false);
--RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.client_id %', current_setting('vessel.client_id', false);
-- Check if all metrics are within 10 meters of the departure point based on geo location
count_metric := logbook_metrics_dwithin_fn(logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT, logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
RAISE NOTICE '-> process_logbook_queue_fn logbook_metrics_dwithin_fn count:[%]', count_metric;
-- Calculate logbook data average and geo
-- Update logbook entry with the latest metric data and calculate data
avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
geo_rec := logbook_update_geom_distance_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
-- Avoid/ignore/delete logbooks with stationary movement or time sync issues
-- Check time start vs end
SELECT logbook_rec._to_time::timestamp without time zone < logbook_rec._from_time::timestamp without time zone INTO _invalid_time;
-- Is the distance less than 0.010 NM
SELECT geo_rec._track_distance < 0.010 INTO _invalid_distance;
-- Is the duration less than 100 secs
SELECT (logbook_rec._to_time::timestamp without time zone - logbook_rec._from_time::timestamp without time zone) < (100::text||' secs')::interval INTO _invalid_interval;
-- If stationary, fix data for metrics, logbook, stays, moorage
IF _invalid_time IS True OR _invalid_distance IS True
OR _invalid_interval IS True OR count_metric = avg_rec.count_metric THEN
RAISE WARNING '-> process_logbook_queue_fn invalid logbook data [%]', logbook_rec.id;
-- Update metrics status to moored
UPDATE api.metrics
SET status = 'moored'
WHERE time >= logbook_rec._from_time::TIMESTAMP WITHOUT TIME ZONE
AND time <= logbook_rec._to_time::TIMESTAMP WITHOUT TIME ZONE
AND client_id = current_setting('vessel.client_id', false);
-- Update logbook
UPDATE api.logbook
SET notes = 'invalid logbook data, stationary need to fix metrics?'
WHERE id = logbook_rec.id;
-- Get related stays
SELECT id,departed,active INTO current_stays_id,current_stays_departed,current_stays_active
FROM api.stays s
WHERE s.client_id = current_setting('vessel.client_id', false)
AND s.arrived = logbook_rec._to_time;
-- Update related stays
UPDATE api.stays
SET notes = 'invalid stays data, stationary need to fix metrics?'
WHERE client_id = current_setting('vessel.client_id', false)
AND arrived = logbook_rec._to_time;
-- Find previous stays
SELECT id INTO previous_stays_id
FROM api.stays s
WHERE s.client_id = current_setting('vessel.client_id', false)
AND s.arrived < logbook_rec._to_time
ORDER BY s.arrived DESC LIMIT 1;
-- Update previous stays with the departed time from current stays
-- and set the active state from current stays
UPDATE api.stays
SET departed = current_stays_departed::timestamp without time zone,
active = current_stays_active
WHERE client_id = current_setting('vessel.client_id', false)
AND id = previous_stays_id;
-- Clean up, remove invalid logbook and stay entry
DELETE FROM api.logbook WHERE id = logbook_rec.id;
RAISE WARNING '-> process_logbook_queue_fn delete invalid logbook [%]', logbook_rec.id;
DELETE FROM api.stays WHERE id = current_stays_id;
RAISE WARNING '-> process_logbook_queue_fn delete invalid stays [%]', current_stays_id;
-- TODO should we subtract (-1) from the moorages ref count or reprocess it?
RETURN;
END IF;
-- Generate logbook name, concat _from_location and _to_location
-- geo reverse _from_lng _from_lat
-- geo reverse _to_lng _to_lat
from_name := reverse_geocode_py_fn('nominatim', logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
to_name := reverse_geocode_py_fn('nominatim', logbook_rec._to_lng::NUMERIC, logbook_rec._to_lat::NUMERIC);
SELECT CONCAT(from_name, ' to ' , to_name) INTO log_name;
RAISE NOTICE 'Updating valid logbook entry [%] [%] [%]', logbook_rec.id, logbook_rec._from_time, logbook_rec._to_time;
UPDATE api.logbook
SET
duration = (logbook_rec._to_time::timestamp without time zone - logbook_rec._from_time::timestamp without time zone),
avg_speed = avg_rec.avg_speed,
max_speed = avg_rec.max_speed,
max_wind_speed = avg_rec.max_wind_speed,
_from = from_name,
_to = to_name,
name = log_name,
track_geom = geo_rec._track_geom,
distance = geo_rec._track_distance
WHERE id = logbook_rec.id;
-- GeoJSON require track_geom field
geojson := logbook_update_geojson_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
UPDATE api.logbook
SET
track_geojson = geojson
WHERE id = logbook_rec.id;
-- Prepare notification, gather user settings
SELECT json_build_object('logbook_name', log_name, 'logbook_link', logbook_rec.id) into log_settings;
user_settings := get_user_settings_from_clientid_fn(logbook_rec.client_id::TEXT);
SELECT user_settings::JSONB || log_settings::JSONB into user_settings;
RAISE DEBUG '-> debug process_logbook_queue_fn get_user_settings_from_clientid_fn [%]', user_settings;
RAISE DEBUG '-> debug process_logbook_queue_fn log_settings [%]', log_settings;
-- Send notification
PERFORM send_notification_fn('logbook'::TEXT, user_settings::JSONB);
END;
$process_logbook_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_logbook_queue_fn
IS 'Update logbook details when completed, logbook_update_avg_fn, logbook_update_geom_distance_fn, reverse_geocode_py_fn';
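-- Example usage, typically invoked per pending entry by the cron_process_new_logbook_fn scheduler (hypothetical id):
-- SELECT process_logbook_queue_fn(1);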
-- Update pending new stay from process queue
DROP FUNCTION IF EXISTS process_stay_queue_fn;
CREATE OR REPLACE FUNCTION process_stay_queue_fn(IN _id integer) RETURNS void AS $process_stay_queue$
DECLARE
stay_rec record;
_name varchar;
BEGIN
RAISE NOTICE 'process_stay_queue_fn';
-- If _id is valid, not NULL
IF _id IS NULL OR _id < 1 THEN
RAISE WARNING '-> process_stay_queue_fn invalid input %', _id;
RETURN;
END IF;
-- Get the stay record where all necessary fields exist
SELECT * INTO stay_rec
FROM api.stays
WHERE id = _id
AND longitude IS NOT NULL
AND latitude IS NOT NULL;
-- Ensure the query is successful
IF stay_rec.client_id IS NULL THEN
RAISE WARNING '-> process_stay_queue_fn invalid stay %', _id;
RETURN;
END IF;
PERFORM set_config('vessel.client_id', stay_rec.client_id, false);
-- geo reverse _lng _lat
_name := reverse_geocode_py_fn('nominatim', stay_rec.longitude::NUMERIC, stay_rec.latitude::NUMERIC);
RAISE NOTICE 'Updating stay entry [%]', stay_rec.id;
UPDATE api.stays
SET
name = _name,
geog = Geography(ST_MakePoint(stay_rec.longitude, stay_rec.latitude))
WHERE id = stay_rec.id;
-- Notification email/pushover?
END;
$process_stay_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_stay_queue_fn
IS 'Update stay details, reverse_geocode_py_fn';
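-- Example usage, typically invoked per pending entry by the cron_process_new_stay_fn scheduler (hypothetical id):
-- SELECT process_stay_queue_fn(1);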
-- Handle moorage insert or update from stays
-- TODO validate geography unit
-- https://postgis.net/docs/ST_DWithin.html
DROP FUNCTION IF EXISTS process_moorage_queue_fn;
CREATE OR REPLACE FUNCTION process_moorage_queue_fn(IN _id integer) RETURNS void AS $process_moorage_queue$
DECLARE
stay_rec record;
moorage_rec record;
BEGIN
RAISE NOTICE 'process_moorage_queue_fn';
-- If _id is not NULL
IF _id IS NULL OR _id < 1 THEN
RAISE WARNING '-> process_moorage_queue_fn invalid input %', _id;
RETURN;
END IF;
-- Get the stay record where all necessary fields exist
SELECT * INTO stay_rec
FROM api.stays
WHERE active IS false
AND departed IS NOT NULL
AND arrived IS NOT NULL
AND longitude IS NOT NULL
AND latitude IS NOT NULL
AND id = _id;
-- Ensure the query is successful
IF stay_rec.client_id IS NULL THEN
RAISE WARNING '-> process_moorage_queue_fn invalid stay %', _id;
RETURN;
END IF;
-- Do we have an existing moorage within 100m of the new stay?
FOR moorage_rec in
SELECT
*
FROM api.moorages
WHERE
latitude IS NOT NULL
AND longitude IS NOT NULL
AND geog IS NOT NULL
AND ST_DWithin(
-- Geography(ST_MakePoint(stay_rec._lng, stay_rec._lat)),
stay_rec.geog,
-- Geography(ST_MakePoint(longitude, latitude)),
geog,
100 -- in meters, ST_DWithin on geography uses meters
)
ORDER BY id ASC
LOOP
-- Found an existing moorage within 100m of the new stay
IF moorage_rec.id IS NOT NULL AND moorage_rec.id > 0 THEN
RAISE NOTICE 'Found existing moorage within 100m of stay %', moorage_rec;
EXIT; -- exit loop
END IF;
END LOOP;
-- If within 100m, update reference count and stay duration
-- else insert new entry
IF moorage_rec.id IS NOT NULL AND moorage_rec.id > 0 THEN
RAISE NOTICE 'Update moorage %', moorage_rec;
UPDATE api.moorages
SET
reference_count = moorage_rec.reference_count + 1,
stay_duration =
moorage_rec.stay_duration +
(stay_rec.departed::timestamp without time zone - stay_rec.arrived::timestamp without time zone)
WHERE id = moorage_rec.id;
ELSE
RAISE NOTICE 'Insert new moorage entry from stay %', stay_rec;
-- Ensure the stay has a name if lat,lon are set
IF stay_rec.name IS NULL AND stay_rec.longitude IS NOT NULL AND stay_rec.latitude IS NOT NULL THEN
stay_rec.name := reverse_geocode_py_fn('nominatim', stay_rec.longitude::NUMERIC, stay_rec.latitude::NUMERIC);
END IF;
-- Insert new moorage from stay
INSERT INTO api.moorages
(client_id, name, stay_id, stay_code, stay_duration, reference_count, latitude, longitude, geog)
VALUES (
stay_rec.client_id,
stay_rec.name,
stay_rec.id,
stay_rec.stay_code,
(stay_rec.departed::timestamp without time zone - stay_rec.arrived::timestamp without time zone),
1, -- default reference_count
stay_rec.latitude,
stay_rec.longitude,
Geography(ST_MakePoint(stay_rec.longitude, stay_rec.latitude))
);
END IF;
END;
$process_moorage_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_moorage_queue_fn
IS 'Handle moorage insert or update from stays';
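-- Example usage, typically invoked per processed stay by the cron_process_new_moorage_fn scheduler (hypothetical id):
-- SELECT process_moorage_queue_fn(1);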
-- process new account notification
DROP FUNCTION IF EXISTS process_account_queue_fn;
CREATE OR REPLACE FUNCTION process_account_queue_fn(IN _email TEXT) RETURNS void AS $process_account_queue$
DECLARE
account_rec record;
user_settings jsonb;
app_settings jsonb;
BEGIN
IF _email IS NULL OR _email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
SELECT * INTO account_rec
FROM auth.accounts
WHERE email = _email;
IF account_rec.email IS NULL OR account_rec.email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
-- Gather email and pushover app settings
app_settings := get_app_settings_fn();
-- set user email variable
PERFORM set_config('user.email', account_rec.email, false);
-- Gather user settings
user_settings := '{"email": "' || account_rec.email || '", "recipient": "' || account_rec.first || '"}';
-- Send notification email, pushover
PERFORM send_notification_fn('new_account'::TEXT, user_settings::JSONB);
--PERFORM send_email_py_fn('user'::TEXT, user_settings::JSONB, app_settings::JSONB);
--PERFORM send_pushover_py_fn('user'::TEXT, user_settings::JSONB, app_settings::JSONB);
END;
$process_account_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_account_queue_fn
IS 'process new account notification';
-- process new account otp validation notification
DROP FUNCTION IF EXISTS process_account_otp_validation_queue_fn;
CREATE OR REPLACE FUNCTION process_account_otp_validation_queue_fn(IN _email TEXT) RETURNS void AS $process_account_otp_validation_queue$
DECLARE
account_rec record;
user_settings jsonb;
app_settings jsonb;
otp_code text;
BEGIN
IF _email IS NULL OR _email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
SELECT * INTO account_rec
FROM auth.accounts
WHERE email = _email;
IF account_rec.email IS NULL OR account_rec.email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
-- Gather email and pushover app settings
app_settings := get_app_settings_fn();
otp_code := api.generate_otp_fn(_email);
-- set user email variable
PERFORM set_config('user.email', account_rec.email, false);
-- Gather user settings
user_settings := '{"email": "' || account_rec.email || '", "recipient": "' || account_rec.first || '", "otp_code": "' || otp_code || '"}';
-- Send notification email, pushover
PERFORM send_notification_fn('email_otp'::TEXT, user_settings::JSONB);
--PERFORM send_email_py_fn('email_otp'::TEXT, user_settings::JSONB, app_settings::JSONB);
--PERFORM send_pushover_py_fn('user'::TEXT, user_settings::JSONB, app_settings::JSONB);
END;
$process_account_otp_validation_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_account_otp_validation_queue_fn
IS 'process new account otp validation notification';
-- process new event notification
DROP FUNCTION IF EXISTS process_notification_queue_fn;
CREATE OR REPLACE FUNCTION process_notification_queue_fn(IN _email TEXT, IN message_type TEXT) RETURNS void
AS $process_notification_queue$
DECLARE
account_rec record;
vessel_rec record;
user_settings jsonb := null;
otp_code text;
BEGIN
IF _email IS NULL OR _email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
SELECT * INTO account_rec
FROM auth.accounts
WHERE email = _email;
IF account_rec.email IS NULL OR account_rec.email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
RAISE NOTICE '--> process_notification_queue_fn type [%] [%]', _email,message_type;
-- set user email variable
PERFORM set_config('user.email', account_rec.email, false);
-- Generate user_settings user settings
IF message_type = 'new_account' THEN
user_settings := '{"email": "' || account_rec.email || '", "recipient": "' || account_rec.first || '"}';
ELSEIF message_type = 'new_vessel' THEN
-- Gather vessel data
SELECT * INTO vessel_rec
FROM auth.vessels
WHERE owner_email = _email;
IF vessel_rec.owner_email IS NULL OR vessel_rec.owner_email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
user_settings := '{"email": "' || vessel_rec.owner_email || '", "boat": "' || vessel_rec.name || '"}';
ELSEIF message_type = 'email_otp' THEN
otp_code := api.generate_otp_fn(_email);
user_settings := '{"email": "' || account_rec.email || '", "recipient": "' || account_rec.first || '", "otp_code": "' || otp_code || '"}';
END IF;
PERFORM send_notification_fn(message_type::TEXT, user_settings::JSONB);
END;
$process_notification_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_notification_queue_fn
IS 'process new event type notification';
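-- Example usage (hypothetical email; message_type is one of 'new_account', 'new_vessel', 'email_otp'):
-- SELECT process_notification_queue_fn('user@example.com', 'new_account');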
-- process new vessel notification
DROP FUNCTION IF EXISTS process_vessel_queue_fn;
CREATE OR REPLACE FUNCTION process_vessel_queue_fn(IN _email TEXT) RETURNS void AS $process_vessel_queue$
DECLARE
vessel_rec record;
user_settings jsonb;
app_settings jsonb;
BEGIN
IF _email IS NULL OR _email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
SELECT * INTO vessel_rec
FROM auth.vessels
WHERE owner_email = _email;
IF vessel_rec.owner_email IS NULL OR vessel_rec.owner_email = '' THEN
RAISE EXCEPTION 'Invalid email'
USING HINT = 'Unknown email';
RETURN;
END IF;
-- Gather email and pushover app settings
app_settings := get_app_settings_fn();
-- set user email variable
PERFORM set_config('user.email', vessel_rec.owner_email, false);
-- Gather user settings
user_settings := '{"email": "' || vessel_rec.owner_email || '", "boat": "' || vessel_rec.name || '"}';
--user_settings := get_user_settings_from_clientid_fn();
-- Send notification email, pushover
--PERFORM send_notification_fn('vessel'::TEXT, vessel_rec::RECORD);
PERFORM send_email_py_fn('new_vessel'::TEXT, user_settings::JSONB, app_settings::JSONB);
--PERFORM send_pushover_py_fn('vessel'::TEXT, user_settings::JSONB, app_settings::JSONB);
END;
$process_vessel_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_vessel_queue_fn
IS 'process new vessel notification';
-- Get user settings details from a log entry
DROP FUNCTION IF EXISTS get_app_settings_fn;
CREATE OR REPLACE FUNCTION get_app_settings_fn (OUT app_settings jsonb)
RETURNS jsonb
AS $get_app_settings$
DECLARE
BEGIN
SELECT
jsonb_object_agg(name, value) INTO app_settings
FROM
public.app_settings
WHERE
name LIKE '%app.email%'
OR name LIKE '%app.pushover%'
OR name LIKE '%app.url'
OR name LIKE '%app.telegram%';
END;
$get_app_settings$
LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.get_app_settings_fn
IS 'get app settings details, email, pushover, telegram';
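-- Example usage; returns a single jsonb object aggregated from public.app_settings (keys illustrative only):
-- SELECT get_app_settings_fn();
-- -> {"app.url": "...", "app.email_server": "...", "app.pushover_app_token": "...", "app.telegram_bot_token": "..."}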
CREATE FUNCTION jsonb_key_exists(some_json jsonb, outer_key text)
RETURNS BOOLEAN AS $$
BEGIN
RETURN (some_json->outer_key) IS NOT NULL;
END;
$$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.jsonb_key_exists
IS 'function that checks if an outer key exists in some_json and returns a boolean';
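-- Example usage:
-- SELECT jsonb_key_exists('{"telegram": {"from": 1}}'::jsonb, 'telegram'); -- true
-- SELECT jsonb_key_exists('{"telegram": {"from": 1}}'::jsonb, 'pushover'); -- false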
-- Send notifications
DROP FUNCTION IF EXISTS send_notification_fn;
CREATE OR REPLACE FUNCTION send_notification_fn(
IN email_type TEXT,
IN user_settings JSONB) RETURNS VOID
AS $send_notification$
DECLARE
app_settings JSONB;
_email_notifications BOOLEAN := False;
_phone_notifications BOOLEAN := False;
_pushover_user_key TEXT := NULL;
pushover_settings JSONB := NULL;
_telegram_notifications BOOLEAN := False;
_telegram_chat_id TEXT := NULL;
telegram_settings JSONB := NULL;
_email TEXT := NULL;
BEGIN
-- TODO input check
--RAISE NOTICE '--> send_notification_fn type [%]', email_type;
-- Gather notification app settings, eg: email, pushover, telegram
app_settings := get_app_settings_fn();
--RAISE NOTICE '--> send_notification_fn app_settings [%]', app_settings;
--RAISE NOTICE '--> user_settings [%]', user_settings->>'email'::TEXT;
-- Gather notifications settings and merge with user settings
-- Send notification email
SELECT preferences['email_notifications'] INTO _email_notifications
FROM auth.accounts a
WHERE a.email = user_settings->>'email'::TEXT;
RAISE NOTICE '--> send_notification_fn email_notifications [%]', _email_notifications;
-- If email server app settings set and if email user settings set
IF app_settings['app.email_server'] IS NOT NULL AND _email_notifications IS True THEN
PERFORM send_email_py_fn(email_type::TEXT, user_settings::JSONB, app_settings::JSONB);
END IF;
-- Send notification pushover
SELECT preferences['phone_notifications'],preferences->>'pushover_user_key' INTO _phone_notifications,_pushover_user_key
FROM auth.accounts a
WHERE a.email = user_settings->>'email'::TEXT;
RAISE NOTICE '--> send_notification_fn phone_notifications [%]', _phone_notifications;
-- If pushover app settings set and if pushover user settings set
IF app_settings['app.pushover_app_token'] IS NOT NULL AND _phone_notifications IS True THEN
SELECT json_build_object('pushover_user_key', _pushover_user_key) into pushover_settings;
SELECT user_settings::JSONB || pushover_settings::JSONB into user_settings;
--RAISE NOTICE '--> send_notification_fn user_settings + pushover [%]', user_settings;
PERFORM send_pushover_py_fn(email_type::TEXT, user_settings::JSONB, app_settings::JSONB);
END IF;
-- Send notification telegram
SELECT (preferences->'telegram'->'from'->'id') IS NOT NULL,preferences['telegram']['from']['id'] INTO _telegram_notifications,_telegram_chat_id
FROM auth.accounts a
WHERE a.email = user_settings->>'email'::TEXT;
RAISE NOTICE '--> send_notification_fn telegram_notifications [%]', _telegram_notifications;
-- If telegram app settings set and if telegram user settings set
IF app_settings['app.telegram_bot_token'] IS NOT NULL AND _telegram_notifications IS True THEN
SELECT json_build_object('telegram_chat_id', _telegram_chat_id) into telegram_settings;
SELECT user_settings::JSONB || telegram_settings::JSONB into user_settings;
--RAISE NOTICE '--> send_notification_fn user_settings + telegram [%]', user_settings;
PERFORM send_telegram_py_fn(email_type::TEXT, user_settings::JSONB, app_settings::JSONB);
END IF;
END;
$send_notification$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.send_notification_fn
IS 'TODO Send notifications';
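-- Example usage (hypothetical payload; the email key must match an auth.accounts entry):
-- SELECT send_notification_fn('logbook'::TEXT,
--     '{"email": "user@example.com", "logbook_name": "A to B", "logbook_link": 1}'::JSONB);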
DROP FUNCTION IF EXISTS get_user_settings_from_clientid_fn;
CREATE OR REPLACE FUNCTION get_user_settings_from_clientid_fn(
IN clientid TEXT,
OUT user_settings JSONB
) RETURNS JSONB
AS $get_user_settings_from_clientid$
DECLARE
BEGIN
-- If client_id is not NULL
IF clientid IS NULL OR clientid = '' THEN
RAISE WARNING '-> get_user_settings_from_clientid_fn invalid input %', clientid;
END IF;
SELECT
json_build_object(
'boat' , v.name,
'recipient', a.first,
'email', v.owner_email,
'settings', a.preferences,
'pushover_key', a.preferences->'pushover_key',
'badges', a.preferences->'badges'
) INTO user_settings
FROM auth.accounts a, auth.vessels v, api.metadata m
WHERE m.mmsi = v.mmsi
AND m.client_id = clientid
AND lower(a.email) = lower(v.owner_email);
END;
$get_user_settings_from_clientid$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.get_user_settings_from_clientid_fn
IS 'get user settings details from a clientid, initiate for notifications';
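-- Example usage (hypothetical client id):
-- SELECT get_user_settings_from_clientid_fn('vessels.urn:mrn:imo:mmsi:123456789'::TEXT);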
DROP FUNCTION IF EXISTS set_vessel_settings_from_clientid_fn;
CREATE OR REPLACE FUNCTION set_vessel_settings_from_clientid_fn(
IN clientid TEXT,
OUT vessel_settings JSONB
) RETURNS JSONB
AS $set_vessel_settings_from_clientid$
DECLARE
BEGIN
-- If client_id is not NULL
IF clientid IS NULL OR clientid = '' THEN
RAISE WARNING '-> set_vessel_settings_from_clientid_fn invalid input %', clientid;
END IF;
SELECT
json_build_object(
'name' , v.name,
'mmsi', v.mmsi,
'client_id', m.client_id
) INTO vessel_settings
FROM auth.accounts a, auth.vessels v, api.metadata m
WHERE m.mmsi = v.mmsi
AND m.client_id = clientid;
-- Set session variables from the vessel settings gathered above
PERFORM set_config('vessel.mmsi', vessel_settings->>'mmsi', false);
PERFORM set_config('vessel.name', vessel_settings->>'name', false);
PERFORM set_config('vessel.client_id', vessel_settings->>'client_id', false);
END;
$set_vessel_settings_from_clientid$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.set_vessel_settings_from_clientid_fn
IS 'set vessel settings details from a clientid, initiated for process queue functions';
create function public.process_badge_queue_fn() RETURNS void AS $process_badge_queue$
declare
badge_rec record;
badges_arr record;
begin
-- Gather existing badges per account (TODO: use when implementing the badge checks below)
SELECT json_array_elements_text((a.preferences->'badges')::json) INTO badges_arr FROM auth.accounts a;
FOR badge_rec in
SELECT
name
FROM badges
LOOP
-- TODO badge checks not implemented yet, placeholder notice per badge
RAISE NOTICE 'Processing badge [%]', badge_rec.name;
END LOOP;
-- Helmsman
-- select count(l.id) api.logbook l where count(l.id) = 1;
-- Wake Maker
-- select max(l.max_wind_speed) api.logbook l where l.max_wind_speed >= 15;
-- Explorer
-- select sum(m.stay_duration) api.stays s where home_flag is false;
-- Mooring Pro
-- select sum(m.stay_duration) api.stays s where stay_code = 3;
-- Anchormaster
-- select sum(m.stay_duration) api.stays s where stay_code = 2;
-- Traveler
-- todo country to country.
-- Stormtrooper
-- select max(l.max_wind_speed) api.logbook l where l.max_wind_speed >= 30;
-- Club Alaska
-- todo country zone
-- Tropical Traveler
-- todo country zone
-- Aloha Award
-- todo pacific zone
-- TODO the sea is big and the world is not limited to the US
END
$process_badge_queue$ language plpgsql;
---------------------------------------------------------------------------
-- TODO add alert monitoring for Battery
---------------------------------------------------------------------------
-- PostgREST API pre-request check
-- TODO db-pre-request = "public.check_jwt"
-- Prevent unregistered user or unregistered vessel access
CREATE OR REPLACE FUNCTION public.check_jwt() RETURNS void AS $$
DECLARE
_role name;
_email text;
_mmsi name;
_path name;
_clientid text;
_vid text;
account_rec record;
vessel_rec record;
BEGIN
-- Extract email and role from jwt token
--RAISE WARNING 'check_jwt jwt %', current_setting('request.jwt.claims', true);
SELECT current_setting('request.jwt.claims', true)::json->>'email' INTO _email;
PERFORM set_config('user.email', _email, false);
SELECT current_setting('request.jwt.claims', true)::json->>'role' INTO _role;
--RAISE WARNING 'jwt email %', current_setting('request.jwt.claims', true)::json->>'email';
--RAISE WARNING 'jwt role %', current_setting('request.jwt.claims', true)::json->>'role';
--RAISE WARNING 'cur_user %', current_user;
IF _role = 'user_role' THEN
-- Check the user exist in the accounts table
SELECT * INTO account_rec
FROM auth.accounts
WHERE auth.accounts.email = _email;
IF account_rec.email IS NULL THEN
RAISE EXCEPTION 'Invalid user'
USING HINT = 'Unknown user or password';
END IF;
--RAISE WARNING 'req path %', current_setting('request.path', true);
-- Function allow without defined vessel
-- openapi doc, user settings, otp code and vessel registration
SELECT current_setting('request.path', true) into _path;
IF _path = '/rpc/settings_fn'
OR _path = '/rpc/register_vessel'
OR _path = '/rpc/update_user_preferences_fn'
OR _path = '/rpc/versions_fn'
OR _path = '/rpc/email_fn'
OR _path = '/' THEN
RETURN;
END IF;
-- Check a vessel and user exist
SELECT auth.vessels.* INTO vessel_rec
FROM auth.vessels, auth.accounts
WHERE auth.vessels.owner_email = auth.accounts.email
AND auth.accounts.email = _email;
-- Check if the boat exists yet
IF vessel_rec.owner_email IS NULL THEN
-- Return http status code 551 with message
RAISE sqlstate 'PT551' using
message = 'Vessel Required',
detail = 'Invalid vessel',
hint = 'Unknown vessel';
--RETURN; -- ignore if not exist
END IF;
-- Redundant?
IF vessel_rec.vessel_id IS NULL THEN
RAISE EXCEPTION 'Invalid vessel'
USING HINT = 'Unknown vessel id';
END IF;
-- Set session variables
PERFORM set_config('vessel.id', vessel_rec.vessel_id, false);
PERFORM set_config('vessel.name', vessel_rec.name, false);
-- ensure vessel is connected
SELECT coalesce(m.client_id, null) INTO _clientid
FROM auth.vessels v, api.metadata m
WHERE
m.vessel_id = current_setting('vessel.id')
AND m.vessel_id = v.vessel_id
AND v.owner_email =_email;
-- Set session variables
PERFORM set_config('vessel.client_id', _clientid, false);
--RAISE WARNING 'public.check_jwt() user_role vessel.client_id [%]', current_setting('vessel.client_id', false);
--RAISE WARNING 'public.check_jwt() user_role vessel.id [%]', current_setting('vessel.id', false);
--RAISE WARNING 'public.check_jwt() user_role vessel.name [%]', current_setting('vessel.name', false);
ELSIF _role = 'vessel_role' THEN
SELECT current_setting('request.jwt.claims', true)::json->>'vid' INTO _vid;
-- Check the vessel and user exist
SELECT auth.vessels.* INTO vessel_rec
FROM auth.vessels, auth.accounts
WHERE auth.vessels.owner_email = auth.accounts.email
AND auth.accounts.email = _email
AND auth.vessels.vessel_id = _vid;
IF vessel_rec.owner_email IS NULL THEN
RAISE EXCEPTION 'Invalid vessel'
USING HINT = 'Unknown vessel owner_email';
END IF;
PERFORM set_config('vessel.id', vessel_rec.vessel_id, false);
PERFORM set_config('vessel.name', vessel_rec.name, false);
-- TODO add client_id
--PERFORM set_config('vessel.client_id', vessel_rec.client_id, false);
--RAISE WARNING 'public.check_jwt() user_role vessel.mmsi %', current_setting('vessel.mmsi', false);
--RAISE WARNING 'public.check_jwt() user_role vessel.name %', current_setting('vessel.name', false);
--RAISE WARNING 'public.check_jwt() user_role vessel.client_id %', current_setting('vessel.client_id', false);
ELSIF _role <> 'api_anonymous' THEN
RAISE EXCEPTION 'Invalid role'
USING HINT = 'Stop being so evil and maybe you can log in';
END IF;
END
$$ language plpgsql security definer;
---------------------------------------------------------------------------
-- Function to trigger cron_jobs using API for tests.
-- TODO limit access and permissions
-- Run cron jobs
CREATE OR REPLACE FUNCTION public.run_cron_jobs() RETURNS void AS $$
BEGIN
-- In correct order
perform public.cron_process_new_notification_fn();
perform public.cron_process_monitor_online_fn();
perform public.cron_process_new_logbook_fn();
perform public.cron_process_new_stay_fn();
perform public.cron_process_new_moorage_fn();
perform public.cron_process_monitor_offline_fn();
END
$$ language plpgsql security definer;
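-- Example usage, runs all cron job functions once in order (intended for tests):
-- SELECT public.run_cron_jobs();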


@@ -0,0 +1,381 @@
---------------------------------------------------------------------------
-- signalk db public schema
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
CREATE SCHEMA IF NOT EXISTS public;
---------------------------------------------------------------------------
-- python reverse_geocode
--
-- https://github.com/CartoDB/labs-postgresql/blob/master/workshop/plpython.md
--
DROP FUNCTION IF EXISTS reverse_geocode_py_fn;
CREATE OR REPLACE FUNCTION reverse_geocode_py_fn(IN geocoder TEXT, IN lon NUMERIC, IN lat NUMERIC,
OUT geo_name TEXT)
AS $reverse_geocode_py$
import requests
# Use the shared cache to avoid preparing the geocoder metadata
if geocoder in SD:
plan = SD[geocoder]
# A prepared statement from Python
else:
plan = plpy.prepare("SELECT reverse_url AS url FROM geocoders WHERE name = $1", ["text"])
SD[geocoder] = plan
# Execute the statement with the geocoder param and limit to 1 result
rv = plpy.execute(plan, [geocoder], 1)
url = rv[0]['url']
# Validate input
if not lon or not lat:
plpy.notice('reverse_geocode_py_fn Parameters [{}] [{}]'.format(lon, lat))
plpy.error('Error missing parameters')
return None
# Make the request to the geocoder API
# https://operations.osmfoundation.org/policies/nominatim/
payload = {"lon": lon, "lat": lat, "format": "jsonv2", "zoom": 18}
r = requests.get(url, params=payload)
# Return the full address or nothing if not found
# Option1: If name is null fallback to address field road,neighbourhood,suburb
# Option2: Return the json for future reference like country
if r.status_code == 200 and "name" in r.json():
r_dict = r.json()
if r_dict["name"]:
return r_dict["name"]
elif "address" in r_dict and r_dict["address"]:
if "road" in r_dict["address"] and r_dict["address"]["road"]:
return r_dict["address"]["road"]
elif "neighbourhood" in r_dict["address"] and r_dict["address"]["neighbourhood"]:
return r_dict["address"]["neighbourhood"]
elif "suburb" in r_dict["address"] and r_dict["address"]["suburb"]:
return r_dict["address"]["suburb"]
elif "residential" in r_dict["address"] and r_dict["address"]["residential"]:
return r_dict["address"]["residential"]
elif "village" in r_dict["address"] and r_dict["address"]["village"]:
return r_dict["address"]["village"]
elif "town" in r_dict["address"] and r_dict["address"]["town"]:
return r_dict["address"]["town"]
else:
return 'n/a'
else:
return 'n/a'
else:
plpy.warning('Failed to receive a geo full address %s', r.json())
#plpy.error('Failed to received a geo full address %s', r.json())
return 'unknow'
$reverse_geocode_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.reverse_geocode_py_fn
IS 'query reverse geo service to return location name using plpython3u';
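-- Example usage (coordinates are illustrative only, lon then lat):
-- SELECT reverse_geocode_py_fn('nominatim', 2.2945::NUMERIC, 48.8584::NUMERIC);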
---------------------------------------------------------------------------
-- python send email
--
-- https://www.programcreek.com/python/example/3684/email.utils.formatdate
DROP FUNCTION IF EXISTS send_email_py_fn;
CREATE OR REPLACE FUNCTION send_email_py_fn(IN email_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_email_py$
# Import smtplib for the actual sending function
import smtplib
# Import the email modules we need
#from email.message import EmailMessage
from email.utils import formatdate,make_msgid
from email.mime.text import MIMEText
# Use the shared cache to avoid preparing the email metadata
if email_type in SD:
plan = SD[email_type]
# A prepared statement from Python
else:
plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
SD[email_type] = plan
# Execute the statement with the email_type param and limit to 1 result
rv = plpy.execute(plan, [email_type], 1)
email_subject = rv[0]['email_subject']
email_content = rv[0]['email_content']
# Replace fields using input jsonb obj
if not _user or not app:
plpy.notice('send_email_py_fn Parameters [{}] [{}]'.format(_user, app))
plpy.error('Error missing parameters')
return None
if 'logbook_name' in _user and _user['logbook_name']:
email_content = email_content.replace('__LOGBOOK_NAME__', _user['logbook_name'])
if 'logbook_link' in _user and _user['logbook_link']:
email_content = email_content.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
if 'recipient' in _user and _user['recipient']:
email_content = email_content.replace('__RECIPIENT__', _user['recipient'])
if 'boat' in _user and _user['boat']:
email_content = email_content.replace('__BOAT__', _user['boat'])
if 'badge' in _user and _user['badge']:
email_content = email_content.replace('__BADGE_NAME__', _user['badge'])
if 'otp_code' in _user and _user['otp_code']:
email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
if 'reset_qs' in _user and _user['reset_qs']:
email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])
if 'app.url' in app and app['app.url']:
email_content = email_content.replace('__APP_URL__', app['app.url'])
email_from = 'root@localhost'
if 'app.email_from' in app and app['app.email_from']:
email_from = 'PostgSail <' + app['app.email_from'] + '>'
#plpy.notice('Sending email from [{}] [{}]'.format(email_from, app['app.email_from']))
email_to = 'root@localhost'
if 'email' in _user and _user['email']:
email_to = _user['email']
#plpy.notice('Sending email to [{}] [{}]'.format(email_to, _user['email']))
else:
plpy.error('Error email to')
return None
msg = MIMEText(email_content, 'plain', 'utf-8')
msg["Subject"] = email_subject
msg["From"] = email_from
msg["To"] = email_to
msg["Date"] = formatdate()
msg["Message-ID"] = make_msgid()
server_smtp = 'localhost'
if 'app.email_server' in app and app['app.email_server']:
server_smtp = app['app.email_server']
#plpy.notice('Sending server [{}] [{}]'.format(server_smtp, app['app.email_server']))
# Send the message via our own SMTP server.
try:
# send your message with credentials specified above
with smtplib.SMTP(server_smtp, 25) as server:
if 'app.email_user' in app and app['app.email_user'] \
and 'app.email_pass' in app and app['app.email_pass']:
server.starttls()
server.login(app['app.email_user'], app['app.email_pass'])
#server.send_message(msg)
server.sendmail(msg["From"], msg["To"], msg.as_string())
server.quit()
# tell the script to report if your message was sent or which errors need to be fixed
plpy.notice('Sent email successfully to [{}] [{}]'.format(msg["To"], msg["Subject"]))
return None
except OSError as error:
plpy.error('OS Error occurred: ' + str(error))
except smtplib.SMTPConnectError:
plpy.error('Failed to connect to the server. Bad connection settings?')
except smtplib.SMTPServerDisconnected:
plpy.error('Failed to connect to the server. Wrong user/password?')
except smtplib.SMTPException as e:
plpy.error('SMTP error occurred: ' + str(e))
$send_email_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.send_email_py_fn
IS 'Send email notification using plpython3u';
---------------------------------------------------------------------------
-- python send pushover message
-- https://pushover.net/
DROP FUNCTION IF EXISTS send_pushover_py_fn;
CREATE OR REPLACE FUNCTION send_pushover_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_pushover_py$
import requests
# Use the shared cache to avoid preparing the email metadata
if message_type in SD:
plan = SD[message_type]
# A prepared statement from Python
else:
plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
SD[message_type] = plan
# Execute the statement with the message_type param and limit to 1 result
rv = plpy.execute(plan, [message_type], 1)
pushover_title = rv[0]['pushover_title']
pushover_message = rv[0]['pushover_message']
# Replace fields using input jsonb obj
if 'logbook_name' in _user and _user['logbook_name']:
pushover_message = pushover_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
if 'logbook_link' in _user and _user['logbook_link']:
pushover_message = pushover_message.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
if 'recipient' in _user and _user['recipient']:
pushover_message = pushover_message.replace('__RECIPIENT__', _user['recipient'])
if 'boat' in _user and _user['boat']:
pushover_message = pushover_message.replace('__BOAT__', _user['boat'])
if 'badge' in _user and _user['badge']:
pushover_message = pushover_message.replace('__BADGE_NAME__', _user['badge'])
if 'app.url' in app and app['app.url']:
pushover_message = pushover_message.replace('__APP_URL__', app['app.url'])
pushover_token = None
if 'app.pushover_app_token' in app and app['app.pushover_app_token']:
pushover_token = app['app.pushover_app_token']
else:
plpy.error('Error no pushover token defined, check app settings')
return None
pushover_user = None
if 'pushover_user_key' in _user and _user['pushover_user_key']:
pushover_user = _user['pushover_user_key']
else:
plpy.error('Error no pushover user token defined, check user settings')
return None
# requests
r = requests.post("https://api.pushover.net/1/messages.json", data = {
"token": pushover_token,
"user": pushover_user,
"title": pushover_title,
"message": pushover_message
})
#print(r.text)
# Return ?? or None if not found
#plpy.notice('Sent pushover successfully to [{}] [{}]'.format(r.text, r.status_code))
if r.status_code == 200:
plpy.notice('Sent pushover successfully to [{}] [{}] [{}]'.format(pushover_user, pushover_title, r.text))
else:
plpy.error('Failed to send pushover')
return None
$send_pushover_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.send_pushover_py_fn
IS 'Send pushover notification using plpython3u';
---------------------------------------------------------------------------
-- python send telegram message
-- https://core.telegram.org/
DROP FUNCTION IF EXISTS send_telegram_py_fn;
CREATE OR REPLACE FUNCTION send_telegram_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_telegram_py$
"""
Send a message to a telegram user or group specified on chatId
chat_id must be a number!
"""
import requests
import json
# Use the shared cache to avoid preparing the email metadata
if message_type in SD:
plan = SD[message_type]
# A prepared statement from Python
else:
plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
SD[message_type] = plan
# Execute the statement with the message_type param and limit to 1 result
rv = plpy.execute(plan, [message_type], 1)
telegram_title = rv[0]['pushover_title']
telegram_message = rv[0]['pushover_message']
# Replace fields using input jsonb obj
if 'logbook_name' in _user and _user['logbook_name']:
telegram_message = telegram_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
if 'logbook_link' in _user and _user['logbook_link']:
telegram_message = telegram_message.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
if 'recipient' in _user and _user['recipient']:
telegram_message = telegram_message.replace('__RECIPIENT__', _user['recipient'])
if 'boat' in _user and _user['boat']:
telegram_message = telegram_message.replace('__BOAT__', _user['boat'])
if 'badge' in _user and _user['badge']:
telegram_message = telegram_message.replace('__BADGE_NAME__', _user['badge'])
if 'app.url' in app and app['app.url']:
telegram_message = telegram_message.replace('__APP_URL__', app['app.url'])
telegram_token = None
if 'app.telegram_bot_token' in app and app['app.telegram_bot_token']:
telegram_token = app['app.telegram_bot_token']
else:
plpy.error('Error no telegram token defined, check app settings')
return None
telegram_chat_id = None
if 'telegram_chat_id' in _user and _user['telegram_chat_id']:
telegram_chat_id = _user['telegram_chat_id']
else:
plpy.error('Error no telegram user token defined, check user settings')
return None
# requests
headers = {'Content-Type': 'application/json',
'Proxy-Authorization': 'Basic base64'}
data_dict = {'chat_id': telegram_chat_id,
'text': telegram_message,
'parse_mode': 'HTML',
'disable_notification': False}
data = json.dumps(data_dict)
url = f'https://api.telegram.org/bot{telegram_token}/sendMessage'
r = requests.post(url,
data=data,
headers=headers)
#print(r.text)
# Return something boolean?
#plpy.notice('Sent telegram successfully to [{}] [{}]'.format(r.text, r.status_code))
if r.status_code == 200:
plpy.notice('Sent telegram successfully to [{}] [{}] [{}]'.format(telegram_chat_id, telegram_title, r.text))
else:
plpy.error('Failed to send telegram')
return None
$send_telegram_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.send_telegram_py_fn
IS 'Send a message to a telegram user or group specified on chatId using plpython3u';
---------------------------------------------------------------------------
-- python url encode
CREATE OR REPLACE FUNCTION urlencode_py_fn(uri text) RETURNS text
AS $urlencode_py$
import urllib.parse
return urllib.parse.quote(uri, safe="");
$urlencode_py$ LANGUAGE plpython3u IMMUTABLE STRICT;
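-- Example usage:
-- SELECT urlencode_py_fn('a b&c'); -- returns 'a%20b%26c'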
---------------------------------------------------------------------------
-- python
-- https://ipapi.co/
DROP FUNCTION IF EXISTS reverse_geoip_py_fn;
CREATE OR REPLACE FUNCTION reverse_geoip_py_fn(IN _ip TEXT) RETURNS void
AS $reverse_geoip_py$
"""
TODO
"""
import requests
import json
# requests
url = f'https://ipapi.co/{_ip}/json/'
r = requests.get(url)
#print(r.text)
# Return something boolean?
#plpy.notice('Sent successfully to [{}] [{}]'.format(r.text, r.status_code))
if r.status_code == 200:
plpy.notice('Sent successfully to [{}] [{}]'.format(r.text, r.status_code))
else:
plpy.error('Failed to send')
return None
$reverse_geoip_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.reverse_geoip_py_fn
IS 'Retrieve reverse geo IP location via ipapi.co using plpython3u';
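-- Example usage (example IP only):
-- SELECT reverse_geoip_py_fn('8.8.8.8');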
CREATE OR REPLACE FUNCTION urlescape_py_fn(original text) RETURNS text LANGUAGE plpython3u AS $$
import urllib.parse
return urllib.parse.quote(original);
$$
IMMUTABLE STRICT;
-- Description
COMMENT ON FUNCTION
public.urlescape_py_fn
IS 'URL-encoding VARCHAR and TEXT values using plpython3u';
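-- Example usage:
-- SELECT urlescape_py_fn('a b/c'); -- returns 'a%20b/c' (default safe character '/' is kept)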


@@ -1,910 +0,0 @@
---------------------------------------------------------------------------
-- singalk db public schema
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
CREATE SCHEMA IF NOT EXISTS public;
COMMENT ON SCHEMA public IS 'backend functions';
---------------------------------------------------------------------------
-- python reverse_geocode
--
-- https://github.com/CartoDB/labs-postgresql/blob/master/workshop/plpython.md
--
CREATE TABLE IF NOT EXISTS geocoders(
name TEXT UNIQUE,
url TEXT,
reverse_url TEXT
);
-- Description
COMMENT ON TABLE
public.geocoders
IS 'geo service nominatim url';
INSERT INTO geocoders VALUES
('nominatim',
NULL,
'https://nominatim.openstreetmap.org/reverse');
DROP FUNCTION IF EXISTS reverse_geocode_py_fn;
CREATE OR REPLACE FUNCTION reverse_geocode_py_fn(IN geocoder TEXT, IN lon NUMERIC, IN lat NUMERIC,
OUT geo_name TEXT)
AS $reverse_geocode_py$
import requests
# Use the shared cache to avoid preparing the geocoder metadata
if geocoder in SD:
plan = SD[geocoder]
# A prepared statement from Python
else:
plan = plpy.prepare("SELECT reverse_url AS url FROM geocoders WHERE name = $1", ["text"])
SD[geocoder] = plan
# Execute the statement with the geocoder param and limit to 1 result
rv = plpy.execute(plan, [geocoder], 1)
url = rv[0]['url']
# Make the request to the geocoder API
payload = {"lon": lon, "lat": lat, "format": "jsonv2", "zoom": 18}
r = requests.get(url, params=payload)
# Return the full address or nothing if not found
if r.status_code == 200 and "name" in r.json():
return r.json()["name"]
else:
plpy.error('Failed to received a geo full address %s', r.json())
return 'unknow'
$reverse_geocode_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.reverse_geocode_py_fn
IS 'query reverse geo service to return location name';
---------------------------------------------------------------------------
-- python template email/pushover
--
CREATE TABLE IF NOT EXISTS email_templates(
name TEXT UNIQUE,
email_subject TEXT,
email_content TEXT,
pushover_title TEXT,
pushover_message TEXT
);
-- Description
COMMENT ON TABLE
public.email_templates
IS 'email/message templates for notifications';
-- with escape value, eg: E'A\nB\r\nC'
-- https://stackoverflow.com/questions/26638615/insert-line-break-in-postgresql-when-updating-text-field
INSERT INTO email_templates VALUES
('logbook',
'New Logbook Entry',
E'Hello __RECIPIENT__,\n\nWe just wanted to let you know that you have a new entry on openplotter.cloud: "__LOGBOOK_NAME__"\r\n\r\nSee more details at __LOGBOOK_LINK__\n\nHappy sailing!\nThe Saillogger Team',
'New Logbook Entry',
E'We just wanted to let you know that you have a new entry on openplotter.cloud: "__LOGBOOK_NAME__"\r\n\r\nSee more details at __LOGBOOK_LINK__\n\nHappy sailing!\nThe Saillogger Team'),
('user',
'Welcome',
E'Hello __RECIPIENT__,\nCongratulations!\nYou successfully created an account.\nKeep in mind to register your vessel.\nHappy sailing!',
'Welcome',
E'Hi!\nYou successfully created an account\nKeep in mind to register your vessel.\nHappy sailing!'),
('vessel',
'New vessel',
E'Hi!\nHow are you?\n__BOAT__ is now linked to your account.',
'New vessel',
E'Hi!\nHow are you?\n__BOAT__ is now linked to your account.'),
('monitor_offline',
'Offline',
E'__BOAT__ has been offline for more than an hour\r\nFind more details at https://openplotter.cloud/boats/\n',
'Offline',
E'__BOAT__ has been offline for more than an hour\r\nFind more details at https://openplotter.cloud/boats/\n'),
('monitor_online',
'Online',
E'__BOAT__ just came online\nFind more details at https://openplotter.cloud/boats/\n',
'Online',
E'__BOAT__ just came online\nFind more details at https://openplotter.cloud/boats/\n'),
('badge',
'New Badge!',
E'Hello __RECIPIENT__,\nCongratulations! You have just unlocked a new badge: __BADGE_NAME__\nSee more details at https://openplotter.cloud/badges\nHappy sailing!\nThe Saillogger Team',
'New Badge!',
E'Congratulations!\nYou have just unlocked a new badge: __BADGE_NAME__\nSee more details at https://openplotter.cloud/badges\nHappy sailing!\nThe Saillogger Team');
---------------------------------------------------------------------------
-- python send email
--
-- TODO read table from python or send email data as params?
-- https://www.programcreek.com/python/example/3684/email.utils.formatdate
DROP FUNCTION IF EXISTS send_email_py_fn;
CREATE OR REPLACE FUNCTION send_email_py_fn(IN email_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_email_py$
# Import smtplib for the actual sending function
import smtplib
# Import the email modules we need
#from email.message import EmailMessage
from email.utils import formatdate,make_msgid
from email.mime.text import MIMEText
# Use the shared cache to avoid preparing the email metadata
if email_type in SD:
plan = SD[email_type]
# A prepared statement from Python
else:
plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
SD[email_type] = plan
# Execute the statement with the email_type param and limit to 1 result
rv = plpy.execute(plan, [email_type], 1)
email_subject = rv[0]['email_subject']
email_content = rv[0]['email_content']
# Replace fields using input jsonb obj
plpy.notice('Parameters [{}] [{}]'.format(_user, app))
if not _user or not app:
plpy.error('Error no parameters')
return None
if 'logbook_name' in _user and _user['logbook_name']:
email_content = email_content.replace('__LOGBOOK_NAME__', _user['logbook_name'])
if 'recipient' in _user and _user['recipient']:
email_content = email_content.replace('__RECIPIENT__', _user['recipient'])
if 'boat' in _user and _user['boat']:
email_content = email_content.replace('__BOAT__', _user['boat'])
if 'badge' in _user and _user['badge']:
email_content = email_content.replace('__BADGE_NAME__', _user['badge'])
email_from = 'root@localhost'
if 'app.email_from' in app and app['app.email_from']:
email_from = app['app.email_from']
#plpy.notice('Sending email from [{}] [{}]'.format(email_from, app['app.email_from']))
email_to = 'root@localhost'
if 'email' in _user and _user['email']:
email_to = _user['email']
#plpy.notice('Sending email to [{}] [{}]'.format(email_to, _user['email']))
else:
plpy.error('Error email to')
return None
msg = MIMEText(email_content, 'plain', 'utf-8')
msg["Subject"] = email_subject
msg["From"] = email_from
msg["To"] = email_to
msg["Date"] = formatdate()
msg["Message-ID"] = make_msgid()
server_smtp = 'localhost'
if 'app.email_server' in app and app['app.email_server']:
server_smtp = app['app.email_server']
# Send the message via our own SMTP server.
try:
# send your message with credentials specified above
with smtplib.SMTP(server_smtp, 25) as server:
if 'app.email_user' in app and app['app.email_user'] \
and 'app.email_pass' in app and app['app.email_pass']:
server.starttls()
server.login(app['app.email_user'], app['app.email_pass'])
#server.send_message(msg)
server.sendmail(msg["To"], msg["From"], msg.as_string())
server.quit()
# tell the script to report if your message was sent or which errors need to be fixed
plpy.notice('Sent email successfully to [{}] [{}]'.format(msg["To"], msg["Subject"]))
return None
except OSError as error :
plpy.error(error)
except smtplib.SMTPConnectError:
plpy.error('Failed to connect to the server. Bad connection settings?')
except smtplib.SMTPServerDisconnected:
plpy.error('Failed to connect to the server. Wrong user/password?')
except smtplib.SMTPException as e:
plpy.error('SMTP error occurred: ' + str(e))
$send_email_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.send_email_py_fn
IS 'Send email notification using plpython3u';
---------------------------------------------------------------------------
-- python send pushover
--
-- TODO read app and user key from table?
-- https://pushover.net/
DROP FUNCTION IF EXISTS send_pushover_py_fn;
CREATE OR REPLACE FUNCTION send_pushover_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_pushover_py$
import requests
# Use the shared cache to avoid preparing the email metadata
if message_type in SD:
plan = SD[message_type]
# A prepared statement from Python
else:
plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
SD[message_type] = plan
# Execute the statement with the message_type param and limit to 1 result
rv = plpy.execute(plan, [message_type], 1)
pushover_title = rv[0]['pushover_title']
pushover_message = rv[0]['pushover_message']
# Replace fields using input jsonb obj
if 'logbook_name' in _user and _user['logbook_name']:
pushover_message = pushover_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
if 'recipient' in _user and _user['recipient']:
pushover_message = pushover_message.replace('__RECIPIENT__', _user['recipient'])
if 'boat' in _user and _user['boat']:
pushover_message = pushover_message.replace('__BOAT__', _user['boat'])
if 'badge' in _user and _user['badge']:
pushover_message = pushover_message.replace('__BADGE_NAME__', _user['badge'])
pushover_token = None
if 'app.pushover_token' in app and app['app.pushover_token']:
pushover_token = app['app.pushover_token']
else:
plpy.error('Error no pushover token defined, check app settings')
return None
pushover_user = None
if 'pushover_key' in _user and _user['pushover_key']:
pushover_user = _user['pushover_key']
else:
plpy.error('Error no pushover user token defined, check user settings')
return None
# requests
r = requests.post("https://api.pushover.net/1/messages.json", data = {
"token": pushover_token,
"user": pushover_user,
"title": pushover_title,
"message": pushover_message
})
#print(r.text)
# Return the full address or None if not found
plpy.notice('Sent pushover successfully to [{}] [{}]'.format(r.text, r.status_code))
if r.status_code == 200:
plpy.notice('Sent pushover successfully to [{}] [{}] [{}]'.format("__USER__", pushover_title, r.text))
else:
plpy.error('Failed to send pushover')
return None
$send_pushover_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.send_pushover_py_fn
IS 'Send pushover notification using plpython3u';
---------------------------------------------------------------------------
-- Functions public schema
--
-- Update a logbook with avg data
-- TODO using timescale function
CREATE OR REPLACE FUNCTION logbook_update_avg_fn(
IN _id integer,
IN _start TEXT,
IN _end TEXT,
OUT avg_speed double precision,
OUT max_speed double precision,
OUT max_wind_speed double precision
) AS $logbook_update_avg$
BEGIN
RAISE NOTICE '-> Updating avg for logbook id=%, start: "%", end: "%"', _id, _start, _end;
SELECT AVG(speedOverGround), MAX(speedOverGround), MAX(windspeedapparent) INTO
avg_speed, max_speed, max_wind_speed
FROM api.metrics
WHERE time >= _start::TIMESTAMP WITHOUT TIME ZONE AND
time <= _end::TIMESTAMP WITHOUT TIME ZONE;
RAISE NOTICE '-> Updated avg for logbook id=%, avg_speed:%, max_speed:%, max_wind_speed:%', _id, avg_speed, max_speed, max_wind_speed;
END;
$logbook_update_avg$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.logbook_update_avg_fn
IS 'Update logbook details with calculate average and max data, AVG(speedOverGround), MAX(speedOverGround), MAX(windspeedapparent)';
-- Create a LINESTRING for Geometry
-- Todo validate st_length unit?
-- https://postgis.net/docs/ST_Length.html
CREATE FUNCTION logbook_update_geom_distance_fn(IN _id integer, IN _start text, IN _end text,
OUT _track_geom Geometry(LINESTRING),
OUT _track_distance double precision
) AS $logbook_geo_distance$
BEGIN
SELECT ST_MakeLine(
ARRAY(
--SELECT ST_SetSRID(ST_MakePoint(longitude,latitude),4326) as geo_point
SELECT st_makepoint(longitude,latitude) AS geo_point
FROM api.metrics m
WHERE m.latitude IS NOT NULL
AND m.longitude IS NOT NULL
AND m.time >= _start::TIMESTAMP WITHOUT TIME ZONE
AND m.time <= _end::TIMESTAMP WITHOUT TIME ZONE
ORDER BY m.time ASC
)
) INTO _track_geom;
RAISE NOTICE '-> GIS LINESTRING %', _track_geom;
-- SELECT ST_Length(_track_geom,false) INTO _track_distance;
-- SELECT TRUNC (st_length(st_transform(track_geom,4326)::geography)::INT / 1.852) from logbook where id = 209; -- in NM
SELECT TRUNC (ST_Length(_track_geom,false)::INT / 1.852) INTO _track_distance; -- in NM
RAISE NOTICE '-> GIS Length %', _track_distance;
END;
$logbook_geo_distance$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.logbook_update_geom_distance_fn
IS 'Update logbook details with geometry data an distance, ST_Length';
-- Update pending new logbook from process queue
DROP FUNCTION IF EXISTS process_logbook_queue_fn;
CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void AS $process_logbook_queue$
DECLARE
logbook_rec record;
from_name varchar;
to_name varchar;
log_name varchar;
avg_rec record;
geo_rec record;
user_settings jsonb;
app_settings jsonb;
BEGIN
-- If _id is not NULL
SELECT * INTO logbook_rec
FROM api.logbook
WHERE active IS false
AND id = _id;
-- geo reverse _from_lng _from_lat
-- geo reverse _to_lng _to_lat
from_name := reverse_geocode_py_fn('nominatim', logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
to_name := reverse_geocode_py_fn('nominatim', logbook_rec._to_lng::NUMERIC, logbook_rec._to_lat::NUMERIC);
SELECT CONCAT(from_name, ' to ' , to_name) INTO log_name;
-- SELECT CONCAT("_from" , ' to ' ,"_to") from api.logbook where id = 1;
-- Generate logbook name, concat _from_location and to _to_locacion
-- Update logbook entry with the latest metric data and calculate data
avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
geo_rec := logbook_update_geom_distance_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
-- todo check on time start vs end
RAISE NOTICE 'Updating logbook entry [%] [%] [%]', logbook_rec.id, logbook_rec._from_time, logbook_rec._to_time;
UPDATE api.logbook
SET
duration = (logbook_rec._to_time::timestamp without time zone - logbook_rec._from_time::timestamp without time zone),
avg_speed = avg_rec.avg_speed,
max_speed = avg_rec.max_speed,
max_wind_speed = avg_rec.max_wind_speed,
_from = from_name,
_to = to_name,
name = log_name,
track_geom = geo_rec._track_geom,
distance = geo_rec._track_distance
WHERE id = logbook_rec.id;
-- Gather email and pushover app settings
app_settings := get_app_settings_fn();
-- Gather user settings
user_settings := get_user_settings_from_log_fn(logbook_rec::RECORD);
--user_settings := '{"logbook_name": "' || log_name || '"}, "{"email": "' || account_rec.email || '", "recipient": "' || account_rec.first || '}';
--user_settings := '{"logbook_name": "' || log_name || '"}';
-- Send notification email, pushover
--PERFORM send_notification('logbook'::TEXT, logbook_rec::RECORD);
PERFORM send_email_py_fn('logbook'::TEXT, user_settings::JSONB, app_settings::JSONB);
--PERFORM send_pushover_py_fn('logbook'::TEXT, user_settings::JSONB, app_settings::JSONB);
END;
$process_logbook_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_logbook_queue_fn
IS 'Update logbook details when completed, logbook_update_avg_fn, logbook_update_geom_distance_fn, reverse_geocode_py_fn';
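-- Example usage, a minimal sketch for processing a single pending logbook entry by hand; the id is hypothetical,
-- in production the cron handler reads the pending ids from public.process_queue:
-- SELECT process_logbook_queue_fn(1);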
-- Update pending new stay from process queue
DROP FUNCTION IF EXISTS process_stay_queue_fn;
CREATE OR REPLACE FUNCTION process_stay_queue_fn(IN _id integer) RETURNS void AS $process_stay_queue$
DECLARE
stay_rec record;
_name varchar;
BEGIN
RAISE WARNING 'process_stay_queue_fn';
RAISE WARNING 'jwt %', current_setting('request.jwt.claims', true);
RAISE WARNING 'cur_user %', current_user;
-- If _id is not NULL
SELECT * INTO stay_rec
FROM api.stays
WHERE id = _id;
-- AND client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%';
-- geo reverse _lng _lat
_name := reverse_geocode_py_fn('nominatim', stay_rec.longitude::NUMERIC, stay_rec.latitude::NUMERIC);
RAISE NOTICE 'Updating stay entry [%]', stay_rec.id;
UPDATE api.stays
SET
name = _name,
geog = Geography(ST_MakePoint(stay_rec.longitude, stay_rec.latitude))
WHERE id = stay_rec.id;
-- Notification email/pushover?
END;
$process_stay_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_stay_queue_fn
IS 'Update stay details, reverse_geocode_py_fn';
-- Handle moorage insert or update from stays
-- todo validate geography unit
-- https://postgis.net/docs/ST_DWithin.html
DROP FUNCTION IF EXISTS process_moorage_queue_fn;
CREATE OR REPLACE FUNCTION process_moorage_queue_fn(IN _id integer) RETURNS void AS $process_moorage_queue$
DECLARE
stay_rec record;
moorage_rec record;
BEGIN
-- If _id is not NULL
SELECT * INTO stay_rec
FROM api.stays
WHERE active IS false
AND departed IS NOT NULL
AND id = _id;
FOR moorage_rec in
SELECT
*
FROM api.moorages
WHERE
latitude IS NOT NULL
AND longitude IS NOT NULL
AND ST_DWithin(
-- Geography(ST_MakePoint(stay_rec._lng, stay_rec._lat)),
stay_rec.geog,
-- Geography(ST_MakePoint(longitude, latitude)),
geog,
100 -- in meters ?
)
ORDER BY id ASC
LOOP
-- found previous stay within 100m of the new moorage
IF moorage_rec.id IS NOT NULL AND moorage_rec.id > 0 THEN
RAISE NOTICE 'Found previous stay within 100m of moorage %', moorage_rec;
EXIT; -- exit loop
END IF;
END LOOP;
-- if within 100m update reference count and stay duration
-- else insert new entry
IF moorage_rec.id IS NOT NULL AND moorage_rec.id > 0 THEN
RAISE NOTICE 'Update moorage %', moorage_rec;
UPDATE api.moorages
SET
reference_count = moorage_rec.reference_count + 1,
stay_duration =
moorage_rec.stay_duration +
(stay_rec.departed::timestamp without time zone - stay_rec.arrived::timestamp without time zone)
WHERE id = moorage_rec.id;
else
RAISE NOTICE 'Insert new moorage entry from stay %', stay_rec;
-- Ensure the stay has a name
IF stay_rec.name IS NULL THEN
stay_rec.name := reverse_geocode_py_fn('nominatim', stay_rec.longitude::NUMERIC, stay_rec.latitude::NUMERIC);
END IF;
-- Insert new moorage from stay
INSERT INTO api.moorages
(client_id, name, stay_id, stay_code, stay_duration, reference_count, latitude, longitude, geog)
VALUES (
stay_rec.client_id,
stay_rec.name,
stay_rec.id,
stay_rec.stay_code,
(stay_rec.departed::timestamp without time zone - stay_rec.arrived::timestamp without time zone),
1, -- default reference_count
stay_rec.latitude,
stay_rec.longitude,
Geography(ST_MakePoint(stay_rec.longitude, stay_rec.latitude))
);
END IF;
END;
$process_moorage_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_moorage_queue_fn
IS 'Handle moorage insert or update from stays';
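-- Example usage, a minimal sketch with a hypothetical stay id; normally invoked by the cron handler
-- once the stay is closed (active is false and departed is set):
-- SELECT process_moorage_queue_fn(1);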
-- process new account notification
DROP FUNCTION IF EXISTS process_account_queue_fn;
CREATE OR REPLACE FUNCTION process_account_queue_fn(IN _email TEXT) RETURNS void AS $process_account_queue$
DECLARE
account_rec record;
user_settings jsonb;
app_settings jsonb;
BEGIN
-- If _email is not NULL
SELECT * INTO account_rec
FROM auth.accounts
WHERE email = _email;
-- Gather email and pushover app settings
app_settings := get_app_settings_fn();
-- Gather user settings
user_settings := '{"email": "' || account_rec.email || '", "recipient": "' || account_rec.first || '"}';
-- Send notification email, pushover
--PERFORM send_notification_fn('user'::TEXT, account_rec::RECORD);
PERFORM send_email_py_fn('user'::TEXT, user_settings::JSONB, app_settings::JSONB);
--PERFORM send_pushover_py_fn('user'::TEXT, user_settings::JSONB, app_settings::JSONB);
END;
$process_account_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_account_queue_fn
IS 'process new account notification';
-- process new vessel notification
DROP FUNCTION IF EXISTS process_vessel_queue_fn;
CREATE OR REPLACE FUNCTION process_vessel_queue_fn(IN _email TEXT) RETURNS void AS $process_vessel_queue$
DECLARE
vessel_rec record;
user_settings jsonb;
app_settings jsonb;
BEGIN
-- If _email is not NULL
SELECT * INTO vessel_rec
FROM auth.vessels
WHERE owner_email = _email;
-- Gather user_settings from
-- if notification email
-- -- Send email
--
-- Gather email and pushover app settings
app_settings := get_app_settings_fn();
-- Gather user settings
user_settings := '{"email": "' || vessel_rec.owner_email || '", "boat": "' || vessel_rec.name || '"}';
--user_settings := get_user_settings_from_clientid_fn();
-- Send notification email, pushover
--PERFORM send_notification_fn('vessel'::TEXT, vessel_rec::RECORD);
PERFORM send_email_py_fn('vessel'::TEXT, user_settings::JSONB, app_settings::JSONB);
--PERFORM send_pushover_py_fn('vessel'::TEXT, user_settings::JSONB, app_settings::JSONB);
END;
$process_vessel_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.process_vessel_queue_fn
IS 'process new vessel notification';
-- Get user settings details from a log entry
DROP FUNCTION IF EXISTS get_app_settings_fn;
CREATE OR REPLACE FUNCTION get_app_settings_fn(OUT app_settings JSON) RETURNS JSON
AS $get_app_settings$
DECLARE
BEGIN
SELECT jsonb_object_agg(name,value) INTO app_settings
FROM public.app_settings
WHERE name LIKE '%app.email%' OR name LIKE '%app.pushover%';
END;
$get_app_settings$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.get_app_settings_fn
IS 'get app settings details, email, pushover';
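-- Example usage, a minimal sketch; the result is a single JSON object keyed by setting name,
-- the key names shown are illustrative:
-- SELECT get_app_settings_fn();
-- {"app.url": "...", "app.email_from": "...", "app.pushover_app_token": "..."}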
-- Get user settings details from a log entry
DROP FUNCTION IF EXISTS get_user_settings_from_log_fn;
CREATE OR REPLACE FUNCTION get_user_settings_from_log_fn(IN logbook_rec RECORD, OUT user_settings JSON) RETURNS JSON
AS $get_user_settings_from_log$
DECLARE
BEGIN
-- If client_id is not NULL
IF logbook_rec.client_id IS NULL OR logbook_rec.client_id = '' THEN
RAISE WARNING '-> get_user_settings_from_log_fn invalid input %', logbook_rec.client_id;
END IF;
SELECT
json_build_object(
'boat' , v.name,
'recipient', a.first,
'email', v.owner_email,
'logbook_name', l.name) INTO user_settings
FROM auth.accounts a, auth.vessels v, api.metadata m, api.logbook l
WHERE lower(a.email) = lower(v.owner_email)
-- AND lower(v.name) = lower(m.name)
AND m.client_id = l.client_id
AND l.client_id = logbook_rec.client_id
AND l.id = logbook_rec.id;
END;
$get_user_settings_from_log$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.get_user_settings_from_log_fn
IS 'get user settings details from a log entry, initiate for logbook entry notification';
-- Get user settings details from a metadata entry
DROP FUNCTION IF EXISTS get_user_settings_from_metadata;
CREATE OR REPLACE FUNCTION get_user_settings_from_metadata_fn(IN meta_id INTEGER, OUT user_settings JSON) RETURNS JSON
AS $get_user_settings_from_metadata$
DECLARE
BEGIN
-- If meta_id is not NULL
IF meta_id IS NULL OR meta_id < 1 THEN
RAISE WARNING '-> get_user_settings_from_metadata_fn invalid input %', meta_id;
END IF;
SELECT json_build_object(
'boat' , v.name,
'email', v.owner_email) INTO user_settings
FROM auth.vessels v, api.metadata m
WHERE
--lower(v.name) = lower(m.name) AND
m.id = meta_id;
END;
$get_user_settings_from_metadata$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.get_user_settings_from_metadata_fn
IS 'get user settings details from a metadata entry, initiate for monitoring offline,online notification';
-- Get user settings details from a metadata entry
DROP FUNCTION IF EXISTS send_notification_fn;
CREATE OR REPLACE FUNCTION send_notification_fn(IN email_type TEXT, IN notification_rec RECORD) RETURNS JSON
AS $send_notification$
DECLARE
app_settings JSONB;
user_settings JSONB;
BEGIN
-- Gather email and pushover app settings
app_settings := get_app_settings_fn();
-- Gather user settings
--user_settings := '{"email": "' || vessel_rec.owner_email || '", "boat": "' || vessel_rec.name || '}';
--user_settings := get_user_settings_from_clientid_fn();
--user_settings := '{"email": "' || account_rec.email || '", "recipient": "' || account_rec.first || '}';
--user_settings := get_user_settings_from_metadata_fn();
--user_settings := '{"logbook_name": "' || log_name || '"}';
--user_settings := get_user_settings_from_log_fn();
-- Send notification email
PERFORM send_email_py_fn(email_type::TEXT, user_settings::JSONB, app_settings::JSONB);
-- Send notification pushover
--PERFORM send_pushover_py_fn(email_type::TEXT, user_settings::JSONB, app_settings::JSONB);
END;
$send_notification$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.send_notification_fn
IS 'TODO';
DROP FUNCTION IF EXISTS get_user_settings_from_clientid_fn;
CREATE OR REPLACE FUNCTION get_user_settings_from_clientid_fn(
IN clientid TEXT,
IN logbook_name TEXT,
OUT user_settings JSON
) RETURNS JSON
AS $get_user_settings_from_clientid$
DECLARE
BEGIN
-- If client_id is not NULL
IF clientid IS NULL OR clientid = '' THEN
RAISE WARNING '-> get_user_settings_from_clientid_fn invalid input %', clientid;
END IF;
SELECT
json_build_object(
'boat' , v.name,
'recipient', a.first,
'email', v.owner_email ,
'settings', a.preferences,
'pushover_key', a.preferences->'pushover_key',
'badges', a.preferences->'badges',
'logbook_name', logbook_name ) INTO user_settings
FROM auth.accounts a, auth.vessels v, api.metadata m
WHERE lower(a.email) = lower(v.owner_email)
--AND lower(v.name) = lower(m.name)
AND m.mmsi = v.mmsi
AND m.client_id = clientid;
END;
$get_user_settings_from_clientid$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.get_user_settings_from_clientid_fn
IS 'get user settings details from a clientid, initiate for badge entry notification';
---------------------------------------------------------------------------
-- Queue handling
--
-- https://gist.github.com/kissgyorgy/beccba1291de962702ea9c237a900c79
-- https://www.depesz.com/2012/06/13/how-to-send-mail-from-database/
-- Listen/Notify way
--create function new_logbook_entry() returns trigger as $$
--begin
-- perform pg_notify('new_logbook_entry', NEW.id::text);
-- return NEW;
--END;
--$$ language plpgsql;
-- table way
CREATE TABLE IF NOT EXISTS public.process_queue (
id SERIAL PRIMARY KEY,
channel TEXT NOT NULL,
payload TEXT NOT NULL,
stored timestamptz NOT NULL,
processed timestamptz
);
COMMENT ON TABLE
public.process_queue
IS 'process queue for async job';
CREATE INDEX ON public.process_queue (channel);
CREATE INDEX ON public.process_queue (processed);
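-- Example of the table based queue flow, a minimal sketch with illustrative values:
-- a producer stores a job, a cron consumer picks up unprocessed rows and stamps them when done.
-- INSERT INTO process_queue (channel, payload, stored) VALUES ('new_account', 'a@b.io', NOW());
-- SELECT id, channel, payload FROM process_queue WHERE processed IS NULL ORDER BY stored;
-- UPDATE process_queue SET processed = NOW() WHERE id = 1;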
create function new_account_entry_fn() returns trigger as $new_account_entry$
begin
insert into process_queue (channel, payload, stored) values ('new_account', NEW.email, now());
return NEW;
END;
$new_account_entry$ language plpgsql;
create function new_vessel_entry_fn() returns trigger as $new_vessel_entry$
begin
insert into process_queue (channel, payload, stored) values ('new_vessel', NEW.owner_email, now());
return NEW;
END;
$new_vessel_entry$ language plpgsql;
---------------------------------------------------------------------------
-- App settings
-- https://dba.stackexchange.com/questions/27296/storing-application-settings-with-different-datatypes#27297
-- https://stackoverflow.com/questions/6893780/how-to-store-site-wide-settings-in-a-database
-- http://cvs.savannah.gnu.org/viewvc/*checkout*/gnumed/gnumed/gnumed/server/sql/gmconfiguration.sql
CREATE TABLE IF NOT EXISTS public.app_settings (
name TEXT NOT NULL UNIQUE,
value TEXT NOT NULL
);
COMMENT ON TABLE public.app_settings IS 'application settings';
COMMENT ON COLUMN public.app_settings.name IS 'application settings name key';
COMMENT ON COLUMN public.app_settings.value IS 'application settings value';
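-- Example rows, a minimal sketch; values are placeholders, actual keys are set at deployment time:
-- INSERT INTO app_settings (name, value) VALUES
--  ('app.jwt_secret', 'changeme'),
--  ('app.url', 'https://example.com');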
---------------------------------------------------------------------------
-- Badges descriptions
-- TODO add conditions
--
CREATE TABLE IF NOT EXISTS badges(
name TEXT UNIQUE,
description TEXT
);
-- Description
COMMENT ON TABLE
public.badges
IS 'Badges descriptions';
INSERT INTO badges VALUES
('Helmsman',
'Nice work logging your first sail! You are officially a helmsman now!'),
('Wake Maker',
'Yowzers! Welcome to the 15 knot+ club ya speed demon skipper!'),
('Explorer',
'It looks like home is where the helm is. Cheers to 10 days away from home port!'),
('Mooring Pro',
'It takes a lot of skill to "thread that floating needle" but seems like you have mastered mooring with 10 nights on buoy!'),
('Anchormaster',
'Hook, line and sinker, you have this anchoring thing down! 25 days on the hook for you!'),
('Traveler',
'Who needs to fly when one can sail! You are an international sailor. À votre santé!'),
('Stormtrooper',
'Just like the elite defenders of the Empire, here you are, out braving your own hydro-empire in windspeeds above 30kts. Nice work trooper!'),
('Club Alaska',
'Home to the bears, glaciers, midnight sun and high adventure. Welcome to the Club Alaska Captain!'),
('Tropical Traveler',
'Look at you with your suntan, tropical drink and southern latitude!'),
('Aloha Award',
'Ticking off over 2300 NM across the great blue Pacific makes you the rare recipient of the Aloha Award. Well done and Aloha sailor!'),
('Tyee',
'You made it to the Tyee Outstation, the friendliest dock in the Pacific Northwest!'),
-- TODO the sea is big and the world is not limited to the US
('Mediterranean Traveler',
'You made it through the Mediterranean!');
create function public.process_badge_queue_fn() RETURNS void AS $process_badge_queue$
declare
badge_rec record;
badges_arr record;
begin
SELECT json_array_elements_text((a.preferences->'badges')::json) INTO badges_arr FROM auth.accounts a;
FOR badge_rec in
SELECT
name
FROM badges
LOOP
-- TODO check whether the badge has already been awarded to the user
RAISE NOTICE 'Processing badge %', badge_rec.name;
END LOOP;
-- Helmsman
-- select count(l.id) api.logbook l where count(l.id) = 1;
-- Wake Maker
-- select max(l.max_wind_speed) api.logbook l where l.max_wind_speed >= 15;
-- Explorer
-- select sum(m.stay_duration) api.stays s where home_flag is false;
-- Mooring Pro
-- select sum(m.stay_duration) api.stays s where stay_code = 3;
-- Anchormaster
-- select sum(m.stay_duration) api.stays s where stay_code = 2;
-- Traveler
-- todo country to country.
-- Stormtrooper
-- select max(l.max_wind_speed) api.logbook l where l.max_wind_speed >= 30;
-- Club Alaska
-- todo country zone
-- Tropical Traveler
-- todo country zone
-- Aloha Award
-- todo pacific zone
-- TODO the sea is big and the world is not limited to the US
END
$process_badge_queue$ language plpgsql;
---------------------------------------------------------------------------
-- TODO add alert monitoring for Battery
---------------------------------------------------------------------------
-- TODO db-pre-request = "public.check_jwt"
-- Prevent unregistered user or unregistered vessel access
CREATE OR REPLACE FUNCTION public.check_jwt() RETURNS void AS $$
DECLARE
_role name;
_email name;
_mmsi name;
account_rec record;
vessel_rec record;
BEGIN
RAISE WARNING 'jwt %', current_setting('request.jwt.claims', true);
SELECT current_setting('request.jwt.claims', true)::json->>'email' INTO _email;
SELECT current_setting('request.jwt.claims', true)::json->>'role' INTO _role;
--RAISE WARNING 'jwt email %', current_setting('request.jwt.claims', true)::json->>'email';
--RAISE WARNING 'jwt role %', current_setting('request.jwt.claims', true)::json->>'role';
--RAISE WARNING 'cur_user %', current_user;
IF _role = 'user_role' THEN
-- Check the user exist in the accounts table
SELECT * INTO account_rec
FROM auth.accounts
WHERE auth.accounts.email = _email;
IF account_rec.email IS NULL THEN
RAISE EXCEPTION 'Invalid user'
USING HINT = 'Unknown user';
END IF;
ELSIF _role = 'vessel_role' THEN
-- Check the vessel and user exist
SELECT * INTO vessel_rec
FROM auth.vessels, auth.accounts
WHERE auth.vessels.owner_email = _email
AND auth.accounts.email = _email;
IF vessel_rec.owner_email IS NULL THEN
RAISE EXCEPTION 'Invalid vessel'
USING HINT = 'Unknown vessel owner_email';
END IF;
SELECT current_setting('request.jwt.claims', true)::json->>'mmsi' INTO _mmsi;
IF vessel_rec.mmsi IS NULL OR vessel_rec.mmsi <> _mmsi THEN
RAISE EXCEPTION 'Invalid vessel'
USING HINT = 'Unknown vessel mmsi';
END IF;
PERFORM set_config('vessel.mmsi', vessel_rec.mmsi, false);
RAISE WARNING 'vessel.mmsi %', current_setting('vessel.mmsi', false);
ELSIF _role <> 'api_anonymous' THEN
RAISE EXCEPTION 'Invalid role'
USING HINT = 'Stop being so evil and maybe you can log in';
END IF;
END
$$ language plpgsql security definer;
-- Function to trigger cron_jobs using API for tests.
-- Todo limit access and permission
-- Run cron jobs
CREATE OR REPLACE FUNCTION api.run_cron_jobs() RETURNS void AS $$
BEGIN
-- In correct order
perform public.cron_process_new_account_fn();
perform public.cron_process_new_vessel_fn();
perform public.cron_process_monitor_online_fn();
perform public.cron_process_new_logbook_fn();
perform public.cron_process_new_stay_fn();
perform public.cron_process_new_moorage_fn();
perform public.cron_process_monitor_offline_fn();
END
$$ language plpgsql security definer;
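-- Example usage, a minimal sketch to trigger the full processing chain manually for tests:
-- SELECT api.run_cron_jobs();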

View File

@@ -15,41 +15,78 @@ CREATE SCHEMA IF NOT EXISTS auth;
COMMENT ON SCHEMA auth IS 'auth postgrest for users and vessels';
CREATE EXTENSION IF NOT EXISTS "uuid-ossp"; -- provides functions to generate universally unique identifiers (UUIDs)
CREATE EXTENSION IF NOT EXISTS "moddatetime"; -- provides functions for tracking last modification time
CREATE EXTENSION IF NOT EXISTS "citext"; -- provides data type for case-insensitive character strings
CREATE EXTENSION IF NOT EXISTS "pgcrypto"; -- provides cryptographic functions
DROP TABLE IF EXISTS auth.accounts CASCADE;
CREATE TABLE IF NOT EXISTS auth.accounts (
-- id UUID DEFAULT uuid_generate_v4() NOT NULL,
email text primary key check ( email ~* '^.+@.+\..+$' ),
userid UUID NOT NULL UNIQUE DEFAULT uuid_generate_v4(),
user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
email CITEXT primary key check ( email ~* '^.+@.+\..+$' ),
first text not null check (length(pass) < 512),
last text not null check (length(pass) < 512),
pass text not null check (length(pass) < 512),
role name not null check (length(role) < 512),
preferences JSONB null,
created_at TIMESTAMP WITHOUT TIME ZONE default NOW()
preferences JSONB NULL DEFAULT '{"email_notifications":true}',
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
connected_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
CONSTRAINT valid_email CHECK (length(email) > 5), -- Enforce at least 5 char, eg: a@b.io
CONSTRAINT valid_first CHECK (length(first) > 1),
CONSTRAINT valid_last CHECK (length(last) > 1),
CONSTRAINT valid_pass CHECK (length(pass) > 4)
);
-- Preferences jsonb
---- PushOver Notification, bool
---- PushOver user key, varchar
---- Email notification, bool
---- Instagram Handle, varchar
---- Timezone, TZ
---- Unit, bool
---- Preferred Homepage
---- Website, varchar or text
---- Public Profile
---- References to users ?
-- Description
COMMENT ON TABLE
auth.accounts
IS 'users account table';
-- Indexes
CREATE INDEX accounts_role_idx ON auth.accounts (role);
CREATE INDEX accounts_preferences_idx ON auth.accounts using GIN (preferences);
CREATE INDEX accounts_userid_idx ON auth.accounts (userid);
CREATE TRIGGER accounts_moddatetime
BEFORE UPDATE ON auth.accounts
FOR EACH ROW
EXECUTE PROCEDURE moddatetime (updated_at);
-- Description
COMMENT ON TRIGGER accounts_moddatetime
ON auth.accounts
IS 'Automatic update of updated_at on table modification';
DROP TABLE IF EXISTS auth.vessels;
CREATE TABLE IF NOT EXISTS auth.vessels (
-- vesselId UUID PRIMARY KEY REFERENCES auth.accounts(id) ON DELETE RESTRICT,
owner_email TEXT PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
mmsi TEXT UNIQUE,
name TEXT,
-- owner_email TEXT,
pass UUID,
vessel_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
-- user_id REFERENCES auth.accounts(user_id) ON DELETE RESTRICT,
owner_email CITEXT PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
-- mmsi TEXT UNIQUE, -- Should be a numeric range between 100000000 and 800000000.
mmsi NUMERIC UNIQUE, -- MMSI can be optional but if present must be a valid one and unique
name TEXT NOT NULL CHECK (length(name) >= 3 AND length(name) < 512),
-- pass text not null check (length(pass) < 512), -- unused
role name not null check (length(role) < 512),
created_at TIMESTAMP WITHOUT TIME ZONE default NOW()
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW()
-- CONSTRAINT valid_length_mmsi CHECK (length(mmsi) < 10 OR length(mmsi) = 0)
CONSTRAINT valid_range_mmsi CHECK (mmsi > 100000000 AND mmsi < 800000000)
);
-- Description
COMMENT ON TABLE
auth.vessels
IS 'vessels table link to accounts email column';
-- Indexes
CREATE INDEX vessels_role_idx ON auth.vessels (role);
CREATE INDEX vessels_name_idx ON auth.vessels (name);
CREATE INDEX vessels_vesselid_idx ON auth.vessels (vessel_id);
CREATE TRIGGER vessels_moddatetime
BEFORE UPDATE ON auth.vessels
FOR EACH ROW
EXECUTE PROCEDURE moddatetime (updated_at);
-- Description
COMMENT ON TRIGGER vessels_moddatetime
ON auth.vessels
IS 'Automatic update of updated_at on table modification';
create or replace function
auth.check_role_exists() returns trigger as $$
@@ -72,10 +109,13 @@ create constraint trigger ensure_user_role_exists
-- trigger add queue new account
CREATE TRIGGER new_account_entry AFTER INSERT ON auth.accounts
FOR EACH ROW EXECUTE FUNCTION public.new_account_entry_fn();
-- trigger add queue new account OTP validation
CREATE TRIGGER new_account_otp_validation_entry AFTER INSERT ON auth.accounts
FOR EACH ROW EXECUTE FUNCTION public.new_account_otp_validation_entry_fn();
-- trigger check role on vessel
drop trigger if exists ensure_user_role_exists on auth.vessels;
create constraint trigger ensure_user_role_exists
drop trigger if exists ensure_vessel_role_exists on auth.vessels;
create constraint trigger ensure_vessel_role_exists
after insert or update on auth.vessels
for each row
execute procedure auth.check_role_exists();
@@ -83,8 +123,6 @@ create constraint trigger ensure_user_role_exists
CREATE TRIGGER new_vessel_entry AFTER INSERT ON auth.vessels
FOR EACH ROW EXECUTE FUNCTION public.new_vessel_entry_fn();
create extension if not exists pgcrypto;
create or replace function
auth.encrypt_pass() returns trigger as $$
begin
@@ -113,6 +151,7 @@ begin
return (
select role from auth.accounts
where accounts.email = user_role.email
and user_role.pass is NOT NULL
and accounts.pass = crypt(user_role.pass, accounts.pass)
);
end;
@@ -133,6 +172,8 @@ declare
_role name;
result auth.jwt_token;
app_jwt_secret text;
_email_valid boolean := false;
_email text := email;
begin
-- check email and password
select auth.user_role(email, pass) into _role;
@@ -145,6 +186,16 @@ begin
FROM app_settings
WHERE name = 'app.jwt_secret';
-- Check email_valid and generate OTP
SELECT preferences['email_valid'] INTO _email_valid
FROM auth.accounts a
WHERE a.email = _email;
IF _email_valid is null or _email_valid is False THEN
INSERT INTO process_queue (channel, payload, stored)
VALUES ('email_otp', email, now());
END IF;
-- Generate jwt
select jwt.sign(
-- row_to_json(r), ''
-- row_to_json(r)::json, current_setting('app.jwt_secret')::text
@@ -165,12 +216,18 @@ api.signup(in email text, in pass text, in firstname text, in lastname text) ret
declare
_role name;
begin
IF email IS NULL OR email = ''
OR pass IS NULL OR pass = '' THEN
RAISE EXCEPTION 'Invalid input'
USING HINT = 'Check your parameter';
END IF;
-- check email and password
select auth.user_role(email, pass) into _role;
if _role is null then
RAISE WARNING 'Register new account email:[%]', email;
INSERT INTO auth.accounts ( email, pass, first, last, role)
VALUES (email, pass, firstname, lastname, 'user_role');
-- TODO replace preferences default into table rather than trigger
INSERT INTO auth.accounts ( email, pass, first, last, role, preferences)
VALUES (email, pass, firstname, lastname, 'user_role', '{"email_notifications":true}');
end if;
return ( api.login(email, pass) );
end;
@@ -185,21 +242,28 @@ declare
result auth.jwt_token;
app_jwt_secret text;
vessel_rec record;
_vessel_id text;
begin
IF vessel_email IS NULL OR vessel_email = ''
OR vessel_name IS NULL OR vessel_name = '' THEN
RAISE EXCEPTION 'Invalid input'
USING HINT = 'Check your parameter';
END IF;
IF public.isnumeric(vessel_mmsi) IS False THEN
vessel_mmsi = NULL;
END IF;
-- check vessel exist
SELECT * INTO vessel_rec
FROM auth.vessels vessel
WHERE vessel.owner_email = vessel_email
AND vessel.mmsi = vessel_mmsi
AND LOWER(vessel.name) = LOWER(vessel_name);
if vessel_rec is null then
WHERE vessel.owner_email = vessel_email;
IF vessel_rec IS NULL THEN
RAISE WARNING 'Register new vessel name:[%] mmsi:[%] for [%]', vessel_name, vessel_mmsi, vessel_email;
INSERT INTO auth.vessels (owner_email, mmsi, name, role)
VALUES (vessel_email, vessel_mmsi, vessel_name, 'vessel_role');
VALUES (vessel_email, vessel_mmsi::NUMERIC, vessel_name, 'vessel_role') RETURNING vessel_id INTO _vessel_id;
vessel_rec.role := 'vessel_role';
vessel_rec.owner_email = vessel_email;
vessel_rec.mmsi = vessel_mmsi;
end if;
vessel_rec.vessel_id = _vessel_id;
END IF;
-- Get app_jwt_secret
SELECT value INTO app_jwt_secret
@@ -212,7 +276,7 @@ begin
from (
select vessel_rec.role as role,
vessel_rec.owner_email as email,
vessel_rec.mmsi as mmsi
vessel_rec.vessel_id as vid
) r
into result;
return result;

View File

@@ -0,0 +1,252 @@
---------------------------------------------------------------------------
-- signalk db api schema
-- View and Function that have dependency with auth schema
-- List current database
select current_database();
-- connect to the DB
\c signalk
-- Link auth.vessels with api.metadata
ALTER TABLE api.metadata ADD vessel_id TEXT NOT NULL REFERENCES auth.vessels(vessel_id) ON DELETE RESTRICT;
COMMENT ON COLUMN api.metadata.vessel_id IS 'Link auth.vessels with api.metadata';
-- List vessel
--TODO add geojson with position
DROP VIEW IF EXISTS api.vessels_view;
CREATE OR REPLACE VIEW api.vessels_view AS
WITH metadata AS (
SELECT COALESCE(
(SELECT m.time
FROM api.metadata m
WHERE m.vessel_id = current_setting('vessel.id')
)::TEXT ,
NULL ) as last_contact
)
SELECT
v.name as name,
v.mmsi as mmsi,
v.created_at::timestamp(0) as created_at,
m.last_contact as last_contact
FROM auth.vessels v, metadata m
WHERE v.owner_email = current_setting('user.email');
CREATE OR REPLACE VIEW api.vessels2_view AS
-- TODO
SELECT
v.name as name,
v.mmsi as mmsi,
v.created_at::timestamp(0) as created_at,
COALESCE(m.time, null) as last_contact
FROM auth.vessels v
LEFT JOIN api.metadata m ON v.owner_email = current_setting('user.email')
AND m.vessel_id = current_setting('vessel.id');
-- Description
COMMENT ON VIEW
api.vessels2_view
IS 'Expose vessel pending validation to API - TO DELETE?';
DROP VIEW IF EXISTS api.vessel_p_view;
CREATE OR REPLACE VIEW api.vessel_p_view AS
SELECT
v.name as name,
v.mmsi as mmsi,
v.created_at::timestamp(0) as created_at,
null as last_contact
FROM auth.vessels v
WHERE v.owner_email = current_setting('user.email');
-- Description
COMMENT ON VIEW
api.vessel_p_view
IS 'Expose vessel pending validation to API - TO DELETE?';
DROP FUNCTION IF EXISTS public.has_vessel_fn;
CREATE OR REPLACE FUNCTION public.has_vessel_fn() RETURNS BOOLEAN
AS $has_vessel$
DECLARE
BEGIN
-- Check a vessel and user exist
RETURN (
SELECT auth.vessels.name
FROM auth.vessels, auth.accounts
WHERE auth.vessels.owner_email = auth.accounts.email
AND auth.accounts.email = current_setting('user.email')
) IS NOT NULL;
END;
$has_vessel$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
public.has_vessel_fn
IS 'Expose has vessel to API';
-- Or function?
-- TODO Improve: return null until the vessel has sent metadata?
DROP FUNCTION IF EXISTS api.vessel_fn;
CREATE OR REPLACE FUNCTION api.vessel_fn(OUT vessel JSON) RETURNS JSON
AS $vessel$
DECLARE
BEGIN
SELECT
json_build_object(
'name', v.name,
'mmsi', coalesce(v.mmsi, null),
'created_at', v.created_at::timestamp(0),
'last_contact', coalesce(m.time, null),
'geojson', coalesce(ST_AsGeoJSON(geojson_t.*)::json, null)
)
INTO vessel
FROM auth.vessels v, api.metadata m,
( SELECT
t.*
FROM (
( select
current_setting('vessel.name') as name,
time,
courseovergroundtrue,
speedoverground,
anglespeedapparent,
longitude,latitude,
st_makepoint(longitude,latitude) AS geo_point
FROM api.metrics
WHERE
latitude IS NOT NULL
AND longitude IS NOT NULL
AND client_id = current_setting('vessel.client_id', false)
ORDER BY time DESC
)
) AS t
) AS geojson_t
WHERE
m.vessel_id = current_setting('vessel.id')
AND m.vessel_id = v.vessel_id;
--RAISE notice 'api.vessel_fn %', obj;
END;
$vessel$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.vessel_fn
IS 'Expose vessel details to API';
-- Export user settings
DROP FUNCTION IF EXISTS api.settings_fn;
CREATE FUNCTION api.settings_fn(out settings json) RETURNS JSON
AS $user_settings$
BEGIN
select row_to_json(row)::json INTO settings
from (
select email,first,last,preferences,created_at,
INITCAP(CONCAT (LEFT(first, 1), ' ', last)) AS username,
public.has_vessel_fn() as has_vessel
from auth.accounts
where email = current_setting('user.email')
) row;
END;
$user_settings$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.settings_fn
IS 'Expose user settings to API';
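-- Example usage, a minimal sketch; PostgREST exposes this as /rpc/settings_fn, and from SQL it can
-- be called directly once user.email is set (email value is illustrative):
-- SELECT set_config('user.email', 'a@b.io', false);
-- SELECT api.settings_fn();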
DROP FUNCTION IF EXISTS api.versions_fn;
CREATE OR REPLACE FUNCTION api.versions_fn() RETURNS JSON
AS $version$
DECLARE
_appv TEXT;
_sysv TEXT;
BEGIN
SELECT
value, rtrim(substring(version(), 0, 17)) AS sys_version into _appv,_sysv
FROM app_settings
WHERE name = 'app.version';
RETURN json_build_object('api_version', _appv,
'sys_version', _sysv);
END;
$version$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.versions_fn
IS 'Expose as a function, app and system version to API';
DROP VIEW IF EXISTS api.versions_view;
CREATE OR REPLACE VIEW api.versions_view AS
SELECT
value AS api_version,
--version() as sys_version
rtrim(substring(version(), 0, 17)) AS sys_version
FROM app_settings
WHERE name = 'app.version';
-- Description
COMMENT ON VIEW
api.versions_view
IS 'Expose as a table view app and system version to API';
CREATE OR REPLACE FUNCTION public.isnumeric(text) RETURNS BOOLEAN AS
$isnumeric$
DECLARE x NUMERIC;
BEGIN
x = $1::NUMERIC;
RETURN TRUE;
EXCEPTION WHEN others THEN
RETURN FALSE;
END;
$isnumeric$
STRICT
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
public.isnumeric
IS 'Check typeof value is numeric';
CREATE OR REPLACE FUNCTION public.isboolean(text) RETURNS BOOLEAN AS
$isboolean$
DECLARE x BOOLEAN;
BEGIN
x = $1::BOOLEAN;
RETURN TRUE;
EXCEPTION WHEN others THEN
RETURN FALSE;
END;
$isboolean$
STRICT
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
public.isboolean
IS 'Check typeof value is boolean';
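-- Example usage, a minimal sketch:
-- SELECT public.isnumeric('12.5'), public.isnumeric('abc');  -- true, false
-- SELECT public.isboolean('true'), public.isboolean('42');   -- true, false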
DROP FUNCTION IF EXISTS api.update_user_preferences_fn;
-- Update/Add a specific user setting into preferences
CREATE OR REPLACE FUNCTION api.update_user_preferences_fn(IN key TEXT, IN value TEXT) RETURNS BOOLEAN AS
$update_user_preferences$
DECLARE
first_c TEXT := NULL;
last_c TEXT := NULL;
_value TEXT := value;
BEGIN
-- Is it the only way to check variable type?
-- Convert string to jsonb and skip type of json obj or integer or boolean
SELECT SUBSTRING(value, 1, 1),RIGHT(value, 1) INTO first_c,last_c;
IF first_c <> '{' AND last_c <> '}' AND public.isnumeric(value) IS False
AND public.isboolean(value) IS False THEN
--RAISE WARNING '-> first_c:[%] last_c:[%] pg_typeof:[%]', first_c,last_c,pg_typeof(value);
_value := to_jsonb(value)::jsonb;
END IF;
--RAISE WARNING '-> update_user_preferences_fn update preferences for user [%]', current_setting('request.jwt.claims', true)::json->>'email';
UPDATE auth.accounts
SET preferences =
jsonb_set(preferences::jsonb, key::text[], _value::jsonb)
WHERE
email = current_setting('user.email', true);
IF FOUND THEN
--RAISE WARNING '-> update_user_preferences_fn True';
RETURN True;
END IF;
--RAISE WARNING '-> update_user_preferences_fn False';
RETURN False;
END;
$update_user_preferences$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.update_user_preferences_fn
IS 'Update user preferences jsonb key pair value';
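-- Example usage, a minimal sketch; the key uses the jsonb_set path syntax, the value is illustrative:
-- SELECT api.update_user_preferences_fn('{email_notifications}'::TEXT, 'false'::TEXT);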

View File

@@ -0,0 +1,501 @@
---------------------------------------------------------------------------
-- signalk db auth schema
-- View and Function that have dependency with auth schema
-- List current database
select current_database();
-- connect to the DB
\c signalk
DROP TABLE IF EXISTS auth.otp;
CREATE TABLE IF NOT EXISTS auth.otp (
-- update email type to CITEXT, https://www.postgresql.org/docs/current/citext.html
user_email CITEXT NOT NULL PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
otp_pass VARCHAR(10) NOT NULL,
otp_timestamp TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW(),
otp_tries SMALLINT NOT NULL DEFAULT '0'
);
-- Description
COMMENT ON TABLE
auth.otp
IS 'Stores temporary otp codes, valid for up to 15 minutes';
-- Indexes
CREATE INDEX otp_pass_idx ON auth.otp (otp_pass);
CREATE INDEX otp_user_email_idx ON auth.otp (user_email);
DROP FUNCTION IF EXISTS public.generate_uid_fn;
CREATE OR REPLACE FUNCTION public.generate_uid_fn(size INT) RETURNS TEXT
AS $generate_uid_fn$
DECLARE
characters TEXT := '0123456789';
bytes BYTEA := gen_random_bytes(size);
l INT := length(characters);
i INT := 0;
output TEXT := '';
BEGIN
WHILE i < size LOOP
output := output || substr(characters, get_byte(bytes, i) % l + 1, 1);
i := i + 1;
END LOOP;
RETURN output;
END;
$generate_uid_fn$ LANGUAGE plpgsql VOLATILE;
-- Description
COMMENT ON FUNCTION
public.generate_uid_fn
IS 'Generate a random string of digits of the given size';
-- generate an OTP code by email
-- Expose as an API endpoint
DROP FUNCTION IF EXISTS api.generate_otp_fn;
CREATE OR REPLACE FUNCTION api.generate_otp_fn(IN email TEXT) RETURNS TEXT
AS $generate_otp$
DECLARE
_email CITEXT := email;
_email_check TEXT := NULL;
_otp_pass VARCHAR(10) := NULL;
BEGIN
IF email IS NULL OR _email IS NULL OR _email = '' THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
SELECT lower(a.email) INTO _email_check FROM auth.accounts a WHERE a.email = _email;
IF _email_check IS NULL THEN
RETURN NULL;
END IF;
--SELECT substr(gen_random_uuid()::text, 1, 6) INTO otp_pass;
SELECT generate_uid_fn(6) INTO _otp_pass;
-- upsert - Insert or update otp code on conflict
INSERT INTO auth.otp (user_email, otp_pass)
VALUES (_email_check, _otp_pass)
ON CONFLICT (user_email) DO UPDATE SET otp_pass = _otp_pass, otp_timestamp = NOW();
RETURN _otp_pass;
END;
$generate_otp$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.generate_otp_fn
IS 'Generate otp code';
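-- Example usage, a minimal sketch with an illustrative email; returns a 6 digit code,
-- or NULL when the account does not exist:
-- SELECT api.generate_otp_fn('a@b.io');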
DROP FUNCTION IF EXISTS api.recover;
CREATE OR REPLACE FUNCTION api.recover(in email text) returns BOOLEAN
AS $recover_fn$
DECLARE
_email CITEXT := email;
_user_id TEXT := NULL;
otp_pass VARCHAR(10) := NULL;
_reset_qs TEXT := NULL;
user_settings jsonb := NULL;
BEGIN
IF _email IS NULL OR _email = '' THEN
RAISE EXCEPTION 'Invalid input'
USING HINT = 'Check your parameter';
END IF;
SELECT user_id INTO _user_id FROM auth.accounts a WHERE a.email = _email;
IF NOT FOUND THEN
RAISE EXCEPTION 'Invalid input'
USING HINT = 'Check your parameter';
END IF;
-- OTP Code
SELECT generate_uid_fn(6) INTO otp_pass;
INSERT INTO auth.otp (user_email, otp_pass) VALUES (_email, otp_pass);
SELECT CONCAT('uuid=', _user_id, '&token=', otp_pass) INTO _reset_qs;
-- Send email/notifications
user_settings := '{"email": "' || _email || '", "reset_qs": "' || _reset_qs || '"}';
PERFORM send_notification_fn('email_reset'::TEXT, user_settings::JSONB);
RETURN TRUE;
END;
$recover_fn$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.recover
IS 'Send recover password email to reset password';
DROP FUNCTION IF EXISTS api.reset;
CREATE OR REPLACE FUNCTION api.reset(in pass text, in token text, in uuid text) returns BOOLEAN
AS $reset_fn$
DECLARE
_email TEXT := NULL;
BEGIN
-- Check parameters
IF token IS NULL OR uuid IS NULL OR pass IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- Verify token
SELECT auth.verify_otp_fn(token) INTO _email;
IF _email IS NOT NULL THEN
SELECT email INTO _email FROM auth.accounts WHERE user_id = uuid;
IF _email IS NULL THEN
RETURN False;
END IF;
-- Set user new password
UPDATE auth.accounts
SET pass = pass
WHERE email = _email;
-- Enable email_validation into user preferences
PERFORM api.update_user_preferences_fn('{email_valid}'::TEXT, True::TEXT);
-- Enable email_notifications
PERFORM api.update_user_preferences_fn('{email_notifications}'::TEXT, True::TEXT);
-- Delete token when validated
DELETE FROM auth.otp
WHERE user_email = _email;
RETURN True;
END IF;
RETURN False;
END;
$reset_fn$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.reset
IS 'Reset user password base on otp code and user_id send by email from api.recover';
DROP FUNCTION IF EXISTS auth.verify_otp_fn;
CREATE OR REPLACE FUNCTION auth.verify_otp_fn(IN token TEXT) RETURNS TEXT
AS $verify_otp$
DECLARE
email TEXT := NULL;
BEGIN
IF token IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- Token is valid for 15 minutes
SELECT user_email INTO email
FROM auth.otp
WHERE otp_timestamp > NOW() AT TIME ZONE 'UTC' - INTERVAL '15 MINUTES'
AND otp_tries < 3
AND otp_pass = token;
RETURN email;
END;
$verify_otp$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
auth.verify_otp_fn
IS 'Verify OTP';
-- CRON to purge OTP older than 15 minutes
DROP FUNCTION IF EXISTS public.cron_process_prune_otp_fn;
CREATE OR REPLACE FUNCTION public.cron_process_prune_otp_fn() RETURNS void
AS $$
DECLARE
otp_rec record;
BEGIN
-- Purge OTP older than 15 minutes
RAISE NOTICE 'cron_process_prune_otp_fn';
FOR otp_rec in
SELECT *
FROM auth.otp
WHERE otp_timestamp < NOW() AT TIME ZONE 'UTC' - INTERVAL '15 MINUTES'
ORDER BY otp_timestamp desc
LOOP
RAISE NOTICE '-> cron_process_prune_otp_fn deleting expired otp for user [%]', otp_rec.user_email;
-- remove entry
DELETE FROM auth.otp
WHERE user_email = otp_rec.user_email;
RAISE NOTICE '-> cron_process_prune_otp_fn deleted expired otp for user [%]', otp_rec.user_email;
END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
public.cron_process_prune_otp_fn
IS 'init by pg_cron to purge OTP tokens older than 15 minutes';
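-- Example, a possible pg_cron schedule for this function, assuming the pg_cron extension is installed;
-- job name and interval are illustrative:
-- SELECT cron.schedule('prune_otp', '*/15 * * * *', 'SELECT public.cron_process_prune_otp_fn()');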
-- Email OTP validation
-- Expose as an API endpoint
DROP FUNCTION IF EXISTS api.email_fn;
CREATE OR REPLACE FUNCTION api.email_fn(IN token TEXT) RETURNS BOOLEAN
AS $email_validation$
DECLARE
_email TEXT := NULL;
BEGIN
-- Check parameters
IF token IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- Verify token
SELECT auth.verify_otp_fn(token) INTO _email;
IF _email IS NOT NULL THEN
-- Check the email JWT token match the OTP email
IF current_setting('user.email', true) <> _email THEN
RETURN False;
END IF;
-- Set user email into env to allow RLS update
--PERFORM set_config('user.email', _email, false);
-- Enable email_validation into user preferences
PERFORM api.update_user_preferences_fn('{email_valid}'::TEXT, True::TEXT);
-- Enable email_notifications
PERFORM api.update_user_preferences_fn('{email_notifications}'::TEXT, True::TEXT);
-- Delete token when validated
DELETE FROM auth.otp
WHERE user_email = _email;
-- Disable to reduce spam
-- Send Notification async
--INSERT INTO process_queue (channel, payload, stored)
-- VALUES ('email_valid', _email, now());
RETURN True;
END IF;
RETURN False;
END;
$email_validation$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.email_fn
IS 'Store email_valid into user preferences if valid token/otp';
-- Pushover Subscription API
-- Web-Based Subscription Process
-- https://pushover.net/api/subscriptions#web
-- Expose as an API endpoint
CREATE OR REPLACE FUNCTION api.pushover_subscribe_link_fn(OUT pushover_link JSON) RETURNS JSON
AS $pushover_subscribe_link$
DECLARE
app_url text;
otp_code text;
pushover_app_url text;
success text;
failure text;
email text := current_setting('user.email', true);
BEGIN
--https://pushover.net/api/subscriptions#web
-- "https://pushover.net/subscribe/PostgSail-23uvrho1d5y6n3e"
-- + "?success=" + urlencode("https://beta.openplotter.cloud/api/rpc/pushover_fn?token=" + generate_otp_fn({{email}}))
-- + "&failure=" + urlencode("https://beta.openplotter.cloud/settings");
-- get app_url
SELECT
value INTO app_url
FROM
public.app_settings
WHERE
name = 'app.url';
-- get pushover url subscribe
SELECT
value INTO pushover_app_url
FROM
public.app_settings
WHERE
name = 'app.pushover_app_url';
-- Generate OTP
otp_code := api.generate_otp_fn(email);
-- On success redirect to API endpoint
SELECT CONCAT(
'?success=',
public.urlescape_py_fn(CONCAT(app_url,'/pushover?token=')),
otp_code)
INTO success;
-- On failure redirect to user settings, where the user came from
SELECT CONCAT(
'&failure=',
public.urlescape_py_fn(CONCAT(app_url,'/profile'))
) INTO failure;
SELECT json_build_object('link', CONCAT(pushover_app_url, success, failure)) INTO pushover_link;
END;
$pushover_subscribe_link$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.pushover_subscribe_link_fn
IS 'Generate Pushover subscription link';
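-- Example usage, a minimal sketch; returns a JSON object with the subscription URL built from
-- app settings and a fresh OTP for the current user:
-- SELECT api.pushover_subscribe_link_fn();
-- {"link": "https://pushover.net/subscribe/...?success=...&failure=..."}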
-- Confirm Pushover Subscription
-- Web-Based Subscription Process
-- https://pushover.net/api/subscriptions#web
-- Expose as an API endpoint
DROP FUNCTION IF EXISTS api.pushover_fn;
CREATE OR REPLACE FUNCTION api.pushover_fn(IN token TEXT, IN pushover_user_key TEXT) RETURNS BOOLEAN
AS $pushover$
DECLARE
_email TEXT := NULL;
BEGIN
-- Check parameters
IF token IS NULL OR pushover_user_key IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- Verify token
SELECT auth.verify_otp_fn(token) INTO _email;
IF _email IS NOT NULL THEN
-- Set user email into env to allow RLS update
PERFORM set_config('user.email', _email, false);
-- Add pushover_user_key into user preferences
PERFORM api.update_user_preferences_fn('{pushover_user_key}'::TEXT, pushover_user_key::TEXT);
-- Enable phone_notifications
PERFORM api.update_user_preferences_fn('{phone_notifications}'::TEXT, True::TEXT);
-- Delete token when validated
DELETE FROM auth.otp
WHERE user_email = _email;
-- Disable Notification because
-- Pushover sends a notification on success with the description of the app
--
-- Send Notification async
--INSERT INTO process_queue (channel, payload, stored)
-- VALUES ('pushover_valid', _email, now());
RETURN True;
END IF;
RETURN False;
END;
$pushover$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.pushover_fn
IS 'Confirm Pushover Subscription and store pushover_user_key into user preferences if valid token/otp';
-- Telegram OTP Validation
-- Expose as an API endpoint
DROP FUNCTION IF EXISTS api.telegram_fn;
CREATE OR REPLACE FUNCTION api.telegram_fn(IN token TEXT, IN telegram_obj TEXT) RETURNS BOOLEAN
AS $telegram$
DECLARE
_email TEXT := NULL;
_updated BOOLEAN := False;
user_settings jsonb;
BEGIN
-- Check parameters
IF token IS NULL OR telegram_obj IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- Verify token
SELECT auth.verify_otp_fn(token) INTO _email;
IF _email IS NOT NULL THEN
-- Set user email into env to allow RLS update
PERFORM set_config('user.email', _email, false);
-- Add telegram obj into user preferences
SELECT api.update_user_preferences_fn('{telegram}'::TEXT, telegram_obj::TEXT) INTO _updated;
-- Delete token when validated
DELETE FROM auth.otp
WHERE user_email = _email;
-- Send Notification async
INSERT INTO process_queue (channel, payload, stored)
VALUES ('telegram_valid', _email, now());
RETURN _updated;
END IF;
RETURN False;
END;
$telegram$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.telegram_fn
IS 'Confirm telegram user and store telegram chat details into user preferences if valid token/otp';
-- Telegram user validation
DROP FUNCTION IF EXISTS auth.telegram_user_exists_fn;
CREATE OR REPLACE FUNCTION auth.telegram_user_exists_fn(IN email TEXT, IN user_id BIGINT) RETURNS BOOLEAN
AS $telegram_user_exists$
DECLARE
_email CITEXT := email;
_user_id BIGINT := user_id;
BEGIN
IF _email IS NULL OR _user_id IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- Does the user have a matching telegram obj
SELECT preferences->'telegram'->'from'->'id' INTO _user_id
FROM auth.accounts a
WHERE a.email = _email
AND cast(preferences->'telegram'->'from'->'id' as BIGINT) = _user_id::BIGINT;
IF FOUND THEN
RETURN TRUE;
END IF;
RETURN FALSE;
END;
$telegram_user_exists$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
auth.telegram_user_exists_fn
IS 'Check if user exist based on email and telegram obj preferences';
-- Telegram otp validation
DROP FUNCTION IF EXISTS auth.telegram_otp_fn;
CREATE OR REPLACE FUNCTION auth.telegram_otp_fn(IN email TEXT, OUT otp_code TEXT) RETURNS TEXT
AS $telegram_otp$
DECLARE
_email CITEXT := email;
user_settings jsonb := NULL;
BEGIN
IF _email IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- Generate token
otp_code := api.generate_otp_fn(_email);
IF otp_code IS NOT NULL THEN
-- Set user email into env to allow RLS update
PERFORM set_config('user.email', _email, false);
-- Send Notification
user_settings := '{"email": "' || _email || '", "otp_code": "' || otp_code || '"}';
PERFORM send_notification_fn('telegram_otp'::TEXT, user_settings::JSONB);
END IF;
END;
$telegram_otp$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
auth.telegram_otp_fn
IS 'TODO';
-- Telegram bot JWT auth
-- Expose as an API endpoint
-- Avoid sending a password so use email and chat_id as key pair
DROP FUNCTION IF EXISTS api.bot(text,BIGINT);
CREATE OR REPLACE FUNCTION api.bot(IN email TEXT, IN user_id BIGINT) RETURNS auth.jwt_token
AS $telegram_bot$
DECLARE
_email TEXT := email;
_user_id BIGINT := user_id;
_exist BOOLEAN := False;
result auth.jwt_token;
app_jwt_secret text;
BEGIN
IF _email IS NULL OR _user_id IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- check email and _user_id
select auth.telegram_user_exists_fn(_email, _user_id) into _exist;
if _exist is null or _exist <> True then
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
end if;
-- Get app_jwt_secret
SELECT value INTO app_jwt_secret
FROM app_settings
WHERE name = 'app.jwt_secret';
-- Generate JWT token, force user_role
select jwt.sign(
row_to_json(r)::json, app_jwt_secret
) as token
from (
select 'user_role' as role,
(select lower(_email)) as email,
extract(epoch from now())::integer + 60*60 as exp
) r
into result;
return result;
END;
$telegram_bot$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.bot
IS 'Generate a JWT user_role token from email for telegram bot';
-- Telegram chat_id Session validation
DROP FUNCTION IF EXISTS auth.telegram_session_exists_fn;
CREATE OR REPLACE FUNCTION auth.telegram_session_exists_fn(IN user_id BIGINT) RETURNS BOOLEAN
AS $telegram_session_exists$
DECLARE
_id TEXT := NULL;
_user_id BIGINT := user_id;
BEGIN
IF _user_id IS NULL THEN
RAISE EXCEPTION 'invalid input' USING HINT = 'check your parameter';
END IF;
-- Find user email based on telegram chat_id
SELECT preferences->'telegram'->'from'->'id' INTO _id
FROM auth.accounts a
WHERE cast(preferences->'telegram'->'from'->'id' as BIGINT) = _user_id::BIGINT;
IF NOT FOUND then
RETURN False;
END IF;
RETURN True;
END;
$telegram_session_exists$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
auth.telegram_session_exists_fn
IS 'Check if session/user exist based on user_id';

View File

@@ -1,156 +0,0 @@
---------------------------------------------------------------------------
-- signalk db permissions
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
---------------------------------------------------------------------------
-- Permissions roles
-- Users Sharing Role
-- https://postgrest.org/en/stable/auth.html#web-users-sharing-role
--
-- api_anonymous
-- nologin
-- api_anonymous role in the database with which to execute anonymous web requests.
-- api_anonymous allows JWT token generation with an expiration time via function api.login() from auth.accounts table
create role api_anonymous nologin noinherit;
grant usage on schema api to api_anonymous;
-- explicitly limit EXECUTE privileges to only signup and login functions
grant execute on function api.login(text,text) to api_anonymous;
grant execute on function api.signup(text,text,text,text) to api_anonymous;
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
grant execute on function public.check_jwt() to api_anonymous;
-- authenticator
-- login role
create role authenticator noinherit login password 'mysecretpassword';
grant api_anonymous to authenticator;
-- Grafana user and role with login, read-only
CREATE ROLE grafana WITH LOGIN PASSWORD 'mysecretpassword';
GRANT USAGE ON SCHEMA api TO grafana;
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO grafana;
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO grafana;
-- User:
-- nologin
-- read-only for all and Read-Write on logbook, stays and moorage except for name COLUMN ?
CREATE ROLE user_role WITH NOLOGIN;
GRANT user_role to authenticator;
GRANT USAGE ON SCHEMA api TO user_role;
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO user_role;
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO user_role;
-- Allow update on table for notes
GRANT UPDATE ON TABLE api.logbook,api.moorages,api.stays TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
GRANT EXECUTE ON FUNCTION public.check_jwt() to user_role;
-- Allow read on VIEWS
GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO user_role;
-- Vessel:
-- nologin
-- insert-update-only for api.metrics,api.logbook,api.moorages,api.stays,api.metadata and sequences and process_queue
CREATE ROLE vessel_role WITH NOLOGIN;
GRANT vessel_role to authenticator;
GRANT USAGE ON SCHEMA api TO vessel_role;
GRANT INSERT, UPDATE, SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO vessel_role;
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO vessel_role;
GRANT INSERT ON TABLE public.process_queue TO vessel_role;
GRANT USAGE, SELECT ON SEQUENCE public.process_queue_id_seq TO vessel_role;
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
GRANT EXECUTE ON FUNCTION public.check_jwt() to vessel_role;
-- TODO: currently cron function are run as super user, switch to scheduler role.
-- Scheduler read-only all, and write on logbook, stays, moorage, process_queue
-- Crons
CREATE ROLE scheduler WITH NOLOGIN;
GRANT scheduler to authenticator;
GRANT EXECUTE ON FUNCTION api.run_cron_jobs() to scheduler;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO scheduler;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO scheduler;
GRANT SELECT,UPDATE ON TABLE process_queue TO scheduler;
GRANT USAGE ON SCHEMA auth TO scheduler;
GRANT SELECT ON ALL TABLES IN SCHEMA auth TO scheduler;
---------------------------------------------------------------------------
-- Security policy
-- ROW LEVEL Security policy
ALTER TABLE api.metadata ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.metadata TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.metadata TO vessel_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.metadata TO user_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
ALTER TABLE api.metrics ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.metrics TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.metrics TO vessel_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.metrics TO user_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
-- Be sure to enable row level security on the table
ALTER TABLE api.logbook ENABLE ROW LEVEL SECURITY;
-- Create policies
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.logbook TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.logbook TO vessel_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.logbook TO user_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
-- Be sure to enable row level security on the table
ALTER TABLE api.stays ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.stays TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.stays TO vessel_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.stays TO user_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
-- Be sure to enable row level security on the table
ALTER TABLE api.moorages ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.moorages TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.moorages TO vessel_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.moorages TO user_role
USING (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%')
WITH CHECK (client_id LIKE '%' || current_setting('vessel.mmsi', false) || '%');
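-- Example (illustrative): the LIKE pattern above matches any client_id that contains the
-- vessel MMSI from the session setting; the MMSI value below is a placeholder.
--SELECT 'vessels.urn:mrn:imo:mmsi:368207620' LIKE '%' || '368207620' || '%'; -- returns true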

View File

@@ -0,0 +1,318 @@
---------------------------------------------------------------------------
-- signalk db permissions
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
---------------------------------------------------------------------------
-- Permissions roles
-- Users Sharing Role
-- https://postgrest.org/en/stable/auth.html#web-users-sharing-role
--
-- api_anonymous
-- nologin
-- api_anonymous is the database role used to execute anonymous web requests, limited to 10 connections
-- api_anonymous can generate a JWT token with an expiration time via the api.login() function, validated against the auth.accounts table
create role api_anonymous WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOLOGIN NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10;
comment on role api_anonymous is
'The role that PostgREST will switch to when a user is not authenticated.';
-- Limit to 10 connections
--alter user api_anonymous connection limit 10;
grant usage on schema api to api_anonymous;
-- explicitly limit EXECUTE privileges to only the login, signup, recover and reset functions
grant execute on function api.login(text,text) to api_anonymous;
grant execute on function api.signup(text,text,text,text) to api_anonymous;
grant execute on function api.recover(text) to api_anonymous;
grant execute on function api.reset(text,text,text) to api_anonymous;
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
grant execute on function public.check_jwt() to api_anonymous;
-- explicitly limit EXECUTE privileges to only telegram bot auth function
grant execute on function api.bot(text,bigint) to api_anonymous;
-- explicitly limit EXECUTE privileges to only the email, pushover and telegram subscription validation functions
grant execute on function api.email_fn(text) to api_anonymous;
grant execute on function api.pushover_fn(text,text) to api_anonymous;
grant execute on function api.telegram_fn(text,text) to api_anonymous;
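-- Example (illustrative sketch): verify the anonymous grants from a psql session run as a role
-- that is a member of api_anonymous; the email and password below are placeholders.
--SET ROLE api_anonymous;
--SELECT * FROM api.metrics;                        -- expected to fail: permission denied
--SELECT api.login('demo@example.com', 'password'); -- allowed, returns a JWT if the account exists
--RESET ROLE;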
-- authenticator
-- login role
create role authenticator NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT login password 'mysecretpassword';
comment on role authenticator is
'Role that serves as an entry-point for API servers such as PostgREST.';
grant api_anonymous to authenticator;
-- Grafana user and role with login, read-only, limit 10 connections
CREATE ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10 LOGIN PASSWORD 'mysecretpassword';
comment on role grafana is
'Role that grafana will use for authenticated web users.';
GRANT USAGE ON SCHEMA api TO grafana;
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO grafana;
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO grafana;
-- Allow read on VIEWS
GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO grafana;
--GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view,api.vessels_view TO grafana;
-- Grafana_auth authenticator user and role with login, read-only on auth.accounts, limit 10 connections
CREATE ROLE grafana_auth WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10 LOGIN PASSWORD 'mysecretpassword';
comment on role grafana_auth is
'Role used by the grafana auth proxy, authenticated via apache.';
GRANT USAGE ON SCHEMA auth TO grafana_auth;
--GRANT USAGE, SELECT ON SEQUENCE auth.accounts_pkey TO grafana_auth;
GRANT SELECT ON TABLE auth.accounts TO grafana_auth;
-- GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana_auth;
GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana_auth;
-- User:
-- nologin, web api only
-- read-only for all, and read-write on logbook, stays and moorages limited to specific (name, notes) columns
CREATE ROLE user_role WITH NOLOGIN NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION;
comment on role user_role is
'Role that PostgREST will switch to for authenticated web users.';
GRANT user_role to authenticator;
GRANT USAGE ON SCHEMA api TO user_role;
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO user_role;
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO user_role;
-- To check?
GRANT SELECT ON TABLE auth.vessels TO user_role;
-- Allow users to update certain columns
GRANT UPDATE (name, notes) ON api.logbook TO user_role;
GRANT UPDATE (name, notes, stay_code) ON api.stays TO user_role;
GRANT UPDATE (name, notes, stay_code, home_flag) ON api.moorages TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
--GRANT EXECUTE ON FUNCTION public.check_jwt() TO user_role;
-- Allow others functions or allow all in public !! ??
--GRANT EXECUTE ON FUNCTION api.export_logbook_geojson_linestring_fn(int4) TO user_role;
--GRANT EXECUTE ON FUNCTION public.st_asgeojson(text) TO user_role;
--GRANT EXECUTE ON FUNCTION public.geography_eq(geography, geography) TO user_role;
-- TODO should not be needed !! ??
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
-- pg15 feature security_invoker=true,security_barrier=true
GRANT SELECT ON TABLE api.logs_view TO user_role;
GRANT SELECT ON TABLE api.log_view TO user_role;
GRANT SELECT ON TABLE api.stays_view TO user_role;
GRANT SELECT ON TABLE api.stay_view TO user_role;
GRANT SELECT ON TABLE api.moorages_view TO user_role;
GRANT SELECT ON TABLE api.monitoring_view TO user_role;
GRANT SELECT ON TABLE api.total_info_view TO user_role;
GRANT SELECT ON TABLE api.stats_logs_view TO user_role;
GRANT SELECT ON TABLE api.stats_moorages_view TO user_role;
-- Update view ownership so views run with user_role security, as used by the web user.
-- Web listing
--ALTER VIEW api.stays_view OWNER TO user_role;
--ALTER VIEW api.moorages_view OWNER TO user_role;
--ALTER VIEW api.logs_view OWNER TO user_role;
--ALTER VIEW api.vessel_p_view OWNER TO user_role;
--ALTER VIEW api.monitoring_view OWNER TO user_role;
-- Remove all permissions except select
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.stays_view FROM user_role;
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.moorages_view FROM user_role;
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.logs_view FROM user_role;
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.monitoring_view FROM user_role;
-- Allow read and update on VIEWS
-- Web detail view
--ALTER VIEW api.log_view OWNER TO user_role;
-- Remove all permissions except select and update
--REVOKE TRUNCATE, DELETE, TRIGGER, INSERT ON TABLE api.log_view FROM user_role;
ALTER VIEW api.vessels_view OWNER TO user_role;
-- Remove all permissions except select and update
REVOKE TRUNCATE, DELETE, TRIGGER, INSERT ON TABLE api.vessels_view FROM user_role;
ALTER VIEW api.vessel_p_view OWNER TO user_role;
-- Remove all permissions except select and update
REVOKE TRUNCATE, DELETE, TRIGGER, INSERT ON TABLE api.vessel_p_view FROM user_role;
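-- Example (illustrative): the column-level grants above restrict which columns user_role may change;
-- the id value is a placeholder and 'distance' stands in for any non-granted column.
--SET ROLE user_role;
--UPDATE api.logbook SET name = 'Morning sail' WHERE id = 1; -- allowed, name is granted
--UPDATE api.logbook SET distance = 0 WHERE id = 1;          -- expected to fail: column not granted
--RESET ROLE;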
-- Vessel:
-- nologin
-- insert/update only on api.metrics,api.logbook,api.moorages,api.stays,api.metadata, their sequences, and public.process_queue
CREATE ROLE vessel_role WITH NOLOGIN NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION;
comment on role vessel_role is
'Role that PostgREST will switch to for authenticated web vessels.';
GRANT vessel_role to authenticator;
GRANT USAGE ON SCHEMA api TO vessel_role;
GRANT INSERT, UPDATE, SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO vessel_role;
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO vessel_role;
GRANT INSERT ON TABLE public.process_queue TO vessel_role;
GRANT USAGE, SELECT ON SEQUENCE public.process_queue_id_seq TO vessel_role;
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
GRANT EXECUTE ON FUNCTION public.check_jwt() to vessel_role;
-- explicitly limit EXECUTE privileges to api.metrics triggers function
GRANT EXECUTE ON FUNCTION public.trip_in_progress_fn(text) to vessel_role;
GRANT EXECUTE ON FUNCTION public.stay_in_progress_fn(text) to vessel_role;
-- hypertable get_partition_hash ?!?
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA _timescaledb_internal TO vessel_role;
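-- Example (illustrative): vessel_role may write telemetry but holds no DELETE privilege.
--SET ROLE vessel_role;
--DELETE FROM api.metrics; -- expected to fail: permission denied
--RESET ROLE;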
--- Scheduler:
-- TODO: currently cron functions run as superuser, switch to the scheduler role.
-- Scheduler: read-only on all, and write on api.logbook, api.stays, api.moorages, public.process_queue, auth.otp
-- Crons
--CREATE ROLE scheduler WITH NOLOGIN NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION;
CREATE ROLE scheduler WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10 LOGIN;
comment on role scheduler is
'Role that pg_cron will use to process logbook, moorages, stays, monitoring and notifications.';
GRANT scheduler to authenticator;
GRANT USAGE ON SCHEMA api TO scheduler;
GRANT SELECT ON TABLE api.metrics,api.metadata TO scheduler;
GRANT INSERT, UPDATE, SELECT ON TABLE api.logbook,api.moorages,api.stays TO scheduler;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO scheduler;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO scheduler;
GRANT SELECT,UPDATE ON TABLE public.process_queue TO scheduler;
GRANT USAGE ON SCHEMA auth TO scheduler;
GRANT SELECT ON ALL TABLES IN SCHEMA auth TO scheduler;
GRANT SELECT,UPDATE,DELETE ON TABLE auth.otp TO scheduler;
---------------------------------------------------------------------------
-- Security policy
-- ROW LEVEL Security policy
ALTER TABLE api.metadata ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.metadata TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.metadata TO vessel_role
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.metadata TO user_role
USING (client_id = current_setting('vessel.client_id', true))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow scheduler to update and select based on the client_id
CREATE POLICY api_scheduler_role ON api.metadata TO scheduler
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow grafana to select based on the client_id
CREATE POLICY grafana_role ON api.metadata TO grafana
USING (client_id = client_id)
WITH CHECK (false);
ALTER TABLE api.metrics ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.metrics TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.metrics TO vessel_role
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.metrics TO user_role
USING (client_id = current_setting('vessel.client_id', true))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow scheduler to update and select based on the client_id
CREATE POLICY api_scheduler_role ON api.metrics TO scheduler
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow grafana to select based on the client_id
CREATE POLICY grafana_role ON api.metrics TO grafana
USING (client_id = client_id)
WITH CHECK (false);
-- Be sure to enable row level security on the table
ALTER TABLE api.logbook ENABLE ROW LEVEL SECURITY;
-- Create policies
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.logbook TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.logbook TO vessel_role
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.logbook TO user_role
USING (client_id = current_setting('vessel.client_id', true))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow scheduler to update and select based on the client_id
CREATE POLICY api_scheduler_role ON api.logbook TO scheduler
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
CREATE POLICY grafana_role ON api.logbook TO grafana
USING (client_id = client_id)
WITH CHECK (false);
-- Be sure to enable row level security on the table
ALTER TABLE api.stays ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.stays TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.stays TO vessel_role
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.stays TO user_role
USING (client_id = current_setting('vessel.client_id', true))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow scheduler to update and select based on the client_id
CREATE POLICY api_scheduler_role ON api.stays TO scheduler
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow grafana to select based on the client_id
CREATE POLICY grafana_role ON api.stays TO grafana
USING (client_id = client_id)
WITH CHECK (false);
-- Be sure to enable row level security on the table
ALTER TABLE api.moorages ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON api.moorages TO current_user
USING (true)
WITH CHECK (true);
-- Allow vessel_role to insert and select on their own records
CREATE POLICY api_vessel_role ON api.moorages TO vessel_role
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON api.moorages TO user_role
USING (client_id = current_setting('vessel.client_id', true))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow scheduler to update and select based on the client_id
CREATE POLICY api_scheduler_role ON api.moorages TO scheduler
USING (client_id = current_setting('vessel.client_id', false))
WITH CHECK (client_id = current_setting('vessel.client_id', false));
-- Allow grafana to select based on the client_id
CREATE POLICY grafana_role ON api.moorages TO grafana
USING (client_id = client_id)
WITH CHECK (false);
-- Be sure to enable row level security on the table
ALTER TABLE auth.vessels ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON auth.vessels TO current_user
USING (true)
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON auth.vessels TO user_role
USING (vessel_id = current_setting('vessel.id', true)
AND owner_email = current_setting('user.email', true)
)
WITH CHECK (vessel_id = current_setting('vessel.id', true)
AND owner_email = current_setting('user.email', true)
);
-- Be sure to enable row level security on the table
ALTER TABLE auth.accounts ENABLE ROW LEVEL SECURITY;
-- Administrator can see all rows and add any rows
CREATE POLICY admin_all ON auth.accounts TO current_user
USING (true)
WITH CHECK (true);
-- Allow user_role to update and select on their own records
CREATE POLICY api_user_role ON auth.accounts TO user_role
USING (email = current_setting('user.email', true)
)
WITH CHECK (email = current_setting('user.email', true)
);
-- Allow grafana_auth to select based on the email
CREATE POLICY grafana_proxy_role ON auth.accounts TO grafana_auth
USING (email = email)
WITH CHECK (false);
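-- Example (illustrative): the policies above compare rows against per-session settings that
-- public.check_jwt() is expected to populate from the JWT; a session run as a member of
-- vessel_role could mimic this manually (the client_id value is a placeholder):
--SELECT set_config('vessel.client_id', 'vessels.urn:mrn:imo:mmsi:368207620', false);
--SET ROLE vessel_role;
--SELECT count(*) FROM api.metrics; -- only rows whose client_id matches the setting are visible
--RESET ROLE;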

View File

@@ -8,7 +8,7 @@
-- List current database
select current_database();
-- connext to the DB
-- connect to the DB
\c signalk
CREATE SCHEMA IF NOT EXISTS jwt;

View File

@@ -8,10 +8,10 @@
CREATE EXTENSION IF NOT EXISTS pg_cron; -- provides a simple cron-based job scheduler for PostgreSQL
-- TRUNCATE table jobs
TRUNCATE TABLE cron.job CONTINUE IDENTITY RESTRICT;
--TRUNCATE TABLE cron.job CONTINUE IDENTITY RESTRICT;
-- Create an every-5-minutes (or every-minute?) job cron_process_new_logbook_fn ??
SELECT cron.schedule('cron_new_logbook', '*/5 * * * *', 'select public.cron_process_new_logbook_fn()') ;
SELECT cron.schedule('cron_new_logbook', '*/5 * * * *', 'select public.cron_process_new_logbook_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_logbook';
-- Create an every-5-minutes job cron_process_new_stay_fn
@@ -31,20 +31,35 @@ SELECT cron.schedule('cron_monitor_online', '*/10 * * * *', 'select public.cron_
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_monitor_online';
-- Create an every-5-minutes job cron_process_new_account_fn
SELECT cron.schedule('cron_new_account', '*/5 * * * *', 'select public.cron_process_new_account_fn()');
--SELECT cron.schedule('cron_new_account', '*/5 * * * *', 'select public.cron_process_new_account_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_account';
-- Create an every-5-minutes job cron_process_new_vessel_fn
SELECT cron.schedule('cron_new_vessel', '*/5 * * * *', 'select public.cron_process_new_vessel_fn()');
--SELECT cron.schedule('cron_new_vessel', '*/5 * * * *', 'select public.cron_process_new_vessel_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_vessel';
-- Create an every-6-minutes job cron_process_new_account_otp_validation_queue_fn, delayed from cron_new_account
--SELECT cron.schedule('cron_new_account_otp', '*/6 * * * *', 'select public.cron_process_new_account_otp_validation_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_account_otp';
-- Notification
-- Create an every-1-minute job cron_process_new_notification_queue_fn: new_account, new_vessel, _new_account_otp
SELECT cron.schedule('cron_new_notification', '*/5 * * * *', 'select public.cron_process_new_notification_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_notification';
-- Maintenance
-- Vacuum database at 01:01 on Sunday
SELECT cron.schedule('cron_vacumm', '1 1 * * 0', 'select public.cron_vaccum_fn()');
SELECT cron.schedule('cron_vacuum', '1 1 * * 0', 'VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.logbook,api.stays,api.moorages,api.metadata,api.metrics;');
-- Any other maintenance required?
-- OTP
-- Create an every-15-minutes job cron_process_prune_otp_fn
SELECT cron.schedule('cron_prune_otp', '*/15 * * * *', 'select public.cron_process_prune_otp_fn()');
-- Cron job settings
UPDATE cron.job SET database = 'signalk';
UPDATE cron.job SET username = 'username'; -- TODO update to scheduler, pending process_queue update
--UPDATE cron.job SET username = 'username' where jobname = 'cron_vacuum'; -- TODO Update to superuser for vacuum permissions
UPDATE cron.job SET nodename = '/var/run/postgresql/'; -- VS default localhost ??
-- check job lists
SELECT * FROM cron.job;
@@ -53,6 +68,8 @@ SELECT * FROM cron.job;
-- unschedule by job name
--SELECT cron.unschedule('cron_new_logbook');
-- TRUNCATE TABLE cron.job_run_details
TRUNCATE TABLE cron.job_run_details CONTINUE IDENTITY RESTRICT;
--TRUNCATE TABLE cron.job_run_details CONTINUE IDENTITY RESTRICT;
-- check job log
select * from cron.job_run_details ORDER BY end_time DESC LIMIT 10;
-- DEBUG Disable all
UPDATE cron.job SET active = False;
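-- Example (illustrative): re-enable a single job after debugging
--UPDATE cron.job SET active = True WHERE jobname = 'cron_new_logbook';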

View File

@@ -14,12 +14,15 @@ INSERT INTO app_settings (name, value) VALUES
('app.email_user', '${PGSAIL_EMAIL_USER}'),
('app.email_pass', '${PGSAIL_EMAIL_PASS}'),
('app.email_from', '${PGSAIL_EMAIL_FROM}'),
('app.pushover_token', '${PGSAIL_PUSHOVER_TOKEN}'),
('app.pushover_app', '_todo_'),
('app.pushover_app_token', '${PGSAIL_PUSHOVER_APP_TOKEN}'),
('app.pushover_app_url', '${PGSAIL_PUSHOVER_APP_URL}'),
('app.telegram_bot_token', '${PGSAIL_TELEGRAM_BOT_TOKEN}'),
('app.url', '${PGSAIL_APP_URL}'),
('app.version', '${PGSAIL_VERSION}');
-- Update comment with version
COMMENT ON DATABASE signalk IS 'version ${PGSAIL_VERSION}';
COMMENT ON DATABASE signalk IS 'PostgSail version ${PGSAIL_VERSION}';
-- Update password from env
ALTER ROLE authenticator WITH PASSWORD '${PGSAIL_AUTHENTICATOR_PASSWORD}';
ALTER ROLE grafana WITH PASSWORD '${PGSAIL_GRAFANA_PASSWORD}';
ALTER ROLE grafana_auth WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
END

View File

@@ -1 +1 @@
0.0.4
0.0.10