154 Commits

Author SHA1 Message Date
xbgmsharp
c1b71cabd8 Fix test versions 2024-01-19 11:41:58 +01:00
xbgmsharp
495e25b838 Release 0.6.0 2024-01-19 11:35:29 +01:00
xbgmsharp
a547271496 Update cron_process_grafana_fn, fix SQL error 2024-01-19 09:23:03 +01:00
xbgmsharp
3a1d0baef8 Update grafana cron job, add notification 2024-01-19 00:13:17 +01:00
xbgmsharp
628de57b5f Update template table, add grafana notification 2024-01-19 00:12:47 +01:00
xbgmsharp
9a5f27d21e Update api.timelapse_fn, fix typo using date. add api.timelapse2_fn to export geojson with notes 2024-01-18 23:54:47 +01:00
xbgmsharp
7892b615e0 Update frontend to latest beta 2024-01-18 22:29:41 +01:00
xbgmsharp
2c2f5d8605 Expose vessel status via API 2024-01-18 22:11:28 +01:00
xbgmsharp
47eda3dcaf Disable stationary detection in pre logbook, still some issue 2024-01-18 22:11:01 +01:00
xbgmsharp
0c0279767f Add comment for process_pre_logbook_fn 2024-01-17 23:03:57 +01:00
xbgmsharp
98e28aacea Update tests, Update test with rls for anonymous role. Upgrade timescaledb version, 2024-01-17 20:27:23 +01:00
xbgmsharp
b04d336c0d Update permissions, add ROW LEVEL SECURITY for anonymous access.
Allow anonymous role for public access
Allow vessel role to new function for oauth
2024-01-17 20:25:51 +01:00
xbgmsharp
288c458c5a Update anonymous tests 2024-01-17 20:24:58 +01:00
xbgmsharp
d3dd46c834 Rename process_logbook_valid_fn to process_pre_logbook_fn 2024-01-17 20:20:49 +01:00
xbgmsharp
d0bc468ce7 Allow anonymous access for complete timelapse 2024-01-17 20:18:40 +01:00
xbgmsharp
3f51e89303 Update formatting 2024-01-16 14:12:13 +01:00
xbgmsharp
000c5651e2 Add PostgREST Media Type Handlers support 2024-01-15 21:44:44 +01:00
xbgmsharp
4bec738826 Add new procedure, api.monitoring_history_fn 2024-01-13 23:23:53 +01:00
xbgmsharp
012812c898 Update eventlogs_view tests, as we skip pre-logbook check from user 2024-01-13 11:45:10 +01:00
xbgmsharp
1c04822cf8 Add cron_process_grafana_fn comment 2024-01-13 11:44:32 +01:00
xbgmsharp
50f018100b Update api.eventlogs_view, ignore pre_logbook check from user 2024-01-12 23:31:00 +01:00
xbgmsharp
13c461a038 Expose metadata platform to frontend 2024-01-12 23:28:16 +01:00
xbgmsharp
8763448523 fix sql versions , update versions 2024-01-12 22:53:21 +01:00
xbgmsharp
e02aaf3676 Update grafana dashboards path 2024-01-12 22:50:57 +01:00
xbgmsharp
ae61072ba4 Update default grafana config and refactor provisioning files 2024-01-12 22:47:28 +01:00
xbgmsharp
242a5554ea 0.6.0-beta 2024-01-12 22:34:00 +01:00
xbgmsharp
39888e1957 Update PostgSail and PostgREST version 2024-01-12 22:33:38 +01:00
xbgmsharp
666f69c42a Update API and SQL documentation 2024-01-12 22:31:19 +01:00
xbgmsharp
77f41251c5 Fix error from 480417917d 2024-01-12 22:22:49 +01:00
xbgmsharp
40a1e0fa39 Fix python error from 0a09d7bbfc 2024-01-12 22:19:40 +01:00
xbgmsharp
5cf2d10757 Fix syntax error from 489fb9562b 2024-01-12 22:13:54 +01:00
xbgmsharp
0dd6410589 Fix docker compose PGRST_SERVER_TIMING_ENABLED contains true 2024-01-12 22:07:16 +01:00
xbgmsharp
682c68a108 Add new postgsail settings app.keycloak_uri 2024-01-12 21:54:51 +01:00
xbgmsharp
5d1db984b8 Update postgrest container with new parameters
Update,fix grafana container, set properly the admin password
2024-01-12 21:53:01 +01:00
xbgmsharp
0a09d7bbfc Update keycloak_py, make uri and host,user,pass dynamic from config 2024-01-12 17:40:59 +01:00
xbgmsharp
14cc4f5ed2 Update cron_process_grafana_fn, add formatting and comment for clarification. 2024-01-12 17:39:54 +01:00
xbgmsharp
ff23f5c2ad Update open api documentation 2024-01-10 22:47:01 +01:00
xbgmsharp
489fb9562b Trigger email otp validation only if the user is not coming from the oauth server 2024-01-10 22:39:16 +01:00
xbgmsharp
8c32345342 Feat: Allow signalk plugin webapp to register user and vessel to database via oauth 2024-01-10 22:38:04 +01:00
xbgmsharp
480417917d Feat: initial implementation for oauth support via Keycloak server or other 2024-01-10 22:37:11 +01:00
xbgmsharp
e557ed49a5 Add keycloak_py_fn. This function link the postgsail user with the oauth keycloak server 2024-01-09 22:04:22 +01:00
xbgmsharp
a3475dfe99 Update UUID v7 function 2024-01-09 22:03:45 +01:00
xbgmsharp
e670e11cd5 update ERd diagram 2024-01-07 22:18:44 +01:00
xbgmsharp
88de5003c2 feat: Add new metadata fields platform and configuration. This allows pre-defined configuration for well-known devices like Victron VenusOS devices. 2024-01-07 22:16:52 +01:00
xbgmsharp
c46a428fc2 Update tests, add more invalid metrics to be ignored, add sql metrics count check 2023-12-31 21:13:40 +01:00
xbgmsharp
db3bd6b06f Update metrics_trigger_fn, silently ignore invalid status. 2023-12-31 21:09:42 +01:00
xbgmsharp
4ea7a1b019 Update API documentation 2023-12-29 18:48:11 +01:00
xbgmsharp
ce106074dc Feat: Update tests for pre logbook check and grafana cron job support 2023-12-29 18:26:22 +01:00
xbgmsharp
e7d8229e83 Feat: Add grafana cron job 2023-12-29 18:23:07 +01:00
xbgmsharp
f14342bb07 Fix grafana cron job. update cron_process_grafana_fn 2023-12-29 18:16:59 +01:00
xbgmsharp
c4fbf7682d refactor documentation 2023-12-29 18:15:08 +01:00
xbgmsharp
f8c1f43f48 Fix typo 2023-12-29 12:16:43 +01:00
xbgmsharp
0d5089af2d Add new settings for Grafana HTTP API URL 2023-12-29 11:43:49 +01:00
xbgmsharp
da1952ed31 Feat: Add pre_logbook validation 2023-12-29 11:35:22 +01:00
xbgmsharp
a5d5585366 feat: Add pre logbook check. Split logbook process function 2023-12-29 11:34:19 +01:00
xbgmsharp
5f9a889a44 Feat: Add grafana provisioning on first contact from vessel
Feat: Add pre logbook validation
2023-12-29 11:32:57 +01:00
xbgmsharp
f9719bd174 Feat: Add cron for pre logbook check
Feat: Add cron for Grafana provisioning
2023-12-29 11:28:57 +01:00
xbgmsharp
8d1b8cb389 fix: Update template email message 2023-12-29 11:27:00 +01:00
xbgmsharp
acfd058d3b feat: Add uuid v7 helpers 2023-12-29 11:26:16 +01:00
xbgmsharp
eeae7c40c6 Feat: Add proper Grafana provisioning via HTTP API 2023-12-29 11:24:54 +01:00
xbgmsharp
2bbf27f3ad Update frontend 2023-12-29 11:24:18 +01:00
xbgmsharp
2ba81a935f Update reindex cron_job to use CONCURRENTLY option, remove metrics reindex as managed by timescale. 2023-12-03 17:02:05 +01:00
xbgmsharp
0fbac67895 Remove Duplicate Indexes 2023-12-01 23:51:13 +01:00
xbgmsharp
228b234582 Disable remove duplicate index 2023-12-01 22:25:12 +01:00
xbgmsharp
75c8a9506a Update tests, fix check on stay name, add anonymous/public access tests, update badges format 2023-12-01 22:06:01 +01:00
xbgmsharp
2b48a66cd2 Update overpass_py_fn, Enforce area with a proper name tag 2023-12-01 22:01:11 +01:00
xbgmsharp
e642049e93 Update versions.
PostgreSQL-16.1
Postgis-3.4.1
Timescaledb-2.13
PostgSail-0.5.2
2023-12-01 21:59:11 +01:00
xbgmsharp
94e123c95e Update badges processing, update time properties to be at the logbook date 2023-12-01 13:15:12 +01:00
xbgmsharp
9787328990 Update badges, set badges time to log time versus processed time
Add deprecated comment to unused functions.
2023-11-30 22:16:49 +01:00
xbgmsharp
de62d936d5 Update api.monitoring_view, add new properties wind and speed in to the geojson. 2023-11-30 21:50:21 +01:00
xbgmsharp
293a33da08 Update overpass_py_fn, improve geo location detection, first check against area, then find around with 400m 2023-11-30 21:47:56 +01:00
xbgmsharp
2b105db5c7 Update public.logbook_update_geojson_fn, fix geojson properties for the wind and add notes geojson properties for each Point.
Update process_logbook_queue_fn, improve invalid logbook detection mostly due to multiple GPS, remove logbook with more than 60% of metrics within 100m.
2023-11-30 16:10:50 +01:00
xbgmsharp
af003d5a62 Update api.export_moorages_gpx_fn, fix moorage id and url path.
Update api.delete_logbook_fn, set api.metrics to moored and subtract -1 from moorage count
2023-11-30 15:53:50 +01:00
xbgmsharp
ecc9fd6d9f Update comment order 2023-11-30 12:50:46 +01:00
xbgmsharp
df5f667b41 Update README 2023-11-29 22:14:47 +01:00
xbgmsharp
1bfa04a057 Update pgcron jobs.
Fix clean_up process logs.
Add vaccum for auth schema.
Add reindex for api schema.
Run notification every minute instead of 2
2023-11-29 22:00:19 +01:00
xbgmsharp
a1e8827479 Release 0.5.1 2023-11-27 22:50:31 +01:00
xbgmsharp
d837dc57fb Update test version 2023-11-27 22:50:21 +01:00
xbgmsharp
90e8b24321 Update openapi documentation 2023-11-27 22:50:04 +01:00
xbgmsharp
a1ccfd5f7c Update frontend pre-release 0.8.0-beta2 2023-11-27 22:49:15 +01:00
xbgmsharp
6ecb345758 Fix Enforce public vessel name to be alphanumeric.
Fix typo for pushover notification
2023-11-27 22:44:26 +01:00
xbgmsharp
0709bc83c4 Fix api.monitoring_view, api.stays_view, api.stay_view and api.moorages_stays_view 2023-11-27 22:43:27 +01:00
xbgmsharp
8c777cd028 Update frontend pre-release 0.8.0 2023-11-24 12:06:04 +01:00
xbgmsharp
cfe3105f87 Update test to match latest release 2023-11-24 12:05:07 +01:00
xbgmsharp
5af24a1878 Release 0.5.0 2023-11-24 12:00:35 +01:00
xbgmsharp
0aae8d002b Update process_logbook enforce more than 2 metrics to be considered a log 2023-11-24 12:00:10 +01:00
xbgmsharp
22c69a2fd9 Update postgrest PGRST_DB_POOL to the connections limit of api_anonymous role 2023-11-23 17:42:34 +01:00
xbgmsharp
c5f1b85a16 Update openapi documentation 2023-11-23 15:14:58 +01:00
xbgmsharp
0157fe12e5 Update ERD schema 2023-11-23 15:14:45 +01:00
xbgmsharp
ead2b99e7f Update comments on api schema 2023-11-23 15:02:06 +01:00
xbgmsharp
711d5a0d40 Update auth.accounts, add public_id for future use. 2023-11-23 10:27:34 +01:00
xbgmsharp
7e52065ef8 Update metadata trigger, Ignore and overwrite the time sent by the vessel, fix #3 2023-11-23 10:24:16 +01:00
xbgmsharp
f65873db81 Update README 2023-11-21 20:44:36 +01:00
xbgmsharp
347299d76e Add missing description to python function 2023-11-21 20:44:09 +01:00
xbgmsharp
effeb29915 Update test to logs_view change 2023-11-20 21:22:25 +01:00
xbgmsharp
9329a6d04b Add trigger on vessel creation to set the public_vessel name 2023-11-20 21:16:17 +01:00
xbgmsharp
70be4fb295 Update logs_view, show only processed logbook 2023-11-20 21:15:37 +01:00
xbgmsharp
5960447297 Update/fix trigger, remove the actual data 2023-11-20 21:15:15 +01:00
xbgmsharp
f240222b98 Update test to match change from SERIAL to IDENTITY, fix d76964f3db 2023-11-19 22:18:07 +01:00
xbgmsharp
0218f2fa73 Follow best practice don't use timestamp(0) 2023-11-19 22:17:47 +01:00
xbgmsharp
79a96c7556 Update tests, debug mermerd Something went wrong! with github action 2023-11-19 21:50:40 +01:00
xbgmsharp
21f96483f5 Follow best practice don't use timestamp(0)
https://wiki.postgresql.org/wiki/Don%27t_Do_This#Don.27t_use_timestamp.280.29_or_timestamptz.280.29
2023-11-19 21:49:19 +01:00
xbgmsharp
4c6d6290f0 Update schemalint to add more best practice support 2023-11-19 21:40:58 +01:00
xbgmsharp
dc02dc886d Update SERIAL to IDENTITY test 2023-11-19 21:38:57 +01:00
xbgmsharp
6355f98792 Update openapi documentation 2023-11-19 21:38:22 +01:00
xbgmsharp
7543c93dcf Update ERD schema 2023-11-19 21:38:07 +01:00
xbgmsharp
d76964f3db Update test to match change from SERIAL to IDENTITY 2023-11-19 21:36:15 +01:00
xbgmsharp
de651ea7ab Replace type SERIAL by IDENTITY as per best practice.
https://wiki.postgresql.org/wiki/Don%27t_Do_This#Don.27t_use_serial
2023-11-19 21:34:20 +01:00
xbgmsharp
fddd3df05e Replace type TIMESTAMP WITHOUT TIME ZONE by TIMESTAMPTZ with timezone forcing UTC as per best practice. 2023-11-19 21:10:28 +01:00
xbgmsharp
472131efbd refactor metrics_trigger_fn, set previous_metric as record instead of individual value 2023-11-19 18:03:54 +01:00
xbgmsharp
1f7bb433e2 Update gpx moorages export, make url link dynamic 2023-11-18 23:03:42 +01:00
xbgmsharp
c4fa9f5512 Update reverse geo loc, avoid loop 2023-11-18 23:03:07 +01:00
xbgmsharp
b005f592e9 Update project description 2023-11-18 22:48:48 +01:00
xbgmsharp
6cc13313f1 Update tests to match the new anonymous access feature 2023-11-18 21:38:55 +01:00
xbgmsharp
fdb466abde Refactor anonymous access, update public.check_jwt, api.ispublic_fn to allow anonymous based on public vessel name 2023-11-18 21:37:20 +01:00
xbgmsharp
5f0adb67c8 Update openapi 2023-11-18 21:34:32 +01:00
xbgmsharp
2e170c5480 Update ERD 2023-11-18 21:34:22 +01:00
xbgmsharp
5dd2875b91 Update README 2023-11-18 13:34:48 +01:00
xbgmsharp
ee131e0e70 Update metrics_trigger_fn, fix stay postprocessing, only when gone. 2023-11-15 00:16:25 +01:00
xbgmsharp
6426e14d54 Update openapi documentation 2023-11-14 23:56:54 +01:00
xbgmsharp
8fff17dee3 Update process_stay_queue_fn, process stays only when gone 2023-11-14 23:51:23 +01:00
xbgmsharp
eb8ba54230 Update tests to match refactor of logs,stays,moorages. Draft anonymous access 2023-11-14 23:49:43 +01:00
xbgmsharp
f9ed13761c Update ERD mermaid diagram 2023-11-14 23:49:30 +01:00
xbgmsharp
e1e7da779e Update tests to match refactor of Logs,Stays,Moorages
Update dynamic openapi documentation
Add dynamic mermaid schema documentation
2023-11-14 23:28:20 +01:00
xbgmsharp
c879c4bdab Refactor Logs,Moorages,Stays
Update Logs,Moorages,Stays view
Add public.process_lat_lon_fn
Add public.moorage_update_trigger_fn
Add public.moorage_delete_trigger_fn
Update debug on metrics_trigger_fn
Deprecated public.process_moorage_queue_fn
2023-11-14 23:24:30 +01:00
xbgmsharp
e5f2469358 Add overpass-turbo api to improve geo location detection 2023-11-14 23:20:58 +01:00
xbgmsharp
480409de12 Update ERD schema, add mermerd config 2023-11-12 15:21:45 +01:00
xbgmsharp
9d8a7294e0 Update ERD diagram 2023-11-12 14:55:16 +01:00
xbgmsharp
e3ae6b4243 Update ERD Mermaid diagram 2023-11-12 14:53:24 +01:00
xbgmsharp
268ce5b908 Update ERD README 2023-11-12 14:51:21 +01:00
xbgmsharp
ce55a58c87 Update diagram layout 2023-11-12 14:50:30 +01:00
xbgmsharp
14e2103e0f Update Mermaid 2023-11-12 13:09:25 +01:00
xbgmsharp
8025fc4d52 Update ERD doc 2023-11-12 13:06:09 +01:00
xbgmsharp
117bdd2e3f Update ERD diagram to be dynamic 2023-11-12 13:01:46 +01:00
xbgmsharp
b37c33bccb Update api.delete_logbook_fn, add missing query 2023-11-11 11:32:29 +01:00
xbgmsharp
2507545d3f Update anonymous access, add check to allow public access if header x-public-id is present with a base64 value mapping a path and an id. 2023-11-02 21:42:03 +01:00
xbgmsharp
b6ef06d382 Add unit test for public access 2023-11-02 21:39:37 +01:00
xbgmsharp
30de9b76af Update test to prepare for anonymous access 2023-11-02 21:38:25 +01:00
xbgmsharp
8f1558f436 Update OpenAPI doc 2023-11-02 21:37:15 +01:00
xbgmsharp
636fae7ce6 Update lint format 2023-11-02 21:36:53 +01:00
xbgmsharp
8197a26c49 Release 0.4.1 2023-10-31 10:18:02 +01:00
xbgmsharp
ba3b213423 Update SQL Uppercase type 2023-10-30 21:17:21 +01:00
xbgmsharp
0be57a4e70 Update role, add view select for user_role 2023-10-30 21:16:44 +01:00
xbgmsharp
80163d3fe2 Update schemalint, disable tests, most require large review for future reference 2023-10-30 11:59:24 +01:00
xbgmsharp
c8795b15f3 Update test versions, latest PostgREST 2023-10-29 23:08:21 +01:00
xbgmsharp
e8c0ea5c94 revert logbook naming 2023-10-29 22:54:53 +01:00
xbgmsharp
38ad6084bb Update OpenAPI 2023-10-29 22:54:12 +01:00
xbgmsharp
c726187b4d Update explore view 2023-10-29 22:38:45 +01:00
xbgmsharp
3eafa2e13f Update export functions to order by date rather than id.
Update delete logbook to return a boolean
2023-10-29 21:31:15 +01:00
xbgmsharp
d13f096d4f Update process_logbook_queue_fn, remove the gpx handler 2023-10-29 18:44:43 +01:00
xbgmsharp
e2e37e1f01 Add explore view 2023-10-29 18:44:03 +01:00
xbgmsharp
3bbe309de3 Add delete logbook and dependency stays 2023-10-29 18:43:32 +01:00
xbgmsharp
2be7c787dd Remove duplicated 2023-10-28 22:28:14 +02:00
xbgmsharp
9aecda4752 Update reverse_geocode_py_fn, improve location detection, ignore tag road or highway. Recursive over lower zoom level. 2023-10-28 21:55:29 +02:00
63 changed files with 3292 additions and 1933 deletions

View File

@@ -6,17 +6,17 @@ module.exports = {
database: process.env.PGDATABASE, database: process.env.PGDATABASE,
charset: "utf8", charset: "utf8",
}, },
rules: { rules: {
"name-casing": ["error", "snake"], "name-casing": ["error", "snake"],
"prefer-jsonb-to-json": ["error"], "prefer-jsonb-to-json": ["error"],
"prefer-text-to-varchar": ["error"], "prefer-text-to-varchar": ["error"],
"prefer-timestamptz-to-timestamp": ["error"], "prefer-timestamptz-to-timestamp": ["error"],
"prefer-identity-to-serial": ["error"], "prefer-identity-to-serial": ["error"],
"name-inflection": ["error", "singular"], //"name-inflection": ["error", "singular"],
}, },
schemas: [{ name: "public" }, { name: "api" }], schemas: [{ name: "public" }, { name: "api" },{ name: "auth" }],
ignores: [], ignores: [],
}; };

View File

@@ -1,35 +0,0 @@
# PostgSail ERD
The Entity-Relationship Diagram (ERD) provides a graphical representation of database tables, columns, and inter-relationships. ERD can give sufficient information for the database administrator to follow when developing and maintaining the database.
## A global overview
![API Schema](https://raw.githubusercontent.com/xbgmsharp/postgsail/main/ERD/postgsail.pgerd.png "API Schema")
## Further
There are 3 main schemas:
- API Schema ERD
- tables
- metrics
- logbook
- ...
- functions
- ...
![API Schema](https://raw.githubusercontent.com/xbgmsharp/postgsail/main/ERD/signalk%20-%20api.png)
- Auth Schema ERD
- tables
- accounts
- vessels
- ...
- functions
- ...
![Auth Schema](https://raw.githubusercontent.com/xbgmsharp/postgsail/main/ERD/signalk%20-%20auth.png "Auth Schema")
- Public Schema ERD
- tables
- app_settings
- tpl_messages
- ...
- functions
- ...
![Public Schema](https://raw.githubusercontent.com/xbgmsharp/postgsail/main/ERD/signalk%20-%20public.png "Public Schema")

View File

@@ -3,8 +3,9 @@
Effortless cloud based solution for storing and sharing your SignalK data. Allow you to effortlessly log your sails and monitor your boat with historical data. Effortless cloud based solution for storing and sharing your SignalK data. Allow you to effortlessly log your sails and monitor your boat with historical data.
[![release](https://img.shields.io/github/release/xbgmsharp/postgsail?include_prereleases=&sort=semver&color=blue)](https://github.com/xbgmsharp/postgsail/releases/latest) [![release](https://img.shields.io/github/release/xbgmsharp/postgsail?include_prereleases=&sort=semver&color=blue)](https://github.com/xbgmsharp/postgsail/releases/latest)
[![License](https://img.shields.io/badge/License-MIT-blue)](#license) [![License](https://img.shields.io/github/license/xbgmsharp/postgsail)](#license)
[![issues - postgsail](https://img.shields.io/github/issues/xbgmsharp/postgsail)](https://github.com/xbgmsharp/postgsail/issues) [![issues - postgsail](https://img.shields.io/github/issues/xbgmsharp/postgsail)](https://github.com/xbgmsharp/postgsail/issues)
[![PRs Welcome](https://img.shields.io/badge/PRs-welcome-brightgreen.svg?style=flat-square)](http://makeapullrequest.com)
[![Test services db, api](https://github.com/xbgmsharp/postgsail/actions/workflows/db-test.yml/badge.svg)](https://github.com/xbgmsharp/postgsail/actions/workflows/db-test.yml) [![Test services db, api](https://github.com/xbgmsharp/postgsail/actions/workflows/db-test.yml/badge.svg)](https://github.com/xbgmsharp/postgsail/actions/workflows/db-test.yml)
[![Test services db, api, web](https://github.com/xbgmsharp/postgsail/actions/workflows/frontend-test.yml/badge.svg)](https://github.com/xbgmsharp/postgsail/actions/workflows/frontend-test.yml) [![Test services db, api, web](https://github.com/xbgmsharp/postgsail/actions/workflows/frontend-test.yml/badge.svg)](https://github.com/xbgmsharp/postgsail/actions/workflows/frontend-test.yml)
@@ -19,13 +20,15 @@ postgsail-frontend:
postgsail-telegram-bot: postgsail-telegram-bot:
[![GitHub Release](https://img.shields.io/github/release/xbgmsharp/postgsail-telegram-bot.svg)](https://github.com/xbgmsharp/postgsail-telegram-bot/releases/latest) [![GitHub Release](https://img.shields.io/github/release/xbgmsharp/postgsail-telegram-bot.svg)](https://github.com/xbgmsharp/postgsail-telegram-bot/releases/latest)
[![OpenSSF Best Practices](https://www.bestpractices.dev/projects/8124/badge)](https://www.bestpractices.dev/projects/8124)
## Features ## Features
- Automatically log your voyages without manually starting or stopping a trip. - Automatically log your voyages without manually starting or stopping a trip.
- Automatically capture the details of your voyages (boat speed, heading, wind speed, etc). - Automatically capture the details of your voyages (boat speed, heading, wind speed, etc).
- Timelapse video your trips, with or without time control. - Timelapse video your trips, with or without time control.
- Add custom notes to your logs. - Add custom notes to your logs.
- Export to CSV or GPX or KLM and download your logs. - Export to CSV, GPX, GeoJSON, KML and download your logs.
- Aggregate your trip statistics: Longest voyage, time spent at anchorages, home ports etc. - Aggregate your trip statistics: Longest voyage, time spent at anchorages, home ports etc.
- See your moorages on a global map, with incoming and outgoing voyages from each trip. - See your moorages on a global map, with incoming and outgoing voyages from each trip.
- Monitor your boat (position, depth, wind, temperature, battery charge status, etc.) remotely. - Monitor your boat (position, depth, wind, temperature, battery charge status, etc.) remotely.
@@ -144,7 +147,7 @@ Next, to ingest data from signalk, you need to install [signalk-postgsail](https
Also, if you like, you can import saillogger data using the postgsail helpers, [postgsail-helpers](https://github.com/xbgmsharp/postgsail-helpers). Also, if you like, you can import saillogger data using the postgsail helpers, [postgsail-helpers](https://github.com/xbgmsharp/postgsail-helpers).
You might want to import your influxdb1 data as well, [outflux](https://github.com/timescale/outflux). You might want to import your influxdb1 data as well, [outflux](https://github.com/timescale/outflux).
Any taker on influxdb2 to PostgSail? It is definitely possible. For InfluxDB 2.x and 3.x. You will need to enable the 1.x APIs to use them. Consult the InfluxDB documentation for more details.
Last, if you like, you can import the sample data from Signalk NMEA Plaka by running the tests. Last, if you like, you can import the sample data from Signalk NMEA Plaka by running the tests.
If everything goes well all tests pass successfully and you should receive a few notifications by email or PushOver or Telegram. If everything goes well all tests pass successfully and you should receive a few notifications by email or PushOver or Telegram.
@@ -222,4 +225,4 @@ Feel free to contribute.
### License ### License
This script is free software, Apache License Version 2.0. This is a free software, Apache License Version 2.0.

View File

@@ -45,8 +45,12 @@ services:
PGRST_DB_ANON_ROLE: api_anonymous PGRST_DB_ANON_ROLE: api_anonymous
PGRST_OPENAPI_SERVER_PROXY_URI: http://127.0.0.1:3000 PGRST_OPENAPI_SERVER_PROXY_URI: http://127.0.0.1:3000
PGRST_DB_PRE_REQUEST: public.check_jwt PGRST_DB_PRE_REQUEST: public.check_jwt
PGRST_DB_POOL: 20
PGRST_DB_URI: ${PGRST_DB_URI} PGRST_DB_URI: ${PGRST_DB_URI}
PGRST_JWT_SECRET: ${PGRST_JWT_SECRET} PGRST_JWT_SECRET: ${PGRST_JWT_SECRET}
PGRST_SERVER_TIMING_ENABLED: 1
PGRST_DB_MAX_ROWS: 500
PGRST_JWT_CACHE_MAX_LIFETIME: 3600
depends_on: depends_on:
- db - db
logging: logging:
@@ -74,10 +78,9 @@ services:
env_file: .env env_file: .env
environment: environment:
- GF_INSTALL_PLUGINS=pr0ps-trackmap-panel,fatcloud-windrose-panel - GF_INSTALL_PLUGINS=pr0ps-trackmap-panel,fatcloud-windrose-panel
- GF_SECURITY_ADMIN_PASSWORD=${PGSAIL_GRAFANA_PASSWORD}
- GF_USERS_ALLOW_SIGN_UP=false - GF_USERS_ALLOW_SIGN_UP=false
- GF_SMTP_ENABLED=false - GF_SMTP_ENABLED=false
- PGSAIL_GRAFANA_URI=db:5432
- PGSAIL_GRAFANA_PASSWORD=${PGSAIL_GRAFANA_PASSWORD}
depends_on: depends_on:
- db - db
logging: logging:

34
docs/ERD/README.md Normal file
View File

@@ -0,0 +1,34 @@
# PostgSail ERD
The Entity-Relationship Diagram (ERD) provides a graphical representation of database tables, columns, and inter-relationships. ERD can give sufficient information for the database administrator to follow when developing and maintaining the database.
## A global overview
Auto generated Mermaid diagram using [mermerd](https://github.com/KarnerTh/mermerd) and [MermaidJs](https://github.com/mermaid-js/mermaid).
[PostgSail SQL Schema](https://github.com/xbgmsharp/postgsail/tree/main/docs/ERD/postgsail.md "PostgSail SQL Schema")
## Further
There are 3 main schemas:
- API Schema:
- tables
- metrics
- logbook
- ...
- functions
- ...
- Auth Schema:
- tables
- accounts
- vessels
- ...
- functions
- ...
- Public Schema:
- tables
- app_settings
- tpl_messages
- ...
- functions
- ...

View File

@@ -0,0 +1,35 @@
# Connection properties
connectionString: ${PGSAIL_DB_URI}
# Define what schemas should be used
#useAllSchemas: true
# or
schema:
- "public"
- "api"
- "auth"
# Define what tables should be used
useAllTables: true
# or
#selectedTables:
# - city
# - customer
# Additional flags
showAllConstraints: true
encloseWithMermaidBackticks: true
outputFileName: "postgsail.md"
debug: true
omitConstraintLabels: true
omitAttributeKeys: true
showDescriptions:
- enumValues
- columnComments
- notNull
showSchemaPrefix: true
schemaPrefixSeparator: "_"
# Names must match the pattern <schema><schema_prefix><table>
#relationshipLabels:
# - "public_table public_another-table : label"

261
docs/ERD/postgsail.md Normal file
View File

@@ -0,0 +1,261 @@
```mermaid
erDiagram
api_logbook {
text _from
double_precision _from_lat
double_precision _from_lng
integer _from_moorage_id "Link api.moorages with api.logbook via FOREIGN KEY and REFERENCES"
timestamp_with_time_zone _from_time "{NOT_NULL}"
text _to
double_precision _to_lat
double_precision _to_lng
integer _to_moorage_id "Link api.moorages with api.logbook via FOREIGN KEY and REFERENCES"
timestamp_with_time_zone _to_time
boolean active
double_precision avg_speed
numeric distance "in NM"
interval duration "Best to use standard ISO 8601"
jsonb extra "computed signalk metrics of interest, runTime, currentLevel, etc"
integer id "{NOT_NULL}"
double_precision max_speed
double_precision max_wind_speed
text name
text notes
geography track_geog "postgis geography type default SRID 4326 Unit: degres"
jsonb track_geojson "store generated geojson with track metrics data using with LineString and Point features, we can not depend api.metrics table"
geometry track_geom "postgis geometry type EPSG:4326 Unit: degres"
text vessel_id "{NOT_NULL}"
}
api_metadata {
boolean active "trigger monitor online/offline"
boolean active
double_precision beam
text client_id
text configuration
timestamp_with_time_zone created_at "{NOT_NULL}"
double_precision height
integer id "{NOT_NULL}"
double_precision length
numeric mmsi
text name
text platform
text plugin_version "{NOT_NULL}"
numeric ship_type
text signalk_version "{NOT_NULL}"
timestamp_with_time_zone time "{NOT_NULL}"
timestamp_with_time_zone updated_at "{NOT_NULL}"
text vessel_id "Link auth.vessels with api.metadata via FOREIGN KEY and REFERENCES {NOT_NULL}"
text vessel_id "{NOT_NULL}"
}
api_metrics {
double_precision anglespeedapparent
text client_id
double_precision courseovergroundtrue
double_precision latitude "With CONSTRAINT but allow NULL value to be ignored silently by trigger"
double_precision longitude "With CONSTRAINT but allow NULL value to be ignored silently by trigger"
jsonb metrics
double_precision speedoverground
text status
timestamp_with_time_zone time "{NOT_NULL}"
text vessel_id "{NOT_NULL}"
double_precision windspeedapparent
}
api_moorages {
text country
geography geog "postgis geography type default SRID 4326 Unit: degres"
boolean home_flag
integer id "{NOT_NULL}"
double_precision latitude
double_precision longitude
text name
jsonb nominatim
text notes
jsonb overpass
integer reference_count
integer stay_code "Link api.stays_at with api.moorages via FOREIGN KEY and REFERENCES"
interval stay_duration "Best to use standard ISO 8601"
text vessel_id "{NOT_NULL}"
}
api_stays {
boolean active
timestamp_with_time_zone arrived "{NOT_NULL}"
timestamp_with_time_zone departed
interval duration "Best to use standard ISO 8601"
geography geog "postgis geography type default SRID 4326 Unit: degres"
integer id "{NOT_NULL}"
double_precision latitude
double_precision longitude
integer moorage_id "Link api.moorages with api.stays via FOREIGN KEY and REFERENCES"
text name
text notes
integer stay_code "Link api.stays_at with api.stays via FOREIGN KEY and REFERENCES"
text vessel_id "{NOT_NULL}"
}
api_stays_at {
text description "{NOT_NULL}"
integer stay_code "{NOT_NULL}"
}
auth_accounts {
timestamp_with_time_zone connected_at "{NOT_NULL}"
timestamp_with_time_zone created_at "{NOT_NULL}"
citext email "{NOT_NULL}"
text first "User first name with CONSTRAINT CHECK {NOT_NULL}"
integer id "{NOT_NULL}"
text last "User last name with CONSTRAINT CHECK {NOT_NULL}"
text pass "{NOT_NULL}"
jsonb preferences
name role "{NOT_NULL}"
timestamp_with_time_zone updated_at "{NOT_NULL}"
text user_id "{NOT_NULL}"
}
auth_otp {
text otp_pass "{NOT_NULL}"
timestamp_with_time_zone otp_timestamp
smallint otp_tries "{NOT_NULL}"
citext user_email "{NOT_NULL}"
}
auth_users {
timestamp_with_time_zone connected_at "{NOT_NULL}"
timestamp_with_time_zone created_at "{NOT_NULL}"
name email "{NOT_NULL}"
text first "{NOT_NULL}"
name id "{NOT_NULL}"
text last "{NOT_NULL}"
jsonb preferences
name role "{NOT_NULL}"
timestamp_with_time_zone updated_at "{NOT_NULL}"
text user_id "{NOT_NULL}"
}
auth_vessels {
timestamp_with_time_zone created_at "{NOT_NULL}"
numeric mmsi "MMSI can be optional but if present must be a valid one and unique but must be in numeric range between 100000000 and 800000000"
text name "{NOT_NULL}"
citext owner_email "{NOT_NULL}"
name role "{NOT_NULL}"
timestamp_with_time_zone updated_at "{NOT_NULL}"
text vessel_id "{NOT_NULL}"
}
public_aistypes {
text description
numeric id
}
public_app_settings {
text name "application settings name key {NOT_NULL}"
text value "application settings value {NOT_NULL}"
}
public_badges {
text description
text name
}
public_email_templates {
text email_content
text email_subject
text name
text pushover_message
text pushover_title
}
public_geocoders {
text name
text reverse_url
text url
}
public_iso3166 {
text alpha_2
text alpha_3
text country
integer id
}
public_mid {
text country
integer country_id
numeric id
}
public_ne_10m_geography_marine_polys {
text changed
text featurecla
geometry geom
integer gid "{NOT_NULL}"
text label
double_precision max_label
double_precision min_label
text name
text name_ar
text name_bn
text name_de
text name_el
text name_en
text name_es
text name_fa
text name_fr
text name_he
text name_hi
text name_hu
text name_id
text name_it
text name_ja
text name_ko
text name_nl
text name_pl
text name_pt
text name_ru
text name_sv
text name_tr
text name_uk
text name_ur
text name_vi
text name_zh
text name_zht
text namealt
bigint ne_id
text note
smallint scalerank
text wikidataid
}
public_process_queue {
text channel "{NOT_NULL}"
integer id "{NOT_NULL}"
text payload "{NOT_NULL}"
timestamp_with_time_zone processed
text ref_id "either user_id or vessel_id {NOT_NULL}"
timestamp_with_time_zone stored "{NOT_NULL}"
}
public_spatial_ref_sys {
character_varying auth_name
integer auth_srid
character_varying proj4text
integer srid "{NOT_NULL}"
character_varying srtext
}
api_logbook }o--|| api_metadata : ""
api_logbook }o--|| api_moorages : ""
api_logbook }o--|| api_moorages : ""
api_metadata }o--|| auth_vessels : ""
api_metrics }o--|| api_metadata : ""
api_moorages }o--|| api_metadata : ""
api_stays }o--|| api_metadata : ""
api_moorages }o--|| api_stays_at : ""
api_stays }o--|| api_moorages : ""
api_stays }o--|| api_stays_at : ""
auth_otp |o--|| auth_accounts : ""
auth_vessels |o--|| auth_accounts : ""
```

View File

Before

Width:  |  Height:  |  Size: 360 KiB

After

Width:  |  Height:  |  Size: 360 KiB

View File

Before

Width:  |  Height:  |  Size: 222 KiB

After

Width:  |  Height:  |  Size: 222 KiB

View File

Before

Width:  |  Height:  |  Size: 18 KiB

After

Width:  |  Height:  |  Size: 18 KiB

View File

Before

Width:  |  Height:  |  Size: 195 KiB

After

Width:  |  Height:  |  Size: 195 KiB

2
docs/README.md Normal file
View File

@@ -0,0 +1,2 @@
Simple and scalable architecture.

File diff suppressed because it is too large Load Diff

View File

@@ -3,19 +3,22 @@ allow_sign_up = false
auto_assign_org = true auto_assign_org = true
auto_assign_org_role = Editor auto_assign_org_role = Editor
[auth.proxy]
enabled = true
header_name = X-WEBAUTH-USER
header_property = email
headers = Login:X-WEBAUTH-LOGIN
auto_sign_up = true
enable_login_token = true
login_maximum_inactive_lifetime_duration = 12h
login_maximum_lifetime_duration = 1d
[dashboards] [dashboards]
default_home_dashboard_path = /etc/grafana/dashboards/home.json default_home_dashboard_path = /etc/grafana/dashboards/tpl/home.json
min_refresh_interval = 1m
[alerting]
enabled = false
[unified_alerting]
enabled = false
[analytics] [analytics]
feedback_links_enabled = false feedback_links_enabled = false
reporting_enabled = false reporting_enabled = false
[news]
news_feed_enabled = false
[help]
enabled = false

View File

@@ -20,6 +20,6 @@ providers:
allowUiUpdates: true allowUiUpdates: true
options: options:
# <string, required> path to dashboard files on disk. Required when using the 'file' type # <string, required> path to dashboard files on disk. Required when using the 'file' type
path: /etc/grafana/dashboards/ path: /etc/grafana/dashboards/tpl/
# <bool> use folder names from filesystem to create folders in Grafana # <bool> use folder names from filesystem to create folders in Grafana
foldersFromFilesStructure: true foldersFromFilesStructure: true

View File

@@ -5,18 +5,18 @@
-- https://groups.google.com/g/signalk/c/W2H15ODCic4 -- https://groups.google.com/g/signalk/c/W2H15ODCic4
-- --
-- Description: -- Description:
-- Insert data into table metadata from API using PostgREST -- Insert data into table api.metadata from API using PostgREST
-- Insert data into table metrics from API using PostgREST -- Insert data into table api.metrics from API using PostgREST
-- TimescaleDB Hypertable to store signalk metrics -- TimescaleDB Hypertable to store signalk metrics on table api.metrics
-- pgsql functions to generate logbook, stays, moorages -- pgsql functions to generate logbook, stays, moorages from table api.metrics
-- CRON functions to process logbook, stays, moorages -- CRON functions to process logbook, stays, moorages
-- python functions for geo reverse and send notification via email and/or pushover -- python functions for geo reverse and send notification via email, pushover, telegram
-- Views statistics, timelapse, monitoring, logs -- Views statistics, timelapse, monitoring, logs
-- Always store time in UTC -- Always store time in UTC
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- vessels signalk -(POST)-> metadata -> metadata_upsert -(trigger)-> metadata_upsert_trigger_fn (INSERT or UPDATE) -- vessels signalk -(POST)-> metadata -> metadata_upsert_trigger -(BEFORE INSERT)-> metadata_upsert_trigger_fn (INSERT or UPDATE)
-- vessels signalk -(POST)-> metrics -> metrics -(trigger)-> metrics_fn new log,stay,moorage -- vessels signalk -(POST)-> metrics -> metrics_trigger -(BEFORE INSERT)-> metrics_trigger_fn (INSERT or UPDATE new log,stay)
--------------------------------------------------------------------------- ---------------------------------------------------------------------------

View File

@@ -8,7 +8,7 @@
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- Metadata from signalk -- Metadata from signalk
CREATE TABLE IF NOT EXISTS api.metadata( CREATE TABLE IF NOT EXISTS api.metadata(
id SERIAL PRIMARY KEY, id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
name TEXT NULL, name TEXT NULL,
mmsi NUMERIC NULL, mmsi NUMERIC NULL,
client_id TEXT NULL, client_id TEXT NULL,
@@ -20,29 +20,29 @@ CREATE TABLE IF NOT EXISTS api.metadata(
ship_type NUMERIC NULL, ship_type NUMERIC NULL,
plugin_version TEXT NOT NULL, plugin_version TEXT NOT NULL,
signalk_version TEXT NOT NULL, signalk_version TEXT NOT NULL,
time TIMESTAMP WITHOUT TIME ZONE NOT NULL, -- should be renamed to last_update !? time TIMESTAMPTZ NOT NULL, -- should be renamed to last_update !?
platform TEXT NULL,
configuration TEXT NULL,
active BOOLEAN DEFAULT True, -- trigger monitor online/offline active BOOLEAN DEFAULT True, -- trigger monitor online/offline
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW() updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
); );
-- Description -- Description
COMMENT ON TABLE COMMENT ON TABLE
api.metadata api.metadata
IS 'Stores metadata from vessel'; IS 'Stores metadata received from vessel, aka signalk plugin';
COMMENT ON COLUMN api.metadata.active IS 'trigger monitor online/offline'; COMMENT ON COLUMN api.metadata.active IS 'trigger monitor online/offline';
-- Index COMMENT ON COLUMN api.metadata.vessel_id IS 'vessel_id link auth.vessels with api.metadata';
CREATE INDEX metadata_vessel_id_idx ON api.metadata (vessel_id); -- Duplicate Indexes
--CREATE INDEX metadata_mmsi_idx ON api.metadata (mmsi); --CREATE INDEX metadata_vessel_id_idx ON api.metadata (vessel_id);
-- is unused index ?
CREATE INDEX metadata_name_idx ON api.metadata (name);
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- Metrics from signalk -- Metrics from signalk
-- Create vessel status enum -- Create vessel status enum
CREATE TYPE status AS ENUM ('sailing', 'motoring', 'moored', 'anchored'); CREATE TYPE status_type AS ENUM ('sailing', 'motoring', 'moored', 'anchored');
-- Table api.metrics -- Table api.metrics
CREATE TABLE IF NOT EXISTS api.metrics ( CREATE TABLE IF NOT EXISTS api.metrics (
time TIMESTAMP WITHOUT TIME ZONE NOT NULL, time TIMESTAMPTZ NOT NULL,
client_id TEXT NULL, client_id TEXT NULL,
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT, vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
latitude DOUBLE PRECISION NULL, latitude DOUBLE PRECISION NULL,
@@ -51,8 +51,8 @@ CREATE TABLE IF NOT EXISTS api.metrics (
courseOverGroundTrue DOUBLE PRECISION NULL, courseOverGroundTrue DOUBLE PRECISION NULL,
windSpeedApparent DOUBLE PRECISION NULL, windSpeedApparent DOUBLE PRECISION NULL,
angleSpeedApparent DOUBLE PRECISION NULL, angleSpeedApparent DOUBLE PRECISION NULL,
status status NULL, status TEXT NULL,
metrics jsonb NULL, metrics JSONB NULL,
--CONSTRAINT valid_client_id CHECK (length(client_id) > 10), --CONSTRAINT valid_client_id CHECK (length(client_id) > 10),
--CONSTRAINT valid_latitude CHECK (latitude >= -90 and latitude <= 90), --CONSTRAINT valid_latitude CHECK (latitude >= -90 and latitude <= 90),
--CONSTRAINT valid_longitude CHECK (longitude >= -180 and longitude <= 180), --CONSTRAINT valid_longitude CHECK (longitude >= -180 and longitude <= 180),
@@ -95,13 +95,15 @@ SELECT create_hypertable('api.metrics', 'time', chunk_time_interval => INTERVAL
-- Check unused index -- Check unused index
CREATE TABLE IF NOT EXISTS api.logbook( CREATE TABLE IF NOT EXISTS api.logbook(
id SERIAL PRIMARY KEY, id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT, vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
active BOOLEAN DEFAULT false, active BOOLEAN DEFAULT false,
name TEXT, name TEXT,
_from_moorage_id INT NULL,
_from TEXT, _from TEXT,
_from_lat DOUBLE PRECISION NULL, _from_lat DOUBLE PRECISION NULL,
_from_lng DOUBLE PRECISION NULL, _from_lng DOUBLE PRECISION NULL,
_to_moorage_id INT NULL,
_to TEXT, _to TEXT,
_to_lat DOUBLE PRECISION NULL, _to_lat DOUBLE PRECISION NULL,
_to_lng DOUBLE PRECISION NULL, _to_lng DOUBLE PRECISION NULL,
@@ -109,8 +111,8 @@ CREATE TABLE IF NOT EXISTS api.logbook(
track_geom geometry(LINESTRING,4326) NULL, track_geom geometry(LINESTRING,4326) NULL,
track_geog geography(LINESTRING) NULL, track_geog geography(LINESTRING) NULL,
track_geojson JSONB NULL, track_geojson JSONB NULL,
_from_time TIMESTAMP WITHOUT TIME ZONE NOT NULL, _from_time TIMESTAMPTZ NOT NULL,
_to_time TIMESTAMP WITHOUT TIME ZONE NULL, _to_time TIMESTAMPTZ NULL,
distance NUMERIC, -- meters? distance NUMERIC, -- meters?
duration INTERVAL, -- duration in days and hours? duration INTERVAL, -- duration in days and hours?
avg_speed DOUBLE PRECISION NULL, avg_speed DOUBLE PRECISION NULL,
@@ -125,9 +127,14 @@ COMMENT ON TABLE
IS 'Stores generated logbook'; IS 'Stores generated logbook';
COMMENT ON COLUMN api.logbook.distance IS 'in NM'; COMMENT ON COLUMN api.logbook.distance IS 'in NM';
COMMENT ON COLUMN api.logbook.extra IS 'computed signalk metrics of interest, runTime, currentLevel, etc'; COMMENT ON COLUMN api.logbook.extra IS 'computed signalk metrics of interest, runTime, currentLevel, etc';
COMMENT ON COLUMN api.logbook.duration IS 'Best to use standard ISO 8601';
-- Index todo! -- Index todo!
CREATE INDEX logbook_vessel_id_idx ON api.logbook (vessel_id); CREATE INDEX logbook_vessel_id_idx ON api.logbook (vessel_id);
CREATE INDEX logbook_from_time_idx ON api.logbook (_from_time);
CREATE INDEX logbook_to_time_idx ON api.logbook (_to_time);
CREATE INDEX logbook_from_moorage_id_idx ON api.logbook (_from_moorage_id);
CREATE INDEX logbook_to_moorage_id_idx ON api.logbook (_to_moorage_id);
CREATE INDEX ON api.logbook USING GIST ( track_geom ); CREATE INDEX ON api.logbook USING GIST ( track_geom );
COMMENT ON COLUMN api.logbook.track_geom IS 'postgis geometry type EPSG:4326 Unit: degres'; COMMENT ON COLUMN api.logbook.track_geom IS 'postgis geometry type EPSG:4326 Unit: degres';
CREATE INDEX ON api.logbook USING GIST ( track_geog ); CREATE INDEX ON api.logbook USING GIST ( track_geog );
@@ -139,15 +146,16 @@ COMMENT ON COLUMN api.logbook.track_geojson IS 'store generated geojson with tra
-- Stays -- Stays
-- virtual logbook by boat? -- virtual logbook by boat?
CREATE TABLE IF NOT EXISTS api.stays( CREATE TABLE IF NOT EXISTS api.stays(
id SERIAL PRIMARY KEY, id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT, vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
active BOOLEAN DEFAULT false, active BOOLEAN DEFAULT false,
moorage_id INT NULL,
name TEXT, name TEXT,
latitude DOUBLE PRECISION NULL, latitude DOUBLE PRECISION NULL,
longitude DOUBLE PRECISION NULL, longitude DOUBLE PRECISION NULL,
geog GEOGRAPHY(POINT) NULL, geog GEOGRAPHY(POINT) NULL,
arrived TIMESTAMP WITHOUT TIME ZONE NOT NULL, arrived TIMESTAMPTZ NOT NULL,
departed TIMESTAMP WITHOUT TIME ZONE, departed TIMESTAMPTZ,
duration INTERVAL, -- duration in days and hours? duration INTERVAL, -- duration in days and hours?
stay_code INT DEFAULT 1, -- REFERENCES api.stays_at(stay_code), stay_code INT DEFAULT 1, -- REFERENCES api.stays_at(stay_code),
notes TEXT NULL notes TEXT NULL
@@ -156,9 +164,11 @@ CREATE TABLE IF NOT EXISTS api.stays(
COMMENT ON TABLE COMMENT ON TABLE
api.stays api.stays
IS 'Stores generated stays'; IS 'Stores generated stays';
COMMENT ON COLUMN api.stays.duration IS 'Best to use standard ISO 8601';
-- Index -- Index
CREATE INDEX stays_vessel_id_idx ON api.stays (vessel_id); CREATE INDEX stays_vessel_id_idx ON api.stays (vessel_id);
CREATE INDEX stays_moorage_id_idx ON api.stays (moorage_id);
CREATE INDEX ON api.stays USING GIST ( geog ); CREATE INDEX ON api.stays USING GIST ( geog );
COMMENT ON COLUMN api.stays.geog IS 'postgis geography type default SRID 4326 Unit: degres'; COMMENT ON COLUMN api.stays.geog IS 'postgis geography type default SRID 4326 Unit: degres';
-- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography. -- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography.
@@ -167,13 +177,10 @@ COMMENT ON COLUMN api.stays.geog IS 'postgis geography type default SRID 4326 Un
-- Moorages -- Moorages
-- virtual logbook by boat? -- virtual logbook by boat?
CREATE TABLE IF NOT EXISTS api.moorages( CREATE TABLE IF NOT EXISTS api.moorages(
id SERIAL PRIMARY KEY, id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
--client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
--client_id VARCHAR(255) NULL,
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT, vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
name TEXT, name TEXT,
country TEXT, country TEXT,
stay_id INT NOT NULL, -- needed?
stay_code INT DEFAULT 1, -- needed? REFERENCES api.stays_at(stay_code) stay_code INT DEFAULT 1, -- needed? REFERENCES api.stays_at(stay_code)
stay_duration INTERVAL NULL, stay_duration INTERVAL NULL,
reference_count INT DEFAULT 1, reference_count INT DEFAULT 1,
@@ -181,7 +188,9 @@ CREATE TABLE IF NOT EXISTS api.moorages(
longitude DOUBLE PRECISION NULL, longitude DOUBLE PRECISION NULL,
geog GEOGRAPHY(POINT) NULL, geog GEOGRAPHY(POINT) NULL,
home_flag BOOLEAN DEFAULT false, home_flag BOOLEAN DEFAULT false,
notes TEXT NULL notes TEXT NULL,
overpass JSONB NULL,
nominatim JSONB NULL
); );
-- Description -- Description
COMMENT ON TABLE COMMENT ON TABLE
@@ -193,11 +202,12 @@ CREATE INDEX moorages_vessel_id_idx ON api.moorages (vessel_id);
CREATE INDEX ON api.moorages USING GIST ( geog ); CREATE INDEX ON api.moorages USING GIST ( geog );
COMMENT ON COLUMN api.moorages.geog IS 'postgis geography type default SRID 4326 Unit: degres'; COMMENT ON COLUMN api.moorages.geog IS 'postgis geography type default SRID 4326 Unit: degres';
-- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography. -- With other SRID ERROR: Only lon/lat coordinate systems are supported in geography.
COMMENT ON COLUMN api.moorages.stay_duration IS 'Best to use standard ISO 8601';
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- Stay Type -- Stay Type
CREATE TABLE IF NOT EXISTS api.stays_at( CREATE TABLE IF NOT EXISTS api.stays_at(
stay_code INTEGER NOT NULL, stay_code INTEGER UNIQUE NOT NULL,
description TEXT NOT NULL description TEXT NOT NULL
); );
-- Description -- Description
@@ -248,7 +258,10 @@ CREATE FUNCTION metadata_upsert_trigger_fn() RETURNS trigger AS $metadata_upsert
ship_type = NEW.ship_type, ship_type = NEW.ship_type,
plugin_version = NEW.plugin_version, plugin_version = NEW.plugin_version,
signalk_version = NEW.signalk_version, signalk_version = NEW.signalk_version,
time = NEW.time, platform = NEW.platform,
configuration = NEW.configuration,
-- time = NEW.time, ignore the time sent by the vessel as it is out of sync sometimes.
time = NOW(), -- overwrite the time sent by the vessel
active = true active = true
WHERE id = metadata_id; WHERE id = metadata_id;
RETURN NULL; -- Ignore insert RETURN NULL; -- Ignore insert
@@ -257,7 +270,9 @@ CREATE FUNCTION metadata_upsert_trigger_fn() RETURNS trigger AS $metadata_upsert
-- set vessel_id from jwt if not present in INSERT query -- set vessel_id from jwt if not present in INSERT query
NEW.vessel_id := current_setting('vessel.id'); NEW.vessel_id := current_setting('vessel.id');
END IF; END IF;
-- Insert new vessel metadata and -- Ignore and overwrite the time sent by the vessel
NEW.time := NOW();
-- Insert new vessel metadata
RETURN NEW; -- Insert new vessel metadata RETURN NEW; -- Insert new vessel metadata
END IF; END IF;
END; END;
@@ -292,6 +307,22 @@ COMMENT ON FUNCTION
public.metadata_notification_trigger_fn public.metadata_notification_trigger_fn
IS 'process metadata notification from vessel, monitoring_online'; IS 'process metadata notification from vessel, monitoring_online';
-- FUNCTION Metadata grafana provisioning for new vessel after insert
DROP FUNCTION IF EXISTS metadata_grafana_trigger_fn;
CREATE FUNCTION metadata_grafana_trigger_fn() RETURNS trigger AS $metadata_grafana$
    BEGIN
        RAISE NOTICE 'metadata_grafana_trigger_fn [%]', NEW;
        -- Queue the freshly inserted vessel metadata for asynchronous
        -- grafana provisioning; a worker consumes the 'grafana' channel later.
        INSERT INTO process_queue (channel, payload, stored, ref_id)
            VALUES ('grafana', NEW.id, NOW(), NEW.vessel_id);
        -- AFTER trigger: the return value is ignored
        RETURN NULL;
    END;
$metadata_grafana$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.metadata_grafana_trigger_fn
    IS 'process metadata grafana provisioning from vessel';
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- Trigger metadata table -- Trigger metadata table
-- --
@@ -309,7 +340,15 @@ CREATE TRIGGER metadata_notification_trigger AFTER INSERT ON api.metadata
-- Description -- Description
COMMENT ON TRIGGER COMMENT ON TRIGGER
metadata_notification_trigger ON api.metadata metadata_notification_trigger ON api.metadata
IS 'AFTER INSERT ON api.metadata run function metadata_update_trigger_fn for notification on new vessel'; IS 'AFTER INSERT ON api.metadata run function metadata_notification_trigger_fn for later notification on new vessel';
-- Metadata trigger AFTER INSERT
CREATE TRIGGER metadata_grafana_trigger AFTER INSERT ON api.metadata
FOR EACH ROW EXECUTE FUNCTION metadata_grafana_trigger_fn();
-- Description
COMMENT ON TRIGGER
metadata_grafana_trigger ON api.metadata
IS 'AFTER INSERT ON api.metadata run function metadata_grafana_trigger_fn for later grafana provisioning on new vessel';
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- Trigger Functions metrics table -- Trigger Functions metrics table
@@ -320,13 +359,13 @@ COMMENT ON TRIGGER
DROP FUNCTION IF EXISTS metrics_trigger_fn; DROP FUNCTION IF EXISTS metrics_trigger_fn;
CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
DECLARE DECLARE
previous_status varchar; previous_metric record;
previous_time TIMESTAMP WITHOUT TIME ZONE; stay_code INTEGER;
stay_code integer; logbook_id INTEGER;
logbook_id integer; stay_id INTEGER;
stay_id integer; valid_status BOOLEAN := False;
valid_status BOOLEAN;
_vessel_id TEXT; _vessel_id TEXT;
distance BOOLEAN := False;
BEGIN BEGIN
--RAISE NOTICE 'metrics_trigger_fn'; --RAISE NOTICE 'metrics_trigger_fn';
--RAISE WARNING 'metrics_trigger_fn [%] [%]', current_setting('vessel.id', true), NEW; --RAISE WARNING 'metrics_trigger_fn [%] [%]', current_setting('vessel.id', true), NEW;
@@ -337,20 +376,20 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
END IF; END IF;
-- Boat metadata are check using api.metrics REFERENCES to api.metadata -- Boat metadata are check using api.metrics REFERENCES to api.metadata
-- Fetch the latest entry to compare status against the new status to be insert -- Fetch the latest entry to compare status against the new status to be insert
SELECT coalesce(m.status, 'moored'), m.time INTO previous_status, previous_time SELECT * INTO previous_metric
FROM api.metrics m FROM api.metrics m
WHERE m.vessel_id IS NOT NULL WHERE m.vessel_id IS NOT NULL
AND m.vessel_id = current_setting('vessel.id', true) AND m.vessel_id = current_setting('vessel.id', true)
ORDER BY m.time DESC LIMIT 1; ORDER BY m.time DESC LIMIT 1;
--RAISE NOTICE 'Metrics Status, New:[%] Previous:[%]', NEW.status, previous_status; --RAISE NOTICE 'Metrics Status, New:[%] Previous:[%]', NEW.status, previous_metric.status;
IF previous_time = NEW.time THEN IF previous_metric.time = NEW.time THEN
-- Ignore entry if same time -- Ignore entry if same time
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], duplicate time [%] = [%]', NEW.vessel_id, previous_time, NEW.time; RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], duplicate time [%] = [%]', NEW.vessel_id, previous_metric.time, NEW.time;
RETURN NULL; RETURN NULL;
END IF; END IF;
IF previous_time > NEW.time THEN IF previous_metric.time > NEW.time THEN
-- Ignore entry if new time is later than previous time -- Ignore entry if new time is later than previous time
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], new time is older than previous_time [%] > [%]', NEW.vessel_id, previous_time, NEW.time; RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], new time is older than previous_metric.time [%] > [%]', NEW.vessel_id, previous_metric.time, NEW.time;
RETURN NULL; RETURN NULL;
END IF; END IF;
-- Check if latitude or longitude are type double -- Check if latitude or longitude are type double
@@ -383,18 +422,30 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], latitude and longitude are equal [%] [%]', NEW.vessel_id, NEW.latitude, NEW.longitude; RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], latitude and longitude are equal [%] [%]', NEW.vessel_id, NEW.latitude, NEW.longitude;
RETURN NULL; RETURN NULL;
END IF; END IF;
-- Check if status is null -- Check distance with previous point is > 10km
IF NEW.status IS NULL THEN --SELECT ST_Distance(
-- ST_MakePoint(NEW.latitude,NEW.longitude)::geography,
-- ST_MakePoint(previous_metric.latitude,previous_metric.longitude)::geography) > 10000 INTO distance;
--IF distance IS True THEN
-- RAISE WARNING 'Metrics Ignoring metric, distance between previous metric and new metric is too large, vessel_id [%] distance[%]', NEW.vessel_id, distance;
-- RETURN NULL;
--END IF;
-- Check if status is null but speed is over 3knots set status to sailing
IF NEW.status IS NULL AND NEW.speedoverground >= 3 THEN
RAISE WARNING 'Metrics Unknown NEW.status, vessel_id [%], null status, set to sailing because of speedoverground is +3 from [%]', NEW.vessel_id, NEW.status;
NEW.status := 'sailing';
-- Check if status is null then set status to default moored
ELSIF NEW.status IS NULL THEN
RAISE WARNING 'Metrics Unknown NEW.status, vessel_id [%], null status, set to default moored from [%]', NEW.vessel_id, NEW.status; RAISE WARNING 'Metrics Unknown NEW.status, vessel_id [%], null status, set to default moored from [%]', NEW.vessel_id, NEW.status;
NEW.status := 'moored'; NEW.status := 'moored';
END IF; END IF;
IF previous_status IS NULL THEN IF previous_metric.status IS NULL THEN
IF NEW.status = 'anchored' THEN IF NEW.status = 'anchored' THEN
RAISE WARNING 'Metrics Unknown previous_status from vessel_id [%], [%] set to default current status [%]', NEW.vessel_id, previous_status, NEW.status; RAISE WARNING 'Metrics Unknown previous_metric.status from vessel_id [%], [%] set to default current status [%]', NEW.vessel_id, previous_metric.status, NEW.status;
previous_status := NEW.status; previous_metric.status := NEW.status;
ELSE ELSE
RAISE WARNING 'Metrics Unknown previous_status from vessel_id [%], [%] set to default status moored vs [%]', NEW.vessel_id, previous_status, NEW.status; RAISE WARNING 'Metrics Unknown previous_metric.status from vessel_id [%], [%] set to default status moored vs [%]', NEW.vessel_id, previous_metric.status, NEW.status;
previous_status := 'moored'; previous_metric.status := 'moored';
END IF; END IF;
-- Add new stay as no previous entry exist -- Add new stay as no previous entry exist
INSERT INTO api.stays INSERT INTO api.stays
@@ -404,13 +455,13 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
-- Add stay entry to process queue for further processing -- Add stay entry to process queue for further processing
INSERT INTO process_queue (channel, payload, stored, ref_id) INSERT INTO process_queue (channel, payload, stored, ref_id)
VALUES ('new_stay', stay_id, now(), current_setting('vessel.id', true)); VALUES ('new_stay', stay_id, now(), current_setting('vessel.id', true));
RAISE WARNING 'Metrics Insert first stay as no previous metrics exist, stay_id %', stay_id; RAISE WARNING 'Metrics Insert first stay as no previous metrics exist, stay_id stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
END IF; END IF;
-- Check if status is valid enum -- Check if status is valid enum
SELECT NEW.status::name = any(enum_range(null::status)::name[]) INTO valid_status; SELECT NEW.status::name = any(enum_range(null::status_type)::name[]) INTO valid_status;
IF valid_status IS False THEN IF valid_status IS False THEN
-- Ignore entry if status is invalid -- Ignore entry if status is invalid
RAISE WARNING 'Metrics Ignoring metric, invalid status [%]', NEW.status; RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], invalid status [%]', NEW.vessel_id, NEW.status;
RETURN NULL; RETURN NULL;
END IF; END IF;
-- Check if speedOverGround is valid value -- Check if speedOverGround is valid value
@@ -422,10 +473,10 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
-- Check the state and if any previous/current entry -- Check the state and if any previous/current entry
-- If change of state and new status is sailing or motoring -- If change of state and new status is sailing or motoring
IF previous_status::TEXT <> NEW.status::TEXT AND IF previous_metric.status::TEXT <> NEW.status::TEXT AND
( (NEW.status::TEXT = 'sailing' AND previous_status::TEXT <> 'motoring') ( (NEW.status::TEXT = 'sailing' AND previous_metric.status::TEXT <> 'motoring')
OR (NEW.status::TEXT = 'motoring' AND previous_status::TEXT <> 'sailing') ) THEN OR (NEW.status::TEXT = 'motoring' AND previous_metric.status::TEXT <> 'sailing') ) THEN
RAISE WARNING 'Metrics Update status, try new logbook, New:[%] Previous:[%]', NEW.status, previous_status; RAISE WARNING 'Metrics Update status, try new logbook, New:[%] Previous:[%]', NEW.status, previous_metric.status;
-- Start new log -- Start new log
logbook_id := public.trip_in_progress_fn(current_setting('vessel.id', true)::TEXT); logbook_id := public.trip_in_progress_fn(current_setting('vessel.id', true)::TEXT);
IF logbook_id IS NULL THEN IF logbook_id IS NULL THEN
@@ -433,7 +484,7 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
(vessel_id, active, _from_time, _from_lat, _from_lng) (vessel_id, active, _from_time, _from_lat, _from_lng)
VALUES (current_setting('vessel.id', true), true, NEW.time, NEW.latitude, NEW.longitude) VALUES (current_setting('vessel.id', true), true, NEW.time, NEW.latitude, NEW.longitude)
RETURNING id INTO logbook_id; RETURNING id INTO logbook_id;
RAISE WARNING 'Metrics Insert new logbook, logbook_id %', logbook_id; RAISE WARNING 'Metrics Insert new logbook, logbook_id [%] [%] [%]', logbook_id, NEW.status, NEW.time;
ELSE ELSE
UPDATE api.logbook UPDATE api.logbook
SET SET
@@ -442,7 +493,7 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
_to_lat = NEW.latitude, _to_lat = NEW.latitude,
_to_lng = NEW.longitude _to_lng = NEW.longitude
WHERE id = logbook_id; WHERE id = logbook_id;
RAISE WARNING 'Metrics Existing Logbook logbook_id [%] [%] [%]', logbook_id, NEW.status, NEW.time; RAISE WARNING 'Metrics Existing logbook logbook_id [%] [%] [%]', logbook_id, NEW.status, NEW.time;
END IF; END IF;
-- End current stay -- End current stay
@@ -453,20 +504,20 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
active = false, active = false,
departed = NEW.time departed = NEW.time
WHERE id = stay_id; WHERE id = stay_id;
RAISE WARNING 'Metrics Updating Stay end current stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time; -- Add stay entry to process queue for further processing
-- Add moorage entry to process queue for further processing
INSERT INTO process_queue (channel, payload, stored, ref_id) INSERT INTO process_queue (channel, payload, stored, ref_id)
VALUES ('new_moorage', stay_id, now(), current_setting('vessel.id', true)); VALUES ('new_stay', stay_id, NOW(), current_setting('vessel.id', true));
RAISE WARNING 'Metrics Updating Stay end current stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
ELSE ELSE
RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time; RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time;
END IF; END IF;
-- If change of state and new status is moored or anchored -- If change of state and new status is moored or anchored
ELSIF previous_status::TEXT <> NEW.status::TEXT AND ELSIF previous_metric.status::TEXT <> NEW.status::TEXT AND
( (NEW.status::TEXT = 'moored' AND previous_status::TEXT <> 'anchored') ( (NEW.status::TEXT = 'moored' AND previous_metric.status::TEXT <> 'anchored')
OR (NEW.status::TEXT = 'anchored' AND previous_status::TEXT <> 'moored') ) THEN OR (NEW.status::TEXT = 'anchored' AND previous_metric.status::TEXT <> 'moored') ) THEN
-- Start new stays -- Start new stays
RAISE WARNING 'Metrics Update status, try new stay, New:[%] Previous:[%]', NEW.status, previous_status; RAISE WARNING 'Metrics Update status, try new stay, New:[%] Previous:[%]', NEW.status, previous_metric.status;
stay_id := public.stay_in_progress_fn(current_setting('vessel.id', true)::TEXT); stay_id := public.stay_in_progress_fn(current_setting('vessel.id', true)::TEXT);
IF stay_id IS NULL THEN IF stay_id IS NULL THEN
RAISE WARNING 'Metrics Inserting new stay [%]', NEW.status; RAISE WARNING 'Metrics Inserting new stay [%]', NEW.status;
@@ -480,15 +531,14 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
(vessel_id, active, arrived, latitude, longitude, stay_code) (vessel_id, active, arrived, latitude, longitude, stay_code)
VALUES (current_setting('vessel.id', true), true, NEW.time, NEW.latitude, NEW.longitude, stay_code) VALUES (current_setting('vessel.id', true), true, NEW.time, NEW.latitude, NEW.longitude, stay_code)
RETURNING id INTO stay_id; RETURNING id INTO stay_id;
-- Add stay entry to process queue for further processing RAISE WARNING 'Metrics Insert new stay, stay_id [%] [%] [%]', stay_id, NEW.status, NEW.time;
INSERT INTO process_queue (channel, payload, stored, ref_id)
VALUES ('new_stay', stay_id, now(), current_setting('vessel.id', true));
ELSE ELSE
RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time; RAISE WARNING 'Metrics Invalid stay_id [%] [%]', stay_id, NEW.time;
UPDATE api.stays UPDATE api.stays
SET SET
active = false, active = false,
departed = NEW.time departed = NEW.time,
notes = 'Invalid stay?'
WHERE id = stay_id; WHERE id = stay_id;
END IF; END IF;
@@ -507,9 +557,9 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
WHERE id = logbook_id; WHERE id = logbook_id;
-- Add logbook entry to process queue for later processing -- Add logbook entry to process queue for later processing
INSERT INTO process_queue (channel, payload, stored, ref_id) INSERT INTO process_queue (channel, payload, stored, ref_id)
VALUEs ('new_logbook', logbook_id, now(), current_setting('vessel.id', true)); VALUES ('pre_logbook', logbook_id, NOW(), current_setting('vessel.id', true));
ELSE ELSE
RAISE WARNING 'Metrics Invalid logbook_id [%] [%]', logbook_id, NEW.time; RAISE WARNING 'Metrics Invalid logbook_id [%] [%] [%]', logbook_id, NEW.status, NEW.time;
END IF; END IF;
END IF; END IF;
RETURN NEW; -- Finally insert the actual new metric RETURN NEW; -- Finally insert the actual new metric
@@ -518,7 +568,7 @@ $metrics$ LANGUAGE plpgsql;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
public.metrics_trigger_fn public.metrics_trigger_fn
IS 'process metrics from vessel, generate new_logbook and new_stay.'; IS 'process metrics from vessel, generate pre_logbook and new_stay.';
-- --
-- Triggers logbook update on metrics insert -- Triggers logbook update on metrics insert
@@ -528,3 +578,117 @@ CREATE TRIGGER metrics_trigger BEFORE INSERT ON api.metrics
COMMENT ON TRIGGER COMMENT ON TRIGGER
metrics_trigger ON api.metrics metrics_trigger ON api.metrics
IS 'BEFORE INSERT ON api.metrics run function metrics_trigger_fn'; IS 'BEFORE INSERT ON api.metrics run function metrics_trigger_fn';
-- Function update of name and stay_code on logbook and stays reference
DROP FUNCTION IF EXISTS moorage_update_trigger_fn;
-- Propagate a moorage rename and stay_code change to the rows that
-- denormalize them: logbook _from/_to names and stays.stay_code.
CREATE FUNCTION moorage_update_trigger_fn() RETURNS trigger AS $moorage_update$
DECLARE
BEGIN
    RAISE NOTICE 'moorages_update_trigger_fn [%]', NEW;
    -- IS DISTINCT FROM instead of != so changes to or from NULL still
    -- propagate (NULL != x evaluates to NULL and the branch is skipped).
    IF ( OLD.name IS DISTINCT FROM NEW.name ) THEN
        UPDATE api.logbook SET _from = NEW.name WHERE _from_moorage_id = NEW.id;
        UPDATE api.logbook SET _to = NEW.name WHERE _to_moorage_id = NEW.id;
    END IF;
    IF ( OLD.stay_code IS DISTINCT FROM NEW.stay_code ) THEN
        UPDATE api.stays SET stay_code = NEW.stay_code WHERE moorage_id = NEW.id;
    END IF;
    RETURN NULL; -- result is ignored since this is an AFTER trigger
END;
$moorage_update$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.moorage_update_trigger_fn
    IS 'Automatic update of name and stay_code on logbook and stays reference';
-- Triggers moorage update after update
CREATE TRIGGER moorage_update_trigger AFTER UPDATE ON api.moorages
    FOR EACH ROW EXECUTE FUNCTION moorage_update_trigger_fn();
-- Description
COMMENT ON TRIGGER moorage_update_trigger
  ON api.moorages
  IS 'Automatic update of name and stay_code on logbook and stays reference';
-- Function delete logbook and stays reference when delete a moorage
-- Cascade-deletes every stay and logbook row that references the moorage
-- being removed (either as arrival stay or as from/to moorage of a log).
DROP FUNCTION IF EXISTS moorage_delete_trigger_fn;
CREATE FUNCTION moorage_delete_trigger_fn() RETURNS trigger AS $moorage_delete$
DECLARE
BEGIN
    RAISE NOTICE 'moorages_delete_trigger_fn [%]', OLD;
    DELETE FROM api.stays WHERE moorage_id = OLD.id;
    DELETE FROM api.logbook WHERE _from_moorage_id = OLD.id;
    DELETE FROM api.logbook WHERE _to_moorage_id = OLD.id;
    RETURN OLD; -- BEFORE DELETE trigger: returning OLD lets the DELETE proceed
END;
$moorage_delete$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.moorage_delete_trigger_fn
    IS 'Automatic delete logbook and stays reference when delete a moorage';
-- Triggers moorage delete
CREATE TRIGGER moorage_delete_trigger BEFORE DELETE ON api.moorages
    FOR EACH ROW EXECUTE FUNCTION moorage_delete_trigger_fn();
-- Description
COMMENT ON TRIGGER moorage_delete_trigger
  ON api.moorages
  IS 'Automatic delete logbook and stays reference when delete a moorage';
-- Function process_new on completed logbook
DROP FUNCTION IF EXISTS logbook_completed_trigger_fn;
-- Queue a just-completed logbook (its _to_time was updated) so the
-- background worker picks it up via the process_queue table.
CREATE FUNCTION logbook_completed_trigger_fn() RETURNS trigger AS $logbook_done$
BEGIN
    RAISE NOTICE 'logbook_completed_trigger_fn [%]', OLD;
    RAISE NOTICE 'logbook_completed_trigger_fn [%] [%]', OLD._to_time, NEW._to_time;
    -- Hand the logbook id to the async worker on the 'new_logbook' channel.
    --IF ( OLD._to_time <> NEW._to_time ) THEN
    INSERT INTO process_queue (channel, payload, stored, ref_id)
        VALUES ('new_logbook', NEW.id, NOW(), current_setting('vessel.id', true));
    --END IF;
    RETURN OLD; -- return value is ignored for an AFTER trigger
END;
$logbook_done$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_completed_trigger_fn
    IS 'Automatic process_queue for completed logbook._to_time';
-- Triggers logbook completed
--CREATE TRIGGER logbook_completed_trigger AFTER UPDATE ON api.logbook
--    FOR EACH ROW
--    WHEN (OLD._to_time IS DISTINCT FROM NEW._to_time)
--    EXECUTE FUNCTION logbook_completed_trigger_fn();
-- Description
--COMMENT ON TRIGGER logbook_completed_trigger
--  ON api.logbook
--  IS 'Automatic process_queue for completed logbook';
-- Function process_new on completed Stay
DROP FUNCTION IF EXISTS stay_completed_trigger_fn;
-- Queue a just-completed stay (its departed column was updated) so the
-- background worker picks it up via the process_queue table.
CREATE FUNCTION stay_completed_trigger_fn() RETURNS trigger AS $stay_done$
BEGIN
    RAISE NOTICE 'stay_completed_trigger_fn [%]', OLD;
    RAISE NOTICE 'stay_completed_trigger_fn [%] [%]', OLD.departed, NEW.departed;
    -- Hand the stay id to the async worker on the 'new_stay' channel.
    --IF ( OLD.departed <> NEW.departed ) THEN
    INSERT INTO process_queue (channel, payload, stored, ref_id)
        VALUES ('new_stay', NEW.id, NOW(), current_setting('vessel.id', true));
    --END IF;
    RETURN OLD; -- return value is ignored for an AFTER trigger
END;
$stay_done$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.stay_completed_trigger_fn
    IS 'Automatic process_queue for completed stay.departed';
-- Triggers stay completed
--CREATE TRIGGER stay_completed_trigger AFTER UPDATE ON api.stays
--    FOR EACH ROW
--    WHEN (OLD.departed IS DISTINCT FROM NEW.departed)
--    EXECUTE FUNCTION stay_completed_trigger_fn();
-- Description
--COMMENT ON TRIGGER stay_completed_trigger
--  ON api.stays
--  IS 'Automatic process_queue for completed stay';

View File

@@ -7,6 +7,12 @@
-- --
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- PostgREST Media Type Handlers
CREATE DOMAIN "text/xml" AS xml;
CREATE DOMAIN "application/geo+json" AS jsonb;
CREATE DOMAIN "application/gpx+xml" AS xml;
CREATE DOMAIN "application/vnd.google-earth.kml+xml" AS xml;
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- Functions API schema -- Functions API schema
-- Timelapse - replay logs -- Timelapse - replay logs
@@ -29,8 +35,7 @@ CREATE OR REPLACE FUNCTION api.timelapse_fn(
WHERE id >= start_log WHERE id >= start_log
AND id <= end_log AND id <= end_log
AND track_geom IS NOT NULL AND track_geom IS NOT NULL
GROUP BY id ORDER BY _from_time ASC
ORDER BY id ASC
) )
SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM ( SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM (
SELECT ST_Collect( SELECT ST_Collect(
@@ -42,11 +47,10 @@ CREATE OR REPLACE FUNCTION api.timelapse_fn(
WITH logbook as ( WITH logbook as (
SELECT track_geom SELECT track_geom
FROM api.logbook FROM api.logbook
WHERE _from_time >= start_log::TIMESTAMP WITHOUT TIME ZONE WHERE _from_time >= start_date::TIMESTAMPTZ
AND _to_time <= end_date::TIMESTAMP WITHOUT TIME ZONE + interval '23 hours 59 minutes' AND _to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
AND track_geom IS NOT NULL AND track_geom IS NOT NULL
GROUP BY id ORDER BY _from_time ASC
ORDER BY id ASC
) )
SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM ( SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM (
SELECT ST_Collect( SELECT ST_Collect(
@@ -59,8 +63,7 @@ CREATE OR REPLACE FUNCTION api.timelapse_fn(
SELECT track_geom SELECT track_geom
FROM api.logbook FROM api.logbook
WHERE track_geom IS NOT NULL WHERE track_geom IS NOT NULL
GROUP BY id ORDER BY _from_time ASC
ORDER BY id ASC
) )
SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM ( SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM (
SELECT ST_Collect( SELECT ST_Collect(
@@ -72,7 +75,7 @@ CREATE OR REPLACE FUNCTION api.timelapse_fn(
-- Return a GeoJSON MultiLineString -- Return a GeoJSON MultiLineString
-- result _geojson [null, null] -- result _geojson [null, null]
--raise WARNING 'result _geojson %' , _geojson; --raise WARNING 'result _geojson %' , _geojson;
SELECT json_build_object( SELECT jsonb_build_object(
'type', 'FeatureCollection', 'type', 'FeatureCollection',
'features', ARRAY[_geojson] ) INTO geojson; 'features', ARRAY[_geojson] ) INTO geojson;
END; END;
@@ -82,6 +85,75 @@ COMMENT ON FUNCTION
api.timelapse_fn api.timelapse_fn
IS 'Export all selected logs geometry `track_geom` to a geojson as MultiLineString with empty properties'; IS 'Export all selected logs geometry `track_geom` to a geojson as MultiLineString with empty properties';
DROP FUNCTION IF EXISTS api.timelapse2_fn;
-- Export logbook track points as GeoJSON Point features carrying their
-- notes, filtered by a log id range, a date range, or (default) all logs.
CREATE OR REPLACE FUNCTION api.timelapse2_fn(
    IN start_log INTEGER DEFAULT NULL,
    IN end_log INTEGER DEFAULT NULL,
    IN start_date TEXT DEFAULT NULL,
    IN end_date TEXT DEFAULT NULL,
    OUT geojson JSONB) RETURNS JSONB AS $timelapse2$
    DECLARE
        _geojson jsonb;
    BEGIN
        -- Using sub query to force ordering by trip start time
        -- Extract the Point features (and their notes) from track_geojson
        IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
            -- Filter by a log id range
            SELECT jsonb_agg(
                    jsonb_build_object('type', 'Feature',
                                    'properties', jsonb_build_object( 'notes', f->'properties'->>'notes'),
                                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
                ) INTO _geojson
                FROM (
                    SELECT jsonb_array_elements(track_geojson->'features') AS f
                        FROM api.logbook
                        WHERE id >= start_log
                            AND id <= end_log
                            AND track_geojson IS NOT NULL
                        ORDER BY _from_time ASC
                ) AS sub
                WHERE (f->'geometry'->>'type') = 'Point';
        ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
            -- Filter by a date range; end_date is inclusive of the whole day
            SELECT jsonb_agg(
                    jsonb_build_object('type', 'Feature',
                                    'properties', jsonb_build_object( 'notes', f->'properties'->>'notes'),
                                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
                ) INTO _geojson
                FROM (
                    SELECT jsonb_array_elements(track_geojson->'features') AS f
                        FROM api.logbook
                        WHERE _from_time >= start_date::TIMESTAMPTZ
                            AND _to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                            AND track_geojson IS NOT NULL
                        ORDER BY _from_time ASC
                ) AS sub
                WHERE (f->'geometry'->>'type') = 'Point';
        ELSE
            -- No filter: export every log
            SELECT jsonb_agg(
                    jsonb_build_object('type', 'Feature',
                                    'properties', jsonb_build_object( 'notes', f->'properties'->>'notes'),
                                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
                ) INTO _geojson
                FROM (
                    SELECT jsonb_array_elements(track_geojson->'features') AS f
                        FROM api.logbook
                        WHERE track_geojson IS NOT NULL
                        ORDER BY _from_time ASC
                ) AS sub
                WHERE (f->'geometry'->>'type') = 'Point';
        END IF;
        -- Return a GeoJSON FeatureCollection
        -- Debug trace disabled, consistent with api.timelapse_fn
        --raise WARNING 'result _geojson %' , _geojson;
        -- COALESCE: with no matching rows jsonb_agg returns NULL which would
        -- produce "features": null (invalid GeoJSON); emit an empty array.
        SELECT jsonb_build_object(
            'type', 'FeatureCollection',
            'features', COALESCE(_geojson, '[]'::jsonb) ) INTO geojson;
    END;
$timelapse2$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.timelapse2_fn
    IS 'Export all selected logs geometry `track_geom` to a geojson as points with notes properties';
-- export_logbook_geojson_fn -- export_logbook_geojson_fn
DROP FUNCTION IF EXISTS api.export_logbook_geojson_fn; DROP FUNCTION IF EXISTS api.export_logbook_geojson_fn;
CREATE FUNCTION api.export_logbook_geojson_fn(IN _id integer, OUT geojson JSONB) RETURNS JSONB AS $export_logbook_geojson$ CREATE FUNCTION api.export_logbook_geojson_fn(IN _id integer, OUT geojson JSONB) RETURNS JSONB AS $export_logbook_geojson$
@@ -114,8 +186,8 @@ COMMENT ON FUNCTION
-- https://opencpn.org/OpenCPN/info/gpxvalidation.html -- https://opencpn.org/OpenCPN/info/gpxvalidation.html
-- --
DROP FUNCTION IF EXISTS api.export_logbook_gpx_fn; DROP FUNCTION IF EXISTS api.export_logbook_gpx_fn;
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_fn(IN _id INTEGER) RETURNS pg_catalog.xml CREATE OR REPLACE FUNCTION api.export_logbook_gpx_fn(IN _id INTEGER) RETURNS "text/xml"
AS $export_logbook_gpx2$ AS $export_logbook_gpx$
DECLARE DECLARE
app_settings jsonb; app_settings jsonb;
BEGIN BEGIN
@@ -162,7 +234,7 @@ AS $export_logbook_gpx2$
AND l.id = _id AND l.id = _id
GROUP BY l.name,l.notes,l.id; GROUP BY l.name,l.notes,l.id;
END; END;
$export_logbook_gpx2$ LANGUAGE plpgsql; $export_logbook_gpx$ LANGUAGE plpgsql;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
api.export_logbook_gpx_fn api.export_logbook_gpx_fn
@@ -172,7 +244,7 @@ COMMENT ON FUNCTION
-- https://developers.google.com/kml/documentation/kml_tut -- https://developers.google.com/kml/documentation/kml_tut
-- TODO https://developers.google.com/kml/documentation/time#timespans -- TODO https://developers.google.com/kml/documentation/time#timespans
DROP FUNCTION IF EXISTS api.export_logbook_kml_fn; DROP FUNCTION IF EXISTS api.export_logbook_kml_fn;
CREATE OR REPLACE FUNCTION api.export_logbook_kml_fn(IN _id INTEGER) RETURNS pg_catalog.xml CREATE OR REPLACE FUNCTION api.export_logbook_kml_fn(IN _id INTEGER) RETURNS "text/xml"
AS $export_logbook_kml$ AS $export_logbook_kml$
DECLARE DECLARE
logbook_rec record; logbook_rec record;
@@ -215,7 +287,7 @@ COMMENT ON FUNCTION
DROP FUNCTION IF EXISTS api.export_logbooks_gpx_fn; DROP FUNCTION IF EXISTS api.export_logbooks_gpx_fn;
CREATE OR REPLACE FUNCTION api.export_logbooks_gpx_fn( CREATE OR REPLACE FUNCTION api.export_logbooks_gpx_fn(
IN start_log INTEGER DEFAULT NULL, IN start_log INTEGER DEFAULT NULL,
IN end_log INTEGER DEFAULT NULL) RETURNS pg_catalog.xml IN end_log INTEGER DEFAULT NULL) RETURNS "application/gpx+xml"
AS $export_logbooks_gpx$ AS $export_logbooks_gpx$
declare declare
merged_jsonb jsonb; merged_jsonb jsonb;
@@ -232,8 +304,7 @@ AS $export_logbooks_gpx$
WHERE id >= start_log WHERE id >= start_log
AND id <= end_log AND id <= end_log
AND track_geojson IS NOT NULL AND track_geojson IS NOT NULL
GROUP BY id ORDER BY _from_time ASC
ORDER BY id ASC
) AS sub ) AS sub
WHERE (f->'geometry'->>'type') = 'Point'; WHERE (f->'geometry'->>'type') = 'Point';
ELSE ELSE
@@ -244,8 +315,7 @@ AS $export_logbooks_gpx$
SELECT jsonb_array_elements(track_geojson->'features') AS f SELECT jsonb_array_elements(track_geojson->'features') AS f
FROM api.logbook FROM api.logbook
WHERE track_geojson IS NOT NULL WHERE track_geojson IS NOT NULL
GROUP BY id ORDER BY _from_time ASC
ORDER BY id ASC
) AS sub ) AS sub
WHERE (f->'geometry'->>'type') = 'Point'; WHERE (f->'geometry'->>'type') = 'Point';
END IF; END IF;
@@ -281,7 +351,7 @@ COMMENT ON FUNCTION
DROP FUNCTION IF EXISTS api.export_logbooks_kml_fn; DROP FUNCTION IF EXISTS api.export_logbooks_kml_fn;
CREATE OR REPLACE FUNCTION api.export_logbooks_kml_fn( CREATE OR REPLACE FUNCTION api.export_logbooks_kml_fn(
IN start_log INTEGER DEFAULT NULL, IN start_log INTEGER DEFAULT NULL,
IN end_log INTEGER DEFAULT NULL) RETURNS pg_catalog.xml IN end_log INTEGER DEFAULT NULL) RETURNS "text/xml"
AS $export_logbooks_kml$ AS $export_logbooks_kml$
DECLARE DECLARE
_geom geometry; _geom geometry;
@@ -295,8 +365,7 @@ BEGIN
WHERE id >= start_log WHERE id >= start_log
AND id <= end_log AND id <= end_log
AND track_geom IS NOT NULL AND track_geom IS NOT NULL
GROUP BY id ORDER BY _from_time ASC
ORDER BY id ASC
) )
SELECT ST_Collect( SELECT ST_Collect(
ARRAY( ARRAY(
@@ -307,8 +376,7 @@ BEGIN
SELECT track_geom SELECT track_geom
FROM api.logbook FROM api.logbook
WHERE track_geom IS NOT NULL WHERE track_geom IS NOT NULL
GROUP BY id ORDER BY _from_time ASC
ORDER BY id ASC
) )
SELECT ST_Collect( SELECT ST_Collect(
ARRAY( ARRAY(
@@ -341,7 +409,7 @@ COMMENT ON FUNCTION
-- Find all log from and to moorage geopoint within 100m -- Find all log from and to moorage geopoint within 100m
DROP FUNCTION IF EXISTS api.find_log_from_moorage_fn; DROP FUNCTION IF EXISTS api.find_log_from_moorage_fn;
CREATE OR REPLACE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER, OUT geojson JSON) RETURNS JSON AS $find_log_from_moorage$ CREATE OR REPLACE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER, OUT geojson JSONB) RETURNS JSONB AS $find_log_from_moorage$
DECLARE DECLARE
moorage_rec record; moorage_rec record;
_geojson jsonb; _geojson jsonb;
@@ -364,7 +432,7 @@ CREATE OR REPLACE FUNCTION api.find_log_from_moorage_fn(IN _id INTEGER, OUT geoj
1000 -- in meters ? 1000 -- in meters ?
); );
-- Return a GeoJSON filter on LineString -- Return a GeoJSON filter on LineString
SELECT json_build_object( SELECT jsonb_build_object(
'type', 'FeatureCollection', 'type', 'FeatureCollection',
'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson; 'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson;
END; END;
@@ -375,7 +443,7 @@ COMMENT ON FUNCTION
IS 'Find all log from moorage geopoint within 100m'; IS 'Find all log from moorage geopoint within 100m';
DROP FUNCTION IF EXISTS api.find_log_to_moorage_fn; DROP FUNCTION IF EXISTS api.find_log_to_moorage_fn;
CREATE OR REPLACE FUNCTION api.find_log_to_moorage_fn(IN _id INTEGER, OUT geojson JSON) RETURNS JSON AS $find_log_to_moorage$ CREATE OR REPLACE FUNCTION api.find_log_to_moorage_fn(IN _id INTEGER, OUT geojson JSONB) RETURNS JSONB AS $find_log_to_moorage$
DECLARE DECLARE
moorage_rec record; moorage_rec record;
_geojson jsonb; _geojson jsonb;
@@ -398,7 +466,7 @@ CREATE OR REPLACE FUNCTION api.find_log_to_moorage_fn(IN _id INTEGER, OUT geojso
1000 -- in meters ? 1000 -- in meters ?
); );
-- Return a GeoJSON filter on LineString -- Return a GeoJSON filter on LineString
SELECT json_build_object( SELECT jsonb_build_object(
'type', 'FeatureCollection', 'type', 'FeatureCollection',
'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson; 'features', public.geojson_py_fn(_geojson, 'Point'::TEXT) ) INTO geojson;
END; END;
@@ -536,7 +604,7 @@ DROP FUNCTION IF EXISTS api.export_moorages_geojson_fn;
CREATE FUNCTION api.export_moorages_geojson_fn(OUT geojson JSONB) RETURNS JSONB AS $export_moorages_geojson$ CREATE FUNCTION api.export_moorages_geojson_fn(OUT geojson JSONB) RETURNS JSONB AS $export_moorages_geojson$
DECLARE DECLARE
BEGIN BEGIN
SELECT json_build_object( SELECT jsonb_build_object(
'type', 'FeatureCollection', 'type', 'FeatureCollection',
'features', 'features',
( SELECT ( SELECT
@@ -559,16 +627,19 @@ COMMENT ON FUNCTION
IS 'Export moorages as geojson'; IS 'Export moorages as geojson';
DROP FUNCTION IF EXISTS api.export_moorages_gpx_fn; DROP FUNCTION IF EXISTS api.export_moorages_gpx_fn;
CREATE FUNCTION api.export_moorages_gpx_fn() RETURNS pg_catalog.xml AS $export_moorages_gpx$ CREATE FUNCTION api.export_moorages_gpx_fn() RETURNS "text/xml" AS $export_moorages_gpx$
DECLARE DECLARE
app_settings jsonb;
BEGIN BEGIN
-- Gather url from app settings
app_settings := get_app_url_fn();
-- Generate XML -- Generate XML
RETURN xmlelement(name gpx, RETURN xmlelement(name gpx,
xmlattributes( '1.1' as version, xmlattributes( '1.1' as version,
'PostgSAIL' as creator, 'PostgSAIL' as creator,
'http://www.topografix.com/GPX/1/1' as xmlns, 'http://www.topografix.com/GPX/1/1' as xmlns,
'http://www.opencpn.org' as "xmlns:opencpn", 'http://www.opencpn.org' as "xmlns:opencpn",
'https://iot.openplotter.cloud' as "xmlns:postgsail", app_settings->>'app.url' as "xmlns:postgsail",
'http://www.w3.org/2001/XMLSchema-instance' as "xmlns:xsi", 'http://www.w3.org/2001/XMLSchema-instance' as "xmlns:xsi",
'http://www.garmin.com/xmlschemas/GpxExtensions/v3' as "xmlns:gpxx", 'http://www.garmin.com/xmlschemas/GpxExtensions/v3' as "xmlns:gpxx",
'http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd http://www.garmin.com/xmlschemas/GpxExtensions/v3 http://www8.garmin.com/xmlschemas/GpxExtensionsv3.xsd' as "xsi:schemaLocation"), 'http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd http://www.garmin.com/xmlschemas/GpxExtensions/v3 http://www8.garmin.com/xmlschemas/GpxExtensionsv3.xsd' as "xsi:schemaLocation"),
@@ -580,14 +651,14 @@ CREATE FUNCTION api.export_moorages_gpx_fn() RETURNS pg_catalog.xml AS $export_m
concat('Last Stayed On: ', 'TODO last seen', concat('Last Stayed On: ', 'TODO last seen',
E'\nTotal Stays: ', m.stay_duration, E'\nTotal Stays: ', m.stay_duration,
E'\nTotal Arrivals and Departures: ', m.reference_count, E'\nTotal Arrivals and Departures: ', m.reference_count,
E'\nLink: ', concat('https://iot.openplotter.cloud/moorage/', m.id)), E'\nLink: ', concat(app_settings->>'app.url','/moorage/', m.id)),
xmlelement(name "opencpn:guid", uuid_generate_v4())), xmlelement(name "opencpn:guid", uuid_generate_v4())),
xmlelement(name sym, 'anchor'), xmlelement(name sym, 'anchor'),
xmlelement(name type, 'WPT'), xmlelement(name type, 'WPT'),
xmlelement(name link, xmlattributes(concat('https://iot.openplotter.cloud/moorage/', m.id) as href), xmlelement(name link, xmlattributes(concat(app_settings->>'app.url','moorage/', m.id) as href),
xmlelement(name text, m.name)), xmlelement(name text, m.name)),
xmlelement(name extensions, xmlelement(name "postgsail:mooorage_id", 1), xmlelement(name extensions, xmlelement(name "postgsail:mooorage_id", m.id),
xmlelement(name "postgsail:link", concat('https://iot.openplotter.cloud/moorage/', m.id)), xmlelement(name "postgsail:link", concat(app_settings->>'app.url','/moorage/', m.id)),
xmlelement(name "opencpn:guid", uuid_generate_v4()), xmlelement(name "opencpn:guid", uuid_generate_v4()),
xmlelement(name "opencpn:viz", '1'), xmlelement(name "opencpn:viz", '1'),
xmlelement(name "opencpn:scale_min_max", xmlattributes(true as UseScale, 30000 as ScaleMin, 0 as ScaleMax) xmlelement(name "opencpn:scale_min_max", xmlattributes(true as UseScale, 30000 as ScaleMin, 0 as ScaleMax)
@@ -608,15 +679,15 @@ DROP FUNCTION IF EXISTS api.stats_logs_fn;
CREATE OR REPLACE FUNCTION api.stats_logs_fn( CREATE OR REPLACE FUNCTION api.stats_logs_fn(
IN start_date TEXT DEFAULT NULL, IN start_date TEXT DEFAULT NULL,
IN end_date TEXT DEFAULT NULL, IN end_date TEXT DEFAULT NULL,
OUT stats JSON) RETURNS JSON AS $stats_logs$ OUT stats JSONB) RETURNS JSONB AS $stats_logs$
DECLARE DECLARE
_start_date TIMESTAMP WITHOUT TIME ZONE DEFAULT '1970-01-01'; _start_date TIMESTAMPTZ DEFAULT '1970-01-01';
_end_date TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW(); _end_date TIMESTAMPTZ DEFAULT NOW();
BEGIN BEGIN
IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
RAISE WARNING '--> stats_fn, filter result stats by date [%]', start_date; RAISE WARNING '--> stats_fn, filter result stats by date [%]', start_date;
_start_date := start_date::TIMESTAMP WITHOUT TIME ZONE; _start_date := start_date::TIMESTAMPTZ;
_end_date := end_date::TIMESTAMP WITHOUT TIME ZONE; _end_date := end_date::TIMESTAMPTZ;
END IF; END IF;
RAISE NOTICE '--> stats_fn, _start_date [%], _end_date [%]', _start_date, _end_date; RAISE NOTICE '--> stats_fn, _start_date [%], _end_date [%]', _start_date, _end_date;
WITH WITH
@@ -625,8 +696,8 @@ CREATE OR REPLACE FUNCTION api.stats_logs_fn(
logs_view AS ( logs_view AS (
SELECT * SELECT *
FROM api.logbook l FROM api.logbook l
WHERE _from_time >= _start_date::TIMESTAMP WITHOUT TIME ZONE WHERE _from_time >= _start_date::TIMESTAMPTZ
AND _to_time <= _end_date::TIMESTAMP WITHOUT TIME ZONE + interval '23 hours 59 minutes' AND _to_time <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
), ),
first_date AS ( first_date AS (
SELECT _from_time as first_date from logs_view ORDER BY first_date ASC LIMIT 1 SELECT _from_time as first_date from logs_view ORDER BY first_date ASC LIMIT 1
@@ -678,21 +749,21 @@ CREATE OR REPLACE FUNCTION api.stats_stays_fn(
IN end_date TEXT DEFAULT NULL, IN end_date TEXT DEFAULT NULL,
OUT stats JSON) RETURNS JSON AS $stats_stays$ OUT stats JSON) RETURNS JSON AS $stats_stays$
DECLARE DECLARE
_start_date TIMESTAMP WITHOUT TIME ZONE DEFAULT '1970-01-01'; _start_date TIMESTAMPTZ DEFAULT '1970-01-01';
_end_date TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW(); _end_date TIMESTAMPTZ DEFAULT NOW();
BEGIN BEGIN
IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
RAISE NOTICE '--> stats_stays_fn, custom filter result stats by date [%]', start_date; RAISE NOTICE '--> stats_stays_fn, custom filter result stats by date [%]', start_date;
_start_date := start_date::TIMESTAMP WITHOUT TIME ZONE; _start_date := start_date::TIMESTAMPTZ;
_end_date := end_date::TIMESTAMP WITHOUT TIME ZONE; _end_date := end_date::TIMESTAMPTZ;
END IF; END IF;
RAISE NOTICE '--> stats_stays_fn, _start_date [%], _end_date [%]', _start_date, _end_date; RAISE NOTICE '--> stats_stays_fn, _start_date [%], _end_date [%]', _start_date, _end_date;
WITH WITH
moorages_log AS ( moorages_log AS (
SELECT s.id as stays_id, m.id as moorages_id, * SELECT s.id as stays_id, m.id as moorages_id, *
FROM api.stays s, api.moorages m FROM api.stays s, api.moorages m
WHERE arrived >= _start_date::TIMESTAMP WITHOUT TIME ZONE WHERE arrived >= _start_date::TIMESTAMPTZ
AND departed <= _end_date::TIMESTAMP WITHOUT TIME ZONE + interval '23 hours 59 minutes' AND departed <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
AND s.id = m.stay_id AND s.id = m.stay_id
), ),
home_ports AS ( home_ports AS (
@@ -728,4 +799,140 @@ $stats_stays$ LANGUAGE plpgsql;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
api.stats_stays_fn api.stats_stays_fn
IS 'Stays/Moorages stats by date'; IS 'Stays/Moorages stats by date';
DROP FUNCTION IF EXISTS api.delete_logbook_fn;
-- Delete a logbook and its dependent stay, repairing the stay chain:
-- the previous stay inherits the deleted stay's departed time and active
-- flag, so the timeline stays contiguous. Returns TRUE on success,
-- FALSE on invalid input.
CREATE OR REPLACE FUNCTION api.delete_logbook_fn(IN _id integer) RETURNS BOOLEAN AS $delete_logbook$
    DECLARE
        logbook_rec record;
        previous_stays_id numeric;
        current_stays_departed text; -- held as text, cast to TIMESTAMPTZ on re-use
        current_stays_id numeric;
        current_stays_active boolean;
    BEGIN
        -- If _id is not NULL
        IF _id IS NULL OR _id < 1 THEN
            RAISE WARNING '-> delete_logbook_fn invalid input %', _id;
            RETURN FALSE;
        END IF;
        -- NOTE(review): this lookup is not filtered by vessel_id — presumably
        -- RLS scopes api.logbook to the current vessel; confirm.
        SELECT * INTO logbook_rec
            FROM api.logbook l
            WHERE id = _id;
        -- Update logbook (transient marker before the DELETE below)
        UPDATE api.logbook l
            SET notes = 'mark for deletion'
            WHERE l.vessel_id = current_setting('vessel.id', false)
                AND id = logbook_rec.id;
        -- Update metrics status to moored
        UPDATE api.metrics
            SET status = 'moored'
            WHERE time >= logbook_rec._from_time::TIMESTAMPTZ
                AND time <= logbook_rec._to_time::TIMESTAMPTZ
                AND vessel_id = current_setting('vessel.id', false);
        -- Get related stays (the stay that began when this trip ended)
        SELECT id,departed,active INTO current_stays_id,current_stays_departed,current_stays_active
            FROM api.stays s
            WHERE s.vessel_id = current_setting('vessel.id', false)
                AND s.arrived = logbook_rec._to_time;
        -- Update related stays
        UPDATE api.stays s
            SET notes = 'mark for deletion'
            WHERE s.vessel_id = current_setting('vessel.id', false)
                AND s.arrived = logbook_rec._to_time;
        -- Find previous stays
        SELECT id INTO previous_stays_id
            FROM api.stays s
            WHERE s.vessel_id = current_setting('vessel.id', false)
                AND s.arrived < logbook_rec._to_time
                ORDER BY s.arrived DESC LIMIT 1;
        -- Update previous stays with the departed time from current stays
        -- and set the active state from current stays
        UPDATE api.stays
            SET departed = current_stays_departed::TIMESTAMPTZ,
                active = current_stays_active
            WHERE vessel_id = current_setting('vessel.id', false)
                AND id = previous_stays_id;
        -- Clean up, remove invalid logbook and stay entry
        DELETE FROM api.logbook WHERE id = logbook_rec.id;
        RAISE WARNING '-> delete_logbook_fn delete logbook [%]', logbook_rec.id;
        DELETE FROM api.stays WHERE id = current_stays_id;
        RAISE WARNING '-> delete_logbook_fn delete stays [%]', current_stays_id;
        -- Clean up, Subtract (-1) moorages ref count
        -- NOTE(review): this WHERE compares api.moorages.id against
        -- previous_stays_id, which is a stays id, not a moorage id — the
        -- decrement likely hits the wrong row or none. TODO confirm the
        -- intended target (e.g. the moorage referenced by the deleted stay).
        UPDATE api.moorages
            SET reference_count = reference_count - 1
            WHERE vessel_id = current_setting('vessel.id', false)
                AND id = previous_stays_id;
        RETURN TRUE;
    END;
$delete_logbook$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.delete_logbook_fn
    IS 'Delete a logbook and dependency stay';
-- Export averaged monitoring metrics over a lookback window.
-- time_interval: lookback in hours, as text ('24', '48', '72', '168');
-- bucket width scales with the window to keep the series size bounded.
-- NOTE(review): time_interval::NUMERIC below raises on non-numeric input —
-- callers must pass a numeric string.
CREATE OR REPLACE FUNCTION api.monitoring_history_fn(IN time_interval TEXT DEFAULT '24', OUT history_metrics JSONB) RETURNS JSONB AS $monitoring_history$
    DECLARE
        bucket_interval interval := '5 minutes';
    BEGIN
        RAISE NOTICE '-> monitoring_history_fn';
        -- Map the window size to a bucket width; unknown values fall back
        -- to the 5-minute default.
        SELECT CASE time_interval
            WHEN '24' THEN '5 minutes'
            WHEN '48' THEN '2 hours'
            WHEN '72' THEN '4 hours'
            WHEN '168' THEN '7 hours'
            ELSE '5 minutes'
            END bucket INTO bucket_interval;
        RAISE NOTICE '-> monitoring_history_fn % %', time_interval, bucket_interval;
        WITH history_table AS (
            -- time_bucket (TimescaleDB) groups rows into fixed-width windows;
            -- each metric is averaged per bucket.
            SELECT time_bucket(bucket_interval::INTERVAL, time) AS time_bucket,
                avg((metrics->'environment.water.temperature')::numeric) AS waterTemperature,
                avg((metrics->'environment.inside.temperature')::numeric) AS insideTemperature,
                avg((metrics->'environment.outside.temperature')::numeric) AS outsideTemperature,
                avg((metrics->'environment.wind.speedOverGround')::numeric) AS windSpeedOverGround,
                avg((metrics->'environment.inside.relativeHumidity')::numeric) AS insideHumidity,
                avg((metrics->'environment.outside.relativeHumidity')::numeric) AS outsideHumidity,
                avg((metrics->'environment.outside.pressure')::numeric) AS outsidePressure,
                avg((metrics->'environment.inside.pressure')::numeric) AS insidePressure,
                avg((metrics->'electrical.batteries.House.capacity.stateOfCharge')::numeric) AS batteryCharge,
                avg((metrics->'electrical.batteries.House.voltage')::numeric) AS batteryVoltage,
                avg((metrics->'environment.depth.belowTransducer')::numeric) AS depth
            FROM api.metrics
            -- NOTE(review): NOW() AT TIME ZONE 'UTC' yields a timestamp
            -- without time zone; the comparison with "time" relies on an
            -- implicit cast — confirm the column type is as expected.
            WHERE time > (NOW() AT TIME ZONE 'UTC' - INTERVAL '1 hours' * time_interval::NUMERIC)
            GROUP BY time_bucket
            ORDER BY time_bucket asc
        )
        SELECT jsonb_agg(history_table) INTO history_metrics FROM history_table;
    END
$monitoring_history$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.monitoring_history_fn
    IS 'Export metrics from a time period 24h, 48h, 72h, 7d';
-- Generate the vessel status: when a logbook entry is active the vessel is
-- "in route" (report departure moorage and time); otherwise report the
-- currently-active stay (location and arrival time).
CREATE OR REPLACE FUNCTION api.status_fn(out status jsonb) RETURNS JSONB AS $status_fn$
    DECLARE
        in_route BOOLEAN := False;
    BEGIN
        RAISE NOTICE '-> status_fn';
        -- Is a trip currently in progress?
        SELECT EXISTS ( SELECT id
                FROM api.logbook l
                WHERE active IS True
                LIMIT 1
            ) INTO in_route;
        IF in_route IS True THEN
            -- In route from <logbook.from_name> departed at <_from_time>
            -- Fix: the original referenced s.stay_code, but no alias "s"
            -- exists in this FROM list; join stays_at through the departure
            -- moorage's stay_code instead.
            SELECT jsonb_build_object('status', sa.description, 'location', m.name, 'departed', l._from_time) INTO status
                from api.logbook l, api.stays_at sa, api.moorages m
                where m.stay_code = sa.stay_code AND l._from_moorage_id = m.id AND l.active IS True;
        ELSE
            -- At <Stat_at.Desc> in <Moorage.name> arrived at <s.arrived>
            SELECT jsonb_build_object('status', sa.description, 'location', m.name, 'arrived', s.arrived) INTO status
                from api.stays s, api.stays_at sa, api.moorages m
                where s.stay_code = sa.stay_code AND s.moorage_id = m.id AND s.active IS True;
        END IF;
    END
$status_fn$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.status_fn
    IS 'generate vessel status';

View File

@@ -11,24 +11,28 @@
-- Views -- Views
-- Views are invoked with the privileges of the view owner, -- Views are invoked with the privileges of the view owner,
-- make the user_role the views owner. -- make the user_role the views owner.
-- to bypass this limit you need pg15+ with specific settings
-- security_invoker=true,security_barrier=true
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
CREATE VIEW first_metric AS CREATE VIEW public.first_metric AS
SELECT * SELECT *
FROM api.metrics FROM api.metrics
ORDER BY time ASC LIMIT 1; ORDER BY time ASC LIMIT 1;
CREATE VIEW last_metric AS CREATE VIEW public.last_metric AS
SELECT * SELECT *
FROM api.metrics FROM api.metrics
ORDER BY time DESC LIMIT 1; ORDER BY time DESC LIMIT 1;
CREATE VIEW trip_in_progress AS DROP VIEW IF EXISTS public.trip_in_progress;
CREATE VIEW public.trip_in_progress AS
SELECT * SELECT *
FROM api.logbook FROM api.logbook
WHERE active IS true; WHERE active IS true;
CREATE VIEW stay_in_progress AS DROP VIEW IF EXISTS public.stay_in_progress;
CREATE VIEW public.stay_in_progress AS
SELECT * SELECT *
FROM api.stays FROM api.stays
WHERE active IS true; WHERE active IS true;
@@ -44,16 +48,18 @@ CREATE OR REPLACE VIEW api.logs_view WITH (security_invoker=true,security_barrie
_to as "to", _to as "to",
_to_time as "ended", _to_time as "ended",
distance as "distance", distance as "distance",
duration as "duration" duration as "duration",
_from_moorage_id,_to_moorage_id
FROM api.logbook l FROM api.logbook l
WHERE _to_time IS NOT NULL WHERE name IS NOT NULL
AND _to_time IS NOT NULL
ORDER BY _from_time DESC; ORDER BY _from_time DESC;
-- Description -- Description
COMMENT ON VIEW COMMENT ON VIEW
api.logs_view api.logs_view
IS 'Logs web view'; IS 'Logs web view';
-- Initial try of MATERIALIZED VIEW -- Initial try of MATERIALIZED VIEW - does not support RLS
CREATE MATERIALIZED VIEW api.logs_mat_view AS CREATE MATERIALIZED VIEW api.logs_mat_view AS
SELECT id, SELECT id,
name as "name", name as "name",
@@ -62,9 +68,11 @@ CREATE MATERIALIZED VIEW api.logs_mat_view AS
_to as "to", _to as "to",
_to_time as "ended", _to_time as "ended",
distance as "distance", distance as "distance",
duration as "duration" duration as "duration",
_from_moorage_id,_to_moorage_id
FROM api.logbook l FROM api.logbook l
WHERE _to_time IS NOT NULL WHERE name IS NOT NULL
AND _to_time IS NOT NULL
ORDER BY _from_time DESC; ORDER BY _from_time DESC;
-- Description -- Description
COMMENT ON MATERIALIZED VIEW COMMENT ON MATERIALIZED VIEW
@@ -86,7 +94,9 @@ CREATE OR REPLACE VIEW api.log_view WITH (security_invoker=true,security_barrier
avg_speed as avg_speed, avg_speed as avg_speed,
max_speed as max_speed, max_speed as max_speed,
max_wind_speed as max_wind_speed, max_wind_speed as max_wind_speed,
extra as extra extra as extra,
_from_moorage_id as from_moorage_id,
_to_moorage_id as to_moorage_id
FROM api.logbook l FROM api.logbook l
WHERE _to_time IS NOT NULL WHERE _to_time IS NOT NULL
ORDER BY _from_time DESC; ORDER BY _from_time DESC;
@@ -96,34 +106,32 @@ COMMENT ON VIEW
IS 'Log web view'; IS 'Log web view';
-- Stays web view -- Stays web view
-- TODO group by month
DROP VIEW IF EXISTS api.stays_view; DROP VIEW IF EXISTS api.stays_view;
CREATE OR REPLACE VIEW api.stays_view WITH (security_invoker=true,security_barrier=true) AS CREATE OR REPLACE VIEW api.stays_view WITH (security_invoker=true,security_barrier=true) AS
SELECT s.id, SELECT s.id,
concat( s.name AS "name",
extract(DAYS FROM (s.departed-s.arrived)::interval), m.name AS "moorage",
' days',
--DATE_TRUNC('day', s.departed-s.arrived),
' stay at ',
s.name,
' in ',
RTRIM(TO_CHAR(s.departed, 'Month')),
' ',
TO_CHAR(s.departed, 'YYYY')
) as "name",
s.name AS "moorage",
m.id AS "moorage_id", m.id AS "moorage_id",
(s.departed-s.arrived) AS "duration", (s.departed-s.arrived) AS "duration",
sa.description AS "stayed_at", sa.description AS "stayed_at",
sa.stay_code AS "stayed_at_id", sa.stay_code AS "stayed_at_id",
s.arrived AS "arrived", s.arrived AS "arrived",
_from.id as "arrived_log_id",
_from._to_moorage_id as "arrived_from_moorage_id",
_from._to as "arrived_from_moorage_name",
s.departed AS "departed", s.departed AS "departed",
_to.id AS "departed_log_id",
_to._from_moorage_id AS "departed_to_moorage_id",
_to._from AS "departed_to_moorage_name",
s.notes AS "notes" s.notes AS "notes"
FROM api.stays s, api.stays_at sa, api.moorages m FROM api.stays_at sa, api.moorages m, api.stays s
WHERE departed IS NOT NULL LEFT JOIN api.logbook AS _from ON _from._from_time = s.departed
LEFT JOIN api.logbook AS _to ON _to._to_time = s.arrived
WHERE s.departed IS NOT NULL
AND _from._to_moorage_id IS NOT NULL
AND s.name IS NOT NULL AND s.name IS NOT NULL
AND s.stay_code = sa.stay_code AND s.stay_code = sa.stay_code
AND s.id = m.stay_id AND s.moorage_id = m.id
ORDER BY s.arrived DESC; ORDER BY s.arrived DESC;
-- Description -- Description
COMMENT ON VIEW COMMENT ON VIEW
@@ -133,30 +141,29 @@ COMMENT ON VIEW
DROP VIEW IF EXISTS api.stay_view; DROP VIEW IF EXISTS api.stay_view;
CREATE OR REPLACE VIEW api.stay_view WITH (security_invoker=true,security_barrier=true) AS CREATE OR REPLACE VIEW api.stay_view WITH (security_invoker=true,security_barrier=true) AS
SELECT s.id, SELECT s.id,
concat( s.name AS "name",
extract(DAYS FROM (s.departed-s.arrived)::interval), m.name AS "moorage",
' days',
--DATE_TRUNC('day', s.departed-s.arrived),
' stay at ',
s.name,
' in ',
RTRIM(TO_CHAR(s.departed, 'Month')),
' ',
TO_CHAR(s.departed, 'YYYY')
) as "name",
s.name AS "moorage",
m.id AS "moorage_id", m.id AS "moorage_id",
(s.departed-s.arrived) AS "duration", (s.departed-s.arrived) AS "duration",
sa.description AS "stayed_at", sa.description AS "stayed_at",
sa.stay_code AS "stayed_at_id", sa.stay_code AS "stayed_at_id",
s.arrived AS "arrived", s.arrived AS "arrived",
_from.id as "arrived_log_id",
_from._to_moorage_id as "arrived_from_moorage_id",
_from._to as "arrived_from_moorage_name",
s.departed AS "departed", s.departed AS "departed",
_to.id AS "departed_log_id",
_to._from_moorage_id AS "departed_to_moorage_id",
_to._from AS "departed_to_moorage_name",
s.notes AS "notes" s.notes AS "notes"
FROM api.stays s, api.stays_at sa, api.moorages m FROM api.stays_at sa, api.moorages m, api.stays s
WHERE departed IS NOT NULL LEFT JOIN api.logbook AS _from ON _from._from_time = s.departed
LEFT JOIN api.logbook AS _to ON _to._to_time = s.arrived
WHERE s.departed IS NOT NULL
AND _from._to_moorage_id IS NOT NULL
AND s.name IS NOT NULL AND s.name IS NOT NULL
AND s.stay_code = sa.stay_code AND s.stay_code = sa.stay_code
AND s.id = m.stay_id AND s.moorage_id = m.id
ORDER BY s.arrived DESC; ORDER BY s.arrived DESC;
-- Description -- Description
COMMENT ON VIEW COMMENT ON VIEW
@@ -187,17 +194,20 @@ CREATE OR REPLACE VIEW api.moorages_view WITH (security_invoker=true,security_ba
sa.description AS Default_Stay, sa.description AS Default_Stay,
sa.stay_code AS Default_Stay_Id, sa.stay_code AS Default_Stay_Id,
EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS Total_Stay, -- in days EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS Total_Stay, -- in days
m.stay_duration AS Total_Duration,
m.reference_count AS Arrivals_Departures m.reference_count AS Arrivals_Departures
-- m.geog -- m.geog
-- m.stay_duration, -- m.stay_duration,
-- justify_hours ( m.stay_duration ) -- justify_hours ( m.stay_duration )
FROM api.moorages m, api.stays_at sa FROM api.moorages m, api.stays_at sa
WHERE m.name IS NOT NULL -- m.stay_duration is only process on a stay
AND geog IS NOT NULL WHERE m.stay_duration IS NOT NULL
AND m.geog IS NOT NULL
AND m.stay_code = sa.stay_code AND m.stay_code = sa.stay_code
GROUP BY m.id,m.name,sa.description,m.stay_duration,m.reference_count,m.geog,sa.stay_code GROUP BY m.id,m.name,sa.description,m.stay_duration,m.reference_count,m.geog,sa.stay_code
-- ORDER BY 4 DESC; -- ORDER BY 4 DESC;
ORDER BY m.reference_count DESC; -- ORDER BY m.reference_count DESC;
ORDER BY m.stay_duration DESC;
-- Description -- Description
COMMENT ON VIEW COMMENT ON VIEW
api.moorages_view api.moorages_view
@@ -211,11 +221,13 @@ CREATE OR REPLACE VIEW api.moorage_view WITH (security_invoker=true,security_bar
sa.stay_code AS Default_Stay_Id, sa.stay_code AS Default_Stay_Id,
m.home_flag AS Home, m.home_flag AS Home,
EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS Total_Stay, EXTRACT(DAY FROM justify_hours ( m.stay_duration )) AS Total_Stay,
m.stay_duration AS Total_Duration,
m.reference_count AS Arrivals_Departures, m.reference_count AS Arrivals_Departures,
m.notes m.notes
-- m.geog -- m.geog
FROM api.moorages m, api.stays_at sa FROM api.moorages m, api.stays_at sa
WHERE m.name IS NOT NULL -- m.stay_duration is only process on a stay
WHERE m.stay_duration IS NOT NULL
AND geog IS NOT NULL AND geog IS NOT NULL
AND m.stay_code = sa.stay_code; AND m.stay_code = sa.stay_code;
-- Description -- Description
@@ -223,6 +235,27 @@ COMMENT ON VIEW
api.moorage_view api.moorage_view
IS 'Moorage details web view'; IS 'Moorage details web view';
DROP VIEW IF EXISTS api.moorages_stays_view;
-- For each completed stay, expose the logbook entries that bracket it:
-- the log that arrived at the moorage (_to) and the log that departed from it (_from).
CREATE OR REPLACE VIEW api.moorages_stays_view WITH (security_invoker=true,security_barrier=true) AS
SELECT
-- id/time of the logbook entry that ended at this stay (arrival)
_to.id AS _to_id,
_to._to_time,
-- id/time of the logbook entry that started from this stay (departure)
_from.id AS _from_id,
_from._from_time,
s.stay_code,s.duration,m.id
FROM api.stays_at sa, api.moorages m, api.stays s
-- match logs by exact timestamp: a log departing when the stay ended,
-- and a log arriving when the stay began -- TODO confirm timestamps always align
LEFT JOIN api.logbook AS _from ON _from._from_time = s.departed
LEFT JOIN api.logbook AS _to ON _to._to_time = s.arrived
-- only completed, named stays linked to a moorage and a stay type
WHERE s.departed IS NOT NULL
AND s.name IS NOT NULL
AND s.stay_code = sa.stay_code
AND s.moorage_id = m.id
ORDER BY _to._to_time DESC;
-- Description
COMMENT ON VIEW
api.moorages_stays_view
IS 'Moorages stay listing web view';
-- All moorage in 100 meters from the start of a logbook. -- All moorage in 100 meters from the start of a logbook.
-- ST_DistanceSphere Returns minimum distance in meters between two lon/lat points. -- ST_DistanceSphere Returns minimum distance in meters between two lon/lat points.
--SELECT --SELECT
@@ -265,7 +298,7 @@ CREATE OR REPLACE VIEW api.stats_logs_view WITH (security_invoker=true,security_
concat( max(l.distance), ' NM, ', max(l.duration), ' hours') AS "longest_nonstop_sail" concat( max(l.distance), ' NM, ', max(l.duration), ' hours') AS "longest_nonstop_sail"
FROM api.logbook l) FROM api.logbook l)
SELECT SELECT
m.name as Name, m.name AS name,
fm.time AS first, fm.time AS first,
lm.time AS last, lm.time AS last,
l.* l.*
@@ -345,22 +378,27 @@ CREATE VIEW api.monitoring_view WITH (security_invoker=true,security_barrier=tru
metrics-> 'environment.inside.temperature' AS insideTemperature, metrics-> 'environment.inside.temperature' AS insideTemperature,
metrics-> 'environment.outside.temperature' AS outsideTemperature, metrics-> 'environment.outside.temperature' AS outsideTemperature,
metrics-> 'environment.wind.speedOverGround' AS windSpeedOverGround, metrics-> 'environment.wind.speedOverGround' AS windSpeedOverGround,
metrics-> 'environment.wind.directionGround' AS windDirectionGround, metrics-> 'environment.wind.directionTrue' AS windDirectionTrue,
metrics-> 'environment.inside.relativeHumidity' AS insideHumidity, metrics-> 'environment.inside.relativeHumidity' AS insideHumidity,
metrics-> 'environment.outside.relativeHumidity' AS outsideHumidity, metrics-> 'environment.outside.relativeHumidity' AS outsideHumidity,
metrics-> 'environment.outside.pressure' AS outsidePressure, metrics-> 'environment.outside.pressure' AS outsidePressure,
metrics-> 'environment.inside.pressure' AS insidePressure, metrics-> 'environment.inside.pressure' AS insidePressure,
metrics-> 'electrical.batteries.House.capacity.stateOfCharge' AS batteryCharge, metrics-> 'electrical.batteries.House.capacity.stateOfCharge' AS batteryCharge,
metrics-> 'electrical.batteries.House.voltage' AS batteryVoltage, metrics-> 'electrical.batteries.House.voltage' AS batteryVoltage,
metrics-> 'environment.depth.belowTransducer' AS depth,
jsonb_build_object( jsonb_build_object(
'type', 'Feature', 'type', 'Feature',
'geometry', ST_AsGeoJSON(st_makepoint(longitude,latitude))::jsonb, 'geometry', ST_AsGeoJSON(st_makepoint(longitude,latitude))::jsonb,
'properties', jsonb_build_object( 'properties', jsonb_build_object(
'name', current_setting('vessel.name', false), 'name', current_setting('vessel.name', false),
'latitude', m.latitude, 'latitude', m.latitude,
'longitude', m.longitude 'longitude', m.longitude,
'time', m.time,
'speedoverground', m.speedoverground,
'windspeedapparent', m.windspeedapparent
)::jsonb ) AS geojson, )::jsonb ) AS geojson,
current_setting('vessel.name', false) AS name current_setting('vessel.name', false) AS name,
( SELECT api.status_fn() ) AS status
FROM api.metrics m FROM api.metrics m
ORDER BY time DESC LIMIT 1; ORDER BY time DESC LIMIT 1;
COMMENT ON VIEW COMMENT ON VIEW
@@ -454,3 +492,19 @@ CREATE VIEW api.total_info_view WITH (security_invoker=true,security_barrier=tru
COMMENT ON VIEW COMMENT ON VIEW
api.total_info_view api.total_info_view
IS 'total_info_view web view'; IS 'total_info_view web view';
DROP VIEW IF EXISTS api.explore_view;
-- Explore view: flatten the most recent metrics JSONB into (time, key, value)
-- rows so the web UI can list every reported metric of the latest sample.
CREATE VIEW api.explore_view WITH (security_invoker=true,security_barrier=true) AS
-- Expose last metrics
SELECT last_metric.time, kv.key, kv.value
FROM (
-- latest metrics row for the current vessel (RLS via security_invoker)
SELECT m.time, m.metrics
FROM api.metrics m
ORDER BY m.time DESC LIMIT 1
) AS last_metric,
-- expand the JSONB object into one row per metric key
jsonb_each_text(last_metric.metrics) AS kv
ORDER BY kv.key ASC;
COMMENT ON VIEW
api.explore_view
IS 'explore_view web view';

View File

@@ -8,6 +8,36 @@ select current_database();
-- connect to the DB -- connect to the DB
\c signalk \c signalk
-- Check for new logbook pending validation
-- Check for new logbook pending validation
-- Drains up to 100 'pre_logbook' entries from process_queue per run and
-- validates each candidate logbook via process_pre_logbook_fn().
-- Intended to be scheduled by pg_cron. Returns void; raises NOTICE for tracing.
CREATE FUNCTION cron_process_pre_logbook_fn() RETURNS void AS $$
DECLARE
    process_rec record;
BEGIN
    -- Check for new logbook pending update
    RAISE NOTICE 'cron_process_pre_logbook_fn init loop';
    -- Oldest first; LIMIT 100 bounds the work done in a single cron tick.
    FOR process_rec in
        SELECT * FROM process_queue
            WHERE channel = 'pre_logbook' AND processed IS NULL
            ORDER BY stored ASC LIMIT 100
    LOOP
        RAISE NOTICE 'cron_process_pre_logbook_fn processing queue [%] for logbook id [%]', process_rec.id, process_rec.payload;
        -- payload holds the logbook id as text; validate it
        PERFORM process_pre_logbook_fn(process_rec.payload::INTEGER);
        -- mark the queue entry as processed so it is not picked up again
        UPDATE process_queue
            SET
                processed = NOW()
            WHERE id = process_rec.id;
        RAISE NOTICE 'cron_process_pre_logbook_fn processed queue [%] for logbook id [%]', process_rec.id, process_rec.payload;
    END LOOP;
END;
$$ language plpgsql;
-- Description
-- Fix: comment referenced the pre-rename name process_logbook_valid_fn;
-- the function actually performs process_pre_logbook_fn.
COMMENT ON FUNCTION
    public.cron_process_pre_logbook_fn
    IS 'init by pg_cron to check for new logbook pending update, if so perform process_pre_logbook_fn';
-- Check for new logbook pending update -- Check for new logbook pending update
CREATE FUNCTION cron_process_new_logbook_fn() RETURNS void AS $$ CREATE FUNCTION cron_process_new_logbook_fn() RETURNS void AS $$
declare declare
@@ -329,6 +359,51 @@ COMMENT ON FUNCTION
public.cron_process_new_notification_fn public.cron_process_new_notification_fn
IS 'init by pg_cron to check for new event pending notifications, if so perform process_notification_queue_fn'; IS 'init by pg_cron to check for new event pending notifications, if so perform process_notification_queue_fn';
-- CRON for new vessel metadata pending grafana provisioning
-- CRON for new vessel metadata pending grafana provisioning
-- Drains 'grafana' entries from process_queue: provisions a Grafana org/user
-- for the vessel via grafana_py_fn and notifies the owner.
-- Intended to be scheduled by pg_cron. Returns void; raises NOTICE for tracing.
CREATE FUNCTION cron_process_grafana_fn() RETURNS void AS $$
DECLARE
    process_rec record;
    data_rec record;
    app_settings jsonb;
    user_settings jsonb;
BEGIN
    -- We run grafana provisioning only after the first received vessel metadata
    -- Check for new vessel metadata pending grafana provisioning
    RAISE NOTICE 'cron_process_grafana_fn';
    FOR process_rec in
        SELECT * from process_queue
            where channel = 'grafana' and processed is null
            order by stored asc
    LOOP
        RAISE NOTICE '-> cron_process_grafana_fn [%]', process_rec.payload;
        -- Gather url from app settings
        app_settings := get_app_settings_fn();
        -- Get vessel details base on metadata id
        SELECT * INTO data_rec
            FROM api.metadata m, auth.vessels v
            WHERE m.id = process_rec.payload::INTEGER
                AND m.vessel_id = v.vessel_id;
        -- Fix: guard against a dangling queue entry (metadata or vessel row
        -- missing). Previously NULLs were passed to grafana_py_fn, raising an
        -- error that aborted the whole batch. Leave the entry unprocessed so
        -- it can be inspected/retried, and keep draining the rest.
        IF NOT FOUND THEN
            RAISE WARNING '-> cron_process_grafana_fn no vessel metadata for queue entry [%], payload [%]', process_rec.id, process_rec.payload;
            CONTINUE;
        END IF;
        -- as we got data from the vessel we can do the grafana provisioning.
        PERFORM grafana_py_fn(data_rec.name, data_rec.vessel_id, data_rec.owner_email, app_settings);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(data_rec.vessel_id::TEXT);
        RAISE DEBUG '-> DEBUG cron_process_grafana_fn get_user_settings_from_vesselid_fn [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('grafana'::TEXT, user_settings::JSONB);
        -- update process_queue entry as processed
        UPDATE process_queue
            SET
                processed = NOW()
            WHERE id = process_rec.id;
        RAISE NOTICE '-> cron_process_grafana_fn updated process_queue table [%]', process_rec.id;
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_grafana_fn
    IS 'init by pg_cron to check for new vessel pending grafana provisioning, if so perform grafana_py_fn';
-- CRON for Vacuum database -- CRON for Vacuum database
CREATE FUNCTION cron_vacuum_fn() RETURNS void AS $$ CREATE FUNCTION cron_vacuum_fn() RETURNS void AS $$
-- ERROR: VACUUM cannot be executed from a function -- ERROR: VACUUM cannot be executed from a function
@@ -352,6 +427,8 @@ COMMENT ON FUNCTION
CREATE FUNCTION cron_process_alerts_fn() RETURNS void AS $$ CREATE FUNCTION cron_process_alerts_fn() RETURNS void AS $$
DECLARE DECLARE
alert_rec record; alert_rec record;
last_metric TIMESTAMPTZ;
metric_rec record;
BEGIN BEGIN
-- Check for new event notification pending update -- Check for new event notification pending update
RAISE NOTICE 'cron_process_alerts_fn'; RAISE NOTICE 'cron_process_alerts_fn';
@@ -361,9 +438,24 @@ BEGIN
FROM auth.accounts a, auth.vessels v, api.metadata m FROM auth.accounts a, auth.vessels v, api.metadata m
WHERE m.vessel_id = v.vessel_id WHERE m.vessel_id = v.vessel_id
AND a.email = v.owner_email AND a.email = v.owner_email
AND (preferences->'alerting'->'enabled')::boolean = false AND (a.preferences->'alerting'->'enabled')::boolean = True
AND m.active = True
LOOP LOOP
RAISE NOTICE '-> cron_process_alert_rec_fn for [%]', alert_rec; RAISE NOTICE '-> cron_process_alert_rec_fn for [%]', alert_rec;
PERFORM set_config('vessel.id', alert_rec.vessel_id, false);
--RAISE WARNING 'public.cron_process_alert_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
-- Get time from the last metrics entry
SELECT m.time INTO last_metric FROM api.metrics m WHERE vessel_id = alert_rec.vessel_id ORDER BY m.time DESC LIMIT 1;
-- Get all metrics from the last 10 minutes
FOR metric_rec in
SELECT *
FROM api.metrics m
WHERE vessel_id = alert_rec.vessel_id
AND time >= last_metric - INTERVAL '10 MINUTES'
ORDER BY m.time DESC LIMIT 100
LOOP
RAISE NOTICE '-> cron_process_alert_rec_fn checking metrics [%]', metric_rec;
END LOOP;
END LOOP; END LOOP;
END; END;
$$ language plpgsql; $$ language plpgsql;
@@ -437,7 +529,7 @@ DECLARE
no_activity_rec record; no_activity_rec record;
user_settings jsonb; user_settings jsonb;
BEGIN BEGIN
-- Check for vessel with no activity for more than 200 days -- Check for vessel with no activity for more than 230 days
RAISE NOTICE 'cron_process_no_activity_fn'; RAISE NOTICE 'cron_process_no_activity_fn';
FOR no_activity_rec in FOR no_activity_rec in
SELECT SELECT
@@ -445,7 +537,7 @@ BEGIN
FROM auth.accounts a FROM auth.accounts a
LEFT JOIN auth.vessels v ON v.owner_email = a.email LEFT JOIN auth.vessels v ON v.owner_email = a.email
LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '200 DAYS' WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS'
LOOP LOOP
RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_activity_rec; RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_activity_rec;
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings; SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
@@ -458,7 +550,7 @@ $no_activity$ language plpgsql;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
public.cron_process_no_activity_fn public.cron_process_no_activity_fn
IS 'init by pg_cron, check for vessel with no activity for more than 200 days then send notification'; IS 'init by pg_cron, check for vessel with no activity for more than 230 days then send notification';
-- CRON for deactivated/deletion -- CRON for deactivated/deletion
CREATE FUNCTION cron_process_deactivated_fn() RETURNS void AS $deactivated$ CREATE FUNCTION cron_process_deactivated_fn() RETURNS void AS $deactivated$

View File

@@ -76,7 +76,7 @@ INSERT INTO public.email_templates VALUES
('monitor_online', ('monitor_online',
'Boat went Online', 'Boat went Online',
E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n', E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n',
'Boat went Offline', 'Boat went Online',
E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n'), E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n'),
('new_badge', ('new_badge',
'New Badge!', 'New Badge!',
@@ -115,24 +115,29 @@ INSERT INTO public.email_templates VALUES
E'Congratulations!\nYou have just connect your account to your vessel, @postgsail_bot.\n'), E'Congratulations!\nYou have just connect your account to your vessel, @postgsail_bot.\n'),
('no_vessel', ('no_vessel',
'PostgSail add your boat', 'PostgSail add your boat',
E'Hello __RECIPIENT__,\nYou have created an account on PostgSail but you have not created your boat yet.\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team', E'Hello __RECIPIENT__,\nYou created an account on PostgSail but you have not added your boat yet.\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
'PostgSail next step', 'PostgSail next step',
E'Hello,\nYou should create your vessel. Check your email!\n'), E'Hello,\nYou should create your vessel. Check your email!\n'),
('no_metadata', ('no_metadata',
'PostgSail connect your boat', 'PostgSail connect your boat',
E'Hello __RECIPIENT__,\nYou have created an account on PostgSail but you have not connected your boat yet.\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team', E'Hello __RECIPIENT__,\nYou created an account on PostgSail but you have not connected your boat yet.\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
'PostgSail next step', 'PostgSail next step',
E'Hello,\nYou should connect your vessel. Check your email!\n'), E'Hello,\nYou should connect your vessel. Check your email!\n'),
('no_activity', ('no_activity',
'PostgSail boat inactivity', 'PostgSail boat inactivity',
E'Hello __RECIPIENT__,\nWe don\'t see any activity on your account, do you need any assistance?\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team', E'Hello __RECIPIENT__,\nWe don\'t see any activity on your account, do you need any assistance?\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nThe PostgSail Team.',
'PostgSail inactivity!', 'PostgSail inactivity!',
E'We detected inactivity. Check your email!\n'), E'We detected inactivity. Check your email!\n'),
('deactivated', ('deactivated',
'PostgSail account deactivated', 'PostgSail account deactivated',
E'Hello __RECIPIENT__,\nYour account has been deactivated and all your data has been removed from PostgSail system.', E'Hello __RECIPIENT__,\nYour account has been deactivated and all your data has been removed from PostgSail system.',
'PostgSail deactivated!', 'PostgSail deactivated!',
E'We removed your account. Check your email!\n'); E'We removed your account. Check your email!\n'),
('grafana',
'PostgSail Grafana integration',
E'Hello __RECIPIENT__,\nCongratulations! You have just unlocked Grafana\nSee more details at https://app.opneplotter.cloud\nHappy sailing!\nFrancois',
'PostgSail Grafana!',
E'Congratulations!\nYou have just unlocked Grafana\nSee more details at https://app.opneplotter.cloud\n');
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- Queue handling -- Queue handling
@@ -150,12 +155,12 @@ INSERT INTO public.email_templates VALUES
-- table way -- table way
CREATE TABLE IF NOT EXISTS public.process_queue ( CREATE TABLE IF NOT EXISTS public.process_queue (
id SERIAL PRIMARY KEY, id INT PRIMARY KEY GENERATED ALWAYS AS IDENTITY,
channel TEXT NOT NULL, channel TEXT NOT NULL,
payload TEXT NOT NULL, payload TEXT NOT NULL,
ref_id TEXT NOT NULL, ref_id TEXT NOT NULL,
stored TIMESTAMP WITHOUT TIME ZONE NOT NULL, stored TIMESTAMPTZ NOT NULL,
processed TIMESTAMP WITHOUT TIME ZONE DEFAULT NULL processed TIMESTAMPTZ DEFAULT NULL
); );
-- Description -- Description
COMMENT ON TABLE COMMENT ON TABLE
@@ -178,7 +183,10 @@ $new_account_entry$ language plpgsql;
create function new_account_otp_validation_entry_fn() returns trigger as $new_account_otp_validation_entry$ create function new_account_otp_validation_entry_fn() returns trigger as $new_account_otp_validation_entry$
begin begin
insert into process_queue (channel, payload, stored, ref_id) values ('email_otp', NEW.email, now(), NEW.user_id); -- Add email_otp check only if not from oauth server
if (NEW.preferences->>'email_verified')::boolean IS NOT True then
insert into process_queue (channel, payload, stored, ref_id) values ('email_otp', NEW.email, now(), NEW.user_id);
end if;
return NEW; return NEW;
END; END;
$new_account_otp_validation_entry$ language plpgsql; $new_account_otp_validation_entry$ language plpgsql;
@@ -190,6 +198,14 @@ begin
END; END;
$new_vessel_entry$ language plpgsql; $new_vessel_entry$ language plpgsql;
-- Trigger function: on new vessel creation, derive a URL-safe public name
-- (vessel name stripped of all non-word characters) and store it in the
-- owner's user preferences under the 'public_vessel' key.
create function new_vessel_public_fn() returns trigger as $new_vessel_public$
begin
-- Update user settings with a public vessel name
-- regexp_replace(..., '\W+', '', 'g') removes every non-alphanumeric/underscore char
perform api.update_user_preferences_fn('{public_vessel}', regexp_replace(NEW.name, '\W+', '', 'g'));
return NEW;
END;
$new_vessel_public$ language plpgsql;
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- Tables Application Settings -- Tables Application Settings
-- https://dba.stackexchange.com/questions/27296/storing-application-settings-with-different-datatypes#27297 -- https://dba.stackexchange.com/questions/27296/storing-application-settings-with-different-datatypes#27297

File diff suppressed because it is too large Load Diff

View File

@@ -79,9 +79,9 @@ COMMENT ON FUNCTION
CREATE OR REPLACE FUNCTION public.istimestamptz(text) RETURNS BOOLEAN AS CREATE OR REPLACE FUNCTION public.istimestamptz(text) RETURNS BOOLEAN AS
$isdate$ $isdate$
DECLARE x TIMESTAMP WITHOUT TIME ZONE; DECLARE x TIMESTAMPTZ;
BEGIN BEGIN
x = $1::TIMESTAMP WITHOUT TIME ZONE; x = $1::TIMESTAMPTZ;
RETURN TRUE; RETURN TRUE;
EXCEPTION WHEN others THEN EXCEPTION WHEN others THEN
RETURN FALSE; RETURN FALSE;
@@ -92,7 +92,7 @@ LANGUAGE plpgsql IMMUTABLE;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
public.istimestamptz public.istimestamptz
IS 'Check typeof value is TIMESTAMP WITHOUT TIME ZONE'; IS 'Check typeof value is TIMESTAMPTZ';
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- JSON helpers -- JSON helpers
@@ -151,3 +151,48 @@ $jsonb_diff_val$ LANGUAGE plpgsql;
COMMENT ON FUNCTION COMMENT ON FUNCTION
public.jsonb_diff_val public.jsonb_diff_val
IS 'Compare two jsonb objects'; IS 'Compare two jsonb objects';
---------------------------------------------------------------------------
-- uuid v7 helpers
--
-- https://gist.github.com/kjmph/5bd772b2c2df145aa645b837da7eca74
-- Extract the creation timestamp embedded in a UUIDv7.
-- The first 48 bits of a v7 UUID are the Unix epoch in milliseconds;
-- the expression rebuilds those 12 hex digits (zero-padded to 64 bits)
-- and divides by 1000 for to_timestamp().
CREATE OR REPLACE FUNCTION public.timestamp_from_uuid_v7(_uuid uuid)
RETURNS timestamp without time zone
LANGUAGE sql
-- Based off IETF draft, https://datatracker.ietf.org/doc/draft-peabody-dispatch-new-uuid-format/
IMMUTABLE PARALLEL SAFE STRICT LEAKPROOF
AS $$
SELECT to_timestamp(('x0000' || substr(_uuid::text, 1, 8) || substr(_uuid::text, 10, 4))::bit(64)::bigint::numeric / 1000);
$$
;
-- Description
COMMENT ON FUNCTION
public.timestamp_from_uuid_v7
IS 'extract the timestamp from the uuid.';
-- Generate a UUIDv7: time-ordered UUID with millisecond Unix-epoch prefix.
-- Sortable by creation time; companion of timestamp_from_uuid_v7().
create or replace function public.uuid_generate_v7()
returns uuid
as $$
begin
-- use random v4 uuid as starting point (which has the same variant we need)
-- then overlay timestamp
-- then set version 7 by flipping the 2 and 1 bit in the version 4 string
return encode(
set_bit(
set_bit(
-- overlay the 48-bit millisecond timestamp over the first 6 bytes
-- (int8send yields 8 bytes; 'from 3' keeps the low 6)
overlay(uuid_send(gen_random_uuid())
placing substring(int8send(floor(extract(epoch from clock_timestamp()) * 1000)::bigint) from 3)
from 1 for 6
),
-- bits 52 and 53 turn the version nibble 0100 (v4) into 0111 (v7)
52, 1
),
53, 1
),
'hex')::uuid;
end
$$
language plpgsql volatile;
-- Description
COMMENT ON FUNCTION
public.uuid_generate_v7
IS 'Generate UUID v7, Based off IETF draft, https://datatracker.ietf.org/doc/draft-peabody-dispatch-new-uuid-format/';

View File

@@ -15,9 +15,9 @@ CREATE SCHEMA IF NOT EXISTS public;
-- --
-- https://github.com/CartoDB/labs-postgresql/blob/master/workshop/plpython.md -- https://github.com/CartoDB/labs-postgresql/blob/master/workshop/plpython.md
-- --
DROP FUNCTION IF EXISTS reverse_geocode_py_fn; DROP FUNCTION IF EXISTS reverse_geocode_py_fn;
CREATE OR REPLACE FUNCTION reverse_geocode_py_fn(IN geocoder TEXT, IN lon NUMERIC, IN lat NUMERIC, CREATE OR REPLACE FUNCTION reverse_geocode_py_fn(IN geocoder TEXT, IN lon NUMERIC, IN lat NUMERIC,
OUT geo jsonb) OUT geo JSONB)
AS $reverse_geocode_py$ AS $reverse_geocode_py$
import requests import requests
@@ -39,47 +39,57 @@ AS $reverse_geocode_py$
plpy.error('Error missing parameters') plpy.error('Error missing parameters')
return None return None
# Make the request to the geocoder API def georeverse(geocoder, lon, lat, zoom="18"):
# https://operations.osmfoundation.org/policies/nominatim/ # Make the request to the geocoder API
payload = {"lon": lon, "lat": lat, "format": "jsonv2", "zoom": 18} # https://operations.osmfoundation.org/policies/nominatim/
# https://nominatim.org/release-docs/latest/api/Reverse/ headers = {"Accept-Language": "en-US,en;q=0.5", "User-Agent": "PostgSail", "From": "xbgmsharp@gmail.com"}
r = requests.get(url, headers = {"Accept-Language": "en-US,en;q=0.5"}, params=payload) payload = {"lon": lon, "lat": lat, "format": "jsonv2", "zoom": zoom, "accept-language": "en"}
# https://nominatim.org/release-docs/latest/api/Reverse/
r = requests.get(url, headers=headers, params=payload)
# Parse response # Parse response
# Option1: If name is null fallback to address field road,neighbourhood,suburb # If name is null fallback to address field tags: neighbourhood,suburb
# Option2: Return the json for future reference like country # if none repeat with lower zoom level
if r.status_code == 200 and "name" in r.json(): if r.status_code == 200 and "name" in r.json():
r_dict = r.json() r_dict = r.json()
#plpy.notice('reverse_geocode_py_fn Parameters [{}] [{}] Response'.format(lon, lat, r_dict)) #plpy.notice('reverse_geocode_py_fn Parameters [{}] [{}] Response'.format(lon, lat, r_dict))
output = None output = None
country_code = None country_code = None
if "country_code" in r_dict["address"] and r_dict["address"]["country_code"]: if "country_code" in r_dict["address"] and r_dict["address"]["country_code"]:
country_code = r_dict["address"]["country_code"] country_code = r_dict["address"]["country_code"]
if r_dict["name"]: if r_dict["name"]:
return { "name": r_dict["name"], "country_code": country_code } return { "name": r_dict["name"], "country_code": country_code }
elif "address" in r_dict and r_dict["address"]: elif "address" in r_dict and r_dict["address"]:
if "neighbourhood" in r_dict["address"] and r_dict["address"]["neighbourhood"]: if "neighbourhood" in r_dict["address"] and r_dict["address"]["neighbourhood"]:
return { "name": r_dict["address"]["neighbourhood"], "country_code": country_code } return { "name": r_dict["address"]["neighbourhood"], "country_code": country_code }
elif "road" in r_dict["address"] and r_dict["address"]["road"]: elif "hamlet" in r_dict["address"] and r_dict["address"]["hamlet"]:
return { "name": r_dict["address"]["road"], "country_code": country_code } return { "name": r_dict["address"]["hamlet"], "country_code": country_code }
elif "suburb" in r_dict["address"] and r_dict["address"]["suburb"]: elif "suburb" in r_dict["address"] and r_dict["address"]["suburb"]:
return { "name": r_dict["address"]["suburb"], "country_code": country_code } return { "name": r_dict["address"]["suburb"], "country_code": country_code }
elif "residential" in r_dict["address"] and r_dict["address"]["residential"]: elif "residential" in r_dict["address"] and r_dict["address"]["residential"]:
return { "name": r_dict["address"]["residential"], "country_code": country_code } return { "name": r_dict["address"]["residential"], "country_code": country_code }
elif "village" in r_dict["address"] and r_dict["address"]["village"]: elif "village" in r_dict["address"] and r_dict["address"]["village"]:
return { "name": r_dict["address"]["village"], "country_code": country_code } return { "name": r_dict["address"]["village"], "country_code": country_code }
elif "town" in r_dict["address"] and r_dict["address"]["town"]: elif "town" in r_dict["address"] and r_dict["address"]["town"]:
return { "name": r_dict["address"]["town"], "country_code": country_code } return { "name": r_dict["address"]["town"], "country_code": country_code }
else: elif "amenity" in r_dict["address"] and r_dict["address"]["amenity"]:
return { "name": "n/a", "country_code": country_code } return { "name": r_dict["address"]["amenity"], "country_code": country_code }
else: else:
return { "name": "n/a", "country_code": country_code } if (zoom == 15):
else: plpy.notice('georeverse recursive retry with lower zoom than:[{}], Response [{}]'.format(zoom , r.json()))
plpy.warning('Failed to received a geo full address %s', r.json()) return { "name": "n/a", "country_code": country_code }
#plpy.error('Failed to received a geo full address %s', r.json()) else:
return { "name": "unknown", "country_code": "unknown" } plpy.notice('georeverse recursive retry with lower zoom than:[{}], Response [{}]'.format(zoom , r.json()))
return georeverse(geocoder, lon, lat, 15)
else:
return { "name": "n/a", "country_code": country_code }
else:
plpy.warning('Failed to received a geo full address %s', r.json())
#plpy.error('Failed to received a geo full address %s', r.json())
return { "name": "unknown", "country_code": "unknown" }
return georeverse(geocoder, lon, lat)
$reverse_geocode_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u; $reverse_geocode_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
public.reverse_geocode_py_fn public.reverse_geocode_py_fn
@@ -347,6 +357,10 @@ AS $urlencode_py$
import urllib.parse import urllib.parse
return urllib.parse.quote(uri, safe=""); return urllib.parse.quote(uri, safe="");
$urlencode_py$ LANGUAGE plpython3u IMMUTABLE STRICT; $urlencode_py$ LANGUAGE plpython3u IMMUTABLE STRICT;
-- Description
COMMENT ON FUNCTION
public.urlencode_py_fn
IS 'python url encode using plpython3u';
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- python -- python
@@ -364,16 +378,14 @@ AS $reverse_geoip_py$
url = f'https://ipapi.co/{_ip}/json/' url = f'https://ipapi.co/{_ip}/json/'
r = requests.get(url) r = requests.get(url)
#print(r.text) #print(r.text)
# Return something boolean? #plpy.notice('IP [{}] [{}]'.format(_ip, r.status_code))
plpy.warning('IP [{}] [{}]'.format(_ip, r.status_code))
if r.status_code == 200: if r.status_code == 200:
#plpy.notice('Got [{}] [{}]'.format(r.text, r.status_code)) #plpy.notice('Got [{}] [{}]'.format(r.text, r.status_code))
return r.text; return r.json();
else: else:
plpy.error('Failed to get ip details') plpy.error('Failed to get ip details')
return '{}' return '{}'
$reverse_geoip_py$ LANGUAGE plpython3u; $reverse_geoip_py$ LANGUAGE plpython3u;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
public.reverse_geoip_py_fn public.reverse_geoip_py_fn
@@ -422,4 +434,252 @@ IMMUTABLE STRICT;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
public.geojson_py_fn public.geojson_py_fn
IS 'Parse geojson using plpython3u (should be done in PGSQL)'; IS 'Parse geojson using plpython3u (should be done in PGSQL), deprecated';
DROP FUNCTION IF EXISTS overpass_py_fn;
-- Reverse-lookup seamark/marina details around a coordinate via the
-- Overpass API (OpenStreetMap).
-- NOTE(review): declared IMMUTABLE although it performs a network call;
-- PostgreSQL may cache/fold results — consider STABLE or VOLATILE.
CREATE OR REPLACE FUNCTION overpass_py_fn(IN lon NUMERIC, IN lat NUMERIC,
    OUT geo JSONB) RETURNS JSONB
AS $overpass_py$
    """
    Return https://overpass-turbo.eu seamark details within 400m
    https://overpass-turbo.eu/s/1EaG
    https://wiki.openstreetmap.org/wiki/Key:seamark:type
    """
    import requests
    import json
    import urllib.parse

    headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
    # Overpass QL: areas containing the point, then anything of interest
    # within a 400m radius. Overpass expects latitude first, hence (lat, lon).
    payload = """
    [out:json][timeout:20];
    is_in({0},{1})->.result_areas;
    (
      area.result_areas["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
      area.result_areas["leisure"="marina"][~"name"~"."];
    );
    out tags;
    nwr(around:400.0,{0},{1})->.all;
    (
      nwr.all["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
      nwr.all["seamark:type"~"(anchorage|anchor_berth|berth)"];
      nwr.all["leisure"="marina"];
      nwr.all["natural"~"(bay|beach)"];
    );
    out tags;
    """.format(lat, lon)
    data = urllib.parse.quote(payload, safe="")
    url = f'https://overpass-api.de/api/interpreter?data={data}'
    # Fix: headers must be passed by keyword — the second positional
    # argument of requests.get() is 'params', so the previous call
    # silently sent no User-Agent/From headers.
    r = requests.get(url, headers=headers)
    #print(r.text)
    #plpy.notice(url)
    plpy.notice('overpass-api coord lon[{}] lat[{}] [{}]'.format(lon, lat, r.status_code))
    if r.status_code == 200 and "elements" in r.json():
        r_dict = r.json()
        plpy.notice('overpass-api Got [{}]'.format(r_dict["elements"]))
        if r_dict["elements"]:
            if "tags" in r_dict["elements"][0] and r_dict["elements"][0]["tags"]:
                return r_dict["elements"][0]["tags"] # return the first element
        # Fix: with TRANSFORM FOR TYPE jsonb, a Python str '{}' becomes the
        # jsonb *string* "{}"; return an empty dict for an empty jsonb object.
        return {}
    else:
        plpy.notice('overpass-api Failed to get overpass-api details')
    return {}
$overpass_py$ IMMUTABLE strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.overpass_py_fn
    IS 'Return https://overpass-turbo.eu seamark details within 400m using plpython3u';
---------------------------------------------------------------------------
-- Provision Grafana SQL
--
-- Provision a dedicated Grafana organization (org, user, datasource,
-- dashboards, preferences) for one vessel via the Grafana HTTP admin API.
-- Runs as a sequence of dependent HTTP calls; any hard failure aborts via
-- plpy.error (which raises, so the 'return' statements after it are
-- unreachable safety nets).
CREATE OR REPLACE FUNCTION grafana_py_fn(IN _v_name TEXT, IN _v_id TEXT,
IN _u_email TEXT, IN app JSONB) RETURNS VOID
AS $grafana_py$
"""
https://grafana.com/docs/grafana/latest/developers/http_api/
Create organization base on vessel name
Create user base on user email
Add user to organization
Add data_source to organization
Add dashboard to organization
Update organization preferences
"""
import requests
import json
import re
# Resolve the Grafana admin endpoint (expected to embed admin credentials)
# from the app settings JSONB; abort when missing.
grafana_uri = None
if 'app.grafana_admin_uri' in app and app['app.grafana_admin_uri']:
grafana_uri = app['app.grafana_admin_uri']
else:
plpy.error('Error no grafana_admin_uri defined, check app settings')
return None
# add vessel org
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com',
'Accept': 'application/json', 'Content-Type': 'application/json'}
path = 'api/orgs'
# NOTE(review): the f-string already interpolates; the trailing
# .format(...) calls on these URLs are no-ops — confirm intended.
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
data_dict = {'name':_v_name}
data = json.dumps(data_dict)
r = requests.post(url, data=data, headers=headers)
#print(r.text)
plpy.notice(r.json())
if r.status_code == 200 and "orgId" in r.json():
org_id = r.json()['orgId']
else:
plpy.error('Error grafana add vessel org %', r.json())
return None
# add user to vessel org
path = 'api/admin/users'
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
# NOTE(review): hard-coded placeholder password for the provisioned user —
# presumably login happens via oauth/proxy; verify this is intentional.
data_dict = {'orgId':org_id, 'email':_u_email, 'password':'asupersecretpassword'}
data = json.dumps(data_dict)
r = requests.post(url, data=data, headers=headers)
#print(r.text)
plpy.notice(r.json())
if r.status_code == 200 and "id" in r.json():
user_id = r.json()['id']
else:
plpy.error('Error grafana add user to vessel org')
return
# read data_source
# Clone datasource id=1 as a per-vessel, read-only datasource.
path = 'api/datasources/1'
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
r = requests.get(url, headers=headers)
#print(r.text)
plpy.notice(r.json())
data_source = r.json()
data_source['id'] = 0
data_source['orgId'] = org_id
data_source['uid'] = "ds_" + _v_id
data_source['name'] = "ds_" + _v_id
data_source['secureJsonData'] = {}
data_source['secureJsonData']['password'] = 'password'
data_source['readOnly'] = True
del data_source['secureJsonFields']
# add data_source to vessel org
path = 'api/datasources'
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
data = json.dumps(data_source)
# X-Grafana-Org-Id scopes the request to the new org.
headers['X-Grafana-Org-Id'] = str(org_id)
r = requests.post(url, data=data, headers=headers)
plpy.notice(r.json())
del headers['X-Grafana-Org-Id']
# NOTE(review): 'and' means a non-200 response that still carries "id"
# is not flagged — confirm 'or' was not intended here and below.
if r.status_code != 200 and "id" not in r.json():
plpy.error('Error grafana add data_source to vessel org')
return
dashboards_tpl = [ 'pgsail_tpl_electrical', 'pgsail_tpl_logbook', 'pgsail_tpl_monitor', 'pgsail_tpl_rpi', 'pgsail_tpl_solar', 'pgsail_tpl_weather', 'pgsail_tpl_home']
for dashboard in dashboards_tpl:
# read dashboard template by uid
path = 'api/dashboards/uid'
url = f'{grafana_uri}/{path}/{dashboard}'.format(grafana_uri,path,dashboard)
# Templates live in the default org; drop the org scoping for the read.
if 'X-Grafana-Org-Id' in headers:
del headers['X-Grafana-Org-Id']
r = requests.get(url, headers=headers)
plpy.notice(r.json())
if r.status_code != 200 and "id" not in r.json():
plpy.error('Error grafana read dashboard template')
return
new_dashboard = r.json()
del new_dashboard['meta']
new_dashboard['dashboard']['version'] = 0
new_dashboard['dashboard']['id'] = 0
# pgsail_tpl_<x> -> postgsail_<x>_<vessel_id> so each org gets unique uids.
new_uid = re.sub(r'pgsail_tpl_(.*)', r'postgsail_\1', new_dashboard['dashboard']['uid'])
new_dashboard['dashboard']['uid'] = f'{new_uid}_{_v_id}'.format(new_uid,_v_id)
# add dashboard to vessel org
path = 'api/dashboards/db'
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
data = json.dumps(new_dashboard)
# Rewire the template's datasource uid to the per-vessel datasource.
new_data = data.replace('PCC52D03280B7034C', data_source['uid'])
headers['X-Grafana-Org-Id'] = str(org_id)
r = requests.post(url, data=new_data, headers=headers)
plpy.notice(r.json())
if r.status_code != 200 and "id" not in r.json():
plpy.error('Error grafana add dashboard to vessel org')
return
# Update Org Prefs
# Set UTC timezone and make the vessel home dashboard the org default.
path = 'api/org/preferences'
url = f'{grafana_uri}/{path}'.format(grafana_uri,path)
home_dashboard = {}
home_dashboard['timezone'] = 'utc'
home_dashboard['homeDashboardUID'] = f'postgsail_home_{_v_id}'.format(_v_id)
data = json.dumps(home_dashboard)
headers['X-Grafana-Org-Id'] = str(org_id)
r = requests.patch(url, data=data, headers=headers)
plpy.notice(r.json())
if r.status_code != 200:
plpy.error('Error grafana update org preferences')
return
plpy.notice('Done')
$grafana_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
public.grafana_py_fn
IS 'Grafana Organization,User,data_source,dashboards provisioning via HTTP API using plpython3u';
-- https://stackoverflow.com/questions/65517230/how-to-set-user-attribute-value-in-keycloak-using-api
DROP FUNCTION IF EXISTS keycloak_py_fn;
-- Set the vessel_id attribute on a Keycloak user via the admin REST API.
-- app['app.keycloak_uri'] is expected as scheme://user:pwd@host.
CREATE OR REPLACE FUNCTION keycloak_py_fn(IN user_id TEXT, IN vessel_id TEXT,
    IN app JSONB) RETURNS JSONB
AS $keycloak_py$
    """
    Add vessel_id user attribute to keycloak user {user_id}
    """
    import requests
    import json
    import urllib.parse

    # Extract host and admin credentials from the configured keycloak URI.
    safe_uri = host = user = pwd = None
    if 'app.keycloak_uri' in app and app['app.keycloak_uri']:
        safe_uri = urllib.parse.quote(app['app.keycloak_uri'], safe=':/?&=')
        _ = urllib.parse.urlparse(safe_uri)
        host = _.netloc.split('@')[-1]
        user = _.netloc.split('@')[0].split(':')[0]
        pwd = _.netloc.split('@')[0].split(':')[1]
    else:
        plpy.error('Error no keycloak_uri defined, check app settings')
        return None
    if not host or not user or not pwd:
        plpy.error('Error parsing keycloak_uri, check app settings')
        return None

    # Obtain an admin access token from the master realm.
    _headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
    _payload = {'client_id':'admin-cli','grant_type':'password','username':user,'password':pwd}
    url = f'{_.scheme}://{host}/realms/master/protocol/openid-connect/token'
    r = requests.post(url, headers=_headers, data=_payload, timeout=(5, 60))
    #print(r.text)
    #plpy.notice(url)
    if r.status_code == 200 and 'access_token' in r.json():
        response = r.json()
        plpy.notice(response)
        _headers['Authorization'] = 'Bearer ' + response['access_token']
        _headers['Content-Type'] = 'application/json'
        _payload = { 'attributes': {'vessel_id': vessel_id} }
        # Fix: 'keycloak_uri' was never defined in this scope (NameError at
        # runtime); rebuild the admin URL from the parsed scheme/host,
        # which also strips the embedded credentials from the URL.
        url = f'{_.scheme}://{host}/admin/realms/postgsail/users/{user_id}'
        #plpy.notice(url)
        #plpy.notice(_payload)
        data = json.dumps(_payload)
        r = requests.put(url, headers=_headers, data=data, timeout=(5, 60))
        if r.status_code != 204:
            plpy.notice("Error updating user: {status} [{text}]".format(
                status=r.status_code, text=r.text))
            return None
        else:
            plpy.notice("Updated user : {user} [{text}]".format(user=user_id, text=r.text))
    else:
        # Fix: was an f-string referencing undefined names 'status'/'text',
        # raising NameError before .format() could run; use plain str.format.
        plpy.notice('Error getting admin access_token: {status} [{text}]'.format(
            status=r.status_code, text=r.text))
        return None
$keycloak_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.keycloak_py_fn
    IS 'Return set oauth user attribute into keycloak using plpython3u';

View File

@@ -21,7 +21,8 @@ CREATE EXTENSION IF NOT EXISTS "pgcrypto"; -- provides cryptographic functions
DROP TABLE IF EXISTS auth.accounts CASCADE; DROP TABLE IF EXISTS auth.accounts CASCADE;
CREATE TABLE IF NOT EXISTS auth.accounts ( CREATE TABLE IF NOT EXISTS auth.accounts (
public_id SERIAL UNIQUE NOT NULL, id INT UNIQUE GENERATED ALWAYS AS IDENTITY,
--id TEXT NOT NULL UNIQUE DEFAULT uuid_generate_v7(),
user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12), user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
email CITEXT PRIMARY KEY CHECK ( email ~* '^.+@.+\..+$' ), email CITEXT PRIMARY KEY CHECK ( email ~* '^.+@.+\..+$' ),
first TEXT NOT NULL CHECK (length(pass) < 512), first TEXT NOT NULL CHECK (length(pass) < 512),
@@ -29,9 +30,9 @@ CREATE TABLE IF NOT EXISTS auth.accounts (
pass TEXT NOT NULL CHECK (length(pass) < 512), pass TEXT NOT NULL CHECK (length(pass) < 512),
role name NOT NULL CHECK (length(role) < 512), role name NOT NULL CHECK (length(role) < 512),
preferences JSONB NULL DEFAULT '{"email_notifications":true}', preferences JSONB NULL DEFAULT '{"email_notifications":true}',
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
connected_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), connected_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
CONSTRAINT valid_email CHECK (length(email) > 5), -- Enforce at least 5 char, eg: a@b.io CONSTRAINT valid_email CHECK (length(email) > 5), -- Enforce at least 5 char, eg: a@b.io
CONSTRAINT valid_first CHECK (length(first) > 1), CONSTRAINT valid_first CHECK (length(first) > 1),
CONSTRAINT valid_last CHECK (length(last) > 1), CONSTRAINT valid_last CHECK (length(last) > 1),
@@ -42,11 +43,7 @@ COMMENT ON TABLE
auth.accounts auth.accounts
IS 'users account table'; IS 'users account table';
-- Indexes -- Indexes
-- is unused index?
--CREATE INDEX accounts_role_idx ON auth.accounts (role);
CREATE INDEX accounts_preferences_idx ON auth.accounts USING GIN (preferences); CREATE INDEX accounts_preferences_idx ON auth.accounts USING GIN (preferences);
CREATE INDEX accounts_public_id_idx ON auth.accounts (public_id);
COMMENT ON COLUMN auth.accounts.public_id IS 'User public_id to allow mapping for anonymous access, could be use as well for as Grafana orgId';
COMMENT ON COLUMN auth.accounts.first IS 'User first name with CONSTRAINT CHECK'; COMMENT ON COLUMN auth.accounts.first IS 'User first name with CONSTRAINT CHECK';
COMMENT ON COLUMN auth.accounts.last IS 'User last name with CONSTRAINT CHECK'; COMMENT ON COLUMN auth.accounts.last IS 'User last name with CONSTRAINT CHECK';
@@ -61,39 +58,59 @@ COMMENT ON TRIGGER accounts_moddatetime
DROP TABLE IF EXISTS auth.vessels; DROP TABLE IF EXISTS auth.vessels;
CREATE TABLE IF NOT EXISTS auth.vessels ( CREATE TABLE IF NOT EXISTS auth.vessels (
vessel_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12), vessel_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
-- user_id TEXT NOT NULL REFERENCES auth.accounts(user_id) ON DELETE RESTRICT, -- user_id TEXT NOT NULL REFERENCES auth.accounts(user_id) ON DELETE RESTRICT,
owner_email CITEXT PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT, owner_email CITEXT PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
-- mmsi TEXT UNIQUE, -- Should be a numeric range between 100000000 and 800000000.
mmsi NUMERIC UNIQUE, -- MMSI can be optional but if present must be a valid one and unique mmsi NUMERIC UNIQUE, -- MMSI can be optional but if present must be a valid one and unique
name TEXT NOT NULL CHECK (length(name) >= 3 AND length(name) < 512), name TEXT NOT NULL CHECK (length(name) >= 3 AND length(name) < 512),
-- pass text not null check (length(pass) < 512), -- unused
role name not null check (length(role) < 512), role name not null check (length(role) < 512),
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(), created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW() updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
-- CONSTRAINT valid_length_mmsi CHECK (length(mmsi) < 10 OR length(mmsi) = 0)
CONSTRAINT valid_range_mmsi CHECK (mmsi > 100000000 AND mmsi < 800000000) CONSTRAINT valid_range_mmsi CHECK (mmsi > 100000000 AND mmsi < 800000000)
); );
-- Description -- Description
COMMENT ON TABLE COMMENT ON TABLE
auth.vessels auth.vessels
IS 'vessels table link to accounts email user_id column'; IS 'vessels table link to accounts email user_id column';
-- Indexes COMMENT ON COLUMN
-- is unused index? auth.vessels.mmsi
--CREATE INDEX vessels_role_idx ON auth.vessels (role); IS 'MMSI can be optional but if present must be a valid one and unique but must be in numeric range between 100000000 and 800000000';
-- is unused index?
--CREATE INDEX vessels_name_idx ON auth.vessels (name);
CREATE INDEX vessels_vesselid_idx ON auth.vessels (vessel_id);
CREATE TRIGGER vessels_moddatetime CREATE TRIGGER vessels_moddatetime
BEFORE UPDATE ON auth.vessels BEFORE UPDATE ON auth.vessels
FOR EACH ROW FOR EACH ROW
EXECUTE PROCEDURE moddatetime (updated_at); EXECUTE PROCEDURE moddatetime (updated_at);
-- Description -- Description
COMMENT ON TRIGGER vessels_moddatetime COMMENT ON TRIGGER vessels_moddatetime
ON auth.vessels ON auth.vessels
IS 'Automatic update of updated_at on table modification'; IS 'Automatic update of updated_at on table modification';
-- Mirror of a Keycloak/OIDC user: every column defaults to a claim from the
-- JWT carried in request.jwt.claims, so 'INSERT ... VALUES(DEFAULT)' is
-- enough to register the currently authenticated user.
CREATE TABLE auth.users (
-- 'sub' claim: the oauth subject is the primary key
id NAME PRIMARY KEY DEFAULT current_setting('request.jwt.claims', true)::json->>'sub',
email NAME NOT NULL DEFAULT current_setting('request.jwt.claims', true)::json->>'email',
-- internal short id, same generation scheme as auth.accounts.user_id
user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
first TEXT NOT NULL DEFAULT current_setting('request.jwt.claims', true)::json->>'given_name',
last TEXT NOT NULL DEFAULT current_setting('request.jwt.claims', true)::json->>'family_name',
role NAME NOT NULL DEFAULT 'user_role' CHECK (length(role) < 512),
-- oauth users are considered email-verified by the identity provider
preferences JSONB NULL DEFAULT '{"email_notifications":true, "email_valid": true, "email_verified": true}',
created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(),
connected_at TIMESTAMPTZ NOT NULL DEFAULT NOW()
);
-- Description
COMMENT ON TABLE
auth.users
IS 'Keycloak Oauth user, map user details from access token';
CREATE TRIGGER user_moddatetime
BEFORE UPDATE ON auth.users
FOR EACH ROW
EXECUTE PROCEDURE moddatetime (updated_at);
-- Description
COMMENT ON TRIGGER user_moddatetime
ON auth.users
IS 'Automatic update of updated_at on table modification';
create or replace function create or replace function
auth.check_role_exists() returns trigger as $$ auth.check_role_exists() returns trigger as $$
begin begin
@@ -112,12 +129,26 @@ create constraint trigger ensure_user_role_exists
after insert or update on auth.accounts after insert or update on auth.accounts
for each row for each row
execute procedure auth.check_role_exists(); execute procedure auth.check_role_exists();
-- Description
COMMENT ON TRIGGER ensure_user_role_exists
ON auth.accounts
IS 'ensure user role exists';
-- trigger add queue new account -- trigger add queue new account
CREATE TRIGGER new_account_entry AFTER INSERT ON auth.accounts CREATE TRIGGER new_account_entry AFTER INSERT ON auth.accounts
FOR EACH ROW EXECUTE FUNCTION public.new_account_entry_fn(); FOR EACH ROW EXECUTE FUNCTION public.new_account_entry_fn();
-- Description
COMMENT ON TRIGGER new_account_entry
ON auth.accounts
IS 'Add new account in process_queue for further processing';
-- trigger add queue new account OTP validation -- trigger add queue new account OTP validation
CREATE TRIGGER new_account_otp_validation_entry AFTER INSERT ON auth.accounts CREATE TRIGGER new_account_otp_validation_entry AFTER INSERT ON auth.accounts
FOR EACH ROW EXECUTE FUNCTION public.new_account_otp_validation_entry_fn(); FOR EACH ROW EXECUTE FUNCTION public.new_account_otp_validation_entry_fn();
-- Description
COMMENT ON TRIGGER new_account_otp_validation_entry
ON auth.accounts
IS 'Add new account OTP validation in process_queue for further processing';
-- trigger check role on vessel -- trigger check role on vessel
drop trigger if exists ensure_vessel_role_exists on auth.vessels; drop trigger if exists ensure_vessel_role_exists on auth.vessels;
@@ -128,6 +159,18 @@ create constraint trigger ensure_vessel_role_exists
-- trigger add queue new vessel -- trigger add queue new vessel
CREATE TRIGGER new_vessel_entry AFTER INSERT ON auth.vessels CREATE TRIGGER new_vessel_entry AFTER INSERT ON auth.vessels
FOR EACH ROW EXECUTE FUNCTION public.new_vessel_entry_fn(); FOR EACH ROW EXECUTE FUNCTION public.new_vessel_entry_fn();
-- Description
COMMENT ON TRIGGER new_vessel_entry
ON auth.vessels
IS 'Add new vessel in process_queue for further processing';
-- trigger add new vessel name as public_vessel user configuration
CREATE TRIGGER new_vessel_public AFTER INSERT ON auth.vessels
FOR EACH ROW EXECUTE FUNCTION public.new_vessel_public_fn();
-- Description
COMMENT ON TRIGGER new_vessel_public
ON auth.vessels
IS 'Add new vessel name as public_vessel user configuration';
create or replace function create or replace function
auth.encrypt_pass() returns trigger as $$ auth.encrypt_pass() returns trigger as $$
@@ -245,6 +288,96 @@ begin
end; end;
$$ language plpgsql security definer; $$ language plpgsql security definer;
---------------------------------------------------------------------------
-- API account Oauth functions
--
-- oauth is on your exposed schema
-- openid/oauth user register entry point: auto-register the authenticated
-- Keycloak user (from request.jwt.claims) on first access. security definer
-- so it can write auth.users on behalf of the caller.
create or replace function
api.oauth() returns void as $$
declare
    _exist boolean;
begin
    -- Ensure we have the required key/value in the access token
    if current_setting('request.jwt.claims', true)::json->>'sub' is null OR
        current_setting('request.jwt.claims', true)::json->>'email' is null THEN
        return;
    end if;
    -- check if the oauth subject is already registered
    select exists( select email from auth.users
                    where id = current_setting('request.jwt.claims', true)::json->>'sub'
                ) INTO _exist;
    -- Fix: SELECT ... INTO always sets FOUND to true (EXISTS returns a row
    -- either way), so the previous "if NOT FOUND" branch never ran and new
    -- users were never inserted. Test the boolean result instead, matching
    -- api.oauth_vessel.
    if _exist is False then
        RAISE WARNING 'Register new oauth user email:[%]', current_setting('request.jwt.claims', true)::json->>'email';
        -- insert new user, default value from the oauth access token
        INSERT INTO auth.users (role, preferences)
            VALUES ('user_role', '{"email_notifications":true, "email_valid": true, "email_verified": true}');
    end if;
end;
$$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.oauth
    IS 'openid/oauth user register entry point';
-- Register both the oauth user (auth.users + auth.accounts) and their vessel
-- (auth.vessels) from the signalk plugin, then push the new vessel_id back to
-- Keycloak as a user attribute so future tokens carry it. security definer:
-- writes auth.* on behalf of the caller.
create or replace function
api.oauth_vessel(in _mmsi text, in _name text) returns void as $$
declare
_exist boolean;
vessel_name text := _name;
vessel_mmsi text := _mmsi;
_vessel_id text := null;
vessel_rec record;
app_settings jsonb;
_user_id text := null;
begin
RAISE WARNING 'oauth_vessel:[%]', current_setting('user.email', true);
RAISE WARNING 'oauth_vessel:[%]', current_setting('request.jwt.claims', true)::json->>'email';
-- Ensure we have the required key/value in the access token
if current_setting('request.jwt.claims', true)::json->>'sub' is null OR
current_setting('request.jwt.claims', true)::json->>'email' is null THEN
return;
end if;
-- check email exist
select exists( select email from auth.accounts
where email = current_setting('request.jwt.claims', true)::json->>'email'
) INTO _exist;
if _exist is False then
RAISE WARNING 'Register new oauth user email:[%]', current_setting('request.jwt.claims', true)::json->>'email';
-- insert new user, default value from the oauth access token
-- (auth.users columns all default to JWT claims)
INSERT INTO auth.users VALUES(DEFAULT) RETURNING user_id INTO _user_id;
-- insert new user to account table from the oauth access token
-- NOTE(review): the 'sub' claim is stored as the account password —
-- presumably a placeholder since auth happens via oauth; confirm.
INSERT INTO auth.accounts (email, first, last, pass, user_id, role, preferences)
VALUES (current_setting('request.jwt.claims', true)::json->>'email',
current_setting('request.jwt.claims', true)::json->>'given_name',
current_setting('request.jwt.claims', true)::json->>'family_name',
current_setting('request.jwt.claims', true)::json->>'sub',
_user_id, 'user_role', '{"email_notifications":true, "email_valid": true, "email_verified": true}');
end if;
-- MMSI is optional; discard non-numeric input before the NUMERIC cast below.
IF public.isnumeric(vessel_mmsi) IS False THEN
vessel_mmsi = NULL;
END IF;
-- check vessel exist
SELECT * INTO vessel_rec
FROM auth.vessels vessel
WHERE vessel.owner_email = current_setting('request.jwt.claims', true)::json->>'email';
IF vessel_rec IS NULL THEN
RAISE WARNING 'Register new vessel name:[%] mmsi:[%] for [%]', vessel_name, vessel_mmsi, current_setting('request.jwt.claims', true)::json->>'email';
INSERT INTO auth.vessels (owner_email, mmsi, name, role)
VALUES (current_setting('request.jwt.claims', true)::json->>'email', vessel_mmsi::NUMERIC, vessel_name, 'vessel_role') RETURNING vessel_id INTO _vessel_id;
-- Gather url from app settings
app_settings := get_app_settings_fn();
-- set oauth user vessel_id attributes for token generation
PERFORM keycloak_py_fn(current_setting('request.jwt.claims', true)::json->>'sub'::TEXT, _vessel_id::TEXT, app_settings);
END IF;
end;
$$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.oauth_vessel
IS 'user and vessel register entry point from signalk plugin';
--------------------------------------------------------------------------- ---------------------------------------------------------------------------
-- API vessel helper functions -- API vessel helper functions
-- register_vessel should be on your exposed schema -- register_vessel should be on your exposed schema
@@ -271,7 +404,7 @@ begin
IF vessel_rec IS NULL THEN IF vessel_rec IS NULL THEN
RAISE WARNING 'Register new vessel name:[%] mmsi:[%] for [%]', vessel_name, vessel_mmsi, vessel_email; RAISE WARNING 'Register new vessel name:[%] mmsi:[%] for [%]', vessel_name, vessel_mmsi, vessel_email;
INSERT INTO auth.vessels (owner_email, mmsi, name, role) INSERT INTO auth.vessels (owner_email, mmsi, name, role)
VALUES (vessel_email, vessel_mmsi::NUMERIC, vessel_name, 'vessel_role') RETURNING vessel_id INTO _vessel_id; VALUES (vessel_email, vessel_mmsi::NUMERIC, vessel_name, 'vessel_role') RETURNING vessel_id INTO _vessel_id;
vessel_rec.role := 'vessel_role'; vessel_rec.role := 'vessel_role';
vessel_rec.owner_email = vessel_email; vessel_rec.owner_email = vessel_email;
vessel_rec.vessel_id = _vessel_id; vessel_rec.vessel_id = _vessel_id;

View File

@@ -20,7 +20,16 @@ COMMENT ON COLUMN api.metadata.vessel_id IS 'Link auth.vessels with api.metadata
-- REFERENCE ship type with AIS type ? -- REFERENCE ship type with AIS type ?
-- REFERENCE mmsi MID with country ? -- REFERENCE mmsi MID with country ?
ALTER TABLE api.logbook ADD FOREIGN KEY (_from_moorage_id) REFERENCES api.moorages(id) ON DELETE RESTRICT;
COMMENT ON COLUMN api.logbook._from_moorage_id IS 'Link api.moorages with api.logbook via FOREIGN KEY and REFERENCES';
ALTER TABLE api.logbook ADD FOREIGN KEY (_to_moorage_id) REFERENCES api.moorages(id) ON DELETE RESTRICT;
COMMENT ON COLUMN api.logbook._to_moorage_id IS 'Link api.moorages with api.logbook via FOREIGN KEY and REFERENCES';
ALTER TABLE api.stays ADD FOREIGN KEY (moorage_id) REFERENCES api.moorages(id) ON DELETE RESTRICT;
COMMENT ON COLUMN api.stays.moorage_id IS 'Link api.moorages with api.stays via FOREIGN KEY and REFERENCES';
ALTER TABLE api.stays ADD FOREIGN KEY (stay_code) REFERENCES api.stays_at(stay_code) ON DELETE RESTRICT;
COMMENT ON COLUMN api.stays.stay_code IS 'Link api.stays_at with api.stays via FOREIGN KEY and REFERENCES';
ALTER TABLE api.moorages ADD FOREIGN KEY (stay_code) REFERENCES api.stays_at(stay_code) ON DELETE RESTRICT;
COMMENT ON COLUMN api.moorages.stay_code IS 'Link api.stays_at with api.moorages via FOREIGN KEY and REFERENCES';
-- List vessel -- List vessel
--TODO add geojson with position --TODO add geojson with position
@@ -37,10 +46,10 @@ CREATE OR REPLACE VIEW api.vessels_view WITH (security_invoker=true,security_bar
SELECT SELECT
v.name as name, v.name as name,
v.mmsi as mmsi, v.mmsi as mmsi,
v.created_at::timestamp(0) as created_at, v.created_at as created_at,
m.last_contact as last_contact, m.last_contact as last_contact,
((NOW() AT TIME ZONE 'UTC' - m.last_contact::timestamp without time zone) > INTERVAL '70 MINUTES') as offline, ((NOW() AT TIME ZONE 'UTC' - m.last_contact::TIMESTAMPTZ) > INTERVAL '70 MINUTES') as offline,
(NOW() AT TIME ZONE 'UTC' - m.last_contact::timestamp without time zone) as duration (NOW() AT TIME ZONE 'UTC' - m.last_contact::TIMESTAMPTZ) as duration
FROM auth.vessels v, metadata m FROM auth.vessels v, metadata m
WHERE v.owner_email = current_setting('user.email'); WHERE v.owner_email = current_setting('user.email');
-- Description -- Description
@@ -98,9 +107,9 @@ AS $vessel$
jsonb_build_object( jsonb_build_object(
'name', coalesce(m.name, null), 'name', coalesce(m.name, null),
'mmsi', coalesce(m.mmsi, null), 'mmsi', coalesce(m.mmsi, null),
'created_at', v.created_at::timestamp(0), 'created_at', v.created_at,
'first_contact', coalesce(m.created_at::timestamp(0), null), 'first_contact', coalesce(m.created_at, null),
'last_contact', coalesce(m.time::timestamp(0), null), 'last_contact', coalesce(m.time, null),
'geojson', coalesce(ST_AsGeoJSON(geojson_t.*)::json, null) 'geojson', coalesce(ST_AsGeoJSON(geojson_t.*)::json, null)
)::jsonb || api.vessel_details_fn()::jsonb )::jsonb || api.vessel_details_fn()::jsonb
INTO vessel INTO vessel
@@ -140,12 +149,11 @@ AS $user_settings$
from ( from (
select a.email, a.first, a.last, a.preferences, a.created_at, select a.email, a.first, a.last, a.preferences, a.created_at,
INITCAP(CONCAT (LEFT(first, 1), ' ', last)) AS username, INITCAP(CONCAT (LEFT(first, 1), ' ', last)) AS username,
public.has_vessel_fn() as has_vessel, public.has_vessel_fn() as has_vessel
--public.has_vessel_metadata_fn() as has_vessel_metadata, --public.has_vessel_metadata_fn() as has_vessel_metadata,
a.public_id
from auth.accounts a from auth.accounts a
where email = current_setting('user.email') where email = current_setting('user.email')
) row; ) row;
END; END;
$user_settings$ language plpgsql security definer; $user_settings$ language plpgsql security definer;
-- Description -- Description
@@ -234,7 +242,7 @@ $vessel_details$
DECLARE DECLARE
BEGIN BEGIN
RETURN ( WITH tbl AS ( RETURN ( WITH tbl AS (
SELECT mmsi,ship_type,length,beam,height,plugin_version FROM api.metadata WHERE vessel_id = current_setting('vessel.id', false) SELECT mmsi,ship_type,length,beam,height,plugin_version,platform FROM api.metadata WHERE vessel_id = current_setting('vessel.id', false)
) )
SELECT json_build_object( SELECT json_build_object(
'ship_type', (SELECT ais.description FROM aistypes ais, tbl t WHERE t.ship_type = ais.id), 'ship_type', (SELECT ais.description FROM aistypes ais, tbl t WHERE t.ship_type = ais.id),
@@ -243,7 +251,8 @@ BEGIN
'length', t.ship_type, 'length', t.ship_type,
'beam', t.beam, 'beam', t.beam,
'height', t.height, 'height', t.height,
'plugin_version', t.plugin_version) 'plugin_version', t.plugin_version,
'platform', t.platform)
FROM tbl t FROM tbl t
); );
END; END;
@@ -257,8 +266,8 @@ DROP VIEW IF EXISTS api.eventlogs_view;
CREATE VIEW api.eventlogs_view WITH (security_invoker=true,security_barrier=true) AS CREATE VIEW api.eventlogs_view WITH (security_invoker=true,security_barrier=true) AS
SELECT pq.* SELECT pq.*
FROM public.process_queue pq FROM public.process_queue pq
WHERE ref_id = current_setting('user.id', true) WHERE channel <> 'pre_logbook' AND (ref_id = current_setting('user.id', true)
OR ref_id = current_setting('vessel.id', true) OR ref_id = current_setting('vessel.id', true))
ORDER BY id ASC; ORDER BY id ASC;
-- Description -- Description
COMMENT ON VIEW COMMENT ON VIEW
@@ -286,16 +295,17 @@ COMMENT ON FUNCTION
api.update_logbook_observations_fn api.update_logbook_observations_fn
IS 'Update/Add logbook observations jsonb key pair value'; IS 'Update/Add logbook observations jsonb key pair value';
CREATE TYPE public_type AS ENUM ('public_logs', 'public_logs_list', 'public_timelapse', 'public_stats'); CREATE TYPE public_type AS ENUM ('public_logs', 'public_logs_list', 'public_timelapse', 'public_monitoring', 'public_stats');
CREATE FUNCTION api.ispublic_fn(IN id INTEGER, IN _type public_type) RETURNS BOOLEAN AS $ispublic$ CREATE or replace FUNCTION api.ispublic_fn(IN boat TEXT, IN _type TEXT, IN _id INTEGER DEFAULT NULL) RETURNS BOOLEAN AS $ispublic$
DECLARE DECLARE
_id INTEGER := id; vessel TEXT := '^' || boat || '$';
rec record; anonymous BOOLEAN := False;
valid_public_type BOOLEAN := False; valid_public_type BOOLEAN := False;
public_logs BOOLEAN := False;
BEGIN BEGIN
-- If _id is is not NULL and > 0 -- If boat is not NULL
IF _id IS NULL OR _id < 1 THEN IF boat IS NULL THEN
RAISE WARNING '-> ispublic_fn invalid input %', _id; RAISE WARNING '-> ispublic_fn invalid input %', boat;
RETURN False; RETURN False;
END IF; END IF;
-- Check if public_type is valid enum -- Check if public_type is valid enum
@@ -306,30 +316,36 @@ BEGIN
RETURN False; RETURN False;
END IF; END IF;
IF _type = 'public_logs' THEN RAISE WARNING '-> ispublic_fn _type [%], _id [%]', _type, _id;
IF _type ~ '^public_(logs|timelapse)$' AND _id > 0 THEN
WITH log as ( WITH log as (
select vessel_id from api.logbook l where l.id = _id SELECT vessel_id from api.logbook l where l.id = _id
) )
SELECT (l.vessel_id) is not null into rec SELECT EXISTS (
--SELECT l.vessel_id, 'email', 'settings', a.preferences SELECT l.vessel_id
FROM auth.accounts a, auth.vessels v, jsonb_each_text(a.preferences), log l FROM auth.accounts a, auth.vessels v, jsonb_each_text(a.preferences) as prefs, log l
WHERE v.vessel_id = l.vessel_id WHERE v.vessel_id = l.vessel_id
AND a.email = v.owner_email AND a.email = v.owner_email
AND key = 'public_logs'::TEXT AND a.preferences->>'public_vessel'::text ~* vessel
AND value::BOOLEAN = true; AND prefs.key = _type::TEXT
IF FOUND THEN AND prefs.value::BOOLEAN = true
RETURN True; ) into anonymous;
END IF; RAISE WARNING '-> ispublic_fn public_logs output boat:[%], type:[%], result:[%]', boat, _type, anonymous;
IF anonymous IS True THEN
RETURN True;
END IF;
ELSE ELSE
SELECT (a.email) is not null into rec SELECT EXISTS (
--SELECT a.email, a.preferences SELECT a.email
FROM auth.accounts a, jsonb_each_text(a.preferences) FROM auth.accounts a, jsonb_each_text(a.preferences) as prefs
WHERE a.public_id = _id WHERE a.preferences->>'public_vessel'::text ~* vessel
AND key = _type::TEXT AND prefs.key = _type::TEXT
AND value::BOOLEAN = true; AND prefs.value::BOOLEAN = true
IF FOUND THEN ) into anonymous;
RETURN True; RAISE WARNING '-> ispublic_fn output boat:[%], type:[%], result:[%]', boat, _type, anonymous;
END IF; IF anonymous IS True THEN
RETURN True;
END IF;
END IF; END IF;
RETURN False; RETURN False;
END END
@@ -337,4 +353,4 @@ $ispublic$ language plpgsql security definer;
-- Description -- Description
COMMENT ON FUNCTION COMMENT ON FUNCTION
api.ispublic_fn api.ispublic_fn
IS 'Is web page publicly accessible?'; IS 'Is web page publicly accessible by register boat name and/or logbook id';

View File

@@ -12,8 +12,8 @@ DROP TABLE IF EXISTS auth.otp;
CREATE TABLE IF NOT EXISTS auth.otp ( CREATE TABLE IF NOT EXISTS auth.otp (
-- update email type to CITEXT, https://www.postgresql.org/docs/current/citext.html -- update email type to CITEXT, https://www.postgresql.org/docs/current/citext.html
user_email CITEXT NOT NULL PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT, user_email CITEXT NOT NULL PRIMARY KEY REFERENCES auth.accounts(email) ON DELETE RESTRICT,
otp_pass VARCHAR(10) NOT NULL, otp_pass TEXT NOT NULL,
otp_timestamp TIMESTAMP WITHOUT TIME ZONE DEFAULT NOW(), otp_timestamp TIMESTAMPTZ DEFAULT NOW(),
otp_tries SMALLINT NOT NULL DEFAULT '0' otp_tries SMALLINT NOT NULL DEFAULT '0'
); );
-- Description -- Description
@@ -22,7 +22,8 @@ COMMENT ON TABLE
IS 'Stores temporal otp code for up to 15 minutes'; IS 'Stores temporal otp code for up to 15 minutes';
-- Indexes -- Indexes
CREATE INDEX otp_pass_idx ON auth.otp (otp_pass); CREATE INDEX otp_pass_idx ON auth.otp (otp_pass);
CREATE INDEX otp_user_email_idx ON auth.otp (user_email); -- Duplicate Indexes
--CREATE INDEX otp_user_email_idx ON auth.otp (user_email);
DROP FUNCTION IF EXISTS public.generate_uid_fn; DROP FUNCTION IF EXISTS public.generate_uid_fn;
CREATE OR REPLACE FUNCTION public.generate_uid_fn(size INT) RETURNS TEXT CREATE OR REPLACE FUNCTION public.generate_uid_fn(size INT) RETURNS TEXT

View File

@@ -38,6 +38,20 @@ grant execute on function api.pushover_fn(text,text) to api_anonymous;
grant execute on function api.telegram_fn(text,text) to api_anonymous; grant execute on function api.telegram_fn(text,text) to api_anonymous;
grant execute on function api.telegram_otp_fn(text) to api_anonymous; grant execute on function api.telegram_otp_fn(text) to api_anonymous;
--grant execute on function api.generate_otp_fn(text) to api_anonymous; --grant execute on function api.generate_otp_fn(text) to api_anonymous;
grant execute on function api.ispublic_fn(text,text,integer) to api_anonymous;
grant execute on function api.timelapse_fn to api_anonymous;
grant execute on function api.stats_logs_fn to api_anonymous;
grant execute on function api.stats_stays_fn to api_anonymous;
grant execute on function api.status_fn to api_anonymous;
-- Allow read on TABLES on API schema
--GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO api_anonymous;
-- Allow read on VIEWS on API schema
--GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO api_anonymous;
--GRANT SELECT ON TABLE api.log_view,api.moorage_view,api.stay_view,api.vessels_view TO api_anonymous;
GRANT SELECT ON ALL TABLES IN SCHEMA api TO api_anonymous;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO api_anonymous;
--grant execute on function public.st_asgeojson(record,text,integer,boolean) to api_anonymous;
--grant execute on function public.st_makepoint(float,float) to api_anonymous;
-- authenticator -- authenticator
-- login role -- login role
@@ -79,6 +93,7 @@ GRANT SELECT ON TABLE auth.accounts TO grafana_auth;
GRANT SELECT ON TABLE auth.vessels TO grafana_auth; GRANT SELECT ON TABLE auth.vessels TO grafana_auth;
-- GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana_auth; -- GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana_auth;
GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana_auth; GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana_auth;
GRANT ALL ON SCHEMA public TO grafana_auth; -- Important if grafana database in pg
-- User: -- User:
-- nologin, web api only -- nologin, web api only
@@ -96,9 +111,11 @@ GRANT SELECT ON TABLE public.process_queue TO user_role;
-- To check? -- To check?
GRANT SELECT ON TABLE auth.vessels TO user_role; GRANT SELECT ON TABLE auth.vessels TO user_role;
-- Allow users to update certain columns on specific TABLES on API schema -- Allow users to update certain columns on specific TABLES on API schema
GRANT UPDATE (name, notes) ON api.logbook TO user_role; GRANT UPDATE (name, _from, _to, notes) ON api.logbook TO user_role;
GRANT UPDATE (name, notes, stay_code) ON api.stays TO user_role; GRANT UPDATE (name, notes, stay_code, active, departed) ON api.stays TO user_role;
GRANT UPDATE (name, notes, stay_code, home_flag) ON api.moorages TO user_role; GRANT UPDATE (name, notes, stay_code, home_flag) ON api.moorages TO user_role;
-- Allow users to remove logs and stays
GRANT DELETE ON api.logbook,api.stays,api.moorages TO user_role;
-- Allow EXECUTE on all FUNCTIONS on API and public schema -- Allow EXECUTE on all FUNCTIONS on API and public schema
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role; GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role; GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
@@ -106,7 +123,7 @@ GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
-- pg15 feature security_invoker=true,security_barrier=true -- pg15 feature security_invoker=true,security_barrier=true
GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO user_role; GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO user_role;
GRANT SELECT ON TABLE api.log_view,api.moorage_view,api.stay_view,api.vessels_view TO user_role; GRANT SELECT ON TABLE api.log_view,api.moorage_view,api.stay_view,api.vessels_view TO user_role;
GRANT SELECT ON TABLE api.monitoring_view,api.monitoring_view2,api.monitoring_view3 TO user_role; GRANT SELECT ON TABLE api.monitoring_view,api.monitoring_view2,api.monitoring_view3,api.explore_view TO user_role;
GRANT SELECT ON TABLE api.monitoring_humidity,api.monitoring_voltage,api.monitoring_temperatures TO user_role; GRANT SELECT ON TABLE api.monitoring_humidity,api.monitoring_voltage,api.monitoring_temperatures TO user_role;
GRANT SELECT ON TABLE api.stats_moorages_away_view,api.versions_view TO user_role; GRANT SELECT ON TABLE api.stats_moorages_away_view,api.versions_view TO user_role;
GRANT SELECT ON TABLE api.total_info_view TO user_role; GRANT SELECT ON TABLE api.total_info_view TO user_role;
@@ -114,6 +131,7 @@ GRANT SELECT ON TABLE api.stats_logs_view TO user_role;
GRANT SELECT ON TABLE api.stats_moorages_view TO user_role; GRANT SELECT ON TABLE api.stats_moorages_view TO user_role;
GRANT SELECT ON TABLE api.eventlogs_view TO user_role; GRANT SELECT ON TABLE api.eventlogs_view TO user_role;
GRANT SELECT ON TABLE api.vessels_view TO user_role; GRANT SELECT ON TABLE api.vessels_view TO user_role;
GRANT SELECT ON TABLE api.moorages_stays_view TO user_role;
-- Vessel: -- Vessel:
-- nologin -- nologin
@@ -136,7 +154,11 @@ GRANT EXECUTE ON FUNCTION public.trip_in_progress_fn(text) to vessel_role;
GRANT EXECUTE ON FUNCTION public.stay_in_progress_fn(text) to vessel_role; GRANT EXECUTE ON FUNCTION public.stay_in_progress_fn(text) to vessel_role;
-- hypertable get_partition_hash ?!? -- hypertable get_partition_hash ?!?
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA _timescaledb_internal TO vessel_role; GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA _timescaledb_internal TO vessel_role;
-- on metrics st_makepoint
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO vessel_role;
-- Oauth registration
GRANT EXECUTE ON FUNCTION api.oauth() TO vessel_role;
GRANT EXECUTE ON FUNCTION api.oauth_vessel(text,text) TO vessel_role;
--- Scheduler: --- Scheduler:
-- TODO: currently cron function are run as super user, switch to scheduler role. -- TODO: currently cron function are run as super user, switch to scheduler role.
@@ -208,6 +230,10 @@ CREATE POLICY api_scheduler_role ON api.metrics TO scheduler
CREATE POLICY grafana_role ON api.metrics TO grafana CREATE POLICY grafana_role ON api.metrics TO grafana
USING (vessel_id = current_setting('vessel.id', false)) USING (vessel_id = current_setting('vessel.id', false))
WITH CHECK (false); WITH CHECK (false);
-- Allow anonymous to select based on the vessel.id
CREATE POLICY api_anonymous_role ON api.metrics TO api_anonymous
USING (vessel_id = current_setting('vessel.id', false))
WITH CHECK (false);
-- Be sure to enable row level security on the table -- Be sure to enable row level security on the table
ALTER TABLE api.logbook ENABLE ROW LEVEL SECURITY; ALTER TABLE api.logbook ENABLE ROW LEVEL SECURITY;
@@ -232,6 +258,10 @@ CREATE POLICY api_scheduler_role ON api.logbook TO scheduler
CREATE POLICY grafana_role ON api.logbook TO grafana CREATE POLICY grafana_role ON api.logbook TO grafana
USING (vessel_id = current_setting('vessel.id', false)) USING (vessel_id = current_setting('vessel.id', false))
WITH CHECK (false); WITH CHECK (false);
-- Allow anonymous to select based on the vessel.id
CREATE POLICY api_anonymous_role ON api.logbook TO api_anonymous
USING (vessel_id = current_setting('vessel.id', false))
WITH CHECK (false);
-- Be sure to enable row level security on the table -- Be sure to enable row level security on the table
ALTER TABLE api.stays ENABLE ROW LEVEL SECURITY; ALTER TABLE api.stays ENABLE ROW LEVEL SECURITY;
@@ -255,6 +285,10 @@ CREATE POLICY api_scheduler_role ON api.stays TO scheduler
CREATE POLICY grafana_role ON api.stays TO grafana CREATE POLICY grafana_role ON api.stays TO grafana
USING (vessel_id = current_setting('vessel.id', false)) USING (vessel_id = current_setting('vessel.id', false))
WITH CHECK (false); WITH CHECK (false);
-- Allow anonymous to select based on the vessel.id
CREATE POLICY api_anonymous_role ON api.stays TO api_anonymous
USING (vessel_id = current_setting('vessel.id', false))
WITH CHECK (false);
-- Be sure to enable row level security on the table -- Be sure to enable row level security on the table
ALTER TABLE api.moorages ENABLE ROW LEVEL SECURITY; ALTER TABLE api.moorages ENABLE ROW LEVEL SECURITY;
@@ -278,6 +312,10 @@ CREATE POLICY api_scheduler_role ON api.moorages TO scheduler
CREATE POLICY grafana_role ON api.moorages TO grafana CREATE POLICY grafana_role ON api.moorages TO grafana
USING (vessel_id = current_setting('vessel.id', false)) USING (vessel_id = current_setting('vessel.id', false))
WITH CHECK (false); WITH CHECK (false);
-- Allow anonymous to select based on the vessel.id
CREATE POLICY api_anonymous_role ON api.moorages TO api_anonymous
USING (vessel_id = current_setting('vessel.id', false))
WITH CHECK (false);
-- Be sure to enable row level security on the table -- Be sure to enable row level security on the table
ALTER TABLE auth.vessels ENABLE ROW LEVEL SECURITY; ALTER TABLE auth.vessels ENABLE ROW LEVEL SECURITY;

View File

@@ -10,19 +10,23 @@ CREATE EXTENSION IF NOT EXISTS pg_cron; -- provides a simple cron-based job sche
-- TRUNCATE table jobs -- TRUNCATE table jobs
--TRUNCATE TABLE cron.job CONTINUE IDENTITY RESTRICT; --TRUNCATE TABLE cron.job CONTINUE IDENTITY RESTRICT;
-- Create a every 5 minutes or minute job cron_process_new_logbook_fn ?? -- Create a every 5 minutes or minute job cron_process_pre_logbook_fn ??
SELECT cron.schedule('cron_new_logbook', '*/5 * * * *', 'select public.cron_process_new_logbook_fn()'); SELECT cron.schedule('cron_pre_logbook', '*/5 * * * *', 'select public.cron_process_pre_logbook_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_pre_logbook';
-- Create a every 6 minutes or minute job cron_process_new_logbook_fn ??
SELECT cron.schedule('cron_new_logbook', '*/6 * * * *', 'select public.cron_process_new_logbook_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_logbook'; --UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_logbook';
-- Create a every 5 minute job cron_process_new_stay_fn -- Create a every 7 minute job cron_process_new_stay_fn
SELECT cron.schedule('cron_new_stay', '*/6 * * * *', 'select public.cron_process_new_stay_fn()'); SELECT cron.schedule('cron_new_stay', '*/7 * * * *', 'select public.cron_process_new_stay_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_stay'; --UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_stay';
-- Create a every 6 minute job cron_process_new_moorage_fn, delay from stay to give time to generate geo reverse location, eg: name -- Create a every 6 minute job cron_process_new_moorage_fn, delay from stay to give time to generate geo reverse location, eg: name
SELECT cron.schedule('cron_new_moorage', '*/7 * * * *', 'select public.cron_process_new_moorage_fn()'); --SELECT cron.schedule('cron_new_moorage', '*/7 * * * *', 'select public.cron_process_new_moorage_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_moorage'; --UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_moorage';
-- Create a every 10 minute job cron_process_monitor_offline_fn -- Create a every 11 minute job cron_process_monitor_offline_fn
SELECT cron.schedule('cron_monitor_offline', '*/11 * * * *', 'select public.cron_process_monitor_offline_fn()'); SELECT cron.schedule('cron_monitor_offline', '*/11 * * * *', 'select public.cron_process_monitor_offline_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_monitor_offline'; --UPDATE cron.job SET database = 'signalk' where jobname = 'cron_monitor_offline';
@@ -42,18 +46,25 @@ SELECT cron.schedule('cron_monitor_online', '*/10 * * * *', 'select public.cron_
--SELECT cron.schedule('cron_new_account_otp', '*/6 * * * *', 'select public.cron_process_new_account_otp_validation_fn()'); --SELECT cron.schedule('cron_new_account_otp', '*/6 * * * *', 'select public.cron_process_new_account_otp_validation_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_account_otp'; --UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_account_otp';
-- Create a every 5 minute job cron_process_grafana_fn
SELECT cron.schedule('cron_grafana', '*/5 * * * *', 'select public.cron_process_grafana_fn()');
-- Notification -- Notification
-- Create a every 1 minute job cron_process_new_notification_queue_fn, new_account, new_vessel, _new_account_otp -- Create a every 1 minute job cron_process_new_notification_queue_fn, new_account, new_vessel, _new_account_otp
SELECT cron.schedule('cron_new_notification', '*/2 * * * *', 'select public.cron_process_new_notification_fn()'); SELECT cron.schedule('cron_new_notification', '*/1 * * * *', 'select public.cron_process_new_notification_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_notification'; --UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_notification';
-- Maintenance -- Maintenance
-- Vacuum database at At 01:01 on Sunday. -- Vacuum database schema api at "At 01:31 on Sunday."
SELECT cron.schedule('cron_vacuum', '1 1 * * 0', 'VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.logbook,api.stays,api.moorages,api.metadata,api.metrics;'); SELECT cron.schedule('cron_vacuum_api', '31 1 * * 0', 'VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) api.logbook,api.stays,api.moorages,api.metadata,api.metrics;');
-- Remove all jobs log at At 02:02 on Sunday. -- Vacuum database schema auth at "At 01:01 on Sunday."
SELECT cron.schedule('cron_vacuum_auth', '1 1 * * 0', 'VACUUM (FULL, VERBOSE, ANALYZE, INDEX_CLEANUP) auth.accounts,auth.vessels,auth.otp;');
-- Remove old jobs log at "At 02:02 on Sunday."
SELECT cron.schedule('job_run_details_cleanup', '2 2 * * 0', 'select public.job_run_details_cleanup_fn()'); SELECT cron.schedule('job_run_details_cleanup', '2 2 * * 0', 'select public.job_run_details_cleanup_fn()');
-- Rebuilding indexes at first day of each month at 23:01.” -- Rebuilding indexes schema api at "first day of each month at 23:15."
SELECT cron.schedule('cron_reindex', '1 23 1 * *', 'REINDEX TABLE api.logbook; REINDEX TABLE api.stays; REINDEX TABLE api.moorages; REINDEX TABLE api.metadata; REINDEX TABLE api.metrics;'); SELECT cron.schedule('cron_reindex_api', '15 23 1 * *', 'REINDEX TABLE CONCURRENTLY api.logbook; REINDEX TABLE CONCURRENTLY api.stays; REINDEX TABLE CONCURRENTLY api.moorages; REINDEX TABLE CONCURRENTLY api.metadata;');
-- Rebuilding indexes schema auth at "first day of each month at 23:01."
SELECT cron.schedule('cron_reindex_auth', '1 23 1 * *', 'REINDEX TABLE CONCURRENTLY auth.accounts; REINDEX TABLE CONCURRENTLY auth.vessels; REINDEX TABLE CONCURRENTLY auth.otp;');
-- Any other maintenance require? -- Any other maintenance require?
-- OTP -- OTP
@@ -74,9 +85,9 @@ SELECT cron.schedule('cron_no_activity', '5 8 */4 * 0', 'select public.cron_proc
-- Cron job settings -- Cron job settings
UPDATE cron.job SET database = 'signalk'; UPDATE cron.job SET database = 'signalk';
UPDATE cron.job SET username = 'username'; -- TODO update to scheduler, pending process_queue update UPDATE cron.job SET username = 'username'; -- TODO update to scheduler, pending process_queue update
--UPDATE cron.job SET username = 'username' where jobname = 'cron_vacuum'; -- TODO Update to superuser for vaccuum permissions --UPDATE cron.job SET username = 'username' where jobname = 'cron_vacuum'; -- TODO Update to superuser for vacuum permissions
UPDATE cron.job SET nodename = '/var/run/postgresql/'; -- VS default localhost ?? UPDATE cron.job SET nodename = '/var/run/postgresql/'; -- VS default localhost ??
UPDATE cron.job SET database = 'postgresql' WHERE jobname = 'job_run_details_cleanup_fn'; UPDATE cron.job SET database = 'postgres' WHERE jobname = 'job_run_details_cleanup';
-- check job lists -- check job lists
SELECT * FROM cron.job; SELECT * FROM cron.job;
-- unschedule by job id -- unschedule by job id

View File

@@ -18,7 +18,7 @@ select current_database();
\c signalk \c signalk
CREATE TABLE public.ne_10m_geography_marine_polys ( CREATE TABLE public.ne_10m_geography_marine_polys (
gid serial4 NOT NULL, gid INT GENERATED ALWAYS AS IDENTITY NOT NULL,
featurecla TEXT NULL, featurecla TEXT NULL,
"name" TEXT NULL, "name" TEXT NULL,
namealt TEXT NULL, namealt TEXT NULL,

View File

@@ -17,6 +17,8 @@ INSERT INTO app_settings (name, value) VALUES
('app.pushover_app_token', '${PGSAIL_PUSHOVER_APP_TOKEN}'), ('app.pushover_app_token', '${PGSAIL_PUSHOVER_APP_TOKEN}'),
('app.pushover_app_url', '${PGSAIL_PUSHOVER_APP_URL}'), ('app.pushover_app_url', '${PGSAIL_PUSHOVER_APP_URL}'),
('app.telegram_bot_token', '${PGSAIL_TELEGRAM_BOT_TOKEN}'), ('app.telegram_bot_token', '${PGSAIL_TELEGRAM_BOT_TOKEN}'),
('app.grafana_admin_uri', '${PGSAIL_GRAFANA_ADMIN_URI}'),
('app.keycloak_uri', '${PGSAIL_KEYCLOAK_URI}'),
('app.url', '${PGSAIL_APP_URL}'), ('app.url', '${PGSAIL_APP_URL}'),
('app.version', '${PGSAIL_VERSION}'); ('app.version', '${PGSAIL_VERSION}');
-- Update comment with version -- Update comment with version

View File

@@ -1 +1 @@
0.4.0 0.6.0

File diff suppressed because one or more lines are too long

View File

@@ -59,7 +59,7 @@ const fs = require('fs');
user_views: [ user_views: [
// not processed yet, { url: '/stays_view', res_body_length: 1}, // not processed yet, { url: '/stays_view', res_body_length: 1},
// not processed yet, { url: '/moorages_view', res_body_length: 1}, // not processed yet, { url: '/moorages_view', res_body_length: 1},
{ url: '/logs_view', res_body_length: 2}, { url: '/logs_view', res_body_length: 0},
{ url: '/log_view', res_body_length: 2}, { url: '/log_view', res_body_length: 2},
//{ url: '/stats_view', res_body_length: 1}, //{ url: '/stats_view', res_body_length: 1},
{ url: '/vessels_view', res_body_length: 1}, { url: '/vessels_view', res_body_length: 1},
@@ -198,7 +198,7 @@ const fs = require('fs');
user_views: [ user_views: [
// not processed yet, { url: '/stays_view', res_body_length: 1}, // not processed yet, { url: '/stays_view', res_body_length: 1},
// not processed yet, { url: '/moorages_view', res_body_length: 1}, // not processed yet, { url: '/moorages_view', res_body_length: 1},
{ url: '/logs_view', res_body_length: 1}, { url: '/logs_view', res_body_length: 0},
{ url: '/log_view', res_body_length: 1}, { url: '/log_view', res_body_length: 1},
//{ url: '/stats_view', res_body_length: 1}, //{ url: '/stats_view', res_body_length: 1},
{ url: '/vessels_view', res_body_length: 1}, { url: '/vessels_view', res_body_length: 1},
@@ -604,8 +604,12 @@ request.set('User-Agent', 'PostgSail unit tests');
// Override client_id // Override client_id
data[i]['client_id'] = test.vessel_metadata.client_id; data[i]['client_id'] = test.vessel_metadata.client_id;
} }
// Force last entry to be back in time from previous, it should be ignore silently // The last entry are invalid and should be ignore.
data.at(-1).time = moment.utc(data.at(-2).time).subtract(1, 'minutes').format(); // - Invalid status
// - Invalid speedoverground
// - Invalid time previous time is duplicate
// Force last valid entry to be back in time from previous, it should be ignore silently
data.at(-1).time = moment.utc(data.at(-3).time).subtract(1, 'minutes').format();
//console.log(data[0]); //console.log(data[0]);
it('/metrics?select=time', function(done) { it('/metrics?select=time', function(done) {
@@ -625,7 +629,7 @@ request.set('User-Agent', 'PostgSail unit tests');
res.header['content-type'].should.match(new RegExp('json','g')); res.header['content-type'].should.match(new RegExp('json','g'));
res.header['server'].should.match(new RegExp('postgrest','g')); res.header['server'].should.match(new RegExp('postgrest','g'));
should.exist(res.body); should.exist(res.body);
res.body.length.should.match(test.vessel_metrics['metrics'].length-1); res.body.length.should.match(test.vessel_metrics['metrics'].length-3);
done(err); done(err);
}); });
}); });

View File

@@ -47,7 +47,7 @@ const metrics_simulator = require('./metrics_sample_simulator.json');
user_views: [ user_views: [
// not processed yet, { url: '/stays_view', res_body_length: 1}, // not processed yet, { url: '/stays_view', res_body_length: 1},
// not processed yet, { url: '/moorages_view', res_body_length: 1}, // not processed yet, { url: '/moorages_view', res_body_length: 1},
{ url: '/logs_view', res_body_length: 2}, { url: '/logs_view', res_body_length: 1},
{ url: '/log_view', res_body_length: 2}, { url: '/log_view', res_body_length: 2},
//{ url: '/stats_view', res_body_length: 1}, //{ url: '/stats_view', res_body_length: 1},
{ url: '/vessels_view', res_body_length: 1}, { url: '/vessels_view', res_body_length: 1},
@@ -211,7 +211,7 @@ request.set('User-Agent', 'PostgSail unit tests');
res.header['content-type'].should.match(new RegExp('json','g')); res.header['content-type'].should.match(new RegExp('json','g'));
res.header['server'].should.match(new RegExp('postgrest','g')); res.header['server'].should.match(new RegExp('postgrest','g'));
should.exist(res.body.token); should.exist(res.body.token);
res.body.token.should.match(user_jwt); //res.body.token.should.match(user_jwt);
console.log(user_jwt); console.log(user_jwt);
should.exist(user_jwt); should.exist(user_jwt);
done(err); done(err);

View File

@@ -43,7 +43,7 @@ var moment = require('moment');
}, },
user_tables: [ user_tables: [
{ url: '/stays', res_body_length: 3}, { url: '/stays', res_body_length: 3},
{ url: '/moorages', res_body_length: 2}, { url: '/moorages', res_body_length: 3},
{ url: '/logbook', res_body_length: 2}, { url: '/logbook', res_body_length: 2},
{ url: '/metadata', res_body_length: 1} { url: '/metadata', res_body_length: 1}
], ],
@@ -241,7 +241,7 @@ var moment = require('moment');
}, },
user_tables: [ user_tables: [
{ url: '/stays', res_body_length: 3}, { url: '/stays', res_body_length: 3},
{ url: '/moorages', res_body_length: 2}, { url: '/moorages', res_body_length: 4},
{ url: '/logbook', res_body_length: 2}, { url: '/logbook', res_body_length: 2},
{ url: '/metadata', res_body_length: 1} { url: '/metadata', res_body_length: 1}
], ],
@@ -684,7 +684,7 @@ request.set('User-Agent', 'PostgSail unit tests');
.set('Authorization', `Bearer ${user_jwt}`) .set('Authorization', `Bearer ${user_jwt}`)
.set('Accept', 'application/json') .set('Accept', 'application/json')
.end(function(err,res){ .end(function(err,res){
//console.log(res.body); console.log(res.body);
res.status.should.equal(200); res.status.should.equal(200);
should.exist(res.header['content-type']); should.exist(res.header['content-type']);
should.exist(res.header['server']); should.exist(res.header['server']);

View File

@@ -3,7 +3,7 @@
* Unit test #4 * Unit test #4
* OTP for email, Pushover, Telegram * OTP for email, Pushover, Telegram
* *
* process.env.PGSAIL_API_URI = from inside the docker * process.env.PGSAIL_API_URI = from inside the docker
* *
* npm install supertest should mocha mochawesome moment * npm install supertest should mocha mochawesome moment
* alias mocha="./node_modules/mocha/bin/_mocha" * alias mocha="./node_modules/mocha/bin/_mocha"
@@ -154,6 +154,16 @@ var moment = require("moment");
payload: null, payload: null,
res: {}, res: {},
}, },
public: [
{
url: "/rpc/update_user_preferences_fn",
payload: { key: "{public_logs}", value: true },
},
{
url: "/rpc/update_user_preferences_fn",
payload: { key: "{public_monitoring}", value: true },
},
],
}, },
{ {
cname: process.env.PGSAIL_API_URI, cname: process.env.PGSAIL_API_URI,
@@ -285,6 +295,16 @@ var moment = require("moment");
payload: null, payload: null,
res: {}, res: {},
}, },
public: [
{
url: "/rpc/update_user_preferences_fn",
payload: { key: "{public_logs}", value: true },
},
{
url: "/rpc/update_user_preferences_fn",
payload: { key: "{public_monitoring}", value: true },
},
],
}, },
].forEach(function (test) { ].forEach(function (test) {
//console.log(`${test.cname}`); //console.log(`${test.cname}`);
@@ -581,91 +601,127 @@ var moment = require("moment");
}); // Function endpoint }); // Function endpoint
*/ */
describe("Badges, user jwt", function () { describe("Badges, user jwt", function () {
it("/rpc/settings_fn return user settings", function (done) { it("/rpc/settings_fn return user settings", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.post("/rpc/settings_fn")
.set("Authorization", `Bearer ${user_jwt}`)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
console.log(res.body);
should.exist(res.body.settings);
should.exist(res.body.settings.preferences.badges)
let badges = res.body.settings.preferences.badges;
//console.log(Object.keys(badges));
Object.keys(badges).length.should.be.aboveOrEqual(3);
(badges).should.have.properties('Helmsman', 'Wake Maker', 'Stormtrooper');
done(err);
});
});
}); // user JWT
describe("Function monitoring endpoint, JWT user_role", function () {
let otp = null;
test.monitoring.forEach(function (subtest) {
it(`${subtest.url}`, function (done) {
try {
// Reset agent so we do not save cookies // Reset agent so we do not save cookies
request = supertest.agent(test.cname); request = supertest.agent(test.cname);
request request
.get(subtest.url) .post("/rpc/settings_fn")
.set("Authorization", `Bearer ${user_jwt}`) .set("Authorization", `Bearer ${user_jwt}`)
.set("Accept", "application/json") .set("Accept", "application/json")
.end(function (err, res) { .end(function (err, res) {
res.status.should.equal(200); res.status.should.equal(200);
should.exist(res.header["content-type"]); should.exist(res.header["content-type"]);
should.exist(res.header["server"]); should.exist(res.header["server"]);
res.header["content-type"].should.match( res.header["content-type"].should.match(new RegExp("json", "g"));
new RegExp("json", "g") res.header["server"].should.match(new RegExp("postgrest", "g"));
console.log(res.body);
should.exist(res.body.settings);
should.exist(res.body.settings.preferences.badges);
let badges = res.body.settings.preferences.badges;
//console.log(Object.keys(badges));
Object.keys(badges).length.should.be.aboveOrEqual(3);
badges.should.have.properties(
"Helmsman",
"Wake Maker",
"Stormtrooper"
); );
done(err);
});
});
}); // user JWT
describe("Function monitoring endpoint, JWT user_role", function () {
let otp = null;
test.monitoring.forEach(function (subtest) {
it(`${subtest.url}`, function (done) {
try {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get(subtest.url)
.set("Authorization", `Bearer ${user_jwt}`)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(
new RegExp("json", "g")
);
res.header["server"].should.match(new RegExp("postgrest", "g"));
//console.log(res.body);
should.exist(res.body);
//let monitoring = res.body;
//console.log(monitoring);
// minimum set for static monitoring page
// no value for humidity monitoring
//monitoring.length.should.be.aboveOrEqual(21);
done(err);
});
} catch (error) {
done();
}
});
});
}); // Monitoring endpoint
describe("Event Logs, user jwt", function () {
it("/eventlogs_view endpoint, list process_queue, JWT user_role", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get("/eventlogs_view")
.set("Authorization", `Bearer ${user_jwt}`)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g")); res.header["server"].should.match(new RegExp("postgrest", "g"));
//console.log(res.body); //console.log(res.body);
should.exist(res.body); should.exist(res.body);
//let monitoring = res.body; let event = res.body;
//console.log(monitoring); //console.log(event);
// minimum set for static monitoring page // minimum events log for kapla & aava 13 + 4 email_otp = 17
// no value for humidity monitoring event.length.should.be.aboveOrEqual(13);
//monitoring.length.should.be.aboveOrEqual(21);
done(err); done(err);
}); });
} catch (error) {
done();
}
});
});
}); // Monitoring endpoint
describe("Event Logs, user jwt", function () {
it("/eventlogs_view endpoint, list process_queue, JWT user_role", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get("/eventlogs_view")
.set("Authorization", `Bearer ${user_jwt}`)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
//console.log(res.body);
should.exist(res.body);
let event = res.body;
//console.log(event);
// minimum events log for kapla & aava 13 + 4 email_otp = 17
event.length.should.be.aboveOrEqual(13);
done(err);
}); });
}); }); // user JWT
}); // user JWT
describe("Function update preference for public access endpoint, JWT user_role", function () {
test.public.forEach(function (subtest) {
it(`${subtest.url}`, function (done) {
try {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.post(subtest.url)
.send(subtest.payload)
.set("Authorization", `Bearer ${user_jwt}`)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["server"].should.match(new RegExp("postgrest", "g"));
//console.log(res.body);
should.exist(res.body);
//let monitoring = res.body;
//console.log(monitoring);
// minimum set for static monitoring page
// no value for humidity monitoring
//monitoring.length.should.be.aboveOrEqual(21);
done(err);
});
} catch (error) {
done();
}
});
});
}); // Monitoring endpoint
}); // OpenAPI description }); // OpenAPI description
}); // Users Array }); // Users Array

223
tests/index5.js Normal file
View File

@@ -0,0 +1,223 @@
"use strict";
/*
* Unit test #5
* Public/Anonymous access
*
* process.env.PGSAIL_API_URI = from inside the docker
*
* npm install supertest should mocha mochawesome moment
* alias mocha="./node_modules/mocha/bin/_mocha"
* mocha index5.js --reporter mochawesome --reporter-options reportDir=/mnt/postgsail/,reportFilename=report_api.html
*
*/
const sleep = (ms) => new Promise((r) => setTimeout(r, ms));
const supertest = require("supertest");
// Deprecated
const should = require("should");
//const chai = require("chai");
//const should = chai.should();
let request = null;
var moment = require("moment");
// Users Array
[
{
cname: process.env.PGSAIL_API_URI,
name: "PostgSail unit test kapla",
logs: {
url: "/logs_view",
header: { name: "x-is-public", value: btoa("kapla,public_logs_list,0") },
payload: null,
res: {},
},
log: {
url: "/log_view?id=eq.1",
header: { name: "x-is-public", value: btoa("kapla,public_logs,1") },
payload: null,
res: {},
},
monitoring: {
url: "/monitoring_view",
header: { name: "x-is-public", value: btoa("kapla,public_monitoring,0") },
payload: null,
res: {},
},
timelapse: {
url: "/rpc/timelapse_fn",
header: { name: "x-is-public", value: btoa("kapla,public_timelapse,1") },
payload: null,
res: {},
},
timelapse_full: {
url: "/rpc/timelapse_fn",
header: { name: "x-is-public", value: btoa("kapla,public_timelapse,0") },
payload: null,
res: {},
},
stats_logs: {
url: "/rpc/stats_logs_fn",
header: { name: "x-is-public", value: btoa("kapla,public_stats,0") },
payload: null,
res: {},
},
stats_stays: {
url: "/rpc/stats_stay_fn",
header: { name: "x-is-public", value: btoa("kapla,public_stats,0") },
payload: null,
res: {},
},
export_gpx: {
url: "/rpc/export_logbook_gpx_fn",
header: { name: "x-is-public", value: btoa("kapla,public_logs,0") },
payload: null,
res: {},
},
},
{
cname: process.env.PGSAIL_API_URI,
name: "PostgSail unit test, aava",
logs: {
url: "/logs_view",
header: { name: "x-is-public", value: btoa("aava,public_logs_list,0") },
payload: null,
res: {},
},
log: {
url: "/log_view?id=eq.3",
header: { name: "x-is-public", value: btoa("aava,public_logs,3") },
payload: null,
res: {},
},
monitoring: {
url: "/monitoring_view",
header: { name: "x-is-public", value: btoa("aava,public_monitoring,0") },
payload: null,
res: {},
},
timelapse: {
url: "/rpc/timelapse_fn",
header: { name: "x-is-public", value: btoa("aava,public_timelapse,3") },
payload: null,
res: {},
},
timelapse_full: {
url: "/rpc/timelapse_fn",
header: { name: "x-is-public", value: btoa("aava,public_timelapse,0") },
payload: null,
res: {},
},
stats_logs: {
url: "/rpc/stats_logs_fn",
header: { name: "x-is-public", value: btoa("aava,public_stats,0") },
payload: null,
res: {},
},
stats_stays: {
url: "/rpc/stats_stay_fn",
header: { name: "x-is-public", value: btoa("kapla,public_stats,0") },
payload: null,
res: {},
},
export_gpx: {
url: "/rpc/export_logbook_gpx_fn",
header: { name: "x-is-public", value: btoa("aava,public_logs,0") },
payload: null,
res: {},
},
},
].forEach(function (test) {
//console.log(`${test.cname}`);
describe(`${test.name}`, function () {
request = supertest.agent(test.cname);
request.set("User-Agent", "PostgSail unit tests");
describe("With no JWT as api_anonymous", function () {
it("/logs_view, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get(test.logs.url)
.set(test.logs.header.name, test.logs.header.value)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(404);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
done(err);
});
});
it("/log_view, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get(test.log.url)
.set(test.log.header.name, test.log.header.value)
.set("Accept", "application/json")
.end(function (err, res) {
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
done(err);
});
});
it("/monitoring_view, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get(test.monitoring.url)
.set(test.monitoring.header.name, test.monitoring.header.value)
.set("Accept", "application/json")
.end(function (err, res) {
console.log(res.text);
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
done(err);
});
});
it("/rpc/timelapse_fn, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.post(test.timelapse.url)
.set(test.timelapse.header.name, test.timelapse.header.value)
.set("Accept", "application/json")
.end(function (err, res) {
console.log(res.text);
res.status.should.equal(404); // return 404 as it is not enable in user settings.
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
done(err);
});
});
it("/rpc/export_logbook_gpx_fn, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.post(test.export_gpx.url)
.send({_id: 1})
.set(test.export_gpx.header.name, test.export_gpx.header.value)
.set("Accept", "application/json")
.end(function (err, res) {
console.log(res.text)
res.status.should.equal(401);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
done(err);
});
});
}); // user JWT
}); // OpenAPI description
}); // Users Array

View File

@@ -21,7 +21,19 @@
"courseovergroundtrue" : 197.4, "courseovergroundtrue" : 197.4,
"windspeedapparent" : 15.4, "windspeedapparent" : 15.4,
"anglespeedapparent" : 43.0, "anglespeedapparent" : 43.0,
"status" : "moored", "status" : "sailing",
"metrics" : {"navigation.log": 17441395, "navigation.trip.log": 80284, "navigation.headingTrue": 3.4924, "navigation.gnss.satellites": 11, "environment.depth.belowKeel": 32.289, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.34, "environment.water.temperature": 313.15, "electrical.batteries.1.current": 231, "electrical.batteries.1.voltage": 14.45, "navigation.gnss.antennaAltitude": -0.04, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 32.29, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 57, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 4, "network.n2k.ngt-1.130356.ch1.bandwidth": 3, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 5, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 11, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0}
},
{
"time" : "2022-07-31T11:29:13.340Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
"latitude" : 59.7213961,
"longitude" : 24.7349507,
"speedoverground" : 6.5,
"courseovergroundtrue" : 197.4,
"windspeedapparent" : 15.4,
"anglespeedapparent" : 43.0,
"status" : "sailing",
"metrics" : {"navigation.log": 17441395, "navigation.trip.log": 80284, "navigation.headingTrue": 3.4924, "navigation.gnss.satellites": 11, "environment.depth.belowKeel": 32.289, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.34, "environment.water.temperature": 313.15, "electrical.batteries.1.current": 231, "electrical.batteries.1.voltage": 14.45, "navigation.gnss.antennaAltitude": -0.04, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 32.29, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 57, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 4, "network.n2k.ngt-1.130356.ch1.bandwidth": 3, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 5, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 11, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0} "metrics" : {"navigation.log": 17441395, "navigation.trip.log": 80284, "navigation.headingTrue": 3.4924, "navigation.gnss.satellites": 11, "environment.depth.belowKeel": 32.289, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.34, "environment.water.temperature": 313.15, 
"electrical.batteries.1.current": 231, "electrical.batteries.1.voltage": 14.45, "navigation.gnss.antennaAltitude": -0.04, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 32.29, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 57, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 4, "network.n2k.ngt-1.130356.ch1.bandwidth": 3, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 5, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 11, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0}
}, },
{ {
@@ -561,7 +573,31 @@
"courseovergroundtrue" : 197.6, "courseovergroundtrue" : 197.6,
"windspeedapparent" : 15.9, "windspeedapparent" : 15.9,
"anglespeedapparent" : 31.0, "anglespeedapparent" : 31.0,
"status" : "ais-sart",
"metrics" : {}
},
{
"time" : "2022-07-31T12:14:29.168Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
"latitude" : 59.7124174,
"longitude" : 24.7289112,
"speedoverground" : 55.7,
"courseovergroundtrue" : 197.6,
"windspeedapparent" : 15.9,
"anglespeedapparent" : 31.0,
"status" : "anchored", "status" : "anchored",
"metrics" : {"navigation.log": 17442506, "navigation.trip.log": 81321, "navigation.headingTrue": 3.571, "navigation.gnss.satellites": 10, "environment.depth.belowKeel": 13.749, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.07, "environment.water.temperature": 313.15, "electrical.batteries.1.current": 43.9, "electrical.batteries.1.voltage": 14.54, "navigation.gnss.antennaAltitude": 2.05, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 13.75, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 40, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 4, "network.n2k.ngt-1.130356.ch1.bandwidth": 3, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 5, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 10, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0} "metrics" : {}
},
{
"time" : "2022-07-31T12:14:29.168Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
"latitude" : 59.7124174,
"longitude" : 24.7289112,
"speedoverground" : 5.7,
"courseovergroundtrue" : 197.6,
"windspeedapparent" : 15.9,
"anglespeedapparent" : 31.0,
"status" : "anchored",
"metrics" : {}
} }
]} ]}

View File

@@ -12,6 +12,18 @@
"status" : "moored", "status" : "moored",
"metrics" : {"environment.wind.speedTrue": 4.44, "navigation.speedThroughWater": 3.0918118943701245, "performance.velocityMadeGood": 2.9323340761912995, "environment.wind.angleTrueWater": -0.3665191430024964, "environment.depth.belowTransducer": 13.1, "navigation.courseOverGroundMagnetic": 3.620685534088946, "navigation.courseRhumbline.crossTrackError": 0} "metrics" : {"environment.wind.speedTrue": 4.44, "navigation.speedThroughWater": 3.0918118943701245, "performance.velocityMadeGood": 2.9323340761912995, "environment.wind.angleTrueWater": -0.3665191430024964, "environment.depth.belowTransducer": 13.1, "navigation.courseOverGroundMagnetic": 3.620685534088946, "navigation.courseRhumbline.crossTrackError": 0}
}, },
{
"time" : "2022-07-30T14:52:28.000Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:123456789",
"latitude" : 60.077666666666666,
"longitude" : 23.530866666666668,
"speedoverground" : 0.0,
"courseovergroundtrue" : 207.5,
"windspeedapparent" : 14.8,
"anglespeedapparent" : -12.0,
"status" : "sailing",
"metrics" : {"environment.wind.speedTrue": 4.44, "navigation.speedThroughWater": 3.0918118943701245, "performance.velocityMadeGood": 2.9323340761912995, "environment.wind.angleTrueWater": -0.3665191430024964, "environment.depth.belowTransducer": 13.1, "navigation.courseOverGroundMagnetic": 3.620685534088946, "navigation.courseRhumbline.crossTrackError": 0, "propulsion.main.runTime":1776241 }
},
{ {
"time" : "2022-07-30T14:53:28.000Z", "time" : "2022-07-30T14:53:28.000Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:123456789", "client_id" : "vessels.urn:mrn:imo:mmsi:123456789",
@@ -21,8 +33,8 @@
"courseovergroundtrue" : 207.5, "courseovergroundtrue" : 207.5,
"windspeedapparent" : 14.8, "windspeedapparent" : 14.8,
"anglespeedapparent" : -12.0, "anglespeedapparent" : -12.0,
"status" : "moored", "status" : "sailing",
"metrics" : {"environment.wind.speedTrue": 4.44, "navigation.speedThroughWater": 3.0918118943701245, "performance.velocityMadeGood": 2.9323340761912995, "environment.wind.angleTrueWater": -0.3665191430024964, "environment.depth.belowTransducer": 13.1, "navigation.courseOverGroundMagnetic": 3.620685534088946, "navigation.courseRhumbline.crossTrackError": 0, "propulsion.main.runTime":1776241 } "metrics" : {"environment.wind.speedTrue": 4.44, "navigation.speedThroughWater": 3.0918118943701245, "performance.velocityMadeGood": 2.9323340761912995, "environment.wind.angleTrueWater": -0.3665191430024964, "environment.depth.belowTransducer": 13.1, "navigation.courseOverGroundMagnetic": 3.620685534088946, "navigation.courseRhumbline.crossTrackError": 0 }
}, },
{ {
"time" : "2022-07-30T14:54:28.016Z", "time" : "2022-07-30T14:54:28.016Z",
@@ -322,7 +334,7 @@
"windspeedapparent" : 11.1, "windspeedapparent" : 11.1,
"anglespeedapparent" : 88.0, "anglespeedapparent" : 88.0,
"status" : "sailing", "status" : "sailing",
"metrics" : {"environment.wind.speedTrue": 3.1895563635765014, "navigation.speedThroughWater": 0, "performance.velocityMadeGood": 0, "environment.wind.angleTrueWater": 1.8151424224885533, "environment.depth.belowTransducer": 1.67, "navigation.courseOverGroundMagnetic": 3.1290262836898832, "navigation.courseRhumbline.crossTrackError": 0} "metrics" : {"environment.wind.speedTrue": 3.1895563635765014, "navigation.speedThroughWater": 0, "performance.velocityMadeGood": 0, "environment.wind.angleTrueWater": 1.8151424224885533, "environment.depth.belowTransducer": 1.67, "navigation.courseOverGroundMagnetic": 3.1290262836898832, "navigation.courseRhumbline.crossTrackError": 0 }
}, },
{ {
"time" : "2022-07-30T15:19:28.467Z", "time" : "2022-07-30T15:19:28.467Z",
@@ -348,6 +360,18 @@
"status" : "moored", "status" : "moored",
"metrics" : {"environment.wind.speedTrue": 0, "navigation.speedThroughWater": 0, "performance.velocityMadeGood": 0, "environment.wind.angleTrueWater": 0.7853981635767779, "environment.depth.belowTransducer": 1.65, "navigation.courseOverGroundMagnetic": 4.206068965341505, "navigation.courseRhumbline.crossTrackError": 0} "metrics" : {"environment.wind.speedTrue": 0, "navigation.speedThroughWater": 0, "performance.velocityMadeGood": 0, "environment.wind.angleTrueWater": 0.7853981635767779, "environment.depth.belowTransducer": 1.65, "navigation.courseOverGroundMagnetic": 4.206068965341505, "navigation.courseRhumbline.crossTrackError": 0}
}, },
{
"time" : "2022-07-30T15:20:28.467Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:123456789",
"latitude" : 59.97688333333333,
"longitude" : 23.4321,
"speedoverground" : 0.0,
"courseovergroundtrue" : 241.0,
"windspeedapparent" : 4.3,
"anglespeedapparent" : 74.0,
"status" : "sailing",
"metrics" : {"environment.wind.speedTrue": 0, "navigation.speedThroughWater": 0, "performance.velocityMadeGood": 0, "environment.wind.angleTrueWater": 0.7853981635767779, "environment.depth.belowTransducer": 1.65, "navigation.courseOverGroundMagnetic": 4.206068965341505, "navigation.courseRhumbline.crossTrackError": 0, "propulsion.main.runTime":1776251}
},
{ {
"time" : "2022-07-30T15:21:28.467Z", "time" : "2022-07-30T15:21:28.467Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:123456789", "client_id" : "vessels.urn:mrn:imo:mmsi:123456789",
@@ -357,7 +381,7 @@
"courseovergroundtrue" : 241.0, "courseovergroundtrue" : 241.0,
"windspeedapparent" : 4.3, "windspeedapparent" : 4.3,
"anglespeedapparent" : 74.0, "anglespeedapparent" : 74.0,
"status" : "moored", "status" : "sailing",
"metrics" : {"environment.wind.speedTrue": 0, "navigation.speedThroughWater": 0, "performance.velocityMadeGood": 0, "environment.wind.angleTrueWater": 0.7853981635767779, "environment.depth.belowTransducer": 1.65, "navigation.courseOverGroundMagnetic": 4.206068965341505, "navigation.courseRhumbline.crossTrackError": 0} "metrics" : {"environment.wind.speedTrue": 0, "navigation.speedThroughWater": 0, "performance.velocityMadeGood": 0, "environment.wind.angleTrueWater": 0.7853981635767779, "environment.depth.belowTransducer": 1.65, "navigation.courseOverGroundMagnetic": 4.206068965341505, "navigation.courseRhumbline.crossTrackError": 0}
}, },
{ {
@@ -609,7 +633,31 @@
"courseovergroundtrue" : 122.0, "courseovergroundtrue" : 122.0,
"windspeedapparent" : 7.2, "windspeedapparent" : 7.2,
"anglespeedapparent" : 10.0, "anglespeedapparent" : 10.0,
"status" : "unknown",
"metrics" : {}
},
{
"time" : "2022-07-30T15:41:28.867Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:123456789",
"latitude" : 59.86,
"longitude" : 23.365766666666666,
"speedoverground" : 60.0,
"courseovergroundtrue" : 122.0,
"windspeedapparent" : 7.2,
"anglespeedapparent" : 10.0,
"status" : "anchored", "status" : "anchored",
"metrics" : {"environment.wind.speedTrue": 0.63, "navigation.speedThroughWater": 3.2255674838104293, "performance.velocityMadeGood": -2.242978345998959, "environment.wind.angleTrueWater": 2.3038346131585485, "environment.depth.belowTransducer": 17.73, "navigation.courseOverGroundMagnetic": 2.129127154994025, "navigation.courseRhumbline.crossTrackError": 0} "metrics" : {}
},
{
"time" : "2022-07-30T15:41:28.867Z",
"client_id" : "vessels.urn:mrn:imo:mmsi:123456789",
"latitude" : 59.86,
"longitude" : 23.365766666666666,
"speedoverground" : 0.0,
"courseovergroundtrue" : 122.0,
"windspeedapparent" : 7.2,
"anglespeedapparent" : 10.0,
"status" : "anchored",
"metrics" : {}
} }
]} ]}

20
tests/sql/anonymous.sql Normal file
View File

@@ -0,0 +1,20 @@
---------------------------------------------------------------------------
-- Listing
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
-- output display format
\x on
\echo 'Validate anonymous access'
SELECT api.ispublic_fn('kapla', 'public_test');
SELECT api.ispublic_fn('kapla', 'public_logs_list');
SELECT api.ispublic_fn('kapla', 'public_logs', 1);
SELECT api.ispublic_fn('kapla', 'public_logs', 3);
SELECT api.ispublic_fn('kapla', 'public_monitoring');
SELECT api.ispublic_fn('kapla', 'public_timelapse');

View File

@@ -0,0 +1,26 @@
current_database
------------------
signalk
(1 row)
You are now connected to database "signalk" as user "username".
Expanded display is on.
Validate anonymous access
-[ RECORD 1 ]--
ispublic_fn | f
-[ RECORD 1 ]--
ispublic_fn | f
-[ RECORD 1 ]--
ispublic_fn | t
-[ RECORD 1 ]--
ispublic_fn | f
-[ RECORD 1 ]--
ispublic_fn | t
-[ RECORD 1 ]--
ispublic_fn | f

View File

@@ -18,7 +18,7 @@ SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
\echo 'Insert new api.logbook for badges' \echo 'Insert new api.logbook for badges'
INSERT INTO api.logbook INSERT INTO api.logbook
(id, active, "name", "_from", "_from_lat", "_from_lng", "_to", "_to_lat", "_to_lng", track_geom, track_geog, track_geojson, "_from_time", "_to_time", distance, duration, avg_speed, max_speed, max_wind_speed, notes, vessel_id) (id, active, "name", "_from", "_from_lat", "_from_lng", "_to", "_to_lat", "_to_lng", track_geom, track_geog, track_geojson, "_from_time", "_to_time", distance, duration, avg_speed, max_speed, max_wind_speed, notes, vessel_id)
VALUES OVERRIDING SYSTEM VALUE VALUES
(nextval('api.logbook_id_seq'), false, 'Tropics Zone', NULL, NULL, NULL, NULL, NULL, NULL, 'SRID=4326;LINESTRING (-63.151124640791096 14.01074681627324, -77.0912026418618 12.870995731013664)'::public.geometry, NULL, NULL, NOW(), NOW(), 123, NULL, NULL, NULL, NULL, NULL, current_setting('vessel.id', false)), (nextval('api.logbook_id_seq'), false, 'Tropics Zone', NULL, NULL, NULL, NULL, NULL, NULL, 'SRID=4326;LINESTRING (-63.151124640791096 14.01074681627324, -77.0912026418618 12.870995731013664)'::public.geometry, NULL, NULL, NOW(), NOW(), 123, NULL, NULL, NULL, NULL, NULL, current_setting('vessel.id', false)),
(nextval('api.logbook_id_seq'), false, 'Alaska Zone', NULL, NULL, NULL, NULL, NULL, NULL, 'SRID=4326;LINESTRING (-143.5773697471158 59.4404631255976, -152.35402122385003 56.58243132943173)'::public.geometry, NULL, NULL, NOW(), NOW(), 1234, NULL, NULL, NULL, NULL, NULL, current_setting('vessel.id', false)); (nextval('api.logbook_id_seq'), false, 'Alaska Zone', NULL, NULL, NULL, NULL, NULL, NULL, 'SRID=4326;LINESTRING (-143.5773697471158 59.4404631255976, -152.35402122385003 56.58243132943173)'::public.geometry, NULL, NULL, NOW(), NOW(), 1234, NULL, NULL, NULL, NULL, NULL, current_setting('vessel.id', false));
@@ -27,10 +27,10 @@ SELECT set_config('user.email', 'demo+kapla@openplotter.cloud', false);
--SELECT set_config('vessel.client_id', 'vessels.urn:mrn:imo:mmsi:123456789', false); --SELECT set_config('vessel.client_id', 'vessels.urn:mrn:imo:mmsi:123456789', false);
\echo 'Process badge' \echo 'Process badge'
SELECT badges_logbook_fn(5); SELECT badges_logbook_fn(5,NOW()::TEXT);
SELECT badges_logbook_fn(6); SELECT badges_logbook_fn(6,NOW()::TEXT);
SELECT badges_geom_fn(5); SELECT badges_geom_fn(5,NOW()::TEXT);
SELECT badges_geom_fn(6); SELECT badges_geom_fn(6,NOW()::TEXT);
\echo 'Check badges for user' \echo 'Check badges for user'
SELECT jsonb_object_keys ( a.preferences->'badges' ) FROM auth.accounts a; SELECT jsonb_object_keys ( a.preferences->'badges' ) FROM auth.accounts a;
@@ -53,10 +53,10 @@ SELECT
\echo 'Insert new api.moorages for badges' \echo 'Insert new api.moorages for badges'
INSERT INTO api.moorages INSERT INTO api.moorages
(id,"name",country,stay_id,stay_code,stay_duration,reference_count,latitude,longitude,geog,home_flag,notes,vessel_id) (id,"name",country,stay_code,stay_duration,reference_count,latitude,longitude,geog,home_flag,notes,vessel_id)
VALUES OVERRIDING SYSTEM VALUE VALUES
(5,'Badge Mooring Pro',NULL,5,3,'11 days 00:39:56.418',1,NULL,NULL,NULL,false,'Badge Mooring Pro',current_setting('vessel.id', false)), (8,'Badge Mooring Pro',NULL,3,'11 days 00:39:56.418',1,NULL,NULL,NULL,false,'Badge Mooring Pro',current_setting('vessel.id', false)),
(6,'Badge Anchormaster',NULL,5,2,'26 days 00:49:56.418',1,NULL,NULL,NULL,false,'Badge Anchormaster',current_setting('vessel.id', false)); (9,'Badge Anchormaster',NULL,2,'26 days 00:49:56.418',1,NULL,NULL,NULL,false,'Badge Anchormaster',current_setting('vessel.id', false));
\echo 'Set config' \echo 'Set config'
SELECT set_config('user.email', 'demo+aava@openplotter.cloud', false); SELECT set_config('user.email', 'demo+aava@openplotter.cloud', false);

View File

@@ -31,7 +31,7 @@ SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_tim
\echo 'stays' \echo 'stays'
SELECT count(*) FROM api.stays WHERE vessel_id = current_setting('vessel.id', false); SELECT count(*) FROM api.stays WHERE vessel_id = current_setting('vessel.id', false);
\echo 'stays' \echo 'stays'
SELECT active,name,geog,stay_code FROM api.stays WHERE vessel_id = current_setting('vessel.id', false); SELECT active,name IS NOT NULL AS name,geog,stay_code FROM api.stays WHERE vessel_id = current_setting('vessel.id', false);
-- Test event logs view for user -- Test event logs view for user
\echo 'eventlogs_view' \echo 'eventlogs_view'
@@ -69,3 +69,8 @@ SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('ve
--SELECT api.export_logbook_geojson_fn(1); --SELECT api.export_logbook_geojson_fn(1);
--SELECT api.export_logbook_gpx_fn(1); --SELECT api.export_logbook_gpx_fn(1);
--SELECT api.export_logbook_kml_fn(1); --SELECT api.export_logbook_kml_fn(1);
-- Check history
--\echo 'monitoring history fn'
--select api.monitoring_history_fn();
--select api.monitoring_history_fn('24');

View File

@@ -16,28 +16,28 @@ logbook
count | 2 count | 2
logbook logbook
-[ RECORD 1 ]--+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -[ RECORD 1 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Bollsta to Slottsbacken name | Pojoviken to Norra hamnen
_from_time | t _from_time | t
_to_time | t _to_time | t
track_geojson | t track_geojson | t
track_geom | 0102000020E61000001A00000020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40 track_geom | 0102000020E61000001C000000B0DEBBE0E68737404DA938FBF0094E40B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
distance | 7.17 distance | 7.6447
duration | PT25M duration | PT27M
avg_speed | 3.6961538461538455 avg_speed | 3.6357142857142852
max_speed | 6.1 max_speed | 6.1
max_wind_speed | 22.1 max_wind_speed | 22.1
notes | new log note notes |
extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}} extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
-[ RECORD 2 ]--+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -[ RECORD 2 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Knipan to Ekenäs name | Norra hamnen to Ekenäs
_from_time | t _from_time | t
_to_time | t _to_time | t
track_geojson | t track_geojson | t
track_geom | 0102000020E6100000130000004806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40DE4C5FE2A25D3740AE47E17A14EE4D40 track_geom | 0102000020E610000015000000029A081B9E6E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
distance | 8.6862 distance | 8.8968
duration | PT18M duration | PT20M
avg_speed | 6.026315789473684 avg_speed | 5.4523809523809526
max_speed | 6.5 max_speed | 6.5
max_wind_speed | 37.2 max_wind_speed | 37.2
notes | notes |
@@ -49,24 +49,24 @@ count | 3
stays stays
-[ RECORD 1 ]------------------------------------------------- -[ RECORD 1 ]-------------------------------------------------
active | f active | t
name | Bollsta name | f
geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40 geog |
stay_code | 2 stay_code | 2
-[ RECORD 2 ]------------------------------------------------- -[ RECORD 2 ]-------------------------------------------------
active | f active | f
name | Slottsbacken name | t
geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40 geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40
stay_code | 1
-[ RECORD 3 ]-------------------------------------------------
active | t
name | Ekenäs
geog | 0101000020E6100000DE4C5FE2A25D3740AE47E17A14EE4D40
stay_code | 2 stay_code | 2
-[ RECORD 3 ]-------------------------------------------------
active | f
name | t
geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40
stay_code | 4
eventlogs_view eventlogs_view
-[ RECORD 1 ] -[ RECORD 1 ]
count | 13 count | 12
stats_logs_fn stats_logs_fn
SELECT 1 SELECT 1
@@ -74,11 +74,11 @@ SELECT 1
name | "kapla" name | "kapla"
count | 4 count | 4
max_speed | 7.1 max_speed | 7.1
max_distance | 8.6862 max_distance | 8.8968
max_duration | "PT1H11M" max_duration | "PT1H11M"
?column? | 3 ?column? | 3
?column? | 29.2865 ?column? | 30.1154
?column? | "PT2H37M" ?column? | "PT2H43M"
?column? | 44.2 ?column? | 44.2
?column? | 2 ?column? | 2
?column? | 4 ?column? | 4

View File

@@ -12,9 +12,14 @@ select current_database();
\x on \x on
-- Check the number of process pending -- Check the number of process pending
\echo 'Check the number of process pending'
-- Should be 22 -- Should be 22
SELECT count(*) as jobs from public.process_queue pq where pq.processed is null; SELECT count(*) as jobs from public.process_queue pq where pq.processed is null;
--set role scheduler --set role scheduler
SELECT public.run_cron_jobs(); SELECT public.run_cron_jobs();
-- Check any pending job -- Check any pending job
SELECT count(*) as any_pending_jobs from public.process_queue pq where pq.processed is null; SELECT count(*) as any_pending_jobs from public.process_queue pq where pq.processed is null;
-- Check the number of metrics entries
\echo 'Check the number of metrics entries'
SELECT count(*) as metrics_count from api.metrics;

View File

@@ -5,12 +5,17 @@
You are now connected to database "signalk" as user "username". You are now connected to database "signalk" as user "username".
Expanded display is on. Expanded display is on.
Check the number of process pending
-[ RECORD 1 ] -[ RECORD 1 ]
jobs | 28 jobs | 26
-[ RECORD 1 ]-+- -[ RECORD 1 ]-+-
run_cron_jobs | run_cron_jobs |
-[ RECORD 1 ]----+-- -[ RECORD 1 ]----+--
any_pending_jobs | 0 any_pending_jobs | 2
Check the number of metrics entries
-[ RECORD 1 ]-+----
metrics_count | 172

View File

@@ -23,7 +23,7 @@ SELECT current_user, current_setting('user.email', true), current_setting('vesse
SELECT v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id; SELECT v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id;
\echo 'auth.accounts details' \echo 'auth.accounts details'
SELECT a.public_id IS NOT NULL AS public_id, a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT NULL AS pass, a.role, a.preferences->'telegram'->'chat' AS telegram, a.preferences->'pushover_user_key' AS pushover_user_key FROM auth.accounts AS a; SELECT a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT NULL AS pass, a.role, a.preferences->'telegram'->'chat' AS telegram, a.preferences->'pushover_user_key' AS pushover_user_key FROM auth.accounts AS a;
\echo 'auth.vessels details' \echo 'auth.vessels details'
--SELECT 'SELECT ' || STRING_AGG('v.' || column_name, ', ') || ' FROM auth.vessels AS v' FROM information_schema.columns WHERE table_name = 'vessels' AND table_schema = 'auth' AND column_name NOT IN ('created_at', 'updated_at'); --SELECT 'SELECT ' || STRING_AGG('v.' || column_name, ', ') || ' FROM auth.vessels AS v' FROM information_schema.columns WHERE table_name = 'vessels' AND table_schema = 'auth' AND column_name NOT IN ('created_at', 'updated_at');
SELECT v.vessel_id IS NOT NULL AS vessel_id, v.owner_email, v.mmsi, v.name, v.role FROM auth.vessels AS v; SELECT v.vessel_id IS NOT NULL AS vessel_id, v.owner_email, v.mmsi, v.name, v.role FROM auth.vessels AS v;
@@ -60,12 +60,12 @@ SELECT m.id, m.name, m.mmsi, m.client_id, m.length, m.beam, m.height, m.ship_typ
\echo 'api.logs_view' \echo 'api.logs_view'
--SELECT * FROM api.logbook l; --SELECT * FROM api.logbook l;
--SELECT * FROM api.logs_view l; --SELECT * FROM api.logs_view l;
SELECT l.id, l.name, l.from, l.to, l.distance, l.duration FROM api.logs_view AS l; SELECT l.id, l.name, l.from, l.to, l.distance, l.duration, l._from_moorage_id, l._to_moorage_id FROM api.logs_view AS l;
--SELECT * FROM api.log_view l; --SELECT * FROM api.log_view l;
\echo 'api.stays' \echo 'api.stays'
--SELECT * FROM api.stays s; --SELECT * FROM api.stays s;
SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.active, m.name, m.latitude, m.longitude, m.geog, m.arrived IS NOT NULL AS arrived, m.departed IS NOT NULL AS departed, m.duration, m.stay_code, m.notes FROM api.stays AS m; SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.moorage_id, m.active, m.name IS NOT NULL AS name, m.latitude, m.longitude, m.geog, m.arrived IS NOT NULL AS arrived, m.departed IS NOT NULL AS departed, m.duration, m.stay_code, m.notes FROM api.stays AS m;
\echo 'stays_view' \echo 'stays_view'
--SELECT * FROM api.stays_view s; --SELECT * FROM api.stays_view s;
@@ -73,7 +73,7 @@ SELECT m.id, m.name IS NOT NULL AS name, m.moorage, m.moorage_id, m.duration, m.
\echo 'api.moorages' \echo 'api.moorages'
--SELECT * FROM api.moorages m; --SELECT * FROM api.moorages m;
SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.name, m.country, m.stay_id, m.stay_code, m.stay_duration, m.reference_count, m.latitude, m.longitude, m.geog, m.home_flag, m.notes FROM api.moorages AS m; SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.name, m.country, m.stay_code, m.stay_duration, m.reference_count, m.latitude, m.longitude, m.geog, m.home_flag, m.notes FROM api.moorages AS m;
\echo 'api.moorages_view' \echo 'api.moorages_view'
SELECT * FROM api.moorages_view s; SELECT * FROM api.moorages_view s;

View File

@@ -15,29 +15,27 @@ current_setting |
link vessel and user based on current_setting link vessel and user based on current_setting
-[ RECORD 1 ]---------------------------------------------------------------- -[ RECORD 1 ]----------------------------------------------------------------
name | kapla
client_id | vessels.urn:mrn:signalk:uuid:5b4f7543-7153-4840-b139-761310b242fd
-[ RECORD 2 ]----------------------------------------------------------------
name | aava name | aava
client_id | vessels.urn:mrn:imo:mmsi:787654321 client_id | vessels.urn:mrn:imo:mmsi:787654321
-[ RECORD 2 ]----------------------------------------------------------------
name | kapla
client_id | vessels.urn:mrn:signalk:uuid:5b4f7543-7153-4840-b139-761310b242fd
auth.accounts details auth.accounts details
-[ RECORD 1 ]-----+----------------------------- -[ RECORD 1 ]-----+-----------------------------
public_id | t
user_id | t user_id | t
email | demo+kapla@openplotter.cloud email | demo+aava@openplotter.cloud
first | First_kapla first | first_aava
last | Last_kapla last | last_aava
pass | t pass | t
role | user_role role | user_role
telegram | telegram |
pushover_user_key | pushover_user_key |
-[ RECORD 2 ]-----+----------------------------- -[ RECORD 2 ]-----+-----------------------------
public_id | t
user_id | t user_id | t
email | demo+aava@openplotter.cloud email | demo+kapla@openplotter.cloud
first | first_aava first | First_kapla
last | last_aava last | Last_kapla
pass | t pass | t
role | user_role role | user_role
telegram | telegram |
@@ -125,80 +123,87 @@ time | t
active | t active | t
api.logs_view api.logs_view
-[ RECORD 1 ]-------------- -[ RECORD 1 ]----+-----------------------
id | 2 id | 2
name | Knipan to Ekenäs name | Norra hamnen to Ekenäs
from | Knipan from | Norra hamnen
to | Ekenäs to | Ekenäs
distance | 8.6862 distance | 8.8968
duration | PT18M duration | PT20M
-[ RECORD 2 ]-------------- _from_moorage_id | 2
id | 1 _to_moorage_id | 3
name | patch log name 3 -[ RECORD 2 ]----+-----------------------
from | Bollsta id | 1
to | Slottsbacken name | patch log name 3
distance | 7.17 from | patch moorage name 3
duration | PT25M to | Norra hamnen
distance | 7.6447
duration | PT27M
_from_moorage_id | 1
_to_moorage_id | 2
api.stays api.stays
-[ RECORD 1 ]------------------------------------------------- -[ RECORD 1 ]--------------------------------------------------
id | 1 id | 3
vessel_id | t vessel_id | t
active | f moorage_id |
name | patch stay name 3 active | t
latitude | 60.077666666666666 name | f
longitude | 23.530866666666668 latitude | 59.86
geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40 longitude | 23.365766666666666
arrived | t geog |
departed | t arrived | t
duration | departed | f
stay_code | 2 duration |
notes | new stay note 3 stay_code | 2
-[ RECORD 2 ]------------------------------------------------- notes |
id | 2 -[ RECORD 2 ]--------------------------------------------------
vessel_id | t id | 1
active | f vessel_id | t
name | Slottsbacken moorage_id | 1
latitude | 59.97688333333333 active | f
longitude | 23.4321 name | t
geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40 latitude | 60.077666666666666
arrived | t longitude | 23.530866666666668
departed | t geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40
duration | arrived | t
stay_code | 1 departed | t
notes | duration | PT1M
-[ RECORD 3 ]------------------------------------------------- stay_code | 2
id | 3 notes | new stay note 3
vessel_id | t -[ RECORD 3 ]--------------------------------------------------
active | t id | 2
name | Ekenäs vessel_id | t
latitude | 59.86 moorage_id | 2
longitude | 23.365766666666666 active | f
geog | 0101000020E6100000DE4C5FE2A25D3740AE47E17A14EE4D40 name | t
arrived | t latitude | 59.97688333333333
departed | f longitude | 23.4321
duration | geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40
stay_code | 2 arrived | t
notes | departed | t
duration | PT2M
stay_code | 4
notes |
stays_view stays_view
-[ RECORD 1 ]+------------------ -[ RECORD 1 ]+---------------------
id | 2 id | 2
name | t name | t
moorage | Slottsbacken moorage | Norra hamnen
moorage_id | 2 moorage_id | 2
duration | PT3M duration | PT2M
stayed_at | Unknown stayed_at | Dock
stayed_at_id | 1 stayed_at_id | 4
arrived | t arrived | t
departed | t departed | t
notes | notes |
-[ RECORD 2 ]+------------------ -[ RECORD 2 ]+---------------------
id | 1 id | 1
name | t name | t
moorage | patch stay name 3 moorage | patch moorage name 3
moorage_id | 1 moorage_id | 1
duration | PT2M duration | PT1M
stayed_at | Anchor stayed_at | Anchor
stayed_at_id | 2 stayed_at_id | 2
arrived | t arrived | t
@@ -210,44 +215,57 @@ api.moorages
id | 1 id | 1
vessel_id | t vessel_id | t
name | patch moorage name 3 name | patch moorage name 3
country | fi country |
stay_id | 1
stay_code | 2 stay_code | 2
stay_duration | PT2M stay_duration | PT1M
reference_count | 1 reference_count | 1
latitude | 60.077666666666666 latitude | 60.0776666666667
longitude | 23.530866666666668 longitude | 23.5308666666667
geog | 0101000020E6100000B0DEBBE0E68737404DA938FBF0094E40 geog | 0101000020E6100000B9DEBBE0E687374052A938FBF0094E40
home_flag | t home_flag | t
notes | new moorage note 3 notes | new moorage note 3
-[ RECORD 2 ]---+--------------------------------------------------- -[ RECORD 2 ]---+---------------------------------------------------
id | 2 id | 2
vessel_id | t vessel_id | t
name | Slottsbacken name | Norra hamnen
country | fi country |
stay_id | 2 stay_code | 4
stay_code | 1 stay_duration | PT2M
stay_duration | PT3M reference_count | 2
reference_count | 1 latitude | 59.9768833333333
latitude | 59.97688333333333
longitude | 23.4321 longitude | 23.4321
geog | 0101000020E6100000029A081B9E6E37404A5658830AFD4D40 geog | 0101000020E6100000029A081B9E6E3740455658830AFD4D40
home_flag | f
notes |
-[ RECORD 3 ]---+---------------------------------------------------
id | 3
vessel_id | t
name | Ekenäs
country | fi
stay_code | 1
stay_duration |
reference_count | 1
latitude | 59.86
longitude | 23.3657666666667
geog | 0101000020E6100000E84C5FE2A25D3740AE47E17A14EE4D40
home_flag | f home_flag | f
notes | notes |
api.moorages_view api.moorages_view
-[ RECORD 1 ]-------+--------------------- -[ RECORD 1 ]-------+---------------------
id | 2
moorage | Norra hamnen
default_stay | Dock
default_stay_id | 4
total_stay | 0
total_duration | PT2M
arrivals_departures | 2
-[ RECORD 2 ]-------+---------------------
id | 1 id | 1
moorage | patch moorage name 3 moorage | patch moorage name 3
default_stay | Anchor default_stay | Anchor
default_stay_id | 2 default_stay_id | 2
total_stay | 0 total_stay | 0
arrivals_departures | 1 total_duration | PT1M
-[ RECORD 2 ]-------+---------------------
id | 2
moorage | Slottsbacken
default_stay | Unknown
default_stay_id | 1
total_stay | 0
arrivals_departures | 1 arrivals_departures | 1

View File

@@ -22,15 +22,15 @@ count | 21
Test monitoring_view3 for user Test monitoring_view3 for user
-[ RECORD 1 ] -[ RECORD 1 ]
count | 3682 count | 3736
Test monitoring_voltage for user Test monitoring_voltage for user
-[ RECORD 1 ] -[ RECORD 1 ]
count | 46 count | 47
Test monitoring_temperatures for user Test monitoring_temperatures for user
-[ RECORD 1 ] -[ RECORD 1 ]
count | 119 count | 120
Test monitoring_humidity for user Test monitoring_humidity for user
-[ RECORD 1 ] -[ RECORD 1 ]

View File

@@ -81,6 +81,10 @@ select * from pg_policies;
SELECT public.reverse_geocode_py_fn('nominatim', 1.4440116666666667, 38.82985166666667); SELECT public.reverse_geocode_py_fn('nominatim', 1.4440116666666667, 38.82985166666667);
\echo 'Test geoip reverse_geoip_py_fn' \echo 'Test geoip reverse_geoip_py_fn'
--SELECT reverse_geoip_py_fn('62.74.13.231'); --SELECT reverse_geoip_py_fn('62.74.13.231');
\echo 'Test opverpass API overpass_py_fn'
SELECT public.overpass_py_fn(2.19917, 41.386873333333334); -- Port Olimpic
SELECT public.overpass_py_fn(1.92574333333, 41.258915); -- Port de la Ginesta
SELECT public.overpass_py_fn(23.4321, 59.9768833333333); -- Norra hamnen
-- List details product versions -- List details product versions
SELECT api.versions_fn(); SELECT api.versions_fn();

View File

@@ -6,10 +6,10 @@
You are now connected to database "signalk" as user "username". You are now connected to database "signalk" as user "username".
Expanded display is on. Expanded display is on.
-[ RECORD 1 ]--+------------------------------- -[ RECORD 1 ]--+-------------------------------
server_version | 15.4 (Debian 15.4-2.pgdg110+1) server_version | 16.1 (Debian 16.1-1.pgdg110+1)
-[ RECORD 1 ]--------+---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- -[ RECORD 1 ]--------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
postgis_full_version | POSTGIS="3.4.0 0874ea3" [EXTENSION] PGSQL="150" GEOS="3.9.0-CAPI-1.16.2" PROJ="7.2.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.10" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)" postgis_full_version | POSTGIS="3.4.1 ca035b9" [EXTENSION] PGSQL="160" GEOS="3.9.0-CAPI-1.16.2" PROJ="7.2.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.10" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)"
-[ RECORD 1 ]-------------------------------------------------------------------------------------- -[ RECORD 1 ]--------------------------------------------------------------------------------------
Name | citext Name | citext
@@ -48,12 +48,12 @@ Schema | pg_catalog
Description | PL/Python3U untrusted procedural language Description | PL/Python3U untrusted procedural language
-[ RECORD 8 ]-------------------------------------------------------------------------------------- -[ RECORD 8 ]--------------------------------------------------------------------------------------
Name | postgis Name | postgis
Version | 3.4.0 Version | 3.4.1
Schema | public Schema | public
Description | PostGIS geometry and geography spatial types and functions Description | PostGIS geometry and geography spatial types and functions
-[ RECORD 9 ]-------------------------------------------------------------------------------------- -[ RECORD 9 ]--------------------------------------------------------------------------------------
Name | timescaledb Name | timescaledb
Version | 2.12.2 Version | 2.13.1
Schema | public Schema | public
Description | Enables scalable inserts and complex queries for time-series data (Community Edition) Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
-[ RECORD 10 ]------------------------------------------------------------------------------------- -[ RECORD 10 ]-------------------------------------------------------------------------------------
@@ -96,24 +96,24 @@ laninline | 0
lanvalidator | 2248 lanvalidator | 2248
lanacl | lanacl |
-[ RECORD 4 ]-+----------- -[ RECORD 4 ]-+-----------
oid | 13542 oid | 13545
lanname | plpgsql lanname | plpgsql
lanowner | 10 lanowner | 10
lanispl | t lanispl | t
lanpltrusted | t lanpltrusted | t
lanplcallfoid | 13539 lanplcallfoid | 13542
laninline | 13540 laninline | 13543
lanvalidator | 13541 lanvalidator | 13544
lanacl | lanacl |
-[ RECORD 5 ]-+----------- -[ RECORD 5 ]-+-----------
oid | 18283 oid | 18297
lanname | plpython3u lanname | plpython3u
lanowner | 10 lanowner | 10
lanispl | t lanispl | t
lanpltrusted | t lanpltrusted | t
lanplcallfoid | 18280 lanplcallfoid | 18294
laninline | 18281 laninline | 18295
lanvalidator | 18282 lanvalidator | 18296
lanacl | lanacl |
-[ RECORD 1 ]+----------- -[ RECORD 1 ]+-----------
@@ -243,6 +243,8 @@ schema_auth | accounts
-[ RECORD 2 ]--------- -[ RECORD 2 ]---------
schema_auth | otp schema_auth | otp
-[ RECORD 3 ]--------- -[ RECORD 3 ]---------
schema_auth | users
-[ RECORD 4 ]---------
schema_auth | vessels schema_auth | vessels
(0 rows) (0 rows)
@@ -321,14 +323,14 @@ cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false)) qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | true with_check | true
-[ RECORD 9 ]------------------------------------------------------------------------------------------------------------------------------ -[ RECORD 9 ]------------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | auth
tablename | moorages tablename | vessels
policyname | admin_all policyname | grafana_proxy_role
permissive | PERMISSIVE permissive | PERMISSIVE
roles | {username} roles | {grafana_auth}
cmd | ALL cmd | ALL
qual | true qual | true
with_check | true with_check | false
-[ RECORD 10 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 10 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | metrics tablename | metrics
@@ -358,6 +360,15 @@ qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false with_check | false
-[ RECORD 13 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 13 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | metrics
policyname | api_anonymous_role
permissive | PERMISSIVE
roles | {api_anonymous}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false
-[ RECORD 14 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | logbook tablename | logbook
policyname | admin_all policyname | admin_all
permissive | PERMISSIVE permissive | PERMISSIVE
@@ -365,7 +376,7 @@ roles | {username}
cmd | ALL cmd | ALL
qual | true qual | true
with_check | true with_check | true
-[ RECORD 14 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 15 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | logbook tablename | logbook
policyname | api_vessel_role policyname | api_vessel_role
@@ -374,26 +385,8 @@ roles | {vessel_role}
cmd | ALL cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false)) qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | true with_check | true
-[ RECORD 15 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | logbook
policyname | api_user_role
permissive | PERMISSIVE
roles | {user_role}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, true))
with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 16 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 16 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth schemaname | auth
tablename | vessels
policyname | grafana_proxy_role
permissive | PERMISSIVE
roles | {grafana_auth}
cmd | ALL
qual | true
with_check | false
-[ RECORD 17 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth
tablename | accounts tablename | accounts
policyname | admin_all policyname | admin_all
permissive | PERMISSIVE permissive | PERMISSIVE
@@ -401,6 +394,15 @@ roles | {username}
cmd | ALL cmd | ALL
qual | true qual | true
with_check | true with_check | true
-[ RECORD 17 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | logbook
policyname | api_user_role
permissive | PERMISSIVE
roles | {user_role}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, true))
with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 18 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 18 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | logbook tablename | logbook
@@ -421,6 +423,15 @@ qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false with_check | false
-[ RECORD 20 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 20 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | logbook
policyname | api_anonymous_role
permissive | PERMISSIVE
roles | {api_anonymous}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false
-[ RECORD 21 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | stays tablename | stays
policyname | admin_all policyname | admin_all
permissive | PERMISSIVE permissive | PERMISSIVE
@@ -428,7 +439,7 @@ roles | {username}
cmd | ALL cmd | ALL
qual | true qual | true
with_check | true with_check | true
-[ RECORD 21 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 22 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | stays tablename | stays
policyname | api_vessel_role policyname | api_vessel_role
@@ -437,7 +448,7 @@ roles | {vessel_role}
cmd | ALL cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false)) qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | true with_check | true
-[ RECORD 22 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 23 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | stays tablename | stays
policyname | api_user_role policyname | api_user_role
@@ -446,25 +457,43 @@ roles | {user_role}
cmd | ALL cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, true)) qual | (vessel_id = current_setting('vessel.id'::text, true))
with_check | (vessel_id = current_setting('vessel.id'::text, false)) with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 23 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | stays
policyname | api_scheduler_role
permissive | PERMISSIVE
roles | {scheduler}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 24 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | stays tablename | stays
policyname | api_scheduler_role
permissive | PERMISSIVE
roles | {scheduler}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | stays
policyname | grafana_role policyname | grafana_role
permissive | PERMISSIVE permissive | PERMISSIVE
roles | {grafana} roles | {grafana}
cmd | ALL cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false)) qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false with_check | false
-[ RECORD 25 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | stays
policyname | api_anonymous_role
permissive | PERMISSIVE
roles | {api_anonymous}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false
-[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | moorages
policyname | admin_all
permissive | PERMISSIVE
roles | {username}
cmd | ALL
qual | true
with_check | true
-[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | moorages tablename | moorages
policyname | api_vessel_role policyname | api_vessel_role
@@ -473,7 +502,7 @@ roles | {vessel_role}
cmd | ALL cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false)) qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | true with_check | true
-[ RECORD 26 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | moorages tablename | moorages
policyname | api_user_role policyname | api_user_role
@@ -482,7 +511,7 @@ roles | {user_role}
cmd | ALL cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, true)) qual | (vessel_id = current_setting('vessel.id'::text, true))
with_check | (vessel_id = current_setting('vessel.id'::text, false)) with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 27 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | moorages tablename | moorages
policyname | api_scheduler_role policyname | api_scheduler_role
@@ -491,7 +520,7 @@ roles | {scheduler}
cmd | ALL cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false)) qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | (vessel_id = current_setting('vessel.id'::text, false)) with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 28 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api schemaname | api
tablename | moorages tablename | moorages
policyname | grafana_role policyname | grafana_role
@@ -500,7 +529,16 @@ roles | {grafana}
cmd | ALL cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false)) qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false with_check | false
-[ RECORD 29 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | moorages
policyname | api_anonymous_role
permissive | PERMISSIVE
roles | {api_anonymous}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false
-[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth schemaname | auth
tablename | vessels tablename | vessels
policyname | admin_all policyname | admin_all
@@ -509,7 +547,7 @@ roles | {username}
cmd | ALL cmd | ALL
qual | true qual | true
with_check | true with_check | true
-[ RECORD 30 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth schemaname | auth
tablename | vessels tablename | vessels
policyname | api_user_role policyname | api_user_role
@@ -518,7 +556,7 @@ roles | {user_role}
cmd | ALL cmd | ALL
qual | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true))) qual | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
with_check | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true))) with_check | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
-[ RECORD 31 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth schemaname | auth
tablename | vessels tablename | vessels
policyname | grafana_role policyname | grafana_role
@@ -527,7 +565,7 @@ roles | {grafana}
cmd | ALL cmd | ALL
qual | ((owner_email)::text = current_setting('user.email'::text, true)) qual | ((owner_email)::text = current_setting('user.email'::text, true))
with_check | false with_check | false
-[ RECORD 32 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth schemaname | auth
tablename | accounts tablename | accounts
policyname | api_user_role policyname | api_user_role
@@ -536,7 +574,7 @@ roles | {user_role}
cmd | ALL cmd | ALL
qual | ((email)::text = current_setting('user.email'::text, true)) qual | ((email)::text = current_setting('user.email'::text, true))
with_check | ((email)::text = current_setting('user.email'::text, true)) with_check | ((email)::text = current_setting('user.email'::text, true))
-[ RECORD 33 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth schemaname | auth
tablename | accounts tablename | accounts
policyname | api_scheduler_role policyname | api_scheduler_role
@@ -545,7 +583,7 @@ roles | {scheduler}
cmd | ALL cmd | ALL
qual | ((email)::text = current_setting('user.email'::text, true)) qual | ((email)::text = current_setting('user.email'::text, true))
with_check | ((email)::text = current_setting('user.email'::text, true)) with_check | ((email)::text = current_setting('user.email'::text, true))
-[ RECORD 34 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth schemaname | auth
tablename | accounts tablename | accounts
policyname | grafana_proxy_role policyname | grafana_proxy_role
@@ -554,7 +592,7 @@ roles | {grafana_auth}
cmd | ALL cmd | ALL
qual | true qual | true
with_check | false with_check | false
-[ RECORD 35 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public schemaname | public
tablename | process_queue tablename | process_queue
policyname | admin_all policyname | admin_all
@@ -563,7 +601,7 @@ roles | {username}
cmd | ALL cmd | ALL
qual | true qual | true
with_check | true with_check | true
-[ RECORD 36 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public schemaname | public
tablename | process_queue tablename | process_queue
policyname | api_vessel_role policyname | api_vessel_role
@@ -572,7 +610,7 @@ roles | {vessel_role}
cmd | ALL cmd | ALL
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true))) qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
with_check | true with_check | true
-[ RECORD 37 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 41 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public schemaname | public
tablename | process_queue tablename | process_queue
policyname | api_user_role policyname | api_user_role
@@ -581,7 +619,7 @@ roles | {user_role}
cmd | ALL cmd | ALL
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true))) qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
with_check | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true))) with_check | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
-[ RECORD 38 ]----------------------------------------------------------------------------------------------------------------------------- -[ RECORD 42 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public schemaname | public
tablename | process_queue tablename | process_queue
policyname | api_scheduler_role policyname | api_scheduler_role
@@ -596,13 +634,23 @@ Test nominatim reverse_geocode_py_fn
reverse_geocode_py_fn | {"name": "Spain", "country_code": "es"} reverse_geocode_py_fn | {"name": "Spain", "country_code": "es"}
Test geoip reverse_geoip_py_fn Test geoip reverse_geoip_py_fn
Test opverpass API overpass_py_fn
-[ RECORD 1 ]--+-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
overpass_py_fn | {"fee": "yes", "vhf": "09", "name": "Port Olímpic", "phone": "+34 933561016", "leisure": "marina", "website": "https://portolimpic.barcelona/", "wikidata": "Q171204", "wikipedia": "ca:Port Olímpic de Barcelona", "addr:street": "Moll de Xaloc", "power_supply": "yes", "seamark:type": "harbour", "addr:postcode": "08005", "internet_access": "wlan", "wikimedia_commons": "Category:Port Olímpic (Barcelona)", "sanitary_dump_station": "yes", "seamark:harbour:category": "marina"}
-[ RECORD 1 ]--+----------------------------------------------------------------------------------------------------------------------------------------------------------------------
overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure": "marina", "name:ca": "Port de la Ginesta", "wikidata": "Q16621038", "wikipedia": "ca:Port Ginesta"}
-[ RECORD 1 ]--+----------------------------------------------
overpass_py_fn | {"name": "Norra hamnen", "leisure": "marina"}
-[ RECORD 1 ]---------------------------------------------------------------------------------------------------------------------------------------------- -[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------
versions_fn | {"api_version" : "0.4.0", "sys_version" : "PostgreSQL 15.4", "timescaledb" : "2.12.2", "postgis" : "3.4.0", "postgrest" : "PostgREST 11.2.1"} versions_fn | {"api_version" : "0.6.0", "sys_version" : "PostgreSQL 16.1", "timescaledb" : "2.13.1", "postgis" : "3.4.1", "postgrest" : "PostgREST 12.0.2"}
-[ RECORD 1 ]----------------- -[ RECORD 1 ]-----------------
api_version | 0.4.0 api_version | 0.6.0
sys_version | PostgreSQL 15.4 sys_version | PostgreSQL 16.1
timescaledb | 2.12.2 timescaledb | 2.13.1
postgis | 3.4.0 postgis | 3.4.1
postgrest | PostgREST 11.2.1 postgrest | PostgREST 12.0.2

View File

@@ -36,7 +36,7 @@ SET vessel.name = 'kapla';
--SET vessel.client_id = 'vessels.urn:mrn:imo:mmsi:123456789'; --SET vessel.client_id = 'vessels.urn:mrn:imo:mmsi:123456789';
--SELECT * FROM api.vessels_view v; --SELECT * FROM api.vessels_view v;
SELECT name, mmsi, created_at IS NOT NULL as created_at, last_contact IS NOT NULL as last_contact FROM api.vessels_view v; SELECT name, mmsi, created_at IS NOT NULL as created_at, last_contact IS NOT NULL as last_contact FROM api.vessels_view v;
SELECT name,geojson,watertemperature,insidetemperature,outsidetemperature FROM api.monitoring_view m; SELECT name,geojson->'geometry' as geometry,watertemperature,insidetemperature,outsidetemperature FROM api.monitoring_view m;
SET "user.email" = 'demo+aava@openplotter.cloud'; SET "user.email" = 'demo+aava@openplotter.cloud';
SELECT set_config('vessel.id', :'vessel_id_aava', false) IS NOT NULL as vessel_id; SELECT set_config('vessel.id', :'vessel_id_aava', false) IS NOT NULL as vessel_id;
@@ -45,4 +45,4 @@ SET vessel.name = 'aava';
--SET vessel.client_id = 'vessels.urn:mrn:imo:mmsi:787654321'; --SET vessel.client_id = 'vessels.urn:mrn:imo:mmsi:787654321';
--SELECT * FROM api.vessels_view v; --SELECT * FROM api.vessels_view v;
SELECT name, mmsi, created_at IS NOT NULL as created_at, last_contact IS NOT NULL as last_contact FROM api.vessels_view v; SELECT name, mmsi, created_at IS NOT NULL as created_at, last_contact IS NOT NULL as last_contact FROM api.vessels_view v;
SELECT name,geojson,watertemperature,insidetemperature,outsidetemperature FROM api.monitoring_view m; SELECT name,geojson->'geometry' as geometry,watertemperature,insidetemperature,outsidetemperature FROM api.monitoring_view m;

View File

@@ -37,9 +37,9 @@ mmsi |
created_at | t created_at | t
last_contact | t last_contact | t
-[ RECORD 1 ]------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ -[ RECORD 1 ]------+--------------------------------------------------------
name | kapla name | kapla
geojson | {"type": "Feature", "geometry": {"type": "Point", "coordinates": [23.365766667, 59.86]}, "properties": {"name": "kapla", "latitude": 59.86, "longitude": 23.365766666666666}} geometry | {"type": "Point", "coordinates": [23.365766667, 59.86]}
watertemperature | watertemperature |
insidetemperature | insidetemperature |
outsidetemperature | outsidetemperature |
@@ -55,9 +55,9 @@ mmsi | 787654321
created_at | t created_at | t
last_contact | t last_contact | t
-[ RECORD 1 ]------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ -[ RECORD 1 ]------+------------------------------------------------------------
name | aava name | aava
geojson | {"type": "Feature", "geometry": {"type": "Point", "coordinates": [2.2934791, 41.465333283]}, "properties": {"name": "aava", "latitude": 41.46533328333334, "longitude": 2.2934791}} geometry | {"type": "Point", "coordinates": [2.2934791, 41.465333283]}
watertemperature | 280.25 watertemperature | 280.25
insidetemperature | insidetemperature |
outsidetemperature | outsidetemperature |

View File

@@ -9,8 +9,25 @@ if [[ -z "${PGSAIL_API_URI}" ]]; then
exit 1 exit 1
fi fi
#npm install # psql
npm install -g pnpm && pnpm install if [[ ! -x "/usr/bin/psql" ]]; then
apt update && apt -y install postgresql-client
fi
# go install
if [[ ! -x "/usr/bin/go" || ! -x "/root/go/bin/mermerd" ]]; then
#wget -q https://go.dev/dl/go1.21.4.linux-arm64.tar.gz && \
#rm -rf /usr/local/go && tar -C /usr/local -xzf go1.21.4.linux-arm64.tar.gz && \
apt update && apt -y install golang && \
go install github.com/KarnerTh/mermerd@latest
fi
# pnpm install
if [[ ! -x "/usr/local/bin/pnpm" ]]; then
npm install -g pnpm
fi
pnpm install || exit 1
# settings # settings
export mymocha="./node_modules/mocha/bin/_mocha" export mymocha="./node_modules/mocha/bin/_mocha"
mkdir -p output/ && rm -rf output/* mkdir -p output/ && rm -rf output/*
@@ -121,6 +138,7 @@ else
exit 1 exit 1
fi fi
# Monitoring API unit tests
$mymocha index4.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report4.html $mymocha index4.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report4.html
if [ $? -eq 0 ]; then if [ $? -eq 0 ]; then
echo OK echo OK
@@ -129,7 +147,7 @@ else
exit 1 exit 1
fi fi
# Monitoring unit tests # Monitoring SQL unit tests
psql ${PGSAIL_DB_URI} < sql/monitoring.sql > output/monitoring.sql.output psql ${PGSAIL_DB_URI} < sql/monitoring.sql > output/monitoring.sql.output
diff sql/monitoring.sql.output output/monitoring.sql.output > /dev/null diff sql/monitoring.sql.output output/monitoring.sql.output > /dev/null
#diff -u sql/monitoring.sql.output output/monitoring.sql.output | wc -l #diff -u sql/monitoring.sql.output output/monitoring.sql.output | wc -l
@@ -142,12 +160,47 @@ else
exit 1 exit 1
fi fi
# Download and update openapi documentation # Anonymous API unit tests
wget ${PGSAIL_API_URI} -O ../openapi.json $mymocha index5.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report5.html
if [ $? -eq 0 ]; then
echo OK
else
echo mocha index5.js
exit 1
fi
# Anonymous SQL unit tests
psql ${PGSAIL_DB_URI} < sql/anonymous.sql > output/anonymous.sql.output
diff sql/anonymous.sql.output output/anonymous.sql.output > /dev/null
#diff -u sql/anonymous.sql.output output/anonymous.sql.output | wc -l
#echo 0 #echo 0
if [ $? -eq 0 ]; then if [ $? -eq 0 ]; then
echo SQL anonymous.sql OK
else
echo SQL anonymous.sql FAILED
diff -u sql/anonymous.sql.output output/anonymous.sql.output
exit 1
fi
# Download and update openapi documentation
wget ${PGSAIL_API_URI} -O openapi.json
#echo 0
if [ $? -eq 0 ]; then
cp openapi.json ../openapi.json
echo openapi.json OK echo openapi.json OK
else else
echo openapi.json FAILED echo openapi.json FAILED
exit 1 exit 1
fi fi
# Generate and update mermaid schema documentation
/root/go/bin/mermerd --runConfig ../docs/ERD/mermerdConfig.yaml
echo $?
echo 0
if [ $? -eq 0 ]; then
cp postgsail.md ../docs/ERD/postgsail.md
echo postgsail.md OK
else
echo postgsail.md FAILED
exit 1
fi