Mirror of https://github.com/xbgmsharp/postgsail.git
Synced 2025-09-17 03:07:47 +00:00

Compare commits (36 commits)
SHA1:
e857440133
0a13e0a798
8fe0513c3c
b69a52eacd
129ee7dcbd
9800d83463
b80799aa2f
84f73f2281
b3d5b80731
5cefe47fea
98f45ace66
ff69d6ee7a
52dcdb61fd
6bbe9795fb
23ea9dfee7
82a0f19566
a7019445e3
f97635cae1
fe93b9ec11
b5be201e65
33b97898d8
c726f6cc07
48647e5978
1702b825c7
3c4f68218f
42e85cc498
bd9b207d43
c588fc676c
e003985d6c
0cf9fa701d
23d2ced60c
a1a5f29c16
5fe9a37eee
2064947457
30b1b8f0a6
4dcd8d2ea5
@@ -84,6 +84,7 @@ To understand the why and how, you might want to read [Why.md](https://github.co
- Timelapse video your trips, with or without time control.
- Add custom notes to your logs.
- Export to CSV, GPX, GeoJSON, KML and download your logs.
- Export your logs as image (PNG) or video (MP4).
- Aggregate your trip statistics: longest voyage, time spent at anchorages, home ports, etc.
- See your moorages on a global map, with incoming and outgoing voyages from each trip.
- Monitor your boat (position, depth, wind, temperature, battery charge status, etc.) remotely.
@@ -72,6 +72,12 @@ flowchart TD
B --> O{Update account,vessel,otp}
F --> P{Update metadata}
G --> P
A --> Q((cron_post_logbook))
Q --> R{QGIS and notification}
A --> S((cron_video))
A --> U((cron_alert))
S --> T{notification}
U --> T{notification}
```
Cron jobs are not processed by default: if the required settings (SMTP, PushOver, Telegram) are not configured, you could enter an error loop and end up blocked or banned by the external services.
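Once your notification settings are in place, you can review and enable the pending jobs via pg_cron's `cron.job` table; a minimal sketch:

```sql
-- List the registered jobs and their status
SELECT jobid, jobname, schedule, active FROM cron.job;
-- Enable all jobs once SMTP/PushOver/Telegram settings are set
UPDATE cron.job SET active = True;
```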
@@ -82,21 +88,35 @@ UPDATE cron.job SET active = True;
```
Be sure to review your postgsail settings via SQL in the table `app_settings`:
```sql
SELECT * FROM public.app_settings;
```

### How to bypass OTP for a local install?

To skip the OTP process, add or update the following JSON key value in the account preferences.
```json
"email_valid": true
```
SQL query:
```sql
UPDATE auth.accounts
SET preferences='{"email_valid": true}'::jsonb || preferences
WHERE email='your.email@domain.com';
```

The OTP is created and sent by email using a cron in postgres/cron/job.
```sql
SELECT * FROM auth.otp;
```

Accounts are stored in the table signalk/auth/accounts.
```sql
SELECT * FROM auth.accounts;
```

You should have a history in the table signalk/public/process_queue.
```sql
SELECT * FROM public.process_queue;
```

### How to turn off signups
@@ -30,6 +30,12 @@ A full-featured development environment.

### On-premise (self-hosted)

This kind of deployment needs the [docker application](https://www.docker.com/) to be installed and running. Check this [tutorial](https://www.docker.com/101-tutorial).

Docker runs pre-packaged applications (aka images) which can be retrieved as sources (Dockerfile and resources) to build, or already built from registries (private or public).

PostgSail depends heavily on [PostgreSQL](https://www.postgresql.org/). Check this [tutorial](https://www.postgresql.org/docs/current/tutorial.html).

#### Pre-deploy configuration

To get these running, copy `.env.example` and rename it to `.env`, then set the values accordingly.

@@ -135,7 +141,7 @@ UPDATE app_settings
WHERE name = 'app.email_server';
```

As it is all about SQL, [read more](https://github.com/xbgmsharp/postgsail/blob/main/docs/ERD/README.md) about the database to configure your instance and explore your data.

### Ingest data
frontend (submodule, 2 changes)
Submodule frontend updated: 6195e98c6b...b5b094cc97

initdb/99_migrations_202405.sql (new file, 784 lines)
@@ -0,0 +1,784 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration May 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('account_disable','PostgSail Account disabled',E'Hello __RECIPIENT__,\nSorry! Your account is disabled. Please contact me to solve the issue.','PostgSail Account disabled!',E'Sorry!\nYour account is disabled. Please contact me to solve the issue.');

-- Check if user is disabled due to abuse
-- Track IP per user to avoid abuse
create or replace function
api.login(in email text, in pass text) returns auth.jwt_token as $$
declare
    _role name;
    result auth.jwt_token;
    app_jwt_secret text;
    _email_valid boolean := false;
    _email text := email;
    _user_id text := null;
    _user_disable boolean := false;
    headers json := current_setting('request.headers', true)::json;
    client_ip text := coalesce(headers->>'x-client-ip', NULL);
begin
    -- check email and password
    select auth.user_role(email, pass) into _role;
    if _role is null then
        -- HTTP/403
        --raise invalid_password using message = 'invalid user or password';
        -- HTTP/401
        raise insufficient_privilege using message = 'invalid user or password';
    end if;

    -- Check if user is disabled due to abuse
    SELECT preferences['disable'],user_id INTO _user_disable,_user_id
        FROM auth.accounts a
        WHERE a.email = _email;
    IF _user_disable is True then
        -- due to the raise, the insert is never committed.
        --INSERT INTO process_queue (channel, payload, stored, ref_id)
        --    VALUES ('account_disable', _email, now(), _user_id);
        RAISE sqlstate 'PT402' using message = 'Account disabled, contact us',
            detail = 'Quota exceeded',
            hint = 'Upgrade your plan';
    END IF;

    -- Check email_valid and generate OTP
    SELECT preferences['email_valid'],user_id INTO _email_valid,_user_id
        FROM auth.accounts a
        WHERE a.email = _email;
    IF _email_valid is null or _email_valid is False THEN
        INSERT INTO process_queue (channel, payload, stored, ref_id)
            VALUES ('email_otp', _email, now(), _user_id);
    END IF;

    -- Track IP per user to avoid abuse
    --RAISE WARNING 'api.login debug: [%],[%]', client_ip, login.email;
    IF client_ip IS NOT NULL THEN
        UPDATE auth.accounts a SET preferences = jsonb_recursive_merge(a.preferences, jsonb_build_object('ip', client_ip)) WHERE a.email = login.email;
    END IF;

    -- Get app_jwt_secret
    SELECT value INTO app_jwt_secret
        FROM app_settings
        WHERE name = 'app.jwt_secret';

    --RAISE WARNING 'api.login debug: [%],[%],[%]', app_jwt_secret, _role, login.email;
    -- Generate jwt
    select jwt.sign(
        -- row_to_json(r), ''
        -- row_to_json(r)::json, current_setting('app.jwt_secret')::text
        row_to_json(r)::json, app_jwt_secret
        ) as token
        from (
            select _role as role, login.email as email, -- TODO replace with user_id
            -- select _role as role, user_id as uid, -- add support in check_jwt
                extract(epoch from now())::integer + 60*60 as exp
        ) r
        into result;
    return result;
end;
$$ language plpgsql security definer;

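-- Usage sketch (placeholder credentials): a successful call returns a signed JWT.
--   SELECT api.login('your.email@domain.com', 'your-password');
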
-- Add moorage name to view
DROP VIEW IF EXISTS api.moorages_stays_view;
CREATE OR REPLACE VIEW api.moorages_stays_view WITH (security_invoker=true,security_barrier=true) AS
    select
        _to.name AS _to_name,
        _to.id AS _to_id,
        _to._to_time,
        _from.id AS _from_id,
        _from.name AS _from_name,
        _from._from_time,
        s.stay_code,s.duration,m.id,m.name
    FROM api.stays_at sa, api.moorages m, api.stays s
    LEFT JOIN api.logbook AS _from ON _from._from_time = s.departed
    LEFT JOIN api.logbook AS _to ON _to._to_time = s.arrived
    WHERE s.departed IS NOT NULL
        AND s.name IS NOT NULL
        AND s.stay_code = sa.stay_code
        AND s.moorage_id = m.id
    ORDER BY _to._to_time DESC;
-- Description
COMMENT ON VIEW
    api.moorages_stays_view
    IS 'Moorages stay listing web view';

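-- Usage sketch: list stays with their departure and arrival logs.
--   SELECT _from_name, _to_name, duration, name FROM api.moorages_stays_view;
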
-- Create a merge_logbook_fn
CREATE OR REPLACE FUNCTION api.merge_logbook_fn(IN id_start integer, IN id_end integer) RETURNS void AS $merge_logbook$
DECLARE
    logbook_rec_start record;
    logbook_rec_end record;
    log_name text;
    avg_rec record;
    geo_rec record;
    geojson jsonb;
    extra_json jsonb;
BEGIN
    -- Ensure id_start and id_end are valid
    IF (id_start IS NULL OR id_start < 1) OR (id_end IS NULL OR id_end < 1) THEN
        RAISE WARNING '-> merge_logbook_fn invalid input % %', id_start, id_end;
        RETURN;
    END IF;
    -- id_end must be greater than id_start
    IF id_end <= id_start THEN
        RAISE WARNING '-> merge_logbook_fn invalid input % < %', id_end, id_start;
        RETURN;
    END IF;
    -- Get the start logbook record where all necessary fields exist
    SELECT * INTO logbook_rec_start
        FROM api.logbook
        WHERE active IS false
            AND id = id_start
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    -- Ensure the query is successful
    IF logbook_rec_start.vessel_id IS NULL THEN
        RAISE WARNING '-> merge_logbook_fn invalid logbook %', id_start;
        RETURN;
    END IF;
    -- Get the end logbook record where all necessary fields exist
    SELECT * INTO logbook_rec_end
        FROM api.logbook
        WHERE active IS false
            AND id = id_end
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    -- Ensure the query is successful
    IF logbook_rec_end.vessel_id IS NULL THEN
        RAISE WARNING '-> merge_logbook_fn invalid logbook %', id_end;
        RETURN;
    END IF;

    RAISE WARNING '-> merge_logbook_fn logbook start:% end:%', id_start, id_end;
    PERFORM set_config('vessel.id', logbook_rec_start.vessel_id, false);

    -- Calculate logbook data average and geo
    -- Update logbook entry with the latest metric data and calculate data
    avg_rec := logbook_update_avg_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
    geo_rec := logbook_update_geom_distance_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);

    -- Process `propulsion.*.runTime` and `navigation.log`
    -- Calculate extra json
    extra_json := logbook_update_extra_json_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
    -- add the avg_wind_speed
    extra_json := extra_json || jsonb_build_object('avg_wind_speed', avg_rec.avg_wind_speed);

    -- generate logbook name, concat _from_location and _to_location from moorage name
    SELECT CONCAT(logbook_rec_start._from, ' to ', logbook_rec_end._to) INTO log_name;
    RAISE NOTICE 'Updating valid logbook entry logbook id:[%] start:[%] end:[%]', logbook_rec_start.id, logbook_rec_start._from_time, logbook_rec_end._to_time;
    UPDATE api.logbook
        SET
            -- Update the start logbook with the newly calculated metrics
            duration = (logbook_rec_end._to_time::TIMESTAMPTZ - logbook_rec_start._from_time::TIMESTAMPTZ),
            avg_speed = avg_rec.avg_speed,
            max_speed = avg_rec.max_speed,
            max_wind_speed = avg_rec.max_wind_speed,
            name = log_name,
            track_geom = geo_rec._track_geom,
            distance = geo_rec._track_distance,
            extra = extra_json,
            -- Set _to metrics from end logbook
            _to = logbook_rec_end._to,
            _to_moorage_id = logbook_rec_end._to_moorage_id,
            _to_lat = logbook_rec_end._to_lat,
            _to_lng = logbook_rec_end._to_lng,
            _to_time = logbook_rec_end._to_time
        WHERE id = logbook_rec_start.id;

    -- GeoJSON requires the track_geom field
    geojson := logbook_update_geojson_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
    UPDATE api.logbook
        SET
            track_geojson = geojson
        WHERE id = logbook_rec_start.id;

    -- Mark end logbook for deletion
    UPDATE api.logbook
        SET notes = 'mark for deletion'
        WHERE id = logbook_rec_end.id;
    -- Mark related stays for deletion
    UPDATE api.stays
        SET notes = 'mark for deletion'
        WHERE arrived = logbook_rec_start._to_time;
    -- Mark related moorages for deletion
    UPDATE api.moorages
        SET notes = 'mark for deletion'
        WHERE id = logbook_rec_start._to_moorage_id;

    -- Clean up, remove invalid logbook and stay, moorage entry
    DELETE FROM api.logbook WHERE id = logbook_rec_end.id;
    RAISE WARNING '-> merge_logbook_fn delete logbook id [%]', logbook_rec_end.id;
    DELETE FROM api.stays WHERE arrived = logbook_rec_start._to_time;
    RAISE WARNING '-> merge_logbook_fn delete stay arrived [%]', logbook_rec_start._to_time;
    DELETE FROM api.moorages WHERE id = logbook_rec_start._to_moorage_id;
    RAISE WARNING '-> merge_logbook_fn delete moorage id [%]', logbook_rec_start._to_moorage_id;
END;
$merge_logbook$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.merge_logbook_fn
    IS 'Merge 2 logbooks by id, from the start of the lower log id to the end of the higher log id, and update the calculated data as well (avg, geojson)';

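-- Usage sketch (placeholder ids): merge log 100 (start) with log 101 (end);
-- the end log, its stay and moorage are deleted after the merge.
--   SELECT api.merge_logbook_fn(100, 101);
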
-- Add tags to view
DROP VIEW IF EXISTS api.logs_view;
CREATE OR REPLACE VIEW api.logs_view
    WITH(security_invoker=true,security_barrier=true)
    AS SELECT id,
        name,
        _from AS "from",
        _from_time AS started,
        _to AS "to",
        _to_time AS ended,
        distance,
        duration,
        _from_moorage_id,
        _to_moorage_id,
        extra->'tags' AS tags
    FROM api.logbook l
    WHERE name IS NOT NULL AND _to_time IS NOT NULL
    ORDER BY _from_time DESC;
-- Description
COMMENT ON VIEW api.logs_view IS 'Logs web view';

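-- Usage sketch: completed logs with their tags, newest first.
--   SELECT id, name, "from", "to", tags FROM api.logs_view LIMIT 10;
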
-- Update a logbook with avg wind speed
DROP FUNCTION IF EXISTS public.logbook_update_avg_fn;
CREATE OR REPLACE FUNCTION public.logbook_update_avg_fn(
    IN _id integer,
    IN _start TEXT,
    IN _end TEXT,
    OUT avg_speed double precision,
    OUT max_speed double precision,
    OUT max_wind_speed double precision,
    OUT avg_wind_speed double precision,
    OUT count_metric integer
) AS $logbook_update_avg$
BEGIN
    RAISE NOTICE '-> logbook_update_avg_fn calculate avg for logbook id=%, start:"%", end:"%"', _id, _start, _end;
    SELECT AVG(speedoverground), MAX(speedoverground), MAX(windspeedapparent), AVG(windspeedapparent), COUNT(*) INTO
            avg_speed, max_speed, max_wind_speed, avg_wind_speed, count_metric
        FROM api.metrics m
        WHERE m.latitude IS NOT NULL
            AND m.longitude IS NOT NULL
            AND m.time >= _start::TIMESTAMPTZ
            AND m.time <= _end::TIMESTAMPTZ
            AND vessel_id = current_setting('vessel.id', false);
    RAISE NOTICE '-> logbook_update_avg_fn avg for logbook id=%, avg_speed:%, max_speed:%, avg_wind_speed:%, max_wind_speed:%, count:%', _id, avg_speed, max_speed, avg_wind_speed, max_wind_speed, count_metric;
END;
$logbook_update_avg$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_update_avg_fn
    IS 'Update logbook details with calculated average and max data, AVG(speedOverGround), MAX(speedOverGround), MAX(windspeedapparent), count_metric';

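-- Usage sketch (placeholder id and time bounds; requires vessel.id to be set
-- in the session, e.g. via set_config('vessel.id', ..., false)):
--   SELECT * FROM public.logbook_update_avg_fn(100, '2024-05-01T10:00:00Z', '2024-05-01T16:00:00Z');
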
-- Update pending new logbook from process queue
DROP FUNCTION IF EXISTS process_logbook_queue_fn;
CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void AS $process_logbook_queue$
DECLARE
    logbook_rec record;
    from_name text;
    to_name text;
    log_name text;
    from_moorage record;
    to_moorage record;
    avg_rec record;
    geo_rec record;
    log_settings jsonb;
    user_settings jsonb;
    geojson jsonb;
    extra_json jsonb;
BEGIN
    -- Ensure _id is valid
    IF _id IS NULL OR _id < 1 THEN
        RAISE WARNING '-> process_logbook_queue_fn invalid input %', _id;
        RETURN;
    END IF;
    -- Get the logbook record where all necessary fields exist
    SELECT * INTO logbook_rec
        FROM api.logbook
        WHERE active IS false
            AND id = _id
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    -- Ensure the query is successful
    IF logbook_rec.vessel_id IS NULL THEN
        RAISE WARNING '-> process_logbook_queue_fn invalid logbook %', _id;
        RETURN;
    END IF;

    PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
    --RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);

    -- Calculate logbook data average and geo
    -- Update logbook entry with the latest metric data and calculate data
    avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
    geo_rec := logbook_update_geom_distance_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);

    -- Do we have an existing moorage within 300m of the new log?
    -- generate logbook name, concat _from_location and _to_location from moorage name
    from_moorage := process_lat_lon_fn(logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
    to_moorage := process_lat_lon_fn(logbook_rec._to_lng::NUMERIC, logbook_rec._to_lat::NUMERIC);
    SELECT CONCAT(from_moorage.moorage_name, ' to ' , to_moorage.moorage_name) INTO log_name;

    -- Process `propulsion.*.runTime` and `navigation.log`
    -- Calculate extra json
    extra_json := logbook_update_extra_json_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
    -- add the avg_wind_speed
    extra_json := extra_json || jsonb_build_object('avg_wind_speed', avg_rec.avg_wind_speed);

    RAISE NOTICE 'Updating valid logbook entry logbook id:[%] start:[%] end:[%]', logbook_rec.id, logbook_rec._from_time, logbook_rec._to_time;
    UPDATE api.logbook
        SET
            duration = (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ),
            avg_speed = avg_rec.avg_speed,
            max_speed = avg_rec.max_speed,
            max_wind_speed = avg_rec.max_wind_speed,
            _from = from_moorage.moorage_name,
            _from_moorage_id = from_moorage.moorage_id,
            _to_moorage_id = to_moorage.moorage_id,
            _to = to_moorage.moorage_name,
            name = log_name,
            track_geom = geo_rec._track_geom,
            distance = geo_rec._track_distance,
            extra = extra_json,
            notes = NULL -- reset pre_log process
        WHERE id = logbook_rec.id;

    -- GeoJSON requires the track_geom geometry linestring field
    geojson := logbook_update_geojson_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
    UPDATE api.logbook
        SET
            track_geojson = geojson
        WHERE id = logbook_rec.id;

    -- GeoJSON Timelapse requires track_geojson geometry points
    -- Add properties to the geojson for timelapse purpose
    PERFORM public.logbook_timelapse_geojson_fn(logbook_rec.id);

    -- Add post logbook entry to process queue for notification and QGIS processing
    -- Required as we need the logbook to be updated with SQL commit
    INSERT INTO process_queue (channel, payload, stored, ref_id)
        VALUES ('post_logbook', logbook_rec.id, NOW(), current_setting('vessel.id', true));

END;
$process_logbook_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.process_logbook_queue_fn
    IS 'Update logbook details when completed, logbook_update_avg_fn, logbook_update_geom_distance_fn, reverse_geocode_py_fn';

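-- Usage sketch (placeholder id): post-process a completed logbook entry.
--   SELECT public.process_logbook_queue_fn(100);
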
-- Add avg_wind_speed to logbook geojson
-- Add back truewindspeed and truewinddirection to logbook geojson
DROP FUNCTION IF EXISTS public.logbook_update_geojson_fn;
CREATE FUNCTION public.logbook_update_geojson_fn(IN _id integer, IN _start text, IN _end text,
    OUT _track_geojson JSON
) AS $logbook_geojson$
declare
    log_geojson jsonb;
    metrics_geojson jsonb;
    _map jsonb;
begin
    -- GeoJson Feature Logbook linestring
    SELECT
        ST_AsGeoJSON(log.*) into log_geojson
    FROM
        ( SELECT
            id,name,
            distance,
            duration,
            avg_speed,
            max_speed,
            max_wind_speed,
            _from_time,
            _to_time,
            _from_moorage_id,
            _to_moorage_id,
            notes,
            extra['avg_wind_speed'] as avg_wind_speed,
            track_geom
            FROM api.logbook
            WHERE id = _id
        ) AS log;
    -- GeoJson Feature Metrics point
    SELECT
        json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
    FROM (
        ( SELECT
            time,
            courseovergroundtrue,
            speedoverground,
            windspeedapparent,
            longitude,latitude,
            '' AS notes,
            coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null) as truewindspeed,
            coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null) as truewinddirection,
            coalesce(status, null) as status,
            st_makepoint(longitude,latitude) AS geo_point
            FROM api.metrics m
            WHERE m.latitude IS NOT NULL
                AND m.longitude IS NOT NULL
                AND time >= _start::TIMESTAMPTZ
                AND time <= _end::TIMESTAMPTZ
                AND vessel_id = current_setting('vessel.id', false)
            ORDER BY m.time ASC
        )
    ) AS t;

    -- Merge jsonb
    SELECT log_geojson::jsonb || metrics_geojson::jsonb into _map;
    -- output
    SELECT
        json_build_object(
            'type', 'FeatureCollection',
            'features', _map
        ) into _track_geojson;
END;
$logbook_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_update_geojson_fn
    IS 'Update log details with geojson';

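-- Usage sketch (placeholder id and time bounds; requires vessel.id to be set):
--   SELECT public.logbook_update_geojson_fn(100, '2024-05-01T10:00:00Z', '2024-05-01T16:00:00Z');
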
-- Add properties to the geojson for timelapse purpose
DROP FUNCTION IF EXISTS public.logbook_timelapse_geojson_fn;
CREATE FUNCTION public.logbook_timelapse_geojson_fn(IN _id INT) returns void
AS $logbook_timelapse$
declare
    first_feature_note JSONB;
    second_feature_note JSONB;
    last_feature_note JSONB;
    logbook_rec record;
begin
    -- We need to fetch the processed logbook data.
    SELECT name,duration,distance,_from,_to INTO logbook_rec
        FROM api.logbook
        WHERE active IS false
            AND id = _id
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    --raise warning '-> logbook_rec: %', logbook_rec;
    select format('{"trip": { "name": "%s", "duration": "%s", "distance": "%s" }}', logbook_rec.name, logbook_rec.duration, logbook_rec.distance) into first_feature_note;
    select format('{"notes": "%s"}', logbook_rec._from) into second_feature_note;
    select format('{"notes": "%s"}', logbook_rec._to) into last_feature_note;
    --raise warning '-> logbook_rec: % % %', first_feature_note, second_feature_note, last_feature_note;

    -- Update the properties of feature index 1, the first Point geometry
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, 1, properties}',
            (track_geojson -> 'features' -> 1 -> 'properties' || first_feature_note)::jsonb
        )
        WHERE id = _id
            and track_geojson -> 'features' -> 1 -> 'geometry' ->> 'type' = 'Point';

    -- Update the properties of feature index 2, the second Point geometry
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, 2, properties}',
            (track_geojson -> 'features' -> 2 -> 'properties' || second_feature_note)::jsonb
        )
        where id = _id
            and track_geojson -> 'features' -> 2 -> 'geometry' ->> 'type' = 'Point';

    -- Update the properties of the last feature with a Point geometry
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, -1, properties}',
            CASE
                WHEN COALESCE((track_geojson -> 'features' -> -1 -> 'properties' ->> 'notes'), '') = '' THEN
                    (track_geojson -> 'features' -> -1 -> 'properties' || last_feature_note)::jsonb
                ELSE
                    track_geojson -> 'features' -> -1 -> 'properties'
            END
        )
        WHERE id = _id
            and track_geojson -> 'features' -> -1 -> 'geometry' ->> 'type' = 'Point';
end;
$logbook_timelapse$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_timelapse_geojson_fn
    IS 'Update logbook geojson, add properties to some geojson features for timelapse purpose';

-- CRON for signalk plugin upgrade
-- The goal is to avoid errors from old plugin versions by enforcing upgrade.
-- ERROR: there is no unique or exclusion constraint matching the ON CONFLICT specification
-- "POST /metadata?on_conflict=client_id HTTP/1.1" 400 137 "-" "postgsail.signalk v0.0.9"
DROP FUNCTION IF EXISTS public.cron_process_skplugin_upgrade_fn;
CREATE FUNCTION public.cron_process_skplugin_upgrade_fn() RETURNS void AS $skplugin_upgrade$
DECLARE
    skplugin_upgrade_rec record;
    user_settings jsonb;
BEGIN
    -- Check for signalk plugin version
    RAISE NOTICE 'cron_process_skplugin_upgrade_fn';
    FOR skplugin_upgrade_rec in
        SELECT
            v.owner_email,m.name,m.vessel_id,m.plugin_version,a.first
            FROM api.metadata m
            LEFT JOIN auth.vessels v ON v.vessel_id = m.vessel_id
            LEFT JOIN auth.accounts a ON v.owner_email = a.email
            WHERE m.plugin_version <= '0.3.0'
    LOOP
        RAISE NOTICE '-> cron_process_skplugin_upgrade_rec_fn for [%]', skplugin_upgrade_rec;
        SELECT json_build_object('email', skplugin_upgrade_rec.owner_email, 'recipient', skplugin_upgrade_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_process_skplugin_upgrade_rec_fn [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('skplugin_upgrade'::TEXT, user_settings::JSONB);
    END LOOP;
END;
$skplugin_upgrade$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_skplugin_upgrade_fn
    IS 'init by pg_cron, check for signalk plugin version and notify for upgrade';

INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('skplugin_upgrade','PostgSail Signalk plugin upgrade',E'Hello __RECIPIENT__,\nPlease upgrade your postgsail signalk plugin. Be sure to contact me if you encounter any issue.','PostgSail Signalk plugin upgrade!',E'Please upgrade your postgsail signalk plugin.');

DROP FUNCTION IF EXISTS public.metadata_ip_trigger_fn;
-- Track IP per vessel to avoid abuse
CREATE FUNCTION public.metadata_ip_trigger_fn() RETURNS trigger
AS $metadata_ip_trigger$
DECLARE
    headers json := current_setting('request.headers', true)::json;
    client_ip text := coalesce(headers->>'x-client-ip', NULL);
BEGIN
    RAISE WARNING 'metadata_ip_trigger_fn [%] [%]', current_setting('vessel.id', true), client_ip;
    IF client_ip IS NOT NULL THEN
        UPDATE api.metadata
            SET
                configuration = NEW.configuration || jsonb_build_object('ip', client_ip)
            WHERE id = NEW.id;
    END IF;
    RETURN NULL;
END;
$metadata_ip_trigger$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION public.metadata_ip_trigger_fn() IS 'Add IP from vessel in metadata, track abuse';

DROP TRIGGER IF EXISTS metadata_ip_trigger ON api.metadata;
-- Generates an error, so left disabled
--CREATE TRIGGER metadata_ip_trigger BEFORE UPDATE ON api.metadata
--    FOR EACH ROW EXECUTE FUNCTION metadata_ip_trigger_fn();
-- Description
--COMMENT ON TRIGGER
--    metadata_ip_trigger ON api.metadata
--    IS 'AFTER UPDATE ON api.metadata run function metadata_ip_trigger_fn for tracking vessel IP';

DROP FUNCTION IF EXISTS public.logbook_active_geojson_fn;
CREATE FUNCTION public.logbook_active_geojson_fn(
    OUT _track_geojson jsonb
) AS $logbook_active_geojson$
BEGIN
    WITH log_active AS (
        SELECT * FROM api.logbook WHERE active IS True
    ),
    log_gis_line AS (
        SELECT ST_MakeLine(
            ARRAY(
                SELECT st_makepoint(longitude,latitude) AS geo_point
                    FROM api.metrics m, log_active l
                    WHERE m.latitude IS NOT NULL
                        AND m.longitude IS NOT NULL
                        AND m.time >= l._from_time::TIMESTAMPTZ
                        AND m.time <= l._to_time::TIMESTAMPTZ
                    ORDER BY m.time ASC
            )
        )
    ),
    log_gis_point AS (
        SELECT
            ST_AsGeoJSON(t.*)::json AS GeoJSONPoint
        FROM (
            ( SELECT
                time,
                courseovergroundtrue,
                speedoverground,
                windspeedapparent,
                longitude,latitude,
                '' AS notes,
                coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null) as truewindspeed,
                coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null) as truewinddirection,
                coalesce(status, null) AS status,
                st_makepoint(longitude,latitude) AS geo_point
                FROM api.metrics m
                WHERE m.latitude IS NOT NULL
                    AND m.longitude IS NOT NULL
                ORDER BY m.time DESC LIMIT 1
            )
        ) as t
    ),
    log_agg as (
        SELECT
            CASE WHEN log_gis_line.st_makeline IS NOT NULL THEN
                ( SELECT jsonb_agg(ST_AsGeoJSON(log_gis_line.*)::json)::jsonb AS GeoJSONLine FROM log_gis_line )
            ELSE
                ( SELECT '[]'::json AS GeoJSONLine )::jsonb
            END AS GeoJSONLine
        FROM log_gis_line
    )
    SELECT
        jsonb_build_object(
            'type', 'FeatureCollection',
            'features', log_agg.GeoJSONLine::jsonb || log_gis_point.GeoJSONPoint::jsonb
        ) INTO _track_geojson FROM log_agg, log_gis_point;
END;
$logbook_active_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_active_geojson_fn
    IS 'Create a GeoJSON with 2 features, LineString with a current active log and Point with the last position';

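-- Usage sketch: fetch the live FeatureCollection for the currently active trip.
--   SELECT public.logbook_active_geojson_fn();
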
-- Update monitoring view to support live trip and truewindspeed and truewinddirection to stationary GeoJSON.
DROP VIEW IF EXISTS api.monitoring_view;
CREATE VIEW api.monitoring_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT
        time AS "time",
        (NOW() AT TIME ZONE 'UTC' - time) > INTERVAL '70 MINUTES' as offline,
        metrics-> 'environment.water.temperature' AS waterTemperature,
        metrics-> 'environment.inside.temperature' AS insideTemperature,
        metrics-> 'environment.outside.temperature' AS outsideTemperature,
        metrics-> 'environment.wind.speedOverGround' AS windSpeedOverGround,
        metrics-> 'environment.wind.directionTrue' AS windDirectionTrue,
        metrics-> 'environment.inside.relativeHumidity' AS insideHumidity,
        metrics-> 'environment.outside.relativeHumidity' AS outsideHumidity,
        metrics-> 'environment.outside.pressure' AS outsidePressure,
        metrics-> 'environment.inside.pressure' AS insidePressure,
        metrics-> 'electrical.batteries.House.capacity.stateOfCharge' AS batteryCharge,
        metrics-> 'electrical.batteries.House.voltage' AS batteryVoltage,
        metrics-> 'environment.depth.belowTransducer' AS depth,
        jsonb_build_object(
            'type', 'Feature',
            'geometry', ST_AsGeoJSON(st_makepoint(longitude,latitude))::jsonb,
            'properties', jsonb_build_object(
                'name', current_setting('vessel.name', false),
                'latitude', m.latitude,
                'longitude', m.longitude,
                'time', m.time,
                'speedoverground', m.speedoverground,
                'windspeedapparent', m.windspeedapparent,
                'truewindspeed', coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null),
                'truewinddirection', coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null),
                'status', coalesce(m.status, null)
            )::jsonb ) AS geojson,
        current_setting('vessel.name', false) AS name,
        m.status,
        CASE WHEN m.status <> 'moored' THEN (
            SELECT public.logbook_active_geojson_fn() )
        END AS live
    FROM api.metrics m
    ORDER BY time DESC LIMIT 1;
-- Description
COMMENT ON VIEW
    api.monitoring_view
    IS 'Monitoring static web view';

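-- Usage sketch: read the latest monitoring snapshot; `live` is populated only
-- when the vessel status is not 'moored'.
--   SELECT "time", offline, geojson, live FROM api.monitoring_view;
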
-- Allow access to tables for user_role, grafana and api_anonymous
GRANT SELECT ON ALL TABLES IN SCHEMA api TO user_role;
GRANT SELECT ON ALL TABLES IN SCHEMA api TO grafana;
GRANT SELECT ON TABLE api.monitoring_view TO user_role;
GRANT SELECT ON TABLE api.monitoring_view TO api_anonymous;
GRANT SELECT ON TABLE api.monitoring_view TO grafana;

-- Allow execution of functions for user_role, grafana and api_anonymous
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO grafana;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana;
GRANT EXECUTE ON FUNCTION public.logbook_active_geojson_fn TO api_anonymous;
GRANT EXECUTE ON FUNCTION public.metersToKnots TO api_anonymous;
GRANT EXECUTE ON FUNCTION public.radiantToDegrees TO api_anonymous;

-- Fix vessel name (Organization): ensure we have a value either from the metadata tbl (signalk) or from the vessel tbl
DROP FUNCTION IF EXISTS public.cron_process_grafana_fn;
CREATE OR REPLACE FUNCTION public.cron_process_grafana_fn() RETURNS void
AS $cron_process_grafana_fn$
DECLARE
    process_rec record;
    data_rec record;
    app_settings jsonb;
    user_settings jsonb;
BEGIN
    -- We run grafana provisioning only after the first received vessel metadata
    -- Check for new vessel metadata pending grafana provisioning
    RAISE NOTICE 'cron_process_grafana_fn';
    FOR process_rec in
        SELECT * from process_queue
            where channel = 'grafana' and processed is null
            order by stored asc
    LOOP
        RAISE NOTICE '-> cron_process_grafana_fn [%]', process_rec.payload;
        -- Gather url from app settings
        app_settings := get_app_settings_fn();
        -- Get vessel details based on metadata id
        SELECT
            v.owner_email,coalesce(m.name,v.name) as name,m.vessel_id into data_rec
            FROM auth.vessels v
            LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
            WHERE m.id = process_rec.payload::INTEGER;
        IF data_rec.vessel_id IS NULL OR data_rec.name IS NULL THEN
            RAISE WARNING '-> DEBUG cron_process_grafana_fn grafana_py_fn error [%]', data_rec;
            RETURN;
        END IF;
        -- as we got data from the vessel we can do the grafana provisioning.
        RAISE DEBUG '-> DEBUG cron_process_grafana_fn grafana_py_fn provisioning [%]', data_rec;
        PERFORM grafana_py_fn(data_rec.name, data_rec.vessel_id, data_rec.owner_email, app_settings);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(data_rec.vessel_id::TEXT);
        RAISE DEBUG '-> DEBUG cron_process_grafana_fn get_user_settings_from_vesselid_fn [%]', user_settings;
        -- add user in keycloak
        PERFORM keycloak_auth_py_fn(data_rec.vessel_id, user_settings, app_settings);
        -- Send notification
        PERFORM send_notification_fn('grafana'::TEXT, user_settings::JSONB);
        -- update process_queue entry as processed
        UPDATE process_queue
            SET
                processed = NOW()
            WHERE id = process_rec.id;
        RAISE NOTICE '-> cron_process_grafana_fn updated process_queue table [%]', process_rec.id;
    END LOOP;
END;
$cron_process_grafana_fn$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_grafana_fn
    IS 'init by pg_cron to check for new vessel pending grafana provisioning, if so perform grafana_py_fn';

-- Update version
UPDATE public.app_settings
    SET value='0.7.3'
    WHERE "name"='app.version';

\c postgres

-- Notifications/Reminders for old signalk plugin
-- At 08:06 on every 4th day-of-month if it's on Sunday.
SELECT cron.schedule('cron_skplugin_upgrade', '6 8 */4 * 0', 'select public.cron_process_skplugin_upgrade_fn()');
UPDATE cron.job SET database = 'postgres' WHERE jobname = 'cron_skplugin_upgrade';
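-- Usage sketch: verify the job registration and its target database.
--   SELECT jobid, jobname, schedule, database, active FROM cron.job WHERE jobname = 'cron_skplugin_upgrade';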
initdb/99_migrations_202406.sql (new file, 879 lines)
@@ -0,0 +1,879 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration June 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Add video timelapse notification message
INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('video_ready','PostgSail Video ready',E'Hey,\nYour video is available at __VIDEO_LINK__.\nPlease make sure you download your video as it will be deleted in 7 days.','PostgSail Video ready!',E'Your video is ready __VIDEO_LINK__.');

-- Generate and request the logbook image url to be cached on the QGIS server.
DROP FUNCTION IF EXISTS public.qgis_getmap_py_fn;
CREATE OR REPLACE FUNCTION public.qgis_getmap_py_fn(IN vessel_id TEXT DEFAULT NULL, IN log_id NUMERIC DEFAULT NULL, IN extent TEXT DEFAULT NULL, IN logs_url BOOLEAN DEFAULT False) RETURNS VOID
AS $qgis_getmap_py$
import requests

# Extract extent
def parse_extent_from_db(extent_raw):
    # Parse the extent_raw to extract coordinates
    extent = extent_raw.replace('BOX(', '').replace(')', '').split(',')
    min_x, min_y = map(float, extent[0].split())
    max_x, max_y = map(float, extent[1].split())
    return min_x, min_y, max_x, max_y

# ZoomOut from linestring extent
def apply_scale_factor(extent, scale_factor=1.125):
    min_x, min_y, max_x, max_y = extent
    center_x = (min_x + max_x) / 2
    center_y = (min_y + max_y) / 2
    width = max_x - min_x
    height = max_y - min_y
    new_width = width * scale_factor
    new_height = height * scale_factor
    scaled_extent = (
        round(center_x - new_width / 2),
        round(center_y - new_height / 2),
        round(center_x + new_width / 2),
        round(center_y + new_height / 2),
    )
    return scaled_extent

def adjust_image_size_to_bbox(extent, width, height):
    min_x, min_y, max_x, max_y = extent
    bbox_aspect_ratio = (max_x - min_x) / (max_y - min_y)
    image_aspect_ratio = width / height

    if bbox_aspect_ratio > image_aspect_ratio:
        # Adjust height to match aspect ratio
        height = width / bbox_aspect_ratio
    else:
        # Adjust width to match aspect ratio
        width = height * bbox_aspect_ratio

    return int(width), int(height)

def calculate_width(extent, fixed_height):
    min_x, min_y, max_x, max_y = extent
    bbox_aspect_ratio = (max_x - min_x) / (max_y - min_y)
    width = fixed_height * bbox_aspect_ratio
    return int(width)

def adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height):
    min_x, min_y, max_x, max_y = scaled_extent
    bbox_width = max_x - min_x
    bbox_height = max_y - min_y
    bbox_aspect_ratio = bbox_width / bbox_height
    image_aspect_ratio = fixed_width / fixed_height

    if bbox_aspect_ratio > image_aspect_ratio:
        # Adjust height to match aspect ratio
        new_bbox_height = bbox_width / image_aspect_ratio
        height_diff = new_bbox_height - bbox_height
        min_y -= height_diff / 2
        max_y += height_diff / 2
    else:
        # Adjust width to match aspect ratio
        new_bbox_width = bbox_height * image_aspect_ratio
        width_diff = new_bbox_width - bbox_width
        min_x -= width_diff / 2
        max_x += width_diff / 2

    adjusted_extent = (min_x, min_y, max_x, max_y)
    return adjusted_extent

def generate_getmap_url(server_url, project_path, layer_name, extent, width=1080, height=566, crs="EPSG:3857", format="image/png"):
    min_x, min_y, max_x, max_y = extent
    bbox = f"{min_x},{min_y},{max_x},{max_y}"

    # Adjust image size to match BBOX aspect ratio
    #width, height = adjust_image_size_to_bbox(extent, width, height)

    # Calculate width to maintain aspect ratio with fixed height
    #width = calculate_width(extent, height)

    url = (
        f"{server_url}?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&FORMAT={format}&CRS={crs}"
        f"&BBOX={bbox}&WIDTH={width}&HEIGHT={height}&LAYERS={layer_name}&MAP={project_path}"
    )
    return url

if logs_url == False:
    server_url = f"https://gis.openplotter.cloud/log_{vessel_id}_{log_id}.png".format(vessel_id, log_id)
else:
    server_url = f"https://gis.openplotter.cloud/logs_{vessel_id}_{log_id}.png".format(vessel_id, log_id)
project_path = "/projects/postgsail5.qgz"
layer_name = "OpenStreetMap,SQLLayer"
#plpy.notice('qgis_getmap_py vessel_id [{}], log_id [{}], extent [{}]'.format(vessel_id, log_id, extent))

# Parse extent and scale factor
scaled_extent = apply_scale_factor(parse_extent_from_db(extent))
#plpy.notice('qgis_getmap_py scaled_extent [{}]'.format(scaled_extent))

fixed_width = 1080
fixed_height = 566
adjusted_extent = adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height)
#plpy.notice('qgis_getmap_py adjusted_extent [{}]'.format(adjusted_extent))

getmap_url = generate_getmap_url(server_url, project_path, layer_name, adjusted_extent)
if logs_url == False:
    filter_url = f"{getmap_url}&FILTER=SQLLayer:\"vessel_id\" = '{vessel_id}' AND \"id\" = {log_id}".format(getmap_url, vessel_id, log_id)
else:
    filter_url = f"{getmap_url}&FILTER=SQLLayer:\"vessel_id\" = '{vessel_id}'".format(getmap_url, vessel_id)
#plpy.notice('qgis_getmap_py getmap_url [{}]'.format(filter_url))

# Fetch image to be cached in qgis server
headers = {"User-Agent": "PostgSail", "From": "xbgmsharp@gmail.com"}
r = requests.get(filter_url, headers=headers, timeout=100)
# Parse response
if r.status_code != 200:
    plpy.warning('Failed to get WMS image, url[{}]'.format(filter_url))
$qgis_getmap_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.qgis_getmap_py_fn
    IS 'Generate a log map, to generate the cache data for faster access later';

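-- Usage sketch (placeholder vessel_id, log id and EPSG:3857 extent):
--   SELECT public.qgis_getmap_py_fn('a-vessel-id', 100, 'BOX(690000 5930000,720000 5950000)', False);
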
-- Generate the logbook extent for the logbook image to access the QGIS server.
DROP FUNCTION IF EXISTS public.qgis_bbox_py_fn;
CREATE OR REPLACE FUNCTION public.qgis_bbox_py_fn(IN vessel_id TEXT DEFAULT NULL, IN log_id NUMERIC DEFAULT NULL, IN width NUMERIC DEFAULT 1080, IN height NUMERIC DEFAULT 566, IN scaleout BOOLEAN DEFAULT True, OUT bbox TEXT)
AS $qgis_bbox_py$
log_extent = None
# If we have a vessel_id then it is a logs image map
if vessel_id:
    # Use the shared cache to avoid preparing the log extent
    if vessel_id in SD:
        plan = SD[vessel_id]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("WITH merged AS ( SELECT ST_Union(track_geom) AS merged_geometry FROM api.logbook WHERE vessel_id = $1 ) SELECT ST_Extent(ST_Transform(merged_geometry, 3857))::TEXT FROM merged;", ["text"])
        SD[vessel_id] = plan
    # Execute the statement with the log extent param and limit to 1 result
    rv = plpy.execute(plan, [vessel_id], 1)
    log_extent = rv[0]['st_extent']
# Else we have a log_id, then it is a single log image map
else:
    # Use the shared cache to avoid preparing the log extent
    if log_id in SD:
        plan = SD[log_id]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("SELECT ST_Extent(ST_Transform(track_geom, 3857)) FROM api.logbook WHERE id = $1::NUMERIC", ["text"])
        SD[log_id] = plan
    # Execute the statement with the log extent param and limit to 1 result
    rv = plpy.execute(plan, [log_id], 1)
    log_extent = rv[0]['st_extent']

# Extract extent
def parse_extent_from_db(extent_raw):
    # Parse the extent_raw to extract coordinates
    extent = extent_raw.replace('BOX(', '').replace(')', '').split(',')
    min_x, min_y = map(float, extent[0].split())
    max_x, max_y = map(float, extent[1].split())
    return min_x, min_y, max_x, max_y

# ZoomOut from linestring extent
def apply_scale_factor(extent, scale_factor=1.125):
    min_x, min_y, max_x, max_y = extent
    center_x = (min_x + max_x) / 2
    center_y = (min_y + max_y) / 2
    width = max_x - min_x
    height = max_y - min_y
    new_width = width * scale_factor
    new_height = height * scale_factor
    scaled_extent = (
        round(center_x - new_width / 2),
        round(center_y - new_height / 2),
        round(center_x + new_width / 2),
        round(center_y + new_height / 2),
    )
    return scaled_extent

def adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height):
    min_x, min_y, max_x, max_y = scaled_extent
    bbox_width = float(max_x - min_x)
    bbox_height = float(max_y - min_y)
    bbox_aspect_ratio = float(bbox_width / bbox_height)
    image_aspect_ratio = float(fixed_width / fixed_height)

    if bbox_aspect_ratio > image_aspect_ratio:
        # Adjust height to match aspect ratio
        new_bbox_height = bbox_width / image_aspect_ratio
        height_diff = new_bbox_height - bbox_height
        min_y -= height_diff / 2
        max_y += height_diff / 2
    else:
        # Adjust width to match aspect ratio
        new_bbox_width = bbox_height * image_aspect_ratio
        width_diff = new_bbox_width - bbox_width
        min_x -= width_diff / 2
        max_x += width_diff / 2

    adjusted_extent = (min_x, min_y, max_x, max_y)
    return adjusted_extent

if not log_extent:
    plpy.warning('Failed to get sql qgis_bbox_py log_id [{}], extent [{}]'.format(log_id, log_extent))
#plpy.notice('qgis_bbox_py log_id [{}], extent [{}]'.format(log_id, log_extent))
# Parse extent and apply ZoomOut scale factor
if scaleout:
    scaled_extent = apply_scale_factor(parse_extent_from_db(log_extent))
else:
    scaled_extent = parse_extent_from_db(log_extent)
#plpy.notice('qgis_bbox_py log_id [{}], scaled_extent [{}]'.format(log_id, scaled_extent))
fixed_width = width # default 1080
fixed_height = height # default 566
adjusted_extent = adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height)
#plpy.notice('qgis_bbox_py log_id [{}], adjusted_extent [{}]'.format(log_id, adjusted_extent))
min_x, min_y, max_x, max_y = adjusted_extent
return f"{min_x},{min_y},{max_x},{max_y}"
$qgis_bbox_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.qgis_bbox_py_fn
    IS 'Generate the BBOX based on log extent and adapt extent to the image size for QGIS Server';

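-- Usage sketch (placeholder log id): compute the WMS BBOX for a single log.
--   SELECT public.qgis_bbox_py_fn(NULL, 100, 1080, 566, True);
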
-- qgis_role user and role with login, read-only on auth.accounts, limit 20 connections
CREATE ROLE qgis_role WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 20 LOGIN PASSWORD 'mysecretpassword';
COMMENT ON ROLE qgis_role IS
    'Role used by QGIS server and Apache to connect and look up the logbook table.';
-- Allow read on VIEWS on API schema
GRANT USAGE ON SCHEMA api TO qgis_role;
GRANT SELECT ON TABLE api.logbook TO qgis_role;
GRANT USAGE ON SCHEMA public TO qgis_role;
-- For all postgis fn, st_extent, st_transform
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO qgis_role;
-- Allow qgis_role to select all logbook records
CREATE POLICY logbook_qgis_role ON api.logbook TO qgis_role
    USING (true)
    WITH CHECK (false);

-- Add support for HTML email with image inline for logbook
|
||||
-- Add support for video link for maplapse
|
||||
DROP FUNCTION IF EXISTS public.send_email_py_fn;
|
||||
CREATE OR REPLACE FUNCTION public.send_email_py_fn(IN email_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
|
||||
AS $send_email_py$
|
||||
# Import smtplib for the actual sending function
|
||||
import smtplib
|
||||
import requests
|
||||
|
||||
# Import the email modules we need
|
||||
from email.message import EmailMessage
|
||||
from email.utils import formatdate,make_msgid
|
||||
from email.mime.text import MIMEText
|
||||
|
||||
# Use the shared cache to avoid preparing the email metadata
|
||||
if email_type in SD:
|
||||
plan = SD[email_type]
|
||||
# A prepared statement from Python
|
||||
else:
|
||||
plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
|
||||
SD[email_type] = plan
|
||||
|
||||
# Execute the statement with the email_type param and limit to 1 result
|
||||
rv = plpy.execute(plan, [email_type], 1)
|
||||
email_subject = rv[0]['email_subject']
|
||||
email_content = rv[0]['email_content']
|
||||
|
||||
# Replace fields using input jsonb obj
|
||||
if not _user or not app:
|
||||
plpy.notice('send_email_py_fn Parameters [{}] [{}]'.format(_user, app))
|
||||
plpy.error('Error missing parameters')
|
||||
return None
|
||||
if 'logbook_name' in _user and _user['logbook_name']:
|
||||
        email_content = email_content.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        email_content = email_content.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'logbook_img' in _user and _user['logbook_img']:
        email_content = email_content.replace('__LOGBOOK_IMG__', str(_user['logbook_img']))
    if 'video_link' in _user and _user['video_link']:
        email_content = email_content.replace('__VIDEO_LINK__', str(_user['video_link']))
    if 'recipient' in _user and _user['recipient']:
        email_content = email_content.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        email_content = email_content.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        email_content = email_content.replace('__BADGE_NAME__', _user['badge'])
    if 'otp_code' in _user and _user['otp_code']:
        email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
    if 'reset_qs' in _user and _user['reset_qs']:
        email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])
    if 'alert' in _user and _user['alert']:
        email_content = email_content.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        email_content = email_content.replace('__APP_URL__', app['app.url'])

    email_from = 'root@localhost'
    if 'app.email_from' in app and app['app.email_from']:
        email_from = 'PostgSail <' + app['app.email_from'] + '>'
    #plpy.notice('Sending email from [{}] [{}]'.format(email_from, app['app.email_from']))

    email_to = 'root@localhost'
    if 'email' in _user and _user['email']:
        email_to = _user['email']
        #plpy.notice('Sending email to [{}] [{}]'.format(email_to, _user['email']))
    else:
        plpy.error('Error email to')
        return None

    if email_type == 'logbook':
        msg = EmailMessage()
        msg.set_content(email_content)
    else:
        msg = MIMEText(email_content, 'plain', 'utf-8')
    msg["Subject"] = email_subject
    msg["From"] = email_from
    msg["To"] = email_to
    msg["Date"] = formatdate()
    msg["Message-ID"] = make_msgid()

    if email_type == 'logbook' and 'logbook_img' in _user and _user['logbook_img']:
        # Create a Content-ID for the image
        image_cid = make_msgid()
        # Set an alternative html body referencing the image by its Content-ID
        msg.add_alternative("""\
<html>
    <body>
        <p>{email_content}</p>
        <img src="cid:{image_cid}">
    </body>
</html>
""".format(email_content=email_content, image_cid=image_cid[1:-1]), subtype='html')
        img_url = 'https://gis.openplotter.cloud/{}'.format(str(_user['logbook_img']))
        response = requests.get(img_url, stream=True)
        if response.status_code == 200:
            msg.get_payload()[1].add_related(response.raw.data,
                                             maintype='image',
                                             subtype='png',
                                             cid=image_cid)

    server_smtp = 'localhost'
    if 'app.email_server' in app and app['app.email_server']:
        server_smtp = app['app.email_server']
    #plpy.notice('Sending server [{}] [{}]'.format(server_smtp, app['app.email_server']))

    # Send the message via the configured SMTP server.
    try:
        # send the message with the credentials specified above
        with smtplib.SMTP(server_smtp, 587) as server:
            if 'app.email_user' in app and app['app.email_user'] \
                and 'app.email_pass' in app and app['app.email_pass']:
                server.starttls()
                server.login(app['app.email_user'], app['app.email_pass'])
            #server.send_message(msg)
            server.sendmail(msg["From"], msg["To"], msg.as_string())
            server.quit()
        # report whether the message was sent or which errors need to be fixed
        plpy.notice('Sent email successfully to [{}] [{}]'.format(msg["To"], msg["Subject"]))
        return None
    except OSError as error:
        plpy.error('OS Error occurred: ' + str(error))
    except smtplib.SMTPConnectError:
        plpy.error('Failed to connect to the server. Bad connection settings?')
    except smtplib.SMTPServerDisconnected:
        plpy.error('Server unexpectedly disconnected. Wrong user/password?')
    except smtplib.SMTPException as e:
        plpy.error('SMTP error occurred: ' + str(e))
$send_email_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_email_py_fn
    IS 'Send email notification using plpython3u';
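
-- Example (sketch, not executed by this migration): invoke the function
-- directly for testing. The payload keys mirror the template placeholders
-- above; all values below are hypothetical, and notifications are normally
-- dispatched through send_notification_fn.
--   SELECT public.send_email_py_fn('logbook',
--       '{"email": "your.email@domain.com", "recipient": "First", "boat": "My Boat", "logbook_name": "Morning sail", "logbook_link": 1}'::jsonb,
--       '{"app.url": "http://localhost:8080", "app.email_server": "localhost", "app.email_from": "postgsail@localhost"}'::jsonb);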

-- Add vessel_id key, expose vessel_id
DROP FUNCTION IF EXISTS api.vessel_fn;
CREATE OR REPLACE FUNCTION api.vessel_fn(OUT vessel JSON) RETURNS JSON
AS $vessel$
    DECLARE
    BEGIN
        SELECT
            jsonb_build_object(
                'name', coalesce(m.name, null),
                'mmsi', coalesce(m.mmsi, null),
                'vessel_id', m.vessel_id,
                'created_at', v.created_at,
                'first_contact', coalesce(m.created_at, null),
                'last_contact', coalesce(m.time, null),
                'geojson', coalesce(ST_AsGeoJSON(geojson_t.*)::json, null)
            )::jsonb || api.vessel_details_fn()::jsonb
            INTO vessel
            FROM auth.vessels v, api.metadata m,
                ( select
                    current_setting('vessel.name') as name,
                    time,
                    courseovergroundtrue,
                    speedoverground,
                    anglespeedapparent,
                    longitude,latitude,
                    st_makepoint(longitude,latitude) AS geo_point
                    FROM api.metrics
                    WHERE
                        latitude IS NOT NULL
                        AND longitude IS NOT NULL
                        AND vessel_id = current_setting('vessel.id', false)
                    ORDER BY time DESC LIMIT 1
                ) AS geojson_t
            WHERE
                m.vessel_id = current_setting('vessel.id')
                AND m.vessel_id = v.vessel_id;
        --RAISE notice 'api.vessel_fn %', obj;
    END;
$vessel$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.vessel_fn
    IS 'Expose vessel details to API';
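
-- Example (sketch, not executed by this migration): the function reads
-- vessel.id and vessel.name from the session, which the API layer normally
-- sets; the values below are hypothetical.
--   SELECT set_config('vessel.id', 'fakevesselid', false);
--   SELECT set_config('vessel.name', 'My Boat', false);
--   SELECT api.vessel_fn();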

-- Update pending new logbook from process queue
DROP FUNCTION IF EXISTS public.process_post_logbook_fn;
CREATE OR REPLACE FUNCTION public.process_post_logbook_fn(IN _id integer) RETURNS void AS $process_post_logbook_queue$
    DECLARE
        logbook_rec record;
        log_settings jsonb;
        user_settings jsonb;
        extra_json jsonb;
        log_img_url text;
        logs_img_url text;
        extent_bbox text;
    BEGIN
        -- If _id is not NULL
        IF _id IS NULL OR _id < 1 THEN
            RAISE WARNING '-> process_post_logbook_fn invalid input %', _id;
            RETURN;
        END IF;
        -- Get the logbook record and ensure all necessary fields exist
        SELECT * INTO logbook_rec
            FROM api.logbook
            WHERE active IS false
                AND id = _id
                AND _from_lng IS NOT NULL
                AND _from_lat IS NOT NULL
                AND _to_lng IS NOT NULL
                AND _to_lat IS NOT NULL;
        -- Ensure the query is successful
        IF logbook_rec.vessel_id IS NULL THEN
            RAISE WARNING '-> process_post_logbook_fn invalid logbook %', _id;
            RETURN;
        END IF;

        PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
        --RAISE WARNING 'public.process_post_logbook_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);

        -- Generate logbook image map name from QGIS
        SELECT CONCAT('log_', logbook_rec.vessel_id::TEXT, '_', logbook_rec.id, '.png') INTO log_img_url;
        SELECT ST_Extent(ST_Transform(logbook_rec.track_geom, 3857))::TEXT AS envelope INTO extent_bbox FROM api.logbook WHERE id = logbook_rec.id;
        PERFORM public.qgis_getmap_py_fn(logbook_rec.vessel_id::TEXT, logbook_rec.id, extent_bbox::TEXT, False);
        -- Generate logs image map name from QGIS
        WITH merged AS (
            SELECT ST_Union(logbook_rec.track_geom) AS merged_geometry
                FROM api.logbook WHERE vessel_id = logbook_rec.vessel_id
        )
        SELECT ST_Extent(ST_Transform(merged_geometry, 3857))::TEXT AS envelope INTO extent_bbox FROM merged;
        SELECT CONCAT('logs_', logbook_rec.vessel_id::TEXT, '_', logbook_rec.id, '.png') INTO logs_img_url;
        PERFORM public.qgis_getmap_py_fn(logbook_rec.vessel_id::TEXT, logbook_rec.id, extent_bbox::TEXT, True);

        -- Prepare notification, gather user settings
        SELECT json_build_object('logbook_name', logbook_rec.name, 'logbook_link', logbook_rec.id, 'logbook_img', log_img_url) INTO log_settings;
        user_settings := get_user_settings_from_vesselid_fn(logbook_rec.vessel_id::TEXT);
        SELECT user_settings::JSONB || log_settings::JSONB into user_settings;
        RAISE NOTICE '-> debug process_post_logbook_fn get_user_settings_from_vesselid_fn [%]', user_settings;
        RAISE NOTICE '-> debug process_post_logbook_fn log_settings [%]', log_settings;
        -- Send notification
        PERFORM send_notification_fn('logbook'::TEXT, user_settings::JSONB);
        -- Process badges
        RAISE NOTICE '-> debug process_post_logbook_fn user_settings [%]', user_settings->>'email'::TEXT;
        PERFORM set_config('user.email', user_settings->>'email'::TEXT, false);
        PERFORM badges_logbook_fn(logbook_rec.id, logbook_rec._to_time::TEXT);
        PERFORM badges_geom_fn(logbook_rec.id, logbook_rec._to_time::TEXT);
    END;
$process_post_logbook_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.process_post_logbook_fn
    IS 'Generate QGIS image and Notify user for new logbook.';
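
-- Example (sketch, not executed by this migration): reprocess a single logbook
-- by id, e.g. to regenerate its QGIS images; the id below is hypothetical.
--   SELECT public.process_post_logbook_fn(42);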

-- Check for new logbook pending notification
DROP FUNCTION IF EXISTS public.cron_process_post_logbook_fn;
CREATE FUNCTION public.cron_process_post_logbook_fn() RETURNS void AS $$
    DECLARE
        process_rec record;
    BEGIN
        -- Check for new logbook pending update
        RAISE NOTICE 'cron_process_post_logbook_fn init loop';
        FOR process_rec in
            SELECT * FROM process_queue
                WHERE channel = 'post_logbook' AND processed IS NULL
                ORDER BY stored ASC LIMIT 100
        LOOP
            RAISE NOTICE 'cron_process_post_logbook_fn processing queue [%] for logbook id [%]', process_rec.id, process_rec.payload;
            -- update logbook
            PERFORM process_post_logbook_fn(process_rec.payload::INTEGER);
            -- update process_queue table, mark entry as processed
            UPDATE process_queue
                SET
                    processed = NOW()
                WHERE id = process_rec.id;
            RAISE NOTICE 'cron_process_post_logbook_fn processed queue [%] for logbook id [%]', process_rec.id, process_rec.payload;
        END LOOP;
    END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_post_logbook_fn
    IS 'init by pg_cron to check for new logbook pending qgis and notification, after process_new_logbook_fn';

DROP FUNCTION IF EXISTS public.run_cron_jobs;
CREATE FUNCTION public.run_cron_jobs() RETURNS void AS $$
    BEGIN
        -- In correct order
        perform public.cron_process_new_notification_fn();
        perform public.cron_process_monitor_online_fn();
        --perform public.cron_process_grafana_fn();
        perform public.cron_process_pre_logbook_fn();
        perform public.cron_process_new_logbook_fn();
        perform public.cron_process_post_logbook_fn();
        perform public.cron_process_new_stay_fn();
        --perform public.cron_process_new_moorage_fn();
        perform public.cron_process_monitor_offline_fn();
    END
$$ language plpgsql;

DROP VIEW IF EXISTS api.eventlogs_view;
CREATE VIEW api.eventlogs_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT pq.*
        FROM public.process_queue pq
        WHERE channel <> 'pre_logbook'
            AND channel <> 'post_logbook'
            AND (ref_id = current_setting('user.id', true)
                OR ref_id = current_setting('vessel.id', true))
        ORDER BY id ASC;
-- Description
COMMENT ON VIEW
    api.eventlogs_view
    IS 'Event logs view';
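
-- Example (sketch, not executed by this migration): the view filters on
-- user.id/vessel.id from the session, which the API layer normally sets, so a
-- plain query only returns the caller's own entries.
--   SELECT * FROM api.eventlogs_view;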

-- CRON for new video notification
DROP FUNCTION IF EXISTS public.cron_process_new_video_fn;
CREATE FUNCTION public.cron_process_new_video_fn() RETURNS void AS $$
    DECLARE
        process_rec record;
        metadata_rec record;
        video_settings jsonb;
        user_settings jsonb;
    BEGIN
        -- Check for new event notification pending update
        RAISE NOTICE 'cron_process_new_video_fn';
        FOR process_rec in
            SELECT * FROM process_queue
                WHERE channel = 'new_video'
                    AND processed IS NULL
                ORDER BY stored ASC
        LOOP
            RAISE NOTICE '-> cron_process_new_video_fn for [%]', process_rec.payload;
            SELECT * INTO metadata_rec
                FROM api.metadata
                WHERE vessel_id = process_rec.ref_id::TEXT;

            IF metadata_rec.vessel_id IS NULL OR metadata_rec.vessel_id = '' THEN
                RAISE WARNING '-> cron_process_new_video_fn invalid metadata record vessel_id %', process_rec.ref_id;
                RAISE EXCEPTION 'Invalid metadata'
                    USING HINT = 'Unknown vessel_id';
                RETURN;
            END IF;
            PERFORM set_config('vessel.id', metadata_rec.vessel_id, false);
            RAISE DEBUG '-> DEBUG cron_process_new_video_fn vessel_id %', current_setting('vessel.id', false);
            -- Prepare notification, gather user settings
            SELECT json_build_object('video_link', CONCAT('https://videos.openplotter.cloud/', process_rec.payload)) into video_settings;
            -- Gather user settings
            user_settings := get_user_settings_from_vesselid_fn(metadata_rec.vessel_id::TEXT);
            SELECT user_settings::JSONB || video_settings::JSONB into user_settings;
            RAISE DEBUG '-> DEBUG cron_process_new_video_fn get_user_settings_from_vesselid_fn [%]', user_settings;
            -- Send notification
            PERFORM send_notification_fn('video_ready'::TEXT, user_settings::JSONB);
            -- update process_queue entry as processed
            UPDATE process_queue
                SET
                    processed = NOW()
                WHERE id = process_rec.id;
            RAISE NOTICE '-> cron_process_new_video_fn updated process_queue table [%]', process_rec.id;
        END LOOP;
    END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_new_video_fn
    IS 'init by pg_cron to check for new video event pending notifications, if so perform process_notification_queue_fn';
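
-- Example (sketch, not executed by this migration): the external video worker
-- is expected to queue an entry like the one below once a file is rendered;
-- the filename and vessel id are hypothetical.
--   INSERT INTO public.process_queue (channel, payload, stored, ref_id)
--       VALUES ('new_video', 'maplapse_1234.mp4', NOW(), 'fakevesselid');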

-- Add support for video link for maplapse
DROP FUNCTION IF EXISTS public.send_pushover_py_fn;
CREATE OR REPLACE FUNCTION public.send_pushover_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_pushover_py$
    """
    https://pushover.net/api#messages
    Send a notification to a pushover user
    """
    import requests

    # Use the shared cache to avoid preparing the message template
    if message_type in SD:
        plan = SD[message_type]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[message_type] = plan

    # Execute the statement with the message_type param and limit to 1 result
    rv = plpy.execute(plan, [message_type], 1)
    pushover_title = rv[0]['pushover_title']
    pushover_message = rv[0]['pushover_message']

    # Replace fields using input jsonb obj
    if 'logbook_name' in _user and _user['logbook_name']:
        pushover_message = pushover_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        pushover_message = pushover_message.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'video_link' in _user and _user['video_link']:
        pushover_message = pushover_message.replace('__VIDEO_LINK__', str(_user['video_link']))
    if 'recipient' in _user and _user['recipient']:
        pushover_message = pushover_message.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        pushover_message = pushover_message.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        pushover_message = pushover_message.replace('__BADGE_NAME__', _user['badge'])
    if 'alert' in _user and _user['alert']:
        pushover_message = pushover_message.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        pushover_message = pushover_message.replace('__APP_URL__', app['app.url'])

    pushover_token = None
    if 'app.pushover_app_token' in app and app['app.pushover_app_token']:
        pushover_token = app['app.pushover_app_token']
    else:
        plpy.error('Error no pushover token defined, check app settings')
        return None
    pushover_user = None
    if 'pushover_user_key' in _user and _user['pushover_user_key']:
        pushover_user = _user['pushover_user_key']
    else:
        plpy.error('Error no pushover user token defined, check user settings')
        return None

    r = None
    if message_type == 'logbook' and 'logbook_img' in _user and _user['logbook_img']:
        # Send notification with gis image logbook as attachment
        img_url = 'https://gis.openplotter.cloud/{}'.format(str(_user['logbook_img']))
        response = requests.get(img_url, stream=True)
        if response.status_code == 200:
            r = requests.post("https://api.pushover.net/1/messages.json", data = {
                "token": pushover_token,
                "user": pushover_user,
                "title": pushover_title,
                "message": pushover_message
            }, files = {
                "attachment": (str(_user['logbook_img']), response.raw.data, "image/png")
            })
    if r is None:
        # Plain notification, also the fallback when the image could not be fetched
        r = requests.post("https://api.pushover.net/1/messages.json", data = {
            "token": pushover_token,
            "user": pushover_user,
            "title": pushover_title,
            "message": pushover_message
        })

    #print(r.text)
    #plpy.notice('Sent pushover successfully to [{}] [{}]'.format(r.text, r.status_code))
    if r.status_code == 200:
        plpy.notice('Sent pushover successfully to [{}] [{}] [{}]'.format(pushover_user, pushover_title, r.text))
    else:
        plpy.error('Failed to send pushover')
    return None
$send_pushover_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_pushover_py_fn
    IS 'Send pushover notification using plpython3u';
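
-- Example (sketch, not executed by this migration): send a test notification;
-- the template name, tokens and keys below are hypothetical.
--   SELECT public.send_pushover_py_fn('alert',
--       '{"pushover_user_key": "fakeuserkey", "recipient": "First", "boat": "My Boat", "alert": "high_wind_speed_threshold value:32"}'::jsonb,
--       '{"app.pushover_app_token": "fakeapptoken", "app.url": "http://localhost:8080"}'::jsonb);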

-- Add support for video link for maplapse
DROP FUNCTION IF EXISTS public.send_telegram_py_fn;
CREATE OR REPLACE FUNCTION public.send_telegram_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_telegram_py$
    """
    https://core.telegram.org/bots/api#sendmessage
    Send a message to a telegram user or group specified by chat_id
    chat_id must be a number!
    """
    import requests
    import json

    # Use the shared cache to avoid preparing the message template
    if message_type in SD:
        plan = SD[message_type]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[message_type] = plan

    # Execute the statement with the message_type param and limit to 1 result
    # Note: telegram reuses the pushover template columns
    rv = plpy.execute(plan, [message_type], 1)
    telegram_title = rv[0]['pushover_title']
    telegram_message = rv[0]['pushover_message']

    # Replace fields using input jsonb obj
    if 'logbook_name' in _user and _user['logbook_name']:
        telegram_message = telegram_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        telegram_message = telegram_message.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'video_link' in _user and _user['video_link']:
        telegram_message = telegram_message.replace('__VIDEO_LINK__', str(_user['video_link']))
    if 'recipient' in _user and _user['recipient']:
        telegram_message = telegram_message.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        telegram_message = telegram_message.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        telegram_message = telegram_message.replace('__BADGE_NAME__', _user['badge'])
    if 'alert' in _user and _user['alert']:
        telegram_message = telegram_message.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        telegram_message = telegram_message.replace('__APP_URL__', app['app.url'])

    telegram_token = None
    if 'app.telegram_bot_token' in app and app['app.telegram_bot_token']:
        telegram_token = app['app.telegram_bot_token']
    else:
        plpy.error('Error no telegram token defined, check app settings')
        return None
    telegram_chat_id = None
    if 'telegram_chat_id' in _user and _user['telegram_chat_id']:
        telegram_chat_id = _user['telegram_chat_id']
    else:
        plpy.error('Error no telegram user token defined, check user settings')
        return None

    # sendMessage via requests
    headers = {'Content-Type': 'application/json',
               'Proxy-Authorization': 'Basic base64'}
    data_dict = {'chat_id': telegram_chat_id,
                 'text': telegram_message,
                 'parse_mode': 'HTML',
                 'disable_notification': False}
    data = json.dumps(data_dict)
    url = f'https://api.telegram.org/bot{telegram_token}/sendMessage'
    r = requests.post(url, data=data, headers=headers)
    if message_type == 'logbook' and 'logbook_img' in _user and _user['logbook_img']:
        # Send gis image logbook
        # https://core.telegram.org/bots/api#sendphoto
        data_dict['photo'] = 'https://gis.openplotter.cloud/{}'.format(str(_user['logbook_img']))
        del data_dict['text']
        data = json.dumps(data_dict)
        url = f'https://api.telegram.org/bot{telegram_token}/sendPhoto'
        r = requests.post(url, data=data, headers=headers)

    #print(r.text)
    #plpy.notice('Sent telegram successfully to [{}] [{}]'.format(r.text, r.status_code))
    if r.status_code == 200:
        plpy.notice('Sent telegram successfully to [{}] [{}] [{}]'.format(telegram_chat_id, telegram_title, r.text))
    else:
        plpy.error('Failed to send telegram')
    return None
$send_telegram_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_telegram_py_fn
    IS 'Send a message to a telegram user or group specified on chatId using plpython3u';

-- Add maplapse video record in queue
DROP FUNCTION IF EXISTS api.maplapse_record_fn;
CREATE OR REPLACE FUNCTION api.maplapse_record_fn(IN maplapse TEXT) RETURNS BOOLEAN
AS $maplapse_record$
    BEGIN
        -- payload: 'Bromera,?start_log=8430&end_log=8491&height=100vh'
        IF maplapse ~ '^(\w+)\,\?(start_log=\d+).*$' then
            INSERT INTO process_queue (channel, payload, stored, ref_id)
                VALUES ('maplapse_video', maplapse, NOW(), current_setting('vessel.id', true));
            RETURN True;
        ELSE
            RETURN False;
        END IF;
    END;
$maplapse_record$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.maplapse_record_fn
    IS 'Add maplapse video record in queue';
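
-- Example (sketch, not executed by this migration): queue a maplapse video for
-- logs 8430..8491, using the payload format documented above. Requires
-- vessel.id to be set in the session, normally done by the API layer.
--   SELECT api.maplapse_record_fn('Bromera,?start_log=8430&end_log=8491&height=100vh');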

CREATE ROLE maplapse_role WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10 LOGIN PASSWORD 'mysecretpassword';
COMMENT ON ROLE maplapse_role IS
    'Role used by the maplapse external cronjob to connect and look up the process_queue table.';
GRANT USAGE ON SCHEMA public TO maplapse_role;
GRANT SELECT,UPDATE,INSERT ON TABLE public.process_queue TO maplapse_role;
GRANT USAGE, SELECT ON SEQUENCE public.process_queue_id_seq TO maplapse_role;
-- Allow maplapse_role to select,update,insert on tbl process_queue
CREATE POLICY public_maplapse_role ON public.process_queue TO maplapse_role
    USING (true)
    WITH CHECK (true);
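
-- Example (sketch, not executed by this migration): how the external maplapse
-- worker is expected to poll the queue and mark an entry processed once a
-- video has been rendered; the id below is hypothetical.
--   SELECT id, payload FROM public.process_queue
--       WHERE channel = 'maplapse_video' AND processed IS NULL ORDER BY stored ASC;
--   UPDATE public.process_queue SET processed = NOW() WHERE id = 1;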

-- Allow user_role and grafana to execute functions
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO grafana;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana;
GRANT SELECT ON TABLE api.eventlogs_view TO user_role;

-- Update grafana role SQL connection limit to 30
ALTER ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;

-- Alter moorages table with a default stay duration of 0.
ALTER TABLE api.moorages ALTER COLUMN stay_duration SET DEFAULT 'PT0S';
-- Update moorage_view to default to a duration of 0
DROP VIEW IF EXISTS api.moorage_view;
CREATE OR REPLACE VIEW api.moorage_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
    SELECT id,
        m.name AS Name,
        sa.description AS Default_Stay,
        sa.stay_code AS Default_Stay_Id,
        m.home_flag AS Home,
        EXTRACT(DAY FROM justify_hours ( COALESCE(m.stay_duration, 'PT0S') )) AS Total_Stay,
        COALESCE(m.stay_duration, 'PT0S') AS Total_Duration,
        m.reference_count AS Arrivals_Departures,
        m.notes
        -- m.geog
    FROM api.moorages m, api.stays_at sa
    -- m.stay_duration is only processed during a stay,
    -- so it defaults to a duration of 0 seconds
    WHERE geog IS NOT NULL
        AND m.stay_code = sa.stay_code;
-- Description
COMMENT ON VIEW
    api.moorage_view
    IS 'Moorage details web view';

-- Update version
UPDATE public.app_settings
    SET value='0.7.4'
    WHERE "name"='app.version';

\c postgres

-- Create a job every 7 minutes for cron_process_post_logbook_fn
SELECT cron.schedule('cron_post_logbook', '*/7 * * * *', 'select public.cron_process_post_logbook_fn()');
UPDATE cron.job SET database = 'signalk' where jobname = 'cron_post_logbook';
-- Create a job every 15 minutes for cron_process_new_video_fn
SELECT cron.schedule('cron_new_video', '*/15 * * * *', 'select public.cron_process_new_video_fn()');
UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_video';

initdb/99_migrations_202407.sql (new file, mode 755)
@@ -0,0 +1,755 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration July 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Add video error notification message
INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('video_error','PostgSail Video Error',E'Hey,\nSorry we could not generate your video.\nPlease reach out to debug and solve the issue.','PostgSail Video Error!',E'There has been an error with your video.');

-- CRON for new video or video error notification
DROP FUNCTION IF EXISTS public.cron_process_new_video_fn;
CREATE FUNCTION public.cron_process_video_fn() RETURNS void AS $cron_process_video$
    DECLARE
        process_rec record;
        metadata_rec record;
        video_settings jsonb;
        user_settings jsonb;
    BEGIN
        -- Check for new event notification pending update
        RAISE NOTICE 'cron_process_video_fn';
        FOR process_rec in
            SELECT * FROM process_queue
                WHERE (channel = 'new_video' OR channel = 'error_video')
                    AND processed IS NULL
                ORDER BY stored ASC
        LOOP
            RAISE NOTICE '-> cron_process_video_fn for [%]', process_rec.payload;
            SELECT * INTO metadata_rec
                FROM api.metadata
                WHERE vessel_id = process_rec.ref_id::TEXT;

            IF metadata_rec.vessel_id IS NULL OR metadata_rec.vessel_id = '' THEN
                RAISE WARNING '-> cron_process_video_fn invalid metadata record vessel_id %', process_rec.ref_id;
                RAISE EXCEPTION 'Invalid metadata'
                    USING HINT = 'Unknown vessel_id';
                RETURN;
            END IF;
            PERFORM set_config('vessel.id', metadata_rec.vessel_id, false);
            RAISE DEBUG '-> DEBUG cron_process_video_fn vessel_id %', current_setting('vessel.id', false);
            -- Prepare notification, gather user settings
            SELECT json_build_object('video_link', CONCAT('https://videos.openplotter.cloud/', process_rec.payload)) into video_settings;
            -- Gather user settings
            user_settings := get_user_settings_from_vesselid_fn(metadata_rec.vessel_id::TEXT);
            SELECT user_settings::JSONB || video_settings::JSONB into user_settings;
            RAISE DEBUG '-> DEBUG cron_process_video_fn get_user_settings_from_vesselid_fn [%]', user_settings;
            -- Send notification
            IF process_rec.channel = 'new_video' THEN
                PERFORM send_notification_fn('video_ready'::TEXT, user_settings::JSONB);
            ELSE
                PERFORM send_notification_fn('video_error'::TEXT, user_settings::JSONB);
            END IF;
            -- update process_queue entry as processed
            UPDATE process_queue
                SET
                    processed = NOW()
                WHERE id = process_rec.id;
            RAISE NOTICE '-> cron_process_video_fn updated process_queue table [%]', process_rec.id;
        END LOOP;
    END;
$cron_process_video$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_video_fn
    IS 'init by pg_cron to check for new video event pending notifications, if so perform process_notification_queue_fn';
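
-- Example (sketch, not executed by this migration): on rendering failure the
-- external worker is expected to queue an error event instead, consumed by the
-- error_video branch above; payload and vessel id are hypothetical.
--   INSERT INTO public.process_queue (channel, payload, stored, ref_id)
--       VALUES ('error_video', 'maplapse_1234.mp4', NOW(), 'fakevesselid');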

-- Fix error when stateOfCharge is null: treat a null stateOfCharge as fully charged (1).
DROP FUNCTION IF EXISTS public.cron_alerts_fn();
CREATE OR REPLACE FUNCTION public.cron_alerts_fn() RETURNS void AS $cron_alerts$
    DECLARE
        alert_rec record;
        default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
        last_metric TIMESTAMPTZ;
        metric_rec record;
        app_settings JSONB;
        user_settings JSONB;
        alerting JSONB;
        _alarms JSONB;
        alarms TEXT;
        alert_default JSONB := '{
            "low_pressure_threshold": 990,
            "high_wind_speed_threshold": 30,
            "low_water_depth_threshold": 1,
            "min_notification_interval": 6,
            "high_pressure_drop_threshold": 12,
            "low_battery_charge_threshold": 90,
            "low_battery_voltage_threshold": 12.5,
            "low_water_temperature_threshold": 10,
            "low_indoor_temperature_threshold": 7,
            "low_outdoor_temperature_threshold": 3
        }';
    BEGIN
        -- Check for new event notification pending update
        RAISE NOTICE 'cron_alerts_fn';
        FOR alert_rec in
            SELECT
                a.user_id,a.email,v.vessel_id,
                COALESCE((a.preferences->'alert_last_metric')::TEXT, default_last_metric::TEXT) as last_metric,
                (alert_default || (a.preferences->'alerting')::JSONB) as alerting,
                (a.preferences->'alarms')::JSONB as alarms
                FROM auth.accounts a
                LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
                LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
                WHERE (a.preferences->'alerting'->'enabled')::boolean = True
                    AND m.active = True
        LOOP
            RAISE NOTICE '-> cron_alerts_fn for [%]', alert_rec;
            PERFORM set_config('vessel.id', alert_rec.vessel_id, false);
            PERFORM set_config('user.email', alert_rec.email, false);
            --RAISE WARNING 'public.cron_process_alert_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
            -- Gather user settings
            user_settings := get_user_settings_from_vesselid_fn(alert_rec.vessel_id::TEXT);
            RAISE NOTICE '-> cron_alerts_fn checking user_settings [%]', user_settings;
            -- Get all metrics since last_metric, averaged into 5 minute buckets
            FOR metric_rec in
                SELECT time_bucket('5 minutes', m.time) AS time_bucket,
                        avg((m.metrics->'environment.inside.temperature')::numeric) AS intemp,
                        avg((m.metrics->'environment.outside.temperature')::numeric) AS outtemp,
                        avg((m.metrics->'environment.water.temperature')::numeric) AS wattemp,
                        avg((m.metrics->'environment.depth.belowTransducer')::numeric) AS watdepth,
                        avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
                        avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
                        avg((m.metrics->'electrical.batteries.House.voltage')::numeric) AS voltage,
                        avg(coalesce((m.metrics->>'electrical.batteries.House.capacity.stateOfCharge')::numeric, 1)) AS charge
                    FROM api.metrics m
                    WHERE vessel_id = alert_rec.vessel_id
                        AND m.time >= alert_rec.last_metric::TIMESTAMPTZ
                    GROUP BY time_bucket
                    ORDER BY time_bucket ASC LIMIT 100
            LOOP
                RAISE NOTICE '-> cron_alerts_fn checking metrics [%]', metric_rec;
                RAISE NOTICE '-> cron_alerts_fn checking alerting [%]', alert_rec.alerting;
                --RAISE NOTICE '-> cron_alerts_fn checking debug [%] [%]', kelvinToCel(metric_rec.intemp), (alert_rec.alerting->'low_indoor_temperature_threshold');
                IF kelvinToCel(metric_rec.intemp) < (alert_rec.alerting->'low_indoor_temperature_threshold')::numeric THEN
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                    -- Get latest alarms
                    SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                    -- Is the alarm older than the min_notification_interval time frame
                    IF (
                        ((_alarms->'low_indoor_temperature_threshold'->>'date') IS NULL) OR
                        (((_alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                        ) THEN
                        -- Add alarm
                        alarms := '{"low_indoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.intemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                        -- Merge alarms
                        SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                        -- Update alarms for user
                        PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                        -- Gather user settings
                        user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                        SELECT user_settings::JSONB || ('{"alert": "low_indoor_temperature_threshold value:'|| kelvinToCel(metric_rec.intemp) ||'"}'::text)::JSONB into user_settings;
                        -- Send notification
                        PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                        -- DEBUG
                        RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold +interval';
                    END IF;
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold';
                END IF;
                IF kelvinToCel(metric_rec.outtemp) < (alert_rec.alerting->'low_outdoor_temperature_threshold')::numeric THEN
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                    -- Get latest alarms
                    SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                    -- Is the alarm older than the min_notification_interval time frame
                    IF (
                        ((_alarms->'low_outdoor_temperature_threshold'->>'date') IS NULL) OR
                        (((_alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                        ) THEN
                        -- Add alarm
                        alarms := '{"low_outdoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.outtemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                        -- Merge alarms
                        SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                        -- Update alarms for user
                        PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                        -- Gather user settings
                        user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                        SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.outtemp) ||'"}'::text)::JSONB into user_settings;
                        -- Send notification
                        PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                        -- DEBUG
                        RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold +interval';
                    END IF;
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold';
                END IF;
                IF kelvinToCel(metric_rec.wattemp) < (alert_rec.alerting->'low_water_temperature_threshold')::numeric THEN
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ;
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                    -- Get latest alarms
                    SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                    -- Is the alarm older than the min_notification_interval time frame
                    IF (
                        ((_alarms->'low_water_temperature_threshold'->>'date') IS NULL) OR
                        (((_alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                        ) THEN
                        -- Add alarm
                        alarms := '{"low_water_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.wattemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                        -- Merge alarms
                        SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                        -- Update alarms for user
                        PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                        -- Gather user settings
                        user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                        SELECT user_settings::JSONB || ('{"alert": "low_water_temperature_threshold value:'|| kelvinToCel(metric_rec.wattemp) ||'"}'::text)::JSONB into user_settings;
                        -- Send notification
                        PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                        -- DEBUG
                        RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold +interval';
                    END IF;
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold';
                END IF;
                IF metric_rec.watdepth < (alert_rec.alerting->'low_water_depth_threshold')::numeric THEN
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ;
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                    -- Get latest alarms
                    SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                    -- Is the alarm older than the min_notification_interval time frame
                    IF (
                        ((_alarms->'low_water_depth_threshold'->>'date') IS NULL) OR
                        (((_alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                        ) THEN
                        -- Add alarm
                        alarms := '{"low_water_depth_threshold": {"value": '|| metric_rec.watdepth ||', "date":"' || metric_rec.time_bucket || '"}}';
                        -- Merge alarms
                        SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                        -- Update alarms for user
                        PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                        -- Gather user settings
                        user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                        SELECT user_settings::JSONB || ('{"alert": "low_water_depth_threshold value:'|| metric_rec.watdepth ||'"}'::text)::JSONB into user_settings;
                        -- Send notification
                        PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                        -- DEBUG
                        RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold +interval';
                    END IF;
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold';
                END IF;
                IF metric_rec.pressure < (alert_rec.alerting->'high_pressure_drop_threshold')::numeric THEN
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ;
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                    -- Get latest alarms
                    SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                    -- Is the alarm older than the min_notification_interval time frame
                    IF (
                        ((_alarms->'high_pressure_drop_threshold'->>'date') IS NULL) OR
                        (((_alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                        ) THEN
                        -- Add alarm
                        alarms := '{"high_pressure_drop_threshold": {"value": '|| metric_rec.pressure ||', "date":"' || metric_rec.time_bucket || '"}}';
                        -- Merge alarms
                        SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                        -- Update alarms for user
                        PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                        -- Gather user settings
                        user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                        SELECT user_settings::JSONB || ('{"alert": "high_pressure_drop_threshold value:'|| metric_rec.pressure ||'"}'::text)::JSONB into user_settings;
                        -- Send notification
                        PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                        -- DEBUG
                        RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold +interval';
                    END IF;
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold';
                END IF;
                IF metric_rec.wind > (alert_rec.alerting->'high_wind_speed_threshold')::numeric THEN
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ;
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                    -- Get latest alarms
                    SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                    -- Is the alarm older than the min_notification_interval time frame
                    IF (
                        ((_alarms->'high_wind_speed_threshold'->>'date') IS NULL) OR
                        (((_alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                        ) THEN
                        -- Add alarm
                        alarms := '{"high_wind_speed_threshold": {"value": '|| metric_rec.wind ||', "date":"' || metric_rec.time_bucket || '"}}';
                        -- Merge alarms
                        SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                        -- Update alarms for user
                        PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                        -- Gather user settings
                        user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                        SELECT user_settings::JSONB || ('{"alert": "high_wind_speed_threshold value:'|| metric_rec.wind ||'"}'::text)::JSONB into user_settings;
                        -- Send notification
                        PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                        -- DEBUG
                        RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold +interval';
                    END IF;
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold';
                END IF;
                IF metric_rec.voltage < (alert_rec.alerting->'low_battery_voltage_threshold')::numeric THEN
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ;
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                    -- Get latest alarms
                    SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                    -- Is the alarm older than the min_notification_interval time frame
                    IF (
                        ((_alarms->'low_battery_voltage_threshold'->>'date') IS NULL) OR
                        (((_alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                        ) THEN
                        -- Add alarm
                        alarms := '{"low_battery_voltage_threshold": {"value": '|| metric_rec.voltage ||', "date":"' || metric_rec.time_bucket || '"}}';
                        -- Merge alarms
                        SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                        -- Update alarms for user
                        PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                        -- Gather user settings
                        user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                        SELECT user_settings::JSONB || ('{"alert": "low_battery_voltage_threshold value:'|| metric_rec.voltage ||'"}'::text)::JSONB into user_settings;
                        -- Send notification
                        PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                        -- DEBUG
                        RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold +interval';
                    END IF;
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold';
                END IF;
                IF (metric_rec.charge*100) < (alert_rec.alerting->'low_battery_charge_threshold')::numeric THEN
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ;
                    RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                    -- Get latest alarms
                    SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                    -- Is the alarm older than the min_notification_interval time frame
                    IF (
                        ((_alarms->'low_battery_charge_threshold'->>'date') IS NULL) OR
                        (((_alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ
                        + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                        < metric_rec.time_bucket::TIMESTAMPTZ)
                        ) THEN
                        -- Add alarm
                        alarms := '{"low_battery_charge_threshold": {"value": '|| (metric_rec.charge*100) ||', "date":"' || metric_rec.time_bucket || '"}}';
                        -- Merge alarms
                        SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                        -- Update alarms for user
                        PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                        -- Gather user settings
                        user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                        SELECT user_settings::JSONB || ('{"alert": "low_battery_charge_threshold value:'|| (metric_rec.charge*100) ||'"}'::text)::JSONB into user_settings;
                        -- Send notification
                        PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                        -- DEBUG
                        RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold +interval';
                    END IF;
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold';
                END IF;
                -- Record last metrics time
                SELECT metric_rec.time_bucket INTO last_metric;
            END LOOP;
            PERFORM api.update_user_preferences_fn('{alert_last_metric}'::TEXT, last_metric::TEXT);
        END LOOP;
    END;
$cron_alerts$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_alerts_fn
    IS 'init by pg_cron to check for alerts';
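
-- Example (sketch, not executed by this migration): enable alerting for an
-- account; thresholds not set here fall back to alert_default above. Note the
-- top-level || merge replaces the whole alerting key. The email is hypothetical.
--   UPDATE auth.accounts
--       SET preferences = preferences || '{"alerting": {"enabled": true, "low_battery_charge_threshold": 80}}'::jsonb
--       WHERE email = 'your.email@domain.com';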

-- Fix error: None of these media types are available: text/xml
DROP FUNCTION IF EXISTS api.export_logbooks_gpx_fn;
CREATE OR REPLACE FUNCTION api.export_logbooks_gpx_fn(
    IN start_log INTEGER DEFAULT NULL,
    IN end_log INTEGER DEFAULT NULL) RETURNS "text/xml"
AS $export_logbooks_gpx$
    DECLARE
        merged_jsonb jsonb;
        app_settings jsonb;
    BEGIN
        -- Merge GIS track_geom of geometry type Point into a jsonb array format
        IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
            SELECT jsonb_agg(
                        jsonb_build_object('coordinates', f->'geometry'->'coordinates', 'time', f->'properties'->>'time')
                    ) INTO merged_jsonb
                FROM (
                    SELECT jsonb_array_elements(track_geojson->'features') AS f
                        FROM api.logbook
                        WHERE id >= start_log
                            AND id <= end_log
                            AND track_geojson IS NOT NULL
                        ORDER BY _from_time ASC
                ) AS sub
                WHERE (f->'geometry'->>'type') = 'Point';
        ELSE
            SELECT jsonb_agg(
                        jsonb_build_object('coordinates', f->'geometry'->'coordinates', 'time', f->'properties'->>'time')
                    ) INTO merged_jsonb
                FROM (
                    SELECT jsonb_array_elements(track_geojson->'features') AS f
                        FROM api.logbook
                        WHERE track_geojson IS NOT NULL
                        ORDER BY _from_time ASC
                ) AS sub
                WHERE (f->'geometry'->>'type') = 'Point';
        END IF;
        --RAISE WARNING '-> export_logbooks_gpx_fn merged_jsonb %' , merged_jsonb;
        -- Gather url from app settings
        app_settings := get_app_url_fn();
        --RAISE WARNING '-> export_logbooks_gpx_fn app_settings %', app_settings;
        -- Generate GPX XML, extract Point features from geojson.
        RETURN xmlelement(name gpx,
                    xmlattributes(  '1.1' as version,
                                    'PostgSAIL' as creator,
                                    'http://www.topografix.com/GPX/1/1' as xmlns,
                                    'http://www.opencpn.org' as "xmlns:opencpn",
                                    app_settings->>'app.url' as "xmlns:postgsail"),
                    xmlelement(name metadata,
                        xmlelement(name link, xmlattributes(app_settings->>'app.url' as href),
                            xmlelement(name text, 'PostgSail'))),
                    xmlelement(name trk,
                        xmlelement(name name, 'logbook name'),
                        xmlelement(name trkseg, xmlagg(
                            xmlelement(name trkpt,
                                xmlattributes(features->'coordinates'->1 as lat, features->'coordinates'->0 as lon),
                                    xmlelement(name time, features->'properties'->>'time')
                            )))))::pg_catalog.xml
            FROM jsonb_array_elements(merged_jsonb) AS features;
    END;
$export_logbooks_gpx$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.export_logbooks_gpx_fn
    IS 'Export logbook entries to GPX XML format';
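
-- Example (sketch, not executed by this migration): export logs 8430..8491 as
-- GPX from SQL; via PostgREST the function is exposed as
-- /rpc/export_logbooks_gpx_fn with an XML Accept header. The ids are hypothetical.
--   SELECT api.export_logbooks_gpx_fn(8430, 8491);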

-- Add export logbooks as png
DROP FUNCTION IF EXISTS public.qgis_bbox_trip_py_fn;
CREATE OR REPLACE FUNCTION public.qgis_bbox_trip_py_fn(IN _str_to_parse TEXT DEFAULT NULL, OUT bbox TEXT)
AS $qgis_bbox_trip_py$
    plpy.notice('qgis_bbox_trip_py_fn _str_to_parse [{}]'.format(_str_to_parse))
    vessel_id, log_id, log_end = _str_to_parse.split('_')
    width = 1080
    height = 566
    scaleout = True
    log_extent = None
    # If we have a vessel_id and no log range then it is a full logs image map
    if vessel_id and not log_end:
        # Use the shared cache to avoid preparing the log extent
        if vessel_id in SD:
            plan = SD[vessel_id]
        # A prepared statement from Python
        else:
            plan = plpy.prepare("WITH merged AS ( SELECT ST_Union(track_geom) AS merged_geometry FROM api.logbook WHERE vessel_id = $1 ) SELECT ST_Extent(ST_Transform(merged_geometry, 3857))::TEXT FROM merged;", ["text"])
            SD[vessel_id] = plan
        # Execute the statement with the log extent param and limit to 1 result
        rv = plpy.execute(plan, [vessel_id], 1)
        log_extent = rv[0]['st_extent']
    # If we have a vessel_id and a log_end then it is a subset logs image map
    elif vessel_id and log_end:
        # Use the shared cache to avoid preparing the log extent
        shared_cache = vessel_id + str(log_id) + str(log_end)
        if shared_cache in SD:
            plan = SD[shared_cache]
        # A prepared statement from Python
        else:
            plan = plpy.prepare("WITH merged AS ( SELECT ST_Union(track_geom) AS merged_geometry FROM api.logbook WHERE vessel_id = $1 and id >= $2::NUMERIC and id <= $3::NUMERIC) SELECT ST_Extent(ST_Transform(merged_geometry, 3857))::TEXT FROM merged;", ["text","text","text"])
            SD[shared_cache] = plan
        # Execute the statement with the log extent param and limit to 1 result
        rv = plpy.execute(plan, [vessel_id,log_id,log_end], 1)
        log_extent = rv[0]['st_extent']
    # Else we have a log_id then it is a single log image map
    else:
        # Use the shared cache to avoid preparing the log extent
        if log_id in SD:
            plan = SD[log_id]
        # A prepared statement from Python
        else:
            plan = plpy.prepare("SELECT ST_Extent(ST_Transform(track_geom, 3857)) FROM api.logbook WHERE id = $1::NUMERIC", ["text"])
            SD[log_id] = plan
        # Execute the statement with the log extent param and limit to 1 result
        rv = plpy.execute(plan, [log_id], 1)
        log_extent = rv[0]['st_extent']

    # Extract extent
    def parse_extent_from_db(extent_raw):
        # Parse the extent_raw to extract coordinates
        extent = extent_raw.replace('BOX(', '').replace(')', '').split(',')
        min_x, min_y = map(float, extent[0].split())
        max_x, max_y = map(float, extent[1].split())
        return min_x, min_y, max_x, max_y

    # ZoomOut from linestring extent
    def apply_scale_factor(extent, scale_factor=1.125):
        min_x, min_y, max_x, max_y = extent
        center_x = (min_x + max_x) / 2
        center_y = (min_y + max_y) / 2
        width = max_x - min_x
        height = max_y - min_y
        new_width = width * scale_factor
        new_height = height * scale_factor
        scaled_extent = (
            round(center_x - new_width / 2),
            round(center_y - new_height / 2),
            round(center_x + new_width / 2),
            round(center_y + new_height / 2),
        )
        return scaled_extent

    def adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height):
        min_x, min_y, max_x, max_y = scaled_extent
        bbox_width = float(max_x - min_x)
        bbox_height = float(max_y - min_y)
        bbox_aspect_ratio = float(bbox_width / bbox_height)
        image_aspect_ratio = float(fixed_width / fixed_height)

        if bbox_aspect_ratio > image_aspect_ratio:
            # Adjust height to match aspect ratio
            new_bbox_height = bbox_width / image_aspect_ratio
            height_diff = new_bbox_height - bbox_height
            min_y -= height_diff / 2
            max_y += height_diff / 2
        else:
            # Adjust width to match aspect ratio
            new_bbox_width = bbox_height * image_aspect_ratio
            width_diff = new_bbox_width - bbox_width
            min_x -= width_diff / 2
            max_x += width_diff / 2

        adjusted_extent = (min_x, min_y, max_x, max_y)
        return adjusted_extent

    if not log_extent:
        plpy.warning('Failed to get sql qgis_bbox_trip_py_fn log_id [{}], extent [{}]'.format(log_id, log_extent))
    #plpy.notice('qgis_bbox_trip_py_fn log_id [{}], extent [{}]'.format(log_id, log_extent))
    # Parse extent and apply ZoomOut scale factor
    if scaleout:
        scaled_extent = apply_scale_factor(parse_extent_from_db(log_extent))
    else:
        scaled_extent = parse_extent_from_db(log_extent)
    #plpy.notice('qgis_bbox_trip_py_fn log_id [{}], scaled_extent [{}]'.format(log_id, scaled_extent))
    fixed_width = width # default 1080
    fixed_height = height # default 566
    adjusted_extent = adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height)
    #plpy.notice('qgis_bbox_trip_py_fn log_id [{}], adjusted_extent [{}]'.format(log_id, adjusted_extent))
    min_x, min_y, max_x, max_y = adjusted_extent
    return f"{min_x},{min_y},{max_x},{max_y}"
$qgis_bbox_trip_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.qgis_bbox_trip_py_fn
    IS 'Generate the BBOX based on the trip extent and adapt the extent to the image size for QGIS Server';
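
-- Example (sketch, not executed by this migration): compute the 1080x566 bbox
-- for a trip image. The argument is '<vessel_id>_<start_log>_<end_log>' as
-- parsed above; the values below are hypothetical.
--   SELECT public.qgis_bbox_trip_py_fn('fakevesselid_8430_8491');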

DROP FUNCTION IF EXISTS public.grafana_py_fn;
-- Update grafana provisioning, fix ERROR: KeyError: 'secureJsonFields'
CREATE OR REPLACE FUNCTION public.grafana_py_fn(_v_name text, _v_id text, _u_email text, app jsonb)
RETURNS void
TRANSFORM FOR TYPE jsonb
LANGUAGE plpython3u
AS $function$
    """
    https://grafana.com/docs/grafana/latest/developers/http_api/
    Create an organization based on the vessel name
    Create a user based on the user email
    Add the user to the organization
    Add the data_source to the organization
    Add the dashboards to the organization
    Update the organization preferences
    """
    import requests
    import json
    import re

    grafana_uri = None
    if 'app.grafana_admin_uri' in app and app['app.grafana_admin_uri']:
        grafana_uri = app['app.grafana_admin_uri']
    else:
        plpy.error('Error no grafana_admin_uri defined, check app settings')
        return None

    b_name = None
    if not _v_name:
        b_name = _v_id
    else:
        b_name = _v_name

    # add vessel org
    headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com',
               'Accept': 'application/json', 'Content-Type': 'application/json'}
    path = 'api/orgs'
    url = f'{grafana_uri}/{path}'
    data_dict = {'name': b_name}
    data = json.dumps(data_dict)
    r = requests.post(url, data=data, headers=headers)
    #print(r.text)
    plpy.notice(r.json())
    if r.status_code == 200 and "orgId" in r.json():
        org_id = r.json()['orgId']
    else:
        plpy.error('Error grafana add vessel org {req} - {res}'.format(req=data_dict, res=r.json()))
        return None

    # add user to vessel org
    path = 'api/admin/users'
    url = f'{grafana_uri}/{path}'
    data_dict = {'orgId': org_id, 'email': _u_email, 'password': 'asupersecretpassword'}
    data = json.dumps(data_dict)
    r = requests.post(url, data=data, headers=headers)
    #print(r.text)
    plpy.notice(r.json())
    if r.status_code == 200 and "id" in r.json():
        user_id = r.json()['id']
    else:
        plpy.error('Error grafana add user to vessel org')
        return

    # read data_source
    path = 'api/datasources/1'
    url = f'{grafana_uri}/{path}'
    r = requests.get(url, headers=headers)
    #print(r.text)
    plpy.notice(r.json())
    data_source = r.json()
    data_source['id'] = 0
    data_source['orgId'] = org_id
    data_source['uid'] = "ds_" + _v_id
    data_source['name'] = "ds_" + _v_id
    data_source['secureJsonData'] = {}
    data_source['secureJsonData']['password'] = 'mysecretpassword'
    data_source['readOnly'] = True
    # 'secureJsonFields' is returned by the GET but rejected on POST, so drop it
    if "secureJsonFields" in data_source:
        del data_source['secureJsonFields']

    # add data_source to vessel org
    path = 'api/datasources'
    url = f'{grafana_uri}/{path}'
    data = json.dumps(data_source)
    headers['X-Grafana-Org-Id'] = str(org_id)
    r = requests.post(url, data=data, headers=headers)
    plpy.notice(r.json())
    del headers['X-Grafana-Org-Id']
    if r.status_code != 200 and "id" not in r.json():
        plpy.error('Error grafana add data_source to vessel org')
        return

    dashboards_tpl = ['pgsail_tpl_electrical', 'pgsail_tpl_logbook', 'pgsail_tpl_monitor', 'pgsail_tpl_rpi', 'pgsail_tpl_solar', 'pgsail_tpl_weather', 'pgsail_tpl_home']
    for dashboard in dashboards_tpl:
        # read dashboard template by uid
        path = 'api/dashboards/uid'
        url = f'{grafana_uri}/{path}/{dashboard}'
        if 'X-Grafana-Org-Id' in headers:
            del headers['X-Grafana-Org-Id']
        r = requests.get(url, headers=headers)
        plpy.notice(r.json())
        if r.status_code != 200 and "id" not in r.json():
            plpy.error('Error grafana read dashboard template')
            return
        new_dashboard = r.json()
        del new_dashboard['meta']
        new_dashboard['dashboard']['version'] = 0
        new_dashboard['dashboard']['id'] = 0
        new_uid = re.sub(r'pgsail_tpl_(.*)', r'postgsail_\1', new_dashboard['dashboard']['uid'])
        new_dashboard['dashboard']['uid'] = f'{new_uid}_{_v_id}'
        # add dashboard to vessel org
        path = 'api/dashboards/db'
        url = f'{grafana_uri}/{path}'
        data = json.dumps(new_dashboard)
        new_data = data.replace('PCC52D03280B7034C', data_source['uid'])
        headers['X-Grafana-Org-Id'] = str(org_id)
        r = requests.post(url, data=new_data, headers=headers)
        plpy.notice(r.json())
        if r.status_code != 200 and "id" not in r.json():
            plpy.error('Error grafana add dashboard to vessel org')
            return

    # Update Org Prefs
    path = 'api/org/preferences'
    url = f'{grafana_uri}/{path}'
    home_dashboard = {}
    home_dashboard['timezone'] = 'utc'
    home_dashboard['homeDashboardUID'] = f'postgsail_home_{_v_id}'
    data = json.dumps(home_dashboard)
    headers['X-Grafana-Org-Id'] = str(org_id)
    r = requests.patch(url, data=data, headers=headers)
    plpy.notice(r.json())
    if r.status_code != 200:
        plpy.error('Error grafana update org preferences')
        return

    plpy.notice('Done')
$function$
;
COMMENT ON FUNCTION public.grafana_py_fn(text, text, text, jsonb) IS 'Grafana Organization,User,data_source,dashboards provisioning via HTTP API using plpython3u';
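
-- Hypothetical manual run for a single vessel (normally invoked during vessel
-- provisioning); the name, id, email and admin URI below are placeholders:
--   SELECT public.grafana_py_fn('aava', '0123456789abcdef', 'demo+aava@openplotter.cloud',
--       '{"app.grafana_admin_uri": "http://admin:password@localhost:3000"}'::jsonb);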

-- Add missing comment on function cron_process_no_activity_fn
COMMENT ON FUNCTION
    public.cron_process_no_activity_fn
    IS 'init by pg_cron, check for vessels with no activity for more than 230 days, then send a notification';

-- Raise the SQL connection limit to 30 for the grafana, api_anonymous and qgis roles
ALTER ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;
ALTER ROLE api_anonymous WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;
ALTER ROLE qgis_role WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;
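
-- Verify the new limits if needed, e.g.:
--   SELECT rolname, rolconnlimit FROM pg_roles
--   WHERE rolname IN ('grafana', 'api_anonymous', 'qgis_role');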

-- Create qgis schema for qgis projects
CREATE SCHEMA IF NOT EXISTS qgis;
COMMENT ON SCHEMA qgis IS 'Holds qgis_projects';
GRANT USAGE ON SCHEMA qgis TO qgis_role;
CREATE TABLE qgis.qgis_projects (
    "name" text NOT NULL,
    metadata jsonb NULL,
    "content" bytea NULL,
    CONSTRAINT qgis_projects_pkey PRIMARY KEY (name)
);
-- Description
COMMENT ON TABLE
    qgis.qgis_projects
    IS 'Store qgis projects using QGIS-Server or QGIS-Desktop from https://qgis.org/';
GRANT SELECT,INSERT,UPDATE,DELETE ON TABLE qgis.qgis_projects TO qgis_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO qgis_role;
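
-- This table layout matches what the QGIS PostgreSQL project storage expects,
-- so stored projects can be inspected with, e.g.:
--   SELECT name, metadata FROM qgis.qgis_projects;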

-- Allow anonymous read access to tables and views in the api schema
GRANT SELECT ON ALL TABLES IN SCHEMA api TO api_anonymous;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO api_anonymous;
-- Allow EXECUTE on all FUNCTIONS in the api and public schemas to user_role
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
-- Allow EXECUTE on all FUNCTIONS in the public schema to vessel_role
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO vessel_role;

-- Update version
UPDATE public.app_settings
    SET value='0.7.5'
    WHERE "name"='app.version';

\c postgres

-- Rename the video cronjob and point it at cron_process_video_fn
UPDATE cron.job
    SET command='select public.cron_process_video_fn()'
    WHERE jobname = 'cron_new_video';
UPDATE cron.job
    SET jobname='cron_video'
    WHERE command='select public.cron_process_video_fn()';
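
-- Confirm the rename took effect (pg_cron metadata lives in the postgres database):
--   SELECT jobid, jobname, schedule, command FROM cron.job WHERE jobname = 'cron_video';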

@@ -19,6 +19,8 @@ INSERT INTO app_settings (name, value) VALUES
    ('app.telegram_bot_token', '${PGSAIL_TELEGRAM_BOT_TOKEN}'),
    ('app.grafana_admin_uri', '${PGSAIL_GRAFANA_ADMIN_URI}'),
    ('app.keycloak_uri', '${PGSAIL_KEYCLOAK_URI}'),
    ('app.gis_url', '${PGSAIL_QGIS_URL}'),
    ('app.videos_url', '${PGSAIL_VIDEOS_URL}'),
    ('app.url', '${PGSAIL_APP_URL}'),
    ('app.version', '${PGSAIL_VERSION}');
-- Update comment with version
@@ -27,6 +29,8 @@ COMMENT ON DATABASE signalk IS 'PostgSail version ${PGSAIL_VERSION}';
ALTER ROLE authenticator WITH PASSWORD '${PGSAIL_AUTHENTICATOR_PASSWORD}';
ALTER ROLE grafana WITH PASSWORD '${PGSAIL_GRAFANA_PASSWORD}';
ALTER ROLE grafana_auth WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
ALTER ROLE qgis_role WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
ALTER ROLE maplapse_role WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
END

curl -s -XPOST -Hx-pgsail:${PGSAIL_VERSION} https://api.openplotter.cloud/rpc/telemetry_fn

@@ -1 +1 @@
0.7.1
0.7.5

File diff suppressed because one or more lines are too long
@@ -49,7 +49,7 @@ var moment = require('moment');
    ],
    user_views: [
        { url: '/stays_view', res_body_length: 2},
        { url: '/moorages_view', res_body_length: 2},
        { url: '/moorages_view', res_body_length: 3},
        { url: '/logs_view', res_body_length: 2},
        { url: '/log_view', res_body_length: 2},
        //{ url: '/stats_view', res_body_length: 1},
@@ -247,7 +247,7 @@ var moment = require('moment');
    ],
    user_views: [
        { url: '/stays_view', res_body_length: 2},
        { url: '/moorages_view', res_body_length: 2},
        { url: '/moorages_view', res_body_length: 4},
        { url: '/logs_view', res_body_length: 2},
        { url: '/log_view', res_body_length: 2},
        //{ url: '/stats_view', res_body_length: 1},

@@ -64,6 +64,11 @@ SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('ve
SELECT api.update_logbook_observations_fn(1, '{"observations":{"cloudCoverage":1}}'::TEXT);
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);

\echo 'add tags to logbook'
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);
SELECT api.update_logbook_observations_fn(1, '{"tags": ["tag_name"]}'::TEXT);
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);

-- Check export
--\echo 'check logbook export fn'
--SELECT api.export_logbook_geojson_fn(1);
@@ -28,7 +28,7 @@ avg_speed | 3.6357142857142852
max_speed | 6.1
max_wind_speed | 22.1
notes |
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 14.549999999999999}
-[ RECORD 2 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Norra hamnen to Ekenäs
_from_time | t
@@ -41,7 +41,7 @@ avg_speed | 5.4523809523809526
max_speed | 6.5
max_wind_speed | 37.2
notes |
extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 10.476190476190478}

stays
-[ RECORD 1 ]
@@ -91,12 +91,22 @@ DROP TABLE
stats_logs_fn |

update_logbook_observations_fn
-[ RECORD 1 ]---------------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
-[ RECORD 1 ]-----------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 14.549999999999999}

-[ RECORD 1 ]------------------+--
update_logbook_observations_fn | t

-[ RECORD 1 ]--------------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}}
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}

add tags to logbook
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}

-[ RECORD 1 ]------------------+--
update_logbook_observations_fn | t

-[ RECORD 1 ]--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}

@@ -243,7 +243,7 @@ vessel_id | t
name | Ekenäs
country | fi
stay_code | 1
stay_duration |
stay_duration | PT0S
reference_count | 1
latitude | 59.86
longitude | 23.3657666666667
@@ -268,4 +268,12 @@ default_stay_id | 2
total_stay | 0
total_duration | PT1M
arrivals_departures | 1
-[ RECORD 3 ]-------+---------------------
id | 3
moorage | Ekenäs
default_stay | Unknown
default_stay_id | 1
total_stay | 0
total_duration | PT0S
arrivals_departures | 1

tests/sql/logbook.sql (new file, 38 lines)
@@ -0,0 +1,38 @@
---------------------------------------------------------------------------
-- Listing
--

-- List current database
select current_database();

-- connect to the DB
\c signalk

-- output display format
\x on

\echo 'Validate logbook operation'
-- set user_id
SELECT a.user_id as "user_id" FROM auth.accounts a WHERE a.email = 'demo+kapla@openplotter.cloud' \gset
--\echo :"user_id"
SELECT set_config('user.id', :'user_id', false) IS NOT NULL as user_id;

-- set vessel_id
SELECT v.vessel_id as "vessel_id" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
--\echo :"vessel_id"
SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;

-- List logbook entries for user
\echo 'logbook'
SELECT count(*) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
\echo 'logbook'
SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);

-- Delete logbook for user
\echo 'Delete logbook for user kapla'
SELECT api.delete_logbook_fn(5); -- delete Tropics Zone
SELECT api.delete_logbook_fn(6); -- delete Alaska Zone

-- Merge logbook for user
\echo 'Merge logbook for user kapla'
SELECT api.merge_logbook_fn(1,2);
tests/sql/logbook.sql.output (new file, 83 lines)
@@ -0,0 +1,83 @@
 current_database
------------------
 signalk
(1 row)

You are now connected to database "signalk" as user "username".
Expanded display is on.
Validate logbook operation
-[ RECORD 1 ]
user_id | t

-[ RECORD 1 ]
vessel_id | t

logbook
-[ RECORD 1 ]
count | 4

logbook
-[ RECORD 1 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------
name | patch log name 3
_from_time | t
_to_time | t
track_geojson | t
track_geom | 0102000020E61000001C000000B0DEBBE0E68737404DA938FBF0094E40B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
distance | 7.6447
duration | PT27M
avg_speed | 3.6357142857142852
max_speed | 6.1
max_wind_speed | 22.1
notes | new log note 3
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
-[ RECORD 2 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Norra hamnen to Ekenäs
_from_time | t
_to_time | t
track_geojson | t
track_geom | 0102000020E610000015000000029A081B9E6E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
distance | 8.8968
duration | PT20M
avg_speed | 5.4523809523809526
max_speed | 6.5
max_wind_speed | 37.2
notes |
extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 10.476190476190478}
-[ RECORD 3 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Tropics Zone
_from_time | t
_to_time | t
track_geojson | f
track_geom | 0102000020E610000002000000A4E85E0D58934FC000DC509B80052C40BC069B43D64553C090510727F3BD2940
distance | 123
duration |
avg_speed |
max_speed |
max_wind_speed |
notes |
extra |
-[ RECORD 4 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Alaska Zone
_from_time | t
_to_time | t
track_geojson | f
track_geom | 0102000020E610000002000000FDB11ED079F261C090C47F1861B84D40D3505124540B63C09C091C1C8D4A4C40
distance | 1234
duration |
avg_speed |
max_speed |
max_wind_speed |
notes |
extra |

Delete logbook for user kapla
-[ RECORD 1 ]-----+--
delete_logbook_fn | t

-[ RECORD 1 ]-----+--
delete_logbook_fn | t

Merge logbook for user kapla
-[ RECORD 1 ]----+-
merge_logbook_fn |
tests/sql/maplapse.sql (new file, 36 lines)
@@ -0,0 +1,36 @@
---------------------------------------------------------------------------
-- Listing
--

-- List current database
select current_database();

-- connect to the DB
\c signalk

-- output display format
\x on

-- Assign vessel_id var
SELECT v.vessel_id as "vessel_id_kapla" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
SELECT v.vessel_id as "vessel_id_aava" FROM auth.vessels v WHERE v.owner_email = 'demo+aava@openplotter.cloud' \gset

-- user_role
SET ROLE user_role;
SELECT set_config('vessel.id', :'vessel_id_kapla', false) IS NOT NULL as vessel_id;
-- insert fake request maplapse
\echo 'Insert fake request maplapse'
SELECT api.maplapse_record_fn('Kapla,?start_log=1&end_log=1&height=100vh');

-- maplapse_role
SET ROLE maplapse_role;

\echo 'GET pending maplapse task'
SELECT id as maplapse_id from process_queue where channel = 'maplapse_video' and processed is null order by stored asc limit 1 \gset
SELECT count(id) from process_queue where channel = 'maplapse_video' and processed is null limit 1;

\echo 'Update process on completion'
UPDATE process_queue SET processed = NOW() WHERE id = :'maplapse_id';

\echo 'Insert video availability notification in process queue'
INSERT INTO process_queue ("channel", "payload", "ref_id", "stored") VALUES ('new_video', CONCAT('video_', :'vessel_id_kapla'::TEXT, '_1', '_1.mp4'), :'vessel_id_kapla'::TEXT, NOW());
tests/sql/maplapse.sql.output (new file, 24 lines)
@@ -0,0 +1,24 @@
 current_database
------------------
 signalk
(1 row)

You are now connected to database "signalk" as user "username".
Expanded display is on.
SET
-[ RECORD 1 ]
vessel_id | t

Insert fake request maplapse
-[ RECORD 1 ]------+--
maplapse_record_fn | t

SET
GET pending maplapse task
-[ RECORD 1 ]
count | 1

Update process on completion
UPDATE 1
Insert video availability notification in process queue
INSERT 0 1
tests/sql/qgis.sql (new file, 52 lines)
@@ -0,0 +1,52 @@
---------------------------------------------------------------------------
-- Listing
--

-- List current database
select current_database();

-- connect to the DB
\c signalk

-- output display format
\x on

-- Assign vessel_id var
SELECT v.vessel_id as "vessel_id_kapla" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
SELECT v.vessel_id as "vessel_id_aava" FROM auth.vessels v WHERE v.owner_email = 'demo+aava@openplotter.cloud' \gset

-- qgis
SET ROLE qgis_role;

-- Get BBOX Extent from SQL query for a log:
-- "^/log_(\w+)_(\d+).png$"
-- "^/log_(\w+)_(\d+)_sat.png$"
-- require a log_id, optional image width and height, scale_out
\echo 'Get BBOX Extent from SQL query for a log: "^/log_(\w+)_(\d+).png$"'
SELECT public.qgis_bbox_py_fn(null, 1);
SELECT public.qgis_bbox_py_fn(null, 3);
-- "^/log_(\w+)_(\d+)_line.png$"
\echo 'Get BBOX Extent from SQL query for a log as line: "^/log_(\w+)_(\d+)_line.png$"'
SELECT public.qgis_bbox_py_fn(null, 1, 333, 216, False);
SELECT public.qgis_bbox_py_fn(null, 3, 333, 216, False);
-- Get BBOX Extent from SQL query for all logs by vessel_id
-- "^/logs_(\w+)_(\d+).png$"
-- require a vessel_id, optional image width and height, scale_out
\echo 'Get BBOX Extent from SQL query for all logs by vessel_id: "^/logs_(\w+)_(\d+).png$"'
SELECT public.qgis_bbox_py_fn(:'vessel_id_kapla'::TEXT);
SELECT public.qgis_bbox_py_fn(:'vessel_id_aava'::TEXT);
-- Get BBOX Extent from SQL query for a trip by vessel_id
-- "^/trip_(\w+)_(\d+)_(\d+).png$"
-- require a vessel_id, optional image width and height, scale_out
\echo 'Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_(\w+)_(\d+)_(\d+).png$"'
SELECT public.qgis_bbox_py_fn(:'vessel_id_kapla'::TEXT, 1, 2);
SELECT public.qgis_bbox_py_fn(:'vessel_id_aava'::TEXT, 3, 4);
-- require a vessel_id, optional image width and height, scale_out as in Apache
\echo 'Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_((\w+)_(\d+)_(\d+)).png$"'
SELECT public.qgis_bbox_trip_py_fn(CONCAT(:'vessel_id_kapla'::TEXT, '_', 1, '_', 2));
SELECT public.qgis_bbox_trip_py_fn(CONCAT(:'vessel_id_aava'::TEXT, '_', 3, '_', 4));

--SELECT set_config('vessel.id', :'vessel_id_kapla', false) IS NOT NULL as vessel_id;
-- SQL request from QGIS to fetch the necessary data based on vessel_id
--SELECT id, vessel_id, name as logname, ST_Transform(track_geom, 3857) as track_geom, ROUND(distance, 2) as distance, ROUND(EXTRACT(epoch FROM duration)/3600,2) as duration,_from_time,_to_time FROM api.logbook where track_geom is not null and _to_time is not null ORDER BY _from_time DESC;
SELECT count(*) FROM api.logbook where track_geom is not null and _to_time is not null;
tests/sql/qgis.sql.output (new file, 46 lines)
@@ -0,0 +1,46 @@
 current_database
------------------
 signalk
(1 row)

You are now connected to database "signalk" as user "username".
Expanded display is on.
SET
Get BBOX Extent from SQL query for a log: "^/log_(w+)_(d+).png$"
-[ RECORD 1 ]---+------------------------------------------------------
qgis_bbox_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

-[ RECORD 1 ]---+----------------------------------------------------
qgis_bbox_py_fn | 2749398.035335689,8334944,2756917.964664311,8338885

Get BBOX Extent from SQL query for a log as line: "^/log_(w+)_(d+)_line.png$"
-[ RECORD 1 ]---+-------------------------------------------------------------------------
qgis_bbox_py_fn | 2570800.6277114027,8368634.173700442,2645441.4677270483,8417049.85371059

-[ RECORD 1 ]---+--------------------------------------------------------------------------
qgis_bbox_py_fn | 2750457.4431765806,8335162.530580978,2755858.0759322727,8338665.643719805

Get BBOX Extent from SQL query for all logs by vessel_id: "^/logs_(w+)_(d+).png$"
-[ RECORD 1 ]---+------------------------------------------------------
qgis_bbox_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

-[ RECORD 1 ]---+------------------------------------------------------
qgis_bbox_py_fn | -2006284.4558303887,4864146,5013530.455830389,8543049

Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_(w+)_(d+)_(d+).png$"
-[ RECORD 1 ]---+-------------------------------------
qgis_bbox_py_fn | 2595383,4787988.0,2620859,11997696.0

-[ RECORD 1 ]---+---------------------------------------
qgis_bbox_py_fn | 97351,-192283890.5,2909895,205691085.5

Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_((w+)_(d+)_(d+)).png$"
-[ RECORD 1 ]--------+------------------------------------------------------
qgis_bbox_trip_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

-[ RECORD 1 ]--------+------------------------------------------------------
qgis_bbox_trip_py_fn | -2006284.4558303887,4864146,5013530.455830389,8543049

-[ RECORD 1 ]
count | 3
@@ -6,10 +6,10 @@
You are now connected to database "signalk" as user "username".
Expanded display is on.
-[ RECORD 1 ]--+-------------------------------
server_version | 16.2 (Debian 16.2-1.pgdg110+2)
server_version | 16.3 (Debian 16.3-1.pgdg120+1)

-[ RECORD 1 ]--------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
postgis_full_version | POSTGIS="3.4.2 c19ce56" [EXTENSION] PGSQL="160" GEOS="3.9.0-CAPI-1.16.2" PROJ="7.2.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.10" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)"
-[ RECORD 1 ]--------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
postgis_full_version | POSTGIS="3.4.2 c19ce56" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"

-[ RECORD 1 ]--------------------------------------------------------------------------------------
Name | citext
@@ -53,7 +53,7 @@ Schema | public
Description | PostGIS geometry and geography spatial types and functions
-[ RECORD 9 ]--------------------------------------------------------------------------------------
Name | timescaledb
Version | 2.14.2
Version | 2.15.3
Schema | public
Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
-[ RECORD 10 ]-------------------------------------------------------------------------------------
@@ -96,24 +96,24 @@ laninline | 0
lanvalidator | 2248
lanacl |
-[ RECORD 4 ]-+-----------
oid | 13545
oid | 13568
lanname | plpgsql
lanowner | 10
lanispl | t
lanpltrusted | t
lanplcallfoid | 13542
laninline | 13543
lanvalidator | 13544
lanplcallfoid | 13565
laninline | 13566
lanvalidator | 13567
lanacl |
-[ RECORD 5 ]-+-----------
oid | 18175
oid | 18168
lanname | plpython3u
lanowner | 10
lanispl | t
lanpltrusted | t
lanplcallfoid | 18172
laninline | 18173
lanvalidator | 18174
lanplcallfoid | 18165
laninline | 18166
lanvalidator | 18167
lanacl |

-[ RECORD 1 ]+-----------
@@ -450,6 +450,24 @@ qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | true
-[ RECORD 23 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | logbook
policyname | logbook_qgis_role
permissive | PERMISSIVE
roles | {qgis_role}
cmd | ALL
qual | true
with_check | false
-[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public
tablename | process_queue
policyname | public_maplapse_role
permissive | PERMISSIVE
roles | {maplapse_role}
cmd | ALL
qual | true
with_check | true
-[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | stays
policyname | api_user_role
permissive | PERMISSIVE
@@ -457,7 +475,7 @@ roles | {user_role}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, true))
with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | stays
policyname | api_scheduler_role
@@ -466,7 +484,7 @@ roles | {scheduler}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | stays
policyname | grafana_role
@@ -475,7 +493,7 @@ roles | {grafana}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false
-[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | stays
policyname | api_anonymous_role
@@ -484,7 +502,7 @@ roles | {api_anonymous}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false
-[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | moorages
policyname | admin_all
@@ -493,7 +511,7 @@ roles | {username}
cmd | ALL
qual | true
with_check | true
-[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | moorages
policyname | api_vessel_role
@@ -502,7 +520,7 @@ roles | {vessel_role}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | true
-[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | moorages
policyname | api_user_role
@@ -511,7 +529,7 @@ roles | {user_role}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, true))
with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | moorages
policyname | api_scheduler_role
@@ -520,7 +538,7 @@ roles | {scheduler}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | (vessel_id = current_setting('vessel.id'::text, false))
-[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | moorages
policyname | grafana_role
@@ -529,7 +547,7 @@ roles | {grafana}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false
-[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | api
tablename | moorages
policyname | api_anonymous_role
@@ -538,7 +556,7 @@ roles | {api_anonymous}
cmd | ALL
qual | (vessel_id = current_setting('vessel.id'::text, false))
with_check | false
-[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth
tablename | vessels
policyname | admin_all
@@ -547,7 +565,7 @@ roles | {username}
cmd | ALL
qual | true
with_check | true
-[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth
tablename | vessels
policyname | api_user_role
@@ -556,7 +574,7 @@ roles | {user_role}
cmd | ALL
qual | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
with_check | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
-[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth
tablename | vessels
policyname | grafana_role
@@ -565,7 +583,7 @@ roles | {grafana}
cmd | ALL
qual | ((owner_email)::text = current_setting('user.email'::text, true))
with_check | false
-[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth
tablename | accounts
policyname | api_user_role
@@ -574,7 +592,7 @@ roles | {user_role}
cmd | ALL
qual | ((email)::text = current_setting('user.email'::text, true))
with_check | ((email)::text = current_setting('user.email'::text, true))
-[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth
tablename | accounts
policyname | api_scheduler_role
@@ -583,7 +601,7 @@ roles | {scheduler}
cmd | ALL
qual | ((email)::text = current_setting('user.email'::text, true))
with_check | ((email)::text = current_setting('user.email'::text, true))
-[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | auth
tablename | accounts
policyname | grafana_proxy_role
@@ -592,7 +610,7 @@ roles | {grafana_auth}
cmd | ALL
qual | true
with_check | false
-[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 41 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public
tablename | process_queue
policyname | admin_all
@@ -601,7 +619,7 @@ roles | {username}
cmd | ALL
qual | true
with_check | true
-[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 42 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public
tablename | process_queue
policyname | api_vessel_role
@@ -610,7 +628,7 @@ roles | {vessel_role}
cmd | ALL
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
with_check | true
-[ RECORD 41 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 43 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public
tablename | process_queue
policyname | api_user_role
@@ -619,7 +637,7 @@ roles | {user_role}
cmd | ALL
qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
with_check | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
-[ RECORD 42 ]-----------------------------------------------------------------------------------------------------------------------------
-[ RECORD 44 ]-----------------------------------------------------------------------------------------------------------------------------
schemaname | public
tablename | process_queue
policyname | api_scheduler_role
@@ -645,12 +663,12 @@ overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure
overpass_py_fn | {"name": "Norra hamnen", "leisure": "marina"}

-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------
versions_fn | {"api_version" : "0.7.2", "sys_version" : "PostgreSQL 16.2", "timescaledb" : "2.14.2", "postgis" : "3.4.2", "postgrest" : "PostgREST 12.0.2"}
versions_fn | {"api_version" : "0.7.4", "sys_version" : "PostgreSQL 16.3", "timescaledb" : "2.15.3", "postgis" : "3.4.2", "postgrest" : "PostgREST 12.2.2"}

-[ RECORD 1 ]-----------------
api_version | 0.7.2
sys_version | PostgreSQL 16.2
timescaledb | 2.14.2
api_version | 0.7.4
sys_version | PostgreSQL 16.3
timescaledb | 2.15.3
postgis | 3.4.2
postgrest | PostgREST 12.0.2
postgrest | PostgREST 12.2.2

@@ -182,6 +182,45 @@ else
    exit 1
fi

# logbook SQL unit tests
psql ${PGSAIL_DB_URI} < sql/logbook.sql > output/logbook.sql.output
diff sql/logbook.sql.output output/logbook.sql.output > /dev/null
#diff -u sql/logbook.sql.output output/logbook.sql.output | wc -l
#echo 0
if [ $? -eq 0 ]; then
    echo SQL logbook.sql OK
else
    echo SQL logbook.sql FAILED
    diff -u sql/logbook.sql.output output/logbook.sql.output
    exit 1
fi

# qgis SQL unit tests
psql ${PGSAIL_DB_URI} < sql/qgis.sql > output/qgis.sql.output
diff sql/qgis.sql.output output/qgis.sql.output > /dev/null
#diff -u sql/qgis.sql.output output/qgis.sql.output | wc -l
#echo 0
if [ $? -eq 0 ]; then
    echo SQL qgis.sql OK
else
    echo SQL qgis.sql FAILED
    diff -u sql/qgis.sql.output output/qgis.sql.output
    exit 1
fi

# maplapse SQL unit tests
psql ${PGSAIL_DB_URI} < sql/maplapse.sql > output/maplapse.sql.output
diff sql/maplapse.sql.output output/maplapse.sql.output > /dev/null
#diff -u sql/maplapse.sql.output output/maplapse.sql.output | wc -l
#echo 0
if [ $? -eq 0 ]; then
    echo SQL maplapse.sql OK
else
    echo SQL maplapse.sql FAILED
    diff -u sql/maplapse.sql.output output/maplapse.sql.output
    exit 1
fi

# Download and update openapi documentation
wget ${PGSAIL_API_URI} -O openapi.json
#echo 0