Mirror of https://github.com/xbgmsharp/postgsail.git (synced 2025-09-17 19:27:49 +00:00)
Compare commits
53 Commits
9532075bc4
5996b4d483
fdd6fc18e1
af3866fafe
53daaa9947
3fed9e0b6a
f3168542fd
d266485fef
8738becd82
ad43ca6629
9368878963
496491a43a
7494b39abc
74426a75f8
9bac88a8cc
c0af53155c
e0aa6a4d0e
2425b674f7
b7a1462ec6
a31d857a6e
dbeb64c0dc
229c219751
3216ffe42c
e2e3e5814e
5f709eb71e
d5bf36a85c
90d48c0c52
62707aa86c
ac187a1480
7b0bf7494f
c64219e249
2127dd7fcb
2a583b94dc
147d9946c3
993cfaeaff
3e70283221
0697acb940
8ca4d03649
7a465ff532
96dce86678
8dd827f70d
572f0cd19d
047f243758
5c494896c6
b7e717afbc
2f3912582a
f7b9a54a71
4e554083b0
69b6490534
8b336f6f9b
ef5868d412
ce532bbb4d
66999ca9bb
55  .github/workflows/db-lint.yml (vendored, new file)
@@ -0,0 +1,55 @@
name: Linting rules on database schema.

on:
  pull_request:
    paths:
      - 'initdb/**'
    branches:
      - 'main'
  push:
    branches:
      - 'main'
    paths:
      - 'initdb/**'
    tags:
      - "*"
  workflow_dispatch:

jobs:
  schemalint:
    name: schemalint
    runs-on: ubuntu-latest
    steps:
      - name: Check out the source
        uses: actions/checkout@v3

      - name: Set env
        run: cp .env.example .env

      - name: Pull Docker images
        run: docker-compose pull db api

      - name: Run PostgSail Database & schemalint
        # Environment variables
        env:
          # The hostname used to communicate with the PostgreSQL service container
          PGHOST: localhost
          PGPORT: 5432
          PGDATABASE: signalk
          PGUSER: username
          PGPASSWORD: password
        run: |
          set -eu
          source .env
          docker-compose stop || true
          docker-compose rm || true
          docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
          docker-compose ps -a
          echo ${PGSAIL_API_URL}
          curl ${PGSAIL_API_URL}
          npm i -D schemalint
          npx schemalint
      - name: Show the logs
        if: always()
        run: |
          docker-compose logs
2  .github/workflows/db-test.yml (vendored)
@@ -51,7 +51,7 @@ jobs:
source .env
docker-compose stop || true
docker-compose rm || true
docker-compose up -d db && sleep 15 && docker-compose up -d api && sleep 5
docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
docker-compose ps -a
echo ${PGSAIL_API_URL}
curl ${PGSAIL_API_URL}
2  .github/workflows/frontend-test.yml (vendored)
@@ -47,7 +47,7 @@ jobs:
source .env
docker-compose stop || true
docker-compose rm || true
docker-compose up -d db && sleep 15 && docker-compose up -d api && sleep 5
docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
docker-compose ps -a
echo "Test PostgSail Web Unit Test"
docker compose -f docker-compose.dev.yml -f docker-compose.yml up -d web_dev && sleep 100
2  .github/workflows/grafana-test.yml (vendored)
@@ -42,7 +42,7 @@ jobs:
source .env
docker-compose stop || true
docker-compose rm || true
docker-compose up -d db && sleep 15
docker-compose up -d db && sleep 30
docker-compose ps -a
echo "Test PostgSail Grafana Unit Test"
docker-compose up -d app && sleep 5
8  .gitignore (vendored)
@@ -1,2 +1,10 @@
.DS_Store
.env
initdb/*.csv
initdb/*.no
initdb/*.jwk
tests/node_modules/
tests/output/
assets/*
.pnpm-store/
db-data/
22  .schemalintrc.js (new file)
@@ -0,0 +1,22 @@
module.exports = {
  connection: {
    host: process.env.PGHOST,
    user: process.env.PGUSER,
    password: process.env.PGPASSWORD,
    database: process.env.PGDATABASE,
    charset: "utf8",
  },

  rules: {
    "name-casing": ["error", "snake"],
    "prefer-jsonb-to-json": ["error"],
    "prefer-text-to-varchar": ["error"],
    "prefer-timestamptz-to-timestamp": ["error"],
    "prefer-identity-to-serial": ["error"],
    "name-inflection": ["error", "singular"],
  },

  schemas: [{ name: "public" }, { name: "api" }],

  ignores: [],
};
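As a rough illustration of what these lint rules enforce, a table written to satisfy them would look something like the sketch below: snake_case singular names, jsonb instead of json, text instead of varchar, timestamptz instead of timestamp, and an identity column instead of serial. The table itself is hypothetical and not part of the PostgSail schema.

```sql
-- Hypothetical example only: a table shaped to pass the configured schemalint rules.
CREATE TABLE api.example_note (
    id INTEGER GENERATED ALWAYS AS IDENTITY PRIMARY KEY, -- prefer-identity-to-serial
    vessel_name TEXT NOT NULL,                           -- prefer-text-to-varchar
    properties JSONB NULL,                               -- prefer-jsonb-to-json
    created_at TIMESTAMPTZ NOT NULL DEFAULT NOW()        -- prefer-timestamptz-to-timestamp
);
```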
16  README.md
@@ -23,18 +23,18 @@ postgsail-telegram-bot:

- Automatically log your voyages without manually starting or stopping a trip.
- Automatically capture the details of your voyages (boat speed, heading, wind speed, etc).
- Timelapse video your trips!
- Timelapse video your trips, with or without time control.
- Add custom notes to your logs.
- Export to CSV or GPX and download your logs.
- Export to CSV or GPX or KLM and download your logs.
- Aggregate your trip statistics: Longest voyage, time spent at anchorages, home ports etc.
- See your moorages on a global map, with incoming and outgoing voyages from each trip.
- Monitor your boat (position, depth, wind, temperature, battery charge status, etc.) remotely.
- History: view trends.
- Alert monitoring: get notification on low voltage or low fuel remotely.
- Notification via email or PushOver, Telegram
- Offline mode
- Low Bandwidth mode
- Awesome statistics and graphs
- Notification via email or PushOver, Telegram.
- Offline mode.
- Low Bandwidth mode.
- Awesome statistics and graphs.
- Anything missing? just ask!

## Context
@@ -209,10 +209,6 @@ Out of the box iot platform using docker with the following software:
- [PostGIS, a spatial database extender for PostgreSQL object-relational database.](https://postgis.net/)
- [Grafana, open observability platform | Grafana Labs](https://grafana.com)

### Releases & updates

PostgSail Release Notes & Future Plans: see planned and in-progress updates and detailed information about current and past releases. [PostgSail project](https://github.com/xbgmsharp?tab=projects)

### Support

To get support, please create new [issue](https://github.com/xbgmsharp/postgsail/issues).
2  frontend
Submodule frontend updated: ac25a3afb9...086b393568
@@ -237,6 +237,10 @@
"templating": {
"list": [
{
"datasource": {
"type": "postgres",
"uid": "PCC52D03280B7034C"
},
"definition": "SET \"user.email\" = '${__user.email}';\nSET vessel.id = '${__user.login}';\nSELECT\n v.name AS __text,\n m.vessel_id AS __value\n FROM auth.vessels v\n JOIN api.metadata m ON v.owner_email = '${__user.email}' and m.vessel_id = v.vessel_id;",
"description": "Vessel Name",
"hide": 0,
@@ -43,7 +43,6 @@ CREATE TYPE status AS ENUM ('sailing', 'motoring', 'moored', 'anchored');
-- Table api.metrics
CREATE TABLE IF NOT EXISTS api.metrics (
time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
--client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
client_id TEXT NULL,
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
latitude DOUBLE PRECISION NULL,
@@ -97,22 +96,19 @@ SELECT create_hypertable('api.metrics', 'time', chunk_time_interval => INTERVAL

CREATE TABLE IF NOT EXISTS api.logbook(
id SERIAL PRIMARY KEY,
--client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
--client_id VARCHAR(255) NULL,
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
active BOOLEAN DEFAULT false,
name VARCHAR(255),
_from VARCHAR(255),
name TEXT,
_from TEXT,
_from_lat DOUBLE PRECISION NULL,
_from_lng DOUBLE PRECISION NULL,
_to VARCHAR(255),
_to TEXT,
_to_lat DOUBLE PRECISION NULL,
_to_lng DOUBLE PRECISION NULL,
--track_geom Geometry(LINESTRING)
track_geom geometry(LINESTRING,4326) NULL,
track_geog geography(LINESTRING) NULL,
track_geojson JSON NULL,
track_gpx XML NULL,
track_geojson JSONB NULL,
_from_time TIMESTAMP WITHOUT TIME ZONE NOT NULL,
_to_time TIMESTAMP WITHOUT TIME ZONE NULL,
distance NUMERIC, -- meters?
@@ -137,19 +133,16 @@ COMMENT ON COLUMN api.logbook.track_geom IS 'postgis geometry type EPSG:4326 Uni
CREATE INDEX ON api.logbook USING GIST ( track_geog );
COMMENT ON COLUMN api.logbook.track_geog IS 'postgis geography type default SRID 4326 Unit: degres';
-- Otherwise -- ERROR: Only lon/lat coordinate systems are supported in geography.
COMMENT ON COLUMN api.logbook.track_geojson IS 'store the geojson track metrics data, can not depend api.metrics table, should be generate from linetring to save disk space?';
COMMENT ON COLUMN api.logbook.track_gpx IS 'store the gpx track metrics data, can not depend api.metrics table, should be generate from linetring to save disk space?';
COMMENT ON COLUMN api.logbook.track_geojson IS 'store generated geojson with track metrics data using with LineString and Point features, we can not depend api.metrics table';

---------------------------------------------------------------------------
-- Stays
-- virtual logbook by boat?
CREATE TABLE IF NOT EXISTS api.stays(
id SERIAL PRIMARY KEY,
--client_id VARCHAR(255) NOT NULL REFERENCES api.metadata(client_id) ON DELETE RESTRICT,
--client_id VARCHAR(255) NULL,
vessel_id TEXT NOT NULL REFERENCES api.metadata(vessel_id) ON DELETE RESTRICT,
active BOOLEAN DEFAULT false,
name VARCHAR(255),
name TEXT,
latitude DOUBLE PRECISION NULL,
longitude DOUBLE PRECISION NULL,
geog GEOGRAPHY(POINT) NULL,
@@ -384,6 +377,12 @@ CREATE FUNCTION metrics_trigger_fn() RETURNS trigger AS $metrics$
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], invalid longitude >= 180 OR <= -180 [%] [%]', NEW.vessel_id, NEW.latitude, NEW.longitude;
RETURN NULL;
END IF;
-- Check if valid longitude and latitude not close to -0.0000001 from Victron Cerbo
IF NEW.latitude = NEW.longitude THEN
-- Ignore entry if latitude,longitude are equal
RAISE WARNING 'Metrics Ignoring metric, vessel_id [%], latitude and longitude are equal [%] [%]', NEW.vessel_id, NEW.latitude, NEW.longitude;
RETURN NULL;
END IF;
-- Check if status is null
IF NEW.status IS NULL THEN
RAISE WARNING 'Metrics Unknown NEW.status, vessel_id [%], null status, set to default moored from [%]', NEW.vessel_id, NEW.status;
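The new guard simply drops any sample whose latitude equals its longitude, which is how the spurious -0.0000001/-0.0000001 positions from a Victron Cerbo show up. A standalone illustration of the check (values are only an example):

```sql
-- Standalone illustration of the new trigger condition: equal latitude/longitude is rejected.
SELECT (latitude = longitude) AS would_be_ignored
FROM (VALUES (-0.0000001::float8, -0.0000001::float8)) AS t(latitude, longitude);
-- returns true, so metrics_trigger_fn would RAISE WARNING and RETURN NULL for such a row.
```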
@@ -16,47 +16,75 @@ CREATE OR REPLACE FUNCTION api.timelapse_fn(
IN end_log INTEGER DEFAULT NULL,
IN start_date TEXT DEFAULT NULL,
IN end_date TEXT DEFAULT NULL,
OUT geojson JSON) RETURNS JSON AS $timelapse$
OUT geojson JSONB) RETURNS JSONB AS $timelapse$
DECLARE
_geojson jsonb;
BEGIN
-- TODO using jsonb pgsql function instead of python
-- Using sub query to force id order by
-- Merge GIS track_geom into a GeoJSON MultiLineString
IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
SELECT jsonb_agg(track_geojson->'features') INTO _geojson
FROM api.logbook
WHERE id >= start_log
AND id <= end_log
AND track_geojson IS NOT NULL;
--raise WARNING 'by log _geojson %' , _geojson;
WITH logbook as (
SELECT track_geom
FROM api.logbook
WHERE id >= start_log
AND id <= end_log
AND track_geom IS NOT NULL
GROUP BY id
ORDER BY id ASC
)
SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM (
SELECT ST_Collect(
ARRAY(
SELECT track_geom FROM logbook))
) as geo;
--raise WARNING 'by log id _geojson %' , _geojson;
ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
SELECT jsonb_agg(track_geojson->'features') INTO _geojson
FROM api.logbook
WHERE _from_time >= start_log::TIMESTAMP WITHOUT TIME ZONE
AND _to_time <= end_date::TIMESTAMP WITHOUT TIME ZONE + interval '23 hours 59 minutes'
AND track_geojson IS NOT NULL;
WITH logbook as (
SELECT track_geom
FROM api.logbook
WHERE _from_time >= start_log::TIMESTAMP WITHOUT TIME ZONE
AND _to_time <= end_date::TIMESTAMP WITHOUT TIME ZONE + interval '23 hours 59 minutes'
AND track_geom IS NOT NULL
GROUP BY id
ORDER BY id ASC
)
SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM (
SELECT ST_Collect(
ARRAY(
SELECT track_geom FROM logbook))
) as geo;
--raise WARNING 'by date _geojson %' , _geojson;
ELSE
SELECT jsonb_agg(track_geojson->'features') INTO _geojson
FROM api.logbook
WHERE track_geojson IS NOT NULL;
WITH logbook as (
SELECT track_geom
FROM api.logbook
WHERE track_geom IS NOT NULL
GROUP BY id
ORDER BY id ASC
)
SELECT ST_AsGeoJSON(geo.*) INTO _geojson FROM (
SELECT ST_Collect(
ARRAY(
SELECT track_geom FROM logbook))
) as geo;
--raise WARNING 'all result _geojson %' , _geojson;
END IF;
-- Return a GeoJSON filter on Point
-- Return a GeoJSON MultiLineString
-- result _geojson [null, null]
--raise WARNING 'result _geojson %' , _geojson;
SELECT json_build_object(
'type', 'FeatureCollection',
'features', public.geojson_py_fn(_geojson, 'LineString'::TEXT) ) INTO geojson;
'features', ARRAY[_geojson] ) INTO geojson;
END;
$timelapse$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.timelapse_fn
IS 'Export to geojson feature point with Time and courseOverGroundTrue properties';
IS 'Export all selected logs geometry `track_geom` to a geojson as MultiLineString with empty properties';
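The reworked timelapse_fn stops aggregating per-log track_geojson features and instead collects the per-log LINESTRING geometries into one geometry that is rendered as GeoJSON. A minimal sketch of that technique, assuming a few api.logbook rows with track_geom populated:

```sql
-- Minimal sketch: collect the per-log LINESTRINGs into one geometry, then render it as GeoJSON.
WITH logbook AS (
    SELECT track_geom
      FROM api.logbook
     WHERE track_geom IS NOT NULL
     ORDER BY id ASC
)
SELECT ST_AsGeoJSON(geo.*)
  FROM ( SELECT ST_Collect(ARRAY(SELECT track_geom FROM logbook)) ) AS geo;
-- ST_Collect over LINESTRINGs yields a MULTILINESTRING, so the resulting GeoJSON
-- feature has "type": "MultiLineString" and no per-point properties.
```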
-- export_logbook_geojson_fn
DROP FUNCTION IF EXISTS api.export_logbook_geojson_fn;
CREATE FUNCTION api.export_logbook_geojson_fn(IN _id integer, OUT geojson JSON) RETURNS JSON AS $export_logbook_geojson$
CREATE FUNCTION api.export_logbook_geojson_fn(IN _id integer, OUT geojson JSONB) RETURNS JSONB AS $export_logbook_geojson$
-- validate with geojson.io
DECLARE
logbook_rec record;
@@ -80,37 +108,236 @@ $export_logbook_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.export_logbook_geojson_fn
IS 'Export a log entry to geojson feature linestring and multipoint';
IS 'Export a log entry to geojson with features LineString and Point';

-- Generate GPX XML file output
-- https://opencpn.org/OpenCPN/info/gpxvalidation.html
--
DROP FUNCTION IF EXISTS api.export_logbook_gpx_fn;
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_fn(IN _id INTEGER, OUT gpx XML) RETURNS pg_catalog.xml
AS $export_logbook_gpx$
CREATE OR REPLACE FUNCTION api.export_logbook_gpx_fn(IN _id INTEGER) RETURNS pg_catalog.xml
AS $export_logbook_gpx2$
DECLARE
app_settings jsonb;
BEGIN
-- If _id is is not NULL and > 0
IF _id IS NULL OR _id < 1 THEN
RAISE WARNING '-> export_logbook_gpx_fn invalid input %', _id;
RETURN '';
END IF;
-- Gather url from app settings
app_settings := get_app_url_fn();
--RAISE DEBUG '-> logbook_update_gpx_fn app_settings %', app_settings;
-- Generate GPX XML, extract Point features from geojson.
RETURN xmlelement(name gpx,
xmlattributes( '1.1' as version,
'PostgSAIL' as creator,
'http://www.topografix.com/GPX/1/1' as xmlns,
'http://www.opencpn.org' as "xmlns:opencpn",
app_settings->>'app.url' as "xmlns:postgsail",
'http://www.w3.org/2001/XMLSchema-instance' as "xmlns:xsi",
'http://www.garmin.com/xmlschemas/GpxExtensions/v3' as "xmlns:gpxx",
'http://www.topografix.com/GPX/1/1 http://www.topografix.com/GPX/1/1/gpx.xsd http://www.garmin.com/xmlschemas/GpxExtensions/v3 http://www8.garmin.com/xmlschemas/GpxExtensionsv3.xsd' as "xsi:schemaLocation"),
xmlelement(name metadata,
xmlelement(name link, xmlattributes(app_settings->>'app.url' as href),
xmlelement(name text, 'PostgSail'))),
xmlelement(name trk,
xmlelement(name name, l.name),
xmlelement(name desc, l.notes),
xmlelement(name link, xmlattributes(concat(app_settings->>'app.url', '/log/', l.id) as href),
xmlelement(name text, l.name)),
xmlelement(name extensions, xmlelement(name "postgsail:log_id", l.id),
xmlelement(name "postgsail:link", concat(app_settings->>'app.url', '/log/', l.id)),
xmlelement(name "opencpn:guid", uuid_generate_v4()),
xmlelement(name "opencpn:viz", '1'),
xmlelement(name "opencpn:start", l._from_time),
xmlelement(name "opencpn:end", l._to_time)
),
xmlelement(name trkseg, xmlagg(
xmlelement(name trkpt,
xmlattributes(features->'geometry'->'coordinates'->1 as lat, features->'geometry'->'coordinates'->0 as lon),
xmlelement(name time, features->'properties'->>'time')
)))))::pg_catalog.xml
FROM api.logbook l, jsonb_array_elements(track_geojson->'features') AS features
WHERE features->'geometry'->>'type' = 'Point'
AND l.id = _id
GROUP BY l.name,l.notes,l.id;
END;
$export_logbook_gpx2$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.export_logbook_gpx_fn
IS 'Export a log entry to GPX XML format';
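At the SQL level the GPX export is presumably called per log entry, most likely surfaced through the api schema via PostgREST; a quick check would be (log id 1 is only an example):

```sql
-- Example only: render logbook entry 1 as a GPX XML document.
SELECT api.export_logbook_gpx_fn(1);
```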
-- Generate KML XML file output
-- https://developers.google.com/kml/documentation/kml_tut
-- TODO https://developers.google.com/kml/documentation/time#timespans
DROP FUNCTION IF EXISTS api.export_logbook_kml_fn;
CREATE OR REPLACE FUNCTION api.export_logbook_kml_fn(IN _id INTEGER) RETURNS pg_catalog.xml
AS $export_logbook_kml$
DECLARE
logbook_rec record;
BEGIN
-- If _id is is not NULL and > 0
IF _id IS NULL OR _id < 1 THEN
RAISE WARNING '-> export_logbook_gpx_fn invalid input %', _id;
RETURN;
RAISE WARNING '-> export_logbook_kml_fn invalid input %', _id;
return '';
END IF;
-- Gather log details
SELECT * INTO logbook_rec
FROM api.logbook WHERE id = _id;
-- Ensure the query is successful
IF logbook_rec.vessel_id IS NULL THEN
RAISE WARNING '-> export_logbook_gpx_fn invalid logbook %', _id;
RETURN;
RAISE WARNING '-> export_logbook_kml_fn invalid logbook %', _id;
return '';
END IF;
gpx := logbook_rec.track_gpx;
-- Extract POINT from LINESTRING to generate KML XML
RETURN xmlelement(name kml,
xmlattributes( '1.0' as version,
'PostgSAIL' as creator,
'http://www.w3.org/2005/Atom' as "xmlns:atom",
'http://www.opengis.net/kml/2.2' as "xmlns",
'http://www.google.com/kml/ext/2.2' as "xmlns:gx",
'http://www.opengis.net/kml/2.2' as "xmlns:kml"),
xmlelement(name "Document",
xmlelement(name name, logbook_rec.name),
xmlelement(name "Placemark",
xmlelement(name name, logbook_rec.notes),
ST_AsKML(logbook_rec.track_geom)::pg_catalog.xml)
))::pg_catalog.xml
FROM api.logbook WHERE id = _id;
END;
$export_logbook_gpx$ LANGUAGE plpgsql;
$export_logbook_kml$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.export_logbook_gpx_fn
IS 'Export a log entry to GPX XML format';
api.export_logbook_kml_fn
IS 'Export a log entry to KML XML format';
DROP FUNCTION IF EXISTS api.export_logbooks_gpx_fn;
CREATE OR REPLACE FUNCTION api.export_logbooks_gpx_fn(
IN start_log INTEGER DEFAULT NULL,
IN end_log INTEGER DEFAULT NULL) RETURNS pg_catalog.xml
AS $export_logbooks_gpx$
declare
merged_jsonb jsonb;
app_settings jsonb;
BEGIN
-- Merge GIS track_geom of geometry type Point into a jsonb array format
IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
SELECT jsonb_agg(
jsonb_build_object('coordinates', f->'geometry'->'coordinates', 'time', f->'properties'->>'time')
) INTO merged_jsonb
FROM (
SELECT jsonb_array_elements(track_geojson->'features') AS f
FROM api.logbook
WHERE id >= start_log
AND id <= end_log
AND track_geojson IS NOT NULL
GROUP BY id
ORDER BY id ASC
) AS sub
WHERE (f->'geometry'->>'type') = 'Point';
ELSE
SELECT jsonb_agg(
jsonb_build_object('coordinates', f->'geometry'->'coordinates', 'time', f->'properties'->>'time')
) INTO merged_jsonb
FROM (
SELECT jsonb_array_elements(track_geojson->'features') AS f
FROM api.logbook
WHERE track_geojson IS NOT NULL
GROUP BY id
ORDER BY id ASC
) AS sub
WHERE (f->'geometry'->>'type') = 'Point';
END IF;
--RAISE WARNING '-> export_logbooks_gpx_fn _jsonb %' , _jsonb;
-- Gather url from app settings
app_settings := get_app_url_fn();
--RAISE WARNING '-> export_logbooks_gpx_fn app_settings %', app_settings;
-- Generate GPX XML, extract Point features from geojson.
RETURN xmlelement(name gpx,
xmlattributes( '1.1' as version,
'PostgSAIL' as creator,
'http://www.topografix.com/GPX/1/1' as xmlns,
'http://www.opencpn.org' as "xmlns:opencpn",
app_settings->>'app.url' as "xmlns:postgsail"),
xmlelement(name metadata,
xmlelement(name link, xmlattributes(app_settings->>'app.url' as href),
xmlelement(name text, 'PostgSail'))),
xmlelement(name trk,
xmlelement(name name, 'logbook name'),
xmlelement(name trkseg, xmlagg(
xmlelement(name trkpt,
xmlattributes(features->'coordinates'->1 as lat, features->'coordinates'->0 as lon),
xmlelement(name time, features->'properties'->>'time')
)))))::pg_catalog.xml
FROM jsonb_array_elements(merged_jsonb) AS features;
END;
$export_logbooks_gpx$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.export_logbooks_gpx_fn
IS 'Export a logs entries to GPX XML format';
DROP FUNCTION IF EXISTS api.export_logbooks_kml_fn;
CREATE OR REPLACE FUNCTION api.export_logbooks_kml_fn(
IN start_log INTEGER DEFAULT NULL,
IN end_log INTEGER DEFAULT NULL) RETURNS pg_catalog.xml
AS $export_logbooks_kml$
DECLARE
_geom geometry;
app_settings jsonb;
BEGIN
-- Merge GIS track_geom into a GeoJSON MultiLineString
IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
WITH logbook as (
SELECT track_geom
FROM api.logbook
WHERE id >= start_log
AND id <= end_log
AND track_geom IS NOT NULL
GROUP BY id
ORDER BY id ASC
)
SELECT ST_Collect(
ARRAY(
SELECT track_geom FROM logbook))
into _geom;
ELSE
WITH logbook as (
SELECT track_geom
FROM api.logbook
WHERE track_geom IS NOT NULL
GROUP BY id
ORDER BY id ASC
)
SELECT ST_Collect(
ARRAY(
SELECT track_geom FROM logbook))
into _geom;
--raise WARNING 'all result _geojson %' , _geojson;
END IF;

-- Extract POINT from LINESTRING to generate KML XML
RETURN xmlelement(name kml,
xmlattributes( '1.0' as version,
'PostgSAIL' as creator,
'http://www.w3.org/2005/Atom' as "xmlns:atom",
'http://www.opengis.net/kml/2.2' as "xmlns",
'http://www.google.com/kml/ext/2.2' as "xmlns:gx",
'http://www.opengis.net/kml/2.2' as "xmlns:kml"),
xmlelement(name "Document",
xmlelement(name name, 'logbook name'),
xmlelement(name "Placemark",
ST_AsKML(_geom)::pg_catalog.xml
)
)
)::pg_catalog.xml;
END;
$export_logbooks_kml$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
api.export_logbooks_kml_fn
IS 'Export a logs entries to KML XML format';

-- Find all log from and to moorage geopoint within 100m
DROP FUNCTION IF EXISTS api.find_log_from_moorage_fn;
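Presumed usage of the single-log KML export and the new bulk export functions above, either with a log id range or, with NULL arguments, every logbook entry of the current vessel (ids are only examples):

```sql
-- Example only:
SELECT api.export_logbook_kml_fn(1);        -- one log as KML
SELECT api.export_logbooks_gpx_fn(1, 10);   -- logs 1..10 merged into a single GPX track
SELECT api.export_logbooks_kml_fn();        -- all logs of the current vessel as KML
```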
@@ -38,13 +38,13 @@ CREATE VIEW stay_in_progress AS
DROP VIEW IF EXISTS api.logs_view;
CREATE OR REPLACE VIEW api.logs_view WITH (security_invoker=true,security_barrier=true) AS
SELECT id,
name as "Name",
_from as "From",
_from_time as "Started",
_to as "To",
_to_time as "Ended",
distance as "Distance",
duration as "Duration"
name as "name",
_from as "from",
_from_time as "started",
_to as "to",
_to_time as "ended",
distance as "distance",
duration as "duration"
FROM api.logbook l
WHERE _to_time IS NOT NULL
ORDER BY _from_time DESC;
@@ -56,13 +56,13 @@ COMMENT ON VIEW
-- Initial try of MATERIALIZED VIEW
CREATE MATERIALIZED VIEW api.logs_mat_view AS
SELECT id,
name as "Name",
_from as "From",
_from_time as "Started",
_to as "To",
_to_time as "Ended",
distance as "Distance",
duration as "Duration"
name as "name",
_from as "from",
_from_time as "started",
_to as "to",
_to_time as "ended",
distance as "distance",
duration as "duration"
FROM api.logbook l
WHERE _to_time IS NOT NULL
ORDER BY _from_time DESC;
@@ -74,14 +74,14 @@ COMMENT ON MATERIALIZED VIEW
DROP VIEW IF EXISTS api.log_view;
CREATE OR REPLACE VIEW api.log_view WITH (security_invoker=true,security_barrier=true) AS
SELECT id,
name as "Name",
_from as "From",
_from_time as "Started",
_to as "To",
_to_time as "Ended",
distance as "Distance",
duration as "Duration",
notes as "Notes",
name as "name",
_from as "from",
_from_time as "started",
_to as "to",
_to_time as "ended",
distance as "distance",
duration as "duration",
notes as "notes",
track_geojson as geojson,
avg_speed as avg_speed,
max_speed as max_speed,
@@ -257,12 +257,12 @@ CREATE OR REPLACE VIEW api.stats_logs_view WITH (security_invoker=true,security_
SELECT m.time FROM api.metrics m ORDER BY m.time ASC limit 1),
logbook AS (
SELECT
count(*) AS "Number of Log Entries",
max(l.max_speed) AS "Max Speed",
max(l.max_wind_speed) AS "Max Wind Speed",
sum(l.distance) AS "Total Distance",
sum(l.duration) AS "Total Time Underway",
concat( max(l.distance), ' NM, ', max(l.duration), ' hours') AS "Longest Nonstop Sail"
count(*) AS "number_of_log_entries",
max(l.max_speed) AS "max_speed",
max(l.max_wind_speed) AS "max_wind_speed",
sum(l.distance) AS "total_distance",
sum(l.duration) AS "total_time_underway",
concat( max(l.distance), ' NM, ', max(l.duration), ' hours') AS "longest_nonstop_sail"
FROM api.logbook l)
SELECT
m.name as Name,
@@ -301,10 +301,10 @@ CREATE OR REPLACE VIEW api.stats_moorages_view WITH (security_invoker=true,secur
select sum(m.stay_duration) as time_spent_away from api.moorages m where home_flag is false
)
SELECT
home_ports.home_ports as "Home Ports",
unique_moorage.unique_moorage as "Unique Moorages",
time_at_home_ports.time_at_home_ports "Time Spent at Home Port(s)",
time_spent_away.time_spent_away as "Time Spent Away"
home_ports.home_ports as "home_ports",
unique_moorage.unique_moorage as "unique_moorages",
time_at_home_ports.time_at_home_ports "time_spent_at_home_port(s)",
time_spent_away.time_spent_away as "time_spent_away"
FROM home_ports, unique_moorage, time_at_home_ports, time_spent_away;
COMMENT ON VIEW
api.stats_moorages_view
@@ -346,8 +346,8 @@ CREATE VIEW api.monitoring_view WITH (security_invoker=true,security_barrier=tru
metrics-> 'environment.outside.temperature' AS outsideTemperature,
metrics-> 'environment.wind.speedOverGround' AS windSpeedOverGround,
metrics-> 'environment.wind.directionGround' AS windDirectionGround,
metrics-> 'environment.inside.humidity' AS insideHumidity,
metrics-> 'environment.outside.humidity' AS outsideHumidity,
metrics-> 'environment.inside.relativeHumidity' AS insideHumidity,
metrics-> 'environment.outside.relativeHumidity' AS outsideHumidity,
metrics-> 'environment.outside.pressure' AS outsidePressure,
metrics-> 'environment.inside.pressure' AS insidePressure,
metrics-> 'electrical.batteries.House.capacity.stateOfCharge' AS batteryCharge,
@@ -361,7 +361,7 @@ CREATE VIEW api.monitoring_view WITH (security_invoker=true,security_barrier=tru
'longitude', m.longitude
)::jsonb ) AS geojson,
current_setting('vessel.name', false) AS name
FROM api.metrics m
FROM api.metrics m
ORDER BY time DESC LIMIT 1;
COMMENT ON VIEW
api.monitoring_view
@@ -372,7 +372,7 @@ CREATE VIEW api.monitoring_humidity WITH (security_invoker=true,security_barrier
SELECT m.time, key, value
FROM api.metrics m,
jsonb_each_text(m.metrics)
WHERE key ILIKE 'environment.%.humidity'
WHERE key ILIKE 'environment.%.humidity' OR key ILIKE 'environment.%.relativeHumidity'
ORDER BY m.time DESC;
COMMENT ON VIEW
api.monitoring_humidity
@@ -348,21 +348,6 @@ COMMENT ON FUNCTION
public.cron_vacuum_fn
IS 'init by pg_cron to full vacuum tables on schema api';

-- CRON for clean up job details logs
CREATE FUNCTION job_run_details_cleanup_fn() RETURNS void AS $$
DECLARE
BEGIN
-- Remove job run log older than 3 months
RAISE NOTICE 'job_run_details_cleanup_fn';
DELETE FROM postgres.cron.job_run_details
WHERE start_time <= NOW() AT TIME ZONE 'UTC' - INTERVAL '91 DAYS';
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
public.job_run_details_cleanup_fn
IS 'init by pg_cron to cleanup job_run_details table on schema public postgres db';

-- CRON for alerts notification
CREATE FUNCTION cron_process_alerts_fn() RETURNS void AS $$
DECLARE
@@ -404,7 +389,7 @@ BEGIN
WHERE v.owner_email = a.email)
LOOP
RAISE NOTICE '-> cron_process_no_vessel_rec_fn for [%]', no_vessel;
SELECT json_build_object('email', no_vessel.email, 'recipient', a.first) into user_settings;
SELECT json_build_object('email', no_vessel.email, 'recipient', no_vessel.first) into user_settings;
RAISE NOTICE '-> debug cron_process_no_vessel_rec_fn [%]', user_settings;
-- Send notification
PERFORM send_notification_fn('no_vessel'::TEXT, user_settings::JSONB);
@@ -433,8 +418,8 @@ BEGIN
FROM api.metadata m
WHERE v.vessel_id = m.vessel_id) AND v.owner_email = a.email
LOOP
RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_metadata_rec;
SELECT json_build_object('email', no_metadata_rec.email, 'recipient', a.first) into user_settings;
RAISE NOTICE '-> cron_process_no_metadata_rec_fn for [%]', no_metadata_rec;
SELECT json_build_object('email', no_metadata_rec.email, 'recipient', no_metadata_rec.first) into user_settings;
RAISE NOTICE '-> debug cron_process_no_metadata_rec_fn [%]', user_settings;
-- Send notification
PERFORM send_notification_fn('no_metadata'::TEXT, user_settings::JSONB);
@@ -456,13 +441,14 @@ BEGIN
RAISE NOTICE 'cron_process_no_activity_fn';
FOR no_activity_rec in
SELECT
v.owner_email,m.name,m.vessel_id,m.time
FROM api.metadata m
LEFT JOIN auth.vessels v ON v.vessel_id = m.vessel_id
WHERE m.time <= NOW() AT TIME ZONE 'UTC' - INTERVAL '200 DAYS'
v.owner_email,m.name,m.vessel_id,m.time,a.first
FROM auth.accounts a
LEFT JOIN auth.vessels v ON v.owner_email = a.email
LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '200 DAYS'
LOOP
RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_activity_rec;
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', a.first) into user_settings;
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
RAISE NOTICE '-> debug cron_process_no_activity_rec_fn [%]', user_settings;
-- Send notification
PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
@@ -473,3 +459,88 @@ $no_activity$ language plpgsql;
COMMENT ON FUNCTION
public.cron_process_no_activity_fn
IS 'init by pg_cron, check for vessel with no activity for more than 200 days then send notification';

-- CRON for deactivated/deletion
CREATE FUNCTION cron_process_deactivated_fn() RETURNS void AS $deactivated$
DECLARE
no_activity_rec record;
user_settings jsonb;
BEGIN
RAISE NOTICE 'cron_process_deactivated_fn';

-- List accounts with vessel inactivity for more than 1 YEAR
FOR no_activity_rec in
SELECT
v.owner_email,m.name,m.vessel_id,m.time,a.first
FROM auth.accounts a
LEFT JOIN auth.vessels v ON v.owner_email = a.email
LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
LOOP
RAISE NOTICE '-> cron_process_deactivated_rec_fn for inactivity [%]', no_activity_rec;
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
RAISE NOTICE '-> debug cron_process_deactivated_rec_fn inactivity [%]', user_settings;
-- Send notification
PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
--PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
END LOOP;

-- List accounts with no vessel metadata for more than 1 YEAR
FOR no_activity_rec in
SELECT
a.user_id,a.email,a.first,a.created_at
FROM auth.accounts a, auth.vessels v
WHERE NOT EXISTS (
SELECT *
FROM api.metadata m
WHERE v.vessel_id = m.vessel_id) AND v.owner_email = a.email
AND v.created_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
LOOP
RAISE NOTICE '-> cron_process_deactivated_rec_fn for no metadata [%]', no_activity_rec;
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
RAISE NOTICE '-> debug cron_process_deactivated_rec_fn no metadata [%]', user_settings;
-- Send notification
PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
--PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
END LOOP;

-- List accounts with no vessel created for more than 1 YEAR
FOR no_activity_rec in
SELECT a.user_id,a.email,a.first,a.created_at
FROM auth.accounts a
WHERE NOT EXISTS (
SELECT *
FROM auth.vessels v
WHERE v.owner_email = a.email)
AND a.created_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
LOOP
RAISE NOTICE '-> cron_process_deactivated_rec_fn for no vessel [%]', no_activity_rec;
SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
RAISE NOTICE '-> debug cron_process_deactivated_rec_fn no vessel [%]', user_settings;
-- Send notification
PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
--PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
END LOOP;
END;
$deactivated$ language plpgsql;
-- Description
COMMENT ON FUNCTION
public.cron_process_deactivated_fn
IS 'init by pg_cron, check for vessel with no activity for more than 1 year then send notification and delete data';

-- Need to be in the postgres database.
\c postgres
-- CRON for clean up job details logs
CREATE FUNCTION job_run_details_cleanup_fn() RETURNS void AS $$
DECLARE
BEGIN
-- Remove job run log older than 3 months
RAISE NOTICE 'job_run_details_cleanup_fn';
DELETE FROM cron.job_run_details
WHERE start_time <= NOW() AT TIME ZONE 'UTC' - INTERVAL '91 DAYS';
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
public.job_run_details_cleanup_fn
IS 'init by pg_cron to cleanup job_run_details table on schema public postgres db';
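The cleanup function above is written to be driven by pg_cron from the postgres database, where cron.job_run_details lives. The diff does not show the schedule itself; wiring it up would presumably look something like this (job name and interval are illustrative only):

```sql
-- Illustrative only: a typical pg_cron registration for the cleanup function.
SELECT cron.schedule(
    'job_run_details_cleanup',                    -- hypothetical job name
    '0 3 * * 0',                                  -- every Sunday at 03:00 UTC
    'SELECT public.job_run_details_cleanup_fn()'
);
```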
@@ -52,7 +52,7 @@ COMMENT ON TABLE
-- with escape value, eg: E'A\nB\r\nC'
-- https://stackoverflow.com/questions/26638615/insert-line-break-in-postgresql-when-updating-text-field
-- TODO Update notification subject for log entry to 'logbook #NB ...'
INSERT INTO email_templates VALUES
INSERT INTO public.email_templates VALUES
('logbook',
'New Logbook Entry',
E'Hello __RECIPIENT__,\n\nWe just wanted to let you know that you have a new entry on openplotter.cloud: "__LOGBOOK_NAME__"\r\n\r\nSee more details at __APP_URL__/log/__LOGBOOK_LINK__\n\nHappy sailing!\nThe PostgSail Team',
@@ -64,19 +64,19 @@ INSERT INTO email_templates VALUES
'Welcome',
E'Hi!\nYou successfully created an account\nKeep in mind to register your vessel.\n'),
('new_vessel',
'New vessel',
'New boat',
E'Hi!\nHow are you?\n__BOAT__ is now linked to your account.\n',
'New vessel',
'New boat',
E'Hi!\nHow are you?\n__BOAT__ is now linked to your account.\n'),
('monitor_offline',
'Vessel Offline',
'Boat went Offline',
E'__BOAT__ has been offline for more than an hour\r\nFind more details at __APP_URL__/boats\n',
'Vessel Offline',
'Boat went Offline',
E'__BOAT__ has been offline for more than an hour\r\nFind more details at __APP_URL__/boats\n'),
('monitor_online',
'Vessel Online',
'Boat went Online',
E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n',
'Vessel Online',
'Boat went Offline',
E'__BOAT__ just came online\nFind more details at __APP_URL__/boats\n'),
('new_badge',
'New Badge!',
@@ -115,19 +115,24 @@ INSERT INTO email_templates VALUES
E'Congratulations!\nYou have just connect your account to your vessel, @postgsail_bot.\n'),
('no_vessel',
'PostgSail add your boat',
E'Hello __RECIPIENT__,\nYou have created an account on PostgSail but you have not created your boat yet.\nIf you need any assistance we would be happy to help. It is free and an open-source.\nThe PostgSail Team',
E'Hello __RECIPIENT__,\nYou have created an account on PostgSail but you have not created your boat yet.\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
'PostgSail next step',
E'Hello,\nYou should create your vessel. Check your email!\n'),
('no_metadata',
'PostgSail connect your boat',
E'Hello __RECIPIENT__,\nYou have created an account on PostgSail but you have not connected your boat yet.\nIf you need any assistance we would be happy to help. It is free and an open-source.\nThe PostgSail Team',
E'Hello __RECIPIENT__,\nYou have created an account on PostgSail but you have not connected your boat yet.\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
'PostgSail next step',
E'Hello,\nYou should connect your vessel. Check your email!\n'),
('no_activity',
'PostgSail boat inactivity',
E'Hello __RECIPIENT__,\nWe don\'t see any activity on your account, do you need any assistance?\nIf you need any assistance we would be happy to help. It is free and an open-source.\nThe PostgSail Team',
E'Hello __RECIPIENT__,\nWe don\'t see any activity on your account, do you need any assistance?\nIf you need any assistance I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
'PostgSail inactivity!',
E'Congratulations!\nWe detected inactivity. Check your email!\n');
E'We detected inactivity. Check your email!\n'),
('deactivated',
'PostgSail account deactivated',
E'Hello __RECIPIENT__,\nYour account has been deactivated and all your data has been removed from PostgSail system.',
'PostgSail deactivated!',
E'We removed your account. Check your email!\n');

---------------------------------------------------------------------------
-- Queue handling
@@ -15,7 +15,7 @@ CREATE SCHEMA IF NOT EXISTS public;
-- process single cron event, process_[logbook|stay|moorage]_queue_fn()
--

CREATE OR REPLACE FUNCTION logbook_metrics_dwithin_fn(
CREATE OR REPLACE FUNCTION public.logbook_metrics_dwithin_fn(
IN _start text,
IN _end text,
IN lgn float,
@@ -33,18 +33,18 @@ CREATE OR REPLACE FUNCTION logbook_metrics_dwithin_fn(
AND ST_DWithin(
Geography(ST_MakePoint(m.longitude, m.latitude)),
Geography(ST_MakePoint(lgn, lat)),
10
15
);
END;
$logbook_metrics_dwithin$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.logbook_metrics_dwithin_fn
IS 'Check if all entries for a logbook are in stationary movement with 10 meters';
IS 'Check if all entries for a logbook are in stationary movement with 15 meters';

-- Update a logbook with avg data
-- TODO using timescale function
CREATE OR REPLACE FUNCTION logbook_update_avg_fn(
CREATE OR REPLACE FUNCTION public.logbook_update_avg_fn(
IN _id integer,
IN _start TEXT,
IN _end TEXT,
@@ -74,8 +74,8 @@ COMMENT ON FUNCTION
-- Create a LINESTRING for Geometry
-- Todo validate st_length unit?
-- https://postgis.net/docs/ST_Length.html
DROP FUNCTION IF EXISTS logbook_update_geom_distance_fn;
CREATE FUNCTION logbook_update_geom_distance_fn(IN _id integer, IN _start text, IN _end text,
DROP FUNCTION IF EXISTS public.logbook_update_geom_distance_fn;
CREATE FUNCTION public.logbook_update_geom_distance_fn(IN _id integer, IN _start text, IN _end text,
OUT _track_geom Geometry(LINESTRING),
OUT _track_distance double precision
) AS $logbook_geo_distance$
@@ -109,7 +109,7 @@ COMMENT ON FUNCTION
IS 'Update logbook details with geometry data an distance, ST_Length in Nautical Mile (international)';

-- Create GeoJSON for api consume.
CREATE FUNCTION logbook_update_geojson_fn(IN _id integer, IN _start text, IN _end text,
CREATE FUNCTION public.logbook_update_geojson_fn(IN _id integer, IN _start text, IN _end text,
OUT _track_geojson JSON
) AS $logbook_geojson$
declare
@@ -117,52 +117,52 @@ CREATE FUNCTION logbook_update_geojson_fn(IN _id integer, IN _start text, IN _en
metrics_geojson jsonb;
_map jsonb;
begin
-- GeoJson Feature Logbook linestring
SELECT
ST_AsGeoJSON(log.*) into log_geojson
-- GeoJson Feature Logbook linestring
SELECT
ST_AsGeoJSON(log.*) into log_geojson
FROM
( select
id,name,
distance,
duration,
avg_speed,
avg_speed,
max_wind_speed,
_from_time,
notes,
track_geom
FROM api.logbook
WHERE id = _id
( SELECT
id,name,
distance,
duration,
avg_speed,
max_speed,
max_wind_speed,
_from_time,
notes,
track_geom
FROM api.logbook
WHERE id = _id
) AS log;
-- GeoJson Feature Metrics point
SELECT
json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
FROM (
( select
time,
courseovergroundtrue,
speedoverground,
anglespeedapparent,
longitude,latitude,
st_makepoint(longitude,latitude) AS geo_point
FROM api.metrics m
WHERE m.latitude IS NOT NULL
AND m.longitude IS NOT NULL
AND time >= _start::TIMESTAMP WITHOUT TIME ZONE
AND time <= _end::TIMESTAMP WITHOUT TIME ZONE
AND vessel_id = current_setting('vessel.id', false)
ORDER BY m.time ASC
)
) AS t;
-- GeoJson Feature Metrics point
SELECT
json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
FROM (
( SELECT
time,
courseovergroundtrue,
speedoverground,
anglespeedapparent,
longitude,latitude,
st_makepoint(longitude,latitude) AS geo_point
FROM api.metrics m
WHERE m.latitude IS NOT NULL
AND m.longitude IS NOT NULL
AND time >= _start::TIMESTAMP WITHOUT TIME ZONE
AND time <= _end::TIMESTAMP WITHOUT TIME ZONE
AND vessel_id = current_setting('vessel.id', false)
ORDER BY m.time ASC
)
) AS t;

-- Merge jsonb
select log_geojson::jsonb || metrics_geojson::jsonb into _map;
-- Merge jsonb
SELECT log_geojson::jsonb || metrics_geojson::jsonb into _map;
-- output
SELECT
json_build_object(
'type', 'FeatureCollection',
'features', _map
) into _track_geojson;
SELECT
json_build_object(
'type', 'FeatureCollection',
'features', _map
) into _track_geojson;
END;
$logbook_geojson$ LANGUAGE plpgsql;
-- Description
@@ -195,7 +195,7 @@ AS $logbook_update_gpx$
RAISE WARNING '-> logbook_update_gpx_fn invalid logbook %', _id;
RETURN;
END IF;
-- Gathe url from app settings
-- Gather url from app settings
app_settings := get_app_settings_fn();
--RAISE DEBUG '-> logbook_update_gpx_fn app_settings %', app_settings;
-- Generate XML
@@ -213,7 +213,7 @@ AS $logbook_update_gpx$
xmlelement(name desc, log_rec.notes),
xmlelement(name link, xmlattributes(concat(app_settings->>'app.url', '/log/', log_rec.id) as href),
xmlelement(name text, log_rec.name)),
xmlelement(name extensions, xmlelement(name "postgsail:log_id", 1),
xmlelement(name extensions, xmlelement(name "postgsail:log_id", log_rec.id),
xmlelement(name "postgsail:link", concat(app_settings->>'app.url','/log/', log_rec.id)),
xmlelement(name "opencpn:guid", uuid_generate_v4()),
xmlelement(name "opencpn:viz", '1'),
@@ -230,9 +230,9 @@ AS $logbook_update_gpx$
AND m.longitude IS NOT NULL
AND m.time >= log_rec._from_time::TIMESTAMP WITHOUT TIME ZONE
AND m.time <= log_rec._to_time::TIMESTAMP WITHOUT TIME ZONE
AND vessel_id = log_rec.vessel_id;
-- ERROR: column "m.time" must appear in the GROUP BY clause or be used in an aggregate function at character 2304
--ORDER BY m.time ASC;
AND vessel_id = log_rec.vessel_id
GROUP BY m.time
ORDER BY m.time ASC;
END;
$logbook_update_gpx$ LANGUAGE plpgsql;
-- Description
@@ -509,11 +509,11 @@ CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void
WHERE id = logbook_rec.id;

-- GPX field
gpx := logbook_update_gpx_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
UPDATE api.logbook
SET
track_gpx = gpx
WHERE id = logbook_rec.id;
--gpx := logbook_update_gpx_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
--UPDATE api.logbook
-- SET
-- track_gpx = gpx
-- WHERE id = logbook_rec.id;

-- Prepare notification, gather user settings
SELECT json_build_object('logbook_name', log_name, 'logbook_link', logbook_rec.id) into log_settings;
@@ -853,9 +853,9 @@ COMMENT ON FUNCTION
public.process_vessel_queue_fn
IS 'process new vessel notification';

-- Get user settings details from a log entry
-- Get application settings details from a log entry
DROP FUNCTION IF EXISTS get_app_settings_fn;
CREATE OR REPLACE FUNCTION get_app_settings_fn (OUT app_settings jsonb)
CREATE OR REPLACE FUNCTION get_app_settings_fn(OUT app_settings jsonb)
RETURNS jsonb
AS $get_app_settings$
DECLARE
@@ -865,17 +865,37 @@ BEGIN
FROM
public.app_settings
WHERE
name LIKE '%app.email%'
OR name LIKE '%app.pushover%'
OR name LIKE '%app.url'
OR name LIKE '%app.telegram%';
name LIKE 'app.email%'
OR name LIKE 'app.pushover%'
OR name LIKE 'app.url'
OR name LIKE 'app.telegram%';
END;
$get_app_settings$
LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
public.get_app_settings_fn
IS 'get app settings details, email, pushover, telegram';
IS 'get application settings details, email, pushover, telegram';

DROP FUNCTION IF EXISTS get_app_url_fn;
CREATE OR REPLACE FUNCTION get_app_url_fn(OUT app_settings jsonb)
RETURNS jsonb
AS $get_app_url$
DECLARE
BEGIN
SELECT
jsonb_object_agg(name, value) INTO app_settings
FROM
public.app_settings
WHERE
name = 'app.url';
END;
$get_app_url$
LANGUAGE plpgsql security definer;
-- Description
COMMENT ON FUNCTION
public.get_app_url_fn
IS 'get application url security definer';
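Since get_app_url_fn() aggregates name/value pairs with jsonb_object_agg over a single setting, its result is a one-key JSONB object. A quick check, assuming an 'app.url' row exists in public.app_settings:

```sql
-- Returns a single-key jsonb object such as {"app.url": "https://openplotter.cloud"}.
SELECT public.get_app_url_fn()->>'app.url' AS app_url;
```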
-- Send notifications
DROP FUNCTION IF EXISTS send_notification_fn;
@@ -968,7 +988,7 @@ AS $get_user_settings_from_vesselid$
FROM auth.accounts a, auth.vessels v, api.metadata m
WHERE m.vessel_id = v.vessel_id
AND m.vessel_id = vesselid
AND lower(a.email) = lower(v.owner_email);
AND a.email = v.owner_email;
PERFORM set_config('user.email', user_settings->>'email'::TEXT, false);
PERFORM set_config('user.recipient', user_settings->>'recipient'::TEXT, false);
END;
@@ -1239,7 +1259,7 @@ CREATE OR REPLACE FUNCTION public.badges_geom_fn(IN logbook_id integer) RETURNS
user_settings jsonb;
badge_tmp text;
begin
RAISE NOTICE '--> public.badges_geom_fn user.email [%], vessel.id [%]', current_setting('user.email', false), current_setting('vessel.id', false);
--RAISE NOTICE '--> public.badges_geom_fn user.email [%], vessel.id [%]', current_setting('user.email', false), current_setting('vessel.id', false);
-- Tropical & Alaska zone manually add into ne_10m_geography_marine_polys
-- Check if each geographic marine zone exist as a badge
FOR marine_rec IN
@@ -1319,7 +1339,7 @@ BEGIN
WHERE auth.accounts.email = _email;
IF account_rec.email IS NULL THEN
RAISE EXCEPTION 'Invalid user'
USING HINT = 'Unknow user or password';
USING HINT = 'Unknown user or password';
END IF;
-- Set session variables
PERFORM set_config('user.id', account_rec.user_id, false);
@@ -1397,7 +1417,7 @@ BEGIN
perform public.cron_process_new_moorage_fn();
perform public.cron_process_monitor_offline_fn();
END
$$ language plpgsql security definer;
$$ language plpgsql;

---------------------------------------------------------------------------
-- Delete all data for a account by email and vessel_id
@@ -1417,14 +1437,14 @@ BEGIN
delete from auth.accounts a where email = _email;
RETURN True;
END
$delete_account$ language plpgsql security definer;
$delete_account$ language plpgsql;

-- Dump all data for a account by email and vessel_id
CREATE OR REPLACE FUNCTION public.dump_account_fn(IN _email TEXT, IN _vessel_id TEXT) RETURNS BOOLEAN
AS $dump_account$
BEGIN
-- TODO use COPY but we can't all in one?
RETURN True;
-- TODO use COPY but we can't all in one?
select count(*) from api.metrics m where vessel_id = _vessel_id;
select * from api.metadata m where vessel_id = _vessel_id;
select * from api.logbook l where vessel_id = _vessel_id;
@@ -1433,18 +1453,18 @@ BEGIN
select * from auth.vessels v where vessel_id = _vessel_id;
select * from auth.accounts a where email = _email;
END
$dump_account$ language plpgsql security definer;
$dump_account$ language plpgsql;

CREATE OR REPLACE FUNCTION public.delete_vessel_fn(IN _vessel_id TEXT) RETURNS BOOLEAN
AS $delete_account$
AS $delete_vessel$
BEGIN
RETURN True;
select count(*) from api.metrics m where vessel_id = _vessel_id;
delete from api.metrics m where vessel_id = _vessel_id;
select * from api.metadata m where vessel_id = _vessel_id;
delete from api.metadata m where vessel_id = _vessel_id;
delete from api.logbook l where vessel_id = _vessel_id;
delete from api.moorages m where vessel_id = _vessel_id;
delete from api.stays s where vessel_id = _vessel_id;
delete from api.metadata m where vessel_id = _vessel_id;
RETURN True;
END
$delete_account$ language plpgsql security definer;
$delete_vessel$ language plpgsql;
@@ -21,17 +21,17 @@ CREATE EXTENSION IF NOT EXISTS "pgcrypto"; -- provides cryptographic functions

DROP TABLE IF EXISTS auth.accounts CASCADE;
CREATE TABLE IF NOT EXISTS auth.accounts (
userid UUID NOT NULL UNIQUE DEFAULT uuid_generate_v4(),
public_id SERIAL UNIQUE NOT NULL,
user_id TEXT NOT NULL UNIQUE DEFAULT RIGHT(gen_random_uuid()::text, 12),
email CITEXT primary key check ( email ~* '^.+@.+\..+$' ),
first text not null check (length(pass) < 512),
last text not null check (length(pass) < 512),
pass text not null check (length(pass) < 512),
role name not null check (length(role) < 512),
email CITEXT PRIMARY KEY CHECK ( email ~* '^.+@.+\..+$' ),
first TEXT NOT NULL CHECK (length(pass) < 512),
last TEXT NOT NULL CHECK (length(pass) < 512),
pass TEXT NOT NULL CHECK (length(pass) < 512),
role name NOT NULL CHECK (length(role) < 512),
preferences JSONB NULL DEFAULT '{"email_notifications":true}',
created_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
updated_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
connected_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
connected_at TIMESTAMP WITHOUT TIME ZONE NOT NULL DEFAULT NOW(),
CONSTRAINT valid_email CHECK (length(email) > 5), -- Enforce at least 5 char, eg: a@b.io
CONSTRAINT valid_first CHECK (length(first) > 1),
CONSTRAINT valid_last CHECK (length(last) > 1),
@@ -44,9 +44,11 @@ COMMENT ON TABLE
-- Indexes
-- is unused index?
--CREATE INDEX accounts_role_idx ON auth.accounts (role);
CREATE INDEX accounts_preferences_idx ON auth.accounts using GIN (preferences);
-- is unused index?
--CREATE INDEX accounts_userid_idx ON auth.accounts (userid);
CREATE INDEX accounts_preferences_idx ON auth.accounts USING GIN (preferences);
CREATE INDEX accounts_public_id_idx ON auth.accounts (public_id);
COMMENT ON COLUMN auth.accounts.public_id IS 'User public_id to allow mapping for anonymous access, could be use as well for as Grafana orgId';
COMMENT ON COLUMN auth.accounts.first IS 'User first name with CONSTRAINT CHECK';
COMMENT ON COLUMN auth.accounts.last IS 'User last name with CONSTRAINT CHECK';

CREATE TRIGGER accounts_moddatetime
BEFORE UPDATE ON auth.accounts
@@ -25,7 +25,7 @@ COMMENT ON COLUMN api.metadata.vessel_id IS 'Link auth.vessels with api.metadata
-- List vessel
--TODO add geojson with position
DROP VIEW IF EXISTS api.vessels_view;
CREATE OR REPLACE VIEW api.vessels_view AS
CREATE OR REPLACE VIEW api.vessels_view WITH (security_invoker=true,security_barrier=true) AS
WITH metadata AS (
SELECT COALESCE(
(SELECT m.time
@@ -96,10 +96,11 @@ AS $vessel$
BEGIN
SELECT
jsonb_build_object(
'name', v.name,
'mmsi', coalesce(v.mmsi, null),
'name', coalesce(m.name, null),
'mmsi', coalesce(m.mmsi, null),
'created_at', v.created_at::timestamp(0),
'last_contact', coalesce(m.time, null),
'first_contact', coalesce(m.created_at::timestamp(0), null),
'last_contact', coalesce(m.time::timestamp(0), null),
'geojson', coalesce(ST_AsGeoJSON(geojson_t.*)::json, null)
)::jsonb || api.vessel_details_fn()::jsonb
INTO vessel
@@ -117,7 +118,7 @@ AS $vessel$
latitude IS NOT NULL
AND longitude IS NOT NULL
AND vessel_id = current_setting('vessel.id', false)
ORDER BY time DESC
ORDER BY time DESC LIMIT 1
) AS geojson_t
WHERE
m.vessel_id = current_setting('vessel.id')
@@ -136,11 +137,12 @@ CREATE OR REPLACE FUNCTION api.settings_fn(out settings json) RETURNS JSON
AS $user_settings$
BEGIN
select row_to_json(row)::json INTO settings
from (
select a.email, a.first, a.last, a.preferences, a.created_at,
from (
select a.email, a.first, a.last, a.preferences, a.created_at,
INITCAP(CONCAT (LEFT(first, 1), ' ', last)) AS username,
public.has_vessel_fn() as has_vessel
public.has_vessel_fn() as has_vessel,
--public.has_vessel_metadata_fn() as has_vessel_metadata,
a.public_id
from auth.accounts a
where email = current_setting('user.email')
) row;
@@ -254,10 +256,10 @@ COMMENT ON FUNCTION
DROP VIEW IF EXISTS api.eventlogs_view;
CREATE VIEW api.eventlogs_view WITH (security_invoker=true,security_barrier=true) AS
SELECT pq.*
from public.process_queue pq
where ref_id = current_setting('user.id', true)
or ref_id = current_setting('vessel.id', true)
order by id asc;
FROM public.process_queue pq
WHERE ref_id = current_setting('user.id', true)
OR ref_id = current_setting('vessel.id', true)
ORDER BY id ASC;
-- Description
COMMENT ON VIEW
api.eventlogs_view
@@ -268,20 +270,71 @@ DROP FUNCTION IF EXISTS api.update_logbook_observations_fn;
CREATE OR REPLACE FUNCTION api.update_logbook_observations_fn(IN _id INT, IN observations TEXT) RETURNS BOOLEAN AS
$update_logbook_observations$
DECLARE
_value TEXT := NULL;
BEGIN
-- Merge existing observations with the new observations objects
RAISE NOTICE '-> update_logbook_extra_fn id:[%] observations:[%]', _id, observations;
_value := to_jsonb(observations)::jsonb;
-- { 'observations': { 'seaState': -1, 'cloudCoverage': -1, 'visibility': -1 } }
UPDATE api.logbook SET extra = public.jsonb_recursive_merge(extra, _value) WHERE id = _id;
UPDATE api.logbook SET extra = public.jsonb_recursive_merge(extra, observations::jsonb) WHERE id = _id;
IF FOUND IS True THEN
RETURN True;
END IF;
RETURN False;
END;
$update_logbook_observations$ language plpgsql security definer;

-- Description
COMMENT ON FUNCTION
api.update_logbook_observations_fn
IS 'Update logbook observations jsonb key pair value';
IS 'Update/Add logbook observations jsonb key pair value';
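For reference, a minimal sketch of calling this function, mirroring the invocation used later in the test suite (a logbook row with id 1 is assumed to exist):

-- Merge a new observation value into the extra JSONB of logbook id 1
SELECT api.update_logbook_observations_fn(1, '{"observations":{"cloudCoverage":1}}'::TEXT);
-- Inspect the merged result
SELECT extra FROM api.logbook WHERE id = 1;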

CREATE TYPE public_type AS ENUM ('public_logs', 'public_logs_list', 'public_timelapse', 'public_stats');
CREATE FUNCTION api.ispublic_fn(IN id INTEGER, IN _type public_type) RETURNS BOOLEAN AS $ispublic$
DECLARE
_id INTEGER := id;
rec record;
valid_public_type BOOLEAN := False;
BEGIN
-- If _id is is not NULL and > 0
IF _id IS NULL OR _id < 1 THEN
RAISE WARNING '-> ispublic_fn invalid input %', _id;
RETURN False;
END IF;
-- Check if public_type is valid enum
SELECT _type::name = any(enum_range(null::public_type)::name[]) INTO valid_public_type;
IF valid_public_type IS False THEN
-- Ignore entry if type is invalid
RAISE WARNING '-> ispublic_fn invalid input type %', _type;
RETURN False;
END IF;

IF _type = 'public_logs' THEN
WITH log as (
select vessel_id from api.logbook l where l.id = _id
)
SELECT (l.vessel_id) is not null into rec
--SELECT l.vessel_id, 'email', 'settings', a.preferences
FROM auth.accounts a, auth.vessels v, jsonb_each_text(a.preferences), log l
WHERE v.vessel_id = l.vessel_id
AND a.email = v.owner_email
AND key = 'public_logs'::TEXT
AND value::BOOLEAN = true;
IF FOUND THEN
RETURN True;
END IF;
ELSE
SELECT (a.email) is not null into rec
--SELECT a.email, a.preferences
FROM auth.accounts a, jsonb_each_text(a.preferences)
WHERE a.public_id = _id
AND key = _type::TEXT
AND value::BOOLEAN = true;
IF FOUND THEN
RETURN True;
END IF;
END IF;
RETURN False;
END
$ispublic$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
api.ispublic_fn
IS 'Is web page publicly accessible?';
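A minimal sketch of how the function can be exercised, assuming a logbook id of 1 and an account public_id of 1 exist; the first call checks a single log, the second a user-level public_stats preference:

-- Is logbook entry 1 publicly visible?
SELECT api.ispublic_fn(1, 'public_logs');
-- Does the account with public_id 1 expose public stats?
SELECT api.ispublic_fn(1, 'public_stats');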
@@ -15,13 +15,13 @@ select current_database();
--
-- api_anonymous
-- nologin
-- api_anonymous role in the database with which to execute anonymous web requests, limit 10 connections
-- api_anonymous role in the database with which to execute anonymous web requests, limit 20 connections
-- api_anonymous allows JWT token generation with an expiration time via function api.login() from auth.accounts table
create role api_anonymous WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOLOGIN NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10;
create role api_anonymous WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOLOGIN NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 20;
comment on role api_anonymous is
'The role that PostgREST will switch to when a user is not authenticated.';
-- Limit to 10 connections
--alter user api_anonymous connection limit 10;
-- Limit to 20 connections
--alter user api_anonymous connection limit 20;
grant usage on schema api to api_anonymous;
-- explicitly limit EXECUTE privileges to only signup and login and reset functions
grant execute on function api.login(text,text) to api_anonymous;
@@ -46,18 +46,19 @@ comment on role authenticator is
'Role that serves as an entry-point for API servers such as PostgREST.';
grant api_anonymous to authenticator;

-- Grafana user and role with login, read-only, limit 15 connections
CREATE ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 15 LOGIN PASSWORD 'mysecretpassword';
-- Grafana user and role with login, read-only, limit 20 connections
CREATE ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 20 LOGIN PASSWORD 'mysecretpassword';
comment on role grafana is
'Role that grafana will use for authenticated web users.';
-- Allow API schema and Tables
GRANT USAGE ON SCHEMA api TO grafana;
-- Allow read on SEQUENCE on API schema
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO grafana;
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO grafana;
-- Allow read on TABLES on API schema
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO grafana;
-- Allow read on VIEWS on API schema
GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO grafana;
GRANT SELECT ON TABLE api.log_view,api.moorage_view,api.stay_view,api.vessels_view TO grafana;
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO grafana;
GRANT SELECT ON TABLE api.monitoring_view,api.monitoring_view2,api.monitoring_view3 TO grafana;
GRANT SELECT ON TABLE api.monitoring_humidity,api.monitoring_voltage,api.monitoring_temperatures TO grafana;
-- Allow Auth schema and Tables
@@ -65,8 +66,8 @@ GRANT USAGE ON SCHEMA auth TO grafana;
GRANT SELECT ON TABLE auth.vessels TO grafana;
GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana;

-- Grafana_auth authenticator user and role with login, read-only on auth.accounts, limit 15 connections
CREATE ROLE grafana_auth WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 15 LOGIN PASSWORD 'mysecretpassword';
-- Grafana_auth authenticator user and role with login, read-only on auth.accounts, limit 20 connections
CREATE ROLE grafana_auth WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 20 LOGIN PASSWORD 'mysecretpassword';
comment on role grafana_auth is
'Role that grafana auth proxy authenticator via apache.';
-- Allow read on VIEWS on API schema
@@ -81,29 +82,25 @@ GRANT EXECUTE ON FUNCTION public.citext_eq(citext, citext) TO grafana_auth;

-- User:
-- nologin, web api only
-- read-only for all and Read-Write on logbook, stays and moorage except for specific (name, notes) COLUMNS
-- read-only for all and Read on logbook, stays and moorage and Write only for specific (name, notes) COLUMNS
CREATE ROLE user_role WITH NOLOGIN NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION;
comment on role user_role is
'Role that PostgREST will switch to for authenticated web users.';
GRANT user_role to authenticator;
GRANT USAGE ON SCHEMA api TO user_role;
-- Allow read on SEQUENCE on API schema
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO user_role;
-- Allow read on TABLES on API schema
GRANT SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata,api.stays_at TO user_role;
GRANT SELECT ON TABLE public.process_queue TO user_role;
-- To check?
GRANT SELECT ON TABLE auth.vessels TO user_role;
-- Allow users to update certain columns
-- Allow users to update certain columns on specific TABLES on API schema
GRANT UPDATE (name, notes) ON api.logbook TO user_role;
GRANT UPDATE (name, notes, stay_code) ON api.stays TO user_role;
GRANT UPDATE (name, notes, stay_code, home_flag) ON api.moorages TO user_role;
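With these column-level grants, an authenticated user_role session can rewrite the allowed columns but nothing else; a hypothetical illustration (log id 1 is assumed to exist):

-- Allowed: name is included in the column grant on api.logbook
UPDATE api.logbook SET name = 'Renamed trip' WHERE id = 1;
-- Fails with a permission-denied error: distance is not in the column grant
UPDATE api.logbook SET distance = 0 WHERE id = 1;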
-- Allow EXECUTE on all FUNCTIONS on API and public schema
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
--GRANT EXECUTE ON FUNCTION public.check_jwt() TO user_role;
-- Allow others functions or allow all in public !! ??
--GRANT EXECUTE ON FUNCTION api.export_logbook_geojson_linestring_fn(int4) TO user_role;
--GRANT EXECUTE ON FUNCTION public.st_asgeojson(text) TO user_role;
--GRANT EXECUTE ON FUNCTION public.geography_eq(geography, geography) TO user_role;
-- TODO should not be need !! ??
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;

-- pg15 feature security_invoker=true,security_barrier=true
@@ -111,33 +108,12 @@ GRANT SELECT ON TABLE api.logs_view,api.moorages_view,api.stays_view TO user_rol
GRANT SELECT ON TABLE api.log_view,api.moorage_view,api.stay_view,api.vessels_view TO user_role;
GRANT SELECT ON TABLE api.monitoring_view,api.monitoring_view2,api.monitoring_view3 TO user_role;
GRANT SELECT ON TABLE api.monitoring_humidity,api.monitoring_voltage,api.monitoring_temperatures TO user_role;
GRANT SELECT ON TABLE api.stats_moorages_away_view,api.versions_view TO user_role;
GRANT SELECT ON TABLE api.total_info_view TO user_role;
GRANT SELECT ON TABLE api.stats_logs_view TO user_role;
GRANT SELECT ON TABLE api.stats_moorages_view TO user_role;
GRANT SELECT ON TABLE api.eventlogs_view TO user_role;
-- Update ownership for security user_role as run by web user.
-- Web listing
--ALTER VIEW api.stays_view OWNER TO user_role;
--ALTER VIEW api.moorages_view OWNER TO user_role;
--ALTER VIEW api.logs_view OWNER TO user_role;
--ALTER VIEW api.vessel_p_view OWNER TO user_role;
--ALTER VIEW api.monitoring_view OWNER TO user_role;
-- Remove all permissions except select
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.stays_view FROM user_role;
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.moorages_view FROM user_role;
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.logs_view FROM user_role;
--REVOKE UPDATE, TRUNCATE, REFERENCES, DELETE, TRIGGER, INSERT ON TABLE api.monitoring_view FROM user_role;

-- Allow read and update on VIEWS
-- Web detail view
--ALTER VIEW api.log_view OWNER TO user_role;
-- Remove all permissions except select and update
--REVOKE TRUNCATE, DELETE, TRIGGER, INSERT ON TABLE api.log_view FROM user_role;

ALTER VIEW api.vessels_view OWNER TO user_role;
-- Remove all permissions except select and update
REVOKE TRUNCATE, DELETE, TRIGGER, INSERT ON TABLE api.vessels_view FROM user_role;

GRANT SELECT ON TABLE api.vessels_view TO user_role;

-- Vessel:
-- nologin
@@ -147,8 +123,10 @@ comment on role vessel_role is
'Role that PostgREST will switch to for authenticated web vessels.';
GRANT vessel_role to authenticator;
GRANT USAGE ON SCHEMA api TO vessel_role;
GRANT INSERT, UPDATE, SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO vessel_role;
-- Allow read on SEQUENCE on API schema
GRANT USAGE, SELECT ON SEQUENCE api.logbook_id_seq,api.metadata_id_seq,api.moorages_id_seq,api.stays_id_seq TO vessel_role;
-- Allow read/write on TABLES on API schema
GRANT INSERT, UPDATE, SELECT ON TABLE api.metrics,api.logbook,api.moorages,api.stays,api.metadata TO vessel_role;
GRANT INSERT ON TABLE public.process_queue TO vessel_role;
GRANT USAGE, SELECT ON SEQUENCE public.process_queue_id_seq TO vessel_role;
-- explicitly limit EXECUTE privileges to pgrest db-pre-request function
@@ -23,7 +23,7 @@ SELECT cron.schedule('cron_new_moorage', '*/7 * * * *', 'select public.cron_proc
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_moorage';

-- Create a every 10 minute job cron_process_monitor_offline_fn
SELECT cron.schedule('cron_monitor_offline', '*/10 * * * *', 'select public.cron_process_monitor_offline_fn()');
SELECT cron.schedule('cron_monitor_offline', '*/11 * * * *', 'select public.cron_process_monitor_offline_fn()');
--UPDATE cron.job SET database = 'signalk' where jobname = 'cron_monitor_offline';

-- Create a every 10 minute job cron_process_monitor_online_fn
@@ -66,15 +66,17 @@ SELECT cron.schedule('cron_prune_otp', '*/15 * * * *', 'select public.cron_proce

-- Notifications/Reminders of no vessel & no metadata & no activity
-- At 08:05 on Sunday.
SELECT cron.schedule('cron_no_vessel', '05 08 * * 0', 'select public.cron_process_no_vessel_fn()');
SELECT cron.schedule('cron_no_metadata', '05 08 * * 0', 'select public.cron_process_no_metadata_fn()');
SELECT cron.schedule('cron_no_activity', '05 08 * * 0', 'select public.cron_process_no_activity_fn()');
-- At 08:05 on every 4th day-of-month if it's on Sunday.
SELECT cron.schedule('cron_no_vessel', '5 8 */4 * 0', 'select public.cron_process_no_vessel_fn()');
SELECT cron.schedule('cron_no_metadata', '5 8 */4 * 0', 'select public.cron_process_no_metadata_fn()');
SELECT cron.schedule('cron_no_activity', '5 8 */4 * 0', 'select public.cron_process_no_activity_fn()');

-- Cron job settings
UPDATE cron.job SET database = 'signalk';
UPDATE cron.job SET username = 'username'; -- TODO update to scheduler, pending process_queue update
--UPDATE cron.job SET username = 'username' where jobname = 'cron_vacuum'; -- TODO Update to superuser for vaccuum permissions
UPDATE cron.job SET nodename = '/var/run/postgresql/'; -- VS default localhost ??
UPDATE cron.job SET database = 'postgresql' WHERE jobname = 'job_run_details_cleanup_fn';
-- check job lists
SELECT * FROM cron.job;
-- unschedule by job id
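The statement that follows this comment in the source file is cut off by the hunk boundary; purely as a hedged illustration of pg_cron's unschedule-by-id form (the jobname below is hypothetical):

-- e.g. remove a job by its id, looked up from cron.job
SELECT cron.unschedule(jobid) FROM cron.job WHERE jobname = 'cron_no_vessel';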
@@ -1 +1 @@
0.3.0
0.4.0
File diff suppressed because one or more lines are too long
@@ -103,6 +103,14 @@ var moment = require('moment');
obj_name: null
}
},
{ url: '/rpc/export_logbook_kml_fn',
payload: {
_id: 2
},
res: {
obj_name: null
}
},
{ url: '/rpc/export_moorages_geojson_fn',
payload: {},
res: {
@@ -293,6 +301,32 @@ var moment = require('moment');
obj_name: null
}
},
{ url: '/rpc/export_logbook_kml_fn',
payload: {
_id: 4
},
res: {
obj_name: null
}
},
{ url: '/rpc/export_logbooks_gpx_fn',
payload: {
start_log: 3,
end_log: 4
},
res: {
obj_name: null
}
},
{ url: '/rpc/export_logbooks_kml_fn',
payload: {
start_log: 3,
end_log: 4
},
res: {
obj_name: null
}
},
{ url: '/rpc/export_moorages_geojson_fn',
payload: {},
res: {
@@ -25,7 +25,7 @@ SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
\echo 'logbook'
SELECT count(*) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
\echo 'logbook'
SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, track_gpx IS NOT NULL AS track_gpx, track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);

-- Test stays for user
\echo 'stays'
@@ -57,3 +57,15 @@ SELECT stats_logs_fn->'name' AS name,
FROM stats_jsonb;
DROP TABLE stats_jsonb;
SELECT api.stats_logs_fn('2022-01-01'::text,'2022-06-12'::text);

-- Update logbook observations
\echo 'update_logbook_observations_fn'
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);
SELECT api.update_logbook_observations_fn(1, '{"observations":{"cloudCoverage":1}}'::TEXT);
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);

-- Check export
--\echo 'check logbook export fn'
--SELECT api.export_logbook_geojson_fn(1);
--SELECT api.export_logbook_gpx_fn(1);
--SELECT api.export_logbook_kml_fn(1);
@@ -21,7 +21,6 @@ name | Bollsta to Slottsbacken
_from_time | t
_to_time | t
track_geojson | t
track_gpx | t
track_geom | 0102000020E61000001A00000020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
distance | 7.17
duration | PT25M
@@ -35,7 +34,6 @@ name | Knipan to Ekenäs
_from_time | t
_to_time | t
track_geojson | t
track_gpx | t
track_geom | 0102000020E6100000130000004806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
distance | 8.6862
duration | PT18M
@@ -92,3 +90,13 @@ DROP TABLE
-[ RECORD 1 ]-+-
stats_logs_fn |

update_logbook_observations_fn
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}

-[ RECORD 1 ]------------------+--
update_logbook_observations_fn | t

-[ RECORD 1 ]---------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}}
@@ -23,7 +23,7 @@ SELECT current_user, current_setting('user.email', true), current_setting('vesse
SELECT v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id;

\echo 'auth.accounts details'
SELECT a.userid IS NOT NULL AS userid, a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT NULL AS pass, a.role, a.preferences->'telegram'->'chat' AS telegram, a.preferences->'pushover_user_key' AS pushover_user_key FROM auth.accounts AS a;
SELECT a.public_id IS NOT NULL AS public_id, a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT NULL AS pass, a.role, a.preferences->'telegram'->'chat' AS telegram, a.preferences->'pushover_user_key' AS pushover_user_key FROM auth.accounts AS a;
\echo 'auth.vessels details'
--SELECT 'SELECT ' || STRING_AGG('v.' || column_name, ', ') || ' FROM auth.vessels AS v' FROM information_schema.columns WHERE table_name = 'vessels' AND table_schema = 'auth' AND column_name NOT IN ('created_at', 'updated_at');
SELECT v.vessel_id IS NOT NULL AS vessel_id, v.owner_email, v.mmsi, v.name, v.role FROM auth.vessels AS v;
@@ -60,7 +60,7 @@ SELECT m.id, m.name, m.mmsi, m.client_id, m.length, m.beam, m.height, m.ship_typ
\echo 'api.logs_view'
--SELECT * FROM api.logbook l;
--SELECT * FROM api.logs_view l;
SELECT l.id, "Name", "From", "To", "Distance", "Duration" FROM api.logs_view AS l;
SELECT l.id, l.name, l.from, l.to, l.distance, l.duration FROM api.logs_view AS l;
--SELECT * FROM api.log_view l;

\echo 'api.stays'
@@ -23,7 +23,7 @@ client_id | vessels.urn:mrn:imo:mmsi:787654321

auth.accounts details
-[ RECORD 1 ]-----+-----------------------------
userid | t
public_id | t
user_id | t
email | demo+kapla@openplotter.cloud
first | First_kapla
@@ -33,7 +33,7 @@ role | user_role
telegram |
pushover_user_key |
-[ RECORD 2 ]-----+-----------------------------
userid | t
public_id | t
user_id | t
email | demo+aava@openplotter.cloud
first | first_aava
@@ -127,18 +127,18 @@ active | t
api.logs_view
-[ RECORD 1 ]--------------
id | 2
Name | Knipan to Ekenäs
From | Knipan
To | Ekenäs
Distance | 8.6862
Duration | PT18M
name | Knipan to Ekenäs
from | Knipan
to | Ekenäs
distance | 8.6862
duration | PT18M
-[ RECORD 2 ]--------------
id | 1
Name | patch log name 3
From | Bollsta
To | Slottsbacken
Distance | 7.17
Duration | PT25M
name | patch log name 3
from | Bollsta
to | Slottsbacken
distance | 7.17
duration | PT25M

api.stays
-[ RECORD 1 ]-------------------------------------------------
@@ -53,7 +53,7 @@ Schema | public
Description | PostGIS geometry and geography spatial types and functions
-[ RECORD 9 ]--------------------------------------------------------------------------------------
Name | timescaledb
Version | 2.12.0
Version | 2.12.2
Schema | public
Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
-[ RECORD 10 ]-------------------------------------------------------------------------------------
@@ -597,12 +597,12 @@ reverse_geocode_py_fn | {"name": "Spain", "country_code": "es"}

Test geoip reverse_geoip_py_fn
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------
versions_fn | {"api_version" : "0.3.0", "sys_version" : "PostgreSQL 15.4", "timescaledb" : "2.12.0", "postgis" : "3.4.0", "postgrest" : "PostgREST 11.2.1"}
versions_fn | {"api_version" : "0.4.0", "sys_version" : "PostgreSQL 15.4", "timescaledb" : "2.12.2", "postgis" : "3.4.0", "postgrest" : "PostgREST 11.2.1"}

-[ RECORD 1 ]-----------------
api_version | 0.3.0
api_version | 0.4.0
sys_version | PostgreSQL 15.4
timescaledb | 2.12.0
timescaledb | 2.12.2
postgis | 3.4.0
postgrest | PostgREST 11.2.1

@@ -135,9 +135,19 @@ diff sql/monitoring.sql.output output/monitoring.sql.output > /dev/null
#diff -u sql/monitoring.sql.output output/monitoring.sql.output | wc -l
#echo 0
if [ $? -eq 0 ]; then
echo OK
echo SQL monitoring.sql OK
else
echo SQL monitoring.sql FAILED
diff -u sql/monitoring.sql.output output/monitoring.sql.output
exit 1
fi
fi

# Download and update openapi documentation
wget ${PGSAIL_API_URI} -O ../openapi.json
#echo 0
if [ $? -eq 0 ]; then
echo openapi.json OK
else
echo openapi.json FAILED
exit 1
fi