Mirror of https://github.com/xbgmsharp/postgsail.git (synced 2025-09-17 19:27:49 +00:00)

Compare commits (36 commits)
Commits (SHA1):
f72d6b9859, 3e30709675, 60e0097540, 5d21cb2e44, ea89c934ee, 20e1b6ad73,
79e195c24b, 156d64d936, c3dccf94de, 3038821353, 051408a307, 2f7439d704,
c792bf81d9, 5551376ce2, 069ac31ca0, d10b0cf501, 5dda28db51, 0a80f2e35e,
dc79ca2f28, fe950b2d2a, 029e0b3fb6, 62854a95e0, e301e6fedd, 57cf87fbe9,
3a43e57b3c, 95d283b2ac, 18aba507e9, 6045ff46c0, 6e367a0e4c, eec149d411,
de2f9c94e8, d65a0b0a54, 59c5142909, e2fe23e58d, eedf5881d9, 3327c5a813
.github/workflows/db-lint.yml (vendored): 12 lines changed

@@ -27,7 +27,7 @@ jobs:
       run: cp .env.example .env

     - name: Pull Docker images
-      run: docker-compose pull db api
+      run: docker compose pull db api

     - name: Run PostgSail Database & schemalint
       # Environment variables
@@ -41,10 +41,10 @@ jobs:
       run: |
         set -eu
         source .env
-        docker-compose stop || true
-        docker-compose rm || true
-        docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
-        docker-compose ps -a
+        docker compose stop || true
+        docker compose rm || true
+        docker compose up -d db && sleep 30 && docker compose up -d api && sleep 5
+        docker compose ps -a
         echo ${PGSAIL_API_URL}
         curl ${PGSAIL_API_URL}
         npm i -D schemalint
@@ -52,4 +52,4 @@ jobs:
     - name: Show the logs
       if: always()
       run: |
-        docker-compose logs
+        docker compose logs
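This change repeats across every workflow in this compare: each invocation of the standalone Compose v1 binary (`docker-compose ...`) is swapped for the Docker CLI plugin (`docker compose ...`), which accepts the same subcommands, so only the command separator changes.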
.github/workflows/db-test.yml (vendored): 14 lines changed

@@ -29,10 +29,10 @@ jobs:
       run: cp .env.example .env

     - name: Pull Docker images
-      run: docker-compose pull db api
+      run: docker compose pull db api

     - name: Build Docker images
-      run: docker-compose -f docker-compose.dev.yml -f docker-compose.yml build tests
+      run: docker compose -f docker-compose.dev.yml -f docker-compose.yml build tests

     - name: Install psql
       run: sudo apt install postgresql-client
@@ -49,10 +49,10 @@ jobs:
       run: |
         set -eu
         source .env
-        docker-compose stop || true
-        docker-compose rm || true
-        docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
-        docker-compose ps -a
+        docker compose stop || true
+        docker compose rm || true
+        docker compose up -d db && sleep 30 && docker compose up -d api && sleep 5
+        docker compose ps -a
         echo ${PGSAIL_API_URL}
         curl ${PGSAIL_API_URL}
         psql -c "select 1"
@@ -70,4 +70,4 @@ jobs:
     - name: Show the logs
       if: always()
       run: |
-        docker-compose logs
+        docker compose logs
.github/workflows/frontend-test.yml (vendored): 10 lines changed

@@ -49,10 +49,10 @@ jobs:
       run: |
         set -eu
         source .env
-        docker-compose stop || true
-        docker-compose rm || true
-        docker-compose up -d db && sleep 30 && docker-compose up -d api && sleep 5
-        docker-compose ps -a
+        docker compose stop || true
+        docker compose rm || true
+        docker compose up -d db && sleep 30 && docker compose up -d api && sleep 5
+        docker compose ps -a
         echo "Test PostgSail Web Unit Test"
         docker compose -f docker-compose.dev.yml -f docker-compose.yml up -d web_dev && sleep 100
         docker compose -f docker-compose.dev.yml -f docker-compose.yml logs web_dev
@@ -67,4 +67,4 @@ jobs:
     - name: Show the logs
       if: always()
       run: |
-        docker-compose logs
+        docker compose logs
.github/workflows/grafana-test.yml (vendored): 17 lines changed

@@ -26,7 +26,7 @@ jobs:
       run: cp .env.example .env

     - name: Pull Docker images
-      run: docker-compose pull db app
+      run: docker compose pull db app

     - name: Run PostgSail Grafana test
       # Environment variables
@@ -40,15 +40,16 @@ jobs:
       run: |
         set -eu
         source .env
-        docker-compose stop || true
-        docker-compose rm || true
-        docker-compose up -d db && sleep 30
-        docker-compose ps -a
+        docker compose stop || true
+        docker compose rm || true
+        docker compose up -d db && sleep 30
+        docker compose ps -a
         echo "Test PostgSail Grafana Unit Test"
-        docker-compose up -d app && sleep 5
-        docker-compose ps -a
+        docker compose up -d app && sleep 5
+        docker compose ps -a
         curl http://localhost:3001/
+        docker compose exec -i db psql -Uusername signalk -c "select public.cron_process_grafana_fn();"
     - name: Show the logs
       if: always()
       run: |
-        docker-compose logs
+        docker compose logs
@@ -47,6 +47,8 @@ services:
       PGRST_OPENAPI_SERVER_PROXY_URI: http://127.0.0.1:3000
       PGRST_DB_PRE_REQUEST: public.check_jwt
       PGRST_DB_POOL: 20
+      PGRST_DB_POOL_MAX_IDLETIME: 60
+      PGRST_DB_POOL_ACQUISITION_TIMEOUT: 20
       PGRST_DB_URI: ${PGRST_DB_URI}
       PGRST_JWT_SECRET: ${PGRST_JWT_SECRET}
       PGRST_SERVER_TIMING_ENABLED: 1
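The two added variables appear to map to PostgREST's connection-pool options (`db-pool-max-idletime` and `db-pool-acquisition-timeout`): the first caps how long, in seconds, an idle pooled connection is kept, and the second caps how long a request waits to acquire a connection before failing.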
@@ -124,5 +124,13 @@ SELECT * from public.process_queue;
 If you just want to use this as a standalone application and don't want people to be able to sign up for an account.

 ```SQL
-revoke execute on function api.signup(text,text,text,text) to api_anonymous;
+REVOKE execute on function api.signup(text,text,text,text) to api_anonymous;
 ```
+
+### How to disable completely anonymous access
+
+If you just want to use this as a standalone application and don't want people to be able to access public account.
+
+```SQL
+REVOKE SELECT ON ALL TABLES IN SCHEMA api TO api_anonymous;
+```
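Note that PostgreSQL's `REVOKE` takes `FROM`, not `TO`, so both statements as committed would fail with a syntax error. A corrected sketch:

```SQL
REVOKE EXECUTE ON FUNCTION api.signup(text,text,text,text) FROM api_anonymous;
REVOKE SELECT ON ALL TABLES IN SCHEMA api FROM api_anonymous;
```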
@@ -257,5 +257,5 @@ erDiagram
     api_stays }o--|| api_moorages : ""
     api_stays }o--|| api_stays_at : ""
     auth_otp |o--|| auth_accounts : ""
-    auth_vessels |o--|| auth_accounts : ""
+    auth_vessels }o--|| auth_accounts : ""
 ```
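In Mermaid's erDiagram crow's-foot notation, `|o--||` reads "zero or one to exactly one" while `}o--||` reads "zero or more to exactly one", so this edit lets a single account own any number of vessels instead of at most one.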
@@ -190,7 +190,7 @@ Check the [End-to-End (E2E) test sample](https://github.com/xbgmsharp/postgsail/

 ### Docker dependencies

-`docker-compose` is used to start environment dependencies. Dependencies consist of 3 containers:
+`docker compose` is used to start environment dependencies. Dependencies consist of 3 containers:

 - `timescaledb-postgis` alias `db`, PostgreSQL with TimescaleDB extension along with the PostGIS extension.
 - `postgrest` alias `api`, Standalone web server that turns your PostgreSQL database directly into a RESTful API.
frontend (submodule): 2 lines changed
Submodule frontend updated: b5b094cc97...2fb525adad
@@ -366,7 +366,7 @@ CREATE OR REPLACE FUNCTION public.process_logbook_queue_fn(IN _id integer) RETUR

     -- Add trip details name as note for the first geometry point entry from the GeoJSON
     SELECT format('{"trip": { "name": "%s", "duration": "%s", "distance": "%s" }}', logbook_rec.name, logbook_rec.duration, logbook_rec.distance) into trip_note;
     -- Update the properties of the first feature
     UPDATE api.logbook
     SET track_geojson = jsonb_set(
         track_geojson,
@@ -708,7 +708,7 @@ COMMENT ON FUNCTION
     public.cron_process_no_activity_fn
     IS 'init by pg_cron, check for vessel with no activity for more than 230 days then send notification';

--- Update grafana role SQL connection to 30
+-- Update grafana,qgis,api role SQL connection to 30
 ALTER ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;
 ALTER ROLE api_anonymous WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;
 ALTER ROLE qgis_role WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;
initdb/99_migrations_202408.sql (new file): 1370 lines
File diff suppressed because it is too large.
initdb/99_migrations_202409.sql (new file): 693 lines

@@ -0,0 +1,693 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration September 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Add new email template account_inactivity
INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
VALUES ('inactivity','We Haven''t Seen You in a While!','Hi __RECIPIENT__,

You''re busy. We understand.

You haven''t logged into PostgSail for a considerable period. Since we last saw you, we have continued to add new and exciting features to help you explorer your navigation journey.

Meanwhile, we have cleanup your data. If you wish to maintain an up-to-date overview of your sail journey in PostgSail''''s dashboard, kindly log in to your account within the next seven days.

Please note that your account will be permanently deleted if it remains inactive for seven more days.

If you have any questions or concerns or if you believe this to be an error, please do not hesitate to reach out at info@openplotter.cloud.

Sincerely,
Francois','We Haven''t Seen You in a While!','You haven''t logged into PostgSail for a considerable period. Login to check what''s new!.');

-- Update HTML email for new logbook
DROP FUNCTION IF EXISTS public.send_email_py_fn;
CREATE OR REPLACE FUNCTION public.send_email_py_fn(IN email_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_email_py$
    # Import smtplib for the actual sending function
    import smtplib
    import requests

    # Import the email modules we need
    from email.message import EmailMessage
    from email.utils import formatdate,make_msgid
    from email.mime.text import MIMEText

    # Use the shared cache to avoid preparing the email metadata
    if email_type in SD:
        plan = SD[email_type]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[email_type] = plan

    # Execute the statement with the email_type param and limit to 1 result
    rv = plpy.execute(plan, [email_type], 1)
    email_subject = rv[0]['email_subject']
    email_content = rv[0]['email_content']

    # Replace fields using input jsonb obj
    if not _user or not app:
        plpy.notice('send_email_py_fn Parameters [{}] [{}]'.format(_user, app))
        plpy.error('Error missing parameters')
        return None
    if 'logbook_name' in _user and _user['logbook_name']:
        email_content = email_content.replace('__LOGBOOK_NAME__', str(_user['logbook_name']))
    if 'logbook_link' in _user and _user['logbook_link']:
        email_content = email_content.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'logbook_img' in _user and _user['logbook_img']:
        email_content = email_content.replace('__LOGBOOK_IMG__', str(_user['logbook_img']))
    if 'logbook_stats' in _user and _user['logbook_stats']:
        email_content = email_content.replace('__LOGBOOK_STATS__', str(_user['logbook_stats']))
    if 'video_link' in _user and _user['video_link']:
        email_content = email_content.replace('__VIDEO_LINK__', str(_user['video_link']))
    if 'recipient' in _user and _user['recipient']:
        email_content = email_content.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        email_content = email_content.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        email_content = email_content.replace('__BADGE_NAME__', _user['badge'])
    if 'otp_code' in _user and _user['otp_code']:
        email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
    if 'reset_qs' in _user and _user['reset_qs']:
        email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])
    if 'alert' in _user and _user['alert']:
        email_content = email_content.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        email_content = email_content.replace('__APP_URL__', app['app.url'])

    email_from = 'root@localhost'
    if 'app.email_from' in app and app['app.email_from']:
        email_from = 'PostgSail <' + app['app.email_from'] + '>'
    #plpy.notice('Sending email from [{}] [{}]'.format(email_from, app['app.email_from']))

    email_to = 'root@localhost'
    if 'email' in _user and _user['email']:
        email_to = _user['email']
        #plpy.notice('Sending email to [{}] [{}]'.format(email_to, _user['email']))
    else:
        plpy.error('Error email to')
        return None

    if email_type == 'logbook':
        msg = EmailMessage()
        msg.set_content(email_content)
    else:
        msg = MIMEText(email_content, 'plain', 'utf-8')
    msg["Subject"] = email_subject
    msg["From"] = email_from
    msg["To"] = email_to
    msg["Date"] = formatdate()
    msg["Message-ID"] = make_msgid()

    if email_type == 'logbook' and 'logbook_img' in _user and _user['logbook_img']:
        # Create a Content-ID for the image
        image_cid = make_msgid()
        # Transform to HTML template, replace text by HTML link
        logbook_link = "{__APP_URL__}/log/{__LOGBOOK_LINK__}".format( __APP_URL__=app['app.url'], __LOGBOOK_LINK__=str(_user['logbook_link']))
        timelapse_link = "{__APP_URL__}/timelapse/{__LOGBOOK_LINK__}".format( __APP_URL__=app['app.url'], __LOGBOOK_LINK__=str(_user['logbook_link']))
        email_content = email_content.replace('\n', '<br/>')
        email_content = email_content.replace(logbook_link, '<a href="{logbook_link}">{logbook_link}</a>'.format(logbook_link=str(logbook_link)))
        email_content = email_content.replace(timelapse_link, '<a href="{timelapse_link}">{timelapse_link}</a>'.format(timelapse_link=str(logbook_link)))
        email_content = email_content.replace(str(_user['logbook_name']), '<a href="{logbook_link}">{logbook_name}</a>'.format(logbook_link=str(logbook_link), logbook_name=str(_user['logbook_name'])))
        # Set an alternative html body
        msg.add_alternative("""\
<html>
    <body>
        <p>{email_content}</p>
        <img src="cid:{image_cid}">
    </body>
</html>
""".format(email_content=email_content, image_cid=image_cid[1:-1]), subtype='html')
        img_url = 'https://gis.openplotter.cloud/{}'.format(str(_user['logbook_img']))
        response = requests.get(img_url, stream=True)
        if response.status_code == 200:
            msg.get_payload()[1].add_related(response.raw.data,
                                             maintype='image',
                                             subtype='png',
                                             cid=image_cid)

    server_smtp = 'localhost'
    if 'app.email_server' in app and app['app.email_server']:
        server_smtp = app['app.email_server']
    #plpy.notice('Sending server [{}] [{}]'.format(server_smtp, app['app.email_server']))

    # Send the message via our own SMTP server.
    try:
        # send your message with credentials specified above
        with smtplib.SMTP(server_smtp, 587) as server:
            if 'app.email_user' in app and app['app.email_user'] \
                and 'app.email_pass' in app and app['app.email_pass']:
                server.starttls()
                server.login(app['app.email_user'], app['app.email_pass'])
            #server.send_message(msg)
            server.sendmail(msg["From"], msg["To"], msg.as_string())
            server.quit()
        # tell the script to report if your message was sent or which errors need to be fixed
        plpy.notice('Sent email successfully to [{}] [{}]'.format(msg["To"], msg["Subject"]))
        return None
    except OSError as error:
        plpy.error('OS Error occurred: ' + str(error))
    except smtplib.SMTPConnectError:
        plpy.error('Failed to connect to the server. Bad connection settings?')
    except smtplib.SMTPServerDisconnected:
        plpy.error('Failed to connect to the server. Wrong user/password?')
    except smtplib.SMTPException as e:
        plpy.error('SMTP error occurred: ' + str(e))
$send_email_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_email_py_fn
    IS 'Send email notification using plpython3u';
|
||||||
|
CREATE OR REPLACE FUNCTION api.stats_logs_fn(start_date text DEFAULT NULL::text, end_date text DEFAULT NULL::text, OUT stats jsonb)
|
||||||
|
RETURNS jsonb
|
||||||
|
LANGUAGE plpgsql
|
||||||
|
AS $function$
|
||||||
|
DECLARE
|
||||||
|
_start_date TIMESTAMPTZ DEFAULT '1970-01-01';
|
||||||
|
_end_date TIMESTAMPTZ DEFAULT NOW();
|
||||||
|
BEGIN
|
||||||
|
IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
|
||||||
|
RAISE WARNING '--> stats_logs_fn, filter result stats by date [%]', start_date;
|
||||||
|
_start_date := start_date::TIMESTAMPTZ;
|
||||||
|
_end_date := end_date::TIMESTAMPTZ;
|
||||||
|
END IF;
|
||||||
|
--RAISE NOTICE '--> stats_logs_fn, _start_date [%], _end_date [%]', _start_date, _end_date;
|
||||||
|
WITH
|
||||||
|
meta AS (
|
||||||
|
SELECT m.name FROM api.metadata m ),
|
||||||
|
logs_view AS (
|
||||||
|
SELECT *
|
||||||
|
FROM api.logbook l
|
||||||
|
WHERE _from_time >= _start_date::TIMESTAMPTZ
|
||||||
|
AND _to_time <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
|
||||||
|
),
|
||||||
|
first_date AS (
|
||||||
|
SELECT _from_time as first_date from logs_view ORDER BY first_date ASC LIMIT 1
|
||||||
|
),
|
||||||
|
last_date AS (
|
||||||
|
SELECT _to_time as last_date from logs_view ORDER BY _to_time DESC LIMIT 1
|
||||||
|
),
|
||||||
|
max_speed_id AS (
|
||||||
|
SELECT id FROM logs_view WHERE max_speed = (SELECT max(max_speed) FROM logs_view) ),
|
||||||
|
max_wind_speed_id AS (
|
||||||
|
SELECT id FROM logs_view WHERE max_wind_speed = (SELECT max(max_wind_speed) FROM logs_view)),
|
||||||
|
max_distance_id AS (
|
||||||
|
SELECT id FROM logs_view WHERE distance = (SELECT max(distance) FROM logs_view)),
|
||||||
|
max_duration_id AS (
|
||||||
|
SELECT id FROM logs_view WHERE duration = (SELECT max(duration) FROM logs_view)),
|
||||||
|
logs_stats AS (
|
||||||
|
SELECT
|
||||||
|
count(*) AS count,
|
||||||
|
max(max_speed) AS max_speed,
|
||||||
|
max(max_wind_speed) AS max_wind_speed,
|
||||||
|
max(distance) AS max_distance,
|
||||||
|
sum(distance) AS sum_distance,
|
||||||
|
max(duration) AS max_duration,
|
||||||
|
sum(duration) AS sum_duration
|
||||||
|
FROM logs_view l )
|
||||||
|
--select * from logbook;
|
||||||
|
-- Return a JSON
|
||||||
|
SELECT jsonb_build_object(
|
||||||
|
'name', meta.name,
|
||||||
|
'first_date', first_date.first_date,
|
||||||
|
'last_date', last_date.last_date,
|
||||||
|
'max_speed_id', max_speed_id.id,
|
||||||
|
'max_wind_speed_id', max_wind_speed_id.id,
|
||||||
|
'max_duration_id', max_duration_id.id,
|
||||||
|
'max_distance_id', max_distance_id.id)::jsonb || to_jsonb(logs_stats.*)::jsonb INTO stats
|
||||||
|
FROM max_speed_id, max_wind_speed_id, max_distance_id, max_duration_id,
|
||||||
|
logs_stats, meta, logs_view, first_date, last_date;
|
||||||
|
END;
|
||||||
|
$function$
|
||||||
|
;
|
||||||
|
|
||||||
|
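A usage sketch for the rewritten function (dates are placeholders); omitting both arguments falls back to the declared defaults, 1970-01-01 through NOW():

```SQL
SELECT api.stats_logs_fn('2024-06-01', '2024-06-30'); -- stats for a date window
SELECT api.stats_logs_fn();                           -- stats over the full history
```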
-- Fix stays and moorage statistics for user by date
CREATE OR REPLACE FUNCTION api.stats_stays_fn(
    IN start_date TEXT DEFAULT NULL,
    IN end_date TEXT DEFAULT NULL,
    OUT stats JSON) RETURNS JSON AS $stats_stays$
DECLARE
    _start_date TIMESTAMPTZ DEFAULT '1970-01-01';
    _end_date TIMESTAMPTZ DEFAULT NOW();
BEGIN
    IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        RAISE NOTICE '--> stats_stays_fn, custom filter result stats by date [%]', start_date;
        _start_date := start_date::TIMESTAMPTZ;
        _end_date := end_date::TIMESTAMPTZ;
    END IF;
    --RAISE NOTICE '--> stats_stays_fn, _start_date [%], _end_date [%]', _start_date, _end_date;
    WITH
        stays as (
            select distinct(moorage_id) as moorage_id, sum(duration) as duration, count(id) as reference_count
                from api.stays s
                WHERE arrived >= _start_date::TIMESTAMPTZ
                    AND departed <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                group by moorage_id
                order by moorage_id
        ),
        moorages AS (
            SELECT m.id, m.home_flag, m.reference_count, m.stay_duration, m.stay_code, m.country, s.duration, s.reference_count
                from api.moorages m, stays s
                where s.moorage_id = m.id
                order by moorage_id
        ),
        home_ports AS (
            select count(*) as home_ports from moorages m where home_flag is true
        ),
        unique_moorages AS (
            select count(*) as unique_moorages from moorages m
        ),
        time_at_home_ports AS (
            select sum(m.stay_duration) as time_at_home_ports from moorages m where home_flag is true
        ),
        sum_stay_duration AS (
            select sum(m.stay_duration) as sum_stay_duration from moorages m where home_flag is false
        ),
        time_spent_away_arr AS (
            select m.stay_code,sum(m.stay_duration) as stay_duration from moorages m where home_flag is false group by m.stay_code order by m.stay_code
        ),
        time_spent_arr as (
            select jsonb_agg(t.*) as time_spent_away_arr from time_spent_away_arr t
        ),
        time_spent_away AS (
            select sum(m.stay_duration) as time_spent_away from moorages m where home_flag is false
        ),
        time_spent as (
            select jsonb_agg(t.*) as time_spent_away from time_spent_away t
        )
    -- Return a JSON
    SELECT jsonb_build_object(
        'home_ports', home_ports.home_ports,
        'unique_moorages', unique_moorages.unique_moorages,
        'time_at_home_ports', time_at_home_ports.time_at_home_ports,
        'sum_stay_duration', sum_stay_duration.sum_stay_duration,
        'time_spent_away', time_spent_away.time_spent_away,
        'time_spent_away_arr', time_spent_arr.time_spent_away_arr) INTO stats
        FROM home_ports, unique_moorages,
            time_at_home_ports, sum_stay_duration, time_spent_away, time_spent_arr;
END;
$stats_stays$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.stats_stays_fn
    IS 'Stays/Moorages stats by date';

-- Update api.stats_moorages_view, fix time_spent_at_home_port
CREATE OR REPLACE VIEW api.stats_moorages_view WITH (security_invoker=true,security_barrier=true) AS
    WITH
        home_ports AS (
            select count(*) as home_ports from api.moorages m where home_flag is true
        ),
        unique_moorage AS (
            select count(*) as unique_moorage from api.moorages m
        ),
        time_at_home_ports AS (
            select sum(m.stay_duration) as time_at_home_ports from api.moorages m where home_flag is true
        ),
        time_spent_away AS (
            select sum(m.stay_duration) as time_spent_away from api.moorages m where home_flag is false
        )
    SELECT
        home_ports.home_ports as "home_ports",
        unique_moorage.unique_moorage as "unique_moorages",
        time_at_home_ports.time_at_home_ports as "time_spent_at_home_port(s)",
        time_spent_away.time_spent_away as "time_spent_away"
        FROM home_ports, unique_moorage, time_at_home_ports, time_spent_away;

-- Add stats_fn, user statistics by date
DROP FUNCTION IF EXISTS api.stats_fn;
CREATE OR REPLACE FUNCTION api.stats_fn(
    IN start_date TEXT DEFAULT NULL,
    IN end_date TEXT DEFAULT NULL,
    OUT stats JSONB) RETURNS JSONB AS $stats_global$
DECLARE
    _start_date TIMESTAMPTZ DEFAULT '1970-01-01';
    _end_date TIMESTAMPTZ DEFAULT NOW();
    stats_logs JSONB;
    stats_moorages JSONB;
    stats_logs_topby JSONB;
    stats_moorages_topby JSONB;
BEGIN
    IF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        RAISE WARNING '--> stats_fn, filter result stats by date [%]', start_date;
        _start_date := start_date::TIMESTAMPTZ;
        _end_date := end_date::TIMESTAMPTZ;
    END IF;
    RAISE NOTICE '--> stats_fn, _start_date [%], _end_date [%]', _start_date, _end_date;
    -- Get global logs statistics
    SELECT api.stats_logs_fn(_start_date::TEXT, _end_date::TEXT) INTO stats_logs;
    -- Get global stays/moorages statistics
    SELECT api.stats_stays_fn(_start_date::TEXT, _end_date::TEXT) INTO stats_moorages;
    -- Get Top 5 trips statistics
    WITH
        logs_view AS (
            SELECT id,avg_speed,max_speed,max_wind_speed,distance,duration
                FROM api.logbook l
                WHERE _from_time >= _start_date::TIMESTAMPTZ
                    AND _to_time <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
        ),
        logs_top_avg_speed AS (
            SELECT id,avg_speed FROM logs_view
            GROUP BY id,avg_speed
            ORDER BY avg_speed DESC
            LIMIT 5),
        logs_top_speed AS (
            SELECT id,max_speed FROM logs_view
            WHERE max_speed IS NOT NULL
            GROUP BY id,max_speed
            ORDER BY max_speed DESC
            LIMIT 5),
        logs_top_wind_speed AS (
            SELECT id,max_wind_speed FROM logs_view
            WHERE max_wind_speed IS NOT NULL
            GROUP BY id,max_wind_speed
            ORDER BY max_wind_speed DESC
            LIMIT 5),
        logs_top_distance AS (
            SELECT id FROM logs_view
            GROUP BY id,distance
            ORDER BY distance DESC
            LIMIT 5),
        logs_top_duration AS (
            SELECT id FROM logs_view
            GROUP BY id,duration
            ORDER BY duration DESC
            LIMIT 5)
    -- Stats Top Logs
    SELECT jsonb_build_object(
        'stats_logs', stats_logs,
        'stats_moorages', stats_moorages,
        'logs_top_speed', (SELECT jsonb_agg(logs_top_speed.*) FROM logs_top_speed),
        'logs_top_avg_speed', (SELECT jsonb_agg(logs_top_avg_speed.*) FROM logs_top_avg_speed),
        'logs_top_wind_speed', (SELECT jsonb_agg(logs_top_wind_speed.*) FROM logs_top_wind_speed),
        'logs_top_distance', (SELECT jsonb_agg(logs_top_distance.id) FROM logs_top_distance),
        'logs_top_duration', (SELECT jsonb_agg(logs_top_duration.id) FROM logs_top_duration)
        ) INTO stats;
    -- Stats top 5 moorages statistics
    WITH
        stays as (
            select distinct(moorage_id) as moorage_id, sum(duration) as duration, count(id) as reference_count
                from api.stays s
                WHERE s.arrived >= _start_date::TIMESTAMPTZ
                    AND s.departed <= _end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                group by s.moorage_id
                order by s.moorage_id
        ),
        moorages AS (
            SELECT m.id, m.home_flag, m.reference_count, m.stay_duration, m.stay_code, m.country, s.duration as dur, s.reference_count as ref_count
                from api.moorages m, stays s
                where s.moorage_id = m.id
                order by s.moorage_id
        ),
        moorages_top_arrivals AS (
            SELECT id,ref_count FROM moorages
            GROUP BY id,ref_count
            ORDER BY ref_count DESC
            LIMIT 5),
        moorages_top_duration AS (
            SELECT id,dur FROM moorages
            GROUP BY id,dur
            ORDER BY dur DESC
            LIMIT 5),
        moorages_countries AS (
            SELECT DISTINCT(country) FROM moorages
            WHERE country IS NOT NULL AND country <> 'unknown'
            GROUP BY country
            ORDER BY country DESC
            LIMIT 5)
    SELECT stats || jsonb_build_object(
        'moorages_top_arrivals', (SELECT jsonb_agg(moorages_top_arrivals) FROM moorages_top_arrivals),
        'moorages_top_duration', (SELECT jsonb_agg(moorages_top_duration) FROM moorages_top_duration),
        'moorages_top_countries', (SELECT jsonb_agg(moorages_countries.country) FROM moorages_countries)
        ) INTO stats;
END;
$stats_global$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.stats_fn
    IS 'Stats logbook and moorages by date';
-- Add mapgl_fn, generate a geojson with all linestring
DROP FUNCTION IF EXISTS api.mapgl_fn;
CREATE OR REPLACE FUNCTION api.mapgl_fn(start_log integer DEFAULT NULL::integer, end_log integer DEFAULT NULL::integer, start_date text DEFAULT NULL::text, end_date text DEFAULT NULL::text, OUT geojson jsonb)
RETURNS jsonb
AS $mapgl$
DECLARE
    _geojson jsonb;
BEGIN
    -- Using sub query to force id order by time
    -- Extract GeoJSON LineString and merge into a new GeoJSON
    --raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
    IF start_log IS NOT NULL AND end_log IS NULL THEN
        end_log := start_log;
    END IF;
    IF start_date IS NOT NULL AND end_date IS NULL THEN
        end_date := start_date;
    END IF;
    --raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
    IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l.id >= start_log
                        AND l.id <= end_log
                        AND l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l._from_time >= start_date::TIMESTAMPTZ
                        AND l._to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                        AND l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    ELSE
        SELECT jsonb_agg(
                jsonb_build_object('type', 'Feature',
                    'properties', f->'properties',
                    'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
            ) INTO _geojson
            FROM (
                SELECT jsonb_array_elements(track_geojson->'features') AS f
                    FROM api.logbook l
                    WHERE l.track_geojson IS NOT NULL
                    ORDER BY l._from_time ASC
            ) AS sub
            WHERE (f->'geometry'->>'type') = 'LineString';
    END IF;
    -- Generate the GeoJSON with all moorages
    SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features', _geojson || ( SELECT
                jsonb_agg(ST_AsGeoJSON(m.*)::JSONB) as moorages_geojson
                FROM
                ( SELECT
                    id,name,stay_code,
                    EXTRACT(DAY FROM justify_hours ( stay_duration )) AS Total_Stay,
                    geog
                    FROM api.moorages
                    WHERE geog IS NOT null
                ) AS m
            ) ) INTO geojson;
END;
$mapgl$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.mapgl_fn
    IS 'Get all logbook LineString alone with all moorages into a geojson to be process by DeckGL';
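A usage sketch covering the three branches above (ids and dates are placeholders):

```SQL
SELECT api.mapgl_fn(100, 120);                               -- tracks for logs 100..120
SELECT api.mapgl_fn(NULL, NULL, '2024-06-01', '2024-06-30'); -- tracks in a date window
SELECT api.mapgl_fn();                                       -- every log with a track
```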
-- Refresh user_role permissions
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;

-- Add cron_inactivity_fn, cleanup all data for inactive users and vessels
CREATE OR REPLACE FUNCTION public.cron_inactivity_fn()
RETURNS void
LANGUAGE plpgsql
AS $function$
DECLARE
    no_activity_rec record;
    user_settings jsonb;
    total_metrics INTEGER;
    del_metrics INTEGER;
    out_json JSONB;
BEGIN
    -- List accounts with vessel inactivity for more than 200 DAYS
    -- List accounts with no vessel created for more than 200 DAYS
    -- List accounts with no vessel metadata for more than 200 DAYS
    -- Check for users and vessels with no activity for more than 200 days
    -- remove data and notify user
    RAISE NOTICE 'cron_inactivity_fn';
    FOR no_activity_rec in
        with accounts as (
            SELECT a.email,a.first,a.last,
                (a.updated_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '200 DAYS') as no_account_activity,
                COALESCE((m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '200 DAYS'),true) as no_metadata_activity,
                m.vessel_id IS null as no_metadata_vesssel_id,
                m.time IS null as no_metadata_time,
                v.vessel_id IS null as no_vessel_vesssel_id,
                a.preferences->>'ip' as ip,v.name as user_vesssel,
                m.name as sk_vesssel,v.vessel_id as v_vessel_id,m.vessel_id as m_vessel_id,
                a.created_at as account_created,m.time as metadata_updated_at,
                v.created_at as vessel_created,v.updated_at as vessel_updated_at
                FROM auth.accounts a
                LEFT JOIN auth.vessels v ON v.owner_email = a.email
                LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
                order by a.created_at asc
        )
        select * from accounts a where
            (no_account_activity is true
                or no_vessel_vesssel_id is true
                or no_metadata_activity is true
                or no_metadata_vesssel_id is true
                or no_metadata_time is true )
        ORDER BY a.account_created asc
    LOOP
        RAISE NOTICE '-> cron_inactivity_fn for [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_inactivity_fn user_settings [%]', user_settings;
        IF no_activity_rec.no_vessel_vesssel_id is true then
            PERFORM send_notification_fn('no_vessel'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_metadata_vesssel_id is true then
            PERFORM send_notification_fn('no_metadata'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_metadata_activity is true then
            PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_account_activity is true then
            PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        END IF;
        -- Send notification
        PERFORM send_notification_fn('inactivity'::TEXT, user_settings::JSONB);
        -- Delete vessel metrics
        IF no_activity_rec.v_vessel_id IS NOT NULL THEN
            SELECT count(*) INTO total_metrics from api.metrics where vessel_id = no_activity_rec.v_vessel_id;
            WITH deleted AS (delete from api.metrics m where vessel_id = no_activity_rec.v_vessel_id RETURNING *) SELECT count(*) INTO del_metrics FROM deleted;
            SELECT jsonb_build_object('total_metrics', total_metrics, 'del_metrics', del_metrics) INTO out_json;
            RAISE NOTICE '-> debug cron_inactivity_fn [%]', out_json;
        END IF;
    END LOOP;
END;
$function$
;

COMMENT ON FUNCTION public.cron_inactivity_fn() IS 'init by pg_cron, check for vessel with no activity for more than 230 days then send notification';

-- Add cron_deactivated_fn, delete all data for inactive users and vessels
CREATE OR REPLACE FUNCTION public.cron_deactivated_fn()
RETURNS void
LANGUAGE plpgsql
AS $function$
DECLARE
    no_activity_rec record;
    user_settings jsonb;
    del_vessel_data JSONB;
    del_meta INTEGER;
    del_vessel INTEGER;
    del_account INTEGER;
    out_json JSONB;
BEGIN
    RAISE NOTICE 'cron_deactivated_fn';
    -- List accounts with vessel inactivity for more than 230 DAYS
    -- List accounts with no vessel created for more than 230 DAYS
    -- List accounts with no vessel metadata for more than 230 DAYS
    -- Remove data and remove user and notify user
    FOR no_activity_rec in
        with accounts as (
            SELECT a.email,a.first,a.last,
                (a.updated_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS') as no_account_activity,
                COALESCE((m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS'),true) as no_metadata_activity,
                m.vessel_id IS null as no_metadata_vesssel_id,
                m.time IS null as no_metadata_time,
                v.vessel_id IS null as no_vessel_vesssel_id,
                a.preferences->>'ip' as ip,v.name as user_vesssel,
                m.name as sk_vesssel,v.vessel_id as v_vessel_id,m.vessel_id as m_vessel_id,
                a.created_at as account_created,m.time as metadata_updated_at,
                v.created_at as vessel_created,v.updated_at as vessel_updated_at
                FROM auth.accounts a
                LEFT JOIN auth.vessels v ON v.owner_email = a.email
                LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
                order by a.created_at asc
        )
        select * from accounts a where
            (no_account_activity is true
                or no_vessel_vesssel_id is true
                or no_metadata_activity is true
                or no_metadata_vesssel_id is true
                or no_metadata_time is true )
        ORDER BY a.account_created asc
    LOOP
        RAISE NOTICE '-> cron_deactivated_fn for [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_deactivated_fn user_settings [%]', user_settings;
        IF no_activity_rec.no_vessel_vesssel_id is true then
            PERFORM send_notification_fn('no_vessel'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_metadata_vesssel_id is true then
            PERFORM send_notification_fn('no_metadata'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_metadata_activity is true then
            PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        ELSIF no_activity_rec.no_account_activity is true then
            PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        END IF;
        -- Send notification
        PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
        -- Delete vessel data
        IF no_activity_rec.v_vessel_id IS NOT NULL THEN
            SELECT public.delete_vessel_fn(no_activity_rec.v_vessel_id) INTO del_vessel_data;
            WITH deleted AS (delete from api.metadata where vessel_id = no_activity_rec.v_vessel_id RETURNING *) SELECT count(*) INTO del_meta FROM deleted;
            SELECT jsonb_build_object('del_metadata', del_meta) || del_vessel_data INTO del_vessel_data;
            RAISE NOTICE '-> debug cron_deactivated_fn [%]', del_vessel_data;
        END IF;
        -- Delete account data
        WITH deleted AS (delete from auth.vessels where owner_email = no_activity_rec.email RETURNING *) SELECT count(*) INTO del_vessel FROM deleted;
        WITH deleted AS (delete from auth.accounts where email = no_activity_rec.email RETURNING *) SELECT count(*) INTO del_account FROM deleted;
        SELECT jsonb_build_object('del_account', del_account, 'del_vessel', del_vessel) || del_vessel_data INTO out_json;
        RAISE NOTICE '-> debug cron_deactivated_fn [%]', out_json;
        -- TODO remove keycloak and grafana provisioning
    END LOOP;
END;
$function$
;

COMMENT ON FUNCTION public.cron_deactivated_fn() IS 'init by pg_cron, check for vessel with no activity for more than 230 then send notification and delete account and vessel data';
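Per their comments, both functions are meant to be driven by pg_cron. A possible schedule, assuming pg_cron is installed; job names and times are placeholders:

```SQL
SELECT cron.schedule('cron_inactivity', '0 1 * * *', 'SELECT public.cron_inactivity_fn()');
SELECT cron.schedule('cron_deactivated', '0 2 * * *', 'SELECT public.cron_deactivated_fn()');
```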
-- Remove unused and duplicate function
DROP FUNCTION IF EXISTS public.cron_process_no_activity_fn;
DROP FUNCTION IF EXISTS public.cron_process_inactivity_fn;
DROP FUNCTION IF EXISTS public.cron_process_deactivated_fn;

-- Update version
UPDATE public.app_settings
    SET value='0.7.7'
    WHERE "name"='app.version';

\c postgres
initdb/99_migrations_202410.sql (new file): 253 lines

@@ -0,0 +1,253 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration October 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Update moorages map, export more properties (notes,reference_count) from moorages tbl
CREATE OR REPLACE FUNCTION api.export_moorages_geojson_fn(OUT geojson jsonb)
RETURNS jsonb
LANGUAGE plpgsql
AS $function$
DECLARE
BEGIN
    SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features',
            ( SELECT
                json_agg(ST_AsGeoJSON(m.*)::JSON) as moorages_geojson
                FROM
                ( SELECT
                    id,name,stay_code,notes,reference_count,
                    EXTRACT(DAY FROM justify_hours ( stay_duration )) AS Total_Stay,
                    geog
                    FROM api.moorages
                    WHERE geog IS NOT NULL
                ) AS m
            )
        ) INTO geojson;
END;
$function$
;

COMMENT ON FUNCTION api.export_moorages_geojson_fn(out jsonb) IS 'Export moorages as geojson';
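Usage is parameterless; the call below returns a single FeatureCollection of every moorage that has a geography set:

```SQL
SELECT api.export_moorages_geojson_fn();
```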
-- Update mapgl_fn, update moorages map sub query to export more properties (notes,reference_count) from moorages tbl
|
||||||
|
DROP FUNCTION IF EXISTS api.mapgl_fn;
|
||||||
|
CREATE OR REPLACE FUNCTION api.mapgl_fn(start_log integer DEFAULT NULL::integer, end_log integer DEFAULT NULL::integer, start_date text DEFAULT NULL::text, end_date text DEFAULT NULL::text, OUT geojson jsonb)
|
||||||
|
RETURNS jsonb
|
||||||
|
AS $mapgl$
|
||||||
|
DECLARE
|
||||||
|
_geojson jsonb;
|
||||||
|
BEGIN
|
||||||
|
-- Using sub query to force id order by time
|
||||||
|
-- Extract GeoJSON LineString and merge into a new GeoJSON
|
||||||
|
--raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
|
||||||
|
IF start_log IS NOT NULL AND end_log IS NULL THEN
|
||||||
|
end_log := start_log;
|
||||||
|
END IF;
|
||||||
|
IF start_date IS NOT NULL AND end_date IS NULL THEN
|
||||||
|
end_date := start_date;
|
||||||
|
END IF;
|
||||||
|
--raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
|
||||||
|
IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
|
||||||
|
SELECT jsonb_agg(
|
||||||
|
jsonb_build_object('type', 'Feature',
|
||||||
|
'properties', f->'properties',
|
||||||
|
'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
|
||||||
|
) INTO _geojson
|
||||||
|
FROM (
|
||||||
|
SELECT jsonb_array_elements(track_geojson->'features') AS f
|
||||||
|
FROM api.logbook l
|
||||||
|
WHERE l.id >= start_log
|
||||||
|
AND l.id <= end_log
|
||||||
|
AND l.track_geojson IS NOT NULL
|
||||||
|
ORDER BY l._from_time ASC
|
||||||
|
) AS sub
|
||||||
|
WHERE (f->'geometry'->>'type') = 'LineString';
|
||||||
|
ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
|
||||||
|
SELECT jsonb_agg(
|
||||||
|
jsonb_build_object('type', 'Feature',
|
||||||
|
'properties', f->'properties',
|
||||||
|
'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
|
||||||
|
) INTO _geojson
|
||||||
|
FROM (
|
||||||
|
SELECT jsonb_array_elements(track_geojson->'features') AS f
|
||||||
|
FROM api.logbook l
|
||||||
|
WHERE l._from_time >= start_date::TIMESTAMPTZ
|
||||||
|
AND l._to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
|
||||||
|
AND l.track_geojson IS NOT NULL
|
||||||
|
ORDER BY l._from_time ASC
|
||||||
|
) AS sub
|
||||||
|
WHERE (f->'geometry'->>'type') = 'LineString';
|
||||||
|
ELSE
|
||||||
|
SELECT jsonb_agg(
|
||||||
|
jsonb_build_object('type', 'Feature',
|
||||||
|
'properties', f->'properties',
|
||||||
|
'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'LineString'))
|
||||||
|
) INTO _geojson
|
||||||
|
FROM (
|
||||||
|
SELECT jsonb_array_elements(track_geojson->'features') AS f
|
||||||
|
FROM api.logbook l
|
||||||
|
WHERE l.track_geojson IS NOT NULL
|
||||||
|
ORDER BY l._from_time ASC
|
||||||
|
) AS sub
|
||||||
|
WHERE (f->'geometry'->>'type') = 'LineString';
|
||||||
|
END IF;
|
||||||
|
-- Generate the GeoJSON with all moorages
|
||||||
|
SELECT jsonb_build_object(
|
||||||
|
'type', 'FeatureCollection',
|
||||||
|
'features', _geojson || ( SELECT
|
||||||
|
jsonb_agg(ST_AsGeoJSON(m.*)::JSONB) as moorages_geojson
|
||||||
|
FROM
|
||||||
|
( SELECT
|
||||||
|
id,name,stay_code,notes,reference_count,
|
||||||
|
EXTRACT(DAY FROM justify_hours ( stay_duration )) AS Total_Stay,
|
||||||
|
geog
|
||||||
|
FROM api.moorages
|
||||||
|
WHERE geog IS NOT null
|
||||||
|
) AS m
|
||||||
|
) ) INTO geojson;
|
||||||
|
END;
|
||||||
|
$mapgl$ LANGUAGE plpgsql;
|
||||||
|
-- Description
|
||||||
|
COMMENT ON FUNCTION
|
||||||
|
api.mapgl_fn
|
||||||
|
IS 'Generate a geojson with all logs as geometry LineString with moorages as geometry Point to be process by DeckGL';
|
||||||
|
|
||||||
|
-- Update logbook_update_geojson_fn, fix corrupt linestring properties
|
||||||
|
CREATE OR REPLACE FUNCTION public.logbook_update_geojson_fn(_id integer, _start text, _end text, OUT _track_geojson json)
|
||||||
|
RETURNS json
|
||||||
|
LANGUAGE plpgsql
|
||||||
|
AS $function$
|
||||||
|
declare
|
||||||
|
log_geojson jsonb;
|
||||||
|
metrics_geojson jsonb;
|
||||||
|
_map jsonb;
|
||||||
|
begin
|
||||||
|
-- GeoJson Feature Logbook linestring
|
||||||
|
SELECT
|
||||||
|
ST_AsGeoJSON(log.*) into log_geojson
|
||||||
|
FROM
|
||||||
|
( SELECT
|
||||||
|
id,name,
|
||||||
|
distance,
|
||||||
|
duration,
|
||||||
|
avg_speed,
|
||||||
|
max_speed,
|
||||||
|
max_wind_speed,
|
||||||
|
_from_time,
|
||||||
|
_to_time,
|
||||||
|
_from_moorage_id,
|
||||||
|
_to_moorage_id,
|
||||||
|
notes,
|
||||||
|
extra['avg_wind_speed'] as avg_wind_speed,
|
||||||
|
track_geom
|
||||||
|
FROM api.logbook
|
||||||
|
WHERE id = _id
|
||||||
|
) AS log;
|
||||||
|
-- GeoJson Feature Metrics point
|
||||||
|
SELECT
|
||||||
|
json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
|
||||||
|
FROM (
|
||||||
|
( SELECT
|
||||||
|
time,
|
||||||
|
courseovergroundtrue,
|
||||||
|
speedoverground,
|
||||||
|
windspeedapparent,
|
||||||
|
longitude,latitude,
|
||||||
|
'' AS notes,
|
||||||
|
coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null) as truewindspeed,
|
||||||
|
coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null) as truewinddirection,
|
||||||
|
coalesce(status, null) as status,
|
||||||
|
st_makepoint(longitude,latitude) AS geo_point
|
||||||
|
FROM api.metrics m
|
||||||
|
WHERE m.latitude IS NOT NULL
|
||||||
|
AND m.longitude IS NOT NULL
|
||||||
|
AND time >= _start::TIMESTAMPTZ
|
||||||
|
AND time <= _end::TIMESTAMPTZ
|
||||||
|
AND vessel_id = current_setting('vessel.id', false)
|
||||||
|
ORDER BY m.time ASC
|
||||||
|
)
|
||||||
|
) AS t;
|
||||||
|
|
||||||
|
-- Merge jsonb
|
||||||
|
SELECT log_geojson::jsonb || metrics_geojson::jsonb into _map;
|
||||||
|
-- output
|
||||||
|
SELECT
|
||||||
|
json_build_object(
|
||||||
|
'type', 'FeatureCollection',
|
||||||
|
'features', _map
|
||||||
|
) into _track_geojson;
|
||||||
|
END;
|
||||||
|
$function$
|
||||||
|
;
|
||||||
|
COMMENT ON FUNCTION public.logbook_update_geojson_fn(in int4, in text, in text, out json) IS 'Update log details with geojson';
|
||||||
|
|
-- Add trigger to update logbook stats from user edit geojson
DROP FUNCTION IF EXISTS public.update_logbook_with_geojson_trigger_fn;
CREATE OR REPLACE FUNCTION public.update_logbook_with_geojson_trigger_fn() RETURNS TRIGGER AS $$
DECLARE
    geojson JSONB;
    feature JSONB;
BEGIN
    -- Parse the incoming GeoJSON data from the track_geojson column
    geojson := NEW.track_geojson::jsonb;

    -- Extract the first feature (assume it is the LineString)
    feature := geojson->'features'->0;

    IF geojson IS NOT NULL AND feature IS NOT NULL AND (feature->'properties' ? 'x-update') THEN

        -- Get properties from the feature to extract avg_speed and max_speed
        NEW.avg_speed := (feature->'properties'->>'avg_speed')::FLOAT;
        NEW.max_speed := (feature->'properties'->>'max_speed')::FLOAT;
        NEW.max_wind_speed := (feature->'properties'->>'max_wind_speed')::FLOAT;
        NEW.extra := jsonb_set( NEW.extra,
            '{avg_wind_speed}',
            to_jsonb((feature->'properties'->>'avg_wind_speed')::FLOAT),
            true -- this flag means it will create the key if it does not exist
        );

        -- Calculate the LineString's actual spatial distance
        NEW.track_geom := ST_GeomFromGeoJSON(feature->'geometry'::text);
        NEW.distance := TRUNC (ST_Length(NEW.track_geom,false)::INT * 0.0005399568, 4); -- convert to NM

    END IF;
    RETURN NEW;
END;
$$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.update_logbook_with_geojson_trigger_fn
    IS 'Extracts specific properties (distance, duration, avg_speed, max_speed) from a geometry LINESTRING part of a GeoJSON FeatureCollection, and then updates a column in a table named logbook';

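On the distance conversion above: ST_Length(geom, false) returns meters on the spheroid, and 0.0005399568 is a rounded 1/1852 (one nautical mile is 1852 m). A quick sanity check with an illustrative value:

-- 18520 m is exactly 10 NM, but the factor is a truncated 1/1852 = 0.000539956803...,
-- and TRUNC truncates rather than rounds, so exact multiples can land a hair under:
SELECT TRUNC(18520 * 0.0005399568, 4) AS nm;  -- 9.9999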
-- Add trigger on logbook update to update metrics from track_geojson
CREATE TRIGGER update_logbook_with_geojson_trigger_fn
    BEFORE UPDATE OF track_geojson ON api.logbook
    FOR EACH ROW
    WHEN (NEW.track_geojson IS DISTINCT FROM OLD.track_geojson)
    EXECUTE FUNCTION public.update_logbook_with_geojson_trigger_fn();

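The trigger only recomputes stats when the first feature's properties carry the 'x-update' marker. A hedged sketch of the kind of client edit that would fire it (assuming track_geojson is stored as jsonb; the values are illustrative):

-- Sketch only: adjust a property on the first feature and mark it for recomputation.
UPDATE api.logbook
   SET track_geojson = jsonb_set(
           jsonb_set(track_geojson::jsonb, '{features,0,properties,avg_speed}', '5.2'::jsonb),
           '{features,0,properties,x-update}', 'true'::jsonb)
 WHERE id = 1;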
-- Refresh user_role permissions
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;

-- Update version
UPDATE public.app_settings
    SET value='0.7.8'
    WHERE "name"='app.version';

\c postgres
@@ -1 +1 @@
-0.7.5
+0.7.8

File diff suppressed because one or more lines are too long
@@ -13,6 +13,6 @@ $ bash tests.sh

 ## docker
 ```bash
-$ docker-compose up -d db && sleep 15 && docker-compose up -d api && sleep 5
+$ docker compose up -d db && sleep 15 && docker compose up -d api && sleep 5
-$ docker-compose -f docker-compose.dev.yml -f docker-compose.yml up tests
+$ docker compose -f docker-compose.dev.yml -f docker-compose.yml up tests
 ```
@@ -142,7 +142,7 @@ var moment = require("moment");
 .set(test.logs.header.name, test.logs.header.value)
 .set("Accept", "application/json")
 .end(function (err, res) {
-res.status.should.equal(404);
+res.status.should.equal(200);
 should.exist(res.header["content-type"]);
 should.exist(res.header["server"]);
 res.header["content-type"].should.match(new RegExp("json", "g"));
@@ -192,7 +192,7 @@ var moment = require("moment");
 .set("Accept", "application/json")
 .end(function (err, res) {
 console.log(res.text);
-res.status.should.equal(404); // returned 404 as it was not enabled in user settings.
+res.status.should.equal(200); // now returns 200 as it is enabled in user settings.
 should.exist(res.header["content-type"]);
 should.exist(res.header["server"]);
 res.header["content-type"].should.match(new RegExp("json", "g"));
tests/index6.js (new file, 203 lines)
@@ -0,0 +1,203 @@
"use strict";
/*
 * Unit test #6
 * Public/Anonymous access
 *
 * process.env.PGSAIL_API_URI = from inside the docker
 *
 * npm install supertest should mocha mochawesome moment
 * alias mocha="./node_modules/mocha/bin/_mocha"
 * mocha index6.js --reporter mochawesome --reporter-options reportDir=/mnt/postgsail/,reportFilename=report_api.html
 *
 */

const sleep = (ms) => new Promise((r) => setTimeout(r, ms));

const supertest = require("supertest");
// Deprecated
const should = require("should");
//const chai = require("chai");
//const should = chai.should();
let request = null;
var moment = require("moment");

// Users Array
[
  {
    cname: process.env.PGSAIL_API_URI,
    name: "PostgSail unit test anonymous, no x-is-public header",
    moorages: {
      url: "/moorages_view",
      payload: null,
      res: {},
    },
    stays: {
      url: "/stays_view",
      payload: null,
      res: {},
    },
    logs: {
      url: "/logs_view",
      payload: null,
      res: {},
    },
    log: {
      url: "/log_view?id=eq.1",
      payload: null,
      res: {},
    },
    monitoring: {
      url: "/monitoring_view",
      payload: null,
      res: {},
    },
    timelapse: {
      url: "/rpc/timelapse_fn",
      payload: null,
      res: {},
    },
    timelapse_full: {
      url: "/rpc/timelapse_fn",
      payload: null,
      res: {},
    },
    stats_logs: {
      url: "/rpc/stats_logs_fn",
      payload: null,
      res: {},
    },
    stats_stays: {
      url: "/rpc/stats_stay_fn",
      payload: null,
      res: {},
    },
    export_gpx: {
      url: "/rpc/export_logbook_gpx_fn",
      payload: null,
      res: {},
    },
  },
].forEach(function (test) {
  //console.log(`${test.cname}`);
  describe(`${test.name}`, function () {
    request = supertest.agent(test.cname);
    request.set("User-Agent", "PostgSail unit tests");

    describe("With no JWT as api_anonymous, no x-is-public", function () {
      it("/stays_view, api_anonymous no jwt token", function (done) {
        // Reset agent so we do not save cookies
        request = supertest.agent(test.cname);
        request
          .get(test.stays.url)
          .set("Accept", "application/json")
          .end(function (err, res) {
            res.status.should.equal(200);
            should.exist(res.header["content-type"]);
            should.exist(res.header["server"]);
            res.header["content-type"].should.match(new RegExp("json", "g"));
            res.header["server"].should.match(new RegExp("postgrest", "g"));
            res.body.length.should.be.equal(0);
            done(err);
          });
      });
      it("/moorages_view, api_anonymous no jwt token", function (done) {
        // Reset agent so we do not save cookies
        request = supertest.agent(test.cname);
        request
          .get(test.moorages.url)
          .set("Accept", "application/json")
          .end(function (err, res) {
            res.status.should.equal(200);
            should.exist(res.header["content-type"]);
            should.exist(res.header["server"]);
            res.header["content-type"].should.match(new RegExp("json", "g"));
            res.header["server"].should.match(new RegExp("postgrest", "g"));
            res.body.length.should.be.equal(0);
            done(err);
          });
      });
      it("/logs_view, api_anonymous no jwt token", function (done) {
        // Reset agent so we do not save cookies
        request = supertest.agent(test.cname);
        request
          .get(test.logs.url)
          .set("Accept", "application/json")
          .end(function (err, res) {
            res.status.should.equal(200);
            should.exist(res.header["content-type"]);
            should.exist(res.header["server"]);
            res.header["content-type"].should.match(new RegExp("json", "g"));
            res.header["server"].should.match(new RegExp("postgrest", "g"));
            res.body.length.should.be.equal(0);
            done(err);
          });
      });
      it("/log_view, api_anonymous no jwt token", function (done) {
        // Reset agent so we do not save cookies
        request = supertest.agent(test.cname);
        request
          .get(test.log.url)
          .set("Accept", "application/json")
          .end(function (err, res) {
            res.status.should.equal(200);
            should.exist(res.header["content-type"]);
            should.exist(res.header["server"]);
            res.header["content-type"].should.match(new RegExp("json", "g"));
            res.header["server"].should.match(new RegExp("postgrest", "g"));
            res.body.length.should.be.equal(0);
            done(err);
          });
      });
      it("/monitoring_view, api_anonymous no jwt token", function (done) {
        // Reset agent so we do not save cookies
        request = supertest.agent(test.cname);
        request
          .get(test.monitoring.url)
          .set("Accept", "application/json")
          .end(function (err, res) {
            console.log(res.text);
            res.status.should.equal(200);
            should.exist(res.header["content-type"]);
            should.exist(res.header["server"]);
            res.header["content-type"].should.match(new RegExp("json", "g"));
            res.header["server"].should.match(new RegExp("postgrest", "g"));
            res.body.length.should.be.equal(0);
            done(err);
          });
      });
      it("/rpc/timelapse_fn, api_anonymous no jwt token", function (done) {
        // Reset agent so we do not save cookies
        request = supertest.agent(test.cname);
        request
          .post(test.timelapse.url)
          .set("Accept", "application/json")
          .end(function (err, res) {
            console.log(res.text);
            res.status.should.equal(200);
            should.exist(res.header["content-type"]);
            should.exist(res.header["server"]);
            res.header["content-type"].should.match(new RegExp("json", "g"));
            res.header["server"].should.match(new RegExp("postgrest", "g"));
            done(err);
          });
      });
      it("/rpc/export_logbook_gpx_fn, api_anonymous no jwt token", function (done) {
        // Reset agent so we do not save cookies
        request = supertest.agent(test.cname);
        request
          .post(test.export_gpx.url)
          .send({ _id: 1 })
          .set("Accept", "application/json")
          .end(function (err, res) {
            console.log(res.text);
            res.status.should.equal(401);
            should.exist(res.header["content-type"]);
            should.exist(res.header["server"]);
            res.header["content-type"].should.match(new RegExp("json", "g"));
            res.header["server"].should.match(new RegExp("postgrest", "g"));
            done(err);
          });
      });
    }); // no JWT as api_anonymous
  }); // describe per instance
}); // Users Array
@@ -180,6 +180,18 @@
 "status" : "sailing",
 "metrics" : {"navigation.log": 17441766, "navigation.trip.log": 80747, "navigation.headingTrue": 3.5972, "navigation.gnss.satellites": 10, "environment.depth.belowKeel": 20.948999999999998, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.47, "environment.water.temperature": 313.15, "electrical.batteries.1.current": 192.4, "electrical.batteries.1.voltage": 14.56, "navigation.gnss.antennaAltitude": 0.39, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 20.95, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 64, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 3, "network.n2k.ngt-1.130356.ch1.bandwidth": 2, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 4, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 10, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0}
 },
+{
+"time" : "2022-07-31T11:41:28.561Z",
+"client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
+"latitude" : 59.7163052,
+"longitude" : 25.7325741,
+"speedoverground" : 9.5,
+"courseovergroundtrue" : 198.8,
+"windspeedapparent" : 18.0,
+"anglespeedapparent" : 41.0,
+"status" : "sailing",
+"metrics" : {"navigation.log": 17441766, "navigation.trip.log": 80747, "navigation.headingTrue": 3.5972, "navigation.gnss.satellites": 10, "environment.depth.belowKeel": 20.948999999999998, "navigation.magneticVariation": 0.1414, "navigation.speedThroughWater": 3.47, "environment.water.temperature": 313.15, "electrical.batteries.1.current": 192.4, "electrical.batteries.1.voltage": 14.56, "navigation.gnss.antennaAltitude": 0.39, "network.n2k.ngt-1.130356.errorID": 0, "network.n2k.ngt-1.130356.modelID": 14, "environment.depth.belowTransducer": 20.95, "electrical.batteries.1.temperature": 299.82, "environment.depth.transducerToKeel": -0.001, "navigation.gnss.horizontalDilution": 0.8, "network.n2k.ngt-1.130356.ch1.rxLoad": 4, "network.n2k.ngt-1.130356.ch1.txLoad": 0, "network.n2k.ngt-1.130356.ch2.rxLoad": 0, "network.n2k.ngt-1.130356.ch2.txLoad": 64, "network.n2k.ngt-1.130356.ch1.deleted": 0, "network.n2k.ngt-1.130356.ch2.deleted": 0, "network.n2k.ngt-1.130356.ch2Bandwidth": 3, "network.n2k.ngt-1.130356.ch1.bandwidth": 2, "network.n2k.ngt-1.130356.ch1.rxDropped": 0, "network.n2k.ngt-1.130356.ch2.rxDropped": 0, "network.n2k.ngt-1.130356.ch1.rxFiltered": 0, "network.n2k.ngt-1.130356.ch2.rxFiltered": 0, "network.n2k.ngt-1.130356.ch1.rxBandwidth": 4, "network.n2k.ngt-1.130356.ch1.txBandwidth": 0, "network.n2k.ngt-1.130356.ch2.rxBandwidth": 0, "network.n2k.ngt-1.130356.ch2.txBandwidth": 10, "network.n2k.ngt-1.130356.uniChannelCount": 2, "network.n2k.ngt-1.130356.indiChannelCount": 2, "network.n2k.ngt-1.130356.ch1.BufferLoading": 0, "network.n2k.ngt-1.130356.ch2.bufferLoading": 0, "network.n2k.ngt-1.130356.ch1.PointerLoading": 0, "network.n2k.ngt-1.130356.ch2.pointerLoading": 0}
+},
 {
 "time" : "2022-07-31T11:42:28.569Z",
 "client_id" : "vessels.urn:mrn:imo:mmsi:987654321",
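The added fixture point (speedoverground 9.5) is what bumps max_speed from 7.1 to 9.5 in the stats output further down. A sketch of verifying it once ingested, assuming the fixture lands in api.metrics unchanged:

-- Sketch only: look up the new sample point by its fixture timestamp.
SELECT time, latitude, longitude, speedoverground, status
    FROM api.metrics
    WHERE client_id = 'vessels.urn:mrn:imo:mmsi:987654321'
        AND time = '2022-07-31T11:41:28.561Z'::TIMESTAMPTZ;
-- expected: one row with speedoverground = 9.5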
@@ -69,6 +69,12 @@ SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('ve
 SELECT api.update_logbook_observations_fn(1, '{"tags": ["tag_name"]}'::TEXT);
 SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);
+
+\echo 'Check numbers of geojson properties'
+SELECT jsonb_object_keys(jsonb_path_query(track_geojson, '$.features[0].properties'))
+    FROM api.logbook where id = 1 AND vessel_id = current_setting('vessel.id', false);
+SELECT jsonb_object_keys(jsonb_path_query(track_geojson, '$.features[1].properties'))
+    FROM api.logbook where id = 1 AND vessel_id = current_setting('vessel.id', false);
+
 -- Check export
 --\echo 'check logbook export fn'
 --SELECT api.export_logbook_geojson_fn(1);
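The two added queries list the property keys one row each; per the expected output further down, feature 0 carries 13 keys and feature 1 carries 11. A compact count-only variant (a sketch, not part of the commit) could be:

SELECT count(*) AS property_count FROM (
    SELECT jsonb_object_keys(jsonb_path_query(track_geojson, '$.features[0].properties'))
    FROM api.logbook WHERE id = 1 AND vessel_id = current_setting('vessel.id', false)
) AS keys;  -- expected: 13 here, and 11 for the same query on features[1]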
@@ -66,21 +66,21 @@ stay_code | 4

 eventlogs_view
 -[ RECORD 1 ]
-count | 12
+count | 11

 stats_logs_fn
 SELECT 1
 -[ RECORD 1 ]+----------
 name | "kapla"
 count | 4
-max_speed | 7.1
+max_speed | 9.5
-max_distance | 8.8968
+max_distance | 68.8677
 max_duration | "PT1H11M"
 ?column? | 3
-?column? | 30.1154
+?column? | 90.6030
-?column? | "PT2H43M"
+?column? | "PT2H44M"
 ?column? | 44.2
-?column? | 2
+?column? | 3
 ?column? | 4
 ?column? | 4
 first_date | t
|
|||||||
-[ RECORD 1 ]--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
-[ RECORD 1 ]--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
|
||||||
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
|
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
|
||||||
|
|
||||||
|
Check numbers of geojson properties
|
||||||
|
-[ RECORD 1 ]-----+-----------------
|
||||||
|
jsonb_object_keys | id
|
||||||
|
-[ RECORD 2 ]-----+-----------------
|
||||||
|
jsonb_object_keys | name
|
||||||
|
-[ RECORD 3 ]-----+-----------------
|
||||||
|
jsonb_object_keys | notes
|
||||||
|
-[ RECORD 4 ]-----+-----------------
|
||||||
|
jsonb_object_keys | _to_time
|
||||||
|
-[ RECORD 5 ]-----+-----------------
|
||||||
|
jsonb_object_keys | distance
|
||||||
|
-[ RECORD 6 ]-----+-----------------
|
||||||
|
jsonb_object_keys | duration
|
||||||
|
-[ RECORD 7 ]-----+-----------------
|
||||||
|
jsonb_object_keys | avg_speed
|
||||||
|
-[ RECORD 8 ]-----+-----------------
|
||||||
|
jsonb_object_keys | max_speed
|
||||||
|
-[ RECORD 9 ]-----+-----------------
|
||||||
|
jsonb_object_keys | _from_time
|
||||||
|
-[ RECORD 10 ]----+-----------------
|
||||||
|
jsonb_object_keys | _to_moorage_id
|
||||||
|
-[ RECORD 11 ]----+-----------------
|
||||||
|
jsonb_object_keys | avg_wind_speed
|
||||||
|
-[ RECORD 12 ]----+-----------------
|
||||||
|
jsonb_object_keys | max_wind_speed
|
||||||
|
-[ RECORD 13 ]----+-----------------
|
||||||
|
jsonb_object_keys | _from_moorage_id
|
||||||
|
|
||||||
|
-[ RECORD 1 ]-----+---------------------
|
||||||
|
jsonb_object_keys | time
|
||||||
|
-[ RECORD 2 ]-----+---------------------
|
||||||
|
jsonb_object_keys | trip
|
||||||
|
-[ RECORD 3 ]-----+---------------------
|
||||||
|
jsonb_object_keys | notes
|
||||||
|
-[ RECORD 4 ]-----+---------------------
|
||||||
|
jsonb_object_keys | status
|
||||||
|
-[ RECORD 5 ]-----+---------------------
|
||||||
|
jsonb_object_keys | latitude
|
||||||
|
-[ RECORD 6 ]-----+---------------------
|
||||||
|
jsonb_object_keys | longitude
|
||||||
|
-[ RECORD 7 ]-----+---------------------
|
||||||
|
jsonb_object_keys | truewindspeed
|
||||||
|
-[ RECORD 8 ]-----+---------------------
|
||||||
|
jsonb_object_keys | speedoverground
|
||||||
|
-[ RECORD 9 ]-----+---------------------
|
||||||
|
jsonb_object_keys | truewinddirection
|
||||||
|
-[ RECORD 10 ]----+---------------------
|
||||||
|
jsonb_object_keys | windspeedapparent
|
||||||
|
-[ RECORD 11 ]----+---------------------
|
||||||
|
jsonb_object_keys | courseovergroundtrue
|
||||||
|
|
||||||
|
@@ -13,7 +13,7 @@ select current_database();

 -- Check the number of process pending
 \echo 'Check the number of process pending'
--- Should be 22
+-- Should be 24
 SELECT count(*) as jobs from public.process_queue pq where pq.processed is null;
 --set role scheduler
 SELECT public.run_cron_jobs();
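When the expected pending count drifts (22 vs 24 here), a per-channel breakdown helps pinpoint which job type queued the extras; a sketch, assuming process_queue carries a channel column as in the PostgSail schema:

-- Sketch only: group pending jobs by channel (channel column is an assumption).
SELECT pq.channel, count(*) AS pending
    FROM public.process_queue pq
    WHERE pq.processed IS NULL
    GROUP BY pq.channel
    ORDER BY pending DESC;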
@@ -7,7 +7,7 @@ You are now connected to database "signalk" as user "username".
 Expanded display is on.
 Check the number of process pending
 -[ RECORD 1 ]
-jobs | 26
+jobs | 24

 -[ RECORD 1 ]-+-
 run_cron_jobs |
@@ -17,5 +17,5 @@ any_pending_jobs | 2

 Check the number of metrics entries
 -[ RECORD 1 ]-+----
-metrics_count | 172
+metrics_count | 173

@@ -22,15 +22,15 @@ count | 21

 Test monitoring_view3 for user
 -[ RECORD 1 ]
-count | 3736
+count | 3775

 Test monitoring_voltage for user
 -[ RECORD 1 ]
-count | 47
+count | 48

 Test monitoring_temperatures for user
 -[ RECORD 1 ]
-count | 120
+count | 121

 Test monitoring_humidity for user
 -[ RECORD 1 ]
@@ -11,35 +11,35 @@ Get BBOX Extent from SQL query for a log: "^/log_(w+)_(d+).png$"
 qgis_bbox_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

 -[ RECORD 1 ]---+----------------------------------------------------
-qgis_bbox_py_fn | 2749398.035335689,8334944,2756917.964664311,8338885
+qgis_bbox_py_fn | 2745681,8303937.662962963,2871529,8369891.337037037

 Get BBOX Extent from SQL query for a log as line: "^/log_(w+)_(d+)_line.png$"
 -[ RECORD 1 ]---+-------------------------------------------------------------------------
 qgis_bbox_py_fn | 2570800.6277114027,8368634.173700442,2645441.4677270483,8417049.85371059

 -[ RECORD 1 ]---+-----------------------------------------------------------------------
-qgis_bbox_py_fn | 2750457.4431765806,8335162.530580978,2755858.0759322727,8338665.643719805
+qgis_bbox_py_fn | 2752672.6236475753,8300633.73408079,2864537.04561218,8373194.440219993

 Get BBOX Extent from SQL query for all logs by vessel_id: "^/logs_(w+)_(d+).png$"
 -[ RECORD 1 ]---+------------------------------------------------------
 qgis_bbox_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

 -[ RECORD 1 ]---+------------------------------------------------------
-qgis_bbox_py_fn | -2006284.4558303887,4864146,5013530.455830389,8543049
+qgis_bbox_py_fn | -1950837.4558303887,4864146,5068977.455830389,8543049

 Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_(w+)_(d+)_(d+).png$"
 -[ RECORD 1 ]---+-------------------------------------
 qgis_bbox_py_fn | 2595383,4787988.0,2620859,11997696.0

 -[ RECORD 1 ]---+---------------------------------------
-qgis_bbox_py_fn | 97351,-192283890.5,2909895,205691085.5
+qgis_bbox_py_fn | 90420,-201110377.5,3027720,214517572.5

 Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_((w+)_(d+)_(d+)).png$"
 -[ RECORD 1 ]--------+------------------------------------------------------
 qgis_bbox_trip_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076

 -[ RECORD 1 ]--------+------------------------------------------------------
-qgis_bbox_trip_py_fn | -2006284.4558303887,4864146,5013530.455830389,8543049
+qgis_bbox_trip_py_fn | -1950837.4558303887,4864146,5068977.455830389,8543049

 -[ RECORD 1 ]
 count | 3
@@ -6,10 +6,10 @@
 You are now connected to database "signalk" as user "username".
 Expanded display is on.
 -[ RECORD 1 ]--+-------------------------------
-server_version | 16.3 (Debian 16.3-1.pgdg120+1)
+server_version | 16.4 (Debian 16.4-1.pgdg120+2)

 -[ RECORD 1 ]--------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-postgis_full_version | POSTGIS="3.4.2 c19ce56" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"
+postgis_full_version | POSTGIS="3.5.0 d2c3ca4" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" (compiled against PROJ 9.11.1) LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"

 -[ RECORD 1 ]--------------------------------------------------------------------------------------
 Name | citext
@@ -48,12 +48,12 @@ Schema | pg_catalog
 Description | PL/Python3U untrusted procedural language
 -[ RECORD 8 ]--------------------------------------------------------------------------------------
 Name | postgis
-Version | 3.4.2
+Version | 3.5.0
 Schema | public
 Description | PostGIS geometry and geography spatial types and functions
 -[ RECORD 9 ]--------------------------------------------------------------------------------------
 Name | timescaledb
-Version | 2.15.3
+Version | 2.17.1
 Schema | public
 Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
 -[ RECORD 10 ]-------------------------------------------------------------------------------------
@@ -106,14 +106,14 @@ laninline | 13566
 lanvalidator | 13567
 lanacl |
 -[ RECORD 5 ]-+-----------
-oid | 18168
+oid | 18190
 lanname | plpython3u
 lanowner | 10
 lanispl | t
 lanpltrusted | t
-lanplcallfoid | 18165
+lanplcallfoid | 18187
-laninline | 18166
+laninline | 18188
-lanvalidator | 18167
+lanvalidator | 18189
 lanacl |

 -[ RECORD 1 ]+-----------
@@ -653,22 +653,22 @@ reverse_geocode_py_fn | {"name": "Spain", "country_code": "es"}

 Test geoip reverse_geoip_py_fn
 Test overpass API overpass_py_fn
 -[ RECORD 1 ]--+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-overpass_py_fn | {"fee": "yes", "vhf": "09", "name": "Port Olímpic", "phone": "+34 933561016", "leisure": "marina", "website": "https://portolimpic.barcelona/", "wikidata": "Q171204", "wikipedia": "ca:Port Olímpic de Barcelona", "addr:street": "Moll de Xaloc", "power_supply": "yes", "seamark:type": "harbour", "addr:postcode": "08005", "internet_access": "wlan", "wikimedia_commons": "Category:Port Olímpic (Barcelona)", "sanitary_dump_station": "yes", "seamark:harbour:category": "marina"}
+overpass_py_fn | {"fee": "yes", "vhf": "09", "name": "Port Olímpic", "image": "https://i.imgur.com/1KQVeFV.jpeg", "phone": "+34 933561016", "leisure": "marina", "website": "https://portolimpic.barcelona/", "wikidata": "Q171204", "wikipedia": "ca:Port Olímpic de Barcelona", "check_date": "2024-09-16", "addr:street": "Moll de Xaloc", "power_supply": "yes", "seamark:type": "harbour", "addr:postcode": "08005", "internet_access": "wlan", "wikimedia_commons": "Category:Port Olímpic (Barcelona)", "sanitary_dump_station": "yes", "seamark:harbour:category": "marina"}

 -[ RECORD 1 ]--+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure": "marina", "name:ca": "Port de la Ginesta", "wikidata": "Q16621038", "wikipedia": "ca:Port Ginesta"}
+overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure": "marina", "name:ca": "Port de la Ginesta", "wikidata": "Q16621038", "wikipedia": "ca:Port Ginesta", "check_date": "2024-08-23"}

 -[ RECORD 1 ]--+----------------------------------------------
 overpass_py_fn | {"name": "Norra hamnen", "leisure": "marina"}

 -[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------
-versions_fn | {"api_version" : "0.7.4", "sys_version" : "PostgreSQL 16.3", "timescaledb" : "2.15.3", "postgis" : "3.4.2", "postgrest" : "PostgREST 12.2.2"}
+versions_fn | {"api_version" : "0.7.8", "sys_version" : "PostgreSQL 16.4", "timescaledb" : "2.17.1", "postgis" : "3.5.0", "postgrest" : "PostgREST 12.2.3"}

 -[ RECORD 1 ]-----------------
-api_version | 0.7.4
+api_version | 0.7.8
-sys_version | PostgreSQL 16.3
+sys_version | PostgreSQL 16.4
-timescaledb | 2.15.3
+timescaledb | 2.17.1
-postgis | 3.4.2
+postgis | 3.5.0
-postgrest | PostgREST 12.2.2
+postgrest | PostgREST 12.2.3

@@ -168,6 +168,14 @@ else
 echo mocha index5.js
 exit 1
 fi
+# Anonymous API unit tests
+$mymocha index6.js --reporter ./node_modules/mochawesome --reporter-options reportDir=output/,reportFilename=report6.html
+if [ $? -eq 0 ]; then
+    echo OK
+else
+    echo mocha index6.js
+    exit 1
+fi
+
 # Anonymous SQL unit tests
 psql ${PGSAIL_DB_URI} < sql/anonymous.sql > output/anonymous.sql.output