62 Commits

Author SHA1 Message Date
xbgmsharp
13f8240838 Release 0.9.1-202504 2025-05-04 22:41:27 +02:00
xbgmsharp
b7fe6a27b2 Update tests versions, PostgREST 12.2.12 2025-05-04 22:41:13 +02:00
xbgmsharp
29cc40f6de Update SQL comments 2025-05-04 22:32:42 +02:00
xbgmsharp
861e61d378 Update sql tests, name change in OSM data 2025-05-04 22:27:15 +02:00
xbgmsharp
684f34644f Update tests, improve and add more tests 2025-05-04 22:26:37 +02:00
xbgmsharp
6ad9980cd2 Update tests, Update api.eventlogs_view skip the new_stay 2025-05-04 22:25:48 +02:00
xbgmsharp
7f5974efe2 Update tests, Remove deprecated client_id 2025-05-04 22:25:08 +02:00
xbgmsharp
f4b65d3156 Update README.md 2025-05-04 22:23:41 +02:00
xbgmsharp
d8ef8b8958 Add migration 202504
- Install and update TimescaleDB Toolkit
- Remove deprecated client_id
- Remove index from logbook columns
- Remove deprecated column from api.logbook
- Add new mobilityDB support with comments
- Update api.export_logbook_geojson_linestring_trip_fn, add more metadata properties
- Update public.check_jwt, allow anonymous access to the new mobilitydb export geojson function
- Create api.monitoring_upsert_fn, the function that updates the api.metadata monitoring configuration
- Update cron_process_skplugin_upgrade_fn, update check for signalk plugin version
- Update api.find_log_from_moorage_fn using the mobilitydb trajectory
- Update api.find_log_to_moorage_fn using the mobilitydb trajectory
- Update api.eventlogs_view to fetch the event logs backwards and skip the new_stay
- Update api.stats_logs_fn, ensure the trip is completed
- Update api.stats_fn, ensure the trip is completed
- Update moorage_delete_trigger_fn, when a moorage is deleted, delete process_queue references as well.
- Create public.logbook_delete_trigger_fn, when a logbook is deleted, logbook_ext needs to be deleted as well.
- Update metadata table, mark client_id as deprecated
- Update public.process_lat_lon_fn, add new moorage reference in public.process_queue for event logs review
- Add Description on missing trigger
- Update public.logbook_active_geojson_fn, fix log_gis_line as there is no end time yet
- Create public.stay_active_geojson_fn function to produce a GeoJSON with the last position and stay details
- Update monitoring view to support live moorage in GeoJSON
- Add api.monitoring_live view, the live tracking view
- Refresh permissions
2025-05-04 22:23:00 +02:00
xbgmsharp
686ac7498b Update web UI front-end to v0.1.0-beta15 2025-05-04 22:17:20 +02:00
xbgmsharp
d4c4347a4c Update web UI front-end to v0.1.0-beta14 2025-04-26 10:20:06 +02:00
xbgmsharp
4aacae3913 Update tests versions, PostgREST 12.2.11 2025-04-26 10:06:48 +02:00
xbgmsharp
06fd834441 Update metadata tests, ensure a result for api.metadata.configuration based on update_at value 2025-04-21 23:42:12 +02:00
xbgmsharp
86bd4b5843 Update SQL tests, update event logs view revert b6fef6358a 2025-04-21 23:40:54 +02:00
xbgmsharp
111d7d36db Update tests versions, timescaledb_toolkit 1.21.0, timescaledb 2.19.3, PostgREST 12.2.10 2025-04-21 22:32:00 +02:00
xbgmsharp
b6fef6358a Update SQL tests, update event logs view 2025-04-13 08:35:06 +02:00
xbgmsharp
4aecea7532 Update tests versions, timescaledb 2.19.2 2025-04-13 08:34:28 +02:00
xbgmsharp
c8908748f7 Update ERD
- update logbook metrics
2025-04-06 22:36:40 +02:00
xbgmsharp
0e812c0939 Update mobilityDB unit sql tests, new geojson properties 2025-04-06 09:26:55 +02:00
xbgmsharp
395b7cfad7 Update sql unit test, improve anonymous test 2025-04-06 00:00:26 +02:00
xbgmsharp
14e8c8363c Update OpenAPI 2025-04-05 23:59:52 +02:00
xbgmsharp
448124f01b Update ERD
- Deprecated client_id
- Add new logbook metrics
2025-04-05 23:58:46 +02:00
xbgmsharp
3b466e3d93 Update tests versions, Add timescaledb_toolkit 1.19.0 2025-04-04 14:17:58 +02:00
xbgmsharp
f0ddca7d58 Update tests versions, timescaledb 2.19.1 2025-04-04 14:16:30 +02:00
xbgmsharp
7744ad4af9 Release 0.9.0-202503 2025-03-31 17:38:52 +02:00
xbgmsharp
5c70a9a453 Update cron_post_jobs output 2025-03-31 13:37:42 +02:00
xbgmsharp
6635015dbf Update mobilitydb SQL test 2025-03-31 13:37:26 +02:00
xbgmsharp
f285fcddb0 Add migration 202503
- Update metadata table, mark client_id as deprecated
- Update metadata table update configuration column type to jsonb
- Update metadata table add new column available_keys
- Update metadata_upsert_trigger_fn to metadata table
- Add metadata table trigger for update_metadata_configuration
- Update api.export_logbook_geojson_linestring_trip_fn, add metadata
- Add public.get_season, return the season based on the input date for logbook tag
- Refresh permissions
2025-03-31 13:34:39 +02:00
xbgmsharp
cabf405648 Update mobilitydb SQL test 2025-03-31 13:32:56 +02:00
xbgmsharp
b2f3372b26 Update API tests
- Deprecated client_id
- Update configuration
- Add available_keys
2025-03-31 13:32:19 +02:00
xbgmsharp
754c9bb6e7 Update tests versions, PostgSail 0.9.0, timescaledb 2.19.0 2025-03-31 13:30:12 +02:00
xbgmsharp
f9238c62dd Update OpenAPI 2025-03-31 13:29:22 +02:00
xbgmsharp
4a294674e8 Add metadata SQL unit test 2025-03-31 13:29:03 +02:00
xbgmsharp
83e92cfd6c Add metadata SQL test 2025-03-31 13:28:42 +02:00
xbgmsharp
718ca6d6ea Update grafana SQL tests
- Deprecated client_id
- Update configuration
- Add available_keys
2025-03-31 13:28:17 +02:00
xbgmsharp
a07f4f181c Update ERD
- Deprecated client_id
- Update configuration
- Add available_keys
2025-03-31 13:27:09 +02:00
xbgmsharp
72b06f9eb9 Update README 2025-03-14 16:01:23 +01:00
xbgmsharp
598a789d36 Update docker compose file, remove deprecated version 2025-02-27 15:19:11 +01:00
xbgmsharp
37e948cb20 Update tests versions, PostgreSQL 16.8, timescaledb 2.18.2 PostgREST 12.2.8 2025-02-27 15:17:53 +01:00
xbgmsharp
f26ece878b Update tests versions, PostGIS 3.5.2, timescaledb 2.18.0, PostgREST 12.2.6 2025-02-02 09:47:07 +01:00
xbgmsharp
9f8b43577e Update tests 2025-01-16 11:05:46 +01:00
xbgmsharp
0c76edf793 Update tests 2025-01-16 10:31:20 +01:00
xbgmsharp
0cac828347 Update front-end to v0.1.0-beta14 2025-01-16 10:30:38 +01:00
xbgmsharp
9e9189ac36 Update migration 202412:
- Update public.logbook_update_metrics_short_fn, handle corner use case
- Update public.logbook_update_metrics_fn, handle corner use case
- Update public.logbook_update_metrics_timebucket_fn, handle corner use case
2025-01-16 08:59:50 +01:00
xbgmsharp
5409f1eec9 Update migration 202412:
- Update api.export_logbooks_geojson_point_trips_fn,
- Update export_logbook_geojson_trip_fn, update geojson from trip to geojson with more properties
2025-01-09 21:19:45 +01:00
xbgmsharp
e5491ae0c9 Update migration 202412
- Update api.export_logbook_geojson_point_trip_fn, fix dynamic notes for good
2025-01-08 23:37:51 +01:00
xbgmsharp
1b57641e7d Update migration 202412
- Update api.export_logbook_geojson_point_trip_fn, fix dynamic notes
2025-01-08 23:30:44 +01:00
xbgmsharp
aa7608e07e Update migration 202412
- Update api.stats_fn, due to reference_count and stay_duration columns removal
- Update api.stats_stays_fn, due to reference_count and stay_duration columns removal
- Update log_view with dynamic GeoJSON, change geojson export fn
- Update delete_trip_entry_fn, support additional temporal sequence columns (depth,etc...)
- Update export_logbook_geojson_trip_fn, update geojson from trip to geojson additional temporal sequence columns (depth,etc...)
- Update api.export_logbook_geojson_point_trip_fn, update geojson from trip to geojson additional temporal sequence columns (depth,etc...)
2025-01-08 23:17:06 +01:00
xbgmsharp
9575eba043 Release 0.8.1-202412 2025-01-06 09:06:45 +01:00
xbgmsharp
aa7450271d Update overpass tests 2025-01-05 18:20:03 +01:00
xbgmsharp
893a7fc46f Update tests versions, PostGIS 3.5.1 2025-01-05 18:14:24 +01:00
xbgmsharp
3a035f3519 Update migration 202412
- Remove deprecated column from api.moorages
- Update api.moorage_view, due to stay_duration column removal
- Update stats_moorages_view, due to stay_duration column removal
- Update stats_moorages_away_view, due to stay_duration column removal
2025-01-05 18:01:50 +01:00
xbgmsharp
1355629c4e Add migration 202412
- Full support for MobilityDB
- Add new mobilityDB support
- Remove deprecated column from api.logbook
- Update public.logbook_update_metrics_short_fn, aggregate more metrics
- Update public.logbook_update_metrics_fn, aggregate more metrics
- Update public.logbook_update_metrics_timebucket_fn, aggregate more metrics
- Update api.merge_logbook_fn, add support for mobility temporal type
- Update export_logbook_geojson_trip_fn, update geojson from trip to geojson
- Create api.export_logbook_geojson_point_trip_fn, transform spatiotemporal trip into a geojson with the corresponding properties
- Update public.process_lat_lon_fn remove deprecated moorages columns
- Update logbook table, add support for mobility temporal type
- Update public.badges_geom_fn remove track_geom and use mobilitydb trajectory
- Update public.process_stay_queue_fn remove calculation of stay duration and count
- Update public.badges_moorages_fn remove calculation of stay duration and count
- Update api.find_log_from_moorage_fn using the mobilitydb trajectory
- Update api.find_log_to_moorage_fn using the mobilitydb trajectory
- Update api.delete_logbook_fn to delete moorage dependency using mobilitydb
- Update public.qgis_bbox_py_fn to use mobilitydb trajectory
- Update public.qgis_bbox_trip_py_fn to use mobilitydb trajectory
2025-01-05 17:37:33 +01:00
xbgmsharp
0b8a9950e8 Update tests
- Add full support for MobilityDB
2025-01-05 17:36:48 +01:00
xbgmsharp
1c1cf70ae2 Update ERD 2025-01-05 17:35:49 +01:00
xbgmsharp
f7cf07ca99 Update OpenAPI 2025-01-05 17:35:27 +01:00
xbgmsharp
00056ec4f0 Merge pull request #14 from Marinminds/main
Update Self‐hosted-installation-guide on AWS.md
2024-12-12 14:56:50 +01:00
koenraad
717a85c3ec Update Self‐hosted-installation-guide on AWS.md
Add SQL queries
2024-12-12 12:09:50 +01:00
koenraad
609fb0a05d Update Self‐hosted-installation-guide on AWS.md
update markup
add pgadmin instructions
2024-12-12 11:40:25 +01:00
xbgmsharp
22a6b7eb65 Update docs 2024-12-12 10:07:09 +01:00
xbgmsharp
561c695f32 Update doc 2024-12-07 12:07:45 +01:00
xbgmsharp
c51059a431 Update migration 202411
Update public.logbook_update_metrics_fn, aggregate metrics by time-series to reduce size
2024-12-07 12:05:51 +01:00
37 changed files with 4607 additions and 259 deletions

View File

@@ -104,6 +104,8 @@ To understand the why and how, you might want to read [Why.md](https://github.co
Remove the hassle of running PostgSail yourself. Here you can skip the technical setup, the maintenance work and server costs by getting PostgSail on our reliable and secure PostgSail Cloud. Register and try for free at [iot.openplotter.cloud](https://iot.openplotter.cloud/).
PostgSail Cloud is Open Source and free for personal use with a single vessel. If you wish to manage multiple boats, contact us.
## On-Premise (for free)
Self-host PostgSail where you want and how you want. There are no restrictions; you're in full control. [Install Guide](https://github.com/xbgmsharp/postgsail/blob/main/docs/README.md)
@@ -142,5 +144,6 @@ An out of the box IoT platform using Docker (could be extend to K3 or K8) with t
- [PostgreSQL, open source object-relational database system](https://postgresql.org)
- [TimescaleDB, Time-series data extends PostgreSQL](https://www.timescale.com)
- [PostGIS, a spatial database extender for PostgreSQL object-relational database.](https://postgis.net/)
- [MobilityDB, An open source geospatial trajectory data management & analysis platform.](https://mobilitydb.com/)
- [Grafana, open observability platform | Grafana Labs](https://grafana.com)
- And many more

View File

@@ -1,5 +1,3 @@
version: "3.9"
services:
  db:
    image: xbgmsharp/timescaledb-postgis

View File

@@ -13,22 +13,30 @@ erDiagram
timestamp_with_time_zone _to_time
boolean active
double_precision avg_speed
numeric distance "Distance in nautical miles (NM)"
interval duration "Duration in ISO 8601 format"
jsonb extra "Computed SignalK metrics such as runtime, current level, etc."
integer id "{NOT_NULL}"
double_precision max_speed
double_precision max_wind_speed
text name
text notes
geography track_geog "postgis geography type default SRID 4326 Unit: degres"
jsonb track_geojson "store generated geojson with track metrics data using with LineString and Point features, we can not depend api.metrics table"
geometry track_geom "postgis geometry type EPSG:4326 Unit: degres"
tgeogpoint trip "MobilityDB trajectory"
tfloat trip_batt_charge "Battery Charge"
tfloat trip_batt_voltage "Battery Voltage"
tfloat trip_cog "courseovergroundtrue"
tfloat trip_depth "Depth"
tfloat trip_heading "heading True"
tfloat trip_hum_out "Humidity outside"
ttext trip_notes
tfloat trip_pres_out "Pressure outside"
tfloat trip_sog "speedoverground"
tfloat trip_solar_power "solar powerPanel"
tfloat trip_solar_voltage "solar voltage"
ttext trip_status
tfloat trip_tank_level "Tank currentLevel"
tfloat trip_temp_out "Temperature outside"
tfloat trip_temp_water "Temperature water"
tfloat trip_twa "windspeedapparent"
tfloat trip_twd "truewinddirection"
tfloat trip_tws "truewindspeed"
@@ -38,9 +46,11 @@ erDiagram
api_metadata {
boolean active "trigger monitor online/offline"
boolean active
jsonb available_keys "Signalk paths with unit for custom mapping"
jsonb available_keys
double_precision beam
jsonb configuration "Signalk path mapping for metrics"
jsonb configuration
timestamp_with_time_zone created_at "{NOT_NULL}"
double_precision height
integer id "{NOT_NULL}"
@@ -59,7 +69,7 @@ erDiagram
api_metrics {
double_precision anglespeedapparent
text client_id "Deprecated client_id to be removed"
double_precision courseovergroundtrue
double_precision latitude "With CONSTRAINT but allow NULL value to be ignored silently by trigger"
double_precision longitude "With CONSTRAINT but allow NULL value to be ignored silently by trigger"
@@ -82,9 +92,7 @@ erDiagram
jsonb nominatim
text notes
jsonb overpass
integer reference_count
integer stay_code "Link api.stays_at with api.moorages via FOREIGN KEY and REFERENCES"
interval stay_duration "Best to use standard ISO 8601"
text vessel_id "{NOT_NULL}"
}
@@ -264,6 +272,8 @@ erDiagram
api_logbook }o--|| api_metadata : ""
api_logbook }o--|| api_moorages : ""
api_logbook }o--|| api_moorages : ""
api_logbook }o--|| api_moorages : ""
api_logbook }o--|| api_moorages : ""
api_metadata }o--|| auth_vessels : ""
api_metrics }o--|| api_metadata : ""
api_moorages }o--|| api_metadata : ""

View File

@@ -115,16 +115,21 @@ Then connect to the web UI on port HTTP/5050.
#### Step 3. Start frontend (web)
Lastly, build and launch the web frontend; execute:
```bash
docker compose build web
docker compose up web
```
The first step can take some time as it will first run a build to generate the static website based on your settings.
The frontend is a SPA (Single-Page Application). With a SPA, the server provides the user with an empty HTML page and JavaScript. The latter is where the magic happens. When the browser receives the HTML + JavaScript, it loads the JavaScript. Once loaded, the JS takes over and, through a set of operations on the DOM, renders the necessary components to the page. Routing is then handled by the browser itself, without hitting the server.
The frontend should be accessible via port HTTP/8080.
Users are collaborating on two installation guides:
- [Self-hosted-installation-guide](https://github.com/xbgmsharp/postgsail/blob/main/docs/install_guide.md)
- [Self-hosted-installation-guide on AWS EC2](https://github.com/xbgmsharp/postgsail/blob/main/docs/Self%E2%80%90hosted-installation-guide%20on%20AWS.md)
- [Self-hosted-installation-guide](https://github.com/xbgmsharp/postgsail/blob/main/docs/Self%E2%80%90hosted-installation-guide.md)

View File

@@ -19,109 +19,141 @@ With the following settings:
Configure storage:
The standard storage of 8GiB is too small so change this to 16GiB.
Create a new security group
+ Go to: EC2>Security groups>Create security group
Add inbound rules for the following ports: 443, 8080, 80, 3000, 5432, 22, 5050
+ Go to your instance>select your instance>Actions>security>change security group
+ And add the correct security group to the instance.
## Connect to instance with SSH
+ Copy the key file in your default SSH configuration file location (the one VSCode will use)
+ In terminal, go to the folder and run this command to ensure your key is not publicly viewable:
```
chmod 600 "privatekey.pem"
```
We are using VSCode to connect to the instance:
+ Install the Remote - SSH Extension for VSCode
+ Open the Command Palette (Ctrl+Shift+P) and type Remote-SSH: Add New SSH Host:
```
ssh -i "privatekey.pem" ubuntu@ec2-111-22-33-44.eu-west-1.compute.amazonaws.com
```
When prompted, select the default SSH configuration file location.
Open the config file and add the location:
```
IdentityFile ~/.ssh/privatekey.pem
```
## Install Docker on your instance
To install Docker on your new EC2 Ubuntu instance via SSH, follow these steps:
Update your package list:
```
sudo apt-get update
```
Install required dependencies:
```
sudo apt-get install apt-transport-https ca-certificates curl software-properties-common
```
Add Docker's official GPG key:
```
curl -fsSL https://download.docker.com/linux/ubuntu/gpg | sudo gpg --dearmor -o /usr/share/keyrings/docker-archive-keyring.gpg
```
Add Docker's official repository:
```
echo "deb [arch=amd64 signed-by=/usr/share/keyrings/docker-archive-keyring.gpg] https://download.docker.com/linux/ubuntu $(lsb_release -cs) stable" | sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
```
Update the package list again:
```
sudo apt-get update
```
Install Docker:
```
sudo apt-get install docker-ce docker-ce-cli containerd.io
```
Verify Docker installation:
```
sudo docker --version
```
Add your user to the docker group to run Docker without sudo:
```
sudo usermod -aG docker ubuntu
```
Then, log out and back in or use the following to apply the changes:
```
newgrp docker
```
## Install Postgsail
Git clone the postgsail repo:
```
git clone https://github.com/xbgmsharp/postgsail.git
```
## Edit environment variables
Copy the .env.example file and edit the environment variables:
```
cd postgsail
cp .env.example .env
nano .env
```
+ POSTGRES_USER
Come up with a unique username for the database user. This will be used in the docker image when it's started up. Nothing beyond creating a unique username and password is required here.
This environment variable is used in conjunction with `POSTGRES_PASSWORD` to set a user and its password. This variable will create the specified user with superuser power and a database with the same name.
https://github.com/docker-library/docs/blob/master/postgres/README.md
+ POSTGRES_PASSWORD
This should be a good password. It will be used for the postgres user above. Again, this is used in the docker image.
This environment variable is required for you to use the PostgreSQL image. It must not be empty or undefined. This environment variable sets the superuser password for PostgreSQL. The default superuser is defined by the POSTGRES_USER environment variable.
+ POSTGRES_DB
This is the name of the database within postgres. You can leave it named postgres but give it a unique name if you like. The schema will be loaded into this database and all data will be stored within it. Since this is used inside the docker image the name really doesn't matter. If you plan to run additional databases within the image, then you might care.
This environment variable can be used to define a different name for the default database that is created when the image is first started. If it is not specified, then the value of `POSTGRES_USER` will be used.
+ PGSAIL_APP_URL
This is the webapp (webui) entrypoint, typically the public DNS or IP:
```
PGSAIL_APP_URL=http://localhost:8080
```
+ PGSAIL_API_URL
This is the URL to your API on your instance on port 3000:
```
PGSAIL_API_URL=http://localhost:3000
```
+ PGSAIL_AUTHENTICATOR_PASSWORD
This password is used as part of the database access configuration. It's used as part of the access URI later on. (Put the same password in both lines.)
+ PGSAIL_GRAFANA_PASSWORD
This password is used for the grafana service.
+ PGSAIL_GRAFANA_AUTH_PASSWORD
This password is used for user authentication on Grafana (to be confirmed).
+ PGSAIL_EMAIL_FROM - PGSAIL_EMAIL_SERVER - PGSAIL_EMAIL_USER - PGSAIL_EMAIL_PASS
PostgSail does not include a built-in email service, only hooks to send email via an existing server.
We use Gmail as a third-party email service:
```
PGSAIL_EMAIL_FROM=email@gmail.com
PGSAIL_EMAIL_SERVER=smtp.gmail.com
PGSAIL_EMAIL_USER=email@gmail.com
```
You need to get PGSAIL_EMAIL_PASS from your Gmail account security settings: it is not the account password; instead you need to create an "App password".
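For example (the value below is illustrative only; a Gmail App password is a 16-character code):
```
PGSAIL_EMAIL_PASS=abcdefghijklmnop
```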
+ PGRST_JWT_SECRET
This secret key must be at least 32 characters long; you can create a random key with the following command:
```
cat /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | fold -w 42 | head -n 1
```
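The generated string goes into the .env file; the value below is a made-up example of the expected shape:
```
PGRST_JWT_SECRET=4kQ9rT2xW7bN1mZ8cV5hJ3pL6sD0fG9aY2eU4iO7Xz
```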
+ Other ENV variables
```
PGSAIL_PUSHOVER_APP_TOKEN
PGSAIL_PUSHOVER_APP
@@ -129,39 +161,128 @@ PGSAIL_TELEGRAM_BOT_TOKEN
PGSAIL_AUTHENTICATOR_PASSWORD=password
PGSAIL_GRAFANA_PASSWORD=password
PGSAIL_GRAFANA_AUTH_PASSWORD=password
#PGSAIL_PUSHOVER_APP_TOKEN= Comment if not used
#PGSAIL_PUSHOVER_APP_URL= Comment if not used
#PGSAIL_TELEGRAM_BOT_TOKEN= Comment if not used
```
## Run the project
If needed, add your user to the docker group to run Docker without sudo:
```
sudo usermod -aG docker ubuntu
```
Then, log out and back in or use the following to apply the changes:
```
newgrp docker
```
Step 1. Import the SQL schema, execute:
```
docker compose up db
```
Step 2. Launch the full backend stack (db, api), execute:
```
docker compose up db api
```
Step 3. Launch the frontend webapp:
```
docker compose up web
```
Open a browser and navigate to your PGSAIL_APP_URL; you should see the PostgSail login screen now:
```
http://ec2-11-234-567-890.eu-west-1.compute.amazonaws.com:8080
```
## Additional SQL setup
Additional setup will be required.
There is no user account yet, and cron jobs need to be activated.
We'll do that by using pgadmin.
### Run Pgadmin & connect to database
First add two more vars to your .env file:
```
PGADMIN_DEFAULT_EMAIL=setup@setup.com
PGADMIN_DEFAULT_PASSWORD=123456
```
Pgadmin is defined in docker-compose.dev.yml, so we need to start the service:
``` ```
docker compose -f docker-compose.yml -f docker-compose.dev.yml up -d pgadmin
```
All services should be up now: api, db, web and pgadmin. Check that all services are running:
```
docker ps
```
To open Pgadmin, navigate to your AWS URL on port 5050:
```
http://ec2-11-234-567-890.eu-west-1.compute.amazonaws.com:5050
```
You are now able to log in with your credentials: PGADMIN_DEFAULT_EMAIL & PGADMIN_DEFAULT_PASSWORD.

In the right-side panel you will see "Servers(1)"; by clicking it you'll see the server "PostgSail dev db".

**Warning:** A dialog box will open, prompting for the password but stating the wrong username (postgres). Change this username by right-clicking on the server "PostgSail dev db" > Properties > Connection > enter username: POSTGRES_USER > Save.

Now right-click, select Connect to Server, and enter your password: POSTGRES_PASSWORD.

You'll see two databases: "postgres" and "signalk".
### Enabling cron jobs by SQL query
Cron jobs are not active by default because if you don't have the correct settings (for SMTP, PushOver, Telegram), you might enter a loop of errors and you could be blocked or banned from the external services.

Once you have set up the services correctly (entered credentials in the .env file), you can activate the cron jobs in the "postgres" database. (We are only using the SMTP email service in this example.)
+ Right-click on "postgres" database and select "Query Tool"
+ Execute the following SQL query:
```
UPDATE cron.job SET active = True;
```
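To confirm the change (assuming the standard pg_cron schema, where cron.job exposes jobid, jobname and active), list the jobs:
```
SELECT jobid, jobname, active FROM cron.job;
```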
### Adding a user by SQL query
I was not able to create a new user through the web application (still figuring out what is going on). Therefore I added a new user by SQL in the "signalk" database.
+ Right-click on "signalk" database and select "Query Tool"
+ Check the current users in your database by executing the query:
```
SELECT * FROM auth.accounts;
```
+ To add a new user, execute the query:
```
INSERT INTO auth.accounts (email, first, last, pass, role)
VALUES ('your.email@domain.com'::citext, 'Test'::text, 'your_username'::text, 'your_password'::text, 'user_role'::name)
RETURNING email;
```
When SMTP is correctly set up, you will receive two emails: "Welcome" and "Email verification".

You will be able to log in with these credentials on the web.

Each time you log in, you will receive an "Email verification" email. This is the OTP process; you can bypass it by updating the JSON key value of "Preferences":
```
UPDATE auth.accounts
SET preferences='{"email_valid": true}'::jsonb || preferences
WHERE email='your.email@domain.com';
```
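To verify the flag was applied (auth.accounts and its preferences column are shown above):
```
SELECT email, preferences->>'email_valid' AS email_valid FROM auth.accounts WHERE email='your.email@domain.com';
```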
Now you are able to use PostgSail on the web on your own AWS server!

docs/install_guide.md (new file, 84 lines)
View File

@@ -0,0 +1,84 @@
## Connect to the server
```bash
% ssh root@my.server.com
```
## Clone the git repo
```bash
% git clone https://github.com/xbgmsharp/postgsail
Cloning into 'postgsail'...
...
```
## Edit the configuration
```bash
% cd postgsail
% cp .env.example .env
% cat /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | fold -w 42 | head -n 1
..
% nano .env
```
## Install Docker
From https://docs.docker.com/engine/install/ubuntu/
```bash
% apt-get update
...
% apt-get install -y ca-certificates curl
...
% install -m 0755 -d /etc/apt/keyrings
% curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc
% chmod a+r /etc/apt/keyrings/docker.asc
% echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
% apt-get update
...
% apt-get install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin
...
```
## Init the database
```bash
% docker compose up db
...
Gracefully stopping... (press Ctrl+C again to force)
[+] Stopping 1/1
✔ Container db Stopped
```
## Start the db with the api
```bash
% docker compose pull api
...
% docker compose up -d db api
```
## Checks
Making sure it works.
```bash
% telnet localhost 5432
...
telnet> quit
Connection closed.
% curl localhost:3000
...
% docker ps
...
% docker logs api
...
```
## Run the web instance
```bash
% docker compose -f docker-compose.yml -f docker-compose.dev.yml build web  # be patient
...
% docker compose -f docker-compose.yml -f docker-compose.dev.yml up web  # be patient
...
web |
web | ➜ Local: http://localhost:8080/
web | ➜ Network: http://172.18.0.4:8080/
```

View File

@@ -268,7 +268,7 @@ BEGIN
avg(m.courseovergroundtrue) as courseovergroundtrue,
avg(m.speedoverground) as speedoverground,
avg(m.windspeedapparent) as windspeedapparent,
last(m.longitude, m.time) as longitude, last(m.latitude, m.time) as latitude,
'' AS notes,
last(m.status, m.time) as status,
COALESCE(metersToKnots(avg((m.metrics->'environment.wind.speedTrue')::NUMERIC)), NULL) as truewindspeed,
@@ -334,11 +334,11 @@ BEGIN
SELECT * FROM metrics
UNION ALL
SELECT * FROM last_metric
ORDER BY time_bucket ASC
)
-- Create mobilitydb temporal sequences
SELECT
tgeogpointseq(array_agg(tgeogpoint(ST_SetSRID(o.geo_point, 4326)::geography, o.time_bucket) ORDER BY o.time_bucket ASC)) AS trajectory,
tfloatseq(array_agg(tfloat(o.courseovergroundtrue, o.time_bucket) ORDER BY o.time_bucket ASC) FILTER (WHERE o.courseovergroundtrue IS NOT NULL)) AS courseovergroundtrue,
tfloatseq(array_agg(tfloat(o.speedoverground, o.time_bucket) ORDER BY o.time_bucket ASC) FILTER (WHERE o.speedoverground IS NOT NULL)) AS speedoverground,
tfloatseq(array_agg(tfloat(o.windspeedapparent, o.time_bucket) ORDER BY o.time_bucket ASC) FILTER (WHERE o.windspeedapparent IS NOT NULL)) AS windspeedapparent,

File diff suppressed because it is too large

View File

@@ -0,0 +1,219 @@
---------------------------------------------------------------------------
-- Copyright 2021-2025 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration January-March 2025
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
\echo 'Timing mode is enabled'
\timing
\echo 'Force timezone, just in case'
set timezone to 'UTC';
-- Update metadata table, mark client_id as deprecated
COMMENT ON COLUMN api.metadata.client_id IS 'Deprecated client_id to be removed';
-- Update metrics table, mark client_id as deprecated
COMMENT ON COLUMN api.metrics.client_id IS 'Deprecated client_id to be removed';
-- Update metadata table update configuration column type to jsonb and comment
ALTER TABLE api.metadata ALTER COLUMN "configuration" TYPE jsonb USING "configuration"::jsonb;
COMMENT ON COLUMN api.metadata.configuration IS 'Signalk path mapping for metrics';
-- Update metadata table add new column available_keys and comment
ALTER TABLE api.metadata ADD available_keys jsonb NULL;
COMMENT ON COLUMN api.metadata.available_keys IS 'Signalk paths with unit for custom mapping';
--DROP FUNCTION public.metadata_upsert_trigger_fn();
-- Update metadata_upsert_trigger_fn to metadata table to support configuration and available_keys and deprecated client_id
CREATE OR REPLACE FUNCTION public.metadata_upsert_trigger_fn()
RETURNS trigger
LANGUAGE plpgsql
AS $function$
DECLARE
metadata_id integer;
metadata_active boolean;
BEGIN
-- Require Signalk plugin version 0.4.0
-- Set client_id to new value to allow RLS
--PERFORM set_config('vessel.client_id', NEW.client_id, false);
-- UPSERT - Insert vs Update for Metadata
--RAISE NOTICE 'metadata_upsert_trigger_fn';
--PERFORM set_config('vessel.id', NEW.vessel_id, true);
--RAISE WARNING 'metadata_upsert_trigger_fn [%] [%]', current_setting('vessel.id', true), NEW;
SELECT m.id,m.active INTO metadata_id, metadata_active
FROM api.metadata m
WHERE m.vessel_id IS NOT NULL AND m.vessel_id = current_setting('vessel.id', true);
--RAISE NOTICE 'metadata_id is [%]', metadata_id;
IF metadata_id IS NOT NULL THEN
-- send notification if boat is back online
IF metadata_active is False THEN
-- Add monitor online entry to process queue for later notification
INSERT INTO process_queue (channel, payload, stored, ref_id)
VALUES ('monitoring_online', metadata_id, now(), current_setting('vessel.id', true));
END IF;
-- Update vessel metadata
UPDATE api.metadata
SET
name = NEW.name,
mmsi = NEW.mmsi,
--client_id = NEW.client_id,
length = NEW.length,
beam = NEW.beam,
height = NEW.height,
ship_type = NEW.ship_type,
plugin_version = NEW.plugin_version,
signalk_version = NEW.signalk_version,
platform = REGEXP_REPLACE(NEW.platform, '[^a-zA-Z0-9\(\) ]', '', 'g'),
-- configuration = NEW.configuration, -- ignore configuration from vessel, it is manage by user
-- time = NEW.time, ignore the time sent by the vessel as it is out of sync sometimes.
time = NOW(), -- overwrite the time sent by the vessel
available_keys = NEW.available_keys,
active = true
WHERE id = metadata_id;
RETURN NULL; -- Ignore insert
ELSE
IF NEW.vessel_id IS NULL THEN
-- set vessel_id from jwt if not present in INSERT query
NEW.vessel_id := current_setting('vessel.id');
END IF;
-- Ignore and overwrite the time sent by the vessel
NEW.time := NOW();
-- Insert new vessel metadata
RETURN NEW; -- Insert new vessel metadata
END IF;
END;
$function$
;
COMMENT ON FUNCTION public.metadata_upsert_trigger_fn() IS 'process metadata from vessel, upsert';
-- Create or replace the function that will be executed by the trigger
-- Add metadata table trigger for update_metadata_configuration
CREATE OR REPLACE FUNCTION api.update_metadata_configuration()
RETURNS TRIGGER AS $$
BEGIN
-- Require Signalk plugin version 0.4.0
-- Update the configuration field with current date in ISO format
-- Using jsonb_set if configuration is already a JSONB field
IF NEW.configuration IS NOT NULL AND
jsonb_typeof(NEW.configuration) = 'object' THEN
NEW.configuration = jsonb_set(
NEW.configuration,
'{update_at}',
to_jsonb(to_char(NOW(), 'YYYY-MM-DD"T"HH24:MI:SS"Z"'))
);
END IF;
RETURN NEW;
END;
$$ LANGUAGE plpgsql;
COMMENT ON FUNCTION api.update_metadata_configuration() IS 'Update the configuration field with current date in ISO format';
-- Create the trigger
CREATE TRIGGER metadata_update_configuration_trigger
BEFORE UPDATE ON api.metadata
FOR EACH ROW
EXECUTE FUNCTION api.update_metadata_configuration();
-- Update api.export_logbooks_geojson_linestring_trips_fn, add metadata properties
CREATE OR REPLACE FUNCTION api.export_logbooks_geojson_linestring_trips_fn(
start_log integer DEFAULT NULL::integer,
end_log integer DEFAULT NULL::integer,
start_date text DEFAULT NULL::text,
end_date text DEFAULT NULL::text,
OUT geojson jsonb
) RETURNS jsonb
LANGUAGE plpgsql
AS $function$
DECLARE
logs_geojson jsonb;
BEGIN
-- Normalize start and end values
IF start_log IS NOT NULL AND end_log IS NULL THEN end_log := start_log; END IF;
IF start_date IS NOT NULL AND end_date IS NULL THEN end_date := start_date; END IF;
WITH logbook_data AS (
-- get the logbook geometry and metadata, an array for each log
SELECT id, name,
starttimestamp(trip),
endtimestamp(trip),
--speed(trip_sog),
duration(trip),
--length(trip) as length, -- Meters
(length(trip) * 0.0005399568)::numeric as distance, -- NM
twavg(trip_sog) as avg_sog,
maxValue(trip_sog) as max_sog,
maxValue(trip_depth) as max_depth, -- Depth
maxValue(trip_batt_charge) as max_batt_charge, -- Battery Charge
maxValue(trip_batt_voltage) as max_batt_voltage, -- Battery Voltage
maxValue(trip_temp_water) as max_temp_water, -- Temperature water
maxValue(trip_temp_out) as max_temp_out, -- Temperature outside
maxValue(trip_pres_out) as max_pres_out, -- Pressure outside
maxValue(trip_hum_out) as max_hum_out, -- Humidity outside
twavg(trip_depth) as avg_depth, -- Depth
twavg(trip_batt_charge) as avg_batt_charge, -- Battery Charge
twavg(trip_batt_voltage) as avg_batt_voltage, -- Battery Voltage
twavg(trip_temp_water) as avg_temp_water, -- Temperature water
twavg(trip_temp_out) as avg_temp_out, -- Temperature outside
twavg(trip_pres_out) as avg_pres_out, -- Pressure outside
twavg(trip_hum_out) as avg_hum_out, -- Humidity outside
trajectory(l.trip)::geometry as track_geog -- extract trip to geography
FROM api.logbook l
WHERE (start_log IS NULL OR l.id >= start_log) AND
(end_log IS NULL OR l.id <= end_log) AND
(start_date IS NULL OR l._from_time >= start_date::TIMESTAMPTZ) AND
(end_date IS NULL OR l._to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes') AND
l.trip IS NOT NULL
ORDER BY l._from_time ASC
),
collect as (
SELECT ST_Collect(
ARRAY(
SELECT track_geog FROM logbook_data))
)
-- Create the GeoJSON response
SELECT jsonb_build_object(
'type', 'FeatureCollection',
'features', json_agg(ST_AsGeoJSON(logs.*)::json)) INTO geojson FROM logbook_data logs;
END;
$function$;
-- Description
COMMENT ON FUNCTION api.export_logbooks_geojson_linestring_trips_fn IS 'Generate geojson geometry LineString from trip with the corresponding properties';
-- Add public.get_season, return the season based on the input date for logbook tag
CREATE OR REPLACE FUNCTION public.get_season(input_date TIMESTAMPTZ)
RETURNS TEXT AS $$
BEGIN
CASE
WHEN (EXTRACT(MONTH FROM input_date) = 3 AND EXTRACT(DAY FROM input_date) >= 1) OR
(EXTRACT(MONTH FROM input_date) BETWEEN 4 AND 5) THEN
RETURN 'Spring';
WHEN (EXTRACT(MONTH FROM input_date) = 6 AND EXTRACT(DAY FROM input_date) >= 1) OR
(EXTRACT(MONTH FROM input_date) BETWEEN 7 AND 8) THEN
RETURN 'Summer';
WHEN (EXTRACT(MONTH FROM input_date) = 9 AND EXTRACT(DAY FROM input_date) >= 1) OR
(EXTRACT(MONTH FROM input_date) BETWEEN 10 AND 11) THEN
RETURN 'Fall';
ELSE
RETURN 'Winter';
END CASE;
END;
$$ LANGUAGE plpgsql IMMUTABLE;
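-- Example usage (illustrative, not part of the migration):
--   SELECT public.get_season(now()); -- returns 'Spring', 'Summer', 'Fall' or 'Winter'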
-- Refresh permissions
GRANT SELECT ON TABLE api.metrics,api.metadata TO scheduler;
GRANT INSERT, UPDATE, SELECT ON TABLE api.logbook,api.moorages,api.stays TO scheduler;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO scheduler;
GRANT SELECT ON ALL TABLES IN SCHEMA public TO scheduler;
GRANT SELECT, UPDATE ON TABLE public.process_queue TO scheduler;
-- Update version
UPDATE public.app_settings
SET value='0.9.0'
WHERE "name"='app.version';

File diff suppressed because it is too large

View File

@@ -1 +1 @@
0.9.1

File diff suppressed because one or more lines are too long

View File

@@ -27,6 +27,7 @@ const metrics_aava = require('./metrics_sample_aava.json');
const fs = require('fs');
let configtime = new Date().toISOString();
// CNAMEs Array
[
@@ -39,7 +40,7 @@ const fs = require('fs');
vessel_metadata: {
name: "kapla",
mmsi: "123456789",
//client_id: "vessels.urn:mrn:signalk:uuid:5b4f7543-7153-4840-b139-761310b242fd",
length: "12",
beam: "10",
height: "24",
@@ -59,8 +60,8 @@ const fs = require('fs');
user_views: [
// not processed yet, { url: '/stays_view', res_body_length: 1},
// not processed yet, { url: '/moorages_view', res_body_length: 1},
{ url: '/logs_view', res_body_length: 0}, // not processed yet so empty
{ url: '/log_view', res_body_length: 0}, // not processed yet so empty
//{ url: '/stats_view', res_body_length: 1},
{ url: '/vessels_view', res_body_length: 1},
],
@@ -89,7 +90,7 @@ const fs = require('fs');
*/
],
user_fn: [
{ url: '/rpc/export_logbooks_geojson_point_trips_fn',
payload: {
start_log: 1
},
@@ -97,7 +98,7 @@ const fs = require('fs');
obj_name: 'geojson'
}
},
{ url: '/rpc/export_logbook_geojson_trip_fn',
payload: {
_id: 1
},
@@ -105,7 +106,15 @@ const fs = require('fs');
obj_name: 'geojson'
}
},
{ url: '/rpc/export_logbook_gpx_trip_fn',
payload: {
_id: 1
},
res: {
obj_name: null
}
},
{ url: '/rpc/export_logbook_kml_trip_fn',
payload: {
_id: 1
},
@@ -169,6 +178,18 @@ const fs = require('fs');
obj_name: 'settings'
}
}
],
config_fn: [
{ url: '/metadata?select=configuration',
res: {
obj_name: 'configuration'
}
},
{ url: `/metadata?select=configuration&configuration->>update_at=gt.${configtime}`,
res: {
obj_name: 'settings'
}
},
]
},
{ cname: process.env.PGSAIL_API_URI, name: "PostgSail unit test, aava",
@@ -179,7 +200,7 @@ const fs = require('fs');
vessel_metadata: {
name: "aava",
mmsi: "787654321",
//client_id: "vessels.urn:mrn:imo:mmsi:787654321",
length: "12",
beam: "10",
height: "24",
@@ -198,8 +219,8 @@ const fs = require('fs');
user_views: [
// not processed yet, { url: '/stays_view', res_body_length: 1},
// not processed yet, { url: '/moorages_view', res_body_length: 1},
{ url: '/logs_view', res_body_length: 0}, // not processed yet so empty
{ url: '/log_view', res_body_length: 0}, // not processed yet so empty
//{ url: '/stats_view', res_body_length: 1},
{ url: '/vessels_view', res_body_length: 1},
],
@@ -228,15 +249,15 @@ const fs = require('fs');
*/
],
user_fn: [
{ url: '/rpc/export_logbooks_geojson_point_trips_fn',
payload: {
start_log: 1
},
res: {
obj_name: 'geojson'
}
},
{ url: '/rpc/export_logbook_geojson_trip_fn',
payload: {
_id: 3
},
@@ -244,7 +265,15 @@ const fs = require('fs');
obj_name: 'geojson'
}
},
{ url: '/rpc/export_logbook_gpx_trip_fn',
payload: {
_id: 3
},
res: {
obj_name: null
}
},
{ url: '/rpc/export_logbook_kml_trip_fn',
payload: {
_id: 3
},
@@ -302,6 +331,18 @@ const fs = require('fs');
obj_name: 'settings'
}
},
],
config_fn: [
{ url: '/metadata?select=configuration',
res: {
obj_name: 'configuration'
}
},
{ url: `/metadata?select=configuration&configuration->>update_at=gt.${configtime}`,
res: {
obj_name: 'settings'
}
},
]
}
].forEach( function(test){
@@ -595,14 +636,15 @@ request.set('User-Agent', 'PostgSail unit tests');
describe("Vessel POST metrics, JWT vessel_role", function(){ describe("Vessel POST metrics, JWT vessel_role", function(){
let data = []; let data = [];
//console.log(vessel_metrics['metrics'][0]); //console.log(test.vessel_metrics['metrics'][0]);
let i; let i;
for (i = 0; i < test.vessel_metrics['metrics'].length; i++) { for (i = 0; i < test.vessel_metrics['metrics'].length; i++) {
data[i] = test.vessel_metrics['metrics'][i]; data[i] = test.vessel_metrics['metrics'][i];
// Override time, -2h to allow to new data later without delay. // Override time, -2h to allow to new data later without delay.
data[i]['time'] = moment.utc().subtract(1, 'day').add(i, 'minutes').format(); data[i]['time'] = moment.utc().subtract(1, 'day').add(i, 'minutes').format();
// Override client_id // Override client_id
data[i]['client_id'] = test.vessel_metadata.client_id; //data[i]['client_id'] = test.vessel_metadata.client_id;
data[i]['client_id'] = null;
} }
// The last entry are invalid and should be ignore. // The last entry are invalid and should be ignore.
// - Invalid status // - Invalid status
@@ -827,6 +869,37 @@ request.set('User-Agent', 'PostgSail unit tests');
}); // Function OTP endpoint
*/
describe("Function Metadata configuration endpoint, JWT vessel_role", function(){
let otp = null;
test.config_fn.forEach(function (subtest) {
it(`${subtest.url}`, function(done) {
try {
//console.log(`${subtest.url} ${subtest.res}`);
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.get(subtest.url)
.set('Authorization', `Bearer ${vessel_jwt}`)
.set('Accept', 'application/json')
.end(function(err,res){
res.status.should.equal(200);
should.exist(res.header['content-type']);
should.exist(res.header['server']);
res.header['content-type'].should.match(new RegExp('json','g'));
res.header['server'].should.match(new RegExp('postgrest','g'));
console.log(res.body);
should.exist(res.body);
done(err);
});
}
catch (error) {
done();
}
});
});
}); // Function metadata configuration endpoint
}); // OpenAPI description
}); // CNAMEs Array

View File

@@ -28,14 +28,15 @@ const metrics_simulator = require('./metrics_sample_simulator.json');
vessel_metadata: {
name: "aava",
mmsi: "787654321",
//client_id: "vessels.urn:mrn:imo:mmsi:787654321",
length: "12",
beam: "10",
height: "24",
ship_type: "37",
plugin_version: "1.0.2",
signalk_version: "1.20.0",
time: moment().subtract(69, 'minutes').format(),
available_keys: [],
},
vessel_metrics: metrics_simulator,
user_tables: [
@@ -48,7 +49,7 @@ const metrics_simulator = require('./metrics_sample_simulator.json');
// not processed yet, { url: '/stays_view', res_body_length: 1},
// not processed yet, { url: '/moorages_view', res_body_length: 1},
{ url: '/logs_view', res_body_length: 1},
{ url: '/log_view', res_body_length: 0}, // not processed yet so empty
//{ url: '/stats_view', res_body_length: 1},
{ url: '/vessels_view', res_body_length: 1},
],
@@ -77,7 +78,7 @@ const metrics_simulator = require('./metrics_sample_simulator.json');
*/
],
user_fn: [
{ url: '/rpc/export_logbooks_geojson_point_trips_fn',
payload: {
start_log: 4
},
@@ -85,7 +86,7 @@ const metrics_simulator = require('./metrics_sample_simulator.json');
obj_name: 'geojson'
}
},
{ url: '/rpc/export_logbook_geojson_trip_fn',
payload: {
_id: 4
},
@@ -93,7 +94,15 @@ const metrics_simulator = require('./metrics_sample_simulator.json');
obj_name: 'geojson'
}
},
{ url: '/rpc/export_logbook_gpx_trip_fn',
payload: {
_id: 4
},
res: {
obj_name: null
}
},
{ url: '/rpc/export_logbook_kml_trip_fn',
payload: {
_id: 4
},
@@ -414,8 +423,9 @@ request.set('User-Agent', 'PostgSail unit tests');
.set('Content-Type', 'application/json')
.set('Prefer', 'return=headers-only')
.end(function(err,res){
//console.log(res.body);
//console.log(res.header);
res.status.should.equal(201);
should.exist(res.header['server']);
res.header['server'].should.match(new RegExp('postgrest','g'));
done(err);
@@ -434,7 +444,8 @@ request.set('User-Agent', 'PostgSail unit tests');
// Override time, +1h because the previous sample includes 47 entries.
data[i]['time'] = moment.utc().subtract(2, 'hours').add(i, 'minutes').format();
// Override client_id
- data[i]['client_id'] = test.vessel_metadata.client_id;
+ //data[i]['client_id'] = test.vessel_metadata.client_id;
+ data[i]['client_id'] = null;
}
//console.log(data[0]);

View File

@@ -31,7 +31,7 @@ var moment = require('moment');
vessel_metadata: {
  name: "kapla",
  mmsi: "123456789",
- client_id: "vessels.urn:mrn:imo:mmsi:123456789",
+ //client_id: "vessels.urn:mrn:imo:mmsi:123456789",
  length: "12",
  beam: "10",
  height: "24",
@@ -79,7 +79,8 @@ var moment = require('moment');
}
],
user_fn: [
- { url: '/rpc/timelapse_fn',
+ { //url: '/rpc/timelapse_fn',
+   url: '/rpc/export_logbooks_geojson_linestring_trips_fn',
  payload: {
    start_log: 2
  },
@@ -87,7 +88,17 @@ var moment = require('moment');
  obj_name: 'geojson'
  }
},
- { url: '/rpc/export_logbook_geojson_fn',
+ { //url: '/rpc/timelapse_fn',
+   url: '/rpc/export_logbooks_geojson_point_trips_fn',
+   payload: {
+     start_log: 2
+   },
+   res: {
+     obj_name: 'geojson'
+   }
+ },
+ { //url: '/rpc/export_logbook_geojson_fn',
+   url: '/rpc/export_logbook_geojson_trip_fn',
  payload: {
    _id: 2
  },
@@ -95,7 +106,8 @@ var moment = require('moment');
  obj_name: 'geojson'
  }
},
- { url: '/rpc/export_logbook_gpx_fn',
+ { //url: '/rpc/export_logbook_gpx_fn',
+   url: '/rpc/export_logbook_kml_trip_fn',
  payload: {
    _id: 2
  },
@@ -103,7 +115,8 @@ var moment = require('moment');
  obj_name: null
  }
},
- { url: '/rpc/export_logbook_kml_fn',
+ { //url: '/rpc/export_logbook_kml_fn',
+   url: '/rpc/export_logbook_kml_trip_fn',
  payload: {
    _id: 2
  },
@@ -123,6 +136,12 @@ var moment = require('moment');
  obj_name: null
  }
},
+ { url: '/rpc/export_moorages_kml_fn',
+   payload: {},
+   res: {
+     obj_name: null
+   }
+ },
{ url: '/rpc/find_log_from_moorage_fn',
  payload: {
    _id: 2
@@ -230,7 +249,7 @@ var moment = require('moment');
vessel_metadata: {
  name: "aava",
  mmsi: "787654321",
- client_id: "vessels.urn:mrn:imo:mmsi:787654321",
+ //client_id: "vessels.urn:mrn:imo:mmsi:787654321",
  length: "12",
  beam: "10",
  height: "24",
@@ -277,7 +296,8 @@ var moment = require('moment');
}
],
user_fn: [
- { url: '/rpc/timelapse_fn',
+ { //url: '/rpc/timelapse_fn',
+   url: '/rpc/export_logbooks_geojson_linestring_trips_fn',
  payload: {
    start_log: 4
  },
@@ -285,7 +305,17 @@ var moment = require('moment');
  obj_name: 'geojson'
  }
},
- { url: '/rpc/export_logbook_geojson_fn',
+ { //url: '/rpc/timelapse_fn',
+   url: '/rpc/export_logbooks_geojson_point_trips_fn',
+   payload: {
+     start_log: 4
+   },
+   res: {
+     obj_name: 'geojson'
+   }
+ },
+ { //url: '/rpc/export_logbook_geojson_fn',
+   url: '/rpc/export_logbook_geojson_trip_fn',
  payload: {
    _id: 4
  },
@@ -293,7 +323,8 @@ var moment = require('moment');
  obj_name: 'geojson'
  }
},
- { url: '/rpc/export_logbook_gpx_fn',
+ { //url: '/rpc/export_logbook_gpx_fn',
+   url: '/rpc/export_logbook_gpx_trip_fn',
  payload: {
    _id: 4
  },
@@ -301,7 +332,8 @@ var moment = require('moment');
  obj_name: null
  }
},
- { url: '/rpc/export_logbook_kml_fn',
+ { //url: '/rpc/export_logbook_kml_fn',
+   url: '/rpc/export_logbook_kml_trip_fn',
  payload: {
    _id: 4
  },
@@ -309,7 +341,8 @@ var moment = require('moment');
  obj_name: null
  }
},
- { url: '/rpc/export_logbooks_gpx_fn',
+ { //url: '/rpc/export_logbooks_gpx_fn',
+   url: '/rpc/export_logbooks_kml_trips_fn',
  payload: {
    start_log: 3,
    end_log: 4
@@ -318,7 +351,8 @@ var moment = require('moment');
  obj_name: null
  }
},
- { url: '/rpc/export_logbooks_kml_fn',
+ { //url: '/rpc/export_logbooks_kml_fn',
+   url: '/rpc/export_logbooks_kml_trips_fn',
  payload: {
    start_log: 3,
    end_log: 4
@@ -339,6 +373,12 @@ var moment = require('moment');
  obj_name: null
  }
},
+ { url: '/rpc/export_moorages_kml_fn',
+   payload: {},
+   res: {
+     obj_name: null
+   }
+ },
{ url: '/rpc/find_log_from_moorage_fn',
  payload: {
    _id: 4

View File

@@ -163,6 +163,10 @@ var moment = require("moment");
url: "/rpc/update_user_preferences_fn", url: "/rpc/update_user_preferences_fn",
payload: { key: "{public_monitoring}", value: true }, payload: { key: "{public_monitoring}", value: true },
}, },
{
url: "/rpc/update_user_preferences_fn",
payload: { key: "{public_timelapse}", value: true },
},
], ],
}, },
{ {
@@ -685,7 +689,7 @@ var moment = require("moment");
let event = res.body;
//console.log(event);
// minimum events log for kapla & aava 13 + 4 email_otp = 17
- event.length.should.be.aboveOrEqual(13);
+ event.length.should.be.aboveOrEqual(11);
done(err);
});
});

View File

@@ -45,13 +45,22 @@ var moment = require("moment");
  res: {},
},
timelapse: {
- url: "/rpc/timelapse_fn",
+ //url: "/rpc/timelapse_fn",
+ url: '/rpc/export_logbooks_geojson_linestring_trips_fn',
  header: { name: "x-is-public", value: btoa("kapla,public_timelapse,1") },
  payload: null,
  res: {},
},
timelapse_full: {
- url: "/rpc/timelapse_fn",
+ //url: "/rpc/timelapse_fn",
+ url: '/rpc/export_logbooks_geojson_linestring_trips_fn',
+ header: { name: "x-is-public", value: btoa("kapla,public_timelapse,0") },
+ payload: null,
+ res: {},
+ },
+ replay_full: {
+ //url: "/rpc/timelapse_fn",
+ url: '/rpc/export_logbooks_geojson_point_trips_fn',
  header: { name: "x-is-public", value: btoa("kapla,public_timelapse,0") },
  payload: null,
  res: {},
@@ -69,7 +78,7 @@ var moment = require("moment");
  res: {},
},
export_gpx: {
- url: "/rpc/export_logbook_gpx_fn",
+ url: "/rpc/export_logbook_gpx_trip_fn",
  header: { name: "x-is-public", value: btoa("kapla,public_logs,0") },
  payload: null,
  res: {},
@@ -97,13 +106,21 @@ var moment = require("moment");
  res: {},
},
timelapse: {
- url: "/rpc/timelapse_fn",
+ //url: "/rpc/timelapse_fn",
+ url: '/rpc/export_logbooks_geojson_linestring_trips_fn',
  header: { name: "x-is-public", value: btoa("aava,public_timelapse,3") },
  payload: null,
  res: {},
},
timelapse_full: {
- url: "/rpc/timelapse_fn",
+ //url: "/rpc/timelapse_fn",
+ url: '/rpc/export_logbooks_geojson_linestring_trips_fn',
+ header: { name: "x-is-public", value: btoa("aava,public_timelapse,0") },
+ payload: null,
+ res: {},
+ },
+ replay_full: {
+ url: '/rpc/export_logbooks_geojson_point_trips_fn',
  header: { name: "x-is-public", value: btoa("aava,public_timelapse,0") },
  payload: null,
  res: {},
@@ -121,7 +138,7 @@ var moment = require("moment");
  res: {},
},
export_gpx: {
- url: "/rpc/export_logbook_gpx_fn",
+ url: "/rpc/export_logbook_gpx_trip_fn",
  header: { name: "x-is-public", value: btoa("aava,public_logs,0") },
  payload: null,
  res: {},
@@ -134,7 +151,7 @@ var moment = require("moment");
request.set("User-Agent", "PostgSail unit tests"); request.set("User-Agent", "PostgSail unit tests");
describe("With no JWT as api_anonymous", function () { describe("With no JWT as api_anonymous", function () {
it("/logs_view, api_anonymous no jwt token", function (done) { it("/logs_view, api_anonymous no jwt token, x-is-public header", function (done) {
// Reset agent so we do not save cookies // Reset agent so we do not save cookies
request = supertest.agent(test.cname); request = supertest.agent(test.cname);
request request
@@ -150,7 +167,7 @@ var moment = require("moment");
done(err);
});
});
- it("/log_view, api_anonymous no jwt token", function (done) {
+ it("/log_view, api_anonymous no jwt token, x-is-public header", function (done) {
  // Reset agent so we do not save cookies
  request = supertest.agent(test.cname);
  request
@@ -166,7 +183,7 @@ var moment = require("moment");
done(err);
});
});
- it("/monitoring_view, api_anonymous no jwt token", function (done) {
+ it("/monitoring_view, api_anonymous no jwt token, x-is-public header", function (done) {
  // Reset agent so we do not save cookies
  request = supertest.agent(test.cname);
  request
@@ -183,7 +200,7 @@ var moment = require("moment");
done(err);
});
});
- it("/rpc/timelapse_fn, api_anonymous no jwt token", function (done) {
+ it("/rpc/export_logbooks_geojson_linestring_trips_fn, api_anonymous no jwt token, x-is-public header", function (done) {
  // Reset agent so we do not save cookies
  request = supertest.agent(test.cname);
  request
@@ -197,6 +214,39 @@ var moment = require("moment");
should.exist(res.header["server"]); should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g")); res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g")); res.header["server"].should.match(new RegExp("postgrest", "g"));
should.exist(res.body.geojson);
/*
if (res.body.geojson.features == null) { // aava
//res.body.geojson.features.should.not.be.ok();
done(err);
}
res.body.geojson.features.length.should.be.equal(4);
*/
done(err);
});
});
it("/rpc/export_logbooks_geojson_point_trips_fn, api_anonymous no jwt token, x-is-public header", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.post(test.replay_full.url)
.set(test.replay_full.header.name, test.replay_full.header.value)
.set("Accept", "application/json")
.end(function (err, res) {
console.log(res.text);
res.status.should.equal(200); // return 404 as it is not enable in user settings.
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
should.exist(res.body.geojson);
/*
if (res.body.geojson.features == null) { // aava
//res.body.geojson.features.should.not.be.ok();
done(err);
}
res.body.geojson.features.length.should.be.equal(53);
*/
done(err); done(err);
}); });
}); });
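For reference, the x-is-public value exercised above is nothing more than a base64-encoded vessel,setting[,id] tuple. A minimal sketch of inspecting one with plain PostgreSQL follows; the encoded string is btoa("kapla,public_timelapse,1") from the fixture, and whether public.check_jwt answers it via api.ispublic_fn is an assumption here:

-- Decode the x-is-public payload the anonymous tests send
SELECT convert_from(decode('a2FwbGEscHVibGljX3RpbWVsYXBzZSwx', 'base64'), 'UTF8');
-- -> kapla,public_timelapse,1
-- The same visibility question, asked directly as in the ispublic_fn SQL tests
SELECT api.ispublic_fn('kapla', 'public_timelapse', 1);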

View File

@@ -52,12 +52,17 @@ var moment = require("moment");
  res: {},
},
timelapse: {
- url: "/rpc/timelapse_fn",
+ url: "/rpc/export_logbooks_geojson_linestring_trips_fn",
  payload: null,
  res: {},
},
timelapse_full: {
- url: "/rpc/timelapse_fn",
+ url: "/rpc/export_logbooks_geojson_linestring_trips_fn",
+ payload: null,
+ res: {},
+ },
+ replay_full: {
+ url: "/rpc/export_logbooks_geojson_point_trips_fn",
  payload: null,
  res: {},
},
@@ -72,7 +77,7 @@ var moment = require("moment");
  res: {},
},
export_gpx: {
- url: "/rpc/export_logbook_gpx_fn",
+ url: "/rpc/export_logbook_gpx_trip_fn",
  payload: null,
  res: {},
},
@@ -165,7 +170,7 @@ var moment = require("moment");
done(err);
});
});
- it("/rpc/timelapse_fn, api_anonymous no jwt token", function (done) {
+ it("/rpc/export_logbooks_geojson_linestring_trips_fn, api_anonymous no jwt token", function (done) {
  // Reset agent so we do not save cookies
  request = supertest.agent(test.cname);
  request
@@ -178,10 +183,28 @@ var moment = require("moment");
should.exist(res.header["server"]); should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g")); res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g")); res.header["server"].should.match(new RegExp("postgrest", "g"));
should.exist(res.body.geojson);
done(err); done(err);
}); });
}); });
it("/rpc/export_logbook_gpx_fn, api_anonymous no jwt token", function (done) { it("/rpc/export_logbooks_geojson_point_trips_fn, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies
request = supertest.agent(test.cname);
request
.post(test.replay_full.url)
.set("Accept", "application/json")
.end(function (err, res) {
console.log(res.body);
res.status.should.equal(200);
should.exist(res.header["content-type"]);
should.exist(res.header["server"]);
res.header["content-type"].should.match(new RegExp("json", "g"));
res.header["server"].should.match(new RegExp("postgrest", "g"));
should.exist(res.body.geojson);
done(err);
});
});
it("/rpc/export_logbook_gpx_trip_fn, api_anonymous no jwt token", function (done) {
// Reset agent so we do not save cookies // Reset agent so we do not save cookies
request = supertest.agent(test.cname); request = supertest.agent(test.cname);
request request

View File

@@ -8,5 +8,8 @@
"moment": "^2.29.4", "moment": "^2.29.4",
"should": "^13.2.3", "should": "^13.2.3",
"supertest": "^6.3.3" "supertest": "^6.3.3"
},
"devDependencies": {
"schemalint": "^2.0.5"
} }
} }

View File

@@ -18,3 +18,10 @@ SELECT api.ispublic_fn('kapla', 'public_logs', 1);
SELECT api.ispublic_fn('kapla', 'public_logs', 3);
SELECT api.ispublic_fn('kapla', 'public_monitoring');
SELECT api.ispublic_fn('kapla', 'public_timelapse');
+ SELECT api.ispublic_fn('aava', 'public_test');
+ SELECT api.ispublic_fn('aava', 'public_logs_list');
+ SELECT api.ispublic_fn('aava', 'public_logs', 1);
+ SELECT api.ispublic_fn('aava', 'public_logs', 3);
+ SELECT api.ispublic_fn('aava', 'public_monitoring');
+ SELECT api.ispublic_fn('aava', 'public_timelapse');

View File

@@ -21,6 +21,24 @@ ispublic_fn | f
-[ RECORD 1 ]--
ispublic_fn | t
+ -[ RECORD 1 ]--
+ ispublic_fn | t
+ -[ RECORD 1 ]--
+ ispublic_fn | f
+ -[ RECORD 1 ]--
+ ispublic_fn | f
+ -[ RECORD 1 ]--
+ ispublic_fn | f
+ -[ RECORD 1 ]--
+ ispublic_fn | t
+ -[ RECORD 1 ]--
+ ispublic_fn | t
-[ RECORD 1 ]--
ispublic_fn | f

View File

@@ -17,10 +17,16 @@ SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
\echo 'Insert new api.logbook for badges'
INSERT INTO api.logbook
- (id, active, "name", "_from", "_from_lat", "_from_lng", "_to", "_to_lat", "_to_lng", track_geom, track_geog, track_geojson, "_from_time", "_to_time", distance, duration, avg_speed, max_speed, max_wind_speed, notes, vessel_id)
+ (id, active, "name", "_from", "_from_lat", "_from_lng", "_to", "_to_lat", "_to_lng", trip, "_from_time", "_to_time", distance, duration, avg_speed, max_speed, max_wind_speed, notes, vessel_id)
OVERRIDING SYSTEM VALUE VALUES
- (nextval('api.logbook_id_seq'), false, 'Tropics Zone', NULL, NULL, NULL, NULL, NULL, NULL, 'SRID=4326;LINESTRING (-63.151124640791096 14.01074681627324, -77.0912026418618 12.870995731013664)'::public.geometry, NULL, NULL, NOW(), NOW(), 123, NULL, NULL, NULL, NULL, NULL, current_setting('vessel.id', false)),
+ (nextval('api.logbook_id_seq'), false, 'Tropics Zone', NULL, NULL, NULL, NULL, NULL, NULL, 'SRID=4326;[Point(-63.151124640791096 14.01074681627324)@2025-01-01, Point(-77.0912026418618 12.870995731013664)@2025-01-02]'::public.tgeogpoint, NOW(), NOW(), 123, NULL, NULL, NULL, NULL, NULL, current_setting('vessel.id', false)),
- (nextval('api.logbook_id_seq'), false, 'Alaska Zone', NULL, NULL, NULL, NULL, NULL, NULL, 'SRID=4326;LINESTRING (-143.5773697471158 59.4404631255976, -152.35402122385003 56.58243132943173)'::public.geometry, NULL, NULL, NOW(), NOW(), 1234, NULL, NULL, NULL, NULL, NULL, current_setting('vessel.id', false));
+ (nextval('api.logbook_id_seq'), false, 'Alaska Zone', NULL, NULL, NULL, NULL, NULL, NULL, 'SRID=4326;[Point(-143.5773697471158 59.4404631255976)@2025-01-01, Point(-152.35402122385003 56.58243132943173)@2025-01-02]'::public.tgeogpoint, NOW(), NOW(), 1234, NULL, NULL, NULL, NULL, NULL, current_setting('vessel.id', false));
+ -- Transform static geometry LINESTRING to mobilitydb
+ -- 'SRID=4326;LINESTRING (-63.151124640791096 14.01074681627324, -77.0912026418618 12.870995731013664)'::public.geometry
+ -- 'SRID=4326;LINESTRING (-143.5773697471158 59.4404631255976, -152.35402122385003 56.58243132943173)'::public.geometry
+ --SELECT ST_AsGeoJSON('SRID=4326;LINESTRING (-63.151124640791096 14.01074681627324, -77.0912026418618 12.870995731013664)'::public.geometry);
+ --SELECT ST_AsGeoJSON(trajectory('SRID=4326;[Point(-63.151124640791096 14.01074681627324)@2025-01-01, Point(-77.0912026418618 12.870995731013664)@2025-01-02]'::public.tgeogpoint));
\echo 'Set config'
SELECT set_config('user.email', 'demo+kapla@openplotter.cloud', false);
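The commented lines above sketch the LINESTRING-to-MobilityDB migration; as a rough illustration (the timestamps are arbitrary, and trajectory() comes from MobilityDB), the round trip back to a plain geography looks like this:

-- A tgeogpoint sequence is the old LINESTRING with a timestamp attached to each vertex;
-- trajectory() projects it back to PostGIS for ordinary spatial checks.
SELECT trajectory('SRID=4326;[Point(-63.151124640791096 14.01074681627324)@2025-01-01, Point(-77.0912026418618 12.870995731013664)@2025-01-02]'::public.tgeogpoint);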
@@ -51,10 +57,10 @@ SELECT
\echo 'Insert new api.moorages for badges'
INSERT INTO api.moorages
- (id,"name",country,stay_code,stay_duration,reference_count,latitude,longitude,geog,home_flag,notes,vessel_id)
+ (id,"name",country,stay_code,latitude,longitude,geog,home_flag,notes,vessel_id)
OVERRIDING SYSTEM VALUE VALUES
- (8,'Badge Mooring Pro',NULL,3,'11 days 00:39:56.418',1,NULL,NULL,NULL,false,'Badge Mooring Pro',current_setting('vessel.id', false)),
+ (8,'Badge Mooring Pro',NULL,3,NULL,NULL,NULL,false,'Badge Mooring Pro',current_setting('vessel.id', false)),
- (9,'Badge Anchormaster',NULL,2,'26 days 00:49:56.418',1,NULL,NULL,NULL,false,'Badge Anchormaster',current_setting('vessel.id', false));
+ (9,'Badge Anchormaster',NULL,2,NULL,NULL,NULL,false,'Badge Anchormaster',current_setting('vessel.id', false));
\echo 'Set config'
SELECT set_config('user.email', 'demo+aava@openplotter.cloud', false);

View File

@@ -25,7 +25,8 @@ SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
\echo 'logbook'
SELECT count(*) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
\echo 'logbook'
- SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, trajectory(trip)::geometry as track_geom, distance,duration,round(avg_speed::NUMERIC,6),max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
+ --SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, trajectory(trip)::geometry as track_geom, distance,duration,round(avg_speed::NUMERIC,6),max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
+ SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, api.export_logbook_geojson_trip_fn(id) IS NOT NULL AS track_geojson, trajectory(trip)::geometry as track_geom, distance,duration,round(avg_speed::NUMERIC,6),max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
-- Test stays for user
\echo 'stays'
@@ -69,17 +70,28 @@ SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('ve
SELECT api.update_logbook_observations_fn(1, '{"tags": ["tag_name"]}'::TEXT);
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);
- \echo 'Check numbers of geojson properties'
+ \echo 'Check logbook geojson LineString properties'
- SELECT jsonb_object_keys(jsonb_path_query(track_geojson, '$.features[0].properties'))
-     FROM api.logbook where id = 1 AND vessel_id = current_setting('vessel.id', false);
- SELECT jsonb_object_keys(jsonb_path_query(track_geojson, '$.features[1].properties'))
-     FROM api.logbook where id = 1 AND vessel_id = current_setting('vessel.id', false);
+ WITH logbook_tbl AS (
+     SELECT api.logbook_update_geojson_trip_fn(id) AS geojson
+         FROM api.logbook WHERE id = 1 AND vessel_id = current_setting('vessel.id', false)
+ )
+ SELECT jsonb_object_keys(jsonb_path_query(geojson, '$.features[0].properties'))
+     FROM logbook_tbl;
+ \echo 'Check logbook geojson Point properties'
+ WITH logbook_tbl AS (
+     SELECT api.logbook_update_geojson_trip_fn(id) AS geojson
+         FROM api.logbook WHERE id = 1 AND vessel_id = current_setting('vessel.id', false)
+ )
+ SELECT jsonb_object_keys(jsonb_path_query(geojson, '$.features[1].properties'))
+     FROM logbook_tbl;
-- Check export
- --\echo 'check logbook export fn'
+ \echo 'Check logbook export fn'
--SELECT api.export_logbook_geojson_fn(1);
--SELECT api.export_logbook_gpx_fn(1);
--SELECT api.export_logbook_kml_fn(1);
+ SELECT api.export_logbook_gpx_trip_fn(1) IS NOT NULL AS gpx_trip;
+ SELECT api.export_logbook_kml_trip_fn(1) IS NOT NULL AS kml_trip;
-- Check history
--\echo 'monitoring history fn'

View File

@@ -17,7 +17,7 @@ count | 2
logbook
-[ RECORD 1 ]--+--------------------------------------------------------------------------------
- name | Pojoviken to Norra hamnen
+ name | Pojoviken to Formanshagen
_from_time | t
_to_time | t
track_geojson | t
@@ -30,7 +30,7 @@ max_wind_speed | 22.1
notes |
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 14.549999999999999}
-[ RECORD 2 ]--+--------------------------------------------------------------------------------
- name | Norra hamnen to Ekenäs
+ name | Formanshagen to Ekenäs
_from_time | t
_to_time | t
track_geojson | t
@@ -71,7 +71,7 @@ count | 11
stats_logs_fn
SELECT 1
-[ RECORD 1 ]+----------
- name | "kapla"
+ name | "aava"
count | 4
max_speed | 9.5
max_distance | 68.8677
@@ -110,7 +110,7 @@ update_logbook_observations_fn | t
-[ RECORD 1 ]--------------------------------------------------------------------------------
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
- Check numbers of geojson properties
+ Check logbook geojson LineString properties
-[ RECORD 1 ]-----+-----------------
jsonb_object_keys | id
-[ RECORD 2 ]-----+-----------------
@@ -144,6 +144,7 @@ jsonb_object_keys | max_wind_speed
-[ RECORD 16 ]----+-----------------
jsonb_object_keys | _from_moorage_id
+ Check logbook geojson Point properties
-[ RECORD 1 ]-----+-------
jsonb_object_keys | cog
-[ RECORD 2 ]-----+-------
@@ -163,3 +164,10 @@ jsonb_object_keys | notes
-[ RECORD 9 ]-----+-------
jsonb_object_keys | status
+ Check logbook export fn
+ -[ RECORD 1 ]
+ gpx_trip | t
+ -[ RECORD 1 ]
+ kml_trip | t

View File

@@ -20,7 +20,7 @@ SELECT current_user, current_setting('user.email', true), current_setting('vesse
--SELECT a.pass,v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.email = 'demo+kapla@openplotter.cloud' AND a.role = 'user_role' AND cast(a.preferences->>'email_valid' as Boolean) = True AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id;
--SELECT a.pass,v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.email = 'demo+kapla@openplotter.cloud' AND a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id;
\echo 'link vessel and user based on current_setting'
- SELECT v.name,m.client_id FROM auth.accounts a JOIN auth.vessels v ON a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id ORDER BY a.id DESC;
+ SELECT v.name, m.vessel_id IS NOT NULL AS vessel_id FROM auth.accounts a JOIN auth.vessels v ON a.role = 'user_role' AND v.owner_email = a.email JOIN api.metadata m ON m.vessel_id = v.vessel_id ORDER BY a.id DESC;
\echo 'auth.accounts details'
SELECT a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT NULL AS pass, a.role, a.preferences->'telegram'->'chat' AS telegram, a.preferences->'pushover_user_key' AS pushover_user_key FROM auth.accounts AS a ORDER BY a.id DESC;
@@ -29,7 +29,7 @@ SELECT a.user_id IS NOT NULL AS user_id, a.email, a.first, a.last, a.pass IS NOT
SELECT v.vessel_id IS NOT NULL AS vessel_id, v.owner_email, v.mmsi, v.name, v.role FROM auth.vessels AS v;
\echo 'api.metadata details'
--
- SELECT m.id, m.name, m.mmsi, m.client_id, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active FROM api.metadata AS m;
+ SELECT m.id, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration IS NOT NULL AS configuration, available_keys FROM api.metadata AS m ORDER BY m.name DESC;
--
-- grafana
@@ -48,14 +48,14 @@ SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
--SELECT current_user, current_setting('user.email', true), current_setting('vessel.client_id', true), current_setting('vessel.id', true);
SELECT current_user, current_setting('user.email', true);
- SELECT v.name AS __text, m.client_id AS __value FROM auth.vessels v JOIN api.metadata m ON v.owner_email = 'demo+kapla@openplotter.cloud' and m.vessel_id = v.vessel_id;
+ SELECT v.name AS __text, m.vessel_id IS NOT NULL AS __value FROM auth.vessels v JOIN api.metadata m ON v.owner_email = 'demo+kapla@openplotter.cloud' and m.vessel_id = v.vessel_id;
\echo 'auth.vessels details'
--SELECT * FROM auth.vessels v;
SELECT v.vessel_id IS NOT NULL AS vessel_id, v.owner_email, v.mmsi, v.name, v.role FROM auth.vessels AS v;
--SELECT * FROM api.metadata m;
\echo 'api.metadata details'
- SELECT m.id, m.name, m.mmsi, m.client_id, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active FROM api.metadata AS m;
+ SELECT m.id, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration IS NOT NULL AS configuration, available_keys FROM api.metadata AS m;
\echo 'api.logs_view'
--SELECT * FROM api.logbook l;
@@ -67,13 +67,18 @@ SELECT l.id, l.name, l.from, l.to, l.distance, l.duration, l._from_moorage_id, l
--SELECT * FROM api.stays s;
SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.moorage_id, m.active, m.name IS NOT NULL AS name, m.latitude, m.longitude, m.geog, m.arrived IS NOT NULL AS arrived, m.departed IS NOT NULL AS departed, m.duration, m.stay_code, m.notes FROM api.stays AS m;
- \echo 'stays_view'
+ \echo 'api.stays_view'
--SELECT * FROM api.stays_view s;
SELECT m.id, m.name IS NOT NULL AS name, m.moorage, m.moorage_id, m.duration, m.stayed_at, m.stayed_at_id, m.arrived IS NOT NULL AS arrived, m.departed IS NOT NULL AS departed, m.notes FROM api.stays_view AS m;
\echo 'api.moorages'
--SELECT * FROM api.moorages m;
- SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.name, m.country, m.stay_code, m.stay_duration, m.reference_count, m.latitude, m.longitude, m.geog, m.home_flag, m.notes FROM api.moorages AS m;
+ --SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.name, m.country, m.stay_code, m.stay_duration, m.reference_count, m.latitude, m.longitude, m.geog, m.home_flag, m.notes FROM api.moorages AS m;
+ SELECT m.id, m.vessel_id IS NOT NULL AS vessel_id, m.name, m.country, m.stay_code, m.latitude, m.longitude, m.geog, m.home_flag, m.notes FROM api.moorages AS m;
\echo 'api.moorages_view'
- SELECT * FROM api.moorages_view s;
+ SELECT * FROM api.moorages_view;
+ \echo 'api.moorage_view'
+ --SELECT * FROM api.moorage_view;
+ SELECT m.id, m.name, default_stay, m.latitude, m.longitude, m.geog, m.home, m.notes, logs_count, stays_count, stay_first_seen_id, stay_last_seen_id, stay_first_seen IS NOT NULL AS stay_first_seen, stay_last_seen IS NOT NULL AS stay_last_seen FROM api.moorage_view m;

View File

@@ -13,12 +13,12 @@ current_setting |
current_setting |
link vessel and user based on current_setting
-[ RECORD 1 ]----
name | aava
- client_id | vessels.urn:mrn:imo:mmsi:787654321
+ vessel_id | t
-[ RECORD 2 ]----
name | kapla
- client_id | vessels.urn:mrn:signalk:uuid:5b4f7543-7153-4840-b139-761310b242fd
+ vessel_id | t
auth.accounts details
-[ RECORD 1 ]-----+-----------------------------
@@ -55,11 +55,10 @@ name | aava
role | vessel_role
api.metadata details
-[ RECORD 1 ]---+----------------
id | 1
name | kapla
mmsi | 123456789
- client_id | vessels.urn:mrn:signalk:uuid:5b4f7543-7153-4840-b139-761310b242fd
length | 12
beam | 10
height | 24
@@ -68,11 +67,12 @@ plugin_version | 0.0.1
signalk_version | signalk_version
time | t
active | t
+ configuration | t
+ available_keys |
-[ RECORD 2 ]---+----------------
id | 2
name | aava
mmsi | 787654321
- client_id | vessels.urn:mrn:imo:mmsi:787654321
length | 12
beam | 10
height | 24
@@ -81,6 +81,8 @@ plugin_version | 1.0.2
signalk_version | 1.20.0
time | t
active | t
+ configuration | f
+ available_keys | []
SET
- ROLE grafana
+ current_setting
@@ -93,9 +95,9 @@ vessel_id | t
current_user | grafana
current_setting | demo+kapla@openplotter.cloud
-[ RECORD 1 ]--
__text | kapla
- __value | vessels.urn:mrn:signalk:uuid:5b4f7543-7153-4840-b139-761310b242fd
+ __value | t
auth.vessels details
-[ RECORD 1 ]-----------------------------
@@ -106,11 +108,10 @@ name | kapla
role | vessel_role
api.metadata details
-[ RECORD 1 ]---+----------------
id | 1
name | kapla
mmsi | 123456789
- client_id | vessels.urn:mrn:signalk:uuid:5b4f7543-7153-4840-b139-761310b242fd
length | 12
beam | 10
height | 24
@@ -119,12 +120,14 @@ plugin_version | 0.0.1
signalk_version | signalk_version
time | t
active | t
+ configuration | t
+ available_keys |
api.logs_view
-[ RECORD 1 ]----+-----------------------
id | 2
- name | Norra hamnen to Ekenäs
+ name | Formanshagen to Ekenäs
- from | Norra hamnen
+ from | Formanshagen
to | Ekenäs
distance | 8.8968
duration | PT20M
@@ -134,7 +137,7 @@ _to_moorage_id | 3
id | 1
name | patch log name 3
from | patch moorage name 3
- to | Norra hamnen
+ to | Formanshagen
distance | 7.6447
duration | PT27M
_from_moorage_id | 1
@@ -184,11 +187,11 @@ duration | PT2M
stay_code | 4
notes |
- stays_view
+ api.stays_view
-[ RECORD 1 ]+---------------------
id | 2
name | t
- moorage | Norra hamnen
+ moorage | Formanshagen
moorage_id | 2
duration | PT2M
stayed_at | Dock
@@ -209,40 +212,34 @@ departed | t
notes | new stay note 3
api.moorages
-[ RECORD 1 ]-------------------------------------------------
id | 1
vessel_id | t
name | patch moorage name 3
country | fi
stay_code | 2
- stay_duration | PT1M
- reference_count | 1
latitude | 60.0776666666667
longitude | 23.5308666666667
geog | 0101000020E6100000B9DEBBE0E687374052A938FBF0094E40
home_flag | t
notes | new moorage note 3
-[ RECORD 2 ]-------------------------------------------------
id | 2
vessel_id | t
- name | Norra hamnen
+ name | Formanshagen
country | fi
stay_code | 4
- stay_duration | PT2M
- reference_count | 2
latitude | 59.9768833333333
longitude | 23.4321
geog | 0101000020E6100000029A081B9E6E3740455658830AFD4D40
home_flag | f
notes |
-[ RECORD 3 ]-------------------------------------------------
id | 3
vessel_id | t
name | Ekenäs
country | fi
stay_code | 1
- stay_duration | PT0S
- reference_count | 1
latitude | 59.86
longitude | 23.3657666666667
geog | 0101000020E6100000E84C5FE2A25D3740AE47E17A14EE4D40
@@ -252,7 +249,7 @@ notes |
api.moorages_view
-[ RECORD 1 ]-------+---------------------
id | 2
- moorage | Norra hamnen
+ moorage | Formanshagen
default_stay | Dock
default_stay_id | 4
arrivals_departures | 2
@@ -272,3 +269,50 @@ default_stay_id | 1
arrivals_departures | 1
total_duration | PT0S
+ api.moorage_view
+ -[ RECORD 1 ]------+---------------------------------------------------
+ id | 3
+ name | Ekenäs
+ default_stay | Unknown
+ latitude | 59.86
+ longitude | 23.3657666666667
+ geog | 0101000020E6100000E84C5FE2A25D3740AE47E17A14EE4D40
+ home | f
+ notes |
+ logs_count | 1
+ stays_count | 0
+ stay_first_seen_id |
+ stay_last_seen_id |
+ stay_first_seen | f
+ stay_last_seen | f
+ -[ RECORD 2 ]------+---------------------------------------------------
+ id | 2
+ name | Formanshagen
+ default_stay | Dock
+ latitude | 59.9768833333333
+ longitude | 23.4321
+ geog | 0101000020E6100000029A081B9E6E3740455658830AFD4D40
+ home | f
+ notes |
+ logs_count | 2
+ stays_count | 1
+ stay_first_seen_id | 2
+ stay_last_seen_id | 2
+ stay_first_seen | t
+ stay_last_seen | t
+ -[ RECORD 3 ]------+---------------------------------------------------
+ id | 1
+ name | patch moorage name 3
+ default_stay | Anchor
+ latitude | 60.0776666666667
+ longitude | 23.5308666666667
+ geog | 0101000020E6100000B9DEBBE0E687374052A938FBF0094E40
+ home | t
+ notes | new moorage note 3
+ logs_count | 1
+ stays_count | 1
+ stay_first_seen_id | 1
+ stay_last_seen_id | 1
+ stay_first_seen | t
+ stay_last_seen | t

View File

@@ -26,7 +26,8 @@ SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
\echo 'logbook'
SELECT count(*) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
\echo 'logbook'
- SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
+ -- track_geom and track_geojson are now dynamic from mobilitydb
+ SELECT name,_from_time IS NOT NULL AS _from_time, _to_time IS NOT NULL AS _to_time, trajectory(trip) AS track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false) ORDER BY id ASC;
-- Delete logbook for user
\echo 'Delete logbook for user kapla'

View File

@@ -21,7 +21,6 @@ logbook
name | patch log name 3
_from_time | t
_to_time | t
- track_geojson | t
track_geom | 0102000020E61000001A000000B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
distance | 7.6447
duration | PT27M
@@ -31,10 +30,9 @@ max_wind_speed | 22.1
notes | new log note 3
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
-[ RECORD 2 ]--+--------------------------------------------------------------------------------
- name | Norra hamnen to Ekenäs
+ name | Formanshagen to Ekenäs
_from_time | t
_to_time | t
- track_geojson | t
track_geom | 0102000020E610000013000000029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
distance | 8.8968
duration | PT20M
@@ -47,7 +45,6 @@ extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations
name | Tropics Zone
_from_time | t
_to_time | t
- track_geojson | f
track_geom | 0102000020E610000002000000A4E85E0D58934FC000DC509B80052C40BC069B43D64553C090510727F3BD2940
distance | 123
duration |
@@ -60,7 +57,6 @@ extra |
name | Alaska Zone
_from_time | t
_to_time | t
- track_geojson | f
track_geom | 0102000020E610000002000000FDB11ED079F261C090C47F1861B84D40D3505124540B63C09C091C1C8D4A4C40
distance | 1234
duration |

tests/sql/metadata.sql (new file, 32 lines)
View File

@@ -0,0 +1,32 @@
---------------------------------------------------------------------------
-- Listing
--
-- List current database
select current_database();
-- connect to the DB
\c signalk
-- output display format
\x on
SELECT v.vessel_id as "vessel_id" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
--\echo :"vessel_id"
SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
--SELECT * FROM api.metadata m;
\echo 'api.metadata details'
SELECT m.id, m.name, m.mmsi, m.length, m.beam, m.height, m.ship_type, m.plugin_version, m.signalk_version, m.time IS NOT NULL AS time, m.active, configuration, available_keys FROM api.metadata AS m ORDER BY m.name ASC;
\echo 'api.metadata get configuration'
select configuration from api.metadata WHERE vessel_id = current_setting('vessel.id', false);
\echo 'api.metadata update configuration'
UPDATE api.metadata SET configuration = '{ "depthKey": "environment.depth.belowTransducer" }' WHERE vessel_id = current_setting('vessel.id', false);
\echo 'api.metadata get configuration with new value'
select configuration->'depthKey' AS depthKey, configuration->'update_at' IS NOT NULL AS update_at from api.metadata WHERE vessel_id = current_setting('vessel.id', false);
\echo 'api.metadata get configuration based on update_at value'
select configuration->'depthKey' AS depthKey, configuration->'update_at' IS NOT NULL AS update_at from api.metadata WHERE vessel_id = current_setting('vessel.id', false) AND configuration->>'update_at' <= to_char(NOW(), 'YYYY-MM-DD"T"HH24:MI:SS"Z"');
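As a side note, a hedged sketch of how a writer could merge a key into the configuration JSONB while stamping update_at itself; the windKey name is hypothetical, and the to_char format is the one used in the query above:

UPDATE api.metadata
    SET configuration = COALESCE(configuration, '{}'::jsonb)
        || jsonb_build_object('windKey', 'environment.wind.speedApparent',
                              'update_at', to_char(NOW(), 'YYYY-MM-DD"T"HH24:MI:SS"Z"'))
    WHERE vessel_id = current_setting('vessel.id', false);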

View File

@@ -0,0 +1,56 @@
current_database
------------------
signalk
(1 row)
You are now connected to database "signalk" as user "username".
Expanded display is on.
-[ RECORD 1 ]
vessel_id | t
api.metadata details
-[ RECORD 1 ]---+----------------
id | 2
name | aava
mmsi | 787654321
length | 12
beam | 10
height | 24
ship_type | 37
plugin_version | 1.0.2
signalk_version | 1.20.0
time | t
active | t
configuration |
available_keys | []
-[ RECORD 2 ]---+----------------
id | 1
name | kapla
mmsi | 123456789
length | 12
beam | 10
height | 24
ship_type | 36
plugin_version | 0.0.1
signalk_version | signalk_version
time | t
active | t
configuration |
available_keys |
api.metadata get configuration
-[ RECORD 1 ]-+-
configuration |
api.metadata update configuration
UPDATE 1
api.metadata get configuration with new value
-[ RECORD 1 ]----------------------------------
depthkey | "environment.depth.belowTransducer"
update_at | t
api.metadata get configuration based on update_at value
-[ RECORD 1 ]----------------------------------
depthkey | "environment.depth.belowTransducer"
update_at | t

View File

@@ -23,14 +23,14 @@ SELECT set_config('vessel.id', :'vessel_id_aava', false) IS NOT NULL as vessel_i
-- Update notes
\echo 'Add a note for an entry from a trip'
-- Get original value, should be empty
- SELECT numInstants(trip), valueAtTimestamp(trip_notes,timestampN(trip,14)) from api.logbook where id = 3;
+ SELECT numInstants(trip), valueAtTimestamp(trip_notes,timestampN(trip,13)) from api.logbook where id = 3;
-- Create the string
- SELECT concat('["fishing"@', timestampN(trip,14),',""@',timestampN(trip,15),']') as to_be_update FROM api.logbook where id = 3 \gset
+ SELECT concat('["fishing"@', timestampN(trip,13),',""@',timestampN(trip,14),']') as to_be_update FROM api.logbook where id = 3 \gset
--\echo :to_be_update
-- Update the notes
SELECT api.update_trip_notes_fn(3, :'to_be_update');
-- Compare with previous value, should include "fishing"
- SELECT valueAtTimestamp(trip_notes,timestampN(trip,14)) from api.logbook where id = 3;
+ SELECT valueAtTimestamp(trip_notes,timestampN(trip,13)) from api.logbook where id = 3;
-- Delete notes
\echo 'Delete an entry from a trip'
@@ -38,7 +38,7 @@ SELECT valueAtTimestamp(trip_notes,timestampN(trip,14)) from api.logbook where i
SELECT numInstants(trip), jsonb_array_length(api.export_logbook_geojson_point_trip_fn(id)->'features') from api.logbook where id = 3;
-- Extract the timestamps of the invalid coords
--SELECT timestampN(trip,14) as "to_be_delete" FROM api.logbook where id = 3 \gset
- SELECT concat('[', timestampN(trip,13),',',timestampN(trip,14),')') as to_be_delete FROM api.logbook where id = 3 \gset
+ SELECT concat('[', timestampN(trip,14),',',timestampN(trip,15),')') as to_be_delete FROM api.logbook where id = 3 \gset
--\echo :to_be_delete
-- Delete the entry for all trip sequence
SELECT api.delete_trip_entry_fn(3, :'to_be_delete');
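For readers new to MobilityDB, the strings assembled above are ordinary temporal literals. A standalone sketch with arbitrary timestamps, assuming the types live in public as the tgeogpoint casts elsewhere in these tests do (tstzspan is the MobilityDB >= 1.1 name; older releases call it period):

-- update_trip_notes_fn consumes a ttext sequence: value@instant pairs between brackets
SELECT '["fishing"@2025-01-01 10:00:00+00, ""@2025-01-01 10:05:00+00]'::public.ttext;
-- delete_trip_entry_fn consumes a half-open span covering the instants to drop
SELECT '[2025-01-01 10:00:00+00, 2025-01-01 10:05:00+00)'::public.tstzspan;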
@@ -47,7 +47,8 @@ SELECT numInstants(trip), jsonb_array_length(api.export_logbook_geojson_point_tr
-- Export PostGIS geography from a trip -- Export PostGIS geography from a trip
\echo 'Export PostGIS geography from trajectory' \echo 'Export PostGIS geography from trajectory'
SELECT ST_IsValid(trajectory(trip)::geometry) IS TRUE FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false); --SELECT ST_IsValid(trajectory(trip)::geometry) IS TRUE FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
SELECT trajectory(trip)::geometry FROM api.logbook WHERE id = 3;
-- Export GeoJSON from a trip -- Export GeoJSON from a trip
\echo 'Export GeoJSON with properties from a trip' \echo 'Export GeoJSON with properties from a trip'
@@ -66,7 +67,21 @@ SELECT set_config('vessel.id', :'vessel_id_kapla', false) IS NOT NULL as vessel_
-- Export timelapse as Geometry LineString from a trip -- Export timelapse as Geometry LineString from a trip
\echo 'Export timelapse as Geometry LineString from a trip' \echo 'Export timelapse as Geometry LineString from a trip'
SELECT api.export_logbooks_geojson_linestring_trips_fn(1,2) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false); --SELECT api.export_logbooks_geojson_linestring_trips_fn(1,2) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
-- Test geometry_type and num_properties
-- propoerties include endtimestamp and starttimestamp
WITH geojson_output AS (
SELECT api.export_logbooks_geojson_linestring_trips_fn(1, 2) AS geojson
FROM api.logbook
WHERE vessel_id = current_setting('vessel.id', false)
)
SELECT
--geojson
geojson->'features'->0->'geometry'->>'type' AS geometry_type,
--jsonb_array_length(jsonb_object_keys(geojson->'features'->0->'properties')::JSONB),
--jsonb_array_length(jsonb_object_keys(geojson->'features')) AS num_geometry,
(SELECT COUNT(*) FROM jsonb_object_keys(geojson->'features'->0->'properties')) AS num_properties
FROM geojson_output;
-- Export timelapse as Geometry Point from a trip -- Export timelapse as Geometry Point from a trip
\echo 'Export timelapse as Geometry Point from a trip' \echo 'Export timelapse as Geometry Point from a trip'
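The instant-index shifts above (14 to 13 for the note, 13/14 to 14/15 for the deletion window) track the re-recorded test data. For readers unfamiliar with the MobilityDB primitives this test exercises, a self-contained sketch with illustrative timestamps (not values from the test data):

-- A temporal text: "fishing" holds from 10:00, replaced by "" at 10:05 (step interpolation)
SELECT numInstants(ttext '["fishing"@2025-05-01 10:00:00+00, ""@2025-05-01 10:05:00+00]');
-- -> 2
SELECT timestampN(ttext '["fishing"@2025-05-01 10:00:00+00, ""@2025-05-01 10:05:00+00]', 1);
-- -> 2025-05-01 10:00:00+00 (timestampN is 1-based, hence the off-by-one edits above)
SELECT valueAtTimestamp(ttext '["fishing"@2025-05-01 10:00:00+00, ""@2025-05-01 10:05:00+00]',
                        timestamptz '2025-05-01 10:02:00+00');
-- -> fishing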

View File

@@ -33,10 +33,8 @@ numinstants | 44
 jsonb_array_length | 44
 Export PostGIS geography from trajectory
--[ RECORD 1 ]
-?column? | t
--[ RECORD 2 ]
-?column? | t
+-[ RECORD 1 ]--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+trajectory | 0102000020E61000002B0000001CACA4BA25BC3840217B18B556DC4D40569FABADD8BB384012EA33B10ADC4D408A65E9F989BB384018A023A8D0DB4D40B174F4AE30BB38409951E2299ADB4D407C348B06DFBA3840443A973D64DB4D40FE4465C39ABA38409313927131DB4D402C9CA4F963BA384029E6C52EF6DA4D4010559D7A49BA384019E25817B7DA4D401BF0F96184BC3840F868160DBEDC4D4095B7239C16BC38401DDB7C6D47DC4D40559FABADD8BB384012EA33B10ADC4D408A65E9F989BB384018A023A8D0DB4D40B274F4AE30BB38409951E2299ADB4D407C348B06DFBA3840443A973D64DB4D40FE4465C39ABA38409313927131DB4D402C9CA4F963BA384029E6C52EF6DA4D4010559D7A49BA384019E25817B7DA4D401BF0F96184BC3840F868160DBEDC4D4098B1B2C755BC38404D52F41B81DC4D4095B7239C16BC38401DDB7C6D47DC4D407448C55AD7BB38406CABFEAD09DC4D40D8367B5688BB38400D54C6BFCFDB4D40B274F4AE30BB38409951E2299ADB4D401256BEC2DDBA3840EB22E06B63DB4D40FE4465C39ABA38409313927131DB4D402C9CA4F963BA384029E6C52EF6DA4D40407D152A49BA3840CDA66D0DB6DA4D40BDBFE6C182BC3840F9269710BDDC4D4098B1B2C755BC38404D52F41B81DC4D4024308CAA15BC3840B85DC36746DC4D407448C55AD7BB38406CABFEAD09DC4D40D8367B5688BB38400D54C6BFCFDB4D408224A24E2FBB384070BEC74F99DB4D4028B0A5EC99BA3840F90C4D7E30DB4D402C4080B163BA38402FA93528F5DA4D40407D152A49BA3840CDA66D0DB6DA4D40BDBFE6C182BC3840F9269710BDDC4D4099B1B2C755BC38404D52F41B81DC4D4024308CAA15BC3840B85DC36746DC4D407448C55AD7BB38406CABFEAD09DC4D40D8367B5688BB38400D54C6BFCFDB4D408224A24E2FBB384070BEC74F99DB4D401156BEC2DDBA3840EB22E06B63DB4D40
 Export GeoJSON with properties from a trip
 -[ RECORD 1 ]------+---
@@ -54,8 +52,9 @@ Export KML from a trip
 vessel_id | t
 Export timelapse as Geometry LineString from a trip
--[ RECORD 1 ]-------------------------------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-export_logbooks_geojson_linestring_trips_fn | {"type": "FeatureCollection", "features": [{"type": "Feature", "geometry": {"type": "LineString", "coordinates": [[23.530866667, 60.077666667], [23.52355, 60.07065], [23.515866667, 60.0637], [23.507866667, 60.056716667], [23.500533333, 60.04915], [23.493, 60.041633333], [23.485466667, 60.033983333], [23.479033333, 60.026216667], [23.47295, 60.01835], [23.46745, 60.01045], [23.461033333, 60.003516667], [23.45415, 59.99755], [23.445683333, 59.99235], [23.438766667, 59.989266667], [23.435116667, 59.987866667], [23.43165, 59.986333333], [23.4292, 59.984833333], [23.432566667, 59.9862], [23.43375, 59.987266667], [23.431566667, 59.98615], [23.4307, 59.98565], [23.429383333, 59.984683333], [23.421066667, 59.978233333], [23.431, 59.977716667], [23.432133333, 59.976883333], [23.4321, 59.976883333]]}, "properties": {}}]}
+-[ RECORD 1 ]--+-----------
+geometry_type | LineString
+num_properties | 26
 Export timelapse as Geometry Point from a trip
 -[ RECORD 1 ]
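The new trajectory value is hex-encoded EWKB and can be decoded from its header alone: 01 marks little-endian byte order, 02000020 a LineString with an SRID flag, E6100000 is SRID 4326, and 2B000000 gives 43 points. A sketch (assuming the same logbook row) to verify it without reading hex:

-- Inspect the exported trajectory via PostGIS accessors
SELECT GeometryType(traj) AS geom_type,  -- LINESTRING
       ST_SRID(traj)      AS srid,       -- 4326
       ST_NPoints(traj)   AS npoints     -- 43, per the EWKB header
  FROM (SELECT trajectory(trip)::geometry AS traj
          FROM api.logbook WHERE id = 3) AS t;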

View File

@@ -49,4 +49,5 @@ SELECT public.qgis_bbox_trip_py_fn(CONCAT(:'vessel_id_aava'::TEXT, '_', 3, '_',
 --SELECT set_config('vessel.id', :'vessel_id_kapla', false) IS NOT NULL as vessel_id;
 -- SQL request from QGIS to fetch the necessary data based on vessel_id
 --SELECT id, vessel_id, name as logname, ST_Transform(track_geom, 3857) as track_geom, ROUND(distance, 2) as distance, ROUND(EXTRACT(epoch FROM duration)/3600,2) as duration,_from_time,_to_time FROM api.logbook where track_geom is not null and _to_time is not null ORDER BY _from_time DESC;
-SELECT count(*) FROM api.logbook where track_geom is not null and _to_time is not null;
+--SELECT count(*) FROM api.logbook WHERE track_geom IS NOT NULL AND _to_time IS NOT NULL;
+SELECT count(*) FROM api.logbook WHERE trip IS NOT NULL AND _to_time IS NOT NULL;

View File

@@ -11,35 +11,35 @@ Get BBOX Extent from SQL query for a log: "^/log_(w+)_(d+).png$"
 qgis_bbox_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076
 -[ RECORD 1 ]---+----------------------------------------------------
-qgis_bbox_py_fn | 2745681,8303937.662962963,2871529,8369891.337037037
+qgis_bbox_py_fn | 2749398.035335689,8334944,2756917.964664311,8338885
 Get BBOX Extent from SQL query for a log as line: "^/log_(w+)_(d+)_line.png$"
 -[ RECORD 1 ]---+-------------------------------------------------------------------------
 qgis_bbox_py_fn | 2570800.6277114027,8368634.173700442,2645441.4677270483,8417049.85371059
--[ RECORD 1 ]---+-----------------------------------------------------------------------
-qgis_bbox_py_fn | 2752672.6236475753,8300633.73408079,2864537.04561218,8373194.440219993
+-[ RECORD 1 ]---+--------------------------------------------------------------------------
+qgis_bbox_py_fn | 2750457.4431765806,8335162.530580978,2755858.0759322727,8338665.643719805
 Get BBOX Extent from SQL query for all logs by vessel_id: "^/logs_(w+)_(d+).png$"
 -[ RECORD 1 ]---+------------------------------------------------------
 qgis_bbox_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076
 -[ RECORD 1 ]---+------------------------------------------------------
-qgis_bbox_py_fn | -1950837.4558303887,4864146,5068977.455830389,8543049
+qgis_bbox_py_fn | -2006284.4558303887,4864146,5013530.455830389,8543049
 Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_(w+)_(d+)_(d+).png$"
 -[ RECORD 1 ]---+-------------------------------------
 qgis_bbox_py_fn | 2595383,4787988.0,2620859,11997696.0
 -[ RECORD 1 ]---+---------------------------------------
-qgis_bbox_py_fn | 90420,-201110377.5,3027720,214517572.5
+qgis_bbox_py_fn | 97351,-192283890.5,2909895,205691085.5
 Get BBOX Extent from SQL query for a trip by vessel_id: "^/trip_((w+)_(d+)_(d+)).png$"
 -[ RECORD 1 ]--------+------------------------------------------------------
 qgis_bbox_trip_py_fn | 2556155.0636042403,8365608,2660086.9363957597,8420076
 -[ RECORD 1 ]--------+------------------------------------------------------
-qgis_bbox_trip_py_fn | -1950837.4558303887,4864146,5068977.455830389,8543049
+qgis_bbox_trip_py_fn | -2006284.4558303887,4864146,5013530.455830389,8543049
 -[ RECORD 1 ]
 count | 3
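All extents above are EPSG:3857 (Web Mercator) coordinates; the changed values follow from the re-recorded aava track. A hedged sketch of the raw extent behind such a bbox (qgis_bbox_py_fn is assumed to additionally pad and scale the box to the requested image aspect ratio, which is why its output differs from a plain extent):

-- Raw trip extent in Web Mercator for one logbook entry (illustrative)
SELECT ST_XMin(ext) AS xmin, ST_YMin(ext) AS ymin,
       ST_XMax(ext) AS xmax, ST_YMax(ext) AS ymax
  FROM (SELECT ST_Extent(ST_Transform(trajectory(trip)::geometry, 3857)) AS ext
          FROM api.logbook WHERE id = 3) AS t;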

View File

@@ -6,10 +6,10 @@
 You are now connected to database "signalk" as user "username".
 Expanded display is on.
 -[ RECORD 1 ]--+-------------------------------
-server_version | 16.6 (Debian 16.6-1.pgdg120+1)
+server_version | 16.8 (Debian 16.8-1.pgdg120+1)
--[ RECORD 1 ]--------+------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-postgis_full_version | POSTGIS="3.5.0 d2c3ca4" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" (compiled against PROJ 9.11.1) LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"
+-[ RECORD 1 ]--------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+postgis_full_version | POSTGIS="3.5.2 dea6d0a" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" (compiled against PROJ 9.1.1) LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"
 -[ RECORD 1 ]--------------------------------------------------------------------------------------
 Name | citext
@@ -53,15 +53,20 @@ Schema | pg_catalog
 Description | PL/Python3U untrusted procedural language
 -[ RECORD 9 ]--------------------------------------------------------------------------------------
 Name | postgis
-Version | 3.5.0
+Version | 3.5.2
 Schema | public
 Description | PostGIS geometry and geography spatial types and functions
 -[ RECORD 10 ]-------------------------------------------------------------------------------------
 Name | timescaledb
-Version | 2.17.2
+Version | 2.19.3
 Schema | public
 Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
 -[ RECORD 11 ]-------------------------------------------------------------------------------------
+Name | timescaledb_toolkit
+Version | 1.21.0
+Schema | public
+Description | Library of analytical hyperfunctions, time-series pipelining, and other SQL utilities
+-[ RECORD 12 ]-------------------------------------------------------------------------------------
 Name | uuid-ossp
 Version | 1.1
 Schema | public
@@ -111,14 +116,14 @@ laninline | 13566
 lanvalidator | 13567
 lanacl |
 -[ RECORD 5 ]-+-----------
-oid | 18190
+oid | 18225
 lanname | plpython3u
 lanowner | 10
 lanispl | t
 lanpltrusted | t
-lanplcallfoid | 18187
-laninline | 18188
-lanvalidator | 18189
+lanplcallfoid | 18222
+laninline | 18223
+lanvalidator | 18224
 lanacl |
 -[ RECORD 1 ]+-----------
@@ -671,16 +676,16 @@ overpass_py_fn | {"fee": "yes", "vhf": "09", "name": "Port Olímpic", "phone": "
 -[ RECORD 1 ]--+--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
 overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure": "marina", "name:ca": "Port de la Ginesta", "wikidata": "Q16621038", "wikipedia": "ca:Port Ginesta", "check_date": "2024-08-23"}
--[ RECORD 1 ]--+----------------------------------------------
-overpass_py_fn | {"name": "Norra hamnen", "leisure": "marina"}
+-[ RECORD 1 ]--+---------------------------------------------------------------------------------------
+overpass_py_fn | {"leisure": "marina", "seamark:type": "harbour", "seamark:harbour:category": "marina"}
--[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------------
-versions_fn | {"api_version" : "0.8.0", "sys_version" : "PostgreSQL 16.6", "mobilitydb" : "1.2.0", "timescaledb" : "2.17.2", "postgis" : "3.5.0", "postgrest" : "PostgREST 12.2.3"}
+-[ RECORD 1 ]-----------------------------------------------------------------------------------------------------------------------------------------------------
+versions_fn | {"api_version" : "0.9.1", "sys_version" : "PostgreSQL 16.8", "mobilitydb" : "1.2.0", "timescaledb" : "2.19.3", "postgis" : "3.5.2", "postgrest" : "PostgREST 12.2.12"}
--[ RECORD 1 ]-----------------
-api_version | 0.8.0
-sys_version | PostgreSQL 16.6
-timescaledb | 2.17.2
-postgis | 3.5.0
-postgrest | PostgREST 12.2.3
+-[ RECORD 1 ]------------------
+api_version | 0.9.1
+sys_version | PostgreSQL 16.8
+timescaledb | 2.19.3
+postgis | 3.5.2
+postgrest | PostgREST 12.2.12
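A quick way to cross-check the version bumps recorded above (a sketch; api.versions_fn is the function whose JSON output appears in this diff, the catalog query is standard PostgreSQL):

-- Application-level report, as exercised by the test
SELECT api.versions_fn();
-- Catalog-level cross-check of the extensions the report covers
SELECT name, installed_version
  FROM pg_available_extensions
 WHERE name IN ('postgis', 'timescaledb', 'timescaledb_toolkit', 'mobilitydb');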

View File

@@ -18,8 +18,9 @@ fi
 if [[ ! -x "/usr/bin/go" || ! -x "/root/go/bin/mermerd" ]]; then
 #wget -q https://go.dev/dl/go1.21.4.linux-arm64.tar.gz && \
 #rm -rf /usr/local/go && tar -C /usr/local -xzf go1.21.4.linux-arm64.tar.gz && \
-apt update && apt -y install golang && \
-go install github.com/KarnerTh/mermerd@latest
+apt update && apt -y install golang-go && \
+#go install github.com/KarnerTh/mermerd@latest (requires the latest go version)
+go install github.com/KarnerTh/mermerd@v0.11.0
 fi
 # pnpm install
@@ -48,6 +49,19 @@ else
 exit 1
 fi
+# metadata and vessel configuration unit tests
+psql ${PGSAIL_DB_URI} < sql/metadata.sql > output/metadata.sql.output
+diff sql/metadata.sql.output output/metadata.sql.output > /dev/null
+#diff -u sql/metadata.sql.output output/metadata.sql.output | wc -l
+#echo 0
+if [ $? -eq 0 ]; then
+echo OK
+else
+echo SQL metadata.sql FAILED
+diff -u sql/metadata.sql.output output/metadata.sql.output
+exit 1
+fi
 # https://www.postgresql.org/docs/current/app-psql.html
 # run cron jobs
 #psql -U ${POSTGRES_USER} -h 172.30.0.1 signalk < sql/cron_run_jobs.sql > output/cron_run_jobs.sql.output
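One nuance in the new test-runner block: the if [ $? -eq 0 ] test works only because the two commented lines between diff and the test do not execute and therefore leave $? untouched. A slightly more defensive equivalent (sketch in the same shell style):

# Consume the exit status where it is produced
if diff -q sql/metadata.sql.output output/metadata.sql.output > /dev/null; then
    echo OK
else
    echo SQL metadata.sql FAILED
    diff -u sql/metadata.sql.output output/metadata.sql.output
    exit 1
fi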