Mirror of https://github.com/xbgmsharp/postgsail.git, synced 2025-09-17 19:27:49 +00:00

Compare commits (130 commits)
Commits:

- b3d5b80731
- 5cefe47fea
- 98f45ace66
- ff69d6ee7a
- 52dcdb61fd
- 6bbe9795fb
- 23ea9dfee7
- 82a0f19566
- a7019445e3
- f97635cae1
- fe93b9ec11
- b5be201e65
- 33b97898d8
- c726f6cc07
- 48647e5978
- 1702b825c7
- 3c4f68218f
- 42e85cc498
- bd9b207d43
- c588fc676c
- e003985d6c
- 0cf9fa701d
- 23d2ced60c
- a1a5f29c16
- 5fe9a37eee
- 2064947457
- 30b1b8f0a6
- 4dcd8d2ea5
- 670aa2e43a
- 1a8790f2a0
- 6d97bb1e32
- 779cee21ec
- e48192e609
- f2beead3a7
- 5df3e1dbd3
- ed555c1f32
- 02dd68f2d8
- d9329705ba
- 54af136682
- 6f96a070b8
- e79086c4a2
- ed417a4c5d
- 546274ce29
- d5b6072273
- e8addd2e9c
- f843a6a1f3
- 478bbf5529
- c9523e2f6f
- 5fa85821de
- 7ccef80904
- 04cc7de245
- a75ba105df
- 7eddeefa47
- 314cdc71c7
- d508ac1662
- 6f9956ee46
- 0b854374ff
- 9b647c9a49
- 800f0c83e3
- 7424fbbe49
- e183530435
- 4444f73919
- 241c70fcb5
- 327324d7c3
- 28ccc9e2da
- 966846c93d
- d966d34a29
- 8bd7594357
- 4de47fd011
- cf0f178d13
- b224374e65
- 1c441af48c
- e5287d74f6
- 9cf334125a
- 1342dba298
- 7e734c412b
- 9ae10b0519
- ab1b4c7076
- edb7bc7dd8
- e124bdea44
- b944c39507
- b4791f059c
- 78ff78a6b1
- b6be05753c
- 30e3797854
- e9dea3b124
- 1a08acda85
- 633e73b29d
- a383d0db53
- 0116ea4feb
- 92bcf0ffa6
- 414736909b
- 36173658a0
- 984a7c14da
- 48be759e4d
- e473baa5a0
- 9ba79123ec
- 82e7056b0c
- 4829d0b848
- 9069b11f71
- bf3eb3c806
- 6e863ca355
- 51128112f3
- b150d9706f
- 813460da7b
- 813b8088f3
- f90911c523
- 790bbb671c
- 5455d8246f
- 95d24c538d
- 57799c9ee4
- 437bfd0252
- dcceab2551
- cdc2e4e55c
- 76bbe29567
- 36b8eece52
- a5436479cf
- 23ea3bd0d8
- 826566e097
- f942076cc2
- 8dba0c21b6
- a96160ef15
- ccf91bb832
- b9993ed28f
- 0e5e619625
- 294a60d13a
- b6587b1287
- 74512d0bf3
- 322a479b4f
- c5cba6a59f
.env.example

```diff
@@ -6,17 +6,23 @@ POSTGRES_DB=postgres
 PGSAIL_AUTHENTICATOR_PASSWORD=password
 PGSAIL_GRAFANA_PASSWORD=password
 PGSAIL_GRAFANA_AUTH_PASSWORD=password
 # SMTP server settings
 PGSAIL_EMAIL_FROM=root@localhost
 PGSAIL_EMAIL_SERVER=localhost
 #PGSAIL_EMAIL_USER= Comment if not use
 #PGSAIL_EMAIL_PASS= Comment if not use
 # Pushover settings
 #PGSAIL_PUSHOVER_APP_TOKEN= Comment if not use
 #PGSAIL_PUSHOVER_APP_URL= Comment if not use
 # TELEGRAM BOT, ask BotFather
 #PGSAIL_TELEGRAM_BOT_TOKEN= Comment if not use
 # webapp entrypoint, typically the public DNS or IP
 PGSAIL_APP_URL=http://localhost:8080
 # API entrypoint from the webapp, typically the public DNS or IP
 PGSAIL_API_URL=http://localhost:3000
 # POSTGREST ENV Settings
 PGRST_DB_URI=postgres://authenticator:${PGSAIL_AUTHENTICATOR_PASSWORD}@db:5432/signalk
 # % cat /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | fold -w 42 | head -n 1
 PGRST_JWT_SECRET=_at_least_32__char__long__random
 # Grafana ENV Settings
 GF_SECURITY_ADMIN_PASSWORD=password
```
.github/FUNDING.yml (vendored, new file, 14 lines)

```diff
@@ -0,0 +1,14 @@
+# These are supported funding model platforms
+
+github: [xbgmsharp]
+patreon: # Replace with a single Patreon username
+open_collective: # Replace with a single Open Collective username
+ko_fi: # Replace with a single Ko-fi username
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+lfx_crowdfunding: # Replace with a single LFX Crowdfunding project-name e.g., cloud-foundry
+polar: # Replace with a single Polar username
+buy_me_a_coffee: # Replace with a single Buy Me a Coffee username
+custom: # Replace with up to 4 custom sponsorship URLs e.g., ['link1', 'link2']
```
.github/dependabot.yml (vendored, new file, 14 lines)

```diff
@@ -0,0 +1,14 @@
+version: 2
+updates:
+  # Enable version updates for Docker
+  - package-ecosystem: "docker"
+    # Look for a `Dockerfile` in the `root` directory
+    directory: "/"
+    # Check for updates once a week
+    schedule:
+      interval: "weekly"
+
+  - package-ecosystem: github-actions
+    directory: "/"
+    schedule:
+      interval: weekly
```
.github/workflows/db-lint.yml (vendored, 2 changes)

```diff
@@ -21,7 +21,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out the source
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set env
         run: cp .env.example .env
```
.github/workflows/db-test.yml (vendored, 2 changes)

```diff
@@ -23,7 +23,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Check out the source
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set env
         run: cp .env.example .env
```
.github/workflows/frontend-test.yml (vendored, 8 changes)

```diff
@@ -20,7 +20,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4
         with:
           submodules: 'true'

@@ -31,7 +31,11 @@ jobs:
         run: docker compose -f docker-compose.dev.yml -f docker-compose.yml pull db api web_tests

       - name: Build Docker images
-        run: docker compose -f docker-compose.dev.yml -f docker-compose.yml build web_dev
+        run: |
+          set -eu
+          source .env
+          docker compose -f docker-compose.dev.yml -f docker-compose.yml build web_dev
+          docker compose -f docker-compose.dev.yml -f docker-compose.yml build web

       - name: Run PostgSail Web tests
         # Environment variables
```
.github/workflows/grafana-test.yml (vendored, 2 changes)

```diff
@@ -20,7 +20,7 @@ jobs:
     runs-on: ubuntu-latest
     steps:
       - name: Checkout
-        uses: actions/checkout@v3
+        uses: actions/checkout@v4

       - name: Set env
         run: cp .env.example .env
```
CHANGELOG.md (new file, 1 line)

```diff
@@ -0,0 +1 @@
+## Please see [Releases](https://github.com/xbgmsharp/postgsail/releases) for the release notes.
```
CODE_OF_CONDUCT.md (new file, 45 lines)

```diff
@@ -0,0 +1,45 @@
+# Contributor Covenant Code of Conduct
+
+## Our Pledge
+
+In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to make participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, sex characteristics, gender identity and expression, level of experience, education, socio-economic status, nationality, personal appearance, race, religion, or sexual identity and orientation.
+
+## Our Standards
+
+Examples of behavior that contributes to creating a positive environment include:
+
+* Using welcoming and inclusive language
+* Being respectful of differing viewpoints and experiences
+* Gracefully accepting constructive criticism
+* Focusing on what is best for the community
+* Showing empathy towards other community members
+
+Examples of unacceptable behavior by participants include:
+
+* The use of sexualized language or imagery and unwelcome sexual attention or advances
+* Trolling, insulting/derogatory comments, and personal or political attacks
+* Public or private harassment
+* Publishing others' private information, such as a physical or electronic address, without explicit permission
+* Other conduct which could reasonably be considered inappropriate in a professional setting
+
+## Our Responsibilities
+
+Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
+
+Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
+
+## Scope
+
+This Code of Conduct applies within all project spaces, and it also applies when an individual is representing the project or its community in public spaces. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
+
+## Enforcement
+
+Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project maintainer using any of the [private contact addresses](https://github.com/dec0dOS/amazing-github-template#support). All complaints will be reviewed and investigated and will result in a response that is deemed necessary and appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
+
+Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
+
+## Attribution
+
+This Code of Conduct is adapted from the [Contributor Covenant](https://www.contributor-covenant.org), version 1.4, available at <https://www.contributor-covenant.org/version/1/4/code-of-conduct.html>
+
+For answers to common questions about this code of conduct, see <https://www.contributor-covenant.org/faq>
```
CONTRIBUTING.md (new file, 3 lines)

```diff
@@ -0,0 +1,3 @@
+Styleguides
+
+Ensure your code is lint-formatted.
```
README.md (254 changes)
@@ -1,11 +1,37 @@
# PostgSail
<br/>
<p align="center">
  <a href="https://github.com/xbgmsharp/postgsail">
    <img src="https://iot.openplotter.cloud/android-chrome-192x192.png" alt="Logo" width="80" height="80">
  </a>

An effortless cloud-based solution for storing and sharing your Signal K data, allowing you to log your sails and monitor your boat with historical data.
  <h3 align="center">PostgSail</h3>

  <p align="center">
    PostgSail is an open-source alternative to traditional vessel data management!
    <br/>
    <br/>
    <a href="https://github.com/xbgmsharp/postgsail/blob/main/docs/README.md"><strong>Explore the docs »</strong></a>
    <br/>
    <br/>
    <a href="#about-the-project">View Demo</a>
    .
    <a href="https://github.com/xbgmsharp/postgsail/issues">Report Bug</a>
    .
    <a href="https://github.com/xbgmsharp/postgsail/issues">Request Feature</a>
    .
    <a href="https://xbgmsharp.github.io/postgsail/">Website</a>
    .
    <a href="https://github.com/sponsors/xbgmsharp">Sponsors</a>
    .
    <a href="https://discord.gg/uuZrwz4dCS">Discord</a>
  </p>
</p>

[](https://github.com/xbgmsharp/postgsail/releases/latest)
[](#license)
[](https://github.com/xbgmsharp/postgsail/issues)
[](http://makeapullrequest.com)


[](https://github.com/xbgmsharp/postgsail/actions/workflows/db-test.yml)
[](https://github.com/xbgmsharp/postgsail/actions/workflows/frontend-test.yml)
@@ -22,6 +48,35 @@ postgsail-telegram-bot:

[](https://www.bestpractices.dev/projects/8124)


## Table Of Contents

- [Table Of Contents](#table-of-contents)
- [About The Project](#about-the-project)
- [Features](#features)
- [Cloud-hosted PostgSail](#cloud-hosted-postgsail)
- [On-Premise (for free)](#on-premise-for-free)
- [Roadmap](#roadmap)
- [Contributing](#contributing)
- [Creating A Pull Request](#creating-a-pull-request)
- [License](#license)
- [Acknowledgements](#acknowledgements)

## About The Project

https://github.com/xbgmsharp/signalk-postgsail/assets/1498985/b2669c39-11ad-4a50-9f91-9397f9057ee8

An effortless cloud-based solution for storing and sharing your Signal K data, allowing you to log your sails and monitor your boat with historical data.

Here's how:

It is all about SQL: object-relational, time-series and spatial databases, with a bit of Python.

PostgSail is an open-source alternative to traditional vessel data management.
It is based on a well-known open-source technology stack: Signal K, PostgreSQL, TimescaleDB, PostGIS and PostgREST. It integrates perfectly with a standard monitoring stack like Grafana.

To understand the why and how, you might want to read [Why.md](https://github.com/xbgmsharp/postgsail/blob/main/Why.md)

## Features

- Automatically log your voyages without manually starting or stopping a trip.
@@ -29,6 +84,7 @@ postgsail-telegram-bot:
- Timelapse video your trips, with or without time control.
- Add custom notes to your logs.
- Export to CSV, GPX, GeoJSON, KML and download your logs.
- Export your logs as image (PNG) or video (MP4).
- Aggregate your trip statistics: Longest voyage, time spent at anchorages, home ports etc.
- See your moorages on a global map, with incoming and outgoing voyages from each trip.
- Monitor your boat (position, depth, wind, temperature, battery charge status, etc.) remotely.
@@ -38,191 +94,53 @@ postgsail-telegram-bot:
- Offline mode.
- Low Bandwidth mode.
- Awesome statistics and graphs.
- Create and manage your own dashboards.
- Windy PWS (Personal Weather Station).
- Engine Hours Logger.
- Polar performance.
- Anything missing? Just ask!

## Context
## Cloud-hosted PostgSail

It is all about SQL: object-relational, time-series and spatial databases, with a bit of Python.
Remove the hassle of running PostgSail yourself. Here you can skip the technical setup, the maintenance work and server costs by getting PostgSail on our reliable and secure PostgSail Cloud. Register and try for free at [iot.openplotter.cloud](https://iot.openplotter.cloud/).

PostgSail is an open-source alternative to traditional vessel data management.
It is based on a well-known open-source technology stack: Signal K, PostgreSQL, TimescaleDB, PostGIS and PostgREST. It integrates perfectly with a standard monitoring stack like Grafana.
## On-Premise (for free)

To understand the why and how, you might want to read [Why.md](https://github.com/xbgmsharp/postgsail/tree/main/Why.md)
Self-host PostgSail where you want and how you want. There are no restrictions; you're in full control. [Install Guide](https://github.com/xbgmsharp/postgsail/blob/main/docs/README.md)

## Architecture
A simple, scalable architecture:
## Roadmap


See the [open issues](https://github.com/xbgmsharp/postgsail/issues) for a list of proposed features (and known issues).

For more clarity and visibility, the complete [Entity-Relationship Diagram (ERD)](https://github.com/xbgmsharp/postgsail/tree/main/ERD/README.md) is exported as PNG and SVG files.
Join the community, get support and exchange on [Discord](https://discord.gg/uuZrwz4dCS). Missing a feature? Just ask!

## Cloud
## Contributing

If you prefer not to install or administer your instance of PostgSail, hosted versions of PostgSail are available in the cloud of your choice.
Contributions are what make the open source community such an amazing place to learn, inspire, and create. Any contributions you make are **greatly appreciated**.
* If you have suggestions for features, feel free to [open an issue](https://github.com/xbgmsharp/postgsail/issues/new) to discuss it, or directly create a pull request with the necessary changes.
* Please make sure you check your spelling and grammar.
* Create an individual PR for each suggestion.
* Please also read through the [Code Of Conduct](https://github.com/xbgmsharp/postgsail/blob/main/CODE_OF_CONDUCT.md) before posting your first idea.

### The cloud advantage.
### Creating A Pull Request

Hosted and fully-managed options for PostgSail, designed for all your deployment and business needs. Register and try for free at https://iot.openplotter.cloud/.
1. Fork the Project
2. Create your Feature Branch (`git checkout -b feature/AmazingFeature`)
3. Commit your Changes (`git commit -m 'Add some AmazingFeature'`)
4. Push to the Branch (`git push origin feature/AmazingFeature`)
5. Open a Pull Request

## Using PostgSail
## License

A full-featured development environment.
Distributed under the Apache License Version 2.0. See [LICENSE](https://github.com/xbgmsharp/postgsail/blob/main/LICENSE) for more information.

#### With CodeSandbox
## Acknowledgements

- Develop on [](https://codesandbox.io/p/github/xbgmsharp/postgsail/main)
- or via [direct link](https://codesandbox.io/p/github/xbgmsharp/postgsail/main)

#### With DevPod

- [](https://devpod.sh/open#https://github.com/xbgmsharp/postgsail/&workspace=postgsail&provider=docker&ide=openvscode)
- or via [direct link](https://devpod.sh/open#https://github.com/xbgmsharp/postgsail&workspace=postgsail&provider=docker&ide=openvscode)

#### With Docker Dev Environments
- [Open in Docker dev-envs!](https://open.docker.com/dashboard/dev-envs?url=https://github.com/xbgmsharp/postgsail/)

### pre-deploy configuration

To get these running, copy `.env.example` and rename it to `.env`, then set the values accordingly.

```bash
# cp .env.example .env
```

Notice that `PGRST_JWT_SECRET` must be at least 32 characters long.

`$ head /dev/urandom | tr -dc A-Za-z0-9 | head -c 42 ; echo ''`

```bash
# nano .env
```

### Deploy

By default no network is set and all data is stored in a Docker volume.
You can update the default settings by editing `docker-compose.yml` and `docker-compose.dev.yml` to your needs.

First let's initialize the database.

#### Step 1. Initialize database

First let's import the SQL schema, execute:

```bash
$ docker-compose up db
```

#### Step 2. Start backend (db, api)

Then launch the full stack (db, api) backend, execute:

```bash
$ docker-compose up db api
```

The API should be accessible via port HTTP/3000.
The database should be accessible via port TCP/5432.

You can connect to the database via a web GUI like [pgadmin](https://www.pgadmin.org/) or you can use a client like [dbeaver](https://dbeaver.io/).

### SQL Configuration

Check and update your postgsail settings via SQL in the table `app_settings`:

```sql
SELECT * FROM app_settings;
```

```sql
UPDATE app_settings
    SET
        value = 'new_value'
    WHERE name = 'app.email_server';
```

### Ingest data

Next, to ingest data from Signal K, you need to install the [signalk-postgsail](https://github.com/xbgmsharp/signalk-postgsail) plugin on your Signal K server instance.

Also, if you like, you can import saillogger data using the postgsail helpers, [postgsail-helpers](https://github.com/xbgmsharp/postgsail-helpers).

You might want to import your InfluxDB 1.x data as well, using [outflux](https://github.com/timescale/outflux).
For InfluxDB 2.x and 3.x, you will need to enable the 1.x APIs to use them. Consult the InfluxDB documentation for more details.

Last, if you like, you can import the sample data from the Signal K NMEA Plaka log by running the tests.
If everything goes well, all tests pass and you should receive a few notifications by email, PushOver or Telegram.
[End-to-End (E2E) Testing.](https://github.com/xbgmsharp/postgsail/blob/main/tests/)

```
$ docker-compose up tests
```

### API Documentation

The OpenAPI description output depends on the permissions of the role that is contained in the JWT role claim.

Other applications can also use the [PostgSAIL API](https://petstore.swagger.io/?url=https://raw.githubusercontent.com/xbgmsharp/postgsail/main/openapi.json).

API anonymous:

```
$ curl http://localhost:3000/
```

API user_role:

```
$ curl http://localhost:3000/ -H 'Authorization: Bearer my_token_from_login_or_signup_fn'
```

API vessel_role:

```
$ curl http://localhost:3000/ -H 'Authorization: Bearer my_token_from_register_vessel_fn'
```

#### API main workflow

Check the [End-to-End (E2E) test sample](https://github.com/xbgmsharp/postgsail/blob/main/tests/).

### Docker dependencies

`docker-compose` is used to start environment dependencies. Dependencies consist of 3 containers:

- `timescaledb-postgis` alias `db`, PostgreSQL with the TimescaleDB extension along with the PostGIS extension.
- `postgrest` alias `api`, a standalone web server that turns your PostgreSQL database directly into a RESTful API.
- `grafana` alias `app`, to visualize and monitor your data.

### Optional docker images

- [pgAdmin](https://hub.docker.com/r/dpage/pgadmin4), web UI to monitor and manage multiple PostgreSQL instances
- [Swagger](https://hub.docker.com/r/swaggerapi/swagger-ui), web UI to visualize documentation from PostgREST

```
docker-compose -f docker-compose-optional.yml up
```

### Software reference

Out of the box IoT platform using Docker with the following software:
An out-of-the-box IoT platform using Docker (could be extended to K3s or K8s) with the following software:

- [Signal K server, a Free and Open Source universal marine data exchange format](https://signalk.org)
- [PostgreSQL, open source object-relational database system](https://postgresql.org)
- [TimescaleDB, Time-series data extends PostgreSQL](https://www.timescale.com)
- [PostGIS, a spatial database extender for PostgreSQL object-relational database.](https://postgis.net/)
- [Grafana, open observability platform | Grafana Labs](https://grafana.com)

### Support

To get support, please create a new [issue](https://github.com/xbgmsharp/postgsail/issues).

There are most likely security flaws and bugs.

### Contribution

I'm happy to accept Pull Requests!
Feel free to contribute.

### License

This is free software, Apache License Version 2.0.
- And many more
docker-compose.yml

```diff
@@ -18,7 +18,7 @@ services:
     ports:
       - "5432:5432"
     volumes:
-      - ./db-data:/var/lib/postgresql/data
+      - postgres-data:/var/lib/postgresql/data
       - ./initdb:/docker-entrypoint-initdb.d
     logging:
       options:
@@ -39,6 +39,7 @@ services:
       - "db:database"
     ports:
       - "3000:3000"
+      - "3003:3003"
     env_file: .env
     environment:
       PGRST_DB_SCHEMA: api
@@ -70,8 +71,8 @@ services:
     links:
       - "db:database"
     volumes:
-      - data:/var/lib/grafana
-      - data:/var/log/grafana
+      - grafana-data:/var/lib/grafana
+      - grafana-data:/var/log/grafana
       - ./grafana:/etc/grafana
     ports:
       - "3001:3000"
@@ -112,18 +113,30 @@ services:
         max-size: 10m

   web:
-    image: xbgmsharp/postgsail-vuestic
+    image: vuestic-postgsail
+    build:
+      context: https://github.com/xbgmsharp/vuestic-postgsail.git#live
+      dockerfile: Dockerfile
+      args:
+        - VITE_PGSAIL_URL=${PGSAIL_API_URL}
+        - VITE_APP_INCLUDE_DEMOS=false
+        - VITE_APP_BUILD_VERSION=true
+        - VITE_APP_TITLE=${VITE_APP_TITLE}
+        - VITE_GRAFANA_URL=${VITE_GRAFANA_URL}
     hostname: web
     container_name: web
     restart: unless-stopped
     links:
       - "api:postgrest"
     ports:
       - 8080:8080
     env_file: .env
     environment:
       - VITE_PGSAIL_URL=${PGSAIL_API_URL}
       - VITE_APP_INCLUDE_DEMOS=false
       - VITE_APP_BUILD_VERSION=true
       - VITE_APP_TITLE=${VITE_APP_TITLE}
       - VITE_GRAFANA_URL=${VITE_GRAFANA_URL}
+    depends_on:
+      - db
+      - api
@@ -132,4 +145,5 @@ services:
         max-size: 10m

 volumes:
-  data: {}
+  grafana-data: {}
+  postgres-data: {}
```
docs/ERD/README.md

@@ -7,7 +7,7 @@ Auto generated Mermaid diagram using [mermerd](https://github.com/KarnerTh/mermerd)

[PostgSail SQL Schema](https://github.com/xbgmsharp/postgsail/tree/main/docs/ERD/postgsail.md "PostgSail SQL Schema")

## Further
There is 3 main schemas:
There are 3 main schemas in the signalk database:
- API Schema:
  - tables
    - metrics
@@ -32,3 +32,93 @@ There is 3 main schemas:
  - functions
  - ...
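The schema list is truncated in this diff; the schemas can also be confirmed directly from the database. A minimal sketch, assuming the three schemas are `api`, `auth` and `public` (as the tables referenced later on this page suggest):

```sql
-- List the main PostgSail schemas in the signalk database
SELECT nspname AS schema_name
  FROM pg_catalog.pg_namespace
 WHERE nspname IN ('api', 'auth', 'public');
```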
## Overview
- Insert data into table metadata from API using PostgREST
- Insert data into table metrics from API using PostgREST
- TimescaleDB Hypertable to store signalk metrics (see the sketch after this list)
- pgsql functions to generate logbook, stays, moorages
- CRON functions to process logbook, stays, moorages
- python functions for geo reverse and send notification via email and/or pushover
- Views statistics, timelapse, monitoring, logs
- Always store time in UTC
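A minimal sketch of the hypertable declaration, assuming the metrics table is `api.metrics` with a `time` column (as the queries later on this page use):

```sql
-- Turn api.metrics into a TimescaleDB hypertable partitioned on time,
-- so Signal K metrics are chunked and queried efficiently by time range.
SELECT create_hypertable('api.metrics', 'time');
```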
## Ingest flowchart
```mermaid
graph LR
    A[SignalK] -- HTTP POST --> B{PostgREST}
    B -- SQL --> C{PostgreSQL}
    C --> D((metadata trigger))
    C --> E((metrics trigger))
    D --> F{tbl.metadata}
    E --> G{tbl.metrics}
    E --> H{tbl.logs}
    E --> I{tbl.stays}
```
## pg_cron flowchart
```mermaid
flowchart TD
    A[pg_cron] --> B((cron_new_notification))
    A --> C((cron_pre_logbook))
    A --> D((cron_new_logbook))
    A --> E((cron_new_stay))
    A --> F((cron_monitor_offline))
    A --> G((cron_monitor_online))
    C --> K{Validate logbook details}
    D --> L{Update logbook details}
    E --> M{Update stay details}
    L --> N{Update Moorages details}
    M --> N{Update Moorages details}
    B --> O{Update account,vessel,otp}
    F --> P{Update metadata}
    G --> P
    A --> Q((cron_post_logbook))
    Q --> R{QGIS and notification}
```

Cron jobs are not processed by default: if you don't have the correct settings (SMTP, PushOver, Telegram), they can loop on errors and you could be blocked or banned by the external services.

Therefore, by default there are no active jobs, as they require external configuration settings (SMTP, PushOver, Telegram).
To activate all cron jobs, run the following SQL command:
```sql
UPDATE cron.job SET active = True;
```
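You can review which jobs are scheduled, and whether they are active, straight from pg_cron's catalog table; a minimal sketch:

```sql
-- List all pg_cron jobs with their schedule and activation state
SELECT jobid, jobname, schedule, active
  FROM cron.job
 ORDER BY jobid;
```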

Be sure to review your postgsail settings via SQL in the table `app_settings`:
```sql
SELECT * FROM public.app_settings;
```

### How to bypass OTP for a local install?

To skip the OTP process, add or update the following JSON key/value in the account preferences.
```json
"email_valid": true
```
SQL query:
```sql
UPDATE auth.accounts
    SET preferences='{"email_valid": true}'::jsonb || preferences
    WHERE email='your.email@domain.com';
```

The OTP is created and sent by email using a cron entry in postgres/cron/job.
```sql
SELECT * FROM auth.otp;
```

Accounts are stored in the table auth.accounts of the signalk database.
```sql
SELECT * FROM auth.accounts;
```

You should see a history in the table public.process_queue.
```sql
SELECT * from public.process_queue;
```

### How to turn off signups

If you just want to use this as a standalone application and don't want people to be able to sign up for an account:

```sql
revoke execute on function api.signup(text,text,text,text) from api_anonymous;
```
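To re-enable signups later, mirror the statement above with a grant; a minimal sketch:

```sql
-- Allow the anonymous role to call the signup endpoint again
grant execute on function api.signup(text,text,text,text) to api_anonymous;
```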

docs/README.md (206 changes)

@@ -1,2 +1,206 @@
Simple and scalable architecture.

## Architecture

Efficient, simple and scalable architecture.



For more clarity and visibility, the complete [Entity-Relationship Diagram (ERD)](https://github.com/xbgmsharp/postgsail/blob/main/docs/ERD/README.md) is exported as Mermaid, PNG and SVG files.

## Using PostgSail
### Development

A full-featured development environment.

#### With CodeSandbox

- Develop on [](https://codesandbox.io/p/github/xbgmsharp/postgsail/main)
- or via [direct link](https://codesandbox.io/p/github/xbgmsharp/postgsail/main)

#### With DevPod

- [](https://devpod.sh/open#https://github.com/xbgmsharp/postgsail/&workspace=postgsail&provider=docker&ide=openvscode)
- or via [direct link](https://devpod.sh/open#https://github.com/xbgmsharp/postgsail&workspace=postgsail&provider=docker&ide=openvscode)

#### With Docker Dev Environments
- [Open in Docker dev-envs!](https://open.docker.com/dashboard/dev-envs?url=https://github.com/xbgmsharp/postgsail/)


### On-premise (self-hosted)

This kind of deployment needs the [Docker application](https://www.docker.com/) to be installed and running. Check this [tutorial](https://www.docker.com/101-tutorial).

Docker runs pre-packaged applications (images), which can be retrieved as sources (Dockerfile and resources) to build, or already built from registries (private or public).

PostgSail depends heavily on [PostgreSQL](https://www.postgresql.org/). Check this [tutorial](https://www.postgresql.org/docs/current/tutorial.html).

#### pre-deploy configuration

To get these running, copy `.env.example` and rename it to `.env`, then set the values accordingly.

```bash
# cp .env.example .env
```

```bash
# nano .env
```

Notice that `PGRST_JWT_SECRET` must be at least 32 characters long.

`$ cat /dev/urandom | LC_ALL=C tr -dc 'a-zA-Z0-9' | fold -w 42 | head -n 1`

`PGSAIL_APP_URL` is the URL you connect to from your browser.

`PGSAIL_API_URL` is the URL the `PGSAIL_APP_URL` connects to.

`PGRST_DB_URI` is the URI the `PGSAIL_API_URL` connects to.

To summarize:
```mermaid
flowchart LR
    subgraph frontend
        direction TB
        A(PGSAIL_APP_URL)
        B(PGSAIL_API_URL)
    end
    subgraph backend
        direction TB
        B(PGSAIL_API_URL) -- SQL --> C(PGRST_DB_URI)
    end
    %% ^ These subgraphs are identical, except for the links to them:

    %% Link *to* subgraph1: subgraph1 direction is maintained

    User -- HTTP --> A
    User -- HTTP --> B
    %% Link *within* subgraph2:
    %% subgraph2 inherits the direction of the top-level graph (LR)

    Boat -- HTTP --> B
```

### Deploy

Two compose files are used. You can update the default settings by editing `docker-compose.yml` and `docker-compose.dev.yml` to your needs.

Now let's initialize the database.

#### Step 1. Initialize database

First let's import the SQL schema, execute:

```bash
$ docker compose up db
```

#### Step 2. Start backend (db, api)

Then launch the full backend stack (db, api), execute:

```bash
$ docker compose up db api
```

The API should be accessible via port HTTP/3000.
The database should be accessible via port TCP/5432.

You can connect to the database via a web GUI like [pgadmin](https://www.pgadmin.org/) or you can use a client like [dbeaver](https://dbeaver.io/).
```bash
$ docker compose -f docker-compose.yml -f docker-compose.dev.yml up pgadmin
```
Then connect to the web UI on port HTTP/5050.

#### Step 3. Start frontend (web)

Then launch the web frontend, execute:

```bash
$ docker compose up web
```
This step can take some time, as it will first do a build to generate the static website based on your settings.

The frontend should be accessible via port HTTP/8080.

Users are collaborating on an installation guide, [Self-hosted-installation-guide](https://github.com/xbgmsharp/postgsail/wiki/Self-hosted-installation-guide)

### SQL Configuration

Check and update your postgsail settings via SQL in the table `app_settings`:

```sql
SELECT * FROM app_settings;
```

```sql
UPDATE app_settings
    SET
        value = 'new_value'
    WHERE name = 'app.email_server';
```

As it is all about SQL, [read more](https://github.com/xbgmsharp/postgsail/blob/main/docs/ERD/README.md) about the database to configure your instance and explore your data.

### Ingest data

Next, to ingest data from Signal K, you need to install the [signalk-postgsail](https://github.com/xbgmsharp/signalk-postgsail) plugin on your Signal K server instance.

Also, if you like, you can import saillogger data using the postgsail helpers, [postgsail-helpers](https://github.com/xbgmsharp/postgsail-helpers).

You might want to import your InfluxDB 1.x data as well, using [outflux](https://github.com/timescale/outflux).
For InfluxDB 2.x and 3.x, you will need to enable the 1.x APIs to use them. Consult the InfluxDB documentation for more details.

Last, if you like, you can import the sample data from the Signal K NMEA Plaka log by running the tests.
If everything goes well, all tests pass and you should receive a few notifications by email, PushOver or Telegram.
[End-to-End (E2E) Testing.](https://github.com/xbgmsharp/postgsail/blob/main/tests/)

```
$ docker-compose up tests
```

### API Documentation

The OpenAPI description output depends on the permissions of the role that is contained in the JWT role claim.

Other applications can also use the [PostgSAIL API](https://petstore.swagger.io/?url=https://raw.githubusercontent.com/xbgmsharp/postgsail/main/openapi.json).

API anonymous:

```
$ curl http://localhost:3000/
```

API user_role:

```
$ curl http://localhost:3000/ -H 'Authorization: Bearer my_token_from_login_or_signup_fn'
```

API vessel_role:

```
$ curl http://localhost:3000/ -H 'Authorization: Bearer my_token_from_register_vessel_fn'
```

#### API main workflow

Check the [End-to-End (E2E) test sample](https://github.com/xbgmsharp/postgsail/blob/main/tests/).

### Docker dependencies

`docker-compose` is used to start environment dependencies. Dependencies consist of 3 containers:

- `timescaledb-postgis` alias `db`, PostgreSQL with the TimescaleDB extension along with the PostGIS extension.
- `postgrest` alias `api`, a standalone web server that turns your PostgreSQL database directly into a RESTful API.
- `grafana` alias `app`, to visualize and monitor your data.

### Optional docker images

- [pgAdmin](https://hub.docker.com/r/dpage/pgadmin4), web UI to monitor and manage multiple PostgreSQL instances
- [Swagger](https://hub.docker.com/r/swaggerapi/swagger-ui), web UI to visualize documentation from PostgREST

```
docker-compose -f docker-compose-optional.yml up
```
frontend (submodule, 2 changes)

Submodule frontend updated: 7ca5656336...bc4df1dd5e

```diff
@@ -387,7 +387,7 @@ BEGIN
         PERFORM grafana_py_fn(data_rec.name, data_rec.vessel_id, data_rec.owner_email, app_settings);
         -- Gather user settings
         user_settings := get_user_settings_from_vesselid_fn(data_rec.vessel_id::TEXT);
-        RAISE DEBUG '-> DEBUG cron_process_grafana_fn get_user_settings_from_vesselid_fn [%]', user_settings;
+        --RAISE DEBUG '-> DEBUG cron_process_grafana_fn get_user_settings_from_vesselid_fn [%]', user_settings;
         -- add user in keycloak
         PERFORM keycloak_auth_py_fn(data_rec.vessel_id, user_settings, app_settings);
         -- Send notification
@@ -406,6 +406,97 @@ COMMENT ON FUNCTION
     public.cron_process_grafana_fn
     IS 'init by pg_cron to check for new vessel pending grafana provisioning, if so perform grafana_py_fn';

+CREATE OR REPLACE FUNCTION public.cron_process_windy_fn() RETURNS void AS $$
+DECLARE
+    windy_rec record;
+    default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
+    last_metric TIMESTAMPTZ;
+    metric_rec record;
+    windy_metric jsonb;
+    app_settings jsonb;
+    user_settings jsonb;
+    windy_pws jsonb;
+BEGIN
+    -- Check for new observations pending update
+    RAISE NOTICE 'cron_windy_fn';
+    -- Gather url from app settings
+    app_settings := get_app_settings_fn();
+    -- Find users with Windy active and with an active vessel
+    -- Map account id to Windy Station ID
+    FOR windy_rec in
+        SELECT
+            a.id,a.email,v.vessel_id,v.name,
+            COALESCE((a.preferences->'windy_last_metric')::TEXT, default_last_metric::TEXT) as last_metric
+            FROM auth.accounts a
+            LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
+            LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
+            WHERE (a.preferences->'public_windy')::boolean = True
+                AND m.active = True
+    LOOP
+        RAISE NOTICE '-> cron_windy_fn for [%]', windy_rec;
+        PERFORM set_config('vessel.id', windy_rec.vessel_id, false);
+        --RAISE WARNING 'public.cron_process_windy_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
+        -- Gather user settings
+        user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
+        RAISE NOTICE '-> cron_windy_fn checking user_settings [%]', user_settings;
+        -- Get all metrics from the last windy_last_metric avg by 5 minutes
+        -- TODO json_agg to send all data in once, but issue with py jsonb transformation decimal.
+        FOR metric_rec in
+            SELECT time_bucket('5 minutes', m.time) AS time_bucket,
+                    avg((m.metrics->'environment.outside.temperature')::numeric) AS temperature,
+                    avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
+                    avg((m.metrics->'environment.outside.relativeHumidity')::numeric) AS rh,
+                    avg((m.metrics->'environment.wind.directionTrue')::numeric) AS winddir,
+                    avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
+                    max((m.metrics->'environment.wind.speedTrue')::numeric) AS gust,
+                    last(latitude, time) AS lat,
+                    last(longitude, time) AS lng
+                FROM api.metrics m
+                WHERE vessel_id = windy_rec.vessel_id
+                    AND m.time >= windy_rec.last_metric::TIMESTAMPTZ
+                GROUP BY time_bucket
+                ORDER BY time_bucket ASC LIMIT 100
+        LOOP
+            RAISE NOTICE '-> cron_windy_fn checking metrics [%]', metric_rec;
+            -- https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
+            -- temp from kelvin to celsius
+            -- winddir from radians to degrees
+            -- rh from ratio to percentage
+            SELECT jsonb_build_object(
+                'dateutc', metric_rec.time_bucket,
+                'station', windy_rec.id,
+                'name', windy_rec.name,
+                'lat', metric_rec.lat,
+                'lon', metric_rec.lng,
+                'wind', metric_rec.wind,
+                'gust', metric_rec.gust,
+                'pressure', metric_rec.pressure,
+                'winddir', radiantToDegrees(metric_rec.winddir::numeric),
+                'temp', kelvinToCel(metric_rec.temperature::numeric),
+                'rh', valToPercent(metric_rec.rh::numeric)
+                ) INTO windy_metric;
+            RAISE NOTICE '-> cron_windy_fn checking windy_metrics [%]', windy_metric;
+            SELECT windy_pws_py_fn(windy_metric, user_settings, app_settings) into windy_pws;
+            RAISE NOTICE '-> cron_windy_fn Windy PWS [%]', ((windy_pws->'header')::JSONB ? 'id');
+            IF NOT((user_settings->'settings')::JSONB ? 'windy') and ((windy_pws->'header')::JSONB ? 'id') then
+                RAISE NOTICE '-> cron_windy_fn new Windy PWS [%]', (windy_pws->'header')::JSONB->>'id';
+                -- Send metrics to Windy
+                PERFORM api.update_user_preferences_fn('{windy}'::TEXT, ((windy_pws->'header')::JSONB->>'id')::TEXT);
+                -- Send notification
+                PERFORM send_notification_fn('windy'::TEXT, user_settings::JSONB);
+            END IF;
+            -- Record last metrics time
+            SELECT metric_rec.time_bucket INTO last_metric;
+        END LOOP;
+        PERFORM api.update_user_preferences_fn('{windy_last_metric}'::TEXT, last_metric::TEXT);
+    END LOOP;
+END;
+$$ language plpgsql;
+-- Description
+COMMENT ON FUNCTION
+    public.cron_process_windy_fn
+    IS 'init by pg_cron to create (or update) station and uploading observations to Windy Personal Weather Station observations';

 -- CRON for Vacuum database
 CREATE FUNCTION cron_vacuum_fn() RETURNS void AS $$
 -- ERROR: VACUUM cannot be executed from a function
```
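`cron_process_windy_fn` above relies on three conversion helpers (`kelvinToCel`, `radiantToDegrees`, `valToPercent`) whose definitions are not part of this diff; a minimal sketch of what they could look like, with bodies assumed from the conversion comments in the function:

```sql
-- Kelvin to Celsius (assumed helper, not shown in this diff)
CREATE OR REPLACE FUNCTION kelvinToCel(temperature NUMERIC) RETURNS NUMERIC AS $$
    SELECT ROUND(temperature - 273.15, 2);
$$ LANGUAGE sql IMMUTABLE;

-- Radians to degrees, using PostgreSQL's built-in degrees()
CREATE OR REPLACE FUNCTION radiantToDegrees(rad NUMERIC) RETURNS NUMERIC AS $$
    SELECT ROUND(degrees(rad)::NUMERIC, 1);
$$ LANGUAGE sql IMMUTABLE;

-- Ratio (0..1) to percentage (0..100)
CREATE OR REPLACE FUNCTION valToPercent(ratio NUMERIC) RETURNS NUMERIC AS $$
    SELECT ratio * 100;
$$ LANGUAGE sql IMMUTABLE;
```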
|
||||
@@ -426,44 +517,305 @@ COMMENT ON FUNCTION
|
||||
IS 'init by pg_cron to full vacuum tables on schema api';
|
||||
|
||||
-- CRON for alerts notification
|
||||
CREATE FUNCTION cron_process_alerts_fn() RETURNS void AS $$
|
||||
CREATE OR REPLACE FUNCTION public.cron_alerts_fn() RETURNS void AS $$
|
||||
DECLARE
|
||||
alert_rec record;
|
||||
default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
|
||||
last_metric TIMESTAMPTZ;
|
||||
metric_rec record;
|
||||
app_settings JSONB;
|
||||
user_settings JSONB;
|
||||
alerting JSONB;
|
||||
_alarms JSONB;
|
||||
alarms TEXT;
|
||||
alert_default JSONB := '{
|
||||
"low_pressure_threshold": 990,
|
||||
"high_wind_speed_threshold": 30,
|
||||
"low_water_depth_threshold": 1,
|
||||
"min_notification_interval": 6,
|
||||
"high_pressure_drop_threshold": 12,
|
||||
"low_battery_charge_threshold": 90,
|
||||
"low_battery_voltage_threshold": 12.5,
|
||||
"low_water_temperature_threshold": 10,
|
||||
"low_indoor_temperature_threshold": 7,
|
||||
"low_outdoor_temperature_threshold": 3
|
||||
}';
|
||||
BEGIN
|
||||
-- Check for new event notification pending update
|
||||
RAISE NOTICE 'cron_process_alerts_fn';
|
||||
RAISE NOTICE 'cron_alerts_fn';
|
||||
FOR alert_rec in
|
||||
SELECT
|
||||
a.user_id,a.email,v.vessel_id
|
||||
FROM auth.accounts a, auth.vessels v, api.metadata m
|
||||
WHERE m.vessel_id = v.vessel_id
|
||||
AND a.email = v.owner_email
|
||||
AND (a.preferences->'alerting'->'enabled')::boolean = True
|
||||
a.user_id,a.email,v.vessel_id,
|
||||
COALESCE((a.preferences->'alert_last_metric')::TEXT, default_last_metric::TEXT) as last_metric,
|
||||
(alert_default || (a.preferences->'alerting')::JSONB) as alerting,
|
||||
(a.preferences->'alarms')::JSONB as alarms
|
||||
FROM auth.accounts a
|
||||
LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
|
||||
LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
|
||||
WHERE (a.preferences->'alerting'->'enabled')::boolean = True
|
||||
AND m.active = True
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_alert_rec_fn for [%]', alert_rec;
|
||||
RAISE NOTICE '-> cron_alerts_fn for [%]', alert_rec;
|
||||
PERFORM set_config('vessel.id', alert_rec.vessel_id, false);
|
||||
PERFORM set_config('user.email', alert_rec.email, false);
|
||||
--RAISE WARNING 'public.cron_process_alert_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
|
||||
-- Get time from the last metrics entry
|
||||
SELECT m.time INTO last_metric FROM api.metrics m WHERE vessel_id = alert_rec.vessel_id ORDER BY m.time DESC LIMIT 1;
|
||||
-- Get all metrics from the last 10 minutes
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(alert_rec.vessel_id::TEXT);
|
||||
RAISE NOTICE '-> cron_alerts_fn checking user_settings [%]', user_settings;
|
||||
-- Get all metrics from the last last_metric avg by 5 minutes
|
||||
FOR metric_rec in
|
||||
SELECT *
|
||||
SELECT time_bucket('5 minutes', m.time) AS time_bucket,
|
||||
avg((m.metrics->'environment.inside.temperature')::numeric) AS intemp,
|
||||
avg((m.metrics->'environment.outside.temperature')::numeric) AS outtemp,
|
||||
avg((m.metrics->'environment.water.temperature')::numeric) AS wattemp,
|
||||
avg((m.metrics->'environment.depth.belowTransducer')::numeric) AS watdepth,
|
||||
avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
|
||||
avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
|
||||
avg((m.metrics->'electrical.batteries.House.voltage')::numeric) AS voltage,
|
||||
avg((m.metrics->'electrical.batteries.House.capacity.stateOfCharge')::numeric) AS charge
|
||||
FROM api.metrics m
|
||||
WHERE vessel_id = alert_rec.vessel_id
|
||||
AND time >= last_metric - INTERVAL '10 MINUTES'
|
||||
ORDER BY m.time DESC LIMIT 100
|
||||
AND m.time >= alert_rec.last_metric::TIMESTAMPTZ
|
||||
GROUP BY time_bucket
|
||||
ORDER BY time_bucket ASC LIMIT 100
|
||||
LOOP
|
||||
RAISE NOTICE '-> cron_process_alert_rec_fn checking metrics [%]', metric_rec;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking metrics [%]', metric_rec;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking alerting [%]', alert_rec.alerting;
|
||||
--RAISE NOTICE '-> cron_alerts_fn checking debug [%] [%]', kelvinToCel(metric_rec.intemp), (alert_rec.alerting->'low_indoor_temperature_threshold');
|
||||
IF kelvinToCel(metric_rec.intemp) < (alert_rec.alerting->'low_indoor_temperature_threshold')::numeric then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_indoor_temperature_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_indoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.intemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.intemp) ||'"}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold';
|
||||
END IF;
|
||||
IF kelvinToCel(metric_rec.outtemp) < (alert_rec.alerting->'low_outdoor_temperature_threshold')::numeric then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_outdoor_temperature_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_outdoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.outtemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.outtemp) ||'"}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold';
|
||||
END IF;
|
||||
IF kelvinToCel(metric_rec.wattemp) < (alert_rec.alerting->'low_water_temperature_threshold')::numeric then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_water_temperature_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_water_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.wattemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_water_temperature_threshold value:'|| kelvinToCel(metric_rec.wattemp) ||'"}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold';
|
||||
END IF;
|
||||
IF metric_rec.watdepth < (alert_rec.alerting->'low_water_depth_threshold')::numeric then
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
|
||||
-- Get latest alarms
|
||||
SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
|
||||
-- Is alarm in the min_notification_interval time frame
|
||||
IF (
|
||||
((_alarms->'low_water_depth_threshold'->>'date') IS NULL) OR
|
||||
(((_alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ
|
||||
+ ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
|
||||
< metric_rec.time_bucket::TIMESTAMPTZ)
|
||||
) THEN
|
||||
-- Add alarm
|
||||
alarms := '{"low_water_depth_threshold": {"value": '|| metric_rec.watdepth ||', "date":"' || metric_rec.time_bucket || '"}}';
|
||||
-- Merge alarms
|
||||
SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
|
||||
-- Update alarms for user
|
||||
PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
|
||||
-- Gather user settings
|
||||
user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
|
||||
SELECT user_settings::JSONB || ('{"alert": "low_water_depth_threshold value:'|| metric_rec.watdepth ||'"}'::text)::JSONB into user_settings;
|
||||
-- Send notification
|
||||
PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
|
||||
-- DEBUG
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold +interval';
|
||||
END IF;
|
||||
RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold';
|
||||
END IF;
|
||||
            IF metric_rec.pressure < (alert_rec.alerting->'high_pressure_drop_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'high_pressure_drop_threshold'->>'date') IS NULL) OR
                    (((_alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ
                      + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                      < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"high_pressure_drop_threshold": {"value": '|| metric_rec.pressure ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) INTO _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "high_pressure_drop_threshold value:'|| metric_rec.pressure ||'"}'::text)::JSONB INTO user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold';
            END IF;
            IF metric_rec.wind > (alert_rec.alerting->'high_wind_speed_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'high_wind_speed_threshold'->>'date') IS NULL) OR
                    (((_alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ
                      + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                      < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"high_wind_speed_threshold": {"value": '|| metric_rec.wind ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) INTO _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "high_wind_speed_threshold value:'|| metric_rec.wind ||'"}'::text)::JSONB INTO user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold';
            END IF;
            IF metric_rec.voltage < (alert_rec.alerting->'low_battery_voltage_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_battery_voltage_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ
                      + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                      < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"low_battery_voltage_threshold": {"value": '|| metric_rec.voltage ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) INTO _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_battery_voltage_threshold value:'|| metric_rec.voltage ||'"}'::text)::JSONB INTO user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold';
            END IF;
            IF (metric_rec.charge*100) < (alert_rec.alerting->'low_battery_charge_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Is alarm in the min_notification_interval time frame
                IF (
                    ((_alarms->'low_battery_charge_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ
                      + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                      < metric_rec.time_bucket::TIMESTAMPTZ)
                ) THEN
                    -- Add alarm
                    alarms := '{"low_battery_charge_threshold": {"value": '|| (metric_rec.charge*100) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) INTO _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_battery_charge_threshold value:'|| (metric_rec.charge*100) ||'"}'::text)::JSONB INTO user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold';
            END IF;
            -- Record last metrics time
            SELECT metric_rec.time_bucket INTO last_metric;
        END LOOP;
        PERFORM api.update_user_preferences_fn('{alert_last_metric}'::TEXT, last_metric::TEXT);
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
-    public.cron_process_alerts_fn
+    public.cron_alerts_fn
    IS 'init by pg_cron to check for alerts';
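
-- Illustrative example (not part of the function): the gate above only fires
-- when the stored alarm date plus min_notification_interval hours lies before
-- the metric's time bucket. With the default 6 hour interval:
SELECT ('2024-02-01T08:00:00Z'::TIMESTAMPTZ
        + ((interval '1 hour') * 6::NUMERIC))
       < '2024-02-01T11:00:00Z'::TIMESTAMPTZ AS notify_again; -- false until after 14:00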

-- CRON for no vessel notification
@@ -625,7 +977,7 @@ COMMENT ON FUNCTION
-- Need to be in the postgres database.
\c postgres
-- CRON for clean up job details logs
-CREATE FUNCTION job_run_details_cleanup_fn() RETURNS void AS $$
+CREATE FUNCTION public.job_run_details_cleanup_fn() RETURNS void AS $$
DECLARE
BEGIN
    -- Remove job run log older than 3 months
@@ -105,27 +105,27 @@ INSERT INTO public.email_templates VALUES
    E'You requested a password recovery. Check your email!\n'),
    ('telegram_otp',
    'Telegram bot',
-    E'Hello,\nTo connect your account to a @postgsail_bot. Please type this verification code __OTP_CODE__ back to the bot.\nThe code is valid 15 minutes.\nThe PostgSail Team',
+    E'Hello,\nTo connect your account to a @postgsail_bot. Please type this verification code __OTP_CODE__ back to the bot.\nThe code is valid 15 minutes.\nFrancois',
    'Telegram bot',
    E'Hello,\nTo connect your account to a @postgsail_bot. Check your email!\n'),
    ('telegram_valid',
    'Telegram bot',
-    E'Hello __RECIPIENT__,\nCongratulations! You have just connect your account to your vessel, @postgsail_bot.\n\nThe PostgSail Team',
+    E'Hello __RECIPIENT__,\nCongratulations! You have just connect your account to your vessel, @postgsail_bot.\nFrancois',
    'Telegram bot!',
    E'Congratulations!\nYou have just connect your account to your vessel, @postgsail_bot.\n'),
    ('no_vessel',
    'PostgSail add your boat',
-    E'Hello __RECIPIENT__,\nYou created an account on PostgSail but you have not added your boat yet.\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
+    E'Hello __RECIPIENT__,\nYou created an account on PostgSail but you have not added your boat yet.\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nFrancois',
    'PostgSail next step',
    E'Hello,\nYou should create your vessel. Check your email!\n'),
    ('no_metadata',
    'PostgSail connect your boat',
-    E'Hello __RECIPIENT__,\nYou created an account on PostgSail but you have not connected your boat yet.\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nThe PostgSail Team',
+    E'Hello __RECIPIENT__,\nYou created an account on PostgSail but you have not connected your boat yet.\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nFrancois',
    'PostgSail next step',
    E'Hello,\nYou should connect your vessel. Check your email!\n'),
    ('no_activity',
    'PostgSail boat inactivity',
-    E'Hello __RECIPIENT__,\nWe don\'t see any activity on your account, do you need any assistance?\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nThe PostgSail Team.',
+    E'Hello __RECIPIENT__,\nWe don\'t see any activity on your account, do you need any assistance?\nIf you need any assistance, I would be happy to help. It is free and an open-source.\nFrancois.',
    'PostgSail inactivity!',
    E'We detected inactivity. Check your email!\n'),
    ('deactivated',
@@ -137,7 +137,17 @@ INSERT INTO public.email_templates VALUES
    'PostgSail Grafana integration',
    E'Hello __RECIPIENT__,\nCongratulations! You unlocked Grafana dashboard.\nSee more details at https://app.openplotter.cloud\nHappy sailing!\nFrancois',
    'PostgSail Grafana!',
-    E'Congratulations!\nYou unlocked Grafana dashboard.\nSee more details at https://app.openplotter.cloud\n');
+    E'Congratulations!\nYou unlocked Grafana dashboard.\nSee more details at https://app.openplotter.cloud\n'),
+    ('windy',
+    'PostgSail Windy Weather station',
+    E'Hello __RECIPIENT__,\nCongratulations! Your boat is now a Windy Weather station.\nSee more details at __APP_URL__/windy\nHappy sailing!\nFrancois',
+    'PostgSail Windy!',
+    E'Congratulations!\nYour boat is now a Windy Weather station.\nSee more details at __APP_URL__/windy\n'),
+    ('alert',
+    'PostgSail Alert',
+    E'Hello __RECIPIENT__,\nWe detected an alert __ALERT__.\nSee more details at __APP_URL__\nStay safe.\nFrancois',
+    'PostgSail Alert!',
+    E'We detected an alert __ALERT__.\n');

---------------------------------------------------------------------------
-- Queue handling

@@ -829,7 +829,8 @@ BEGIN
        OR name LIKE 'app.url'
        OR name LIKE 'app.telegram%'
        OR name LIKE 'app.grafana_admin_uri'
-        OR name LIKE 'app.keycloak_uri';
+        OR name LIKE 'app.keycloak_uri'
+        OR name LIKE 'app.windy_apikey';
END;
$get_app_settings$
LANGUAGE plpgsql;
@@ -942,9 +943,7 @@ AS $get_user_settings_from_vesselid$
        'boat' , v.name,
        'recipient', a.first,
        'email', v.owner_email,
-        'settings', a.preferences,
-        'pushover_key', a.preferences->'pushover_key'
-        --'badges', a.preferences->'badges'
+        'settings', a.preferences
    ) INTO user_settings
    FROM auth.accounts a, auth.vessels v, api.metadata m
    WHERE m.vessel_id = v.vessel_id

@@ -196,3 +196,45 @@ language plpgsql volatile;
COMMENT ON FUNCTION
    public.uuid_generate_v7
    IS 'Generate UUID v7, based off the IETF draft, https://datatracker.ietf.org/doc/draft-peabody-dispatch-new-uuid-format/';
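
-- Illustrative usage (not part of the migration): v7 UUIDs embed a timestamp,
-- so successive calls sort by generation time.
SELECT public.uuid_generate_v7();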

---------------------------------------------------------------------------
-- Conversion helpers
--
CREATE OR REPLACE FUNCTION public.kelvinToCel(IN temperature NUMERIC)
RETURNS NUMERIC
AS $$
BEGIN
    -- Kelvin to Celsius, rounded to one decimal place
    RETURN ROUND(((temperature)::numeric - 273.15) * 10) / 10;
END
$$
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
    public.kelvinToCel
    IS 'convert Kelvin to Celsius';

CREATE OR REPLACE FUNCTION public.radiantToDegrees(IN angle NUMERIC)
RETURNS NUMERIC
AS $$
BEGIN
    -- Radians to degrees, rounded to one decimal place
    RETURN ROUND(((angle)::numeric * 57.2958) * 10) / 10;
END
$$
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
    public.radiantToDegrees
    IS 'convert radians to degrees';

CREATE OR REPLACE FUNCTION public.valToPercent(IN val NUMERIC)
RETURNS NUMERIC
AS $$
BEGIN
    RETURN (val * 100);
END
$$
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
    public.valToPercent
    IS 'convert a ratio to a percentage';
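
-- Illustrative sanity check of the helpers above (not part of the migration):
SELECT public.kelvinToCel(283.15) AS celsius,      -- 10.0
       public.radiantToDegrees(1.5708) AS degrees, -- 90.0
       public.valToPercent(0.87) AS percent;       -- 87.00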

@@ -142,6 +142,8 @@ AS $send_email_py$
        email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
    if 'reset_qs' in _user and _user['reset_qs']:
        email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])
+    if 'alert' in _user and _user['alert']:
+        email_content = email_content.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        email_content = email_content.replace('__APP_URL__', app['app.url'])
@@ -231,6 +233,8 @@ AS $send_pushover_py$
        pushover_message = pushover_message.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        pushover_message = pushover_message.replace('__BADGE_NAME__', _user['badge'])
+    if 'alert' in _user and _user['alert']:
+        pushover_message = pushover_message.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        pushover_message = pushover_message.replace('__APP_URL__', app['app.url'])
@@ -307,6 +311,8 @@ AS $send_telegram_py$
        telegram_message = telegram_message.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        telegram_message = telegram_message.replace('__BADGE_NAME__', _user['badge'])
+    if 'alert' in _user and _user['alert']:
+        telegram_message = telegram_message.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        telegram_message = telegram_message.replace('__APP_URL__', app['app.url'])
@@ -515,16 +521,22 @@ AS $grafana_py$
    plpy.error('Error no grafana_admin_uri defined, check app settings')
    return None

+b_name = None
+if not _v_name:
+    b_name = _v_id
+else:
+    b_name = _v_name
+
# add vessel org
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com',
           'Accept': 'application/json', 'Content-Type': 'application/json'}
path = 'api/orgs'
url = f'{grafana_uri}/{path}'
-data_dict = {'name':_v_name}
+data_dict = {'name':b_name}
data = json.dumps(data_dict)
r = requests.post(url, data=data, headers=headers)
#print(r.text)
-plpy.notice(r.json())
+#plpy.notice(r.json())
if r.status_code == 200 and "orgId" in r.json():
    org_id = r.json()['orgId']
else:
@@ -538,7 +550,7 @@ AS $grafana_py$
data = json.dumps(data_dict)
r = requests.post(url, data=data, headers=headers)
#print(r.text)
-plpy.notice(r.json())
+#plpy.notice(r.json())
if r.status_code == 200 and "id" in r.json():
    user_id = r.json()['id']
else:
@@ -550,7 +562,7 @@ AS $grafana_py$
url = f'{grafana_uri}/{path}'
r = requests.get(url, headers=headers)
#print(r.text)
-plpy.notice(r.json())
+#plpy.notice(r.json())
data_source = r.json()
data_source['id'] = 0
data_source['orgId'] = org_id
@@ -567,7 +579,7 @@ AS $grafana_py$
data = json.dumps(data_source)
headers['X-Grafana-Org-Id'] = str(org_id)
r = requests.post(url, data=data, headers=headers)
-plpy.notice(r.json())
+#plpy.notice(r.json())
del headers['X-Grafana-Org-Id']
if r.status_code != 200 and "id" not in r.json():
    plpy.error('Error grafana add data_source to vessel org')
@@ -581,7 +593,7 @@ AS $grafana_py$
    if 'X-Grafana-Org-Id' in headers:
        del headers['X-Grafana-Org-Id']
    r = requests.get(url, headers=headers)
-    plpy.notice(r.json())
+    #plpy.notice(r.json())
    if r.status_code != 200 and "id" not in r.json():
        plpy.error('Error grafana read dashboard template')
        return
@@ -598,7 +610,7 @@ AS $grafana_py$
    new_data = data.replace('PCC52D03280B7034C', data_source['uid'])
    headers['X-Grafana-Org-Id'] = str(org_id)
    r = requests.post(url, data=new_data, headers=headers)
-    plpy.notice(r.json())
+    #plpy.notice(r.json())
    if r.status_code != 200 and "id" not in r.json():
        plpy.error('Error grafana add dashboard to vessel org')
        return
@@ -612,7 +624,7 @@ AS $grafana_py$
    data = json.dumps(home_dashboard)
    headers['X-Grafana-Org-Id'] = str(org_id)
    r = requests.patch(url, data=data, headers=headers)
-    plpy.notice(r.json())
+    #plpy.notice(r.json())
    if r.status_code != 200:
        plpy.error('Error grafana update org preferences')
        return
@@ -682,7 +694,7 @@ $keycloak_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.keycloak_py_fn
-    IS 'Return set oauth user attribute into keycloak using plpython3u';
+    IS 'Set oauth user attribute into keycloak using plpython3u';

DROP FUNCTION IF EXISTS keycloak_auth_py_fn;
CREATE OR REPLACE FUNCTION keycloak_auth_py_fn(IN _v_id TEXT,
@@ -726,7 +738,7 @@ AS $keycloak_auth_py$
#plpy.notice(url)
if r.status_code == 200 and 'access_token' in r.json():
    response = r.json()
-    plpy.notice(response)
+    #plpy.notice(response)
    _headers['Authorization'] = 'Bearer '+ response['access_token']
    _headers['Content-Type'] = 'application/json'
    url = f'{_.scheme}://{host}/admin/realms/postgsail/users'
@@ -738,7 +750,7 @@ AS $keycloak_auth_py$
        "emailVerified": True,
        "requiredActions":["UPDATE_PROFILE", "UPDATE_PASSWORD"]
    }
-    plpy.notice(_payload)
+    #plpy.notice(_payload)
    data = json.dumps(_payload)
    r = requests.post(url, headers=_headers, data=data, timeout=(5, 60))
    if r.status_code != 201:
@@ -750,7 +762,7 @@ AS $keycloak_auth_py$
        plpy.notice('Created user : {u} {t}, {l}'.format(u=_payload['email'], t=r.text, l=r.headers['location']))
        user_url = "{user_url}/execute-actions-email".format(user_url=r.headers['location'])
        _payload = ["UPDATE_PASSWORD"]
-        plpy.notice(_payload)
+        #plpy.notice(_payload)
        data = json.dumps(_payload)
        r = requests.put(user_url, headers=_headers, data=data, timeout=(5, 60))
        if r.status_code != 204:
@@ -765,4 +777,83 @@ $keycloak_auth_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.keycloak_auth_py_fn
-    IS 'Return set oauth user attribute into keycloak using plpython3u';
+    IS 'Create an oauth user into keycloak using plpython3u';

CREATE OR REPLACE FUNCTION windy_pws_py_fn(IN metric JSONB,
    IN _user JSONB, IN app JSONB) RETURNS JSONB
AS $windy_pws_py$
"""
Send environment data from boat instruments to Windy as a Personal Weather Station (PWS)
https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
"""
import requests
import json
import decimal

if 'app.windy_apikey' not in app or not app['app.windy_apikey']:
    plpy.error('Error no windy_apikey defined, check app settings')
    return None
if 'station' not in metric or not metric['station']:
    plpy.error('Error no metrics defined')
    return None
if 'temp' not in metric or not metric['temp']:
    plpy.error('Error no metrics defined')
    return None
if not _user:
    plpy.error('Error no user defined, check user settings')
    return None

_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com', 'Content-Type': 'application/json'}
_payload = {
    'stations': [
        { 'station': int(decimal.Decimal(metric['station'])),
          'name': metric['name'],
          'shareOption': 'Open',
          'type': 'SignalK PostgSail Plugin',
          'provider': 'PostgSail',
          'url': 'https://iot.openplotter.cloud/{name}/monitoring'.format(name=metric['name']),
          'lat': float(decimal.Decimal(metric['lat'])),
          'lon': float(decimal.Decimal(metric['lon'])),
          'elevation': 1 }
    ],
    'observations': [
        { 'station': int(decimal.Decimal(metric['station'])),
          'temp': float(decimal.Decimal(metric['temp'])),
          'wind': round(float(decimal.Decimal(metric['wind']))),
          'gust': round(float(decimal.Decimal(metric['gust']))),
          'winddir': int(decimal.Decimal(metric['winddir'])),
          'pressure': int(decimal.Decimal(metric['pressure'])),
          'rh': float(decimal.Decimal(metric['rh'])) }
    ]}
#print(_payload)
#plpy.notice(_payload)
data = json.dumps(_payload)
api_url = 'https://stations.windy.com/pws/update/{api_key}'.format(api_key=app['app.windy_apikey'])
r = requests.post(api_url, data=data, headers=_headers, timeout=(5, 60))
#print(r.text)
#plpy.notice(api_url)
if r.status_code == 200:
    #print('Data sent successfully!')
    plpy.notice('Data sent successfully to Windy!')
    #plpy.notice(api_url)
    if 'windy' not in _user['settings']:
        api_url = 'https://stations.windy.com/pws/station/{api_key}/{station}'.format(api_key=app['app.windy_apikey'], station=metric['station'])
        #print(r.text)
        #plpy.notice(api_url)
        r = requests.get(api_url, timeout=(5, 60))
        if r.status_code == 200:
            #print('Windy Personal Weather Station created successfully in Windy Stations!')
            plpy.notice('Windy Personal Weather Station created successfully in Windy Stations!')
            return r.json()
        else:
            plpy.error(f'Failed to gather PWS details. Status code: {r.status_code}')
else:
    plpy.error(f'Failed to send data. Status code: {r.status_code}')
    #print(f'Failed to send data. Status code: {r.status_code}')
    #print(r.text)
return {}
$windy_pws_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.windy_pws_py_fn
    IS 'Forward vessel data to Windy as a Personal Weather Station using plpython3u';
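
-- Illustrative invocation (not part of the migration): the jsonb payloads mirror
-- what the Windy cron job assembles; station values here are made up.
SELECT windy_pws_py_fn(
    '{"station": 1001, "name": "aava", "lat": 59.8, "lon": 23.2, "temp": 10.1,
      "wind": 6, "gust": 8, "winddir": 180, "pressure": 101325, "rh": 78}'::JSONB,
    '{"settings": {}}'::JSONB,
    '{"app.windy_apikey": "your-windy-api-key"}'::JSONB);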

@@ -49,6 +49,9 @@ SELECT cron.schedule('cron_monitor_online', '*/10 * * * *', 'select public.cron_
-- Create an every 5 minute job cron_process_grafana_fn
SELECT cron.schedule('cron_grafana', '*/5 * * * *', 'select public.cron_process_grafana_fn()');

+-- Create an every 5 minute job cron_process_windy_fn
+SELECT cron.schedule('cron_windy', '*/5 * * * *', 'select public.cron_windy_fn()');
+
-- Notification
-- Create an every 1 minute job cron_process_new_notification_queue_fn, new_account, new_vessel, _new_account_otp
SELECT cron.schedule('cron_new_notification', '*/1 * * * *', 'select public.cron_process_new_notification_fn()');
@@ -68,23 +71,23 @@ SELECT cron.schedule('cron_reindex_auth', '1 23 1 * *', 'REINDEX TABLE CONCURREN
-- Any other maintenance required?

-- OTP
--- Create a every 15 minute job cron_process_prune_otp_fn
-SELECT cron.schedule('cron_prune_otp', '*/15 * * * *', 'select public.cron_process_prune_otp_fn()');
+-- Create an every 15 minute job cron_prune_otp_fn
+SELECT cron.schedule('cron_prune_otp', '*/15 * * * *', 'select public.cron_prune_otp_fn()');

-- Alerts
--- Create a every 11 minute job cron_process_alerts_fn
---SELECT cron.schedule('cron_alerts', '*/11 * * * *', 'select public.cron_process_alerts_fn()');
+-- Create an every 11 minute job cron_alerts_fn
+SELECT cron.schedule('cron_alerts', '*/11 * * * *', 'select public.cron_alerts_fn()');

-- Notifications/Reminders of no vessel & no metadata & no activity
--- At 08:05 on Sunday.
-SELECT cron.schedule('cron_no_vessel', '5 8 */4 * 0', 'select public.cron_process_no_vessel_fn()');
-SELECT cron.schedule('cron_no_metadata', '5 8 */4 * 0', 'select public.cron_process_no_metadata_fn()');
-SELECT cron.schedule('cron_no_activity', '5 8 */4 * 0', 'select public.cron_process_no_activity_fn()');
+-- At 08:05 on every 4th day-of-month if it's on Sunday.
+SELECT cron.schedule('cron_no_vessel', '5 8 */4 * 0', 'select public.cron_no_vessel_fn()');
+SELECT cron.schedule('cron_no_metadata', '5 8 */4 * 0', 'select public.cron_no_metadata_fn()');
+SELECT cron.schedule('cron_no_activity', '5 8 */4 * 0', 'select public.cron_no_activity_fn()');

-- Cron job settings
UPDATE cron.job SET database = 'signalk';
-UPDATE cron.job SET username = 'username'; -- TODO update to scheduler, pending process_queue update
+UPDATE cron.job SET username = current_user; -- TODO update to scheduler, pending process_queue update
--UPDATE cron.job SET username = 'username' where jobname = 'cron_vacuum'; -- TODO Update to superuser for vacuum permissions
UPDATE cron.job SET nodename = '/var/run/postgresql/'; -- VS default localhost ??
UPDATE cron.job SET database = 'postgres' WHERE jobname = 'job_run_details_cleanup';
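
-- Illustrative sanity check (not part of the migration): confirm the jobs and
-- the database/username assignments applied above.
SELECT jobname, schedule, database, username, active FROM cron.job ORDER BY jobname;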

initdb/99_migrations_202401.sql (new file, 457 lines)
@@ -0,0 +1,457 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration January 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Force timezone, just in case'
set timezone to 'UTC';

COMMENT ON FUNCTION
    public.cron_process_new_moorage_fn
    IS 'Deprecated, init by pg_cron to check for new moorage pending update, if so perform process_moorage_queue_fn';

DROP FUNCTION IF EXISTS reverse_geoip_py_fn;
CREATE OR REPLACE FUNCTION reverse_geoip_py_fn(IN _ip TEXT) RETURNS JSONB
AS $reverse_geoip_py$
"""
Return ipapi.co ip details
"""
import requests
import json

# requests
url = f'https://ipapi.co/{_ip}/json/'
r = requests.get(url)
#print(r.text)
#plpy.notice('IP [{}] [{}]'.format(_ip, r.status_code))
if r.status_code == 200:
    #plpy.notice('Got [{}] [{}]'.format(r.text, r.status_code))
    return r.json()
else:
    plpy.error('Failed to get ip details')
return {}
$reverse_geoip_py$ IMMUTABLE strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.reverse_geoip_py_fn
    IS 'Retrieve reverse geo IP location via ipapi.co using plpython3u';
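
-- Illustrative usage (not part of the migration), with a well-known public IP:
SELECT reverse_geoip_py_fn('8.8.8.8');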

DROP FUNCTION IF EXISTS overpass_py_fn;
CREATE OR REPLACE FUNCTION overpass_py_fn(IN lon NUMERIC, IN lat NUMERIC,
    OUT geo JSONB) RETURNS JSONB
AS $overpass_py$
"""
Return https://overpass-turbo.eu seamark details within 400m
https://overpass-turbo.eu/s/1EaG
https://wiki.openstreetmap.org/wiki/Key:seamark:type
"""
import requests
import json
import urllib.parse

headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
payload = """
[out:json][timeout:20];
is_in({0},{1})->.result_areas;
(
  area.result_areas["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
  area.result_areas["leisure"="marina"][~"name"~"."];
);
out tags;
nwr(around:400.0,{0},{1})->.all;
(
  nwr.all["seamark:type"~"(mooring|harbour)"][~"^seamark:.*:category$"~"."];
  nwr.all["seamark:type"~"(anchorage|anchor_berth|berth)"];
  nwr.all["leisure"="marina"];
  nwr.all["natural"~"(bay|beach)"];
);
out tags;
""".format(lat, lon)
data = urllib.parse.quote(payload, safe="")
url = f'https://overpass-api.de/api/interpreter?data={data}'
r = requests.get(url, headers=headers)
#print(r.text)
#plpy.notice(url)
plpy.notice('overpass-api coord lon[{}] lat[{}] [{}]'.format(lon, lat, r.status_code))
if r.status_code == 200 and "elements" in r.json():
    r_dict = r.json()
    plpy.notice('overpass-api Got [{}]'.format(r_dict["elements"]))
    if r_dict["elements"]:
        if "tags" in r_dict["elements"][0] and r_dict["elements"][0]["tags"]:
            return r_dict["elements"][0]["tags"]  # return the first element
    return {}
else:
    plpy.notice('overpass-api Failed to get overpass-api details')
    return {}
$overpass_py$ IMMUTABLE strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.overpass_py_fn
    IS 'Return https://overpass-turbo.eu seamark details within 400m using plpython3u';
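
-- Illustrative usage (not part of the migration): seamarks near a position,
-- arguments are (lon, lat), here central Helsinki as an example.
SELECT overpass_py_fn(24.9384, 60.1699);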

CREATE OR REPLACE FUNCTION get_app_settings_fn(OUT app_settings jsonb)
RETURNS jsonb
AS $get_app_settings$
DECLARE
BEGIN
    SELECT
        jsonb_object_agg(name, value) INTO app_settings
    FROM
        public.app_settings
    WHERE
        name LIKE 'app.email%'
        OR name LIKE 'app.pushover%'
        OR name LIKE 'app.url'
        OR name LIKE 'app.telegram%'
        OR name LIKE 'app.grafana_admin_uri'
        OR name LIKE 'app.keycloak_uri';
END;
$get_app_settings$
LANGUAGE plpgsql;
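
-- Illustrative check (not part of the migration): inspect the aggregated blob.
SELECT jsonb_pretty(get_app_settings_fn());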

CREATE OR REPLACE FUNCTION keycloak_auth_py_fn(IN _v_id TEXT,
    IN _user JSONB, IN app JSONB) RETURNS JSONB
AS $keycloak_auth_py$
"""
Add a keycloak user
"""
import requests
import json
import urllib.parse

safe_uri = host = user = pwd = None
if 'app.keycloak_uri' in app and app['app.keycloak_uri']:
    #safe_uri = urllib.parse.quote(app['app.keycloak_uri'], safe=':/?&=')
    _ = urllib.parse.urlparse(app['app.keycloak_uri'])
    host = _.netloc.split('@')[-1]
    user = _.netloc.split(':')[0]
    pwd = _.netloc.split(':')[1].split('@')[0]
else:
    plpy.error('Error no keycloak_uri defined, check app settings')
    return None

if not host or not user or not pwd:
    plpy.error('Error parsing keycloak_uri, check app settings')
    return None

if 'email' not in _user or not _user['email']:
    plpy.error('Error parsing user email, check user settings')
    return None

if not _v_id:
    plpy.error('Error parsing vessel_id')
    return None

_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
_payload = {'client_id':'admin-cli','grant_type':'password','username':user,'password':pwd}
url = f'{_.scheme}://{host}/realms/master/protocol/openid-connect/token'
r = requests.post(url, headers=_headers, data=_payload, timeout=(5, 60))
#print(r.text)
#plpy.notice(url)
if r.status_code == 200 and 'access_token' in r.json():
    response = r.json()
    plpy.notice(response)
    _headers['Authorization'] = 'Bearer '+ response['access_token']
    _headers['Content-Type'] = 'application/json'
    url = f'{_.scheme}://{host}/admin/realms/postgsail/users'
    _payload = {
        "enabled": "true",
        "email": _user['email'],
        "firstName": _user['recipient'],
        "attributes": {"vessel_id": _v_id},
        "emailVerified": True,
        "requiredActions":["UPDATE_PROFILE", "UPDATE_PASSWORD"]
    }
    plpy.notice(_payload)
    data = json.dumps(_payload)
    r = requests.post(url, headers=_headers, data=data, timeout=(5, 60))
    if r.status_code != 201:
        #print("Error creating user: {status}".format(status=r.status_code))
        plpy.error('Error creating user: {user} {status}'.format(user=_payload['email'], status=r.status_code))
        return None
    else:
        #print("Created user : {u}]".format(u=_payload['email']))
        plpy.notice('Created user : {u} {t}, {l}'.format(u=_payload['email'], t=r.text, l=r.headers['location']))
        user_url = "{user_url}/execute-actions-email".format(user_url=r.headers['location'])
        _payload = ["UPDATE_PASSWORD"]
        plpy.notice(_payload)
        data = json.dumps(_payload)
        r = requests.put(user_url, headers=_headers, data=data, timeout=(5, 60))
        if r.status_code != 204:
            plpy.error('Error execute-actions-email: {u} {s}'.format(u=_user['email'], s=r.status_code))
        else:
            plpy.notice('execute-actions-email: {u} {s}'.format(u=_user['email'], s=r.status_code))
        return None
else:
    plpy.error('Error getting admin access_token: {status}'.format(status=r.status_code))
    return None
$keycloak_auth_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.keycloak_auth_py_fn
    IS 'Return set oauth user attribute into keycloak using plpython3u';

CREATE OR REPLACE FUNCTION keycloak_py_fn(IN user_id TEXT, IN vessel_id TEXT,
    IN app JSONB) RETURNS JSONB
AS $keycloak_py$
"""
Add vessel_id user attribute to keycloak user {user_id}
"""
import requests
import json
import urllib.parse

safe_uri = host = user = pwd = None
if 'app.keycloak_uri' in app and app['app.keycloak_uri']:
    #safe_uri = urllib.parse.quote(app['app.keycloak_uri'], safe=':/?&=')
    _ = urllib.parse.urlparse(app['app.keycloak_uri'])
    host = _.netloc.split('@')[-1]
    user = _.netloc.split(':')[0]
    pwd = _.netloc.split(':')[1].split('@')[0]
else:
    plpy.error('Error no keycloak_uri defined, check app settings')
    return None

if not host or not user or not pwd:
    plpy.error('Error parsing keycloak_uri, check app settings')
    return None

_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com'}
_payload = {'client_id':'admin-cli','grant_type':'password','username':user,'password':pwd}
url = f'{_.scheme}://{host}/realms/master/protocol/openid-connect/token'
r = requests.post(url, headers=_headers, data=_payload, timeout=(5, 60))
#print(r.text)
#plpy.notice(url)
if r.status_code == 200 and 'access_token' in r.json():
    response = r.json()
    plpy.notice(response)
    _headers['Authorization'] = 'Bearer '+ response['access_token']
    _headers['Content-Type'] = 'application/json'
    _payload = { 'attributes': {'vessel_id': vessel_id} }
    url = f'{_.scheme}://{host}/admin/realms/postgsail/users/{user_id}'
    #plpy.notice(url)
    #plpy.notice(_payload)
    data = json.dumps(_payload)
    r = requests.put(url, headers=_headers, data=data, timeout=(5, 60))
    if r.status_code != 204:
        plpy.notice("Error updating user: {status} [{text}]".format(
            status=r.status_code, text=r.text))
        return None
    else:
        plpy.notice("Updated user : {user} [{text}]".format(user=user_id, text=r.text))
else:
    plpy.notice('Error getting admin access_token: {status} [{text}]'.format(
        status=r.status_code, text=r.text))
    return None
$keycloak_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;

UPDATE public.email_templates
    SET pushover_message='Congratulations!
You unlocked Grafana dashboard.
See more details at https://app.openplotter.cloud
',email_content='Hello __RECIPIENT__,
Congratulations! You unlocked Grafana dashboard.
See more details at https://app.openplotter.cloud
Happy sailing!
Francois'
    WHERE "name"='grafana';

CREATE OR REPLACE FUNCTION public.cron_process_grafana_fn()
RETURNS void
LANGUAGE plpgsql
AS $function$
DECLARE
    process_rec record;
    data_rec record;
    app_settings jsonb;
    user_settings jsonb;
BEGIN
    -- We run grafana provisioning only after the first received vessel metadata
    -- Check for new vessel metadata pending grafana provisioning
    RAISE NOTICE 'cron_process_grafana_fn';
    FOR process_rec in
        SELECT * from process_queue
            where channel = 'grafana' and processed is null
            order by stored asc
    LOOP
        RAISE NOTICE '-> cron_process_grafana_fn [%]', process_rec.payload;
        -- Gather url from app settings
        app_settings := get_app_settings_fn();
        -- Get vessel details based on metadata id
        SELECT * INTO data_rec
            FROM api.metadata m, auth.vessels v
            WHERE m.id = process_rec.payload::INTEGER
                AND m.vessel_id = v.vessel_id;
        -- As we got data from the vessel, we can do the grafana provisioning.
        PERFORM grafana_py_fn(data_rec.name, data_rec.vessel_id, data_rec.owner_email, app_settings);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(data_rec.vessel_id::TEXT);
        RAISE DEBUG '-> DEBUG cron_process_grafana_fn get_user_settings_from_vesselid_fn [%]', user_settings;
        -- add user in keycloak
        PERFORM keycloak_auth_py_fn(data_rec.vessel_id, user_settings, app_settings);
        -- Send notification
        PERFORM send_notification_fn('grafana'::TEXT, user_settings::JSONB);
        -- update process_queue entry as processed
        UPDATE process_queue
            SET processed = NOW()
            WHERE id = process_rec.id;
        RAISE NOTICE '-> cron_process_grafana_fn updated process_queue table [%]', process_rec.id;
    END LOOP;
END;
$function$
;
COMMENT ON FUNCTION public.cron_process_grafana_fn() IS 'init by pg_cron to check for new vessel pending grafana provisioning, if so perform grafana_py_fn';

-- DROP FUNCTION public.grafana_py_fn(text, text, text, jsonb);

CREATE OR REPLACE FUNCTION public.grafana_py_fn(_v_name text, _v_id text, _u_email text, app jsonb)
RETURNS void
TRANSFORM FOR TYPE jsonb
LANGUAGE plpython3u
AS $function$
"""
https://grafana.com/docs/grafana/latest/developers/http_api/
Create organization based on vessel name
Create user based on user email
Add user to organization
Add data_source to organization
Add dashboard to organization
Update organization preferences
"""
import requests
import json
import re

grafana_uri = None
if 'app.grafana_admin_uri' in app and app['app.grafana_admin_uri']:
    grafana_uri = app['app.grafana_admin_uri']
else:
    plpy.error('Error no grafana_admin_uri defined, check app settings')
    return None

b_name = None
if not _v_name:
    b_name = _v_id
else:
    b_name = _v_name

# add vessel org
headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com',
           'Accept': 'application/json', 'Content-Type': 'application/json'}
path = 'api/orgs'
url = f'{grafana_uri}/{path}'
data_dict = {'name':b_name}
data = json.dumps(data_dict)
r = requests.post(url, data=data, headers=headers)
#print(r.text)
plpy.notice(r.json())
if r.status_code == 200 and "orgId" in r.json():
    org_id = r.json()['orgId']
else:
    plpy.error('Error grafana add vessel org {}'.format(r.json()))
    return None

# add user to vessel org
path = 'api/admin/users'
url = f'{grafana_uri}/{path}'
data_dict = {'orgId':org_id, 'email':_u_email, 'password':'asupersecretpassword'}
data = json.dumps(data_dict)
r = requests.post(url, data=data, headers=headers)
#print(r.text)
plpy.notice(r.json())
if r.status_code == 200 and "id" in r.json():
    user_id = r.json()['id']
else:
    plpy.error('Error grafana add user to vessel org')
    return

# read data_source
path = 'api/datasources/1'
url = f'{grafana_uri}/{path}'
r = requests.get(url, headers=headers)
#print(r.text)
plpy.notice(r.json())
data_source = r.json()
data_source['id'] = 0
data_source['orgId'] = org_id
data_source['uid'] = "ds_" + _v_id
data_source['name'] = "ds_" + _v_id
data_source['secureJsonData'] = {}
data_source['secureJsonData']['password'] = 'mysecretpassword'
data_source['readOnly'] = True
del data_source['secureJsonFields']

# add data_source to vessel org
path = 'api/datasources'
url = f'{grafana_uri}/{path}'
data = json.dumps(data_source)
headers['X-Grafana-Org-Id'] = str(org_id)
r = requests.post(url, data=data, headers=headers)
plpy.notice(r.json())
del headers['X-Grafana-Org-Id']
if r.status_code != 200 and "id" not in r.json():
    plpy.error('Error grafana add data_source to vessel org')
    return

dashboards_tpl = [ 'pgsail_tpl_electrical', 'pgsail_tpl_logbook', 'pgsail_tpl_monitor', 'pgsail_tpl_rpi', 'pgsail_tpl_solar', 'pgsail_tpl_weather', 'pgsail_tpl_home']
for dashboard in dashboards_tpl:
    # read dashboard template by uid
    path = 'api/dashboards/uid'
    url = f'{grafana_uri}/{path}/{dashboard}'
    if 'X-Grafana-Org-Id' in headers:
        del headers['X-Grafana-Org-Id']
    r = requests.get(url, headers=headers)
    plpy.notice(r.json())
    if r.status_code != 200 and "id" not in r.json():
        plpy.error('Error grafana read dashboard template')
        return
    new_dashboard = r.json()
    del new_dashboard['meta']
    new_dashboard['dashboard']['version'] = 0
    new_dashboard['dashboard']['id'] = 0
    new_uid = re.sub(r'pgsail_tpl_(.*)', r'postgsail_\1', new_dashboard['dashboard']['uid'])
    new_dashboard['dashboard']['uid'] = f'{new_uid}_{_v_id}'
    # add dashboard to vessel org
    path = 'api/dashboards/db'
    url = f'{grafana_uri}/{path}'
    data = json.dumps(new_dashboard)
    new_data = data.replace('PCC52D03280B7034C', data_source['uid'])
    headers['X-Grafana-Org-Id'] = str(org_id)
    r = requests.post(url, data=new_data, headers=headers)
    plpy.notice(r.json())
    if r.status_code != 200 and "id" not in r.json():
        plpy.error('Error grafana add dashboard to vessel org')
        return

# Update Org Prefs
path = 'api/org/preferences'
url = f'{grafana_uri}/{path}'
home_dashboard = {}
home_dashboard['timezone'] = 'utc'
home_dashboard['homeDashboardUID'] = f'postgsail_home_{_v_id}'
data = json.dumps(home_dashboard)
headers['X-Grafana-Org-Id'] = str(org_id)
r = requests.patch(url, data=data, headers=headers)
plpy.notice(r.json())
if r.status_code != 200:
    plpy.error('Error grafana update org preferences')
    return

plpy.notice('Done')
$function$
;

COMMENT ON FUNCTION public.grafana_py_fn(text, text, text, jsonb) IS 'Grafana Organization,User,data_source,dashboards provisioning via HTTP API using plpython3u';
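
-- Illustrative invocation (not part of the migration): provision the Grafana
-- org, user, datasource and dashboards for one vessel; identifiers are made up.
SELECT public.grafana_py_fn('aava', 'abcd1234vesselid', 'demo@openplotter.cloud', get_app_settings_fn());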

UPDATE public.app_settings
    SET value='0.6.1'
    WHERE "name"='app.version';

initdb/99_migrations_202402.sql (new file, 919 lines)
@@ -0,0 +1,919 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration February 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Update email_templates
--INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
-- VALUES ('windy','PostgSail Windy Weather station',E'Hello __RECIPIENT__,\nCongratulations! Your boat is now a Windy Weather station.\nSee more details at __APP_URL__/windy\nHappy sailing!\nFrancois','PostgSail Windy!',E'Congratulations!\nYour boat is now a Windy Weather station.\nSee more details at __APP_URL__/windy\n');
--INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
--VALUES ('alert','PostgSail Alert',E'Hello __RECIPIENT__,\nWe detected an alert __ALERT__.\nSee more details at __APP_URL__\nStay safe.\nFrancois','PostgSail Alert!',E'Congratulations!\nWe detected an alert __ALERT__.\n');

INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('windy_error','PostgSail Windy Weather station Error',E'Hello __RECIPIENT__,\nSorry! We could not convert your boat into a Windy Personal Weather Station due to missing data (temp or wind).\nWindy Personal Weather Station is now disabled.','PostgSail Windy error!',E'Sorry!\nWe could not convert your boat into a Windy Personal Weather Station.');

-- Update app_settings
CREATE OR REPLACE FUNCTION public.get_app_settings_fn(OUT app_settings jsonb)
RETURNS jsonb
AS $get_app_settings$
DECLARE
BEGIN
    SELECT
        jsonb_object_agg(name, value) INTO app_settings
    FROM
        public.app_settings
    WHERE
        name LIKE 'app.email%'
        OR name LIKE 'app.pushover%'
        OR name LIKE 'app.url'
        OR name LIKE 'app.telegram%'
        OR name LIKE 'app.grafana_admin_uri'
        OR name LIKE 'app.keycloak_uri'
        OR name LIKE 'app.windy_apikey';
END;
$get_app_settings$
LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION public.get_user_settings_from_vesselid_fn(
    IN vesselid TEXT,
    OUT user_settings JSONB
    ) RETURNS JSONB
AS $get_user_settings_from_vesselid$
DECLARE
BEGIN
    -- Warn on a NULL or empty vessel_id
    IF vesselid IS NULL OR vesselid = '' THEN
        RAISE WARNING '-> get_user_settings_from_vesselid_fn invalid input %', vesselid;
    END IF;
    SELECT
        json_build_object(
            'boat' , v.name,
            'recipient', a.first,
            'email', v.owner_email,
            'settings', a.preferences
        ) INTO user_settings
    FROM auth.accounts a, auth.vessels v, api.metadata m
    WHERE m.vessel_id = v.vessel_id
        AND m.vessel_id = vesselid
        AND a.email = v.owner_email;
    PERFORM set_config('user.email', user_settings->>'email'::TEXT, false);
    PERFORM set_config('user.recipient', user_settings->>'recipient'::TEXT, false);
END;
$get_user_settings_from_vesselid$ LANGUAGE plpgsql;

-- Create Windy PWS integration
CREATE OR REPLACE FUNCTION public.windy_pws_py_fn(IN metric JSONB,
    IN _user JSONB, IN app JSONB) RETURNS JSONB
AS $windy_pws_py$
"""
Send environment data from boat instruments to Windy as a Personal Weather Station (PWS)
https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
"""
import requests
import json
import decimal

if 'app.windy_apikey' not in app or not app['app.windy_apikey']:
    plpy.error('Error no windy_apikey defined, check app settings')
    return None
if 'station' not in metric or not metric['station']:
    plpy.error('Error no metrics defined')
    return None
if 'temp' not in metric or not metric['temp']:
    plpy.error('Error no metrics defined')
    return None
if not _user:
    plpy.error('Error no user defined, check user settings')
    return None

_headers = {'User-Agent': 'PostgSail', 'From': 'xbgmsharp@gmail.com', 'Content-Type': 'application/json'}
_payload = {
    'stations': [
        { 'station': int(decimal.Decimal(metric['station'])),
          'name': metric['name'],
          'shareOption': 'Open',
          'type': 'SignalK PostgSail Plugin',
          'provider': 'PostgSail',
          'url': 'https://iot.openplotter.cloud/{name}/monitoring'.format(name=metric['name']),
          'lat': float(decimal.Decimal(metric['lat'])),
          'lon': float(decimal.Decimal(metric['lon'])),
          'elevation': 1 }
    ],
    'observations': [
        { 'station': int(decimal.Decimal(metric['station'])),
          'temp': float(decimal.Decimal(metric['temp'])),
          'wind': round(float(decimal.Decimal(metric['wind']))),
          'gust': round(float(decimal.Decimal(metric['gust']))),
          'winddir': int(decimal.Decimal(metric['winddir'])),
          'pressure': int(decimal.Decimal(metric['pressure'])),
          'rh': float(decimal.Decimal(metric['rh'])) }
    ]}
#print(_payload)
#plpy.notice(_payload)
data = json.dumps(_payload)
api_url = 'https://stations.windy.com/pws/update/{api_key}'.format(api_key=app['app.windy_apikey'])
r = requests.post(api_url, data=data, headers=_headers, timeout=(5, 60))
#print(r.text)
#plpy.notice(api_url)
if r.status_code == 200:
    #print('Data sent successfully!')
    plpy.notice('Data sent successfully to Windy!')
    #plpy.notice(api_url)
    if 'windy' not in _user['settings']:
        api_url = 'https://stations.windy.com/pws/station/{api_key}/{station}'.format(api_key=app['app.windy_apikey'], station=metric['station'])
        #print(r.text)
        #plpy.notice(api_url)
        r = requests.get(api_url, timeout=(5, 60))
        if r.status_code == 200:
            #print('Windy Personal Weather Station created successfully in Windy Stations!')
            plpy.notice('Windy Personal Weather Station created successfully in Windy Stations!')
            return r.json()
        else:
            plpy.error(f'Failed to gather PWS details. Status code: {r.status_code}')
else:
    plpy.error(f'Failed to send data. Status code: {r.status_code}')
    #print(f'Failed to send data. Status code: {r.status_code}')
    #print(r.text)
return {}
$windy_pws_py$ strict TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.windy_pws_py_fn
    IS 'Forward vessel data to Windy as a Personal Weather Station using plpython3u';

CREATE OR REPLACE FUNCTION public.cron_windy_fn() RETURNS void AS $cron_windy$
DECLARE
    windy_rec record;
    default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
    last_metric TIMESTAMPTZ;
    metric_rec record;
    windy_metric jsonb;
    app_settings jsonb;
    user_settings jsonb;
    windy_pws jsonb;
BEGIN
    -- Check for new observations pending update
    RAISE NOTICE 'cron_windy_fn';
    -- Gather url from app settings
    app_settings := get_app_settings_fn();
    -- Find users with Windy active and with an active vessel
    -- Map account id to Windy Station ID
    FOR windy_rec in
        SELECT
            a.id,a.email,v.vessel_id,v.name,
            COALESCE((a.preferences->'windy_last_metric')::TEXT, default_last_metric::TEXT) as last_metric
            FROM auth.accounts a
            LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
            LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
            WHERE (a.preferences->'public_windy')::boolean = True
                AND m.active = True
    LOOP
        RAISE NOTICE '-> cron_windy_fn for [%]', windy_rec;
        PERFORM set_config('vessel.id', windy_rec.vessel_id, false);
        --RAISE WARNING 'public.cron_process_windy_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
        RAISE NOTICE '-> cron_windy_fn checking user_settings [%]', user_settings;
        -- Get all metrics since the last windy_last_metric, averaged by 5 minutes
        -- TODO use json_agg to send all data at once, but there is an issue with the py jsonb transformation of decimal.
        FOR metric_rec in
            SELECT time_bucket('5 minutes', m.time) AS time_bucket,
                avg((m.metrics->'environment.outside.temperature')::numeric) AS temperature,
                avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
                avg((m.metrics->'environment.outside.relativeHumidity')::numeric) AS rh,
                avg((m.metrics->'environment.wind.directionTrue')::numeric) AS winddir,
                avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
                max((m.metrics->'environment.wind.speedTrue')::numeric) AS gust,
                last(latitude, time) AS lat,
                last(longitude, time) AS lng
                FROM api.metrics m
                WHERE vessel_id = windy_rec.vessel_id
                    AND m.time >= windy_rec.last_metric::TIMESTAMPTZ
                GROUP BY time_bucket
                ORDER BY time_bucket ASC LIMIT 100
        LOOP
            RAISE NOTICE '-> cron_windy_fn checking metrics [%]', metric_rec;
            IF metric_rec.wind IS NULL OR metric_rec.temperature IS NULL THEN
                -- Ignore when there are no metrics
                -- Send notification
                PERFORM send_notification_fn('windy_error'::TEXT, user_settings::JSONB);
                -- Disable windy
                PERFORM api.update_user_preferences_fn('{public_windy}'::TEXT, 'false'::TEXT);
                RETURN;
            END IF;
            -- https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
            -- temp from Kelvin to Celsius
            -- winddir from radians to degrees
            -- rh from ratio to percentage
            SELECT jsonb_build_object(
                'dateutc', metric_rec.time_bucket,
                'station', windy_rec.id,
                'name', windy_rec.name,
                'lat', metric_rec.lat,
                'lon', metric_rec.lng,
                'wind', metric_rec.wind,
                'gust', metric_rec.gust,
                'pressure', metric_rec.pressure,
                'winddir', radiantToDegrees(metric_rec.winddir::numeric),
                'temp', kelvinToCel(metric_rec.temperature::numeric),
                'rh', valToPercent(metric_rec.rh::numeric)
                ) INTO windy_metric;
            RAISE NOTICE '-> cron_windy_fn checking windy_metrics [%]', windy_metric;
            SELECT windy_pws_py_fn(windy_metric, user_settings, app_settings) into windy_pws;
            RAISE NOTICE '-> cron_windy_fn Windy PWS [%]', ((windy_pws->'header')::JSONB ? 'id');
            IF NOT((user_settings->'settings')::JSONB ? 'windy') and ((windy_pws->'header')::JSONB ? 'id') then
                RAISE NOTICE '-> cron_windy_fn new Windy PWS [%]', (windy_pws->'header')::JSONB->>'id';
                -- Store the Windy station id in user preferences
                PERFORM api.update_user_preferences_fn('{windy}'::TEXT, ((windy_pws->'header')::JSONB->>'id')::TEXT);
                -- Send notification
                PERFORM send_notification_fn('windy'::TEXT, user_settings::JSONB);
            END IF;
            -- Record last metrics time
            SELECT metric_rec.time_bucket INTO last_metric;
        END LOOP;
        PERFORM api.update_user_preferences_fn('{windy_last_metric}'::TEXT, last_metric::TEXT);
    END LOOP;
END;
$cron_windy$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_windy_fn
    IS 'init by pg_cron to create (or update) a station and upload observations to Windy as a Personal Weather Station';
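--
-- For illustration, a function like this is driven by pg_cron; a minimal
-- scheduling sketch (the job name and interval here are assumptions, not part
-- of this migration, which only repoints existing jobs further below):
--   SELECT cron.schedule('cron_windy', '*/5 * * * *', 'select public.cron_windy_fn()');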

CREATE OR REPLACE FUNCTION public.cron_alerts_fn() RETURNS void AS $$
DECLARE
    alert_rec record;
    default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
    last_metric TIMESTAMPTZ;
    metric_rec record;
    app_settings JSONB;
    user_settings JSONB;
    alerting JSONB;
    _alarms JSONB;
    alarms TEXT;
    alert_default JSONB := '{
        "low_pressure_threshold": 990,
        "high_wind_speed_threshold": 30,
        "low_water_depth_threshold": 1,
        "min_notification_interval": 6,
        "high_pressure_drop_threshold": 12,
        "low_battery_charge_threshold": 90,
        "low_battery_voltage_threshold": 12.5,
        "low_water_temperature_threshold": 10,
        "low_indoor_temperature_threshold": 7,
        "low_outdoor_temperature_threshold": 3
    }';
BEGIN
    -- Check for new event notification pending update
    RAISE NOTICE 'cron_alerts_fn';
    FOR alert_rec in
        SELECT
            a.user_id,a.email,v.vessel_id,
            COALESCE((a.preferences->'alert_last_metric')::TEXT, default_last_metric::TEXT) as last_metric,
            (alert_default || (a.preferences->'alerting')::JSONB) as alerting,
            (a.preferences->'alarms')::JSONB as alarms
            FROM auth.accounts a
            LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
            LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
            WHERE (a.preferences->'alerting'->'enabled')::boolean = True
                AND m.active = True
    LOOP
        RAISE NOTICE '-> cron_alerts_fn for [%]', alert_rec;
        PERFORM set_config('vessel.id', alert_rec.vessel_id, false);
        PERFORM set_config('user.email', alert_rec.email, false);
        --RAISE WARNING 'public.cron_process_alert_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(alert_rec.vessel_id::TEXT);
        RAISE NOTICE '-> cron_alerts_fn checking user_settings [%]', user_settings;
        -- Get all metrics since the last alert_last_metric, averaged by 5 minutes
        FOR metric_rec in
            SELECT time_bucket('5 minutes', m.time) AS time_bucket,
                avg((m.metrics->'environment.inside.temperature')::numeric) AS intemp,
                avg((m.metrics->'environment.outside.temperature')::numeric) AS outtemp,
                avg((m.metrics->'environment.water.temperature')::numeric) AS wattemp,
                avg((m.metrics->'environment.depth.belowTransducer')::numeric) AS watdepth,
                avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
                avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
                avg((m.metrics->'electrical.batteries.House.voltage')::numeric) AS voltage,
                avg((m.metrics->'electrical.batteries.House.capacity.stateOfCharge')::numeric) AS charge
                FROM api.metrics m
                WHERE vessel_id = alert_rec.vessel_id
                    AND m.time >= alert_rec.last_metric::TIMESTAMPTZ
                GROUP BY time_bucket
                ORDER BY time_bucket ASC LIMIT 100
        LOOP
            RAISE NOTICE '-> cron_alerts_fn checking metrics [%]', metric_rec;
            RAISE NOTICE '-> cron_alerts_fn checking alerting [%]', alert_rec.alerting;
            --RAISE NOTICE '-> cron_alerts_fn checking debug [%] [%]', kelvinToCel(metric_rec.intemp), (alert_rec.alerting->'low_indoor_temperature_threshold');
            IF kelvinToCel(metric_rec.intemp) < (alert_rec.alerting->'low_indoor_temperature_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Only notify if the previous alarm is older than min_notification_interval hours
                IF (
                    ((_alarms->'low_indoor_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_indoor_temperature_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_indoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.intemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_indoor_temperature_threshold value:'|| kelvinToCel(metric_rec.intemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_indoor_temperature_threshold';
            END IF;
            IF kelvinToCel(metric_rec.outtemp) < (alert_rec.alerting->'low_outdoor_temperature_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Only notify if the previous alarm is older than min_notification_interval hours
                IF (
                    ((_alarms->'low_outdoor_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_outdoor_temperature_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_outdoor_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.outtemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_outdoor_temperature_threshold value:'|| kelvinToCel(metric_rec.outtemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_outdoor_temperature_threshold';
            END IF;
            IF kelvinToCel(metric_rec.wattemp) < (alert_rec.alerting->'low_water_temperature_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Only notify if the previous alarm is older than min_notification_interval hours
                IF (
                    ((_alarms->'low_water_temperature_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_water_temperature_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_water_temperature_threshold": {"value": '|| kelvinToCel(metric_rec.wattemp) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_water_temperature_threshold value:'|| kelvinToCel(metric_rec.wattemp) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_water_temperature_threshold';
            END IF;
            IF metric_rec.watdepth < (alert_rec.alerting->'low_water_depth_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Only notify if the previous alarm is older than min_notification_interval hours
                IF (
                    ((_alarms->'low_water_depth_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_water_depth_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_water_depth_threshold": {"value": '|| metric_rec.watdepth ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_water_depth_threshold value:'|| metric_rec.watdepth ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_water_depth_threshold';
            END IF;
            IF metric_rec.pressure < (alert_rec.alerting->'high_pressure_drop_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Only notify if the previous alarm is older than min_notification_interval hours
                IF (
                    ((_alarms->'high_pressure_drop_threshold'->>'date') IS NULL) OR
                    (((_alarms->'high_pressure_drop_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"high_pressure_drop_threshold": {"value": '|| metric_rec.pressure ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "high_pressure_drop_threshold value:'|| metric_rec.pressure ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug high_pressure_drop_threshold';
            END IF;
            IF metric_rec.wind > (alert_rec.alerting->'high_wind_speed_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Only notify if the previous alarm is older than min_notification_interval hours
                IF (
                    ((_alarms->'high_wind_speed_threshold'->>'date') IS NULL) OR
                    (((_alarms->'high_wind_speed_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"high_wind_speed_threshold": {"value": '|| metric_rec.wind ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "high_wind_speed_threshold value:'|| metric_rec.wind ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug high_wind_speed_threshold';
            END IF;
            IF metric_rec.voltage < (alert_rec.alerting->'low_battery_voltage_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Only notify if the previous alarm is older than min_notification_interval hours
                IF (
                    ((_alarms->'low_battery_voltage_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_battery_voltage_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_battery_voltage_threshold": {"value": '|| metric_rec.voltage ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_battery_voltage_threshold value:'|| metric_rec.voltage ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_voltage_threshold';
            END IF;
            IF (metric_rec.charge*100) < (alert_rec.alerting->'low_battery_charge_threshold')::numeric THEN
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', (alert_rec.alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ;
                RAISE NOTICE '-> cron_alerts_fn checking debug [%]', metric_rec.time_bucket::TIMESTAMPTZ;
                -- Get latest alarms
                SELECT preferences->'alarms' INTO _alarms FROM auth.accounts a WHERE a.email = current_setting('user.email', false);
                -- Only notify if the previous alarm is older than min_notification_interval hours
                IF (
                    ((_alarms->'low_battery_charge_threshold'->>'date') IS NULL) OR
                    (((_alarms->'low_battery_charge_threshold'->>'date')::TIMESTAMPTZ
                    + ((interval '1 hour') * (alert_rec.alerting->>'min_notification_interval')::NUMERIC))
                    < metric_rec.time_bucket::TIMESTAMPTZ)
                    ) THEN
                    -- Add alarm
                    alarms := '{"low_battery_charge_threshold": {"value": '|| (metric_rec.charge*100) ||', "date":"' || metric_rec.time_bucket || '"}}';
                    -- Merge alarms
                    SELECT public.jsonb_recursive_merge(_alarms::jsonb, alarms::jsonb) into _alarms;
                    -- Update alarms for user
                    PERFORM api.update_user_preferences_fn('{alarms}'::TEXT, _alarms::TEXT);
                    -- Gather user settings
                    user_settings := get_user_settings_from_vesselid_fn(current_setting('vessel.id', false));
                    SELECT user_settings::JSONB || ('{"alert": "low_battery_charge_threshold value:'|| (metric_rec.charge*100) ||'"}'::text)::JSONB into user_settings;
                    -- Send notification
                    PERFORM send_notification_fn('alert'::TEXT, user_settings::JSONB);
                    -- DEBUG
                    RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold +interval';
                END IF;
                RAISE NOTICE '-> cron_alerts_fn checking debug low_battery_charge_threshold';
            END IF;
            -- Record last metrics time
            SELECT metric_rec.time_bucket INTO last_metric;
        END LOOP;
        PERFORM api.update_user_preferences_fn('{alert_last_metric}'::TEXT, last_metric::TEXT);
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_alerts_fn
    IS 'init by pg_cron to check for alerts';
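--
-- Note how alert_default is combined with the account preferences above: the
-- jsonb concatenation operator (||) lets user-defined thresholds override the
-- defaults key by key. A minimal sketch (values are illustrative):
--   SELECT '{"high_wind_speed_threshold": 30}'::jsonb || '{"high_wind_speed_threshold": 25}'::jsonb;
--   -- returns {"high_wind_speed_threshold": 25}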

-- CRON for no vessel notification
CREATE FUNCTION public.cron_no_vessel_fn() RETURNS void AS $no_vessel$
DECLARE
    no_vessel record;
    user_settings jsonb;
BEGIN
    -- Check for users with no vessel registered
    RAISE NOTICE 'cron_no_vessel_fn';
    FOR no_vessel in
        SELECT a.user_id,a.email,a.first
            FROM auth.accounts a
            WHERE NOT EXISTS (
                SELECT *
                FROM auth.vessels v
                WHERE v.owner_email = a.email)
    LOOP
        RAISE NOTICE '-> cron_no_vessel_rec_fn for [%]', no_vessel;
        SELECT json_build_object('email', no_vessel.email, 'recipient', no_vessel.first) into user_settings;
        RAISE NOTICE '-> debug cron_no_vessel_rec_fn [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('no_vessel'::TEXT, user_settings::JSONB);
    END LOOP;
END;
$no_vessel$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_no_vessel_fn
    IS 'init by pg_cron, check for users with no vessel registered then send notification';

CREATE FUNCTION public.cron_no_metadata_fn() RETURNS void AS $no_metadata$
DECLARE
    no_metadata_rec record;
    user_settings jsonb;
BEGIN
    -- Check for vessels registered but with no metadata
    RAISE NOTICE 'cron_no_metadata_fn';
    FOR no_metadata_rec in
        SELECT
            a.user_id,a.email,a.first
            FROM auth.accounts a, auth.vessels v
            WHERE NOT EXISTS (
                SELECT *
                FROM api.metadata m
                WHERE v.vessel_id = m.vessel_id) AND v.owner_email = a.email
    LOOP
        RAISE NOTICE '-> cron_process_no_metadata_rec_fn for [%]', no_metadata_rec;
        SELECT json_build_object('email', no_metadata_rec.email, 'recipient', no_metadata_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_process_no_metadata_rec_fn [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('no_metadata'::TEXT, user_settings::JSONB);
    END LOOP;
END;
$no_metadata$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_no_metadata_fn
    IS 'init by pg_cron, check for vessels with no metadata then send notification';

CREATE FUNCTION public.cron_no_activity_fn() RETURNS void AS $no_activity$
DECLARE
    no_activity_rec record;
    user_settings jsonb;
BEGIN
    -- Check for vessels with no activity for more than 230 days
    RAISE NOTICE 'cron_no_activity_fn';
    FOR no_activity_rec in
        SELECT
            v.owner_email,m.name,m.vessel_id,m.time,a.first
            FROM auth.accounts a
            LEFT JOIN auth.vessels v ON v.owner_email = a.email
            LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
            WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS'
    LOOP
        RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_process_no_activity_rec_fn [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
    END LOOP;
END;
$no_activity$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_no_activity_fn
    IS 'init by pg_cron, check for vessels with no activity for more than 230 days then send notification';

CREATE FUNCTION public.cron_deactivated_fn() RETURNS void AS $deactivated$
DECLARE
    no_activity_rec record;
    user_settings jsonb;
BEGIN
    RAISE NOTICE 'cron_deactivated_fn';

    -- List accounts with vessel inactivity for more than 1 YEAR
    FOR no_activity_rec in
        SELECT
            v.owner_email,m.name,m.vessel_id,m.time,a.first
            FROM auth.accounts a
            LEFT JOIN auth.vessels v ON v.owner_email = a.email
            LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
            WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
    LOOP
        RAISE NOTICE '-> cron_process_deactivated_rec_fn for inactivity [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_process_deactivated_rec_fn inactivity [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
        --PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
    END LOOP;

    -- List accounts with no vessel metadata for more than 1 YEAR
    FOR no_activity_rec in
        SELECT
            a.user_id,a.email,a.first,a.created_at
            FROM auth.accounts a, auth.vessels v
            WHERE NOT EXISTS (
                SELECT *
                FROM api.metadata m
                WHERE v.vessel_id = m.vessel_id) AND v.owner_email = a.email
                AND v.created_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
    LOOP
        RAISE NOTICE '-> cron_process_deactivated_rec_fn for no metadata [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_process_deactivated_rec_fn no metadata [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
        --PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
    END LOOP;

    -- List accounts with no vessel created for more than 1 YEAR
    FOR no_activity_rec in
        SELECT a.user_id,a.email,a.first,a.created_at
            FROM auth.accounts a
            WHERE NOT EXISTS (
                SELECT *
                FROM auth.vessels v
                WHERE v.owner_email = a.email)
                AND a.created_at < NOW() AT TIME ZONE 'UTC' - INTERVAL '1 YEAR'
    LOOP
        RAISE NOTICE '-> cron_process_deactivated_rec_fn for no vessel [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_process_deactivated_rec_fn no vessel [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('deactivated'::TEXT, user_settings::JSONB);
        --PERFORM public.delete_account_fn(no_activity_rec.owner_email::TEXT, no_activity_rec.vessel_id::TEXT);
    END LOOP;
END;
$deactivated$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_deactivated_fn
    IS 'init by pg_cron, check for vessels with no activity for more than 1 year then send notification and delete data';

DROP FUNCTION IF EXISTS public.cron_prune_otp_fn;
CREATE OR REPLACE FUNCTION public.cron_prune_otp_fn() RETURNS void
AS $$
DECLARE
    otp_rec record;
BEGIN
    -- Purge OTP older than 15 minutes
    RAISE NOTICE 'cron_prune_otp_fn';
    FOR otp_rec in
        SELECT *
        FROM auth.otp
        WHERE otp_timestamp < NOW() AT TIME ZONE 'UTC' - INTERVAL '15 MINUTES'
        ORDER BY otp_timestamp desc
    LOOP
        RAISE NOTICE '-> cron_prune_otp_fn deleting expired otp for user [%]', otp_rec.user_email;
        -- remove entry
        DELETE FROM auth.otp
            WHERE user_email = otp_rec.user_email;
        RAISE NOTICE '-> cron_prune_otp_fn deleted expired otp for user [%]', otp_rec.user_email;
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_prune_otp_fn
    IS 'init by pg_cron to purge OTP tokens older than 15 minutes';
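--
-- The row-by-row loop above keeps the per-user NOTICE logging; if that logging
-- were not needed, the same purge could be a single statement. A sketch:
--   DELETE FROM auth.otp
--       WHERE otp_timestamp < NOW() AT TIME ZONE 'UTC' - INTERVAL '15 MINUTES';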

DROP FUNCTION IF EXISTS public.cron_process_prune_otp_fn();
DROP FUNCTION IF EXISTS public.cron_process_no_vessel_fn();
DROP FUNCTION IF EXISTS public.cron_process_no_metadata_fn();
DROP FUNCTION IF EXISTS public.cron_process_no_activity_fn();
DROP FUNCTION IF EXISTS public.cron_process_deactivated_fn();
DROP FUNCTION IF EXISTS public.cron_process_windy_fn();
DROP FUNCTION IF EXISTS public.cron_process_alerts_fn();

-- Remove deprecated fn
DROP FUNCTION IF EXISTS public.cron_process_new_account_fn();
DROP FUNCTION IF EXISTS public.cron_process_new_account_otp_validation_fn();
DROP FUNCTION IF EXISTS public.cron_process_new_moorage_fn();
DROP FUNCTION IF EXISTS public.cron_process_new_vessel_fn();

CREATE OR REPLACE FUNCTION send_notification_fn(
    IN email_type TEXT,
    IN user_settings JSONB) RETURNS VOID
AS $send_notification$
DECLARE
    app_settings JSONB;
    _email_notifications BOOLEAN := False;
    _phone_notifications BOOLEAN := False;
    _pushover_user_key TEXT := NULL;
    pushover_settings JSONB := NULL;
    _telegram_notifications BOOLEAN := False;
    _telegram_chat_id TEXT := NULL;
    telegram_settings JSONB := NULL;
    _email TEXT := NULL;
BEGIN
    -- TODO input check
    --RAISE NOTICE '--> send_notification_fn type [%]', email_type;
    -- Gather notification app settings, eg: email, pushover, telegram
    app_settings := get_app_settings_fn();
    --RAISE NOTICE '--> send_notification_fn app_settings [%]', app_settings;
    --RAISE NOTICE '--> user_settings [%]', user_settings->>'email'::TEXT;

    -- Gather notification settings and merge them with user settings
    -- Send notification email
    SELECT preferences['email_notifications'] INTO _email_notifications
        FROM auth.accounts a
        WHERE a.email = user_settings->>'email'::TEXT;
    RAISE NOTICE '--> send_notification_fn email_notifications [%]', _email_notifications;
    -- If the email server app setting is set and email notifications are enabled for the user
    IF app_settings['app.email_server'] IS NOT NULL AND _email_notifications IS True THEN
        PERFORM send_email_py_fn(email_type::TEXT, user_settings::JSONB, app_settings::JSONB);
    END IF;

    -- Send notification pushover
    SELECT preferences['phone_notifications'],preferences->>'pushover_user_key' INTO _phone_notifications,_pushover_user_key
        FROM auth.accounts a
        WHERE a.email = user_settings->>'email'::TEXT;
    RAISE NOTICE '--> send_notification_fn phone_notifications [%]', _phone_notifications;
    -- If the pushover app token is set and pushover is configured for the user
    IF app_settings['app.pushover_app_token'] IS NOT NULL AND _phone_notifications IS True AND _pushover_user_key IS NOT NULL THEN
        SELECT json_build_object('pushover_user_key', _pushover_user_key) into pushover_settings;
        SELECT user_settings::JSONB || pushover_settings::JSONB into user_settings;
        --RAISE NOTICE '--> send_notification_fn user_settings + pushover [%]', user_settings;
        PERFORM send_pushover_py_fn(email_type::TEXT, user_settings::JSONB, app_settings::JSONB);
    END IF;

    -- Send notification telegram
    SELECT (preferences->'telegram'->'chat'->'id') IS NOT NULL,preferences['telegram']['chat']['id'] INTO _telegram_notifications,_telegram_chat_id
        FROM auth.accounts a
        WHERE a.email = user_settings->>'email'::TEXT;
    RAISE NOTICE '--> send_notification_fn telegram_notifications [%]', _telegram_notifications;
    -- If the telegram bot token is set and telegram is configured for the user
    IF app_settings['app.telegram_bot_token'] IS NOT NULL AND _telegram_notifications IS True AND _phone_notifications IS True THEN
        SELECT json_build_object('telegram_chat_id', _telegram_chat_id) into telegram_settings;
        SELECT user_settings::JSONB || telegram_settings::JSONB into user_settings;
        --RAISE NOTICE '--> send_notification_fn user_settings + telegram [%]', user_settings;
        PERFORM send_telegram_py_fn(email_type::TEXT, user_settings::JSONB, app_settings::JSONB);
    END IF;
END;
$send_notification$ LANGUAGE plpgsql;
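--
-- A minimal invocation sketch for send_notification_fn (the email address and
-- recipient name are hypothetical; 'no_vessel' is one of the templates used above):
--   SELECT send_notification_fn('no_vessel'::TEXT,
--       '{"email": "user@example.com", "recipient": "Alice"}'::JSONB);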

-- fn to trim new vessel name
CREATE FUNCTION new_vessel_trim_fn() RETURNS trigger AS $new_vessel_trim_fn$
BEGIN
    NEW.name := TRIM(NEW.name);
    RETURN NEW;
END;
$new_vessel_trim_fn$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.new_vessel_trim_fn
    IS 'Trim whitespace from vessel name';
-- Trigger trim new vessel name
CREATE TRIGGER new_vessel_trim BEFORE INSERT ON auth.vessels
    FOR EACH ROW EXECUTE FUNCTION public.new_vessel_trim_fn();
-- Description
COMMENT ON TRIGGER new_vessel_trim
    ON auth.vessels
    IS 'Trim whitespace from vessel name';
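--
-- A quick way to observe the trigger (values are illustrative and the column
-- list depends on the auth.vessels schema):
--   INSERT INTO auth.vessels (owner_email, name) VALUES ('test@example.com', '  Aloha  ');
--   -- the stored name is 'Aloha', with surrounding whitespace removed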

CREATE or REPLACE FUNCTION public.logbook_update_geojson_fn(IN _id integer, IN _start text, IN _end text,
    OUT _track_geojson JSON
) AS $logbook_geojson$
declare
    log_geojson jsonb;
    metrics_geojson jsonb;
    _map jsonb;
begin
    -- GeoJson Feature Logbook linestring
    SELECT
        ST_AsGeoJSON(log.*) into log_geojson
    FROM
        ( SELECT
            id,name,
            distance,
            duration,
            avg_speed,
            max_speed,
            max_wind_speed,
            _from_time,
            _to_time,
            _from_moorage_id,
            _to_moorage_id,
            notes,
            track_geom
            FROM api.logbook
            WHERE id = _id
        ) AS log;
    -- GeoJson Feature Metrics point
    SELECT
        json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
    FROM (
        ( SELECT
            time,
            courseovergroundtrue,
            speedoverground,
            windspeedapparent,
            longitude,latitude,
            '' AS notes,
            coalesce(metrics->'environment.wind.speedTrue', null) as truewindspeed,
            coalesce(metrics->'environment.wind.directionTrue', null) as truewinddirection,
            coalesce(status, null) as status,
            st_makepoint(longitude,latitude) AS geo_point
            FROM api.metrics m
            WHERE m.latitude IS NOT NULL
                AND m.longitude IS NOT NULL
                AND time >= _start::TIMESTAMPTZ
                AND time <= _end::TIMESTAMPTZ
                AND vessel_id = current_setting('vessel.id', false)
            ORDER BY m.time ASC
        )
    ) AS t;

    -- Merge jsonb
    SELECT log_geojson::jsonb || metrics_geojson::jsonb into _map;
    -- output
    SELECT
        json_build_object(
            'type', 'FeatureCollection',
            'features', _map
        ) into _track_geojson;
END;
$logbook_geojson$ LANGUAGE plpgsql;
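--
-- Usage sketch (the logbook id and time range are hypothetical; vessel.id must
-- be set in the session, since the function filters api.metrics on it):
--   SELECT public.logbook_update_geojson_fn(42, '2024-03-01T10:00:00Z', '2024-03-01T14:00:00Z');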

-- Update version
UPDATE public.app_settings
    SET value='0.7.0'
    WHERE "name"='app.version';

-- Update the cron jobs
\c postgres

UPDATE cron.job
    SET command='select public.cron_prune_otp_fn()'
    WHERE jobname = 'cron_prune_otp';
UPDATE cron.job
    SET command='select public.cron_no_vessel_fn()'
    WHERE jobname = 'cron_no_vessel';
UPDATE cron.job
    SET command='select public.cron_no_metadata_fn()'
    WHERE jobname = 'cron_no_metadata';
UPDATE cron.job
    SET command='select public.cron_no_activity_fn()'
    WHERE jobname = 'cron_no_activity';
UPDATE cron.job
    SET command='select public.cron_windy_fn()'
    WHERE jobname = 'cron_windy';
UPDATE cron.job
    SET command='select public.cron_alerts_fn()'
    WHERE jobname = 'cron_alerts';
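
-- To verify the rewired jobs, the pg_cron catalog can be inspected:
--   SELECT jobname, schedule, command FROM cron.job;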

initdb/99_migrations_202403.sql (new file, 528 lines)
@@ -0,0 +1,528 @@

---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration March 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Force timezone, just in case'
set timezone to 'UTC';

CREATE OR REPLACE FUNCTION public.process_lat_lon_fn(IN lon NUMERIC, IN lat NUMERIC,
    OUT moorage_id INTEGER,
    OUT moorage_type INTEGER,
    OUT moorage_name TEXT,
    OUT moorage_country TEXT
) AS $process_lat_lon$
DECLARE
    stay_rec record;
    --moorage_id INTEGER := NULL;
    --moorage_type INTEGER := 1; -- Unknown
    --moorage_name TEXT := NULL;
    --moorage_country TEXT := NULL;
    existing_rec record;
    geo jsonb;
    overpass jsonb;
BEGIN
    RAISE NOTICE '-> process_lat_lon_fn';
    IF lon IS NULL OR lat IS NULL THEN
        RAISE WARNING '-> process_lat_lon_fn invalid input lon %, lat %', lon, lat;
        --return NULL;
    END IF;

    -- Do we have an existing moorage within 300m of the new stay
    FOR existing_rec in
        SELECT
            *
            FROM api.moorages m
            WHERE
                m.latitude IS NOT NULL
                AND m.longitude IS NOT NULL
                AND m.geog IS NOT NULL
                AND ST_DWithin(
                    Geography(ST_MakePoint(m.longitude, m.latitude)),
                    Geography(ST_MakePoint(lon, lat)),
                    300 -- in meters
                    )
                AND m.vessel_id = current_setting('vessel.id', false)
            ORDER BY id ASC
    LOOP
        -- found previous stay within 300m of the new moorage
        IF existing_rec.id IS NOT NULL AND existing_rec.id > 0 THEN
            RAISE NOTICE '-> process_lat_lon_fn found previous moorage within 300m %', existing_rec;
            EXIT; -- exit loop
        END IF;
    END LOOP;

    -- if within 300m use existing name and stay_code
    -- else insert new entry
    IF existing_rec.id IS NOT NULL AND existing_rec.id > 0 THEN
        RAISE NOTICE '-> process_lat_lon_fn found close by moorage, using existing name and stay_code %', existing_rec;
        moorage_id := existing_rec.id;
        moorage_name := existing_rec.name;
        moorage_type := existing_rec.stay_code;
    ELSE
        RAISE NOTICE '-> process_lat_lon_fn create new moorage';
        -- query overpass api to guess moorage type
        overpass := overpass_py_fn(lon::NUMERIC, lat::NUMERIC);
        RAISE NOTICE '-> process_lat_lon_fn overpass name:[%] seamark:type:[%]', overpass->'name', overpass->'seamark:type';
        moorage_type = 1; -- Unknown
        IF overpass->>'seamark:type' = 'harbour' AND overpass->>'seamark:harbour:category' = 'marina' then
            moorage_type = 4; -- Dock
        ELSIF overpass->>'seamark:type' = 'mooring' AND overpass->>'seamark:mooring:category' = 'buoy' then
            moorage_type = 3; -- Mooring Buoy
        ELSIF overpass->>'seamark:type' ~ '(anchorage|anchor_berth|berth)' OR overpass->>'natural' ~ '(bay|beach)' then
            moorage_type = 2; -- Anchor
        ELSIF overpass->>'seamark:type' = 'mooring' then
            moorage_type = 3; -- Mooring Buoy
        ELSIF overpass->>'leisure' = 'marina' then
            moorage_type = 4; -- Dock
        END IF;
        -- reverse geocode lon lat
        geo := reverse_geocode_py_fn('nominatim', lon::NUMERIC, lat::NUMERIC);
        moorage_country := geo->>'country_code';
        IF overpass->>'name:en' IS NOT NULL then
            moorage_name = overpass->>'name:en';
        ELSIF overpass->>'name' IS NOT NULL then
            moorage_name = overpass->>'name';
        ELSE
            moorage_name := geo->>'name';
        END IF;
        RAISE NOTICE '-> process_lat_lon_fn output name:[%] type:[%]', moorage_name, moorage_type;
        RAISE NOTICE '-> process_lat_lon_fn insert new moorage for [%] name:[%] type:[%]', current_setting('vessel.id', false), moorage_name, moorage_type;
        -- Insert new moorage from stay
        INSERT INTO api.moorages
            (vessel_id, name, country, stay_code, reference_count, latitude, longitude, geog, overpass, nominatim)
            VALUES (
                current_setting('vessel.id', false),
                coalesce(moorage_name, null),
                coalesce(moorage_country, null),
                moorage_type,
                1,
                lat,
                lon,
                Geography(ST_MakePoint(lon, lat)),
                coalesce(overpass, null),
                coalesce(geo, null)
            ) returning id into moorage_id;
        -- Add moorage entry to process queue for reference
        --INSERT INTO process_queue (channel, payload, stored, ref_id, processed)
        --    VALUES ('new_moorage', moorage_id, now(), current_setting('vessel.id', true), now());
    END IF;
    --return json_build_object(
    --        'id', moorage_id,
    --        'name', moorage_name,
    --        'type', moorage_type
    --        )::jsonb;
END;
$process_lat_lon$ LANGUAGE plpgsql;
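--
-- Usage sketch (coordinates are illustrative; vessel.id must be set in the
-- session, since the function reads and writes api.moorages for that vessel):
--   SELECT * FROM public.process_lat_lon_fn(-122.4194, 37.7749);
--   -- returns moorage_id, moorage_type, moorage_name, moorage_country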

CREATE or replace FUNCTION public.logbook_update_geojson_fn(IN _id integer, IN _start text, IN _end text,
    OUT _track_geojson JSON
) AS $logbook_geojson$
declare
    log_geojson jsonb;
    metrics_geojson jsonb;
    _map jsonb;
begin
    -- GeoJson Feature Logbook linestring
    SELECT
        ST_AsGeoJSON(log.*) into log_geojson
    FROM
        ( SELECT
            id,name,
            distance,
            duration,
            avg_speed,
            max_speed,
            max_wind_speed,
            _from_time,
            _to_time,
            _from_moorage_id,
            _to_moorage_id,
            notes,
            track_geom
            FROM api.logbook
            WHERE id = _id
        ) AS log;
    -- GeoJson Feature Metrics point
    SELECT
        json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
    FROM (
        ( SELECT
            time,
            courseovergroundtrue,
            speedoverground,
            windspeedapparent,
            longitude,latitude,
            '' AS notes,
            coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null) as truewindspeed,
            coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null) as truewinddirection,
            coalesce(status, null) as status,
            st_makepoint(longitude,latitude) AS geo_point
            FROM api.metrics m
            WHERE m.latitude IS NOT NULL
                AND m.longitude IS NOT NULL
                AND time >= _start::TIMESTAMPTZ
                AND time <= _end::TIMESTAMPTZ
                AND vessel_id = current_setting('vessel.id', false)
            ORDER BY m.time ASC
        )
    ) AS t;

    -- Merge jsonb
    SELECT log_geojson::jsonb || metrics_geojson::jsonb into _map;
    -- output
    SELECT
        json_build_object(
            'type', 'FeatureCollection',
            'features', _map
        ) into _track_geojson;
END;
$logbook_geojson$ LANGUAGE plpgsql;

CREATE OR REPLACE FUNCTION public.metersToKnots(IN meters NUMERIC)
RETURNS NUMERIC
AS $$
BEGIN
    RETURN ROUND(((meters * 1.9438445) * 10) / 10, 2);
END
$$
LANGUAGE plpgsql IMMUTABLE;
-- Description
COMMENT ON FUNCTION
    public.metersToKnots
    IS 'Convert speed from meters/s to knots';
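--
-- Note the "* 10) / 10" factors cancel out, so the expression reduces to
-- ROUND(meters * 1.9438445, 2). A worked example:
--   SELECT public.metersToKnots(5.1);
--   -- 5.1 m/s * 1.9438445 = 9.9136..., rounded to 9.91 knots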

CREATE OR REPLACE FUNCTION logbook_update_extra_json_fn(IN _id integer, IN _start text, IN _end text,
    OUT _extra_json JSON
) AS $logbook_extra_json$
declare
    obs_json jsonb default '{ "seaState": -1, "cloudCoverage": -1, "visibility": -1}'::jsonb;
    log_json jsonb default '{}'::jsonb;
    runtime_json jsonb default '{}'::jsonb;
    metrics_json jsonb default '{}'::jsonb;
    metric_rec record;
BEGIN
    -- Calculate 'navigation.log' metrics
    WITH
        start_trip as (
            -- Fetch 'navigation.log' start, first entry
            SELECT key, value
            FROM api.metrics m,
                jsonb_each_text(m.metrics)
            WHERE key ILIKE 'navigation.log'
                AND time = _start::TIMESTAMPTZ
                AND vessel_id = current_setting('vessel.id', false)
        ),
        end_trip as (
            -- Fetch 'navigation.log' end, last entry
            SELECT key, value
            FROM api.metrics m,
                jsonb_each_text(m.metrics)
            WHERE key ILIKE 'navigation.log'
                AND time = _end::TIMESTAMPTZ
                AND vessel_id = current_setting('vessel.id', false)
        ),
        nm as (
            -- calculate the distance and convert meters to nautical miles
            SELECT ((end_trip.value::NUMERIC - start_trip.value::numeric) * 0.00053996) as trip from start_trip,end_trip
        )
    -- Generate JSON
    SELECT jsonb_build_object('navigation.log', trip) INTO log_json FROM nm;
    RAISE NOTICE '-> logbook_update_extra_json_fn navigation.log: %', log_json;

    -- Calculate engine hours from propulsion.%.runTime first entry
    FOR metric_rec IN
        SELECT key, value
            FROM api.metrics m,
                jsonb_each_text(m.metrics)
            WHERE key ILIKE 'propulsion.%.runTime'
                AND time = _start::TIMESTAMPTZ
                AND vessel_id = current_setting('vessel.id', false)
    LOOP
        -- Engine Hours in seconds
        RAISE NOTICE '-> logbook_update_extra_json_fn propulsion.*.runTime: %', metric_rec;
        with
            end_runtime AS (
                -- Fetch 'propulsion.*.runTime' last entry
                SELECT key, value
                    FROM api.metrics m,
                        jsonb_each_text(m.metrics)
                    WHERE key ILIKE metric_rec.key
                        AND time = _end::TIMESTAMPTZ
                        AND vessel_id = current_setting('vessel.id', false)
            ),
            runtime AS (
                -- calculate runTime Engine Hours as an ISO duration
                --SELECT (end_runtime.value::numeric - metric_rec.value::numeric) AS value FROM end_runtime
                SELECT (((end_runtime.value::numeric - metric_rec.value::numeric) / 3600) * '1 hour'::interval)::interval as value FROM end_runtime
            )
        -- Generate JSON
        SELECT jsonb_build_object(metric_rec.key, runtime.value) INTO runtime_json FROM runtime;
        RAISE NOTICE '-> logbook_update_extra_json_fn key: %, value: %', metric_rec.key, runtime_json;
    END LOOP;

    -- Update logbook with extra values and return json
    SELECT COALESCE(log_json::JSONB, '{}'::jsonb) || COALESCE(runtime_json::JSONB, '{}'::jsonb) INTO metrics_json;
    SELECT jsonb_build_object('metrics', metrics_json, 'observations', obs_json) INTO _extra_json;
    RAISE NOTICE '-> logbook_update_extra_json_fn log_json: %, runtime_json: %, _extra_json: %', log_json, runtime_json, _extra_json;
END;
$logbook_extra_json$ LANGUAGE plpgsql;
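--
-- A worked example of the 'navigation.log' arithmetic above (values are
-- illustrative): a start log of 1833480 m and an end log of 1852000 m give
--   SELECT (1852000 - 1833480) * 0.00053996;
--   -- = 10.0001, i.e. about 10 nautical miles (0.00053996 NM per meter, 1/1852)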

DROP FUNCTION IF EXISTS public.logbook_update_gpx_fn();

CREATE OR REPLACE FUNCTION metadata_upsert_trigger_fn() RETURNS trigger AS $metadata_upsert$
DECLARE
    metadata_id integer;
    metadata_active boolean;
BEGIN
    -- Set client_id to new value to allow RLS
    --PERFORM set_config('vessel.client_id', NEW.client_id, false);
    -- UPSERT - Insert vs Update for Metadata
    --RAISE NOTICE 'metadata_upsert_trigger_fn';
    --PERFORM set_config('vessel.id', NEW.vessel_id, true);
    --RAISE WARNING 'metadata_upsert_trigger_fn [%] [%]', current_setting('vessel.id', true), NEW;
    SELECT m.id,m.active INTO metadata_id, metadata_active
        FROM api.metadata m
        WHERE m.vessel_id IS NOT NULL AND m.vessel_id = current_setting('vessel.id', true);
    --RAISE NOTICE 'metadata_id is [%]', metadata_id;
    IF metadata_id IS NOT NULL THEN
        -- send notification if boat is back online
        IF metadata_active is False THEN
            -- Add monitor online entry to process queue for later notification
            INSERT INTO process_queue (channel, payload, stored, ref_id)
                VALUES ('monitoring_online', metadata_id, now(), current_setting('vessel.id', true));
        END IF;
        -- Update vessel metadata
        UPDATE api.metadata
            SET
                name = NEW.name,
                mmsi = NEW.mmsi,
                client_id = NEW.client_id,
                length = NEW.length,
                beam = NEW.beam,
                height = NEW.height,
                ship_type = NEW.ship_type,
                plugin_version = NEW.plugin_version,
                signalk_version = NEW.signalk_version,
                platform = REGEXP_REPLACE(NEW.platform, '[^a-zA-Z0-9\(\) ]', '', 'g'),
                configuration = NEW.configuration,
                -- time = NEW.time, ignore the time sent by the vessel as it is out of sync sometimes.
                time = NOW(), -- overwrite the time sent by the vessel
                active = true
            WHERE id = metadata_id;
        RETURN NULL; -- Ignore insert
    ELSE
        IF NEW.vessel_id IS NULL THEN
            -- set vessel_id from jwt if not present in INSERT query
            NEW.vessel_id := current_setting('vessel.id');
        END IF;
        -- Ignore and overwrite the time sent by the vessel
        NEW.time := NOW();
        RETURN NEW; -- Insert new vessel metadata
    END IF;
END;
$metadata_upsert$ LANGUAGE plpgsql;
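--
-- The trigger itself is created elsewhere; presumably it is attached along
-- these lines (the trigger name here is an assumption):
--   CREATE TRIGGER metadata_upsert_trigger BEFORE INSERT ON api.metadata
--       FOR EACH ROW EXECUTE FUNCTION metadata_upsert_trigger_fn();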

CREATE OR REPLACE FUNCTION public.cron_windy_fn() RETURNS void AS $$
DECLARE
    windy_rec record;
    default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
    last_metric TIMESTAMPTZ := NOW();
    metric_rec record;
    windy_metric jsonb;
    app_settings jsonb;
    user_settings jsonb;
    windy_pws jsonb;
BEGIN
    -- Check for new observations pending update
    RAISE NOTICE 'cron_process_windy_fn';
    -- Gather url from app settings
    app_settings := get_app_settings_fn();
    -- Find users with Windy active and with an active vessel
    -- Map account id to Windy Station ID
    FOR windy_rec in
        SELECT
            a.id,a.email,v.vessel_id,v.name,
            COALESCE((a.preferences->'windy_last_metric')::TEXT, default_last_metric::TEXT) as last_metric
            FROM auth.accounts a
            LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
            LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
            WHERE (a.preferences->'public_windy')::boolean = True
                AND m.active = True
    LOOP
        RAISE NOTICE '-> cron_process_windy_fn for [%]', windy_rec;
        PERFORM set_config('vessel.id', windy_rec.vessel_id, false);
        --RAISE WARNING 'public.cron_process_windy_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
        RAISE NOTICE '-> cron_process_windy_fn checking user_settings [%]', user_settings;
        -- Get all metrics since the last windy_last_metric, averaged by 5 minutes
        -- TODO use json_agg to send all data at once, but there is an issue with the py jsonb transformation of decimal.
        FOR metric_rec in
            SELECT time_bucket('5 minutes', m.time) AS time_bucket,
                avg((m.metrics->'environment.outside.temperature')::numeric) AS temperature,
                avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
                avg((m.metrics->'environment.outside.relativeHumidity')::numeric) AS rh,
                avg((m.metrics->'environment.wind.directionTrue')::numeric) AS winddir,
                avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
                max((m.metrics->'environment.wind.speedTrue')::numeric) AS gust,
                last(latitude, time) AS lat,
                last(longitude, time) AS lng
                FROM api.metrics m
                WHERE vessel_id = windy_rec.vessel_id
                    AND m.time >= windy_rec.last_metric::TIMESTAMPTZ
                GROUP BY time_bucket
                ORDER BY time_bucket ASC LIMIT 100
        LOOP
            RAISE NOTICE '-> cron_process_windy_fn checking metrics [%]', metric_rec;
            IF metric_rec.wind IS NULL OR metric_rec.temperature IS NULL THEN
                -- Ignore when there are no metrics.
                -- Send notification
                PERFORM send_notification_fn('windy_error'::TEXT, user_settings::JSONB);
                -- Disable windy
                PERFORM api.update_user_preferences_fn('{public_windy}'::TEXT, 'false'::TEXT);
                RETURN;
            END IF;
            -- https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
            -- temp from Kelvin to Celsius
            -- winddir from radians to degrees
            -- rh from ratio to percentage
            SELECT jsonb_build_object(
                'dateutc', metric_rec.time_bucket,
                'station', windy_rec.id,
                'name', windy_rec.name,
                'lat', metric_rec.lat,
                'lon', metric_rec.lng,
                'wind', metric_rec.wind,
                'gust', metric_rec.gust,
                'pressure', metric_rec.pressure,
                'winddir', radiantToDegrees(metric_rec.winddir::numeric),
                'temp', kelvinToCel(metric_rec.temperature::numeric),
                'rh', valToPercent(metric_rec.rh::numeric)
                ) INTO windy_metric;
            RAISE NOTICE '-> cron_process_windy_fn checking windy_metrics [%]', windy_metric;
            SELECT windy_pws_py_fn(windy_metric, user_settings, app_settings) into windy_pws;
            RAISE NOTICE '-> cron_process_windy_fn Windy PWS [%]', ((windy_pws->'header')::JSONB ? 'id');
            IF NOT((user_settings->'settings')::JSONB ? 'windy') and ((windy_pws->'header')::JSONB ? 'id') then
                RAISE NOTICE '-> cron_process_windy_fn new Windy PWS [%]', (windy_pws->'header')::JSONB->>'id';
                -- Store the Windy station id in user preferences
                PERFORM api.update_user_preferences_fn('{windy}'::TEXT, ((windy_pws->'header')::JSONB->>'id')::TEXT);
                -- Send notification
                PERFORM send_notification_fn('windy'::TEXT, user_settings::JSONB);
                -- Refresh user settings after first success
                user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
            END IF;
            -- Record last metrics time
            SELECT metric_rec.time_bucket INTO last_metric;
        END LOOP;
        PERFORM api.update_user_preferences_fn('{windy_last_metric}'::TEXT, last_metric::TEXT);
    END LOOP;
END;
$$ language plpgsql;

DROP FUNCTION public.delete_vessel_fn;
CREATE OR REPLACE FUNCTION public.delete_vessel_fn(IN _vessel_id TEXT) RETURNS JSONB
AS $delete_vessel$
DECLARE
    total_metrics INTEGER;
    del_metrics INTEGER;
    del_logs INTEGER;
    del_stays INTEGER;
    del_moorages INTEGER;
    del_queue INTEGER;
    out_json JSONB;
BEGIN
    select count(*) INTO total_metrics from api.metrics m where vessel_id = _vessel_id;
    WITH deleted AS (delete from api.metrics m where vessel_id = _vessel_id RETURNING *) SELECT count(*) INTO del_metrics FROM deleted;
    WITH deleted AS (delete from api.logbook l where vessel_id = _vessel_id RETURNING *) SELECT count(*) INTO del_logs FROM deleted;
    WITH deleted AS (delete from api.stays s where vessel_id = _vessel_id RETURNING *) SELECT count(*) INTO del_stays FROM deleted;
    WITH deleted AS (delete from api.moorages m where vessel_id = _vessel_id RETURNING *) SELECT count(*) INTO del_moorages FROM deleted;
    WITH deleted AS (delete from public.process_queue m where ref_id = _vessel_id RETURNING *) SELECT count(*) INTO del_queue FROM deleted;
    SELECT jsonb_build_object('total_metrics', total_metrics,
            'del_metrics', del_metrics,
            'del_logs', del_logs,
            'del_stays', del_stays,
            'del_moorages', del_moorages,
            'del_queue', del_queue) INTO out_json;
    RETURN out_json;
END
$delete_vessel$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.delete_vessel_fn
    IS 'Delete all vessel data (metrics,logbook,stays,moorages,process_queue) for a vessel_id';
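--
-- Usage sketch (the vessel_id value is hypothetical):
--   SELECT public.delete_vessel_fn('abcd1234');
--   -- returns a JSONB summary with total_metrics, del_metrics, del_logs,
--   -- del_stays, del_moorages and del_queue counts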

DROP FUNCTION IF EXISTS public.cron_process_no_activity_fn();
CREATE OR REPLACE FUNCTION public.cron_process_no_activity_fn() RETURNS void AS $no_activity$
DECLARE
    no_activity_rec record;
    user_settings jsonb;
    total_metrics INTEGER;
    total_logs INTEGER;
    del_metrics INTEGER;
    out_json JSONB;
BEGIN
    -- Check for vessels with no activity for more than 230 days
    RAISE NOTICE 'cron_process_no_activity_fn';
    FOR no_activity_rec in
        SELECT
            v.owner_email,m.name,m.vessel_id,m.time,a.first
        FROM auth.accounts a
        LEFT JOIN auth.vessels v ON v.owner_email = a.email
        LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
        WHERE m.time < NOW() AT TIME ZONE 'UTC' - INTERVAL '230 DAYS'
            AND v.owner_email <> 'demo@openplotter.cloud'
        ORDER BY m.time DESC
    LOOP
        RAISE NOTICE '-> cron_process_no_activity_rec_fn for [%]', no_activity_rec;
        SELECT json_build_object('email', no_activity_rec.owner_email, 'recipient', no_activity_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_process_no_activity_rec_fn [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('no_activity'::TEXT, user_settings::JSONB);
        SELECT count(*) INTO total_metrics from api.metrics where vessel_id = no_activity_rec.vessel_id;
        WITH deleted AS (delete from api.metrics m where vessel_id = no_activity_rec.vessel_id RETURNING *) SELECT count(*) INTO del_metrics FROM deleted;
        SELECT count(*) INTO total_logs from api.logbook where vessel_id = no_activity_rec.vessel_id;
        SELECT jsonb_build_object('total_metrics', total_metrics, 'total_logs', total_logs, 'del_metrics', del_metrics) INTO out_json;
        RAISE NOTICE '-> debug cron_process_no_activity_rec_fn [%]', out_json;
    END LOOP;
END;
$no_activity$ language plpgsql;
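
-- Scheduling sketch: this function is intended to run from pg_cron; the job
-- name and monthly schedule below are assumptions, not part of this migration.
--SELECT cron.schedule('cron_no_activity', '0 3 1 * *', 'select public.cron_process_no_activity_fn()');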

DROP FUNCTION public.delete_account_fn(text,text);
CREATE OR REPLACE FUNCTION public.delete_account_fn(IN _email TEXT, IN _vessel_id TEXT) RETURNS JSONB
AS $delete_account$
DECLARE
    del_vessel_data JSONB;
    del_meta INTEGER;
    del_vessel INTEGER;
    del_account INTEGER;
    out_json JSONB;
BEGIN
    SELECT public.delete_vessel_fn(_vessel_id) INTO del_vessel_data;
    WITH deleted AS (delete from api.metadata where vessel_id = _vessel_id RETURNING *) SELECT count(*) INTO del_meta FROM deleted;
    WITH deleted AS (delete from auth.vessels where vessel_id = _vessel_id RETURNING *) SELECT count(*) INTO del_vessel FROM deleted;
    WITH deleted AS (delete from auth.accounts where email = _email RETURNING *) SELECT count(*) INTO del_account FROM deleted;
    SELECT jsonb_build_object('del_metadata', del_meta,
        'del_vessel', del_vessel,
        'del_account', del_account) || del_vessel_data INTO out_json;
    RETURN out_json;
END
$delete_account$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.delete_account_fn
    IS 'Delete all data for an account by email and vessel_id';

-- Update version
UPDATE public.app_settings
    SET value='0.7.1'
    WHERE "name"='app.version';
625 initdb/99_migrations_202404.sql Normal file
@@ -0,0 +1,625 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration April 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

UPDATE public.email_templates
    SET email_content='Hello __RECIPIENT__,
Sorry! We could not convert your boat into a Windy Personal Weather Station due to missing data (temperature, wind or pressure).
The Windy Personal Weather Station is now disabled.'
    WHERE "name"='windy_error';

CREATE OR REPLACE FUNCTION public.cron_windy_fn() RETURNS void AS $$
DECLARE
    windy_rec record;
    default_last_metric TIMESTAMPTZ := NOW() - interval '1 day';
    last_metric TIMESTAMPTZ := NOW();
    metric_rec record;
    windy_metric jsonb;
    app_settings jsonb;
    user_settings jsonb;
    windy_pws jsonb;
BEGIN
    -- Check for new observations pending update
    RAISE NOTICE 'cron_process_windy_fn';
    -- Gather url from app settings
    app_settings := get_app_settings_fn();
    -- Find users with Windy active and with an active vessel
    -- Map account id to Windy Station ID
    FOR windy_rec in
        SELECT
            a.id,a.email,v.vessel_id,v.name,
            COALESCE((a.preferences->'windy_last_metric')::TEXT, default_last_metric::TEXT) as last_metric
        FROM auth.accounts a
        LEFT JOIN auth.vessels AS v ON v.owner_email = a.email
        LEFT JOIN api.metadata AS m ON m.vessel_id = v.vessel_id
        WHERE (a.preferences->'public_windy')::boolean = True
            AND m.active = True
    LOOP
        RAISE NOTICE '-> cron_process_windy_fn for [%]', windy_rec;
        PERFORM set_config('vessel.id', windy_rec.vessel_id, false);
        --RAISE WARNING 'public.cron_process_windy_rec_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
        RAISE NOTICE '-> cron_process_windy_fn checking user_settings [%]', user_settings;
        -- Get all metrics since the last windy_last_metric, averaged by 5 minutes
        -- TODO json_agg to send all data at once, but issue with py jsonb transformation decimal.
        FOR metric_rec in
            SELECT time_bucket('5 minutes', m.time) AS time_bucket,
                avg((m.metrics->'environment.outside.temperature')::numeric) AS temperature,
                avg((m.metrics->'environment.outside.pressure')::numeric) AS pressure,
                avg((m.metrics->'environment.outside.relativeHumidity')::numeric) AS rh,
                avg((m.metrics->'environment.wind.directionTrue')::numeric) AS winddir,
                avg((m.metrics->'environment.wind.speedTrue')::numeric) AS wind,
                max((m.metrics->'environment.wind.speedTrue')::numeric) AS gust,
                last(latitude, time) AS lat,
                last(longitude, time) AS lng
            FROM api.metrics m
            WHERE vessel_id = windy_rec.vessel_id
                AND m.time >= windy_rec.last_metric::TIMESTAMPTZ
            GROUP BY time_bucket
            ORDER BY time_bucket ASC LIMIT 100
        LOOP
            RAISE NOTICE '-> cron_process_windy_fn checking metrics [%]', metric_rec;
            if metric_rec.wind is null or metric_rec.temperature is null
                or metric_rec.pressure is null or metric_rec.rh is null then
                -- Ignore when there are no metrics.
                -- Send notification
                PERFORM send_notification_fn('windy_error'::TEXT, user_settings::JSONB);
                -- Disable windy
                PERFORM api.update_user_preferences_fn('{public_windy}'::TEXT, 'false'::TEXT);
                RETURN;
            end if;
            -- https://community.windy.com/topic/8168/report-your-weather-station-data-to-windy
            -- temp from Kelvin to Celsius
            -- winddir from radians to degrees
            -- rh from ratio to percentage
            SELECT jsonb_build_object(
                'dateutc', metric_rec.time_bucket,
                'station', windy_rec.id,
                'name', windy_rec.name,
                'lat', metric_rec.lat,
                'lon', metric_rec.lng,
                'wind', metric_rec.wind,
                'gust', metric_rec.gust,
                'pressure', metric_rec.pressure,
                'winddir', radiantToDegrees(metric_rec.winddir::numeric),
                'temp', kelvinToCel(metric_rec.temperature::numeric),
                'rh', valToPercent(metric_rec.rh::numeric)
                ) INTO windy_metric;
            RAISE NOTICE '-> cron_process_windy_fn checking windy_metrics [%]', windy_metric;
            SELECT windy_pws_py_fn(windy_metric, user_settings, app_settings) into windy_pws;
            RAISE NOTICE '-> cron_process_windy_fn Windy PWS [%]', ((windy_pws->'header')::JSONB ? 'id');
            IF NOT((user_settings->'settings')::JSONB ? 'windy') and ((windy_pws->'header')::JSONB ? 'id') then
                RAISE NOTICE '-> cron_process_windy_fn new Windy PWS [%]', (windy_pws->'header')::JSONB->>'id';
                -- Store the Windy station id in user preferences
                PERFORM api.update_user_preferences_fn('{windy}'::TEXT, ((windy_pws->'header')::JSONB->>'id')::TEXT);
                -- Send notification
                PERFORM send_notification_fn('windy'::TEXT, user_settings::JSONB);
                -- Refresh user settings after first success
                user_settings := get_user_settings_from_vesselid_fn(windy_rec.vessel_id::TEXT);
            END IF;
            -- Record last metrics time
            SELECT metric_rec.time_bucket INTO last_metric;
        END LOOP;
        PERFORM api.update_user_preferences_fn('{windy_last_metric}'::TEXT, last_metric::TEXT);
    END LOOP;
END;
$$ language plpgsql;
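
-- Conversion sketch for the Windy payload above, using the helper functions
-- referenced in this migration (kelvinToCel, radiantToDegrees, valToPercent);
-- the sample values and expected results are illustrative.
--SELECT kelvinToCel(293.15::NUMERIC) AS temp_c,           -- expect ~20
--       radiantToDegrees(1.5708::NUMERIC) AS winddir_deg, -- expect ~90
--       valToPercent(0.75::NUMERIC) AS rh_pct;            -- expect 75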

-- Add security definer, run this function as admin to avoid a weird bug
-- ERROR: variable not found in subplan target list
CREATE OR REPLACE FUNCTION api.delete_logbook_fn(IN _id integer) RETURNS BOOLEAN AS $delete_logbook$
DECLARE
    logbook_rec record;
    previous_stays_id numeric;
    current_stays_departed text;
    current_stays_id numeric;
    current_stays_active boolean;
BEGIN
    -- Validate input, _id must be a positive integer
    IF _id IS NULL OR _id < 1 THEN
        RAISE WARNING '-> delete_logbook_fn invalid input %', _id;
        RETURN FALSE;
    END IF;
    -- Get the logbook record, ensuring all necessary fields exist
    SELECT * INTO logbook_rec
        FROM api.logbook
        WHERE id = _id;
    -- Ensure the query is successful
    IF logbook_rec.vessel_id IS NULL THEN
        RAISE WARNING '-> delete_logbook_fn invalid logbook %', _id;
        RETURN FALSE;
    END IF;
    -- Update logbook
    UPDATE api.logbook l
        SET notes = 'mark for deletion'
        WHERE l.vessel_id = current_setting('vessel.id', false)
            AND id = logbook_rec.id;
    -- Update metrics status to moored
    -- This generates an error when run as user_role: "variable not found in subplan target list"
    UPDATE api.metrics
        SET status = 'moored'
        WHERE time >= logbook_rec._from_time
            AND time <= logbook_rec._to_time
            AND vessel_id = current_setting('vessel.id', false);
    -- Get related stays
    SELECT id,departed,active INTO current_stays_id,current_stays_departed,current_stays_active
        FROM api.stays s
        WHERE s.vessel_id = current_setting('vessel.id', false)
            AND s.arrived = logbook_rec._to_time;
    -- Update related stays
    UPDATE api.stays s
        SET notes = 'mark for deletion'
        WHERE s.vessel_id = current_setting('vessel.id', false)
            AND s.arrived = logbook_rec._to_time;
    -- Find previous stays
    SELECT id INTO previous_stays_id
        FROM api.stays s
        WHERE s.vessel_id = current_setting('vessel.id', false)
            AND s.arrived < logbook_rec._to_time
        ORDER BY s.arrived DESC LIMIT 1;
    -- Update previous stays with the departed time from current stays
    -- and set the active state from current stays
    UPDATE api.stays
        SET departed = current_stays_departed::TIMESTAMPTZ,
            active = current_stays_active
        WHERE vessel_id = current_setting('vessel.id', false)
            AND id = previous_stays_id;
    -- Clean up, remove invalid logbook and stay entry
    DELETE FROM api.logbook WHERE id = logbook_rec.id;
    RAISE WARNING '-> delete_logbook_fn delete logbook [%]', logbook_rec.id;
    DELETE FROM api.stays WHERE id = current_stays_id;
    RAISE WARNING '-> delete_logbook_fn delete stays [%]', current_stays_id;
    -- Clean up, subtract (-1) from the moorages ref count
    UPDATE api.moorages
        SET reference_count = reference_count - 1
        WHERE vessel_id = current_setting('vessel.id', false)
            AND id = previous_stays_id;
    RETURN TRUE;
END;
$delete_logbook$ LANGUAGE plpgsql security definer;
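
-- Usage sketch (illustrative log id): returns true on success, false on
-- invalid input, and merges the surrounding stays as a side effect.
--SELECT api.delete_logbook_fn(42);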

-- Allow users to update certain columns on specific TABLES on the API schema, add reference_count for when deleting a log
GRANT UPDATE (name, notes, stay_code, home_flag, reference_count) ON api.moorages TO user_role;

-- Allow users to update certain columns on specific TABLES on the API schema, add track_geojson
GRANT UPDATE (name, _from, _to, notes, track_geojson) ON api.logbook TO user_role;

DROP FUNCTION IF EXISTS api.timelapse2_fn;
CREATE OR REPLACE FUNCTION api.timelapse2_fn(
    IN start_log INTEGER DEFAULT NULL,
    IN end_log INTEGER DEFAULT NULL,
    IN start_date TEXT DEFAULT NULL,
    IN end_date TEXT DEFAULT NULL,
    OUT geojson JSONB) RETURNS JSONB AS $timelapse2$
DECLARE
    _geojson jsonb;
BEGIN
    -- Using a sub query to force id order by time
    -- User can now directly edit the json to add a comment or remove a track point
    -- Merge json track_geojson with Geometry Point into a single GeoJSON of Points
    --raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
    IF start_log IS NOT NULL AND end_log IS NULL THEN
        end_log := start_log;
    END IF;
    IF start_date IS NOT NULL AND end_date IS NULL THEN
        end_date := start_date;
    END IF;
    --raise WARNING 'input % % %' , start_log, end_log, public.isnumeric(end_log::text);
    IF start_log IS NOT NULL AND public.isnumeric(start_log::text) AND public.isnumeric(end_log::text) THEN
        SELECT jsonb_agg(
            jsonb_build_object('type', 'Feature',
                'properties', f->'properties',
                'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
            ) INTO _geojson
        FROM (
            SELECT jsonb_array_elements(track_geojson->'features') AS f
                FROM api.logbook l
                WHERE l.id >= start_log
                    AND l.id <= end_log
                    AND l.track_geojson IS NOT NULL
                ORDER BY l._from_time ASC
            ) AS sub
        WHERE (f->'geometry'->>'type') = 'Point';
    ELSIF start_date IS NOT NULL AND public.isdate(start_date::text) AND public.isdate(end_date::text) THEN
        SELECT jsonb_agg(
            jsonb_build_object('type', 'Feature',
                'properties', f->'properties',
                'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
            ) INTO _geojson
        FROM (
            SELECT jsonb_array_elements(track_geojson->'features') AS f
                FROM api.logbook l
                WHERE l._from_time >= start_date::TIMESTAMPTZ
                    AND l._to_time <= end_date::TIMESTAMPTZ + interval '23 hours 59 minutes'
                    AND l.track_geojson IS NOT NULL
                ORDER BY l._from_time ASC
            ) AS sub
        WHERE (f->'geometry'->>'type') = 'Point';
    ELSE
        SELECT jsonb_agg(
            jsonb_build_object('type', 'Feature',
                'properties', f->'properties',
                'geometry', jsonb_build_object( 'coordinates', f->'geometry'->'coordinates', 'type', 'Point'))
            ) INTO _geojson
        FROM (
            SELECT jsonb_array_elements(track_geojson->'features') AS f
                FROM api.logbook l
                WHERE l.track_geojson IS NOT NULL
                ORDER BY l._from_time ASC
            ) AS sub
        WHERE (f->'geometry'->>'type') = 'Point';
    END IF;
    -- Return a GeoJSON FeatureCollection of Points
    -- result _geojson [null, null]
    --RAISE WARNING 'result _geojson %' , _geojson;
    SELECT jsonb_build_object(
        'type', 'FeatureCollection',
        'features', _geojson ) INTO geojson;
END;
$timelapse2$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.timelapse2_fn
    IS 'Export all selected logs geojson `track_geojson` to a geojson as points including properties';

-- Allow timelapse2_fn execution for user_role and api_anonymous (public replay)
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
GRANT EXECUTE ON FUNCTION api.timelapse2_fn TO api_anonymous;
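
-- Usage sketch (illustrative ids and dates): either a log id range or a date
-- range can be passed; the result is a GeoJSON FeatureCollection of Points.
--SELECT geojson FROM api.timelapse2_fn(start_log => 100, end_log => 105);
--SELECT geojson FROM api.timelapse2_fn(start_date => '2024-04-01', end_date => '2024-04-30');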

DROP FUNCTION IF EXISTS public.process_logbook_queue_fn;
CREATE OR REPLACE FUNCTION public.process_logbook_queue_fn(IN _id integer) RETURNS void AS $process_logbook_queue$
DECLARE
    logbook_rec record;
    from_name text;
    to_name text;
    log_name text;
    from_moorage record;
    to_moorage record;
    avg_rec record;
    geo_rec record;
    log_settings jsonb;
    user_settings jsonb;
    geojson jsonb;
    extra_json jsonb;
    trip_note jsonb;
    from_moorage_note jsonb;
    to_moorage_note jsonb;
BEGIN
    -- Validate input, _id must be a positive integer
    IF _id IS NULL OR _id < 1 THEN
        RAISE WARNING '-> process_logbook_queue_fn invalid input %', _id;
        RETURN;
    END IF;
    -- Get the logbook record, ensuring all necessary fields exist
    SELECT * INTO logbook_rec
        FROM api.logbook
        WHERE active IS false
            AND id = _id
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    -- Ensure the query is successful
    IF logbook_rec.vessel_id IS NULL THEN
        RAISE WARNING '-> process_logbook_queue_fn invalid logbook %', _id;
        RETURN;
    END IF;

    PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
    --RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);

    -- Calculate logbook data average and geo
    -- Update logbook entry with the latest metric data and calculated data
    avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
    geo_rec := logbook_update_geom_distance_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);

    -- Do we have an existing moorage within 300m of the new log?
    -- Generate the logbook name, concat _from_location and _to_location from the moorage name
    from_moorage := process_lat_lon_fn(logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
    to_moorage := process_lat_lon_fn(logbook_rec._to_lng::NUMERIC, logbook_rec._to_lat::NUMERIC);
    SELECT CONCAT(from_moorage.moorage_name, ' to ' , to_moorage.moorage_name) INTO log_name;

    -- Process `propulsion.*.runTime` and `navigation.log`
    -- Calculate extra json
    extra_json := logbook_update_extra_json_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);

    RAISE NOTICE 'Updating valid logbook entry logbook id:[%] start:[%] end:[%]', logbook_rec.id, logbook_rec._from_time, logbook_rec._to_time;
    UPDATE api.logbook
        SET
            duration = (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ),
            avg_speed = avg_rec.avg_speed,
            max_speed = avg_rec.max_speed,
            max_wind_speed = avg_rec.max_wind_speed,
            _from = from_moorage.moorage_name,
            _from_moorage_id = from_moorage.moorage_id,
            _to_moorage_id = to_moorage.moorage_id,
            _to = to_moorage.moorage_name,
            name = log_name,
            track_geom = geo_rec._track_geom,
            distance = geo_rec._track_distance,
            extra = extra_json,
            notes = NULL -- reset pre_log process
        WHERE id = logbook_rec.id;

    -- GeoJSON requires the track_geom field
    geojson := logbook_update_geojson_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
    UPDATE api.logbook
        SET
            track_geojson = geojson
        WHERE id = logbook_rec.id;

    -- Add the trip details (name) as a note on the first geometry Point entry of the GeoJSON
    SELECT format('{"trip": { "name": "%s", "duration": "%s", "distance": "%s" }}', logbook_rec.name, logbook_rec.duration, logbook_rec.distance) into trip_note;
    -- Update the properties of the second feature (index 1), the first with a geometry Point
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, 1, properties}',
            (track_geojson -> 'features' -> 1 -> 'properties' || trip_note)::jsonb
            )
        WHERE id = logbook_rec.id
            and track_geojson -> 'features' -> 1 -> 'geometry' ->> 'type' = 'Point';

    -- Add the moorage name as a note on the third and last entries of the GeoJSON
    SELECT format('{"notes": "%s"}', from_moorage.moorage_name) into from_moorage_note;
    -- Update the properties of the third feature (index 2), the second with a geometry Point
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, 2, properties}',
            (track_geojson -> 'features' -> 2 -> 'properties' || from_moorage_note)::jsonb
            )
        WHERE id = logbook_rec.id
            AND track_geojson -> 'features' -> 2 -> 'geometry' ->> 'type' = 'Point';

    -- Update the notes property of the last feature with a geometry Point
    SELECT format('{"notes": "%s"}', to_moorage.moorage_name) into to_moorage_note;
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, -1, properties}',
            CASE
                WHEN COALESCE((track_geojson -> 'features' -> -1 -> 'properties' ->> 'notes'), '') = '' THEN
                    (track_geojson -> 'features' -> -1 -> 'properties' || to_moorage_note)::jsonb
                ELSE
                    track_geojson -> 'features' -> -1 -> 'properties'
            END
            )
        WHERE id = logbook_rec.id
            AND track_geojson -> 'features' -> -1 -> 'geometry' ->> 'type' = 'Point';

    -- Prepare notification, gather user settings
    SELECT json_build_object('logbook_name', log_name, 'logbook_link', logbook_rec.id) into log_settings;
    user_settings := get_user_settings_from_vesselid_fn(logbook_rec.vessel_id::TEXT);
    SELECT user_settings::JSONB || log_settings::JSONB into user_settings;
    RAISE NOTICE '-> debug process_logbook_queue_fn get_user_settings_from_vesselid_fn [%]', user_settings;
    RAISE NOTICE '-> debug process_logbook_queue_fn log_settings [%]', log_settings;
    -- Send notification
    PERFORM send_notification_fn('logbook'::TEXT, user_settings::JSONB);
    -- Process badges
    RAISE NOTICE '-> debug process_logbook_queue_fn user_settings [%]', user_settings->>'email'::TEXT;
    PERFORM set_config('user.email', user_settings->>'email'::TEXT, false);
    PERFORM badges_logbook_fn(logbook_rec.id, logbook_rec._to_time::TEXT);
    PERFORM badges_geom_fn(logbook_rec.id, logbook_rec._to_time::TEXT);
END;
$process_logbook_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.process_logbook_queue_fn
    IS 'Update logbook details when completed, logbook_update_avg_fn, logbook_update_geom_distance_fn, reverse_geocode_py_fn';

-- Update the pre-request check for the new timelapse function
CREATE OR REPLACE FUNCTION public.check_jwt() RETURNS void AS $$
-- Prevent unregistered user or unregistered vessel access
-- Allow anonymous access
-- Needs to be refactored and simplified, especially the anonymous part.
DECLARE
    _role name;
    _email text;
    anonymous record;
    _path name;
    _vid text;
    _vname text;
    boat TEXT;
    _pid INTEGER := 0; -- public_id
    _pvessel TEXT := NULL; -- public_vessel
    _ptype TEXT := NULL; -- public_type
    _ppath BOOLEAN := False; -- public_path
    _pvalid BOOLEAN := False; -- public_valid
    _pheader text := NULL; -- public_header
    valid_public_type BOOLEAN := False;
    account_rec record;
    vessel_rec record;
BEGIN
    -- Extract email and role from jwt token
    --RAISE WARNING 'check_jwt jwt %', current_setting('request.jwt.claims', true);
    SELECT current_setting('request.jwt.claims', true)::json->>'email' INTO _email;
    PERFORM set_config('user.email', _email, false);
    SELECT current_setting('request.jwt.claims', true)::json->>'role' INTO _role;
    --RAISE WARNING 'jwt email %', current_setting('request.jwt.claims', true)::json->>'email';
    --RAISE WARNING 'jwt role %', current_setting('request.jwt.claims', true)::json->>'role';
    --RAISE WARNING 'cur_user %', current_user;

    --TODO SELECT current_setting('request.jwt.uid', true)::json->>'uid' INTO _user_id;
    --TODO RAISE WARNING 'jwt user_id %', current_setting('request.jwt.uid', true)::json->>'uid';
    --TODO SELECT current_setting('request.jwt.vid', true)::json->>'vid' INTO _vessel_id;
    --TODO RAISE WARNING 'jwt vessel_id %', current_setting('request.jwt.vid', true)::json->>'vid';
    IF _role = 'user_role' THEN
        -- Check the user exists in the accounts table
        SELECT * INTO account_rec
            FROM auth.accounts
            WHERE auth.accounts.email = _email;
        IF account_rec.email IS NULL THEN
            RAISE EXCEPTION 'Invalid user'
                USING HINT = 'Unknown user or password';
        END IF;
        -- Set session variables
        PERFORM set_config('user.id', account_rec.user_id, false);
        SELECT current_setting('request.path', true) into _path;
        --RAISE WARNING 'req path %', current_setting('request.path', true);
        -- Functions allowed without a defined vessel, like for the anonymous role
        IF _path ~ '^\/rpc\/(login|signup|recover|reset)$' THEN
            RETURN;
        END IF;
        -- Functions allowed without a defined vessel as user role
        -- openapi doc, user settings, otp code and vessel registration
        IF _path = '/rpc/settings_fn'
            OR _path = '/rpc/register_vessel'
            OR _path = '/rpc/update_user_preferences_fn'
            OR _path = '/rpc/versions_fn'
            OR _path = '/rpc/email_fn'
            OR _path = '/' THEN
            RETURN;
        END IF;
        -- Check a vessel and user exist
        SELECT auth.vessels.* INTO vessel_rec
            FROM auth.vessels, auth.accounts
            WHERE auth.vessels.owner_email = auth.accounts.email
                AND auth.accounts.email = _email;
        -- Check if the boat exists yet
        IF vessel_rec.owner_email IS NULL THEN
            -- Return http status code 551 with message
            RAISE sqlstate 'PT551' using
                message = 'Vessel Required',
                detail = 'Invalid vessel',
                hint = 'Unknown vessel';
            --RETURN; -- ignore if not exist
        END IF;
        -- Redundant?
        IF vessel_rec.vessel_id IS NULL THEN
            RAISE EXCEPTION 'Invalid vessel'
                USING HINT = 'Unknown vessel id';
        END IF;
        -- Set session variables
        PERFORM set_config('vessel.id', vessel_rec.vessel_id, false);
        PERFORM set_config('vessel.name', vessel_rec.name, false);
        --RAISE WARNING 'public.check_jwt() user_role vessel.id [%]', current_setting('vessel.id', false);
        --RAISE WARNING 'public.check_jwt() user_role vessel.name [%]', current_setting('vessel.name', false);
    ELSIF _role = 'vessel_role' THEN
        SELECT current_setting('request.path', true) into _path;
        --RAISE WARNING 'req path %', current_setting('request.path', true);
        -- Functions allowed without a defined vessel, like for the anonymous role
        IF _path ~ '^\/rpc\/(oauth_\w+)$' THEN
            RETURN;
        END IF;
        -- Extract vessel_id from jwt token
        SELECT current_setting('request.jwt.claims', true)::json->>'vid' INTO _vid;
        -- Check the vessel and user exist
        SELECT auth.vessels.* INTO vessel_rec
            FROM auth.vessels, auth.accounts
            WHERE auth.vessels.owner_email = auth.accounts.email
                AND auth.accounts.email = _email
                AND auth.vessels.vessel_id = _vid;
        IF vessel_rec.owner_email IS NULL THEN
            RAISE EXCEPTION 'Invalid vessel'
                USING HINT = 'Unknown vessel owner_email';
        END IF;
        PERFORM set_config('vessel.id', vessel_rec.vessel_id, false);
        PERFORM set_config('vessel.name', vessel_rec.name, false);
        --RAISE WARNING 'public.check_jwt() user_role vessel.name %', current_setting('vessel.name', false);
        --RAISE WARNING 'public.check_jwt() user_role vessel.id %', current_setting('vessel.id', false);
    ELSIF _role = 'api_anonymous' THEN
        --RAISE WARNING 'public.check_jwt() api_anonymous';
        -- Check if the path is a valid allowed anonymous path
        SELECT current_setting('request.path', true) ~ '^/(logs_view|log_view|rpc/timelapse_fn|rpc/timelapse2_fn|monitoring_view|stats_logs_view|stats_moorages_view|rpc/stats_logs_fn)$' INTO _ppath;
        if _ppath is True then
            -- Check if the custom header is present and valid
            SELECT current_setting('request.headers', true)::json->>'x-is-public' into _pheader;
            RAISE WARNING 'public.check_jwt() api_anonymous _pheader [%]', _pheader;
            if _pheader is null then
                RAISE EXCEPTION 'Invalid public_header'
                    USING HINT = 'Stop being so evil and maybe you can log in';
            end if;
            SELECT convert_from(decode(_pheader, 'base64'), 'utf-8')
                ~ '\w+,public_(logs|logs_list|stats|timelapse|monitoring),\d+$' into _pvalid;
            RAISE WARNING 'public.check_jwt() api_anonymous _pvalid [%]', _pvalid;
            if _pvalid is null or _pvalid is False then
                RAISE EXCEPTION 'Invalid public_valid'
                    USING HINT = 'Stop being so evil and maybe you can log in';
            end if;
            WITH regex AS (
                SELECT regexp_match(
                        convert_from(
                            decode(_pheader, 'base64'), 'utf-8'),
                        '(\w+),(public_(logs|logs_list|stats|timelapse|monitoring)),(\d+)$') AS match
            )
            SELECT match[1], match[2], match[4] into _pvessel, _ptype, _pid
                FROM regex;
            RAISE WARNING 'public.check_jwt() api_anonymous [%] [%] [%]', _pvessel, _ptype, _pid;
            if _pvessel is not null and _ptype is not null then
                -- Everything seems fine, get the vessel_id based on the vessel name.
                SELECT _ptype::name = any(enum_range(null::public_type)::name[]) INTO valid_public_type;
                IF valid_public_type IS False THEN
                    -- Ignore entry if type is invalid
                    RAISE EXCEPTION 'Invalid public_type'
                        USING HINT = 'Stop being so evil and maybe you can log in';
                END IF;
                -- Check if the boat name matches the public_vessel name
                boat := '^' || _pvessel || '$';
                IF _ptype ~ '^public_(logs|timelapse)$' AND _pid > 0 THEN
                    WITH log as (
                        SELECT vessel_id from api.logbook l where l.id = _pid
                    )
                    SELECT v.vessel_id, v.name into anonymous
                        FROM auth.accounts a, auth.vessels v, jsonb_each_text(a.preferences) as prefs, log l
                        WHERE v.vessel_id = l.vessel_id
                            AND a.email = v.owner_email
                            AND a.preferences->>'public_vessel'::text ~* boat
                            AND prefs.key = _ptype::TEXT
                            AND prefs.value::BOOLEAN = true;
                    RAISE WARNING '-> ispublic_fn public_logs output boat:[%], type:[%], result:[%]', _pvessel, _ptype, anonymous;
                    IF anonymous.vessel_id IS NOT NULL THEN
                        PERFORM set_config('vessel.id', anonymous.vessel_id, false);
                        PERFORM set_config('vessel.name', anonymous.name, false);
                        RETURN;
                    END IF;
                ELSE
                    SELECT v.vessel_id, v.name into anonymous
                        FROM auth.accounts a, auth.vessels v, jsonb_each_text(a.preferences) as prefs
                        WHERE a.email = v.owner_email
                            AND a.preferences->>'public_vessel'::text ~* boat
                            AND prefs.key = _ptype::TEXT
                            AND prefs.value::BOOLEAN = true;
                    RAISE WARNING '-> ispublic_fn output boat:[%], type:[%], result:[%]', _pvessel, _ptype, anonymous;
                    IF anonymous.vessel_id IS NOT NULL THEN
                        PERFORM set_config('vessel.id', anonymous.vessel_id, false);
                        PERFORM set_config('vessel.name', anonymous.name, false);
                        RETURN;
                    END IF;
                END IF;
                RAISE sqlstate 'PT404' using message = 'unknown resource';
            END IF; -- end anonymous path
        END IF;
    ELSIF _role <> 'api_anonymous' THEN
        RAISE EXCEPTION 'Invalid role'
            USING HINT = 'Stop being so evil and maybe you can log in';
    END IF;
END
$$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    public.check_jwt
    IS 'PostgREST API db-pre-request check, set_config according to role (api_anonymous,vessel_role,user_role)';

GRANT EXECUTE ON FUNCTION public.check_jwt() TO api_anonymous;
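
-- Header sketch: per the regex above, the x-is-public value is expected to be
-- the base64 encoding of '<vessel_name>,public_<type>,<id>'; the vessel name
-- and id below are made up.
--SELECT encode(convert_to('aava,public_timelapse,123', 'utf-8'), 'base64');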

-- Update version
UPDATE public.app_settings
    SET value='0.7.2'
    WHERE "name"='app.version';
784 initdb/99_migrations_202405.sql Normal file
@@ -0,0 +1,784 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration May 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('account_disable','PostgSail Account disabled',E'Hello __RECIPIENT__,\nSorry! Your account is disabled. Please contact me to solve the issue.','PostgSail Account disabled!',E'Sorry!\nYour account is disabled. Please contact me to solve the issue.');

-- Check if user is disabled due to abuse
-- Track IP per user to avoid abuse
create or replace function
api.login(in email text, in pass text) returns auth.jwt_token as $$
declare
    _role name;
    result auth.jwt_token;
    app_jwt_secret text;
    _email_valid boolean := false;
    _email text := email;
    _user_id text := null;
    _user_disable boolean := false;
    headers json := current_setting('request.headers', true)::json;
    client_ip text := coalesce(headers->>'x-client-ip', NULL);
begin
    -- check email and password
    select auth.user_role(email, pass) into _role;
    if _role is null then
        -- HTTP/403
        --raise invalid_password using message = 'invalid user or password';
        -- HTTP/401
        raise insufficient_privilege using message = 'invalid user or password';
    end if;

    -- Check if user is disabled due to abuse
    SELECT preferences['disable'],user_id INTO _user_disable,_user_id
        FROM auth.accounts a
        WHERE a.email = _email;
    IF _user_disable is True then
        -- due to the raise, the insert is never committed.
        --INSERT INTO process_queue (channel, payload, stored, ref_id)
        --    VALUES ('account_disable', _email, now(), _user_id);
        RAISE sqlstate 'PT402' using message = 'Account disabled, contact us',
            detail = 'Quota exceeded',
            hint = 'Upgrade your plan';
    END IF;

    -- Check email_valid and generate OTP
    SELECT preferences['email_valid'],user_id INTO _email_valid,_user_id
        FROM auth.accounts a
        WHERE a.email = _email;
    IF _email_valid is null or _email_valid is False THEN
        INSERT INTO process_queue (channel, payload, stored, ref_id)
            VALUES ('email_otp', _email, now(), _user_id);
    END IF;

    -- Track IP per user to avoid abuse
    --RAISE WARNING 'api.login debug: [%],[%]', client_ip, login.email;
    IF client_ip IS NOT NULL THEN
        UPDATE auth.accounts a SET preferences = jsonb_recursive_merge(a.preferences, jsonb_build_object('ip', client_ip)) WHERE a.email = login.email;
    END IF;

    -- Get app_jwt_secret
    SELECT value INTO app_jwt_secret
        FROM app_settings
        WHERE name = 'app.jwt_secret';

    --RAISE WARNING 'api.login debug: [%],[%],[%]', app_jwt_secret, _role, login.email;
    -- Generate jwt
    select jwt.sign(
        -- row_to_json(r), ''
        -- row_to_json(r)::json, current_setting('app.jwt_secret')::text
        row_to_json(r)::json, app_jwt_secret
        ) as token
        from (
            select _role as role, login.email as email, -- TODO replace with user_id
                -- select _role as role, user_id as uid, -- add support in check_jwt
                extract(epoch from now())::integer + 60*60 as exp
        ) r
        into result;
    return result;
end;
$$ language plpgsql security definer;
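
-- Usage sketch (illustrative credentials): on success the function returns a
-- signed JWT token valid for one hour; a disabled account raises sqlstate
-- PT402 instead.
--SELECT api.login('user@example.com', 'secret');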

-- Add moorage name to view
DROP VIEW IF EXISTS api.moorages_stays_view;
CREATE OR REPLACE VIEW api.moorages_stays_view WITH (security_invoker=true,security_barrier=true) AS
    select
        _to.name AS _to_name,
        _to.id AS _to_id,
        _to._to_time,
        _from.id AS _from_id,
        _from.name AS _from_name,
        _from._from_time,
        s.stay_code,s.duration,m.id,m.name
    FROM api.stays_at sa, api.moorages m, api.stays s
    LEFT JOIN api.logbook AS _from ON _from._from_time = s.departed
    LEFT JOIN api.logbook AS _to ON _to._to_time = s.arrived
    WHERE s.departed IS NOT NULL
        AND s.name IS NOT NULL
        AND s.stay_code = sa.stay_code
        AND s.moorage_id = m.id
    ORDER BY _to._to_time DESC;
-- Description
COMMENT ON VIEW
    api.moorages_stays_view
    IS 'Moorages stay listing web view';

-- Create a merge_logbook_fn
CREATE OR REPLACE FUNCTION api.merge_logbook_fn(IN id_start integer, IN id_end integer) RETURNS void AS $merge_logbook$
DECLARE
    logbook_rec_start record;
    logbook_rec_end record;
    log_name text;
    avg_rec record;
    geo_rec record;
    geojson jsonb;
    extra_json jsonb;
BEGIN
    -- Validate input, id_start and id_end must be positive integers
    IF (id_start IS NULL OR id_start < 1) OR (id_end IS NULL OR id_end < 1) THEN
        RAISE WARNING '-> merge_logbook_fn invalid input % %', id_start, id_end;
        RETURN;
    END IF;
    -- If id_end is lower than or equal to id_start
    IF id_end <= id_start THEN
        RAISE WARNING '-> merge_logbook_fn invalid input % < %', id_end, id_start;
        RETURN;
    END IF;
    -- Get the start logbook record, ensuring all necessary fields exist
    SELECT * INTO logbook_rec_start
        FROM api.logbook
        WHERE active IS false
            AND id = id_start
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    -- Ensure the query is successful
    IF logbook_rec_start.vessel_id IS NULL THEN
        RAISE WARNING '-> merge_logbook_fn invalid logbook %', id_start;
        RETURN;
    END IF;
    -- Get the end logbook record, ensuring all necessary fields exist
    SELECT * INTO logbook_rec_end
        FROM api.logbook
        WHERE active IS false
            AND id = id_end
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    -- Ensure the query is successful
    IF logbook_rec_end.vessel_id IS NULL THEN
        RAISE WARNING '-> merge_logbook_fn invalid logbook %', id_end;
        RETURN;
    END IF;

    RAISE WARNING '-> merge_logbook_fn logbook start:% end:%', id_start, id_end;
    PERFORM set_config('vessel.id', logbook_rec_start.vessel_id, false);

    -- Calculate logbook data average and geo
    -- Update logbook entry with the latest metric data and calculated data
    avg_rec := logbook_update_avg_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
    geo_rec := logbook_update_geom_distance_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);

    -- Process `propulsion.*.runTime` and `navigation.log`
    -- Calculate extra json
    extra_json := logbook_update_extra_json_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
    -- add the avg_wind_speed
    extra_json := extra_json || jsonb_build_object('avg_wind_speed', avg_rec.avg_wind_speed);

    -- Generate the logbook name, concat _from_location and _to_location from the moorage name
    SELECT CONCAT(logbook_rec_start._from, ' to ', logbook_rec_end._to) INTO log_name;
    RAISE NOTICE 'Updating valid logbook entry logbook id:[%] start:[%] end:[%]', logbook_rec_start.id, logbook_rec_start._from_time, logbook_rec_end._to_time;
    UPDATE api.logbook
        SET
            -- Update the start logbook with the newly calculated metrics
            duration = (logbook_rec_end._to_time::TIMESTAMPTZ - logbook_rec_start._from_time::TIMESTAMPTZ),
            avg_speed = avg_rec.avg_speed,
            max_speed = avg_rec.max_speed,
            max_wind_speed = avg_rec.max_wind_speed,
            name = log_name,
            track_geom = geo_rec._track_geom,
            distance = geo_rec._track_distance,
            extra = extra_json,
            -- Set _to metrics from the end logbook
            _to = logbook_rec_end._to,
            _to_moorage_id = logbook_rec_end._to_moorage_id,
            _to_lat = logbook_rec_end._to_lat,
            _to_lng = logbook_rec_end._to_lng,
            _to_time = logbook_rec_end._to_time
        WHERE id = logbook_rec_start.id;

    -- GeoJSON requires the track_geom field
    geojson := logbook_update_geojson_fn(logbook_rec_start.id, logbook_rec_start._from_time::TEXT, logbook_rec_end._to_time::TEXT);
    UPDATE api.logbook
        SET
            track_geojson = geojson
        WHERE id = logbook_rec_start.id;

    -- Mark the end logbook for deletion
    UPDATE api.logbook
        SET notes = 'mark for deletion'
        WHERE id = logbook_rec_end.id;
    -- Mark the related stays for deletion
    UPDATE api.stays
        SET notes = 'mark for deletion'
        WHERE arrived = logbook_rec_start._to_time;
    -- Mark the related moorages for deletion
    UPDATE api.moorages
        SET notes = 'mark for deletion'
        WHERE id = logbook_rec_start._to_moorage_id;

    -- Clean up, remove the now-invalid logbook, stay and moorage entries
    DELETE FROM api.logbook WHERE id = logbook_rec_end.id;
    RAISE WARNING '-> merge_logbook_fn delete logbook id [%]', logbook_rec_end.id;
    DELETE FROM api.stays WHERE arrived = logbook_rec_start._to_time;
    RAISE WARNING '-> merge_logbook_fn delete stay arrived [%]', logbook_rec_start._to_time;
    DELETE FROM api.moorages WHERE id = logbook_rec_start._to_moorage_id;
    RAISE WARNING '-> merge_logbook_fn delete moorage id [%]', logbook_rec_start._to_moorage_id;
END;
$merge_logbook$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    api.merge_logbook_fn
    IS 'Merge 2 logbooks by id, using the start of the lower log id and the end of the higher log id, and update the calculated data as well (avg, geojson)';
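
-- Usage sketch (illustrative ids): merges log 102 into log 101, then deletes
-- log 102 plus the intermediate stay and moorage.
--SELECT api.merge_logbook_fn(101, 102);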

-- Add tags to view
DROP VIEW IF EXISTS api.logs_view;
CREATE OR REPLACE VIEW api.logs_view
    WITH(security_invoker=true,security_barrier=true)
    AS SELECT id,
        name,
        _from AS "from",
        _from_time AS started,
        _to AS "to",
        _to_time AS ended,
        distance,
        duration,
        _from_moorage_id,
        _to_moorage_id,
        extra->'tags' AS tags
    FROM api.logbook l
    WHERE name IS NOT NULL AND _to_time IS NOT NULL
    ORDER BY _from_time DESC;
-- Description
COMMENT ON VIEW api.logs_view IS 'Logs web view';

-- Update a logbook with avg wind speed
DROP FUNCTION IF EXISTS public.logbook_update_avg_fn;
CREATE OR REPLACE FUNCTION public.logbook_update_avg_fn(
    IN _id integer,
    IN _start TEXT,
    IN _end TEXT,
    OUT avg_speed double precision,
    OUT max_speed double precision,
    OUT max_wind_speed double precision,
    OUT avg_wind_speed double precision,
    OUT count_metric integer
) AS $logbook_update_avg$
BEGIN
    RAISE NOTICE '-> logbook_update_avg_fn calculate avg for logbook id=%, start:"%", end:"%"', _id, _start, _end;
    SELECT AVG(speedoverground), MAX(speedoverground), MAX(windspeedapparent), AVG(windspeedapparent), COUNT(*) INTO
            avg_speed, max_speed, max_wind_speed, avg_wind_speed, count_metric
        FROM api.metrics m
        WHERE m.latitude IS NOT NULL
            AND m.longitude IS NOT NULL
            AND m.time >= _start::TIMESTAMPTZ
            AND m.time <= _end::TIMESTAMPTZ
            AND vessel_id = current_setting('vessel.id', false);
    RAISE NOTICE '-> logbook_update_avg_fn avg for logbook id=%, avg_speed:%, max_speed:%, avg_wind_speed:%, max_wind_speed:%, count:%', _id, avg_speed, max_speed, avg_wind_speed, max_wind_speed, count_metric;
END;
$logbook_update_avg$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_update_avg_fn
    IS 'Update logbook details with calculated average and max data, AVG(speedOverGround), MAX(speedOverGround), MAX(windspeedapparent), count_metric';
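
-- Usage sketch (illustrative id and time range): requires vessel.id to be set
-- in the session, as done by the callers above.
--SELECT * FROM public.logbook_update_avg_fn(1, '2024-05-01T10:00:00Z', '2024-05-01T16:00:00Z');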

-- Update pending new logbook from process queue
DROP FUNCTION IF EXISTS process_logbook_queue_fn;
CREATE OR REPLACE FUNCTION process_logbook_queue_fn(IN _id integer) RETURNS void AS $process_logbook_queue$
DECLARE
    logbook_rec record;
    from_name text;
    to_name text;
    log_name text;
    from_moorage record;
    to_moorage record;
    avg_rec record;
    geo_rec record;
    log_settings jsonb;
    user_settings jsonb;
    geojson jsonb;
    extra_json jsonb;
BEGIN
    -- Validate input, _id must be a positive integer
    IF _id IS NULL OR _id < 1 THEN
        RAISE WARNING '-> process_logbook_queue_fn invalid input %', _id;
        RETURN;
    END IF;
    -- Get the logbook record, ensuring all necessary fields exist
    SELECT * INTO logbook_rec
        FROM api.logbook
        WHERE active IS false
            AND id = _id
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    -- Ensure the query is successful
    IF logbook_rec.vessel_id IS NULL THEN
        RAISE WARNING '-> process_logbook_queue_fn invalid logbook %', _id;
        RETURN;
    END IF;

    PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
    --RAISE WARNING 'public.process_logbook_queue_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);

    -- Calculate logbook data average and geo
    -- Update logbook entry with the latest metric data and calculated data
    avg_rec := logbook_update_avg_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
    geo_rec := logbook_update_geom_distance_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);

    -- Do we have an existing moorage within 300m of the new log?
    -- Generate the logbook name, concat _from_location and _to_location from the moorage name
    from_moorage := process_lat_lon_fn(logbook_rec._from_lng::NUMERIC, logbook_rec._from_lat::NUMERIC);
    to_moorage := process_lat_lon_fn(logbook_rec._to_lng::NUMERIC, logbook_rec._to_lat::NUMERIC);
    SELECT CONCAT(from_moorage.moorage_name, ' to ' , to_moorage.moorage_name) INTO log_name;

    -- Process `propulsion.*.runTime` and `navigation.log`
    -- Calculate extra json
    extra_json := logbook_update_extra_json_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
    -- add the avg_wind_speed
    extra_json := extra_json || jsonb_build_object('avg_wind_speed', avg_rec.avg_wind_speed);

    RAISE NOTICE 'Updating valid logbook entry logbook id:[%] start:[%] end:[%]', logbook_rec.id, logbook_rec._from_time, logbook_rec._to_time;
    UPDATE api.logbook
        SET
            duration = (logbook_rec._to_time::TIMESTAMPTZ - logbook_rec._from_time::TIMESTAMPTZ),
            avg_speed = avg_rec.avg_speed,
            max_speed = avg_rec.max_speed,
            max_wind_speed = avg_rec.max_wind_speed,
            _from = from_moorage.moorage_name,
            _from_moorage_id = from_moorage.moorage_id,
            _to_moorage_id = to_moorage.moorage_id,
            _to = to_moorage.moorage_name,
            name = log_name,
            track_geom = geo_rec._track_geom,
            distance = geo_rec._track_distance,
            extra = extra_json,
            notes = NULL -- reset pre_log process
        WHERE id = logbook_rec.id;

    -- GeoJSON requires the track_geom geometry (LineString) field
    geojson := logbook_update_geojson_fn(logbook_rec.id, logbook_rec._from_time::TEXT, logbook_rec._to_time::TEXT);
    UPDATE api.logbook
        SET
            track_geojson = geojson
        WHERE id = logbook_rec.id;

    -- The GeoJSON timelapse requires track_geojson geometry Points
    -- Add properties to the geojson for timelapse purpose
    PERFORM public.logbook_timelapse_geojson_fn(logbook_rec.id);

    -- Add a post logbook entry to the process queue for notification and QGIS processing
    -- Required as we need the logbook update to be committed first
    INSERT INTO process_queue (channel, payload, stored, ref_id)
        VALUES ('post_logbook', logbook_rec.id, NOW(), current_setting('vessel.id', true));

END;
$process_logbook_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.process_logbook_queue_fn
    IS 'Update logbook details when completed, logbook_update_avg_fn, logbook_update_geom_distance_fn, reverse_geocode_py_fn';

-- Add avg_wind_speed to logbook geojson
-- Add back truewindspeed and truewinddirection to logbook geojson
DROP FUNCTION IF EXISTS public.logbook_update_geojson_fn;
CREATE FUNCTION public.logbook_update_geojson_fn(IN _id integer, IN _start text, IN _end text,
    OUT _track_geojson JSON
) AS $logbook_geojson$
declare
    log_geojson jsonb;
    metrics_geojson jsonb;
    _map jsonb;
begin
    -- GeoJson Feature Logbook linestring
    SELECT
        ST_AsGeoJSON(log.*) into log_geojson
    FROM
        ( SELECT
            id,name,
            distance,
            duration,
            avg_speed,
            max_speed,
            max_wind_speed,
            _from_time,
            _to_time,
            _from_moorage_id,
            _to_moorage_id,
            notes,
            extra['avg_wind_speed'] as avg_wind_speed,
            track_geom
            FROM api.logbook
            WHERE id = _id
        ) AS log;
    -- GeoJson Feature Metrics point
    SELECT
        json_agg(ST_AsGeoJSON(t.*)::json) into metrics_geojson
    FROM (
        ( SELECT
            time,
            courseovergroundtrue,
            speedoverground,
            windspeedapparent,
            longitude,latitude,
            '' AS notes,
            coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null) as truewindspeed,
            coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null) as truewinddirection,
            coalesce(status, null) as status,
            st_makepoint(longitude,latitude) AS geo_point
            FROM api.metrics m
            WHERE m.latitude IS NOT NULL
                AND m.longitude IS NOT NULL
                AND time >= _start::TIMESTAMPTZ
                AND time <= _end::TIMESTAMPTZ
                AND vessel_id = current_setting('vessel.id', false)
            ORDER BY m.time ASC
        )
    ) AS t;

    -- Merge jsonb
    SELECT log_geojson::jsonb || metrics_geojson::jsonb into _map;
    -- output
    SELECT
        json_build_object(
            'type', 'FeatureCollection',
            'features', _map
        ) into _track_geojson;
END;
$logbook_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_update_geojson_fn
    IS 'Update log details with geojson';

-- Add properties to the geojson for timelapse purpose
DROP FUNCTION IF EXISTS public.logbook_timelapse_geojson_fn;
CREATE FUNCTION public.logbook_timelapse_geojson_fn(IN _id INT) returns void
AS $logbook_timelapse$
declare
    first_feature_note JSONB;
    second_feature_note JSONB;
    last_feature_note JSONB;
    logbook_rec record;
begin
    -- We need to fetch the processed logbook data.
    SELECT name,duration,distance,_from,_to INTO logbook_rec
        FROM api.logbook
        WHERE active IS false
            AND id = _id
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    --raise warning '-> logbook_rec: %', logbook_rec;
    select format('{"trip": { "name": "%s", "duration": "%s", "distance": "%s" }}', logbook_rec.name, logbook_rec.duration, logbook_rec.distance) into first_feature_note;
    select format('{"notes": "%s"}', logbook_rec._from) into second_feature_note;
    select format('{"notes": "%s"}', logbook_rec._to) into last_feature_note;
    --raise warning '-> logbook_rec: % % %', first_feature_note, second_feature_note, last_feature_note;

    -- Update the properties of the second feature (index 1), the first with a geometry Point
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, 1, properties}',
            (track_geojson -> 'features' -> 1 -> 'properties' || first_feature_note)::jsonb
            )
        WHERE id = _id
            and track_geojson -> 'features' -> 1 -> 'geometry' ->> 'type' = 'Point';

    -- Update the properties of the third feature (index 2), the second with a geometry Point
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, 2, properties}',
            (track_geojson -> 'features' -> 2 -> 'properties' || second_feature_note)::jsonb
            )
        where id = _id
            and track_geojson -> 'features' -> 2 -> 'geometry' ->> 'type' = 'Point';

    -- Update the properties of the last feature with a geometry Point
    UPDATE api.logbook
        SET track_geojson = jsonb_set(
            track_geojson,
            '{features, -1, properties}',
            CASE
                WHEN COALESCE((track_geojson -> 'features' -> -1 -> 'properties' ->> 'notes'), '') = '' THEN
                    (track_geojson -> 'features' -> -1 -> 'properties' || last_feature_note)::jsonb
                ELSE
                    track_geojson -> 'features' -> -1 -> 'properties'
            END
            )
        WHERE id = _id
            and track_geojson -> 'features' -> -1 -> 'geometry' ->> 'type' = 'Point';
end;
$logbook_timelapse$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_timelapse_geojson_fn
    IS 'Update logbook geojson, add properties to some geojson features for timelapse purpose';

-- CRON for signalk plugin upgrade
-- The goal is to avoid errors from old plugin versions by enforcing an upgrade.
-- ERROR: there is no unique or exclusion constraint matching the ON CONFLICT specification
-- "POST /metadata?on_conflict=client_id HTTP/1.1" 400 137 "-" "postgsail.signalk v0.0.9"
DROP FUNCTION IF EXISTS public.cron_process_skplugin_upgrade_fn;
CREATE FUNCTION public.cron_process_skplugin_upgrade_fn() RETURNS void AS $skplugin_upgrade$
DECLARE
    skplugin_upgrade_rec record;
    user_settings jsonb;
BEGIN
    -- Check for signalk plugin version
    RAISE NOTICE 'cron_process_plugin_upgrade_fn';
    FOR skplugin_upgrade_rec in
        SELECT
            v.owner_email,m.name,m.vessel_id,m.plugin_version,a.first
        FROM api.metadata m
        LEFT JOIN auth.vessels v ON v.vessel_id = m.vessel_id
        LEFT JOIN auth.accounts a ON v.owner_email = a.email
        WHERE m.plugin_version <= '0.3.0'
    LOOP
        RAISE NOTICE '-> cron_process_skplugin_upgrade_rec_fn for [%]', skplugin_upgrade_rec;
        SELECT json_build_object('email', skplugin_upgrade_rec.owner_email, 'recipient', skplugin_upgrade_rec.first) into user_settings;
        RAISE NOTICE '-> debug cron_process_skplugin_upgrade_rec_fn [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('skplugin_upgrade'::TEXT, user_settings::JSONB);
    END LOOP;
END;
$skplugin_upgrade$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_skplugin_upgrade_fn
    IS 'init by pg_cron, check for signalk plugin version and notify for upgrade';

INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('skplugin_upgrade','PostgSail Signalk plugin upgrade',E'Hello __RECIPIENT__,\nPlease upgrade your postgsail signalk plugin. Be sure to contact me if you encounter any issue.','PostgSail Signalk plugin upgrade!',E'Please upgrade your postgsail signalk plugin.');

DROP FUNCTION IF EXISTS public.metadata_ip_trigger_fn;
-- Track IP per vessel to avoid abuse
CREATE FUNCTION public.metadata_ip_trigger_fn() RETURNS trigger
AS $metadata_ip_trigger$
DECLARE
    headers json := current_setting('request.headers', true)::json;
    client_ip text := coalesce(headers->>'x-client-ip', NULL);
BEGIN
    RAISE WARNING 'metadata_ip_trigger_fn [%] [%]', current_setting('vessel.id', true), client_ip;
    IF client_ip IS NOT NULL THEN
        UPDATE api.metadata
            SET
                configuration = NEW.configuration || jsonb_build_object('ip', client_ip)
            WHERE id = NEW.id;
    END IF;
    RETURN NULL;
END;
$metadata_ip_trigger$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION public.metadata_ip_trigger_fn() IS 'Add IP from vessel in metadata, track abuse';

DROP TRIGGER IF EXISTS metadata_ip_trigger ON api.metadata;
-- Generates an error, so the trigger is left disabled:
--CREATE TRIGGER metadata_ip_trigger BEFORE UPDATE ON api.metadata
--    FOR EACH ROW EXECUTE FUNCTION metadata_ip_trigger_fn();
-- Description
--COMMENT ON TRIGGER
--    metadata_ip_trigger ON api.metadata
--    IS 'BEFORE UPDATE ON api.metadata run function metadata_ip_trigger_fn for tracking vessel IP';
|
||||

DROP FUNCTION IF EXISTS public.logbook_active_geojson_fn;
CREATE FUNCTION public.logbook_active_geojson_fn(
    OUT _track_geojson jsonb
) AS $logbook_active_geojson$
BEGIN
    WITH log_active AS (
        SELECT * FROM api.logbook WHERE active IS True
    ),
    log_gis_line AS (
        SELECT ST_MakeLine(
            ARRAY(
                SELECT st_makepoint(longitude,latitude) AS geo_point
                    FROM api.metrics m, log_active l
                    WHERE m.latitude IS NOT NULL
                        AND m.longitude IS NOT NULL
                        AND m.time >= l._from_time::TIMESTAMPTZ
                        AND m.time <= l._to_time::TIMESTAMPTZ
                    ORDER BY m.time ASC
            )
        )
    ),
    log_gis_point AS (
        SELECT
            ST_AsGeoJSON(t.*)::json AS GeoJSONPoint
        FROM (
            ( SELECT
                time,
                courseovergroundtrue,
                speedoverground,
                windspeedapparent,
                longitude,latitude,
                '' AS notes,
                coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null) as truewindspeed,
                coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null) as truewinddirection,
                coalesce(status, null) AS status,
                st_makepoint(longitude,latitude) AS geo_point
                FROM api.metrics m
                WHERE m.latitude IS NOT NULL
                    AND m.longitude IS NOT NULL
                ORDER BY m.time DESC LIMIT 1
            )
        ) as t
    ),
    log_agg as (
        SELECT
            CASE WHEN log_gis_line.st_makeline IS NOT NULL THEN
                ( SELECT jsonb_agg(ST_AsGeoJSON(log_gis_line.*)::json)::jsonb AS GeoJSONLine FROM log_gis_line )
            ELSE
                ( SELECT '[]'::json AS GeoJSONLine )::jsonb
            END
            FROM log_gis_line
    )
    SELECT
        jsonb_build_object(
            'type', 'FeatureCollection',
            'features', log_agg.GeoJSONLine::jsonb || log_gis_point.GeoJSONPoint::jsonb
        ) INTO _track_geojson FROM log_agg, log_gis_point;
END;
$logbook_active_geojson$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.logbook_active_geojson_fn
    IS 'Create a GeoJSON with 2 features, LineString with a current active log and Point with the last position';
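
-- Illustrative usage (assumes the session vessel context is set and a log is currently active):
--   SELECT public.logbook_active_geojson_fn();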

-- Update monitoring view to support the live trip, and add truewindspeed and truewinddirection to the stationary GeoJSON.
DROP VIEW IF EXISTS api.monitoring_view;
CREATE VIEW api.monitoring_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT
        time AS "time",
        (NOW() AT TIME ZONE 'UTC' - time) > INTERVAL '70 MINUTES' as offline,
        metrics-> 'environment.water.temperature' AS waterTemperature,
        metrics-> 'environment.inside.temperature' AS insideTemperature,
        metrics-> 'environment.outside.temperature' AS outsideTemperature,
        metrics-> 'environment.wind.speedOverGround' AS windSpeedOverGround,
        metrics-> 'environment.wind.directionTrue' AS windDirectionTrue,
        metrics-> 'environment.inside.relativeHumidity' AS insideHumidity,
        metrics-> 'environment.outside.relativeHumidity' AS outsideHumidity,
        metrics-> 'environment.outside.pressure' AS outsidePressure,
        metrics-> 'environment.inside.pressure' AS insidePressure,
        metrics-> 'electrical.batteries.House.capacity.stateOfCharge' AS batteryCharge,
        metrics-> 'electrical.batteries.House.voltage' AS batteryVoltage,
        metrics-> 'environment.depth.belowTransducer' AS depth,
        jsonb_build_object(
            'type', 'Feature',
            'geometry', ST_AsGeoJSON(st_makepoint(longitude,latitude))::jsonb,
            'properties', jsonb_build_object(
                'name', current_setting('vessel.name', false),
                'latitude', m.latitude,
                'longitude', m.longitude,
                'time', m.time,
                'speedoverground', m.speedoverground,
                'windspeedapparent', m.windspeedapparent,
                'truewindspeed', coalesce(metersToKnots((metrics->'environment.wind.speedTrue')::NUMERIC), null),
                'truewinddirection', coalesce(radiantToDegrees((metrics->'environment.wind.directionTrue')::NUMERIC), null),
                'status', coalesce(m.status, null)
                )::jsonb ) AS geojson,
        current_setting('vessel.name', false) AS name,
        m.status,
        CASE WHEN m.status <> 'moored' THEN (
            SELECT public.logbook_active_geojson_fn() )
        END AS live
    FROM api.metrics m
    ORDER BY time DESC LIMIT 1;
-- Description
COMMENT ON VIEW
    api.monitoring_view
    IS 'Monitoring static web view';
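
-- Illustrative usage (security_invoker view, so vessel.id and vessel.name must be set in the session):
--   SELECT time, offline, geojson, status, live FROM api.monitoring_view;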

-- Allow read access on tables for user_role, grafana and api_anonymous
GRANT SELECT ON ALL TABLES IN SCHEMA api TO user_role;
GRANT SELECT ON ALL TABLES IN SCHEMA api TO grafana;
GRANT SELECT ON TABLE api.monitoring_view TO user_role;
GRANT SELECT ON TABLE api.monitoring_view TO api_anonymous;
GRANT SELECT ON TABLE api.monitoring_view TO grafana;

-- Allow function execution for user_role, grafana and api_anonymous
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO grafana;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana;
GRANT EXECUTE ON FUNCTION public.logbook_active_geojson_fn TO api_anonymous;
GRANT EXECUTE ON FUNCTION public.metersToKnots TO api_anonymous;
GRANT EXECUTE ON FUNCTION public.radiantToDegrees TO api_anonymous;

-- Fix vessel name (Organization): ensure we have a value, either from the metadata tbl (signalk) or from the vessel tbl
DROP FUNCTION IF EXISTS public.cron_process_grafana_fn;
CREATE OR REPLACE FUNCTION public.cron_process_grafana_fn() RETURNS void
AS $cron_process_grafana_fn$
DECLARE
    process_rec record;
    data_rec record;
    app_settings jsonb;
    user_settings jsonb;
BEGIN
    -- We run grafana provisioning only after the first received vessel metadata
    -- Check for new vessel metadata pending grafana provisioning
    RAISE NOTICE 'cron_process_grafana_fn';
    FOR process_rec in
        SELECT * from process_queue
            where channel = 'grafana' and processed is null
            order by stored asc
    LOOP
        RAISE NOTICE '-> cron_process_grafana_fn [%]', process_rec.payload;
        -- Gather url from app settings
        app_settings := get_app_settings_fn();
        -- Get vessel details based on metadata id
        SELECT
            v.owner_email,coalesce(m.name,v.name) as name,m.vessel_id into data_rec
            FROM auth.vessels v
            LEFT JOIN api.metadata m ON v.vessel_id = m.vessel_id
            WHERE m.id = process_rec.payload::INTEGER;
        IF data_rec.vessel_id IS NULL OR data_rec.name IS NULL THEN
            RAISE WARNING '-> DEBUG cron_process_grafana_fn grafana_py_fn error [%]', data_rec;
            RETURN;
        END IF;
        -- As we got data from the vessel we can do the grafana provisioning.
        RAISE DEBUG '-> DEBUG cron_process_grafana_fn grafana_py_fn provisioning [%]', data_rec;
        PERFORM grafana_py_fn(data_rec.name, data_rec.vessel_id, data_rec.owner_email, app_settings);
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(data_rec.vessel_id::TEXT);
        RAISE DEBUG '-> DEBUG cron_process_grafana_fn get_user_settings_from_vesselid_fn [%]', user_settings;
        -- Add user in keycloak
        PERFORM keycloak_auth_py_fn(data_rec.vessel_id, user_settings, app_settings);
        -- Send notification
        PERFORM send_notification_fn('grafana'::TEXT, user_settings::JSONB);
        -- Update process_queue entry as processed
        UPDATE process_queue
            SET
                processed = NOW()
            WHERE id = process_rec.id;
        RAISE NOTICE '-> cron_process_grafana_fn updated process_queue table [%]', process_rec.id;
    END LOOP;
END;
$cron_process_grafana_fn$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_grafana_fn
    IS 'init by pg_cron to check for new vessel pending grafana provisioning, if so perform grafana_py_fn';
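
-- Illustrative manual run, processes any pending 'grafana' entries from the process_queue:
--   SELECT public.cron_process_grafana_fn();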

-- Update version
UPDATE public.app_settings
    SET value='0.7.3'
    WHERE "name"='app.version';

\c postgres

-- Notifications/Reminders for old signalk plugin
-- '6 8 */4 * 0': at 08:06 on every 4th day-of-month and on Sunday.
SELECT cron.schedule('cron_skplugin_upgrade', '6 8 */4 * 0', 'select public.cron_process_skplugin_upgrade_fn()');
UPDATE cron.job SET database = 'postgres' WHERE jobname = 'cron_skplugin_upgrade';
879 initdb/99_migrations_202406.sql (Normal file)
@@ -0,0 +1,879 @@
---------------------------------------------------------------------------
-- Copyright 2021-2024 Francois Lacroix <xbgmsharp@gmail.com>
-- This file is part of PostgSail which is released under Apache License, Version 2.0 (the "License").
-- See file LICENSE or go to http://www.apache.org/licenses/LICENSE-2.0 for full license details.
--
-- Migration June 2024
--
-- List current database
select current_database();

-- connect to the DB
\c signalk

\echo 'Timing mode is enabled'
\timing

\echo 'Force timezone, just in case'
set timezone to 'UTC';

-- Add video timelapse notification message
INSERT INTO public.email_templates ("name",email_subject,email_content,pushover_title,pushover_message)
    VALUES ('video_ready','PostgSail Video ready',E'Hey,\nYour video is available at __VIDEO_LINK__.\nPlease make sure you download your video as it will be deleted in 7 days.','PostgSail Video ready!',E'Your video is ready __VIDEO_LINK__.');

-- Generate and request the logbook image url so it gets cached on the QGIS server.
DROP FUNCTION IF EXISTS public.qgis_getmap_py_fn;
CREATE OR REPLACE FUNCTION public.qgis_getmap_py_fn(IN vessel_id TEXT DEFAULT NULL, IN log_id NUMERIC DEFAULT NULL, IN extent TEXT DEFAULT NULL, IN logs_url BOOLEAN DEFAULT False) RETURNS VOID
AS $qgis_getmap_py$
    import requests

    # Extract extent
    def parse_extent_from_db(extent_raw):
        # Parse the extent_raw to extract coordinates
        extent = extent_raw.replace('BOX(', '').replace(')', '').split(',')
        min_x, min_y = map(float, extent[0].split())
        max_x, max_y = map(float, extent[1].split())
        return min_x, min_y, max_x, max_y

    # ZoomOut from linestring extent
    def apply_scale_factor(extent, scale_factor=1.125):
        min_x, min_y, max_x, max_y = extent
        center_x = (min_x + max_x) / 2
        center_y = (min_y + max_y) / 2
        width = max_x - min_x
        height = max_y - min_y
        new_width = width * scale_factor
        new_height = height * scale_factor
        scaled_extent = (
            round(center_x - new_width / 2),
            round(center_y - new_height / 2),
            round(center_x + new_width / 2),
            round(center_y + new_height / 2),
        )
        return scaled_extent

    def adjust_image_size_to_bbox(extent, width, height):
        min_x, min_y, max_x, max_y = extent
        bbox_aspect_ratio = (max_x - min_x) / (max_y - min_y)
        image_aspect_ratio = width / height

        if bbox_aspect_ratio > image_aspect_ratio:
            # Adjust height to match aspect ratio
            height = width / bbox_aspect_ratio
        else:
            # Adjust width to match aspect ratio
            width = height * bbox_aspect_ratio

        return int(width), int(height)

    def calculate_width(extent, fixed_height):
        min_x, min_y, max_x, max_y = extent
        bbox_aspect_ratio = (max_x - min_x) / (max_y - min_y)
        width = fixed_height * bbox_aspect_ratio
        return int(width)

    def adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height):
        min_x, min_y, max_x, max_y = scaled_extent
        bbox_width = max_x - min_x
        bbox_height = max_y - min_y
        bbox_aspect_ratio = bbox_width / bbox_height
        image_aspect_ratio = fixed_width / fixed_height

        if bbox_aspect_ratio > image_aspect_ratio:
            # Adjust height to match aspect ratio
            new_bbox_height = bbox_width / image_aspect_ratio
            height_diff = new_bbox_height - bbox_height
            min_y -= height_diff / 2
            max_y += height_diff / 2
        else:
            # Adjust width to match aspect ratio
            new_bbox_width = bbox_height * image_aspect_ratio
            width_diff = new_bbox_width - bbox_width
            min_x -= width_diff / 2
            max_x += width_diff / 2

        adjusted_extent = (min_x, min_y, max_x, max_y)
        return adjusted_extent

    def generate_getmap_url(server_url, project_path, layer_name, extent, width=1080, height=566, crs="EPSG:3857", format="image/png"):
        min_x, min_y, max_x, max_y = extent
        bbox = f"{min_x},{min_y},{max_x},{max_y}"

        # Adjust image size to match BBOX aspect ratio
        #width, height = adjust_image_size_to_bbox(extent, width, height)

        # Calculate width to maintain aspect ratio with fixed height
        #width = calculate_width(extent, height)

        url = (
            f"{server_url}?SERVICE=WMS&VERSION=1.3.0&REQUEST=GetMap&FORMAT={format}&CRS={crs}"
            f"&BBOX={bbox}&WIDTH={width}&HEIGHT={height}&LAYERS={layer_name}&MAP={project_path}"
        )
        return url

    if logs_url == False:
        server_url = f"https://gis.openplotter.cloud/log_{vessel_id}_{log_id}.png"
    else:
        server_url = f"https://gis.openplotter.cloud/logs_{vessel_id}_{log_id}.png"
    project_path = "/projects/postgsail5.qgz"
    layer_name = "OpenStreetMap,SQLLayer"
    #plpy.notice('qgis_getmap_py vessel_id [{}], log_id [{}], extent [{}]'.format(vessel_id, log_id, extent))

    # Parse extent and scale factor
    scaled_extent = apply_scale_factor(parse_extent_from_db(extent))
    #plpy.notice('qgis_getmap_py scaled_extent [{}]'.format(scaled_extent))

    fixed_width = 1080
    fixed_height = 566
    adjusted_extent = adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height)
    #plpy.notice('qgis_getmap_py adjusted_extent [{}]'.format(adjusted_extent))

    getmap_url = generate_getmap_url(server_url, project_path, layer_name, adjusted_extent)
    if logs_url == False:
        filter_url = f"{getmap_url}&FILTER=SQLLayer:\"vessel_id\" = '{vessel_id}' AND \"id\" = {log_id}"
    else:
        filter_url = f"{getmap_url}&FILTER=SQLLayer:\"vessel_id\" = '{vessel_id}'"
    #plpy.notice('qgis_getmap_py getmap_url [{}]'.format(filter_url))

    # Fetch image to be cached by the qgis server
    headers = {"User-Agent": "PostgSail", "From": "xbgmsharp@gmail.com"}
    r = requests.get(filter_url, headers=headers, timeout=100)
    # Parse response
    if r.status_code != 200:
        plpy.warning('Failed to get WMS image, url[{}]'.format(filter_url))
$qgis_getmap_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.qgis_getmap_py_fn
    IS 'Generate a log map, to generate the cache data for faster access later';
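
-- Illustrative usage (hypothetical vessel_id and log id; the extent string uses the BOX() format returned by ST_Extent):
--   SELECT public.qgis_getmap_py_fn('abcd1234', 42, 'BOX(2537309 8121379,2537742 8123007)', False);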

-- Generate the logbook extent for the logbook image to access the QGIS server.
DROP FUNCTION IF EXISTS public.qgis_bbox_py_fn;
CREATE OR REPLACE FUNCTION public.qgis_bbox_py_fn(IN vessel_id TEXT DEFAULT NULL, IN log_id NUMERIC DEFAULT NULL, IN width NUMERIC DEFAULT 1080, IN height NUMERIC DEFAULT 566, IN scaleout BOOLEAN DEFAULT True, OUT bbox TEXT)
AS $qgis_bbox_py$
    log_extent = None
    # If we have a vessel_id then it is a logs image map
    if vessel_id:
        # Use the shared cache to avoid preparing the log extent
        if vessel_id in SD:
            plan = SD[vessel_id]
        # A prepared statement from Python
        else:
            plan = plpy.prepare("WITH merged AS ( SELECT ST_Union(track_geom) AS merged_geometry FROM api.logbook WHERE vessel_id = $1 ) SELECT ST_Extent(ST_Transform(merged_geometry, 3857))::TEXT FROM merged;", ["text"])
            SD[vessel_id] = plan
        # Execute the statement with the log extent param and limit to 1 result
        rv = plpy.execute(plan, [vessel_id], 1)
        log_extent = rv[0]['st_extent']
    # Else we have a log_id then it is a single log image map
    else:
        # Use the shared cache to avoid preparing the log extent
        if log_id in SD:
            plan = SD[log_id]
        # A prepared statement from Python
        else:
            plan = plpy.prepare("SELECT ST_Extent(ST_Transform(track_geom, 3857)) FROM api.logbook WHERE id = $1::NUMERIC", ["text"])
            SD[log_id] = plan
        # Execute the statement with the log extent param and limit to 1 result
        rv = plpy.execute(plan, [log_id], 1)
        log_extent = rv[0]['st_extent']

    # Extract extent
    def parse_extent_from_db(extent_raw):
        # Parse the extent_raw to extract coordinates
        extent = extent_raw.replace('BOX(', '').replace(')', '').split(',')
        min_x, min_y = map(float, extent[0].split())
        max_x, max_y = map(float, extent[1].split())
        return min_x, min_y, max_x, max_y

    # ZoomOut from linestring extent
    def apply_scale_factor(extent, scale_factor=1.125):
        min_x, min_y, max_x, max_y = extent
        center_x = (min_x + max_x) / 2
        center_y = (min_y + max_y) / 2
        width = max_x - min_x
        height = max_y - min_y
        new_width = width * scale_factor
        new_height = height * scale_factor
        scaled_extent = (
            round(center_x - new_width / 2),
            round(center_y - new_height / 2),
            round(center_x + new_width / 2),
            round(center_y + new_height / 2),
        )
        return scaled_extent

    def adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height):
        min_x, min_y, max_x, max_y = scaled_extent
        bbox_width = float(max_x - min_x)
        bbox_height = float(max_y - min_y)
        bbox_aspect_ratio = float(bbox_width / bbox_height)
        image_aspect_ratio = float(fixed_width / fixed_height)

        if bbox_aspect_ratio > image_aspect_ratio:
            # Adjust height to match aspect ratio
            new_bbox_height = bbox_width / image_aspect_ratio
            height_diff = new_bbox_height - bbox_height
            min_y -= height_diff / 2
            max_y += height_diff / 2
        else:
            # Adjust width to match aspect ratio
            new_bbox_width = bbox_height * image_aspect_ratio
            width_diff = new_bbox_width - bbox_width
            min_x -= width_diff / 2
            max_x += width_diff / 2

        adjusted_extent = (min_x, min_y, max_x, max_y)
        return adjusted_extent

    if not log_extent:
        plpy.warning('Failed to get sql qgis_bbox_py log_id [{}], extent [{}]'.format(log_id, log_extent))
    #plpy.notice('qgis_bbox_py log_id [{}], extent [{}]'.format(log_id, log_extent))
    # Parse extent and apply ZoomOut scale factor
    if scaleout:
        scaled_extent = apply_scale_factor(parse_extent_from_db(log_extent))
    else:
        scaled_extent = parse_extent_from_db(log_extent)
    #plpy.notice('qgis_bbox_py log_id [{}], scaled_extent [{}]'.format(log_id, scaled_extent))
    fixed_width = width # default 1080
    fixed_height = height # default 566
    adjusted_extent = adjust_bbox_to_fixed_size(scaled_extent, fixed_width, fixed_height)
    #plpy.notice('qgis_bbox_py log_id [{}], adjusted_extent [{}]'.format(log_id, adjusted_extent))
    min_x, min_y, max_x, max_y = adjusted_extent
    return f"{min_x},{min_y},{max_x},{max_y}"
$qgis_bbox_py$ LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.qgis_bbox_py_fn
    IS 'Generate the BBOX based on log extent and adapt extent to the image size for QGIS Server';
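
-- Illustrative usage for a single log (hypothetical id 42); passing vessel_id NULL selects the per-log branch:
--   SELECT public.qgis_bbox_py_fn(NULL, 42, 1080, 566, True);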

-- qgis_role user and role with login, read-only on api.logbook, limit 20 connections
CREATE ROLE qgis_role WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 20 LOGIN PASSWORD 'mysecretpassword';
COMMENT ON ROLE qgis_role IS
    'Role used by QGIS server and Apache to connect and lookup the logbook table.';
-- Allow read on VIEWS on API schema
GRANT USAGE ON SCHEMA api TO qgis_role;
GRANT SELECT ON TABLE api.logbook TO qgis_role;
GRANT USAGE ON SCHEMA public TO qgis_role;
-- For all postgis fn, st_extent, st_transform
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO qgis_role;
-- Allow qgis_role to select all logbook records
CREATE POLICY logbook_qgis_role ON api.logbook TO qgis_role
    USING (true)
    WITH CHECK (false);

-- Add support for HTML email with image inline for logbook
-- Add support for video link for maplapse
DROP FUNCTION IF EXISTS public.send_email_py_fn;
CREATE OR REPLACE FUNCTION public.send_email_py_fn(IN email_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_email_py$
    # Import smtplib for the actual sending function
    import smtplib
    import requests

    # Import the email modules we need
    from email.message import EmailMessage
    from email.utils import formatdate,make_msgid
    from email.mime.text import MIMEText

    # Use the shared cache to avoid preparing the email metadata
    if email_type in SD:
        plan = SD[email_type]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[email_type] = plan

    # Execute the statement with the email_type param and limit to 1 result
    rv = plpy.execute(plan, [email_type], 1)
    email_subject = rv[0]['email_subject']
    email_content = rv[0]['email_content']

    # Replace fields using input jsonb obj
    if not _user or not app:
        plpy.notice('send_email_py_fn Parameters [{}] [{}]'.format(_user, app))
        plpy.error('Error missing parameters')
        return None
    if 'logbook_name' in _user and _user['logbook_name']:
        email_content = email_content.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        email_content = email_content.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'logbook_img' in _user and _user['logbook_img']:
        email_content = email_content.replace('__LOGBOOK_IMG__', str(_user['logbook_img']))
    if 'video_link' in _user and _user['video_link']:
        email_content = email_content.replace('__VIDEO_LINK__', str( _user['video_link']))
    if 'recipient' in _user and _user['recipient']:
        email_content = email_content.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        email_content = email_content.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        email_content = email_content.replace('__BADGE_NAME__', _user['badge'])
    if 'otp_code' in _user and _user['otp_code']:
        email_content = email_content.replace('__OTP_CODE__', _user['otp_code'])
    if 'reset_qs' in _user and _user['reset_qs']:
        email_content = email_content.replace('__RESET_QS__', _user['reset_qs'])
    if 'alert' in _user and _user['alert']:
        email_content = email_content.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        email_content = email_content.replace('__APP_URL__', app['app.url'])

    email_from = 'root@localhost'
    if 'app.email_from' in app and app['app.email_from']:
        email_from = 'PostgSail <' + app['app.email_from'] + '>'
    #plpy.notice('Sending email from [{}] [{}]'.format(email_from, app['app.email_from']))

    email_to = 'root@localhost'
    if 'email' in _user and _user['email']:
        email_to = _user['email']
    #plpy.notice('Sending email to [{}] [{}]'.format(email_to, _user['email']))
    else:
        plpy.error('Error email to')
        return None

    if email_type == 'logbook':
        msg = EmailMessage()
        msg.set_content(email_content)
    else:
        msg = MIMEText(email_content, 'plain', 'utf-8')
    msg["Subject"] = email_subject
    msg["From"] = email_from
    msg["To"] = email_to
    msg["Date"] = formatdate()
    msg["Message-ID"] = make_msgid()

    if email_type == 'logbook' and 'logbook_img' in _user and _user['logbook_img']:
        # Create a Content-ID for the image
        image_cid = make_msgid()
        # Set an alternative html body
        msg.add_alternative("""\
<html>
    <body>
        <p>{email_content}</p>
        <img src="cid:{image_cid}">
    </body>
</html>
""".format(email_content=email_content, image_cid=image_cid[1:-1]), subtype='html')
        img_url = 'https://gis.openplotter.cloud/{}'.format(str(_user['logbook_img']))
        response = requests.get(img_url, stream=True)
        if response.status_code == 200:
            msg.get_payload()[1].add_related(response.raw.data,
                                maintype='image',
                                subtype='png',
                                cid=image_cid)

    server_smtp = 'localhost'
    if 'app.email_server' in app and app['app.email_server']:
        server_smtp = app['app.email_server']
    #plpy.notice('Sending server [{}] [{}]'.format(server_smtp, app['app.email_server']))

    # Send the message via our own SMTP server.
    try:
        # send your message with credentials specified above
        with smtplib.SMTP(server_smtp, 587) as server:
            if 'app.email_user' in app and app['app.email_user'] \
                and 'app.email_pass' in app and app['app.email_pass']:
                server.starttls()
                server.login(app['app.email_user'], app['app.email_pass'])
            #server.send_message(msg)
            server.sendmail(msg["From"], msg["To"], msg.as_string())
            server.quit()
        # tell the script to report if your message was sent or which errors need to be fixed
        plpy.notice('Sent email successfully to [{}] [{}]'.format(msg["To"], msg["Subject"]))
        return None
    except OSError as error:
        plpy.error('OS Error occurred: ' + str(error))
    except smtplib.SMTPConnectError:
        plpy.error('Failed to connect to the server. Bad connection settings?')
    except smtplib.SMTPServerDisconnected:
        plpy.error('Failed to connect to the server. Wrong user/password?')
    except smtplib.SMTPException as e:
        plpy.error('SMTP error occurred: ' + str(e))
$send_email_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_email_py_fn
    IS 'Send email notification using plpython3u';
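
-- Illustrative manual test (hypothetical recipient values; app settings come from get_app_settings_fn):
--   SELECT public.send_email_py_fn('skplugin_upgrade',
--       '{"email": "user@example.com", "recipient": "User"}'::jsonb,
--       get_app_settings_fn());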

-- Add vessel_id key, expose vessel_id
DROP FUNCTION IF EXISTS api.vessel_fn;
CREATE OR REPLACE FUNCTION api.vessel_fn(OUT vessel JSON) RETURNS JSON
AS $vessel$
DECLARE
BEGIN
    SELECT
        jsonb_build_object(
            'name', coalesce(m.name, null),
            'mmsi', coalesce(m.mmsi, null),
            'vessel_id', m.vessel_id,
            'created_at', v.created_at,
            'first_contact', coalesce(m.created_at, null),
            'last_contact', coalesce(m.time, null),
            'geojson', coalesce(ST_AsGeoJSON(geojson_t.*)::json, null)
        )::jsonb || api.vessel_details_fn()::jsonb
        INTO vessel
        FROM auth.vessels v, api.metadata m,
            ( select
                current_setting('vessel.name') as name,
                time,
                courseovergroundtrue,
                speedoverground,
                anglespeedapparent,
                longitude,latitude,
                st_makepoint(longitude,latitude) AS geo_point
                FROM api.metrics
                WHERE
                    latitude IS NOT NULL
                    AND longitude IS NOT NULL
                    AND vessel_id = current_setting('vessel.id', false)
                ORDER BY time DESC LIMIT 1
            ) AS geojson_t
        WHERE
            m.vessel_id = current_setting('vessel.id')
            AND m.vessel_id = v.vessel_id;
    --RAISE notice 'api.vessel_fn %', obj;
END;
$vessel$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.vessel_fn
    IS 'Expose vessel details to API';
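
-- Illustrative usage (requires vessel.id and vessel.name in the session context):
--   SELECT api.vessel_fn();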

-- Update pending new logbook from process queue
DROP FUNCTION IF EXISTS public.process_post_logbook_fn;
CREATE OR REPLACE FUNCTION public.process_post_logbook_fn(IN _id integer) RETURNS void AS $process_post_logbook_queue$
DECLARE
    logbook_rec record;
    log_settings jsonb;
    user_settings jsonb;
    extra_json jsonb;
    log_img_url text;
    logs_img_url text;
    extent_bbox text;
BEGIN
    -- Sanity check on the input logbook id
    IF _id IS NULL OR _id < 1 THEN
        RAISE WARNING '-> process_post_logbook_fn invalid input %', _id;
        RETURN;
    END IF;
    -- Get the logbook record and ensure all necessary fields exist
    SELECT * INTO logbook_rec
        FROM api.logbook
        WHERE active IS false
            AND id = _id
            AND _from_lng IS NOT NULL
            AND _from_lat IS NOT NULL
            AND _to_lng IS NOT NULL
            AND _to_lat IS NOT NULL;
    -- Ensure the query is successful
    IF logbook_rec.vessel_id IS NULL THEN
        RAISE WARNING '-> process_post_logbook_fn invalid logbook %', _id;
        RETURN;
    END IF;

    PERFORM set_config('vessel.id', logbook_rec.vessel_id, false);
    --RAISE WARNING 'public.process_post_logbook_fn() scheduler vessel.id %, user.id', current_setting('vessel.id', false), current_setting('user.id', false);

    -- Generate logbook image map name from QGIS
    SELECT CONCAT('log_', logbook_rec.vessel_id::TEXT, '_', logbook_rec.id, '.png') INTO log_img_url;
    SELECT ST_Extent(ST_Transform(logbook_rec.track_geom, 3857))::TEXT AS envelope INTO extent_bbox FROM api.logbook WHERE id = logbook_rec.id;
    PERFORM public.qgis_getmap_py_fn(logbook_rec.vessel_id::TEXT, logbook_rec.id, extent_bbox::TEXT, False);
    -- Generate logs image map name from QGIS
    WITH merged AS (
        SELECT ST_Union(logbook_rec.track_geom) AS merged_geometry
            FROM api.logbook WHERE vessel_id = logbook_rec.vessel_id
    )
    SELECT ST_Extent(ST_Transform(merged_geometry, 3857))::TEXT AS envelope INTO extent_bbox FROM merged;
    SELECT CONCAT('logs_', logbook_rec.vessel_id::TEXT, '_', logbook_rec.id, '.png') INTO logs_img_url;
    PERFORM public.qgis_getmap_py_fn(logbook_rec.vessel_id::TEXT, logbook_rec.id, extent_bbox::TEXT, True);

    -- Prepare notification, gather user settings
    SELECT json_build_object('logbook_name', logbook_rec.name, 'logbook_link', logbook_rec.id, 'logbook_img', log_img_url) INTO log_settings;
    user_settings := get_user_settings_from_vesselid_fn(logbook_rec.vessel_id::TEXT);
    SELECT user_settings::JSONB || log_settings::JSONB into user_settings;
    RAISE NOTICE '-> debug process_post_logbook_fn get_user_settings_from_vesselid_fn [%]', user_settings;
    RAISE NOTICE '-> debug process_post_logbook_fn log_settings [%]', log_settings;
    -- Send notification
    PERFORM send_notification_fn('logbook'::TEXT, user_settings::JSONB);
    -- Process badges
    RAISE NOTICE '-> debug process_post_logbook_fn user_settings [%]', user_settings->>'email'::TEXT;
    PERFORM set_config('user.email', user_settings->>'email'::TEXT, false);
    PERFORM badges_logbook_fn(logbook_rec.id, logbook_rec._to_time::TEXT);
    PERFORM badges_geom_fn(logbook_rec.id, logbook_rec._to_time::TEXT);
END;
$process_post_logbook_queue$ LANGUAGE plpgsql;
-- Description
COMMENT ON FUNCTION
    public.process_post_logbook_fn
    IS 'Generate QGIS image and Notify user for new logbook.';
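
-- Illustrative manual run (hypothetical logbook id; normally driven from the process_queue):
--   SELECT public.process_post_logbook_fn(1);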

-- Check for new logbook pending notification
DROP FUNCTION IF EXISTS public.cron_process_post_logbook_fn;
CREATE FUNCTION public.cron_process_post_logbook_fn() RETURNS void AS $$
DECLARE
    process_rec record;
BEGIN
    -- Check for new logbook pending update
    RAISE NOTICE 'cron_process_post_logbook_fn init loop';
    FOR process_rec in
        SELECT * FROM process_queue
            WHERE channel = 'post_logbook' AND processed IS NULL
            ORDER BY stored ASC LIMIT 100
    LOOP
        RAISE NOTICE 'cron_process_post_logbook_fn processing queue [%] for logbook id [%]', process_rec.id, process_rec.payload;
        -- update logbook
        PERFORM process_post_logbook_fn(process_rec.payload::INTEGER);
        -- update process_queue table, processed
        UPDATE process_queue
            SET
                processed = NOW()
            WHERE id = process_rec.id;
        RAISE NOTICE 'cron_process_post_logbook_fn processed queue [%] for logbook id [%]', process_rec.id, process_rec.payload;
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_post_logbook_fn
    IS 'init by pg_cron to check for new logbook pending qgis and notification, after process_new_logbook_fn';

DROP FUNCTION IF EXISTS public.run_cron_jobs;
CREATE FUNCTION public.run_cron_jobs() RETURNS void AS $$
BEGIN
    -- In correct order
    perform public.cron_process_new_notification_fn();
    perform public.cron_process_monitor_online_fn();
    --perform public.cron_process_grafana_fn();
    perform public.cron_process_pre_logbook_fn();
    perform public.cron_process_new_logbook_fn();
    perform public.cron_process_post_logbook_fn();
    perform public.cron_process_new_stay_fn();
    --perform public.cron_process_new_moorage_fn();
    perform public.cron_process_monitor_offline_fn();
END
$$ language plpgsql;
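
-- Illustrative manual run of the whole pipeline in order:
--   SELECT public.run_cron_jobs();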

DROP VIEW IF EXISTS api.eventlogs_view;
CREATE VIEW api.eventlogs_view WITH (security_invoker=true,security_barrier=true) AS
    SELECT pq.*
        FROM public.process_queue pq
        WHERE channel <> 'pre_logbook'
            AND channel <> 'post_logbook'
            AND (ref_id = current_setting('user.id', true)
                OR ref_id = current_setting('vessel.id', true))
        ORDER BY id ASC;
-- Description
COMMENT ON VIEW
    api.eventlogs_view
    IS 'Event logs view';

-- CRON for new video notification
DROP FUNCTION IF EXISTS public.cron_process_new_video_fn;
CREATE FUNCTION public.cron_process_new_video_fn() RETURNS void AS $$
declare
    process_rec record;
    metadata_rec record;
    video_settings jsonb;
    user_settings jsonb;
begin
    -- Check for new event notification pending update
    RAISE NOTICE 'cron_process_new_video_fn';
    FOR process_rec in
        SELECT * FROM process_queue
            WHERE channel = 'new_video'
                AND processed IS NULL
            ORDER BY stored ASC
    LOOP
        RAISE NOTICE '-> cron_process_new_video_fn for [%]', process_rec.payload;
        SELECT * INTO metadata_rec
            FROM api.metadata
            WHERE vessel_id = process_rec.ref_id::TEXT;

        IF metadata_rec.vessel_id IS NULL OR metadata_rec.vessel_id = '' THEN
            RAISE WARNING '-> cron_process_new_video_fn invalid metadata record vessel_id %', process_rec.ref_id;
            RAISE EXCEPTION 'Invalid metadata'
                USING HINT = 'Unknown vessel_id';
            RETURN;
        END IF;
        PERFORM set_config('vessel.id', metadata_rec.vessel_id, false);
        RAISE DEBUG '-> DEBUG cron_process_new_video_fn vessel_id %', current_setting('vessel.id', false);
        -- Prepare notification, gather user settings
        SELECT json_build_object('video_link', CONCAT('https://videos.openplotter.cloud/', process_rec.payload)) into video_settings;
        -- Gather user settings
        user_settings := get_user_settings_from_vesselid_fn(metadata_rec.vessel_id::TEXT);
        SELECT user_settings::JSONB || video_settings::JSONB into user_settings;
        RAISE DEBUG '-> DEBUG cron_process_new_video_fn get_user_settings_from_vesselid_fn [%]', user_settings;
        -- Send notification
        PERFORM send_notification_fn('video_ready'::TEXT, user_settings::JSONB);
        -- update process_queue entry as processed
        UPDATE process_queue
            SET
                processed = NOW()
            WHERE id = process_rec.id;
        RAISE NOTICE '-> cron_process_new_video_fn updated process_queue table [%]', process_rec.id;
    END LOOP;
END;
$$ language plpgsql;
-- Description
COMMENT ON FUNCTION
    public.cron_process_new_video_fn
    IS 'init by pg_cron to check for new video event pending notifications, if so perform process_notification_queue_fn';

-- Add support for video link for maplapse
DROP FUNCTION IF EXISTS public.send_pushover_py_fn;
CREATE OR REPLACE FUNCTION public.send_pushover_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_pushover_py$
    """
    https://pushover.net/api#messages
    Send a notification to a pushover user
    """
    import requests

    # Use the shared cache to avoid preparing the email metadata
    if message_type in SD:
        plan = SD[message_type]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[message_type] = plan

    # Execute the statement with the message_type param and limit to 1 result
    rv = plpy.execute(plan, [message_type], 1)
    pushover_title = rv[0]['pushover_title']
    pushover_message = rv[0]['pushover_message']

    # Replace fields using input jsonb obj
    if 'logbook_name' in _user and _user['logbook_name']:
        pushover_message = pushover_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        pushover_message = pushover_message.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'video_link' in _user and _user['video_link']:
        pushover_message = pushover_message.replace('__VIDEO_LINK__', str( _user['video_link']))
    if 'recipient' in _user and _user['recipient']:
        pushover_message = pushover_message.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        pushover_message = pushover_message.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        pushover_message = pushover_message.replace('__BADGE_NAME__', _user['badge'])
    if 'alert' in _user and _user['alert']:
        pushover_message = pushover_message.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        pushover_message = pushover_message.replace('__APP_URL__', app['app.url'])

    pushover_token = None
    if 'app.pushover_app_token' in app and app['app.pushover_app_token']:
        pushover_token = app['app.pushover_app_token']
    else:
        plpy.error('Error no pushover token defined, check app settings')
        return None
    pushover_user = None
    if 'pushover_user_key' in _user and _user['pushover_user_key']:
        pushover_user = _user['pushover_user_key']
    else:
        plpy.error('Error no pushover user token defined, check user settings')
        return None

    if message_type == 'logbook' and 'logbook_img' in _user and _user['logbook_img']:
        # Send notification with gis image logbook as attachment
        img_url = 'https://gis.openplotter.cloud/{}'.format(str(_user['logbook_img']))
        response = requests.get(img_url, stream=True)
        if response.status_code == 200:
            r = requests.post("https://api.pushover.net/1/messages.json", data = {
                "token": pushover_token,
                "user": pushover_user,
                "title": pushover_title,
                "message": pushover_message
            }, files = {
                "attachment": (str(_user['logbook_img']), response.raw.data, "image/png")
            })
    else:
        r = requests.post("https://api.pushover.net/1/messages.json", data = {
            "token": pushover_token,
            "user": pushover_user,
            "title": pushover_title,
            "message": pushover_message
        })

    #print(r.text)
    # Report the delivery result
    #plpy.notice('Sent pushover successfully to [{}] [{}]'.format(r.text, r.status_code))
    if r.status_code == 200:
        plpy.notice('Sent pushover successfully to [{}] [{}] [{}]'.format(pushover_user, pushover_title, r.text))
    else:
        plpy.error('Failed to send pushover')
    return None
$send_pushover_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_pushover_py_fn
    IS 'Send pushover notification using plpython3u';

-- Add support for video link for maplapse
DROP FUNCTION IF EXISTS public.send_telegram_py_fn;
CREATE OR REPLACE FUNCTION public.send_telegram_py_fn(IN message_type TEXT, IN _user JSONB, IN app JSONB) RETURNS void
AS $send_telegram_py$
    """
    https://core.telegram.org/bots/api#sendmessage
    Send a message to a telegram user or group specified on chatId
    chat_id must be a number!
    """
    import requests
    import json

    # Use the shared cache to avoid preparing the email metadata
    if message_type in SD:
        plan = SD[message_type]
    # A prepared statement from Python
    else:
        plan = plpy.prepare("SELECT * FROM email_templates WHERE name = $1", ["text"])
        SD[message_type] = plan

    # Execute the statement with the message_type param and limit to 1 result
    rv = plpy.execute(plan, [message_type], 1)
    telegram_title = rv[0]['pushover_title']
    telegram_message = rv[0]['pushover_message']

    # Replace fields using input jsonb obj
    if 'logbook_name' in _user and _user['logbook_name']:
        telegram_message = telegram_message.replace('__LOGBOOK_NAME__', _user['logbook_name'])
    if 'logbook_link' in _user and _user['logbook_link']:
        telegram_message = telegram_message.replace('__LOGBOOK_LINK__', str(_user['logbook_link']))
    if 'video_link' in _user and _user['video_link']:
        telegram_message = telegram_message.replace('__VIDEO_LINK__', str( _user['video_link']))
    if 'recipient' in _user and _user['recipient']:
        telegram_message = telegram_message.replace('__RECIPIENT__', _user['recipient'])
    if 'boat' in _user and _user['boat']:
        telegram_message = telegram_message.replace('__BOAT__', _user['boat'])
    if 'badge' in _user and _user['badge']:
        telegram_message = telegram_message.replace('__BADGE_NAME__', _user['badge'])
    if 'alert' in _user and _user['alert']:
        telegram_message = telegram_message.replace('__ALERT__', _user['alert'])

    if 'app.url' in app and app['app.url']:
        telegram_message = telegram_message.replace('__APP_URL__', app['app.url'])

    telegram_token = None
    if 'app.telegram_bot_token' in app and app['app.telegram_bot_token']:
        telegram_token = app['app.telegram_bot_token']
    else:
        plpy.error('Error no telegram token defined, check app settings')
        return None
    telegram_chat_id = None
    if 'telegram_chat_id' in _user and _user['telegram_chat_id']:
        telegram_chat_id = _user['telegram_chat_id']
    else:
        plpy.error('Error no telegram user token defined, check user settings')
        return None

    # sendMessage via requests
    headers = {'Content-Type': 'application/json',
            'Proxy-Authorization': 'Basic base64'}
    data_dict = {'chat_id': telegram_chat_id,
            'text': telegram_message,
            'parse_mode': 'HTML',
            'disable_notification': False}
    data = json.dumps(data_dict)
    url = f'https://api.telegram.org/bot{telegram_token}/sendMessage'
    r = requests.post(url, data=data, headers=headers)
    if message_type == 'logbook' and 'logbook_img' in _user and _user['logbook_img']:
        # Send gis image logbook
        # https://core.telegram.org/bots/api#sendphoto
        data_dict['photo'] = 'https://gis.openplotter.cloud/{}'.format(str(_user['logbook_img']))
        del data_dict['text']
        data = json.dumps(data_dict)
        url = f'https://api.telegram.org/bot{telegram_token}/sendPhoto'
        r = requests.post(url, data=data, headers=headers)

    #print(r.text)
    # Report the delivery result
    #plpy.notice('Sent telegram successfully to [{}] [{}]'.format(r.text, r.status_code))
    if r.status_code == 200:
        plpy.notice('Sent telegram successfully to [{}] [{}] [{}]'.format(telegram_chat_id, telegram_title, r.text))
    else:
        plpy.error('Failed to send telegram')
    return None
$send_telegram_py$ TRANSFORM FOR TYPE jsonb LANGUAGE plpython3u;
-- Description
COMMENT ON FUNCTION
    public.send_telegram_py_fn
    IS 'Send a message to a telegram user or group specified on chatId using plpython3u';

-- Add maplapse video record in queue
DROP FUNCTION IF EXISTS api.maplapse_record_fn;
CREATE OR REPLACE FUNCTION api.maplapse_record_fn(IN maplapse TEXT) RETURNS BOOLEAN
AS $maplapse_record$
BEGIN
    -- payload: 'Bromera,?start_log=8430&end_log=8491&height=100vh'
    IF maplapse ~ '^(\w+)\,\?(start_log=\d+).*$' then
        INSERT INTO process_queue (channel, payload, stored, ref_id)
            VALUES ('maplapse_video', maplapse, NOW(), current_setting('vessel.id', true));
        RETURN True;
    ELSE
        RETURN False;
    END IF;
END;
$maplapse_record$ language plpgsql security definer;
-- Description
COMMENT ON FUNCTION
    api.maplapse_record_fn
    IS 'Add maplapse video record in queue';
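
-- Illustrative usage, payload format as documented above:
--   SELECT api.maplapse_record_fn('Bromera,?start_log=8430&end_log=8491&height=100vh');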

CREATE ROLE maplapse_role WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 10 LOGIN PASSWORD 'mysecretpassword';
COMMENT ON ROLE maplapse_role IS
    'Role used by the maplapse external cronjob to connect and lookup the process_queue table.';
GRANT USAGE ON SCHEMA public TO maplapse_role;
GRANT SELECT,UPDATE,INSERT ON TABLE public.process_queue TO maplapse_role;
GRANT USAGE, SELECT ON SEQUENCE public.process_queue_id_seq TO maplapse_role;
-- Allow maplapse_role to select,update,insert on tbl process_queue
CREATE POLICY public_maplapse_role ON public.process_queue TO maplapse_role
    USING (true)
    WITH CHECK (true);

-- Allow function execution for user_role and grafana
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA api TO grafana;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO user_role;
GRANT EXECUTE ON ALL FUNCTIONS IN SCHEMA public TO grafana;
GRANT SELECT ON TABLE api.eventlogs_view TO user_role;

-- Update grafana role SQL connection limit to 30
ALTER ROLE grafana WITH NOSUPERUSER NOCREATEDB NOCREATEROLE NOINHERIT NOBYPASSRLS NOREPLICATION CONNECTION LIMIT 30 LOGIN;

-- Alter moorages table with default duration of 0.
ALTER TABLE api.moorages ALTER COLUMN stay_duration SET DEFAULT 'PT0S';
-- Update moorage_view to default with duration of 0
DROP VIEW IF EXISTS api.moorage_view;
CREATE OR REPLACE VIEW api.moorage_view WITH (security_invoker=true,security_barrier=true) AS -- TODO
    SELECT id,
        m.name AS Name,
        sa.description AS Default_Stay,
        sa.stay_code AS Default_Stay_Id,
        m.home_flag AS Home,
        EXTRACT(DAY FROM justify_hours ( COALESCE(m.stay_duration, 'PT0S') )) AS Total_Stay,
        COALESCE(m.stay_duration, 'PT0S') AS Total_Duration,
        m.reference_count AS Arrivals_Departures,
        m.notes
        -- m.geog
    FROM api.moorages m, api.stays_at sa
    -- m.stay_duration is only processed on a stay
    -- default with a duration of 0 sec
    WHERE geog IS NOT NULL
        AND m.stay_code = sa.stay_code;
-- Description
COMMENT ON VIEW
    api.moorage_view
    IS 'Moorage details web view';

-- Update version
UPDATE public.app_settings
    SET value='0.7.4'
    WHERE "name"='app.version';

\c postgres

-- Run cron_process_post_logbook_fn every 7 minutes
SELECT cron.schedule('cron_post_logbook', '*/7 * * * *', 'select public.cron_process_post_logbook_fn()');
UPDATE cron.job SET database = 'signalk' where jobname = 'cron_post_logbook';
-- Run cron_process_new_video_fn every 15 minutes
SELECT cron.schedule('cron_new_video', '*/15 * * * *', 'select public.cron_process_new_video_fn()');
UPDATE cron.job SET database = 'signalk' where jobname = 'cron_new_video';

@@ -28,3 +28,5 @@ ALTER ROLE authenticator WITH PASSWORD '${PGSAIL_AUTHENTICATOR_PASSWORD}';
ALTER ROLE grafana WITH PASSWORD '${PGSAIL_GRAFANA_PASSWORD}';
ALTER ROLE grafana_auth WITH PASSWORD '${PGSAIL_GRAFANA_AUTH_PASSWORD}';
END

curl -s -XPOST -Hx-pgsail:${PGSAIL_VERSION} https://api.openplotter.cloud/rpc/telemetry_fn

@@ -1 +1 @@
0.6.1
0.7.4

File diff suppressed because one or more lines are too long
@@ -49,7 +49,7 @@ var moment = require('moment');
        ],
        user_views: [
            { url: '/stays_view', res_body_length: 2},
            { url: '/moorages_view', res_body_length: 2},
            { url: '/moorages_view', res_body_length: 3},
            { url: '/logs_view', res_body_length: 2},
            { url: '/log_view', res_body_length: 2},
            //{ url: '/stats_view', res_body_length: 1},
@@ -247,7 +247,7 @@ var moment = require('moment');
        ],
        user_views: [
            { url: '/stays_view', res_body_length: 2},
            { url: '/moorages_view', res_body_length: 2},
            { url: '/moorages_view', res_body_length: 4},
            { url: '/logs_view', res_body_length: 2},
            { url: '/log_view', res_body_length: 2},
            //{ url: '/stats_view', res_body_length: 1},

@@ -347,7 +347,7 @@ var moment = require("moment");
        res.header["content-type"].should.match(new RegExp("json", "g"));
        res.header["server"].should.match(new RegExp("postgrest", "g"));
        should.exist(res.body.token);
        res.body.token.should.match(user_jwt);
        //res.body.token.should.match(user_jwt);
        console.log(user_jwt);
        should.exist(user_jwt);
        done(err);

@@ -64,6 +64,11 @@ SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('ve
SELECT api.update_logbook_observations_fn(1, '{"observations":{"cloudCoverage":1}}'::TEXT);
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);

\echo 'add tags to logbook'
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);
SELECT api.update_logbook_observations_fn(1, '{"tags": ["tag_name"]}'::TEXT);
SELECT extra FROM api.logbook l WHERE id = 1 AND vessel_id = current_setting('vessel.id', false);

-- Check export
--\echo 'check logbook export fn'
--SELECT api.export_logbook_geojson_fn(1);

@@ -28,7 +28,7 @@ avg_speed | 3.6357142857142852
max_speed | 6.1
max_wind_speed | 22.1
notes |
extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 14.549999999999999}
-[ RECORD 2 ]--+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
name | Norra hamnen to Ekenäs
_from_time | t
@@ -41,7 +41,7 @@ avg_speed | 5.4523809523809526
max_speed | 6.5
max_wind_speed | 37.2
notes |
extra | {"metrics": {"propulsion.main.runTime": 11}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 10.476190476190478}

stays
-[ RECORD 1 ]
@@ -91,12 +91,22 @@ DROP TABLE
stats_logs_fn |

update_logbook_observations_fn
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}}
-[ RECORD 1 ]-----------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 14.549999999999999}

-[ RECORD 1 ]------------------+--
update_logbook_observations_fn | t

-[ RECORD 1 ]---------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": 10}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}}
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}

add tags to logbook
-[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}

-[ RECORD 1 ]------------------+--
update_logbook_observations_fn | t

-[ RECORD 1 ]--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}

@@ -215,7 +215,7 @@ api.moorages
 id | 1
 vessel_id | t
 name | patch moorage name 3
-country |
+country | fi
 stay_code | 2
 stay_duration | PT1M
 reference_count | 1
@@ -228,7 +228,7 @@ notes | new moorage note 3
 id | 2
 vessel_id | t
 name | Norra hamnen
-country |
+country | fi
 stay_code | 4
 stay_duration | PT2M
 reference_count | 2
@@ -243,7 +243,7 @@ vessel_id | t
 name | Ekenäs
 country | fi
 stay_code | 1
-stay_duration |
+stay_duration | PT0S
 reference_count | 1
 latitude | 59.86
 longitude | 23.3657666666667
@@ -268,4 +268,12 @@ default_stay_id | 2
 total_stay | 0
 total_duration | PT1M
 arrivals_departures | 1
+-[ RECORD 3 ]-------+---------------------
+id | 3
+moorage | Ekenäs
+default_stay | Unknown
+default_stay_id | 1
+total_stay | 0
+total_duration | PT0S
+arrivals_departures | 1
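Durations such as `PT1M` and `PT0S` in these fixtures are ordinary PostgreSQL intervals rendered in ISO 8601 style. A self-contained illustration of that output mode (the session setting is standard PostgreSQL, not PostGSail-specific):

    -- intervals print as ISO 8601 durations once intervalstyle is switched
    SET intervalstyle = 'iso_8601';
    SELECT interval '1 minute' AS one_min,   -- PT1M
           interval '0 seconds' AS zero_s;   -- PT0S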
38  tests/sql/logbook.sql  Normal file
@@ -0,0 +1,38 @@
+---------------------------------------------------------------------------
+-- Listing
+--
+
+-- List current database
+select current_database();
+
+-- connect to the DB
+\c signalk
+
+-- output display format
+\x on
+
+\echo 'Validate logbook operation'
+-- set user_id
+SELECT a.user_id as "user_id" FROM auth.accounts a WHERE a.email = 'demo+kapla@openplotter.cloud' \gset
+--\echo :"user_id"
+SELECT set_config('user.id', :'user_id', false) IS NOT NULL as user_id;
+
+-- set vessel_id
+SELECT v.vessel_id as "vessel_id" FROM auth.vessels v WHERE v.owner_email = 'demo+kapla@openplotter.cloud' \gset
+--\echo :"vessel_id"
+SELECT set_config('vessel.id', :'vessel_id', false) IS NOT NULL as vessel_id;
+
+-- List logbook for user
+\echo 'logbook'
+SELECT count(*) FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
+\echo 'logbook'
+SELECT name,_from_time IS NOT NULL AS _from_time,_to_time IS NOT NULL AS _to_time, track_geojson IS NOT NULL AS track_geojson, track_geom, distance,duration,avg_speed,max_speed,max_wind_speed,notes,extra FROM api.logbook WHERE vessel_id = current_setting('vessel.id', false);
+
+-- Delete logbook for user
+\echo 'Delete logbook for user kapla'
+SELECT api.delete_logbook_fn(5); -- delete Tropics Zone
+SELECT api.delete_logbook_fn(6); -- delete Alaska Zone
+
+-- Merge logbook for user
+\echo 'Merge logbook for user kapla'
+SELECT api.merge_logbook_fn(1,2);
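The `\gset` / `set_config` pairing above is how each test session impersonates a user and vessel: `\gset` stores the previous query's result columns in psql variables, and `set_config` publishes the value as a session setting that the row-level-security policies read back via `current_setting`. A stripped-down sketch of the same round trip (the identifier value is hypothetical):

    -- store a value in a psql variable, expose it as a session GUC, read it back
    SELECT 'f590e8a4c66e' AS demo_id \gset
    SELECT set_config('vessel.id', :'demo_id', false);
    SELECT current_setting('vessel.id', false);  -- f590e8a4c66e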
83  tests/sql/logbook.sql.output  Normal file
@@ -0,0 +1,83 @@
+current_database
+------------------
+ signalk
+(1 row)
+
+You are now connected to database "signalk" as user "username".
+Expanded display is on.
+Validate logbook operation
+-[ RECORD 1 ]
+user_id | t
+
+-[ RECORD 1 ]
+vessel_id | t
+
+logbook
+-[ RECORD 1 ]
+count | 4
+
+logbook
+-[ RECORD 1 ]--+------------------------------------------------------------------------------------------
+name | patch log name 3
+_from_time | t
+_to_time | t
+track_geojson | t
+track_geom | 0102000020E61000001C000000B0DEBBE0E68737404DA938FBF0094E40B0DEBBE0E68737404DA938FBF0094E4020D26F5F0786374030BB270F0B094E400C6E7ED60F843740AA60545227084E40D60FC48C03823740593CE27D42074E407B39D9F322803740984C158C4A064E4091ED7C3F357E3740898BB63D54054E40A8A1208B477C37404BA3DC9059044E404C5CB4EDA17A3740C4F856115B034E40A9A44E4013793740D8F0F44A59024E40E4839ECDAA773740211FF46C56014E405408D147067637408229F03B73004E40787AA52C43743740F90FE9B7AFFF4D40F8098D4D18723740C217265305FF4D4084E82303537037409A2D464AA0FE4D4022474DCE636F37402912396A72FE4D408351499D806E374088CFB02B40FE4D4076711B0DE06D3740B356C7040FFE4D404EAC66B0BC6E374058A835CD3BFE4D40D7A3703D0A6F3740D3E10EC15EFE4D4087602F277B6E3740A779C7293AFE4D4087602F277B6E3740A779C7293AFE4D402063EE5A426E3740B5A679C729FE4D40381DEE10EC6D37409ECA7C1A0AFE4D40E2C46A06CB6B37400A43F7BF36FD4D4075931804566E3740320BDAD125FD4D409A2D464AA06E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D40
+distance | 7.6447
+duration | PT27M
+avg_speed | 3.6357142857142852
+max_speed | 6.1
+max_wind_speed | 22.1
+notes | new log note 3
+extra | {"tags": ["tag_name"], "metrics": {"propulsion.main.runTime": "PT10S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": 1}, "avg_wind_speed": 14.549999999999999}
+-[ RECORD 2 ]--+------------------------------------------------------------------------------------------
+name | Norra hamnen to Ekenäs
+_from_time | t
+_to_time | t
+track_geojson | t
+track_geom | 0102000020E610000015000000029A081B9E6E37404A5658830AFD4D40029A081B9E6E37404A5658830AFD4D404806A6C0EF6C3740DA1B7C6132FD4D40FE65F7E461693740226C787AA5FC4D407DD3E10EC1663740B29DEFA7C6FB4D40898BB63D5465374068479724BCFA4D409A5271F6E1633740B6847CD0B3F94D40431CEBE236623740E9263108ACF84D402C6519E2585F37407E678EBFC7F74D4096218E75715B374027C5B45C23F74D402AA913D044583740968DE1C46AF64D405AF5B9DA8A5537407BEF829B9FF54D407449C2ABD253374086C954C1A8F44D407D1A0AB278543740F2B0506B9AF34D409D11A5BDC15737406688635DDCF24D4061C3D32B655937402CAF6F3ADCF14D408988888888583740B3319C58CDF04D4021FAC8C0145837408C94405DB7EF4D40B8F9593F105B37403DC0804BEDEE4D40DE4C5FE2A25D3740AE47E17A14EE4D40DE4C5FE2A25D3740AE47E17A14EE4D40
+distance | 8.8968
+duration | PT20M
+avg_speed | 5.4523809523809526
+max_speed | 6.5
+max_wind_speed | 37.2
+notes |
+extra | {"metrics": {"propulsion.main.runTime": "PT11S"}, "observations": {"seaState": -1, "visibility": -1, "cloudCoverage": -1}, "avg_wind_speed": 10.476190476190478}
+-[ RECORD 3 ]--+------------------------------------------------------------------------------------------
+name | Tropics Zone
+_from_time | t
+_to_time | t
+track_geojson | f
+track_geom | 0102000020E610000002000000A4E85E0D58934FC000DC509B80052C40BC069B43D64553C090510727F3BD2940
+distance | 123
+duration |
+avg_speed |
+max_speed |
+max_wind_speed |
+notes |
+extra |
+-[ RECORD 4 ]--+------------------------------------------------------------------------------------------
+name | Alaska Zone
+_from_time | t
+_to_time | t
+track_geojson | f
+track_geom | 0102000020E610000002000000FDB11ED079F261C090C47F1861B84D40D3505124540B63C09C091C1C8D4A4C40
+distance | 1234
+duration |
+avg_speed |
+max_speed |
+max_wind_speed |
+notes |
+extra |
+
+Delete logbook for user kapla
+-[ RECORD 1 ]-----+--
+delete_logbook_fn | t
+
+-[ RECORD 1 ]-----+--
+delete_logbook_fn | t
+
+Merge logbook for user kapla
+-[ RECORD 1 ]----+-
+merge_logbook_fn |
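Note that `merge_logbook_fn` prints an empty value, consistent with a function returning `void`; the fixture relies on later listings to prove the merge took effect. A hedged way to assert it directly would be to re-run the count from earlier in the script (the expected value is an assumption about this demo dataset after two deletions and one merge):

    -- re-count the demo vessel's logs after delete_logbook_fn and merge_logbook_fn
    SELECT count(*) FROM api.logbook
        WHERE vessel_id = current_setting('vessel.id', false);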
@@ -51,3 +51,12 @@ select count(*) from api.monitoring_temperatures;
 -- Test monitoring for user
 --select * from api.monitoring_humidity;
 select count(*) from api.monitoring_humidity;
+
+\echo 'Test metersToKnots'
+select public.metersToKnots(1);
+
+\echo 'Test radiantToDegrees'
+select public.radiantToDegrees(1);
+
+\echo 'Test valToPercent'
+select public.valToPercent(1);
@@ -36,3 +36,15 @@ Test monitoring_humidity for user
 -[ RECORD 1 ]
 count | 0
 
+Test metersToKnots
+-[ RECORD 1 ]-+-----
+meterstoknots | 1.94
+
+Test radiantToDegrees
+-[ RECORD 1 ]----+---
+radianttodegrees | 57
+
+Test valToPercent
+-[ RECORD 1 ]+----
+valtopercent | 100
+
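The three new helpers are plain unit conversions, and the expected values pin down their rounding: 1 m/s ≈ 1.94 kn, 1 rad ≈ 57°, and a 0-1 ratio maps to 100 %. Minimal definitions consistent with these outputs (sketches under that assumption, not the repository's actual function bodies):

    -- 1 m/s = 3600/1852 kn, rounded to two decimals -> 1.94
    CREATE OR REPLACE FUNCTION public.metersToKnots(m_s numeric)
        RETURNS numeric LANGUAGE sql IMMUTABLE
        AS $$ SELECT round(m_s * 1.9438445, 2) $$;

    -- 1 rad = 180/pi degrees, rounded to an integer -> 57
    CREATE OR REPLACE FUNCTION public.radiantToDegrees(rad double precision)
        RETURNS double precision LANGUAGE sql IMMUTABLE
        AS $$ SELECT round(degrees(rad)) $$;

    -- a 0..1 ratio expressed as a percentage -> 100
    CREATE OR REPLACE FUNCTION public.valToPercent(ratio numeric)
        RETURNS numeric LANGUAGE sql IMMUTABLE
        AS $$ SELECT round(ratio * 100) $$;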
@@ -6,10 +6,10 @@
 You are now connected to database "signalk" as user "username".
 Expanded display is on.
 -[ RECORD 1 ]--+-------------------------------
-server_version | 16.1 (Debian 16.1-1.pgdg110+1)
+server_version | 16.3 (Debian 16.3-1.pgdg120+1)
 
--[ RECORD 1 ]--------+----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
-postgis_full_version | POSTGIS="3.4.1 ca035b9" [EXTENSION] PGSQL="160" GEOS="3.9.0-CAPI-1.16.2" PROJ="7.2.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.10" LIBJSON="0.15" LIBPROTOBUF="1.3.3" WAGYU="0.5.0 (Internal)"
+-[ RECORD 1 ]--------+-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------
+postgis_full_version | POSTGIS="3.4.2 c19ce56" [EXTENSION] PGSQL="160" GEOS="3.11.1-CAPI-1.17.1" PROJ="9.1.1 NETWORK_ENABLED=OFF URL_ENDPOINT=https://cdn.proj.org USER_WRITABLE_DIRECTORY=/var/lib/postgresql/.local/share/proj DATABASE_PATH=/usr/share/proj/proj.db" LIBXML="2.9.14" LIBJSON="0.16" LIBPROTOBUF="1.4.1" WAGYU="0.5.0 (Internal)"
 
 -[ RECORD 1 ]--------------------------------------------------------------------------------------
 Name | citext
@@ -48,12 +48,12 @@ Schema | pg_catalog
 Description | PL/Python3U untrusted procedural language
 -[ RECORD 8 ]--------------------------------------------------------------------------------------
 Name | postgis
-Version | 3.4.1
+Version | 3.4.2
 Schema | public
 Description | PostGIS geometry and geography spatial types and functions
 -[ RECORD 9 ]--------------------------------------------------------------------------------------
 Name | timescaledb
-Version | 2.13.1
+Version | 2.15.3
 Schema | public
 Description | Enables scalable inserts and complex queries for time-series data (Community Edition)
 -[ RECORD 10 ]-------------------------------------------------------------------------------------
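These expectations track a stack upgrade: PostgreSQL 16.1 to 16.3, PostGIS 3.4.1 to 3.4.2, TimescaleDB 2.13.1 to 2.15.3. The snapshotted values come from stock introspection calls, so refreshing the fixture after an upgrade amounts to re-running queries along these lines:

    -- stock introspection; each of these is what the fixture snapshots
    SHOW server_version;
    SELECT postgis_full_version();
    SELECT extname, extversion FROM pg_extension ORDER BY extname;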
@@ -96,24 +96,24 @@ laninline | 0
 lanvalidator | 2248
 lanacl |
 -[ RECORD 4 ]-+-----------
-oid | 13545
+oid | 13568
 lanname | plpgsql
 lanowner | 10
 lanispl | t
 lanpltrusted | t
-lanplcallfoid | 13542
-laninline | 13543
-lanvalidator | 13544
+lanplcallfoid | 13565
+laninline | 13566
+lanvalidator | 13567
 lanacl |
 -[ RECORD 5 ]-+-----------
-oid | 18297
+oid | 18168
 lanname | plpython3u
 lanowner | 10
 lanispl | t
 lanpltrusted | t
-lanplcallfoid | 18294
-laninline | 18295
-lanvalidator | 18296
+lanplcallfoid | 18165
+laninline | 18166
+lanvalidator | 18167
 lanacl |
 
 -[ RECORD 1 ]+-----------
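The OID churn above is expected: OIDs are assigned at object-creation time, so rebuilding the database image shifts them. The fixture effectively snapshots the `pg_language` catalog; projecting away the volatile OID columns would make the comparison stable across rebuilds (a suggestion, not the repository's current approach):

    -- comparing only stable columns avoids false diffs when OIDs move
    SELECT lanname, lanispl, lanpltrusted
        FROM pg_language
        ORDER BY lanname;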
@@ -450,6 +450,24 @@ qual | (vessel_id = current_setting('vessel.id'::text, false))
 with_check | true
 -[ RECORD 23 ]-----------------------------------------------------------------------------------------------------------------------------
+schemaname | api
+tablename | logbook
+policyname | logbook_qgis_role
+permissive | PERMISSIVE
+roles | {qgis_role}
+cmd | ALL
+qual | true
+with_check | false
+-[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
+schemaname | public
+tablename | process_queue
+policyname | public_maplapse_role
+permissive | PERMISSIVE
+roles | {maplapse_role}
+cmd | ALL
+qual | true
+with_check | true
+-[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | stays
 policyname | api_user_role
 permissive | PERMISSIVE
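RECORD 23 documents the new read-only policy for the QGIS role: `qual | true` grants unrestricted visibility while `with_check | false` blocks writes. In `CREATE POLICY` terms, `qual` maps to `USING` and `with_check` to `WITH CHECK`, so a definition matching that catalog row would look like the following (a reconstruction from the pg_policies output above, not a copy of the actual migration):

    -- pg_policies: qual -> USING, with_check -> WITH CHECK
    CREATE POLICY logbook_qgis_role ON api.logbook
        AS PERMISSIVE
        FOR ALL
        TO qgis_role
        USING (true)
        WITH CHECK (false);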
@@ -457,7 +475,7 @@ roles | {user_role}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, true))
 with_check | (vessel_id = current_setting('vessel.id'::text, false))
--[ RECORD 24 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | stays
 policyname | api_scheduler_role
@@ -466,7 +484,7 @@ roles | {scheduler}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, false))
 with_check | (vessel_id = current_setting('vessel.id'::text, false))
--[ RECORD 25 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | stays
 policyname | grafana_role
@@ -475,7 +493,7 @@ roles | {grafana}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, false))
 with_check | false
--[ RECORD 26 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | stays
 policyname | api_anonymous_role
@@ -484,7 +502,7 @@ roles | {api_anonymous}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, false))
 with_check | false
--[ RECORD 27 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | moorages
 policyname | admin_all
@@ -493,7 +511,7 @@ roles | {username}
 cmd | ALL
 qual | true
 with_check | true
--[ RECORD 28 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | moorages
 policyname | api_vessel_role
@@ -502,7 +520,7 @@ roles | {vessel_role}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, false))
 with_check | true
--[ RECORD 29 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | moorages
 policyname | api_user_role
@@ -511,7 +529,7 @@ roles | {user_role}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, true))
 with_check | (vessel_id = current_setting('vessel.id'::text, false))
--[ RECORD 30 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | moorages
 policyname | api_scheduler_role
@@ -520,7 +538,7 @@ roles | {scheduler}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, false))
 with_check | (vessel_id = current_setting('vessel.id'::text, false))
--[ RECORD 31 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | moorages
 policyname | grafana_role
@@ -529,7 +547,7 @@ roles | {grafana}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, false))
 with_check | false
--[ RECORD 32 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | api
 tablename | moorages
 policyname | api_anonymous_role
@@ -538,7 +556,7 @@ roles | {api_anonymous}
 cmd | ALL
 qual | (vessel_id = current_setting('vessel.id'::text, false))
 with_check | false
--[ RECORD 33 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | auth
 tablename | vessels
 policyname | admin_all
@@ -547,7 +565,7 @@ roles | {username}
 cmd | ALL
 qual | true
 with_check | true
--[ RECORD 34 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | auth
 tablename | vessels
 policyname | api_user_role
@@ -556,7 +574,7 @@ roles | {user_role}
 cmd | ALL
 qual | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
 with_check | ((vessel_id = current_setting('vessel.id'::text, true)) AND ((owner_email)::text = current_setting('user.email'::text, true)))
--[ RECORD 35 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | auth
 tablename | vessels
 policyname | grafana_role
@@ -565,7 +583,7 @@ roles | {grafana}
 cmd | ALL
 qual | ((owner_email)::text = current_setting('user.email'::text, true))
 with_check | false
--[ RECORD 36 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | auth
 tablename | accounts
 policyname | api_user_role
@@ -574,7 +592,7 @@ roles | {user_role}
 cmd | ALL
 qual | ((email)::text = current_setting('user.email'::text, true))
 with_check | ((email)::text = current_setting('user.email'::text, true))
--[ RECORD 37 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | auth
 tablename | accounts
 policyname | api_scheduler_role
@@ -583,7 +601,7 @@ roles | {scheduler}
 cmd | ALL
 qual | ((email)::text = current_setting('user.email'::text, true))
 with_check | ((email)::text = current_setting('user.email'::text, true))
--[ RECORD 38 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | auth
 tablename | accounts
 policyname | grafana_proxy_role
@@ -592,7 +610,7 @@ roles | {grafana_auth}
 cmd | ALL
 qual | true
 with_check | false
--[ RECORD 39 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 41 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | public
 tablename | process_queue
 policyname | admin_all
@@ -601,7 +619,7 @@ roles | {username}
 cmd | ALL
 qual | true
 with_check | true
--[ RECORD 40 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 42 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | public
 tablename | process_queue
 policyname | api_vessel_role
@@ -610,7 +628,7 @@ roles | {vessel_role}
 cmd | ALL
 qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
 with_check | true
--[ RECORD 41 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 43 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | public
 tablename | process_queue
 policyname | api_user_role
@@ -619,7 +637,7 @@ roles | {user_role}
 cmd | ALL
 qual | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
 with_check | ((ref_id = current_setting('user.id'::text, true)) OR (ref_id = current_setting('vessel.id'::text, true)))
--[ RECORD 42 ]-----------------------------------------------------------------------------------------------------------------------------
+-[ RECORD 44 ]-----------------------------------------------------------------------------------------------------------------------------
 schemaname | public
 tablename | process_queue
 policyname | api_scheduler_role
@@ -645,12 +663,12 @@ overpass_py_fn | {"name": "Port de la Ginesta", "type": "multipolygon", "leisure
 overpass_py_fn | {"name": "Norra hamnen", "leisure": "marina"}
 
 -[ RECORD 1 ]----------------------------------------------------------------------------------------------------------------------------------------------
-versions_fn | {"api_version" : "0.6.1", "sys_version" : "PostgreSQL 16.1", "timescaledb" : "2.13.1", "postgis" : "3.4.1", "postgrest" : "PostgREST 12.0.2"}
+versions_fn | {"api_version" : "0.7.4", "sys_version" : "PostgreSQL 16.3", "timescaledb" : "2.15.3", "postgis" : "3.4.2", "postgrest" : "PostgREST 12.2.2"}
 
 -[ RECORD 1 ]-----------------
-api_version | 0.6.1
-sys_version | PostgreSQL 16.1
-timescaledb | 2.13.1
-postgis | 3.4.1
-postgrest | PostgREST 12.0.2
+api_version | 0.7.4
+sys_version | PostgreSQL 16.3
+timescaledb | 2.15.3
+postgis | 3.4.2
+postgrest | PostgREST 12.2.2
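`versions_fn` bundles the component versions into one JSON document; the jump from `api_version` 0.6.1 to 0.7.4 is this release, while the rest mirrors the upgraded containers. A hedged sketch of how such a payload can be assembled from catalog data (the real function may source `api_version` and the PostgREST version elsewhere):

    -- versions gathered from the catalogs; 'api_version' is application metadata
    SELECT jsonb_build_object(
            'api_version', '0.7.4',
            'sys_version', 'PostgreSQL ' || split_part(current_setting('server_version'), ' ', 1),
            'timescaledb', (SELECT extversion FROM pg_extension WHERE extname = 'timescaledb'),
            'postgis',     (SELECT extversion FROM pg_extension WHERE extname = 'postgis')
        ) AS versions_fn;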
@@ -182,6 +182,19 @@ else
     exit 1
 fi
 
+# logbook SQL unit tests
+psql ${PGSAIL_DB_URI} < sql/logbook.sql > output/logbook.sql.output
+diff sql/logbook.sql.output output/logbook.sql.output > /dev/null
+#diff -u sql/logbook.sql.output output/logbook.sql.output | wc -l
+#echo 0
+if [ $? -eq 0 ]; then
+    echo SQL logbook.sql OK
+else
+    echo SQL logbook.sql FAILED
+    diff -u sql/logbook.sql.output output/logbook.sql.output
+    exit 1
+fi
+
 # Download and update openapi documentation
 wget ${PGSAIL_API_URI} -O openapi.json
 #echo 0
@@ -195,8 +208,8 @@ fi
 
 # Generate and update mermaid schema documentation
 /root/go/bin/mermerd --runConfig ../docs/ERD/mermerdConfig.yaml
-echo $?
-echo 0
+#echo $?
+echo 0 # not working in github-actions
 if [ $? -eq 0 ]; then
     cp postgsail.md ../docs/ERD/postgsail.md
     echo postgsail.md OK
@@ -204,3 +217,5 @@ else
     echo postgsail.md FAILED
     exit 1
 fi
+
+#npm i -D schemalint && npx schemalint