- add hourly cron job for importing piezometer data for sensors bohrloch1-6
This commit is contained in:
parent 14d3168d0e
commit b4d7ce40b3

6 changed files with 90 additions and 70 deletions
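The commit message references an hourly cron job; for orientation, such a schedule would look like the crontab entry below (interpreter, script path, and log location are assumptions, not taken from this diff):

# hypothetical crontab entry: run the piezometer import at minute 0 of every hour
0 * * * * /usr/bin/python3 /opt/gschliefgraben/import_piezometer_data.py >> /var/log/piezometer_import.log 2>&1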
@@ -6,10 +6,11 @@ Python version: 3.7
 import json
 import os
 import uuid
-from datetime import datetime
 from sqlalchemy.orm import session
 from sqlalchemy import asc, desc, func
 from dotenv import load_dotenv, find_dotenv
+import requests
+from datetime import datetime
 from db.models import (
     Observation,
     create_pg_session,
@@ -26,9 +27,9 @@ def main():
     pg_session: session = create_pg_session()
     platform_sta_identifier = "pechgraben_piezometer"
     # sensor = "bohrloch1"
-    # sensor_list = os.environ.get('GLASFASER_GSCHLIEFGRABEN_SENSORS', [])
-    sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])
+    # sensor_list = os.environ.get('PIEZOMETER_GSCHLIEFGRABEN_SENSORS', [])
+    sensor_list = json.loads(os.environ['PIEZOMETER_GSCHLIEFGRABEN_SENSORS'])

     url = 'https://jaa5ixl2y0.execute-api.ap-southeast-2.amazonaws.com/v1/data'
     params = {}
     headers = {'content-type': 'application/json'}
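Since main() parses the variable with json.loads, it must hold a JSON array; a plausible .env entry (sensor names taken from the commit message, exact formatting an assumption):

PIEZOMETER_GSCHLIEFGRABEN_SENSORS=["bohrloch1", "bohrloch2", "bohrloch3", "bohrloch4", "bohrloch5", "bohrloch6"]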
@@ -44,7 +45,7 @@ def main():
             Phenomenon.sta_identifier == "Elevation").first()
         if not elevation_dataset:
             print("Sensor " + sensor + " has not been created yet!")
-            exit()
+            continue
         if not elevation_dataset.is_published:
             elevation_dataset.is_published = 1
             elevation_dataset.is_hidden = 0
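Replacing exit() with continue means one sensor without a registered dataset no longer aborts the whole run; a minimal, self-contained sketch of the resulting control flow (names and data are illustrative only):

# illustrative only: bohrloch2 has no dataset and is skipped, not fatal
sensor_list = ["bohrloch1", "bohrloch2", "bohrloch3"]
datasets = {"bohrloch1": "ds1", "bohrloch3": "ds3"}

for sensor in sensor_list:
    elevation_dataset = datasets.get(sensor)
    if not elevation_dataset:
        print("Sensor " + sensor + " has not been created yet!")
        continue  # exit() here would have stopped all later sensors too
    print("importing " + sensor)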
@@ -55,7 +56,21 @@ def main():

         platform_exists: bool = pg_session.query(Platform.id).filter_by(
             sta_identifier=platform_sta_identifier).scalar() is not None
-        if platform_exists:
+        # if platform_exists:
+        #     sensor_platform = pg_session.query(Platform.id) \
+        #         .filter(Platform.sta_identifier == platform_sta_identifier) \
+        #         .first()
+        #     elevation_dataset.fk_platform_id = sensor_platform.id
+
+        if not platform_exists:
+            sensor_platform = Platform()
+            max_id = pg_session.query(func.max(Platform.id)).scalar()
+            # sensor_platform.id = max_id + 1
+            sensor_platform.sta_identifier = platform_sta_identifier.lower()
+            sensor_platform.identifier = platform_sta_identifier.lower()
+            sensor_platform.name = platform_sta_identifier.lower()
+            elevation_dataset.platform = sensor_platform
+        else:
             sensor_platform = pg_session.query(Platform.id) \
                 .filter(Platform.sta_identifier == platform_sta_identifier) \
                 .first()
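The new branch is a get-or-create pattern for the platform; the same idea condensed into a helper (function name is hypothetical, import path assumed to match the script's other model imports):

from db.models import Platform  # assumed import path

def get_or_create_platform(pg_session, sta_identifier: str):
    # hypothetical helper: return the existing platform or build a new one
    platform = pg_session.query(Platform) \
        .filter(Platform.sta_identifier == sta_identifier) \
        .first()
    if platform is None:
        platform = Platform()
        platform.sta_identifier = sta_identifier.lower()
        platform.identifier = sta_identifier.lower()
        platform.name = sta_identifier.lower()
    return platform

Note that the committed code still queries max(Platform.id) but leaves the explicit id assignment commented out, which suggests the primary key is left to the database.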
@@ -66,16 +81,39 @@ def main():
         ).scalar() is not None
         if format_exists:
             sensor_format = pg_session.query(Format.id) \
-                .filter(Format.definition == "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement") \
+                .filter(Format.definition ==
+                        "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_Measurement") \
                 .first()
             elevation_dataset.fk_format_id = sensor_format.id
         if sensor in data:
             create_observation(elevation_dataset, sensor, data, pg_session)
+        pg_session.commit()
+
+        first_elevation_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == elevation_dataset.id) \
+            .order_by(asc('sampling_time_start')) \
+            .first()
+        if first_elevation_observation is not None:
+            elevation_dataset.first_time = first_elevation_observation.sampling_time_start
+            elevation_dataset.first_value = first_elevation_observation.value_quantity
+            elevation_dataset.fk_first_observation_id = first_elevation_observation.id
+        last_elevation_observation = pg_session.query(Observation) \
+            .filter(Observation.fk_dataset_id == elevation_dataset.id) \
+            .order_by(desc('sampling_time_start')) \
+            .first()
+        if last_elevation_observation is not None:
+            elevation_dataset.last_time = last_elevation_observation.sampling_time_start
+            elevation_dataset.last_value = last_elevation_observation.value_quantity
+            elevation_dataset.fk_last_observation_id = last_elevation_observation.id
+
     pg_session.commit()
     pg_session.close()


-def create_observation(elevation_dataset: Dataset, sensor_key: str, data: json, db_session: session):
+def create_observation(elevation_dataset: Dataset,
+                       sensor_key: str,
+                       data: json,
+                       db_session: session):
     ''' create observation in db'''
     print("Sensor key exists in JSON data")
     sensor_object = data[sensor_key]
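The two ordered queries fetch whole Observation rows because first_value/last_value and the observation foreign keys are needed as well; if only the time bounds mattered, a single aggregate query would suffice (illustrative alternative, not part of the commit):

from sqlalchemy import func

# illustrative: both time bounds for one dataset in a single query
first_time, last_time = pg_session.query(
    func.min(Observation.sampling_time_start),
    func.max(Observation.sampling_time_start),
).filter(Observation.fk_dataset_id == elevation_dataset.id).one()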
@@ -86,7 +124,8 @@ def create_observation(elevation_dataset: Dataset, sensor_key: str, data: json,

    existing_observation: bool = (
        db_session.query(Observation)
-        .filter(Observation.result_time == date_obj, Observation.fk_dataset_id == elevation_dataset.id)
+        .filter(Observation.result_time ==
+                date_obj, Observation.fk_dataset_id == elevation_dataset.id)
        .one_or_none()
    )
    # Can we insert this observation?
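For reference, Query.one_or_none() returns the single matching Observation or None and raises MultipleResultsFound when several rows match, so the bool annotation on existing_observation is looser than the actual return type; the insert guard that follows only needs the None check.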
@@ -100,6 +139,7 @@ def create_observation(elevation_dataset: Dataset, sensor_key: str, data: json,
     new_observation.sampling_time_end = new_observation.result_time
     new_observation.value_type = "quantity"
+    new_observation.value_quantity = abstich
     new_observation.fk_dataset_id = elevation_dataset.id
     db_session.add(new_observation)

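"Abstich" is the German term for the measured depth to the water table below a reference point. For context, a self-contained sketch of how one such observation row is assembled (timestamp and value are made up; the import path is an assumption):

from datetime import datetime
from db.models import Observation  # assumed import path

new_observation = Observation()
new_observation.result_time = datetime(2022, 3, 1, 12, 0)  # made-up timestamp
new_observation.sampling_time_start = new_observation.result_time
new_observation.sampling_time_end = new_observation.result_time
new_observation.value_type = "quantity"
new_observation.value_quantity = 2.35  # abstich in metres, made-up value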
@@ -135,6 +175,6 @@ def test():

 if __name__ == "__main__":
     load_dotenv(find_dotenv())
-    sensor_list1 = os.environ.get('GLASFASER_GSCHLIEFGRABEN_SENSORS', [])
+    sensor_list1 = os.environ.get('PIEZOMETER_GSCHLIEFGRABEN_SENSORS', [])
     print(f'sensors: {sensor_list1} .')
     main()
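Note that os.environ.get('PIEZOMETER_GSCHLIEFGRABEN_SENSORS', []) yields a str when the variable is set but the list default when it is not; the value is only printed here, and main() re-parses it with json.loads, so the mixed-type default is harmless in practice.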