- Ausbesserungen Gschliefgraben Glasfaser

This commit is contained in:
Arno Kaimbacher 2022-03-07 15:37:28 +01:00
parent 87cb78af65
commit 675dd2f641
8 changed files with 335 additions and 57 deletions

View file

@@ -5,23 +5,28 @@ Sqlalchemy version: 1.2.15
Python version: 3.10
'''
import os, json
import os
import json
import uuid
from datetime import datetime
from dotenv import load_dotenv, find_dotenv
from sqlalchemy.orm import session
from sqlalchemy import func, asc, desc
# from db.pg_models import Platform
from gschliefgraben_glasfaser.models import ObservationSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
from gschliefgraben_glasfaser.models import (
ObservationSchema, Observation, create_pg_session,
Dataset, Procedure, Phenomenon, Platform)
from gschliefgraben_glasfaser.my_api import MyApi
from datetime import datetime
def main():
''' main method '''
pg_session: session = create_pg_session()
platform_sta_identifier = "gschliefgraben_glasfaser"
platform_sta_identifier = "gschliefgraben_glasfaser"
# sensor_list = ["inclino1_14", "inclino1_02"]
#sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])
# this will print elements along with their index value
for sensor in sensor_list:
pg_query = pg_session.query(Dataset) \
@@ -32,7 +37,8 @@ def main():
Phenomenon.sta_identifier == "Slope").first()
if not slope_dataset:
print("Sensor " + sensor + " ist noch nicht angelegt!")
exit()
# exit()
continue
if not slope_dataset.is_published:
slope_dataset.is_published = 1
slope_dataset.is_hidden = 0
@@ -40,18 +46,18 @@ def main():
slope_dataset.observation_type = "simple"
slope_dataset.value_type = "quantity"
pg_session.commit()
platform_exists: bool = pg_session.query(Platform.id).filter_by(
sta_identifier = platform_sta_identifier).scalar() is not None
sta_identifier=platform_sta_identifier).scalar() is not None
if platform_exists:
sensor_platform = pg_session.query(Platform.id) \
.filter(Platform.sta_identifier == platform_sta_identifier) \
.first()
slope_dataset.fk_platform_id = sensor_platform.id
# create all the observation for the given sensor names
create_observations(sensor, slope_dataset)
first_slope_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == slope_dataset.id) \
.order_by(asc('sampling_time_start')) \
@@ -68,27 +74,29 @@ def main():
slope_dataset.last_time = last_slope_observation.sampling_time_start
slope_dataset.last_value = last_slope_observation.value_quantity
slope_dataset.fk_last_observation_id = last_slope_observation.id
pg_session.commit()
pg_session.close()
def create_observations(sensor: str, slope_dataset: Dataset):
''' create_observations method for given sensor '''
pg_session: session = create_pg_session()
def create_observations(sensor: str, slope_dataset: Dataset):
    """Import today's observations for *sensor* into *slope_dataset*.

    Opens its own DB session, builds an authenticated API client from the
    TOKEN_API environment variable, and delegates the actual import of
    today's data to create_db_observations before committing.
    """
    db_session: session = create_pg_session()

    # Authenticated API client; token comes from the environment.
    api_token = os.environ.get("TOKEN_API")
    api_client = MyApi(api_token)

    # Only today's data is requested; the API expects 'YYYY-MM-DD'.
    today = datetime.today().strftime('%Y-%m-%d')
    create_db_observations(sensor, today, api_client,
                           db_session, slope_dataset)
    db_session.commit()
def create_db_observations(sensor: str, query_date, test_api, pg_session, dataset: Dataset):
''' to do '''
query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
@@ -113,7 +121,8 @@ def create_db_observations(sensor: str, query_date, test_api, pg_session, datase
max_id = create_observation(
observation_json, pg_session, max_id, dataset)
# pg_session.commit()
print("observations for date " + query_date + " succesfully imported \n")
print("observations for date " + query_date + " and sensor " + sensor +
" succesfully imported \n")
def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset):
@@ -142,8 +151,8 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id,
new_observation: Observation = schema.load(observation_json)
new_observation.id = max_id
new_observation.sta_identifier = str(uuid.uuid4())
new_observation.sampling_time_start=new_observation.result_time
new_observation.sampling_time_end=new_observation.result_time
new_observation.sampling_time_start = new_observation.result_time
new_observation.sampling_time_end = new_observation.result_time
new_observation.fk_dataset_id = dataset.id
# Add the person to the database
@@ -159,6 +168,10 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id,
else:
print(409, f'Observation {ob_id} exists already')
return max_id
if __name__ == "__main__":
    # Load environment variables from the nearest .env file before anything
    # else reads them (sensor list, API token, DB credentials).
    load_dotenv(find_dotenv())
    configured_sensors = os.environ.get('GLASFASER_GSCHLIEFGRABEN_SENSORS', [])
    print('sensors: {}'.format(configured_sensors))
    main()