- Ausbesserungen Gschliefgraben Glasfaser

This commit is contained in:
Arno Kaimbacher 2022-03-07 15:37:28 +01:00
parent 87cb78af65
commit 675dd2f641
8 changed files with 335 additions and 57 deletions

View file

@@ -15,24 +15,28 @@ import json
# parentdir = os.path.dirname(currentdir)
# sys.path.insert(0, parentdir)
# import requests
from datetime import datetime, date, timedelta
from dotenv import load_dotenv, find_dotenv
from sqlalchemy.orm import session
from sqlalchemy import func, asc, desc
# from db.pg_models import Platform
from gschliefgraben_glasfaser.models import ObservationSchema, Person, PersonSchema, Observation, create_pg_session, Dataset, Procedure, Phenomenon, Platform
from gschliefgraben_glasfaser.models import (
ObservationSchema, Person, PersonSchema, Observation,
create_pg_session, Dataset, Procedure, Phenomenon, Platform)
from gschliefgraben_glasfaser.my_api import MyApi
from datetime import datetime, date, timedelta
def main():
''' main method '''
pg_session: session = create_pg_session()
platform_sta_identifier = "gschliefgraben_glasfaser"
platform_sta_identifier = "gschliefgraben_glasfaser"
# sensor_list = ["inclino1_14", "inclino1_02"]
#sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")
sensor_list = json.loads(os.environ['GLASFASER_GSCHLIEFGRABEN_SENSORS'])
# this will print elements along with their index value
for sensor in sensor_list:
pg_query = pg_session.query(Dataset) \
.join(Procedure) \
.join(Phenomenon) \
@@ -49,9 +53,9 @@ def main():
slope_dataset.observation_type = "simple"
slope_dataset.value_type = "quantity"
pg_session.commit()
platform_exists: bool = pg_session.query(Platform.id).filter_by(
sta_identifier = platform_sta_identifier).scalar() is not None
sta_identifier=platform_sta_identifier).scalar() is not None
if platform_exists:
sensor_platform = pg_session.query(Platform.id) \
.filter(Platform.sta_identifier == platform_sta_identifier) \
@@ -59,10 +63,10 @@ def main():
slope_dataset.fk_platform_id = sensor_platform.id
else:
exit()
    # create all the observations for the given sensor names
create_observations(sensor, slope_dataset)
# update first and last observations for the dataset
first_slope_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == slope_dataset.id) \
@@ -80,20 +84,21 @@ def main():
slope_dataset.last_time = last_slope_observation.sampling_time_start
slope_dataset.last_value = last_slope_observation.value_quantity
slope_dataset.fk_last_observation_id = last_slope_observation.id
pg_session.commit()
pg_session.close()
def create_observations(sensor: str, slope_dataset: Dataset):
''' create_observations method for given sensor '''
pg_session: session = create_pg_session()
def create_observations(sensor: str, slope_dataset: Dataset):
''' create_observations method for given sensor '''
pg_session: session = create_pg_session()
# The size of each step in days
# consider the start date as 2021-february 1 st
start_date = date(2022, 1, 1)
# consider the end date as 2021-march 1 st
end_date = date(2022, 3, 3)
end_date = date(2022, 3, 6)
# delta time
delta = timedelta(days=7)
@@ -104,11 +109,12 @@ def create_observations(sensor: str, slope_dataset: Dataset):
while start_date <= end_date:
# print(start_date, end="\n")
query_date_start: str = start_date.strftime('%Y-%m-%d')
end_date_temp: date = start_date + delta # (plus 7 days)
end_date_temp: date = start_date + delta # (plus 7 days)
if end_date_temp > end_date:
end_date_temp = end_date
query_date_end: str = end_date_temp.strftime('%Y-%m-%d')
create_db_observations(sensor, query_date_start, query_date_end, test_api, pg_session, slope_dataset)
create_db_observations(
sensor, query_date_start, query_date_end, test_api, pg_session, slope_dataset)
        # for next loop step set new start_date (1 day greater than last end_date)
start_date = end_date_temp + timedelta(days=1)
pg_session.commit()
@@ -136,7 +142,8 @@ def create_observations(sensor: str, slope_dataset: Dataset):
# pg_session.commit()
def create_db_observations(sensor, query_date_start, query_date_end, test_api, pg_session, dataset: Dataset):
def create_db_observations(sensor, query_date_start, query_date_end, test_api,
pg_session, dataset: Dataset):
''' to do '''
query_date_start_obj = datetime.strptime(query_date_start, "%Y-%m-%d")
query_date_end_obj = datetime.strptime(query_date_end, "%Y-%m-%d")
@@ -169,14 +176,16 @@ def create_db_observations(sensor, query_date_start, query_date_end, test_api,
max_id = create_observation(
observation_json, pg_session, max_id, dataset, value_identifier_db_list)
# pg_session.commit()
print("observations for date " + query_date_start + " to " + query_date_end + " succesfully imported \n")
print("observations for date " + query_date_start +
" to " + query_date_end + " for sensor " + sensor + " succesfully imported \n")
def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset, value_identifier_db_list):
def create_observation(observation_json: ObservationSchema, db_session,
max_id, dataset: Dataset, value_identifier_db_list):
"""
This function creates a new observation in the people structure
based on the passed-in observation data
:param observation: person to create in people structure
:param observation: observation to create in people structure
:return: 201 on success, observation on person exists
"""
@@ -259,4 +268,7 @@ def create(person_json: PersonSchema):
if __name__ == "__main__":
load_dotenv(find_dotenv())
print('sensors: {}'.format(os.environ.get(
'GLASFASER_GSCHLIEFGRABEN_SENSORS', [])))
main()