- add daily cron job for inserting sensor data
commit fdc5da7373 (parent f104e9e74b)
9 changed files with 228 additions and 112 deletions
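The diff below only covers the import script itself; the cron side of the commit is not part of this excerpt. As a rough illustration only (the script path, service user, and environment file are hypothetical, not taken from this commit), a daily crontab entry driving the script could look like the following, together with the usual Python entry-point guard so cron can invoke the module directly:

    # /etc/cron.d/gschliefgraben -- hypothetical example: run the import once a day at 03:00
    # 0 3 * * *  root  . /opt/gschliefgraben/.env && /usr/bin/python3 /opt/gschliefgraben/import_observations.py
    if __name__ == "__main__":
        main()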
@@ -7,6 +7,8 @@ Python version: 3.7
import os
import uuid
from typing import List
from itertools import chain
# import sys, inspect
# currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
# parentdir = os.path.dirname(currentdir)
@@ -25,75 +27,75 @@ from datetime import datetime, date, timedelta
# print(response.json()) # shows the response's JSON response body, if it has one
# print(response.content) # get the data content of the response


def main():
    ''' main method '''
    db_user = os.environ.get("POSTGIS_DBUSER")
    print(db_user)

    pg_session: session = create_pg_session()
    # pg_person: Person = pg_session.query(Person).first()
    observation: Observation = pg_session.query(Observation).first()
    # print(pg_person)

    # serialize db data to json
    # person_schema = PersonSchema()
    # dump_data = person_schema.dump(pg_person)
    # print(dump_data)
    # serialize db data to json
    # observation_schema = ObservationSchema()
    # dump_data = observation_schema.dump(observation)
    # print(dump_data)

    # request ortmann api
    # response =
    # requests.get('https://api.dgnss-sensors.com/gschliefgraben?sensors=("inclino1_14")',
    #     headers={
    #         'Authorization': 'Bearer' + token,
    #         'cache-control': 'no-cache',
    #         'Content-Type': 'application/x-www-form-urlencoded',
    #         'accept': 'application/json'
    #     },
    #     data='grant_type=client_credentials&scope=gschliefgraben')
    # print(response)

    sensor: str = "inclino1_14"
    pg_query = pg_session.query(Dataset) \
        .join(Procedure) \
        .join(Phenomenon) \
        .filter(Procedure.sta_identifier == sensor.lower())
    slope_dataset: Dataset = pg_query.filter(
        Phenomenon.sta_identifier == "Slope").first()
    if not slope_dataset.is_published:
        slope_dataset.is_published = 1
        slope_dataset.is_hidden = 0
        slope_dataset.dataset_type = "timeseries"
        slope_dataset.observation_type = "simple"
        slope_dataset.value_type = "quantity"
        pg_session.commit()
    platform_sta_identifier = "gschliefgraben_glasfaser"
    # sensor_list = ["inclino1_14", "inclino1_02"]
    sensor_list = os.environ.get("GLASFASER_GSCHLIEFGRABEN_SENSORS")

    # iterate over the configured sensor names along with their index value
    for sensor in enumerate(sensor_list):

        pg_query = pg_session.query(Dataset) \
            .join(Procedure) \
            .join(Phenomenon) \
            .filter(Procedure.sta_identifier == sensor.lower())
        slope_dataset: Dataset = pg_query.filter(
            Phenomenon.sta_identifier == "Slope").first()
        if not slope_dataset:
            print("Sensor " + sensor + " has not been created yet!")
            exit()
        if not slope_dataset.is_published:
            slope_dataset.is_published = 1
            slope_dataset.is_hidden = 0
            slope_dataset.dataset_type = "timeseries"
            slope_dataset.observation_type = "simple"
            slope_dataset.value_type = "quantity"
            pg_session.commit()

        platform_exists: bool = pg_session.query(Platform.id).filter_by(
            sta_identifier=platform_sta_identifier).scalar() is not None
        if platform_exists:
            sensor_platform = pg_session.query(Platform.id) \
                .filter(Platform.sta_identifier == platform_sta_identifier) \
                .first()
            slope_dataset.fk_platform_id = sensor_platform.id
        else:
            exit()

        # create all the observations for the given sensor
        create_observations(sensor, slope_dataset)


def create_observations(sensor: str, slope_dataset: Dataset):
    ''' create_observations method for given sensor '''

    pg_session: session = create_pg_session()

    # The size of each step in days
    # consider the start date as 2022, January 1st
    start_date = date(2022, 1, 1)
    # consider the end date as 2022, March 1st
    end_date = date(2022, 3, 1)
    end_date = date(2022, 3, 3)

    # delta time
    delta = timedelta(days=1)
    delta = timedelta(days=7)
    token_api = os.environ.get("TOKEN_API")
    test_api = MyApi(token_api)

    # iterate over range of dates
    while start_date <= end_date:
        # print(start_date, end="\n")
        query_date = start_date.strftime('%Y-%m-%d')
        create_db_observations(query_date, test_api, pg_session, slope_dataset)
        start_date += delta
        query_date_start: str = start_date.strftime('%Y-%m-%d')
        end_date_temp: date = start_date + delta  # (plus 7 days)
        if end_date_temp > end_date:
            end_date_temp = end_date
        query_date_end: str = end_date_temp.strftime('%Y-%m-%d')
        create_db_observations(sensor, query_date_start, query_date_end, test_api, pg_session, slope_dataset)
        # for next loop step set new start_date (1 day greater than last end_date)
        start_date = end_date_temp + timedelta(days=1)
    pg_session.commit()

    # for i in rrule(DAILY, dtstart=start_date, until=end_date):
    #     print(i.strftime('%Y%b%d'), sep='\n')

    # query_date = "2022-02-28"
    # create_db_observations(query_date, test_api, pg_session)
    # query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
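The loop above walks the requested date range in week-sized windows and requests one window per API call. A standalone sketch of that chunking logic is shown below; weekly_chunks is an illustrative helper, not a function from this commit, and the 7-day window size is taken from the delta = timedelta(days=7) line above:

    from datetime import date, timedelta

    def weekly_chunks(start: date, end: date, step: timedelta = timedelta(days=7)):
        """Yield (window_start, window_end) pairs covering start..end; each window
        runs from its start to at most `step` later, and the next window begins
        one day after the previous one ended, mirroring the loop in the diff."""
        while start <= end:
            window_end = min(start + step, end)
            yield start, window_end
            start = window_end + timedelta(days=1)

    # for example:
    # list(weekly_chunks(date(2022, 1, 1), date(2022, 3, 3)))
    # -> [(date(2022, 1, 1), date(2022, 1, 8)), ..., (date(2022, 2, 26), date(2022, 3, 3))]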
@@ -117,13 +119,20 @@ def main():
    # pg_session.commit()


def create_db_observations(query_date, test_api, pg_session, dataset: Dataset):
def create_db_observations(sensor, query_date_start, query_date_end, test_api, pg_session, dataset: Dataset):
    ''' to do '''
    query_date_obj = datetime.strptime(query_date, "%Y-%m-%d")
    data = test_api.getSensorData("inclino1_14", query_date)
    query_date_start_obj = datetime.strptime(query_date_start, "%Y-%m-%d")
    query_date_end_obj = datetime.strptime(query_date_end, "%Y-%m-%d")
    data = test_api.getSensorData(sensor, query_date_start, query_date_end)
    observation_array = (data['FeatureCollection']
                         ['Features'][0]['geometry']['properties'][0])
    # print(observation_array)
    result = (
        pg_session.query(Observation.value_identifier)
        .filter(Observation.fk_dataset_id == dataset.id)
        .all()
    )
    value_identifier_db_list: List[str] = list(chain(*result))

    max_id = pg_session.query(func.max(Observation.id)).scalar()
    if max_id is None:
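The hunk above replaces the per-observation duplicate lookup with a single query that prefetches every value_identifier already stored for the dataset; chain(*result) flattens the one-element row tuples that SQLAlchemy returns. A small sketch of that flattening step, plus an optional set-based variant for O(1) membership tests (the set is only a suggestion, not something this commit does):

    from itertools import chain

    # rows as returned by pg_session.query(Observation.value_identifier).all():
    # a list of one-element tuples
    rows = [("101",), ("102",), ("103",)]

    value_identifier_db_list = list(chain(*rows))    # ['101', '102', '103']
    print("101" in value_identifier_db_list)         # True (linear scan per lookup)

    # optional variant, not in the commit: a set gives O(1) membership tests
    value_identifier_db_set = set(value_identifier_db_list)
    print("104" in value_identifier_db_set)          # False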
@@ -132,19 +141,21 @@ def create_db_observations(query_date, test_api, pg_session, dataset: Dataset):
    for observation_json in observation_array:
        ob_date_time = observation_json.get('DateTime')
        datetime_obj = datetime.strptime(ob_date_time, "%Y-%m-%dT%H:%M:%S.%fZ")
        if datetime_obj.date() != query_date_obj.date():
        if datetime_obj.date() < query_date_start_obj.date():
            continue
        if datetime_obj.date() > query_date_end_obj.date():
            continue
        ob_value = observation_json.get('Value')
        if ob_value is None:
            continue
        # max_id = max_id + 1
        max_id = create_observation(
            observation_json, pg_session, max_id, dataset)
            observation_json, pg_session, max_id, dataset, value_identifier_db_list)
        # pg_session.commit()
    print("observations for date " + query_date + " successfully imported \n")
    print("observations for date " + query_date_start + " to " + query_date_end + " successfully imported \n")


def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset):
def create_observation(observation_json: ObservationSchema, db_session, max_id, dataset: Dataset, value_identifier_db_list):
    """
    This function creates a new observation in the observation table
    based on the passed-in observation data
@@ -155,14 +166,15 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id,
    ob_id: str = str(observation_json.get('id'))
    # db_session = create_pg_session()

    existing_observation: bool = (
        db_session.query(Observation)
        .filter(Observation.value_identifier == ob_id)
        .one_or_none()
    )
    # existing_observation: bool = (
    #     db_session.query(Observation)
    #     .filter(Observation.value_identifier == ob_id)
    #     .one_or_none()
    # )
    existing_observation: bool = ob_id in value_identifier_db_list

    # Can we insert this observation?
    if existing_observation is None:
    if existing_observation is False:
        max_id += 1
        # Create an observation instance using the schema and the passed-in data
        schema = ObservationSchema()
@@ -170,8 +182,8 @@ def create_observation(observation_json: ObservationSchema, db_session, max_id,
        new_observation: Observation = schema.load(observation_json)
        new_observation.id = max_id
        new_observation.sta_identifier = str(uuid.uuid4())
        new_observation.sampling_time_start=new_observation.result_time
        new_observation.sampling_time_end=new_observation.result_time
        new_observation.sampling_time_start = new_observation.result_time
        new_observation.sampling_time_end = new_observation.result_time
        new_observation.fk_dataset_id = dataset.id

        # Add the observation to the database