diff --git a/db/models.py b/db/models.py
index 57aedeb..40bdabc 100644
--- a/db/models.py
+++ b/db/models.py
@@ -190,7 +190,7 @@ class Observation(Base):
value_identifier = Column('value_identifier', String)
value_quantity = Column('value_quantity', Numeric(20, 10), nullable=True)
value_text = Column('value_text', String, nullable=True)
- value_geometry = Column(Geometry(geometry_type='POINTZ', srid=4326, dimension=3), nullable=True)
+ value_geometry = Column(Geometry(geometry_type='POINT', srid=4326, dimension=3), nullable=True)
fk_dataset_id = Column(Integer, ForeignKey(
'gba.dataset.dataset_id'), nullable=False)
@@ -267,7 +267,7 @@ def create_db():
# session_maker = sessionmaker(bind=engine)
# session = session_maker()
# Base.metadata.drop_all(bind=engine)
- # Base.metadata.create_all(engine)
+ Base.metadata.create_all(engine)
if __name__ == "__main__":
diff --git a/notes.txt b/notes.txt
index 615d92d..1324ac5 100644
--- a/notes.txt
+++ b/notes.txt
@@ -81,4 +81,6 @@ https://stackoverflow.com/questions/51737548/how-to-set-primary-key-auto-increme
UPDATE pg_extension SET extrelocatable = TRUE WHERE extname = 'postgis';
ALTER EXTENSION postgis SET SCHEMA gba;
-ALTER DATABASE sos_db SET search_path TO gba, public;
\ No newline at end of file
+ALTER DATABASE sos_db SET search_path TO gba, public;
+or ??
+alter role sos_admin set search_path = "$user", public, gba;
\ No newline at end of file
diff --git a/voegelsberg/ImportMobileObservation_example.xml b/voegelsberg/ImportMobileObservation_example.xml
index ed36639..d2ba7d7 100644
--- a/voegelsberg/ImportMobileObservation_example.xml
+++ b/voegelsberg/ImportMobileObservation_example.xml
@@ -15,11 +15,11 @@
xmlns:xs="http://www.w3.org/2001/XMLSchema" xsi:schemaLocation="http://www.opengis.net/sos/2.0 http://schemas.opengis.net/sos/2.0/sos.xsd http://www.opengis.net/samplingSpatial/2.0 http://schemas.opengis.net/samplingSpatial/2.0/spatialSamplingFeature.xsd">
D5_2
-
+
- 2021-09-01T00:00:00
+ 2021-08-01T00:00:00
@@ -42,8 +42,8 @@
D5_2
origin of D5_2
-
-
+
+
11.597409730065536 47.27196543449542
@@ -60,4 +60,55 @@
-
\ No newline at end of file
+
+
+
+
+{
+ "request": "InsertObservation",
+ "service": "SOS",
+ "version": "2.0.0",
+ "offering": "D5_2",
+ "observation": {
+ "type": "http://www.opengis.net/def/observationType/OGC-OM/2.0/OM_GeometryObservation",
+ "procedure": "D5_2",
+ "observedProperty": "TachymeterLocation",
+ "featureOfInterest": {
+ "identifier": {
+ "value": "D5_2",
+ "codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
+ },
+ "name": [
+ {
+ "value": "origin of D5_2",
+ "codespace": "http://www.opengis.net/def/nil/OGC/0/unknown"
+ }
+ ],
+ "sampledFeature": [
+ "http://www.52north.org/test/featureOfInterest/world"
+ ],
+ "geometry": {
+ "type": "Point",
+ "coordinates": [
+ 10.874314927293595,
+44.48931950733285
+ ],
+ "crs": {
+ "type": "name",
+ "properties": {
+ "name": "EPSG:4326"
+ }
+ }
+ }
+ },
+ "phenomenonTime" : "2021-08-16T15:18:30.738Z",
+"resultTime" : "2021-08-16T15:18:30.738Z",
+ "result": {
+ "type" : "Point",
+ "coordinates" : [
+ 11.597688540227727,
+ 47.271865827824854,
+ 909.7036
+ ]
+ }
+ }
\ No newline at end of file
diff --git a/voegelsberg/import_tachymeter_observations.py b/voegelsberg/import_tachymeter_observations.py
index 50e00b6..0c70b90 100644
--- a/voegelsberg/import_tachymeter_observations.py
+++ b/voegelsberg/import_tachymeter_observations.py
@@ -68,6 +68,19 @@ def main():
.first()
location_dataset.fk_format_id = sensor_format.id
pg_session.commit()
+
+ # offering = Offering(
+ # "https://geomon.geologie.ac.at/52n-sos-webapp/api/offerings/",
+ # sensor,
+ # "Vögelsberg Tachymeter"
+ # )
+ # procedure = Procedure(sensor, sensor)
+ # foi_name = "origin of " + sensor
+ # foi = FoI("degree", "m", (cord_x, cord_y, z_1),
+ # sensor, foi_name)
+ # xml = get_xml(offering, procedure, foi, result_time, identifier)
+ # print(xml)
+
successfully_inserted = create_observation(
location_dataset, row, pg_session)
@@ -76,9 +89,11 @@ def main():
if not location_dataset.is_published:
location_dataset.is_published = 1
location_dataset.is_hidden = 0
- location_dataset.dataset_type = "trajectory"
+ location_dataset.dataset_type = "timeseries"
+ # location_dataset.dataset_type = "trajectory"
location_dataset.observation_type = "simple"
- location_dataset.value_type = "geometry"
+ # location_dataset.value_type = "geometry"
+ location_dataset.value_type = "text"
pg_session.commit()
# last_location_observation = pg_session.query(Observation) \
@@ -106,6 +121,7 @@ def main():
pg_session.close()
+
def create_observation(location_dataset: Dataset, data, pg_session: session):
''' create observation in db'''
# print("Sesnor key exist in JSON data")
@@ -135,8 +151,9 @@ def create_observation(location_dataset: Dataset, data, pg_session: session):
new_observation.result_time = date_obj
new_observation.sampling_time_start = new_observation.result_time
new_observation.sampling_time_end = new_observation.result_time
- new_observation.value_type = "geometry"
- new_observation.value_geometry = f'SRID=4326;POINTZ({cord_x} {cord_y} {z_1})'
+ new_observation.value_type = "text"
+ new_observation.value_geometry = f'POINT({cord_x} {cord_y} {z_1})'
+ new_observation.value_text = '{"type":"Point","coordinates":['+ str(cord_x) +',' + str(cord_y) + ',' + str(z_1) + ']}'
new_observation.fk_dataset_id = location_dataset.id
pg_session.add(new_observation)
print(f"new observation with result time {new_observation.result_time} "
@@ -149,7 +166,7 @@ def create_observation(location_dataset: Dataset, data, pg_session: session):
def actualize_first_last_observations():
- ''' iterate throug all datasets of Voregelsberg project area
+    ''' iterate through all datasets of Voegelsberg project area
and actualize last and first corresponding observations'''
pg_session: session = create_pg_session()
platform_sta_identifier = "voegelsberg_tachymeter"
@@ -165,8 +182,6 @@ def actualize_first_last_observations():
.filter(Platform.sta_identifier == platform_sta_identifier).all()
for location_dataset in voegelsberg_datasets:
- ''' iterate throug all datasets of Voregelsberg project area
- and actualize last and first corresponding observations'''
last_location_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == location_dataset.id) \
.order_by(desc('sampling_time_start')) \
@@ -175,7 +190,7 @@ def actualize_first_last_observations():
location_dataset.last_time = last_location_observation.sampling_time_start
# location_dataset.last_value = last_location_observation.value_quantity
location_dataset.fk_last_observation_id = last_location_observation.id
-
+
first_location_observation = pg_session.query(Observation) \
.filter(Observation.fk_dataset_id == location_dataset.id) \
.order_by(asc('sampling_time_start')) \