Commit e8a70ae1 authored by Timm Schoening's avatar Timm Schoening
Browse files

adding support for the OSIS underway API to publish position data

parent f4e07a1b
......@@ -142,11 +142,17 @@ def createiFDO(header:dict,items:dict):
# Validate item information
invalid_items = 0
missing_all_items = {}
for item in items:
missing = miqtt.isValidiFDOItem(item,header,all_items_have)
if len(missing) > 0:
for req in missing:
if req not in missing_all_items:
missing_all_items[req] = [item['image-filename']]
# Put all item information into the yaml struct to write to disk
yml['image-set-items'][item['image-filename']] = {}
for it in item:
......@@ -154,13 +160,20 @@ def createiFDO(header:dict,items:dict):
yml['image-set-items'][item['image-filename']][it] = item[it]
invalid_items += 1
print("Invalid image item:",item)
if invalid_items == len(items):
raiseException("All items are invalid")
elif invalid_items > 0:
print(invalid_items," items were invalid (of",len(items),")")
# Validate header information
except Exception as e:
print("iFDO Header is not complete",all_items_have)
return False
# Add all header fields to the yaml struct for writing
for key in header:
from elements_sdk import StorageApi, MediaLibraryApi
def getWorkspaceIDsForProduction(api:StorageApi,production_id:int):
""" Returns a list of all workspaces in the production given by the production.ID.
Formatted as a list of dicts ([{'name','id':workspace.ID,'path':workspace.path},...]) """
wss = api.get_all_workspaces(production=production_id)
ret = []
for ws in wss:
return ret
def getEventsInWorkspace(api:StorageApi,workspace_id:int):
""" Returns a list of folder names in the root of the workspace."""
ws = api.get_workspace(id=workspace_id)
dirs = api.get_file(path=ws.path)
ret = []
for dir in dirs.files:
if dir.is_dir == True and[0] != ".":
return ret
def getEquipmentInEvent(api:StorageApi,workspace_id:int,event:str):
""" Returns a list of equipment names used in the event."""
ws = api.get_workspace(id=workspace_id)
eqs = api.get_file(path=ws.path+"/"+event)
ret = []
for eq in eqs.files:
if eq.is_dir == True and[0] != ".":
return ret
def getAssetDownloadURLByName(api:MediaLibraryApi,name:str):
    """ Searches for the one asset of filename name and returns its first bundle's id
    (as a string, for downloading).

    Raises IndexError if no asset with that display name exists. """
    import ast
    assets = api.get_all_assets(display_name=name)
    # TODO: Why is this necessary? Why does the Elements API deliver this as a string???
    # Security fix: ast.literal_eval parses the repr'd list of dicts without
    # executing arbitrary code, unlike the original eval().
    bundles = ast.literal_eval(assets[0].bundles)
    return str(bundles[0]['id'])
def getiFDO(api:StorageApi,workspace_id:int,event:str,equipment:str):
""" Returns the file path to the iFDO """
ws = api.get_workspace(id=workspace_id)
dirs = api.get_file(path=ws.path)
for dir in dirs.files:
if == event:
ifdo_path = ws.path + "/" + event + "/" + equipment + "/products/" + event + "_" + equipment + "_iFDO.yaml"
return api.get_file(path = ifdo_path).path
return False
from osisunderwayconnector import OsisUnderwayConnector
import mariqt.geo as miqtg
import mariqt.files as miqtf
import mariqt.variables as miqtv
import mariqt.navigation as miqtn
import json
import yaml
import datetime
# TODO: iFDO Connector
# Reads a DSHIP event CSV and pushes its positions (with event names as payload)
# to the OSIS underway API via a MarIQTConnector.
# NOTE(review): this rendering appears truncated — the connector 'con' is built
# but no set_positions()/do_import() call is visible; confirm against the repo.
# NOTE(review): 'miqtsu' is not among the visible imports here — presumably an
# alias for the module defining MarIQTConnector; verify the import exists.
def uploadEventListToUnderway(csv_path:str,platform:str,user:str,api_url:str = miqtv.apis['osis_underway']):
# Parse all t/lat/lon/depth positions from the CSV (DSHIP column names / date format)
positions = miqtn.readAllPositionsFromFilePath(csv_path,{'utc':'Date Time','lon':'Longitude','lat':'Latitude','dep':'Depth'},miqtv.date_formats['dship'])
# Event names per timestamp, keyed by the 'Date Time' column
tmp_events = miqtf.tabFileData(csv_path,['Date Time','Event'],key_col = 'Date Time')
events = {}
for e in tmp_events:
# Re-key events by unix timestamp (CSV times are interpreted as UTC, hence "+0000")
dt = datetime.datetime.strptime(e+"+0000",miqtv.date_formats['dship']+"%z")
events[int(dt.timestamp())] = tmp_events[e]['Event']
# Connector for the underway API; 'MarIQTEvents' is the datastream name
con = miqtsu.MarIQTConnector(api_url,platform,user,'MarIQTEvents')
class MarIQTConnector(OsisUnderwayConnector):
""" Connects the mariqt positions world to the OSIS underway positions world.
Create an instance of this class and provide it with the API URL (ask cfaber for one if you do not know it) and
a platform (shortname) for the gear you are adding positions for (again, ask cfaber ... ).
Then get your positions ready in a mariqt.geo.Positions format.
If you want to add payload to the data (underway-speech for e.g. parameters like temperature at a position,
or a station name) then you also need to provide this as a list of equal size as the Positions list.
Once you have the instance of this object created, run its *do_import* method to do the magic!"""
def __init__(self, api_url:str, platform:str, contact:str, stream:str = "MarIQT"):
self.platform = platform = contact = stream
self.positions = []
def datastream(self):
def contact_person(self):
def get_positions(self):
return self.positions
def set_positions(self, positions:miqtg.Positions, payloads:list = []):
use_payload = False
if len(payloads) > 0 and len(payloads) != positions.len():
raise Exception("Positions and payload lengths do not match!")
use_payload = True
self.positions = []
for utc in positions.positions:
pos = positions.positions[utc]
# Get the time in the correct format
utc_str = datetime.datetime.fromtimestamp(pos.utc,tz=datetime.timezone.utc).strftime(miqtv.date_formats['underway'])
if use_payload:
payload = payloads[utc]
self.positions.append({'latitude', 'longitude':pos.lon, 'obs_timestamp':utc_str, 'platform':self.platform, 'payload': {'data': {'event':payload},'data_format': "string"}})
self.positions.append({'latitude', 'longitude':pos.lon, 'obs_timestamp':utc_str, 'platform':self.platform})
......@@ -174,6 +174,9 @@ def isValidiFDOField(field,value):
return value
def isValidiFDOItem(item:dict,header:dict,all_items_have:dict):
missing_all_items = []
for req in miqtv.ifdo_item_fields:
field_found = False
......@@ -194,12 +197,14 @@ def isValidiFDOItem(item:dict,header:dict,all_items_have:dict):
if not field_found:
if req in all_items_have:
del all_items_have[req]
if not alt_field_found:
raise Exception('Missing',req,'in item',item,"and alternative fields.")
# A required field was found, now check its value
item[req] = isValidiFDOField(req,item[req])
return missing_all_items
def isValidiFDOCoreHeader(header:dict,all_items_have:dict):
""" A dictionary holding various header field names to store 4.5D navigation information in the form of: t (utc time), x (longitude), y (latitude), z (depth: below sea level), a (altitude: above seafloor)"""
version = '0.9.0'
version = '0.2.5'
apis = {
pos_header = {
# Field/column name definition for internally handling this kind of t,y,x,z,h position data
......@@ -82,7 +86,8 @@ date_formats = {"pangaea":"%Y-%m-%dT%H:%M:%S",
"mariqt_short":"%Y-%m-%d %H:%M:%S",
"dship":"%Y/%m/%d %H:%M:%S"}
"dship":"%Y/%m/%d %H:%M:%S",
col_header = {'pangaea':{'annotation_label':'Annotation label'},
......@@ -115,8 +120,6 @@ ifdo_header_fields = {
'image-set-sensor':{'comment':'Sensors URN or Equipment Git ID (Handle)','alt-fields':['image-sensor']},
'image-set-uuid':{'comment':'A UUID (version 4 - random) for the entire image set','alt-fields':['']},
'image-set-handle':{'comment':'A Handle (using the UUID?) to point to the landing page of the data set','alt-fields':['']},
'image-set-data-handle':{'comment':'A Handle (using the UUID?) to point from the metadata to the data','alt-fields':['']},
'image-set-metadata-handle':{'comment':'A Handle (using the UUID?) to point to this metadata record','alt-fields':['']},
'image-set-creators':{'comment':'Orcids (or Name, E-Mail)','alt-fields':['image-creators']},
'image-set-pi':{'comment':'Orcid (or Name & E-Mail) of principal investigator','alt-fields':['image-pi']},
'image-set-license':{'comment':'License to use the data (should be FAIR!)','alt-fields':['image-license']},
......@@ -4,7 +4,7 @@ from setuptools import setup
description='The MareHub & Marine Imaging Community Image QA/QC and curation toolbox',
author='Timm Schoening',
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment