Commit 28fc67f3 authored by Timm Schoening's avatar Timm Schoening
Browse files

initial commit after adapting from previous version

parent 87301aa8
......@@ -2,4 +2,4 @@
# MarIQT
Image Quality control / quality assurance and curation Tools (IQT) conceptualized and developed by the MareHub working group on Videos/Images (part of the DataHub, a research data manangement initiative by the Helmholtz association). The MarIQT core is a [python package](https://gitlab.hzdr.de/datahub/marehub/ag-videosimages/software/mar-iqt/-/tree/master/mariqt) which is helpful on its own but is key to the more user-friendly [jupyter notebooks](https://gitlab.hzdr.de/datahub/marehub/ag-videosimages/software/mar-iqt/-/tree/master/jupyter) that make extensive use of the python package.
Image Quality control / quality assurance and curation Tools (IQT) conceptualised and developed by the MareHub working group on Videos/Images (part of the DataHub, a research data management initiative by the Helmholtz association). The MarIQT core is a [python package](https://gitlab.hzdr.de/datahub/marehub/ag-videosimages/software/mar-iqt/-/tree/master/mariqt) which is helpful on its own but is key to the more user-friendly [jupyter notebooks](https://gitlab.hzdr.de/datahub/marehub/ag-videosimages/software/mar-iqt/-/tree/master/jupyter) that make extensive use of the python package.
This diff is collapsed.
%% Cell type:markdown id:comic-steal tags:
# Curation Overview
This notebook provides an overview of the curation process in your data folders. It stores results in the `../files/<project>_curation-cache.yaml` cache files so that subsequent runs of this notebook will be faster. You can clear the cache and rescan everything by setting the `rescan` variable in the next cell to `True` and then running the notebook.
%% Cell type:code id:hungry-balloon tags:
``` python
# Set to True to ignore the cached scan results and rescan all data folders
# (see the cache handling further down in this notebook).
rescan = False
```
%% Cell type:code id:emotional-haiti tags:
``` python
#################################################################################################################
### You should not see - and not modify (!) - this cell, unless you are sure what you are doing! Just run it. ###
#################################################################################################################
import mariqt.processing.files as miqtpf
cfg = miqtpf.cfgFileLoadProjectDefault()
import os
import mariqt.core as miqtc

# Validate each configured base path: it has to exist, be a directory and contain data.
all_good = True
for bp in cfg['data']['base_paths']:
    if not os.path.exists(bp):
        all_good = False
        print("Issue: Base path",bp,"not found")
    elif not os.path.isdir(bp):
        all_good = False
        print("Issue: Base path",bp,"points to a file but we require a directory.")
    elif len(os.listdir(bp)) == 0:
        all_good = False
        print("Issue: No data available in base path",bp)
if all_good:
    print("It looks like your base path settings are good. There is data. Lets continue to start curating.")

# When DSHIP is configured as a navigation source, its export files / folders must also exist.
if "DSHIP" in cfg['navigation_data']['sources']:
    miqtc.assertExists(miqtpf.cfgValue(cfg,'navigation_data:sources:DSHIP:dship_all_device_operations_file'))
    miqtc.assertExists(miqtpf.cfgValue(cfg,'navigation_data:sources:DSHIP:dship_all_underwater_navigation_file'))
    miqtc.assertExists(miqtpf.cfgValue(cfg,'navigation_data:sources:DSHIP:dship_event_navigation_data_folder'))
print("All is good.")
```
%%%% Output: stream
Issue: Base path /volumes/project/ not found
%%%% Output: error
---------------------------------------------------------------------------
NameError Traceback (most recent call last)
<ipython-input-2-3a63bfd7ab9c> in <module>
25 # Check curation paths
26 if "DSHIP" in cfg['navigation_data']['sources']:
---> 27 miqtc.assertExists(miqtpf.cfgValue(cfg,'navigation_data:sources:DSHIP:dship_all_device_operations_file'))
28 miqtc.assertExists(miqtpf.cfgValue(cfg,'navigation_data:sources:DSHIP:dship_all_underwater_navigation_file'))
29 miqtc.assertExists(miqtpf.cfgValue(cfg,'navigation_data:sources:DSHIP:dship_event_navigation_data_folder'))
/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/mariqt/core.py in assertExists(path)
7 def assertExists(path):
8 if not os.path.exists(path):
----> 9 raise NameError("Could not find: " + path)
10
11 ### Asserts that a path string to a directory ends with a slash
NameError: Could not find: /Users/tschoening/dev/repos/mariqt-test/files/PRJ23_all-device-operations.dat
%% Cell type:code id:engaging-theology tags:
``` python
#################################################################################################################
### You should not see - and not modify (!) - this cell, unless you are sure what you are doing! Just run it. ###
#################################################################################################################
import copy
import yaml
import datetime

# Get list of events, expected to be formatted like so:
# events[device_operation] = {'code':<device acronym>,'actions':[{'action':<action>,'lat':<latitude>,'lon':<longitude>,'dep':<depth>,'utc':<timestamp>,...],'start':<start timestamp>}
if "DSHIP" in cfg['navigation_data']['sources']:
    import mariqt.sources.dship as miqtsd
    events = miqtsd.parseDSHIPDeviceOperationsOrEventsFile(miqtpf.cfgValue(cfg,'navigation_data:sources:DSHIP:dship_all_device_operations_file'))
    miqtsd.removeEventsByOtherCruises(events,cfg['cruise']['number'])
    miqtsd.renameEvents(events)
else:
    raise Exception("Can only process DSHIP events by now. Sorry.")

# These are the status information fields that will be collected for each event in the following
one_event_status = {"changed":False,"dir_exists":False,"event_exists":False,"doi":"","num_actions":0,"num_sensors":0,"has_gps_nav_raw":False,"has_gps_nav_cur":False,"has_usbl_nav_raw":False,"has_usbl_nav_cur":False,"has_protocol":False,
                    "raw_data_vol":0,"raw_data_num":0,"cur_data_vol":0,"cur_data_num":0,"prt_data_vol":0,"prt_data_num":0,"prd_data_vol":0,"prd_data_num":0,"ext_data_vol":0,"ext_data_num":0,
                    "has_images":False}
# How to map the path names of the folder convention to the short names used here in the script
path_to_key = {"external":"ext","raw":"raw","protocol":"prt","products":"prd","processed":"cur"}

# Check whether a cache file exists and shall be loaded
cache_file = "../files/" + cfg['project']['number'] + "_curation-cache.yaml"
if os.path.exists(cache_file) and not rescan:
    with open(cache_file,"r") as yaml_file:
        cache = yaml.safe_load(yaml_file)
    all_event_status = cache['events']
    print("Showing cached status from ",cache['date_created'])
    cache_unix = datetime.datetime.strptime(cache['date_created']+"+0000","%Y-%m-%d %H:%M:%S.%f%z").timestamp()
else:
    rescan = True
    all_event_status = {}
    # Bugfix: cache_unix is needed below for the mtime comparison even when no cache
    # was loaded; without it a rescan raised a NameError.
    cache_unix = 0

# Find all events
for event in events:
    if event not in all_event_status:
        all_event_status[event] = copy.deepcopy(one_event_status)
    all_event_status[event]['event_exists'] = True
    all_event_status[event]['num_actions'] = len(events[event]['actions'])

# Browse all the data base_paths folders and look for event subfolders
event_folders = {}
for path in cfg['data']['base_paths']:
    tmp_event_folders = os.listdir(path)  # Bugfix: was misspelled tmp_events_folders below
    for tmp_event in tmp_event_folders:
        if not tmp_event.startswith('.') and os.path.isdir(path+tmp_event):
            if not tmp_event in event_folders:
                event_folders[tmp_event] = [path]
            else:
                event_folders[tmp_event].append(path)  # Bugfix: was misspelled events_folders
            # Did we find events that are not known in the event files we opened earlier?
            # Bugfix: this membership check has to happen before the 'changed' flag is
            # written, otherwise unknown folders raised a KeyError.
            if tmp_event not in all_event_status:
                all_event_status[tmp_event] = copy.deepcopy(one_event_status)
                all_event_status[tmp_event]['changed'] = True
            else:
                # Bugfix: was indexed with the stale loop variable `event`.
                all_event_status[tmp_event]['dir_exists'] = True
            if os.path.getmtime(path+tmp_event) > cache_unix:
                all_event_status[tmp_event]['changed'] = True

if rescan:
    import mariqt.definitions as miqtd
    satellite_navigation_sensor = miqtpf.cfgValue(cfg,['navigation:sources:DSHIP:satellite_navigation:sensor_equipment_id'])
    underwater_navigation_sensor = miqtpf.cfgValue(cfg,['navigation:sources:DSHIP:underwater_navigation:sensor_equipment_id'])
    for event in event_folders:
        # Find sensors for event
        event_sensors = []
        for base_folder in event_folders[event]:
            tmp_sensors = os.listdir(base_folder+event)
            for tmp_sensor in tmp_sensors:
                if tmp_sensor[0] != "." and tmp_sensor not in event_sensors:
                    if tmp_sensor == "protocol":
                        all_event_status[event]['has_protocol'] += (len([f for f in os.listdir(base_folder+event+"/protocol") if not f.startswith('.')]) > 0)
                    else:
                        event_sensors.append(tmp_sensor)
        all_event_status[event]['num_sensors'] += len(event_sensors)
        # Iterate through all sensors and fetch file information
        for sensor in event_sensors:
            data_volume = {}
            for sub in path_to_key:
                data_volume[path_to_key[sub]+"_data_num"] = 0
                data_volume[path_to_key[sub]+"_data_vol"] = 0
            for base_folder in event_folders[event]:
                for sub in path_to_key:
                    tmp = miqtpf.recursiveFileStat(base_folder+event+"/"+sensor+"/"+sub+"/")
                    data_volume[path_to_key[sub]+"_data_num"] += tmp['num']
                    data_volume[path_to_key[sub]+"_data_vol"] += tmp['size']
                    all_event_status[event][path_to_key[sub]+"_data_num"] += tmp['num']
                    all_event_status[event][path_to_key[sub]+"_data_vol"] += tmp['size']
                tmp = miqtpf.recursiveFileStat(base_folder+event+"/"+sensor+"/raw/",miqtd.image_types)
                if tmp['num'] > 0:
                    all_event_status[event]['has_images'] = True
            if sensor == satellite_navigation_sensor:
                all_event_status[event]['has_gps_nav_raw'] = data_volume["raw_data_num"] > 0 or all_event_status[event]['has_gps_nav_raw']
                all_event_status[event]['has_gps_nav_cur'] = data_volume["cur_data_num"] > 0 or all_event_status[event]['has_gps_nav_cur']
            elif sensor == underwater_navigation_sensor:
                all_event_status[event]['has_usbl_nav_raw'] = data_volume["raw_data_num"] > 0 or all_event_status[event]['has_usbl_nav_raw']
                all_event_status[event]['has_usbl_nav_cur'] = data_volume["cur_data_num"] > 0 or all_event_status[event]['has_usbl_nav_cur']
    # Bugfix: store the *complete* status dict (was: only the last event's entry, which broke
    # the cache['events'] lookup above) and store the creation date as a string so that the
    # strptime call in the load branch can parse it again.
    with open(cache_file,"w") as yaml_file:
        yaml.dump({'date_created':str(datetime.datetime.now()),'events':all_event_status},yaml_file)
```
%%%% Output: error
---------------------------------------------------------------------------
ModuleNotFoundError Traceback (most recent call last)
<ipython-input-8-39fd363a0933> in <module>
10 # events[device_operation] = {'code':<device acronym>,'actions':[{'action':<action>,'lat':<latitude>,'lon':<longitude>,'dep':<depth>,'utc':<timestamp>,...],'start':<start timestamp>}
11 if "DSHIP" in cfg['navigation_data']['sources']:
---> 12 import mariqt.sources.dship as miqtsd
13
14 events = miqtsd.parseDSHIPDeviceOperationsOrEventsFile(miqtpf.cfgValue(cfg,'navigation_data:sources:DSHIP:dship_all_device_operations_file'))
/Library/Frameworks/Python.framework/Versions/3.6/lib/python3.6/site-packages/mariqt/sources/dship.py in <module>
6
7 import mariqt.geo as miqtg
----> 8 import marqit.source.dship_settings
9
10 def addEndToDSHIPEventsByLastActionBeforeNextEvent(dship_events):
ModuleNotFoundError: No module named 'marqit'
%% Cell type:code id:executed-surge tags:
``` python
#################################################################################################################
### You should not see - and not modify (!) - this cell, unless you are sure what you are doing! Just run it. ###
#################################################################################################################
import pandas as pd
pd.set_option('display.max_rows', None)

def color_false_red(val):
    # Highlight "empty" status values (False / "" / "0") in red so curation gaps stand out.
    color = 'red' if val == False or val == "" or val == "0" else 'black'
    return 'color: %s' % color

# Work on a copy so only the display table gets human-readable volume strings while the
# cached status keeps its numeric byte counts.
print_copy = copy.deepcopy(all_event_status)
for event in print_copy:
    for key in path_to_key:
        # Bugfix: the module `gmrcc` was never imported in this notebook (leftover from the
        # previous-generation package); humanReadable lives in mariqt.core (imported as miqtc).
        print_copy[event][path_to_key[key]+"_data_vol"] = miqtc.humanReadable(print_copy[event][path_to_key[key]+"_data_vol"])
df = pd.DataFrame(print_copy).T
df.style.applymap(color_false_red)
```
%% Cell type:code id:introductory-military tags:
``` python
# Demo of mariqt path handling: parse a data path following the
# <base>/<PRJ>/<EVENT>/<SENSOR>/<TYPE>/ folder convention and inspect its components.
import mariqt.paths as miqtp
p = miqtp.Path("/Volumes/","/SO268/SO268-1_21-1_OFOS/SO_CAM-1_Photo_OFOS/raw/")
p.dump()
print(p.str())
print(p.validDataPath())
# Replacing the SENSOR component truncates the path after the replaced part
# (see output below: the TYPE part is gone), so the result is no longer a valid data path.
p1 = p.replaceCreatePath('SENSOR','foobar')
print(p1.str())
print(p1.type())
print(p1.validDataPath())
```
%%%% Output: stream
Basepath: /Volumes/ Path: {<dp.PRJ: 0>: 'SO268', <dp.GEAR: 1>: '', <dp.EVENT: 2>: 'SO268-1_21-1_OFOS', <dp.SENSOR: 3>: 'SO_CAM-1_Photo_OFOS', <dp.TYPE: 4>: 'raw'}
/Volumes/SO268/SO268-1_21-1_OFOS/SO_CAM-1_Photo_OFOS/raw/
True
/Volumes/SO268/SO268-1_21-1_OFOS/foobar/
False
%% Cell type:code id:thousand-establishment tags:
``` python
# Demo of provenance tracking: write a provenance YAML file that records which executable
# (name, version, parameters) produced a data product, chained to a previous provenance file.
import mariqt.provenance as miqto
name = "SO268-1_21-1_OFOS_SO_CAM-1"
executable = "pounding"
version = "1.1"
# NOTE(review): the earlier cell uses p.replaceCreatePath(); confirm Path.replace() exists too.
path = p.replace("TYPE","intermediate")
params = [{'name':'foobar','value':'/Volumes/SO268/SO268-1_21-1_OFOS/SO_CAM-1_Photo_OFOS/products/SO268-1_21-1_OFOS_SO_CAM-1_Photo_OFOS_MeFex-proxy.png'}]
miqto.createProvenanceFile(path,name,executable,version,params,prev_provenance='/Volumes/SO268/SO268-1_21-1_OFOS/SO_CAM-1_Photo_OFOS/intermediate/SO268-1_21-1_OFOS_SO_CAM-1_provenance-pounding-20210409_110232.yaml')
```
%%%% Output: stream
Writing to /Volumes/SO268/SO268-1_21-1_OFOS/SO_CAM-1_Photo_OFOS/intermediate/SO268-1_21-1_OFOS_SO_CAM-1_provenance-pounding-20210409_111146.yaml
{'provenance': [{'action': {'executable': {'name': 'pounding', 'version': '1.1'}, 'parameter': {}}, 'hash': None, 'time': '20210409 11:02:32.321548'}, {'action': {'executable': {'name': 'pounding', 'version': '1.1'}, 'parameter': [{'name': 'foobar', 'value': '/Volumes/SO268/SO268-1_21-1_OFOS/SO_CAM-1_Photo_OFOS/products/SO268-1_21-1_OFOS_SO_CAM-1_Photo_OFOS_MeFex-proxy.png', 'hash': '515b23ba963ff643787dadec11f27314'}]}, 'hash': '5881ff77f089f6612954874b84c11b69', 'time': '2021-04-09 11:11:46.157388'}]}
%% Cell type:code id:connected-april tags:
``` python
```
%% Cell type:code id:exclusive-knowing tags:
``` python
```
import os
import gmrcc.helper
import gmrcc.navprocessing
import gmrcc.essential
# File name suffixes by which the known OFOP output files (position, protocol,
# observation, ROV log, screen grab image) are recognised in a folder.
known_ofop_files = ["_posi.txt","_prot.txt","_obser.txt","_ROV.txt","_image_#1.bmp"]
def applyDataStructure(path):
    """Sort the loose files in *path* into the folder convention sub-folders.

    Creates the convention sub-folders if missing, then moves every file into
    "data_products", "external" or (default) "raw" depending on its name.
    """
    assertExists(path)
    path = validateDir(path)
    files = os.listdir(path)

    # Name fragments that route a file away from the default "raw" target.
    external = ["waypoint",".jpeg",".map",".jgw","button"]
    products = [".bmp"]

    # Make sure all convention sub-folders exist.
    create = ["raw","external","intermediate","processed","data_products"]
    for sub in create:
        if not os.path.exists(path+sub):
            os.mkdir(path+sub,0o775)

    for file in files:
        if file.startswith(".") or file in create:
            continue
        lowered = file.lower()
        target = "raw"
        for fragment in products:
            if fragment in lowered:
                target = "data_products"
                break
        # "external" wins over "data_products" since it is checked last.
        for fragment in external:
            if fragment in lowered:
                target = "external"
        os.rename(path+file, path+"/"+target+"/"+file)
### Find OFOP annotation files in a folder
def findOFOPDataFiles(path):
if not os.path.exists(path):
return {}
ofops = {}
files = os.listdir(path)
for file in files:
if file.startswith('.'):
continue
for tmp in known_ofop_files:
if tmp in file:
ofop_name = file.replace(tmp,"")
if ofop_name not in ofops:
ofops[ofop_name] = []
ofops[ofop_name].append(file)
return ofops
def validOFOPDataFiles(ofop_name, ofop_file_list, base_path, position_only = False):
    """Check whether an OFOP file set is complete and non-empty.

    Returns a tuple (ok, message): ok is False when required files are missing,
    otherwise True together with a note about empty or additional files.
    """
    required_ofop = ["_posi.txt"] if position_only else ["_posi.txt","_prot.txt","_obser.txt"]
    unknown = []
    empty = []
    for file in ofop_file_list:
        size = os.stat(base_path+file).st_size
        suffix = file.replace(ofop_name,"")
        if suffix not in known_ofop_files:
            unknown.append(file)
        elif size == 0:
            empty.append(file)
        elif suffix in required_ofop:
            # Tick off each required suffix as it is encountered.
            required_ofop.remove(suffix)
    if required_ofop:
        return False, "Required files are missing: " + ",".join(required_ofop)
    if empty:
        return True, "Files are empty: " + ",".join(empty)
    return True, "Further files: " + ",".join(unknown)
def findValidPosiColumns(file_path):
    """Return the names of columns in a tab-separated OFOP _posi.txt file that
    contain varying numeric values.

    The first line is taken as the header. A column qualifies when at least one
    cell parses as a float and its observed minimum and maximum differ.
    """
    columns = {}
    header_cols = []
    first = True
    # Bugfix: use a context manager so the file handle is closed again.
    with open(file_path,"r",errors="ignore") as file:
        for line in file:
            if first:
                header_cols = line.split("\t")
                for idx, col in enumerate(header_cols):
                    # Bugfix: min/max start as None. The former False sentinel compared
                    # equal to a legitimate value of 0.0 (0.0 == False) and reset the
                    # statistics, dropping valid columns.
                    columns[idx] = {'name':col,'index':idx,'min':None,'max':None}
                first = False
            else:
                cols = line.split("\t")
                for idx in range(0,min(len(header_cols),len(cols))):
                    if cols[idx] == "":
                        continue
                    try:
                        val = float(cols[idx])
                    except ValueError:
                        continue
                    if columns[idx]['min'] is None:
                        columns[idx]['min'] = val
                        columns[idx]['max'] = val
                    else:
                        columns[idx]['min'] = min(columns[idx]['min'],val)
                        columns[idx]['max'] = max(columns[idx]['max'],val)
    ret_cols = []
    for idx in columns:
        if columns[idx]['min'] is not None and columns[idx]['min'] != columns[idx]['max']:
            ret_cols.append(columns[idx]['name'])
    return ret_cols
#max_col_idx,cis = acmw_hlp.getColumnIndicesFromFile(file,cols)
#date_fmt = "%m/%d/%Y %H:%M:%S"
#cis['utc'] = str(cis['date'])+";;;"+str(cis['time'])
#Date Time PC_UTC PC_Time SHIP_Lon SHIP_Lat SHIP_SOG SHIP_COG SHIP_Hdg Water_Depth REF_Lon REF_Lat SHIP_Roll SHIP_Pitch SHIP_Heave SUB1_Lon SUB1_Lat SUB1_Depth SUB1_USBL_Depth SUB1_Altitude SUB1_COG SUB1_Hdg SUB1_Roll SUB1_Pitch SUB1_Camera_Pan SUB1_Battery SUB1_Magnetic_field_strength SUB2_Lon SUB2_Lat SUB2_Depth SUB2_USBL_Depth SUB2_Altitude SUB3_Lon SUB3_Lat SUB3_USBL_Depth SUB4_Lon SUB4_Lat SUB4_USBL_Depth
### Type can be "obser" or "prot"
### Multi-row replacements handle subsequent annotations belonging to the same object:
### they are defined by mrr['pre'] = first label, mrr['cur'] = second label, mrr['rep'] = replacement of first label.
### The second annotation will be ignored and thus its label not returned!
def getOFOPAnnotations(file_path,type="obser",multi_row_replacements=None):
    """Parse annotations from an OFOP observation ("obser") or protocol ("prot") file.

    Returns a list of [timestamp, label, id] entries. Rows that cannot be parsed
    are skipped silently. Raises ValueError for an unknown *type*.
    NOTE(review): the last annotation row of a file is never emitted since entries
    are only appended when the following row is read - kept as original behavior.
    """
    # Bugfix: avoid a mutable default argument; the former default is recreated per call.
    if multi_row_replacements is None:
        multi_row_replacements = [{'pre':'*','cur':'delete','rep':''}]
    # Bugfix: use a context manager so the file handle is closed again.
    with open(file_path,"r",errors="ignore",encoding="utf-8") as file:
        if type == "prot":
            # Skip protocol header rows
            for line in file:
                if line[0:20] == "--------------------":
                    break
            cols = {'date':'#Date','time':'Time','obs':'Image-Video Path'}
        elif type == 'obser':
            cols = {'date':'#Date','time':'Time','id':'ID_Number','name':'ID_Name'}
        else:
            # Bugfix: die() was undefined (NameError at runtime); raise a proper exception.
            raise ValueError("Unknown OFOP annotation type: " + type)
        max_col_idx,cis = gmrcc.helper.getColumnIndicesFromFile(file,cols)
        date_fmt = "%m/%d/%Y %H:%M:%S"
        cis['utc'] = str(cis['date'])+";;;"+str(cis['time'])
        annotations = []
        prev_label = ""
        prev_timestamp = -1
        prev_double_row = False
        prev_id = ""
        for line in file:
            try:
                row,timestamp = gmrcc.navprocessing.extractColumnsFromLine(line,cis,date_fmt)
                if type == "prot":
                    tmp = row['obs'].split("]")
                    label = tmp[1].strip()
                    id = tmp[0][1:].strip()
                else:
                    id = row['id']
                    label = row['name'].replace(id,"").strip()
                    id = id.replace("]","").replace("[","").strip()
                # Handles double-click annotations and deletes
                double_row = False
                for mrr in multi_row_replacements:
                    if (mrr['pre'] == "*" or mrr['pre'] == prev_label) and (mrr['cur'] == "*" or mrr['cur'] == label):
                        prev_label = mrr['rep']
                        double_row = True
                # Emit the previous row now that we know it is not the first half of
                # a multi-row annotation.
                if not prev_label == "" and not prev_timestamp < 0 and not prev_double_row:
                    annotations.append([prev_timestamp,prev_label,prev_id])
                prev_label = label
                prev_timestamp = timestamp
                prev_double_row = double_row
                prev_id = id
            except Exception:
                continue
        return annotations
import os
import sys
import uuid
import hashlib
### Asserts that a file/folder exists and otherwise terminates the program
def assertExists(path):
    """Raise a NameError when *path* does not point to an existing file or folder."""
    if os.path.exists(path):
        return
    raise NameError("Could not find: " + path)
### Asserts that a path string to a directory ends with a slash
def assertSlash(path):
    """Return *path* with a trailing slash appended when it names an existing
    directory; anything that is not a directory is returned unchanged."""
    if not os.path.isdir(path):
        return path
    return path if path.endswith("/") else path + "/"
### Turns a number > 0 (int/float) into a shorter, human-readable string with a size character (k,M,G,...)
def humanReadable(val,suffixes=['k','M','G','T']):
    """Shorten a positive number to a string with a metric suffix.

    Values <= 1000 are returned as plain str(val); larger values are divided by
    1000 until they fit, rounded to an integer and suffixed, e.g. 1500 -> "2k".
    """
    idx = -1
    # Bugfix: stop dividing once the largest suffix is reached so values beyond
    # the 'T' range no longer raise an IndexError.
    while val > 1000 and idx < len(suffixes) - 1:
        val /= 1000
        idx += 1
    if idx < 0:
        return str(val)
    return str(round(val)) + suffixes[idx]
### Returns a random UUID (i.e. a UUID version 4)
def uuid4():
    """Generate and return a fresh random UUID (version 4)."""
    return uuid.uuid4()
### Returns the SHA256 hash of the file at path
def sha256HashFile(path):
    """Compute the hex-encoded SHA256 digest of the file at *path*, reading it
    in 4 KiB chunks to keep memory usage constant."""
    digest = hashlib.sha256()
    with open(path,"rb") as f:
        while True:
            chunk = f.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
def md5HashFile(path):
    """Compute the hex-encoded MD5 digest of the file at *path*, reading it
    in 4 KiB chunks to keep memory usage constant."""
    digest = hashlib.md5()
    with open(path, "rb") as f:
        while True:
            chunk = f.read(4096)
            if not chunk:
                break
            digest.update(chunk)
    return digest.hexdigest()
### A dictionary holding various header field names to store 4.5D navigation information in the form of:
### t (utc time), x (longitude), y (latitude), z (depth: below sea level), a (altitude: above seafloor)
pos_header = {
# Field/column name definition for internally handling this kind of t,y,x,z,h position data
"mariqt":{
'utc':'utc', # YYYY-MM-DD HH:ii:ss.sssss+0000 (UTC!!!) -> t-axis
'lat':'lat', # Decimal degrees, WGS84 / EPSG4362 -> y-axis
'lon':'lon', # Decimal degrees, WGS84 / EPSG4326 -> x-axis
'dep':'dep', # Depth of the signal, sample, platform, ... *in the water* -> z-axis, positive when submerged, negative when in air
'hgt':'hgt' # Height above the seafloor -> relative measure!
},
# Definition of field/column names according to the iFDO specification:
# https://gitlab.hzdr.de/datahub/marehub/ag-videosimages/metadata-profiles-fdos/-/blob/master/MareHub_AGVI_iFDO.md
"ifdo":{'utc':'image-datetime','lat':'image-latitude','lon':'image-longitude','dep':'image-depth','hgt':'image-meters-above-ground'},
# Definition of field/column names according to the "Acquisition, Curation and Management Workflow"
# for marine image data https://www.nature.com/articles/sdata2018181
"acmw":{'utc':'SUB_datetime','lat':'SUB_latitude','lon':'SUB_longitude','dep':'SUB_depth','hgt':'SUB_distance'},
# Definition of field/colum names as they occur in a DSHIP export file
# for RV Sonne posidonia beacons
"posidonia_1":{'utc':'date time','lat':'USBL.PTSAG.1.Latitude','lon':'USBL.PTSAG.1.Longitude','dep':'USBL.PTSAG.1.Depth'},
"posidonia_2":{'utc':'date time','lat':'USBL.PTSAG.2.Latitude','lon':'USBL.PTSAG.2.Longitude','dep':'USBL.PTSAG.2.Depth'},
"posidonia_4":{'utc':'date time','lat':'USBL.PTSAG.4.Latitude','lon':'USBL.PTSAG.4.Longitude','dep':'USBL.PTSAG.4.Depth'},
"posidonia_5":{'utc':'date time','lat':'USBL.PTSAG.5.Latitude','lon':'USBL.PTSAG.5.Longitude','dep':'USBL.PTSAG.5.Depth'},
# for RV Sonne itself (GPS)
"SO":{'utc':'date time','lat':'SYS.STR.PosLat','lon':'SYS.STR.PosLon'},
# for RV Maria S Merian sonardyne beacons
"sonardyne_2104":{'utc':'date time','lat':'Ranger2.PSONLLD.2104.position_latitude','lon':'Ranger2.PSONLLD.2104.position_longitude','dep':'Ranger2.PSONLLD.2104.depth'},
"sonardyne_2105":{'utc':'date time','lat':'Ranger2.PSONLLD.2105.position_latitude','lon':'Ranger2.PSONLLD.2105.position_longitude','dep':'Ranger2.PSONLLD.2105.depth'},
# for RV Maria S Metian itself (GPS)
"MSM":{'utc':'date time','lat':'SYS.STR.PosLat','lon':'SYS.STR.PosLon'},
# Definition of field/column names according to the DSM Workbench
"workbench": {},
# Definition of field/column names required for assigning EXIF infos to a JPG file
"exif":{'utc':'CreateDate','lat':'GPSLatitude','lon':'GPSLongitude','dep':'GPSAltitude','hgt':'GPSDestDistance'},
# Definition of field/column names according to the AWI O2A GeoCSV standard
# https://confluence.digitalearth-hgf.de/display/DM/O2A+GeoCSV+Format
# Warning: GeoCSVs need an additional WKT column: geometry [point] with values like: POINT(latitude longitude)
# Warning: depth and altitude are guessed as i could not find it in the documentation
"o2a":{'utc':'datetime','lat':'latitude [deg]','lon':'longitude [deg]','dep':'depth [m]','hgt':'altitude [m]'},
# Definition of field/column names according to the OFOP software
<