Skip to content

Commit

Permalink
#15: start revising broker and sar data search
Browse files Browse the repository at this point in the history
  • Loading branch information
mortenwh committed Apr 18, 2024
1 parent 74452a4 commit f3d5781
Show file tree
Hide file tree
Showing 8 changed files with 185 additions and 351 deletions.
1 change: 1 addition & 0 deletions broker.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,6 +17,7 @@

os.environ.setdefault('GDAL_ENABLE_DEPRECATED_DRIVER_DODS','YES')


class Broker():
"""
This class search for Sentinel-1 data from NBS and Arome-Arctic wind data
Expand Down
8 changes: 0 additions & 8 deletions config.json

This file was deleted.

23 changes: 23 additions & 0 deletions config.yml
Original file line number Diff line number Diff line change
@@ -0,0 +1,23 @@
sar_data:
outpath: /home/froded/data/sarwind/
scp: False
scp_path: /lustre/storeB/project/fou/fd/project/sar-wind/products/
scp_host: ppi-clogin-a1.met.no
scp_user: froded
id_rsa: /home/ubuntu/.ssh/id_rsa
BINDIR: /home/fou-fd-oper/software/sarwind/met-sar-vind/sarwind
BASEDIR: /lustre/storeA/project/fou/fd/project/sarwind
BASEDIR: /home/froded/data/sarwind  # NOTE(review): duplicate key — silently overrides the storeA BASEDIR above in most YAML loaders; keep only one
RAWDIR: raw # BASEDIR/raw
TMPDIR: tmp # BASEDIR/tmp
PNGDIR: png # BASEDIR/png
NETCDFDIR: netcdf # BASEDIR/netcdf
LOGDIR: log # BASEDIR/log
MODELDIR: /lustre/storeB/project/metproduction/products/arome_arctic
USER_NBS:
PASSWD_NBS:
url: https://colhub.met.no/
typ: GRD
mode: EW


199 changes: 91 additions & 108 deletions sardata/sardata.py
Original file line number Diff line number Diff line change
@@ -1,85 +1,72 @@
# -*- coding: utf-8 -*-
"""
Spyder Editor
This is a temporary script file.
"""

import os
import logging
import subprocess
from datetime import datetime
from datetime
from pytz import timezone
from xml.dom.minidom import parse

BINDIR = '/home/fou-fd-oper/software/sarwind/met-sar-vind/sarwind'

# Data paths
BASEDIR = '/lustre/storeA/project/fou/fd/project/sarwind'
BASEDIR = '/home/froded/data/sarwind'
RAWDIR = '%s/raw' % BASEDIR
TMPDIR = '%s/tmp' % BASEDIR
PNGDIR = '%s/png' % BASEDIR
NETCDFDIR = '%s/netcdf' % BASEDIR
LOGDIR = '%s/log' % BASEDIR
MODELDIR = '/lustre/storeB/project/metproduction/products/arome_arctic'

USER_NBS = ''
PASSWD_NBS = ''


class SARData():

"""
A class for downloading Sentinel-1 SAR data from NBS (https://colhub.met.no)
"""A class for downloading Sentinel-1 SAR SAFE files from NBS
(https://colhub.met.no).
Parameters
-----------
date_str : string
A date string on format "YYYY-MM-DD" specifying date data to download.
If not specified the current day will be used.
ULX : float/int
Upper left corner longitude in decimal degree
ULY : float/int
Upper left corner latitude in decimal degree
URX : float/int
Upper right corner longitude in decimal degree
URY : float/int
Upper right corner latitude in decimal degree
LLX : float/int
Lower left corner longitude in decimal degree
LLY : float/int
Lower left corner latitude in decimal degree
LRX : float/int
Lower Right corner longitude in decimal degree
LRY : float/int
Lower Right corner latitude in decimal degree
date : datetime.datetime
Specifies the date (UTC) of the data search. If not specified,
the current day will be used.
ULX : float/int
Upper left corner longitude in decimal degree
ULY : float/int
Upper left corner latitude in decimal degree
URX : float/int
Upper right corner longitude in decimal degree
URY : float/int
Upper right corner latitude in decimal degree
LLX : float/int
Lower left corner longitude in decimal degree
LLY : float/int
Lower left corner latitude in decimal degree
LRX : float/int
Lower Right corner longitude in decimal degree
LRY : float/int
Lower Right corner latitude in decimal degree
Example of use:
Data will be downloaded to RAWDIR and uncompressed.
New data will be listed in variable sar_safe_list
---
s = SARData() # Init
s.get_NBS_ColhubData() # Download and uncompress
s.sar_safe_list # List of data available in RAWDIR
"""
def __init__(self, date_str=None, LLX=-19.7, LLY=63.8, LRX=70.0, LRY=63.8, URX=70.0,
URY=82.3, ULX=-19.7, ULY=82.3):
if not isinstance(date_str, str):
now = datetime.now()
yyyy = now.year
mm = now.month
dd = now.day
else:
(yyyy, mm, dd) = date_str.split('-')
Data will be downloaded to RAWDIR and uncompressed.
New data will be listed in variable sar_safe_list
---
s = SARData() # Init
s.get_NBS_ColhubData() # Download and uncompress
s.sar_safe_list # List of data available in RAWDIR
"""

self.year = yyyy
self.month = mm
self.day = dd
if not isinstance(LLX, (float, int)) and not isinstance(LLY, (float, int)) \
and not isinstance(LRX, (float, int)) and not isinstance(LRY, (float, int)) \
and not isinstance(ULX, (float, int)) and not isinstance(ULY, (float, int)) \
and not isinstance(URX, (float, int)) and not isinstance(URY, (float, int)):
raise ValueError('Input parameter for corner coordinates must be a number')
def __init__(self, date=None, LLX=-180., LLY=-90., LRX=180., LRY=-90., URX=180., URY=90.,
ULX=-180., ULY=90.):
self.date = date
if self.date is None:
self.date = datetime.datetime.now(timezone("utc"))

# Check input
if not isinstance(time, datetime.datetime):
raise ValueError("Input time must be of type datetime.datetime")
if not isinstance(LLX, (float, int)):
raise ValueError("Input parameter for corner coordinates must be a number")
if not isinstance(LLY, (float, int)):
raise ValueError("Input parameter for corner coordinates must be a number")
if not isinstance(LRX, (float, int)):
raise ValueError("Input parameter for corner coordinates must be a number")
if not isinstance(LRY, (float, int)):
raise ValueError("Input parameter for corner coordinates must be a number")
if not isinstance(ULX, (float, int)):
raise ValueError("Input parameter for corner coordinates must be a number")
if not isinstance(ULY, (float, int)):
raise ValueError("Input parameter for corner coordinates must be a number")
if not isinstance(URX, (float, int)):
raise ValueError("Input parameter for corner coordinates must be a number")
if not isinstance(URY, (float, int)):
raise ValueError("Input parameter for corner coordinates must be a number")

self.ULX = ULX
self.ULY = ULY
Expand All @@ -90,25 +77,24 @@ def __init__(self, date_str=None, LLX=-19.7, LLY=63.8, LRX=70.0, LRY=63.8, URX=7
self.URX = URX
self.URY = URY

print('Look for SAR from date: %s-%s-%s' % (self.year, self.month, self.day))
logging.info("Searching for SAR from %s" % self.time.date().isoformat())

def uncompress_zip(self, proclist):
""" Uncompress infile if needed
"""
sar_safe_list = []
for infile in proclist:
if os.path.isfile(infile):
print('\n########################################################')
print('## Uncompress if needed infile: %s' % (infile))
tmpstr = infile.split('/')[-1]
dstfile = '%s/%s.SAFE' % (RAWDIR, tmpstr.split('.')[0])
print(dstfile)
logging.debug("Uncompress if needed infile: %s" % (infile))
tmpstr = infile.split("/")[-1]
dstfile = "%s/%s.SAFE" % (RAWDIR, tmpstr.split(".")[0])
logging.debug(dstfile)
if not os.path.isdir(dstfile):
print('\nfile does not exists\n')
if infile.find('.zip') != -1:
cmd = '/usr/bin/unzip '+infile+' -d '+RAWDIR
print('\nStart uncompressing\n')
print(cmd)
logging.debug("\nfile does not exists\n")
if infile.find(".zip") != -1:
cmd = "/usr/bin/unzip "+infile+" -d "+RAWDIR
logging.debug("\nStart uncompressing\n")
logging.debug(cmd)
subprocess.call(cmd, shell=True)
if os.path.isdir(dstfile):
sar_safe_list.append(dstfile)
Expand All @@ -123,34 +109,31 @@ def get_NBS_ColhubData(self):
# Returns a list of available products to be processed.
##################################################

datestr = ("%04d%02d%02d" % (self.year, self.month, self.day))
startDate = '%04d-%02d-%02dT02:00:00.000Z' % (self.year, self.month, self.day)
stopDate = '%04d-%02d-%02dT09:30:59.999Z' % (self.year, self.month, self.day)
datestr = ("%04d%02d%02d" % (self.date.year, self.date.month, self.date.day))
startDate = "%04d-%02d-%02dT02:00:00.000Z" % (self.date.year, self.date.month, self.date.day)
stopDate = "%04d-%02d-%02dT09:30:59.999Z" % (self.date.year, self.date.month, self.date.day)

##################################################
# Query relevant Sentinel-1 data from colhub.met.no
# Returns a list of available products to be processed.
##################################################
url = 'https://colhub.met.no/'
typ = 'GRD'
mode = 'EW'

# timeliness = 'NRT-3h'
area = '%f %f,%f %f,%f %f,%f %f,%f %f' % (self.LLX, self.LLY,
# timeliness = "NRT-3h"
area = "%f %f,%f %f,%f %f,%f %f,%f %f" % (self.LLX, self.LLY,
self.LRX, self.LRY,
self.URX, self.URY,
self.ULX, self.ULY,
self.LLX, self.LLY)

xmlFile = '%s/qres.xml' % (LOGDIR)
xmlFile = "%s/qres.xml" % (LOGDIR)

cmd = 'wget --no-check-certificate --user=%s --password=%s --output-document=%s ' % (
cmd = "wget --no-check-certificate --user=%s --password=%s --output-document=%s " % (
USER_NBS, PASSWD_NBS, xmlFile)
cmd = cmd + '\'%ssearch?q=(beginPosition:[%s TO %s] AND endPosition:[%s TO %s]) ' % (
cmd = cmd + "\"%ssearch?q=(beginPosition:[%s TO %s] AND endPosition:[%s TO %s]) " % (
url, startDate, stopDate, startDate, stopDate)
cmd = cmd + 'AND %s AND %s ' % (typ, mode)
cmd = cmd + "AND %s AND %s " % (typ, mode)
cmd = cmd + 'AND footprint:"Intersects(POLYGON((%s)))"&rows=100&start=0\' ' % (area)
print(cmd)
logging.debug(cmd)
subprocess.call(cmd, shell=True)

# Parse xmlFile to generate a list of available data
Expand All @@ -161,41 +144,41 @@ def get_NBS_ColhubData(self):
proclist_val = []

for node in entrys:
fname = '%s/%s.zip' % (
RAWDIR, node.getElementsByTagName('title')[0].childNodes[0].nodeValue)
fval = node.getElementsByTagName("link")[0].getAttribute('href').replace(
'$value', '\\$value')
fname = "%s/%s.zip" % (
RAWDIR, node.getElementsByTagName("title")[0].childNodes[0].nodeValue)
fval = node.getElementsByTagName("link")[0].getAttribute("href").replace(
"$value", "\\$value")
if fname.find(datestr) > -1:
proclist_tmp.append(fname)
proclist_val.append(fval)

# Pars the processedFile log and make a list of products to be downloaded
processedFile = '%s/processedFile%s' % (LOGDIR, datestr)
print(processedFile)
processedLog = ''
# Parse the processedFile log and make a list of products to be downloaded
processedFile = "%s/processedFile%s" % (LOGDIR, datestr)
logging.debug(processedFile)
processedLog = ""
proclist = []
if not os.path.isfile(processedFile):
fid = open(processedFile, 'w')
fid = open(processedFile, "w")
else:
fid = open(processedFile, 'r+')
fid = open(processedFile, "r+")
processedLog = fid.read()

for i in range(len(proclist_tmp)):
# Only search for first part of the string. Similar files
# may have different ending
fsub = proclist_tmp[i].split('/')[-1][:30]
fsub = proclist_tmp[i].split("/")[-1][:30]
val = proclist_val[i]

if (processedLog.find(fsub) == -1): # Check if file processed before
if (str(proclist).find(fsub) == -1): # Check if file already put to proclist ?
cmd = (
'wget --no-check-certificate --user=%s --password=%s '
'--output-document=%s ' % (USER_NBS, PASSWD_NBS, proclist_tmp[i]))
"wget --no-check-certificate --user=%s --password=%s "
"--output-document=%s " % (USER_NBS, PASSWD_NBS, proclist_tmp[i]))
cmd = cmd + '\"%s\" ' % (val)
subprocess.call(cmd, shell=True)
print(cmd)
logging.debug(cmd)
proclist.append(proclist_tmp[i])
fid.write('%s,' % (proclist_tmp[i]))
fid.write("%s," % (proclist_tmp[i]))
fid.close()

if len(proclist) > 0:
Expand Down
Loading

0 comments on commit f3d5781

Please sign in to comment.