-
Notifications
You must be signed in to change notification settings - Fork 1
/
Copy pathknmi2influxdb.py
executable file
·486 lines (437 loc) · 20.1 KB
/
knmi2influxdb.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
#!/usr/bin/env python3
#
# # About
#
# Convert KNMI hourly CSV data to influxdb line protocol.
#
# Input can be either a KNMI data file, or the script can query data directly
# Output can be either a file, or a URI to an influxdb server to push data directly.
#
# # Quick start
#
# /usr/bin/python3 knmi2influxdb.py --time actual --station 260
#
# # References
#
# - https://www.knmi.nl/kennis-en-datacentrum/achtergrond/data-ophalen-vanuit-een-script
# - http://projects.knmi.nl/klimatologie/uurgegevens/
# - https://data.knmi.nl/download/Actuele10mindataKNMIstations/1
#
#
# # KNMI stations
# id name
# 210 Valkenburg
# 215 Voorschoten
# 225 IJmuiden
# 235 De Kooy
# 240 Schiphol
# 242 Vlieland
# 249 Berkhout
# 251 Hoorn (Terschelling)
# 257 Wijk aan Zee
# 258 Houtribdijk
# 260 De Bilt
# 265 Soesterberg
# 267 Stavoren
# 269 Lelystad
# 270 Leeuwarden
# 273 Marknesse
# 275 Deelen
# 277 Lauwersoog
# 278 Heino
# 279 Hoogeveen
# 280 Eelde
# 283 Hupsel
# 286 Nieuw Beerta
# 290 Twenthe
# 310 Vlissingen
# 319 Westdorpe
# 323 Wilhelminadorp
# 330 Hoek van Holland
# 340 Woensdrecht
# 344 Rotterdam
# 348 Cabauw
# 350 Gilze-Rijen
# 356 Herwijnen
# 370 Eindhoven
# 375 Volkel
# 377 Ell
# 380 Maastricht
# 391 Arcen
import urllib.request
import requests
import csv
import argparse
import datetime
import netCDF4
import logging
import logging.handlers
import time
import yaml
#DEFAULTQUERY='test,src=outside_knmi{STN} wind={DD},windspeed={FF:.1f},temp={T:.1f},irrad={Q:.2f},rain={RH:.1f} {DATETIME}'
#DEFAULTQUERY='temperaturev2 outside_knmi{STN}={T:.1f} {DATETIME}{NEWLINE}weatherv2 rain_duration_knmi{STN}={DR:.1f},rain_qty_knmi{STN}={RH:.1f},wind_speed_knmi{STN}={FF:.1f},wind_gust_knmi{STN}={FX:.1f},wind_dir_knmi{STN}={DD} {DATETIME}{NEWLINE}energyv2 irradiance_knmi{STN}={Q:.0f} {DATETIME}'
DEFAULTQUERY='temperaturev2 outside_knmi{STN}={T:.1f} {DATETIME}'
KNMISTATION=260 # KNMI station for getting live data. See http://projects.knmi.nl/klimatologie/uurgegevens/
KNMIURI = 'https://www.daggegevens.knmi.nl/klimatologie/uurgegevens'
# Required for graceful None formatting, sometimes KNMI data has null entries,
# but influxdb does not recognize this. We solve this by rendering None and
# then removing those fields
# https://stackoverflow.com/questions/20248355/how-to-get-python-to-gracefully-format-none-and-non-existing-fields
import string
class PartialFormatter(string.Formatter):
    """
    string.Formatter that renders missing or None fields as a placeholder
    ('~~' by default) instead of raising.

    KNMI data sometimes has null entries, but influxdb does not recognize
    null values, so we render a marker and filter those fields out later
    (see convert_knmi()).
    """
    def __init__(self, missing='~~', bad_fmt='!!'):
        # missing: placeholder for absent/None values.
        # bad_fmt: placeholder for values that fail their format spec;
        #          pass None to re-raise instead.
        self.missing, self.bad_fmt = missing, bad_fmt

    def get_field(self, field_name, args, kwargs):
        # Handle a key not found: return (None, field_name) so that
        # format_field() substitutes the 'missing' placeholder.
        try:
            # Modernized: plain super() instead of Py2-style super(Class, self)
            val = super().get_field(field_name, args, kwargs)
        except (KeyError, AttributeError):
            val = None, field_name
        return val

    def format_field(self, value, spec):
        # Handle an invalid format: None renders as the 'missing' marker,
        # bad format specs render as bad_fmt (or re-raise when bad_fmt is None).
        if value is None:  # fixed: identity check instead of '==' on None
            return self.missing
        try:
            return super().format_field(value, spec)
        except ValueError:
            if self.bad_fmt is not None:
                return self.bad_fmt
            my_logger.exception("Exception occurred in format_field()")
            raise
def get_knmi_data_historical(knmistation=KNMISTATION, histrange=(21,)):
    """
    Fetch hourly historical KNMI data for one station.

    knmistation: KNMI station id (see list at top of file).
    histrange: either (days,) to fetch the last <days> days, or
               (start, end) with both strings formatted as YYYYMMDD.
    Returns the KNMI response as a list of lines, suitable for convert_knmi().
    Raises ValueError on a malformed histrange.
    """
    my_logger.debug("get_knmi_data_historical(knmistation={}, histrange={})".format(knmistation, histrange))
    if len(histrange) == 1:
        histdays = int(histrange[0])
        # Get data from last 21 days by default, explicitly determine end
        histstart = (datetime.datetime.now() - datetime.timedelta(days=histdays)).strftime("%Y%m%d")
        histend = (datetime.datetime.now()).strftime("%Y%m%d")
    elif len(histrange) == 2:
        histstart, histend = histrange
        # Try parsing histrange to see if formatting is OK (if strptime is
        # happy, KNMI should be happy)
        try:
            time.strptime(histstart, "%Y%m%d")
            time.strptime(histend, "%Y%m%d")
        except ValueError:
            # Narrowed from a bare except: only a parse failure belongs here
            my_logger.exception("Exception occurred")
            raise ValueError("histrange formatting not OK, should be YYYYMMDD")
    else:
        # No exception is in flight here, so use error() rather than
        # exception() (which would log a spurious 'NoneType: None' traceback)
        my_logger.error("histrange has wrong number of elements: {}".format(histrange))
        raise ValueError("histrange should be either [days] or [start, end] and thus have 1 or 2 elements.")
    knmiquery = "start={}01&end={}24&vars=ALL&stns={}".format(histstart, histend, knmistation)
    my_logger.info("get_knmi_data_historical(): getting query={}".format(knmiquery))
    # Query can take quite long, set long-ish timeout
    r = requests.post(KNMIURI, data=knmiquery, timeout=30)
    # Return line-wise iterable for next stage
    return r.text.splitlines()
def get_knmi_data_actual(api_key, knmistation=KNMISTATION, query=DEFAULTQUERY):
    """
    Fetch the latest 10-minute KNMI observations from the KNMI Data Platform
    and render them through the influxdb line-protocol template `query`.

    api_key: KNMI open-data API key (required).
    knmistation: KNMI station id; looked up as "06<id>" in the data file.
    query: PartialFormatter template, see DEFAULTQUERY for available fields.
    Returns a single-element list so output is compatible with convert_knmi()
    (which returns multiple lines).
    """
    my_logger.debug("get_knmi_data_actual(knmistation={}, query={})".format(knmistation, query))
    # Get real-time data from now, store to disk (netCDF https support is limited)
    # New approach 20200800: https://developer.dataplatform.knmi.nl/portal/example-scripts#list-10-files
    api_url = "https://api.dataplatform.knmi.nl/open-data"
    dataset_name = "Actuele10mindataKNMIstations"
    dataset_version = "2"

    # Get the latest files since one hour ago, which should show 6 files
    # (1 file per 10min). We take the last file of this, which should be the
    # newest. Guaranteed to work if files are available max 1 hour later.
    timestamp_now = datetime.datetime.utcnow()
    timestamp_one_hour_ago = timestamp_now - datetime.timedelta(hours=1)
    filename_one_hour_ago = f"KMDS__OPER_P___10M_OBS_L2_{timestamp_one_hour_ago.strftime('%Y%m%d%H%M')}.nc"
    list_files_response = requests.get(
        f"{api_url}/datasets/{dataset_name}/versions/{dataset_version}/files",
        headers={"Authorization": api_key},
        params={"maxKeys": 10, "startAfterFilename": filename_one_hour_ago})
    list_files = list_files_response.json()
    filename = list_files.get("files")[-1].get("filename")

    # Get temporary download URL for that file.
    # BUGFIX: interpolate the filename we just looked up into the endpoint —
    # previously `filename` was computed but never used.
    endpoint = f"{api_url}/datasets/{dataset_name}/versions/{dataset_version}/files/{filename}/url"
    my_logger.debug(f"get_knmi_data_actual: getting {endpoint}")
    get_file_response = requests.get(endpoint, headers={"Authorization": api_key})
    download_url = get_file_response.json().get("temporaryDownloadUrl")
    # Download once via urlretrieve. (A previous extra requests.get() of the
    # same URL discarded its response and doubled the transfer.)
    urllib.request.urlretrieve(download_url, "/tmp/KMDS__OPER_P___10M_OBS_L2.nc")
    rootgrp = netCDF4.Dataset("/tmp/KMDS__OPER_P___10M_OBS_L2.nc", "r", format="NETCDF4")

    # Data file variables used below (units per file metadata):
    # dd   wind direction 10min avg [degree]
    # ff   wind speed at 10m, 10min avg [m/s]
    # gff  wind gust at 10m, 10min max [m/s]
    # ta   air temperature, 1min avg [deg C]
    # pp   air pressure at sea level, 1min avg [hPa]
    # qg   global solar radiation, 10min avg [W/m^2]
    # D1H  rainfall duration in last hour [min]
    # R1H  rainfall in last hour [mm]
    # time seconds since 1950-01-01 00:00:00
    # (Inspect all variables with rootgrp.variables.items().)

    # Get number of stations, then get station index by station id ("06<id>").
    nstations = rootgrp.dimensions['station'].size
    stationid = [(rootgrp["/station"][i]) for i in range(nstations)].index("06"+str(knmistation))

    fieldval = {'NEWLINE': "\n"}
    # time units is: seconds since 1950-01-01 00:00:00
    naivetime = netCDF4.num2date(rootgrp["/time"][:], rootgrp["/time"].units)[0]
    # Cumbersome way to make into utc timestamp
    obstime = datetime.datetime(naivetime.year, naivetime.month, naivetime.day,
                                naivetime.hour, naivetime.minute,
                                tzinfo=datetime.timezone.utc)
    fieldval['DATETIME'] = int(obstime.timestamp())
    fieldval['STN'] = knmistation
    fieldval['T'] = rootgrp["/ta"][stationid][0]
    fieldval['FF'] = rootgrp["/ff"][stationid][0]
    fieldval['FX'] = rootgrp["/gff"][stationid][0]
    fieldval['DD'] = rootgrp["/dd"][stationid][0]
    fieldval['Q'] = rootgrp["/qg"][stationid][0]
    fieldval['DR'] = rootgrp["/D1H"][stationid][0]/60.  # minutes to fraction of an hour
    fieldval['RH'] = rootgrp["/R1H"][stationid][0]
    fieldval['P'] = rootgrp["/pp"][stationid][0]

    fmt = PartialFormatter()
    outline = fmt.format(query, **fieldval)
    print(outline)
    # Return as array so we're compatible with convert_knmi() format (which
    # returns multiple lines)
    return [outline]
def convert_knmi(knmidata, query):
    """
    Convert KNMI hourly CSV data to influxdb line protocol.

    knmidata: line-wise iterable of the raw KNMI response (see
              get_knmi_data_historical()).
    query: PartialFormatter template producing influxdb line protocol.
    The header row ('# STN,YYYYMMDD, HH, ...') fixes the column positions;
    each subsequent data row is converted to native units and rendered
    through `query`. Fields empty in the source render as '~~' and are
    dropped (influxdb has no notion of null values).
    Returns a list of rendered line-protocol strings.
    """
    my_logger.debug("convert_knmi(knmidata, query={})".format(query))
    start = False
    fieldpos = {}
    fieldval = {'NEWLINE': "\n"}
    # Per-field conversion from KNMI's integer encoding to usable units
    fieldfunc = {
        'YYYYMMDD': lambda x: datetime.datetime(int(x[0:4]), int(x[4:6]), int(x[6:8]), tzinfo=datetime.timezone.utc),  ## time
        'HH': lambda x: int(x),
        'DD': lambda x: int(x),
        'FF': lambda x: int(x)/10,
        'FX': lambda x: int(x)/10,
        'T': lambda x: int(x)/10,
        'SQ': lambda x: int(x)/10,
        'Q': lambda x: int(x)*10000/3600,  # Convert J/cm^2/hour to W/m^2, i.e. 10000cm^2/m^2 and 1/3600 hr/sec
        'DR': lambda x: int(x)/10,
        'RH': lambda x: max(int(x), 0)/10,  # KNMI doc: RH = Uursom van de neerslag (in 0.1 mm) (-1 voor <0.05 mm)
        'P': lambda x: int(x)/10,
    }
    parsed_lines = []
    fmt = PartialFormatter()
    for r in knmidata:
        row = r.replace(' ', '').split(',')
        # Robustness: skip completely empty lines (row would be [''])
        if not row[0]:
            continue
        # Find start row (syntax should be like # STN,YYYYMMDD, HH, DD, FH,
        # FF, FX, T, T10, TD, SQ, Q, DR, RH, P, VV, N, U, WW, IX, M, R, S, O, Y)
        if (row[0][0] == "#" and len(row) > 2 and "YYYYMMDD" in row[1] and not start):
            try:
                fieldpos['STN'] = 0
                fieldpos['YYYYMMDD'] = row.index("YYYYMMDD")
                fieldpos['HH'] = row.index("HH")
                fieldpos['DD'] = row.index("DD")
                fieldpos['FF'] = row.index("FF")
                fieldpos['FX'] = row.index("FX")
                fieldpos['T'] = row.index("T")
                fieldpos['SQ'] = row.index("SQ")
                fieldpos['Q'] = row.index("Q")
                fieldpos['DR'] = row.index("DR")
                fieldpos['RH'] = row.index("RH")
                fieldpos['P'] = row.index("P")
            except ValueError as e:
                my_logger.exception("KNMI data file incompatible, could not find fields HH, DD or others: {}".format(row))
                quit("KNMI data file incompatible, could not find fields HH, DD or others: {}".format(e))
            start = True
            my_logger.debug("Started on row: {}".format(r))
        # After we found the start marker, parse data.
        elif (start and len(row) > 1):
            for fname, pos in fieldpos.items():
                try:
                    # Apply conversion function to each field's value,
                    # if none specified, pass value as is.
                    fieldval[fname] = fieldfunc.get(fname, lambda x: x)(row[pos])
                except (ValueError, IndexError):
                    # Narrowed from bare except: empty/missing value; render
                    # as None so PartialFormatter marks it for removal below.
                    fieldval[fname] = None
            # Construct datetime from date and hour fields.
            # N.B. Although observations run from hour-1 to hour (e.g. 'Uurvak
            # 05 loopt van 04.00 UT tot 5.00 UT'), most measurements are taken
            # at the end of the slot, so we use that timestamp as given.
            # N.B. HH runs from 1-24, so we can't make a time directly (which
            # runs from 0-23) for hour 24; instead add as timedelta to allow
            # wrapping over days (e.g. 1->1, 2->2, but 24->0 next day).
            fieldval['DATETIME'] = int((fieldval['YYYYMMDD'] + datetime.timedelta(hours=fieldval['HH'])).timestamp())
            # Unpack dict to format query to give influxdb line protocol "value=X"
            # See https://github.com/influxdata/docs.influxdata.com/issues/717#issuecomment-249618099
            outline = fmt.format(query, **fieldval)
            # Influxdb does not recognize None or null as values; remove
            # fields by filtering out all ~~ values given by PartialFormatter().
            outline_fix = []
            for l in outline.split('\n'):
                # Split into measurement / field set / timestamp
                # (https://docs.influxdata.com/influxdb/v1.7/write_protocols/line_protocol_tutorial/)
                try:
                    outline_meas, outline_field, outline_time = l.split(' ')
                except ValueError:
                    # BUGFIX: skip malformed lines — previously execution fell
                    # through and reused stale (or undefined) variables.
                    my_logger.exception("Could not unpack line: {}".format(l))
                    continue
                # Replace None values
                outline_field = ','.join([w for w in outline_field.split(',') if '~~' not in w])
                outline_fix.append(" ".join([outline_meas, outline_field, outline_time]))
            # Store
            parsed_lines.append("\n".join(outline_fix))
        # If we found nothing (during initialization), we continue to next
        # line (added for clarity)
        else:
            continue
    return parsed_lines
def influxdb_output(outuri, influxdata, influxusername=None, influxpassword=None):
    """
    Write influxdb line-protocol data either to an influxdb server or a file.

    outuri: target — if it starts with 'http' it is treated as an influxdb
            write endpoint, otherwise as a filename.
    influxdata: iterable of line-protocol strings, joined with newlines.
    influxusername/influxpassword: basic-auth credentials for the server case.
    """
    my_logger.debug("influxdb_output(outuri={}, influxdata)".format(outuri))
    payload = "\n".join(influxdata)
    if outuri[:4].lower() != 'http':
        # File target: write the joined payload in one go
        with open(outuri, 'w+') as fdo:
            fdo.write(payload)
        return
    # Server target: POST with basic auth; influxdb answers 204 on success
    response = requests.post(outuri, data=payload, timeout=10,
                             auth=(influxusername, influxpassword))
    if response.status_code == 204:
        my_logger.debug("Query successfully handed to influxdb.")
    else:
        my_logger.error("Could not push to influxdb: {} - {}".format(response.status_code, response.content))
def get_secrets(secretsfile):
    """
    Get secrets from YAML file `secretsfile` as alternative to command line
    arguments.

    Expected structure: a 'knmi2influxdb' mapping with keys 'knmiapikey',
    'influx_username' and 'influx_password'.
    Returns (knmi_api_key, influx_username, influx_password).
    Raises yaml.YAMLError when the file cannot be parsed.
    """
    with open(secretsfile, 'r') as stream:
        try:
            data = yaml.safe_load(stream)
            # API key
            knmiapikey = data['knmi2influxdb']['knmiapikey']
            # Influxdb credentials
            influx_user = data['knmi2influxdb']['influx_username']
            influx_passwd = data['knmi2influxdb']['influx_password']
        except yaml.YAMLError as exc:
            my_logger.exception('Could not load yaml file: {}'.format(exc))
            # BUGFIX: re-raise. Previously the error was swallowed and the
            # function fell through to the return statement, which then died
            # with a confusing NameError on the undefined locals.
            raise
    return knmiapikey, influx_user, influx_passwd
# Init logger, defaults to console
my_logger = logging.getLogger("MyLogger")
# Logger accepts everything; per-handler levels filter below
my_logger.setLevel(logging.DEBUG)
# create syslog handler which also shows filename in log
handler_syslog = logging.handlers.SysLogHandler(address = '/dev/log')
formatter = logging.Formatter('%(filename)s: %(message)s')
handler_syslog.setFormatter(formatter)
# Only INFO and above reach syslog.
# NOTE(review): since this is the only handler attached, DEBUG records are
# filtered out entirely rather than going "to console" — confirm intended.
handler_syslog.setLevel(logging.INFO)
my_logger.addHandler(handler_syslog)
my_logger.debug("Init logging & parsing command line args.")
# Parse commandline arguments
parser = argparse.ArgumentParser(description="Convert KNMI data to influxdb line protocol. Optionally insert into database directly")
# Data source selection: 'actual' uses the 10-minute open-data API (needs an
# API key), 'historical' uses the hourly climatology POST endpoint
parser.add_argument("--time", choices=['actual', 'historical'], help="Get actual (default, updated in 10-min interval) or historical (hourly, updated daily) data. ", default='actual')
# nargs="*": one element means "days back from now", two mean start/end dates
parser.add_argument("--histrange", help="Time range to get historical data for. Either days since now (if one parameter), or timerange in format of YYYYMMDD (if two parameters)", nargs="*", default=['21'])
parser.add_argument("--station", help="""KNMI station (default: de Bilt). Possible values:
210: Valkenburg
215: Voorschoten
225: IJmuiden
235: De Kooy
240: Schiphol
242: Vlieland
249: Berkhout
251: Hoorn (Terschelling)
257: Wijk aan Zee
258: Houtribdijk
260: De Bilt
265: Soesterberg
267: Stavoren
269: Lelystad
270: Leeuwarden
273: Marknesse
275: Deelen
277: Lauwersoog
278: Heino
279: Hoogeveen
280: Eelde
283: Hupsel
286: Nieuw Beerta
290: Twenthe
310: Vlissingen
319: Westdorpe
323: Wilhelminadorp
330: Hoek van Holland
340: Woensdrecht
344: Rotterdam
348: Cabauw
350: Gilze-Rijen
356: Herwijnen
370: Eindhoven
375: Volkel
377: Ell
380: Maastricht
391: Arcen""", default=260)
parser.add_argument("--api_key", help="KNMI opendata api key, required for actuals.")
# Output: http(s) URI -> push to influxdb; anything else -> write to file;
# omitted -> print to stdout
parser.add_argument("--outuri", help="Output target, either influxdb server (if starts with http, e.g. http://localhost:8086/write?db=smarthome&precision=s), or filename (else)")
parser.add_argument("--influxusername", help="Influxdb username (if outuri points to influxdb server)")
parser.add_argument("--influxpassword", help="Influxdb password (if outuri points to influxdb server)")
# Secrets file overrides --api_key/--influxusername/--influxpassword below
parser.add_argument("--secretsfile", help="YAML file containing secrets, e.g. KNMI API & Influddb authentication.")
parser.add_argument("--query", help="Query template for influxdb line protocol, where {DATETIME}=UT date in seconds since epoch, {STN}=station, {T}=temp in C, {FF}=windspeed in m/s, {FX}=windgust in m/s, {DD}=wind direction in deg, {Q}=irradiance in W/m^2, {RH}=precipitation in mm, {NEWLINE} is newline, e.g. 'weather,device=knmi temp={T} wind={DD}'", default=DEFAULTQUERY)
args = parser.parse_args()
# CONSISTENCY FIX: use the configured module logger instead of the root
# `logging` module (root has no handler set up here, so those calls were
# effectively lost / inconsistently routed).
my_logger.debug("Got command line args:" + str(args))

# Load secrets from file, if given; these override the command-line values
if (args.secretsfile):
    args.api_key, args.influxusername, args.influxpassword = get_secrets(args.secretsfile)

influxdata = None
if (args.time == 'historical'):
    # Historical: fetch hourly CSV and convert to line protocol
    knmidata = get_knmi_data_historical(args.station, args.histrange)
    influxdata = convert_knmi(knmidata, args.query)
else:
    # Actual (10-minute) data requires an open-data API key
    if (not args.api_key):
        my_logger.error("Need apikey for actual data query.")
    influxdata = get_knmi_data_actual(args.api_key, args.station, args.query)

# Push to influxdb/file if a target was given, otherwise print to stdout
if (args.outuri):
    influxdb_output(args.outuri, influxdata, influxusername=args.influxusername, influxpassword=args.influxpassword)
else:
    print(influxdata)
# Run for live data