Of course, here is the code I'm trying to run. I just copy-pasted it into a Text DAT and let it output to another Text DAT, which I have now deleted for testing purposes. I did check, and requests does live inside C:\Program Files\Derivative\TouchDesigner\bin\Lib\site-packages\requests
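As a quick sanity check you can run from a separate Text DAT, this confirms which requests module TouchDesigner's bundled Python actually picks up (a minimal sketch, nothing here is specific to my setup):

import sys
import requests

print(sys.version)        # version of TouchDesigner's bundled Python
print(requests.__file__)  # should point into ...\TouchDesigner\bin\Lib\site-packages\requests

And here is the full script: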
import logging
import os
import sys
import json
import xarray as xr
import requests
# Set up logging
logging.basicConfig()
logger = logging.getLogger(__name__)
logger.setLevel(os.environ.get("LOG_LEVEL", logging.INFO))
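# Note: setLevel() accepts level names as strings as well as the numeric
# constants, so LOG_LEVEL can be set in the environment to e.g. "DEBUG".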
# Station lookup for convenience
station_lookup = {
"IJmond": 209,
"Valkenburg Zh": 210,
"Voorschoten": 215,
"IJmuiden": 225,
"De Kooy": 235,
"Schiphol": 240,
"Vlieland": 242,
"Wijdenes": 248,
"Berkhout": 249,
"Hoorn Terschelling": 251,
"Wijk aan Zee": 257,
"Houtribdijk": 258,
"De Bilt": 260,
"Soesterberg": 265,
"Stavoren": 267,
"Lelystad": 269,
"Leeuwarden": 270,
"Marknesse": 273,
"Deelen": 275,
"Lauwersoog": 277,
"Heino": 278,
"Hoogeveen": 279,
"Eelde": 280,
"Hupsel": 283,
"Huibertgat": 285,
"Nieuw Beerta": 286,
"Twenthe": 290,
"Cadzand": 308,
"Vlissingen": 310,
"Hoofdplaat": 311,
"Oosterschelde": 312,
"Vlakte van De Raan": 313,
"Hansweert": 315,
"Schaar": 316,
"Westdorpe": 319,
"Wilhelminadorp": 323,
"Stavenisse": 324,
"Hoek van Holland": 330,
"Tholen": 331,
"Woensdrecht": 340,
"Rotterdam Geulhaven": 343,
"Rotterdam": 344,
"Cabauw Mast": 348,
"Gilze-Rijen": 350,
"Herwijnen": 356,
"Eindhoven": 370,
"Volkel": 375,
"Ell": 377,
"Maastricht": 380,
"Arcen": 391
}
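# Note: the KNMI NetCDF files store these codes as zero-padded strings with a
# leading "06" (e.g. 260 -> "06260"); convert_nc_to_json() below pads the
# integer IDs from this lookup before matching the file's 'station' variable.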
class OpenDataAPI:
def __init__(self, api_token: str):
self.base_url = "https://api.dataplatform.knmi.nl/open-data/v1"
self.headers = {"Authorization": api_token}
def __get_data(self, url, params=None):
return requests.get(url, headers=self.headers, params=params).json()
def list_files(self, dataset_name: str, dataset_version: str, params: dict):
return self.__get_data(
f"{self.base_url}/datasets/{dataset_name}/versions/{dataset_version}/files",
params=params,
)
def get_file_url(self, dataset_name: str, dataset_version: str, file_name: str):
return self.__get_data(
f"{self.base_url}/datasets/{dataset_name}/versions/{dataset_version}/files/{file_name}/url"
)
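# Sketch of how the wrapper gets used (the token is a placeholder; main()
# below does the same thing):
#   api = OpenDataAPI(api_token="<your KNMI Open Data API key>")
#   listing = api.list_files("Actuele10mindataKNMIstations", "2",
#                            {"maxKeys": 1, "orderBy": "created", "sorting": "desc"})
#   print(listing["files"][0]["filename"])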
def download_file_from_temporary_download_url(download_url, filename):
try:
with requests.get(download_url, stream=True) as r:
r.raise_for_status()
with open(filename, "wb") as f:
for chunk in r.iter_content(chunk_size=8192):
f.write(chunk)
except Exception:
logger.exception("Unable to download file using download URL")
sys.exit(1)
logger.info(f"Successfully downloaded dataset file to {filename}")
def convert_nc_to_json(nc_file, json_file, target_station_id):
    dataset = None  # so the finally block is safe even if open_dataset fails
    try:
        # Open the NetCDF file with xarray
        dataset = xr.open_dataset(nc_file)
        # Pad the station ID to match the dataset's format (e.g., 260 -> "06260")
        target_station_id_padded = f"06{target_station_id:03d}"
        # 'station' holds the padded codes; decode bytes if needed so the
        # membership test below is always string-to-string
        station_ids = [
            s.decode() if isinstance(s, bytes) else str(s)
            for s in dataset["station"].values
        ]
# Ensure the station exists in the dataset
if target_station_id_padded not in station_ids:
print(f"Station ID {target_station_id_padded} not found in the file.")
return None
# Find the index of the station in the dataset
station_index = list(station_ids).index(target_station_id_padded)
print(f"Found station {target_station_id} (padded: {target_station_id_padded}) at index {station_index}")
        # Extract the relevant data for that station
        station_data = {}
        for var_name in dataset.variables:
            var_data = dataset[var_name]
            # Only variables that actually have a 'station' dimension can be
            # sliced per station; others (e.g. 'time') are taken whole
            if "station" in var_data.dims:
                var_data = var_data.isel(station=station_index)
            values = var_data.values
            # tolist() turns numpy arrays and scalars alike into plain Python
            # types that json.dump can handle
            station_data[var_name] = values.tolist() if hasattr(values, "tolist") else values
        # Save the data for the station to a JSON file; default=str covers
        # values json cannot serialize natively (e.g. timestamps)
        with open(json_file, "w") as f:
            json.dump(station_data, f, indent=4, default=str)
        print(f"Successfully filtered and saved data for station {target_station_id} to {json_file}")
return station_data
except Exception as e:
print(f"Error during conversion: {e}")
    finally:
        if dataset is not None:
            dataset.close()
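# Tip: if the station filtering misbehaves, inspect the file first; xarray's
# repr lists every dimension, coordinate and data variable:
#   ds = xr.open_dataset("downloaded_file.nc")  # placeholder filename
#   print(ds)
#   print(ds["station"].values)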
def main():
    api_key = "your-api-key-here"  # make sure to replace this with your own API key
    dataset_name = "Actuele10mindataKNMIstations"
    dataset_version = "2"
    station_name = "De Bilt"  # pick any station from station_lookup above
logger.info(f"Fetching latest file of {dataset_name} version {dataset_version}")
target_station_id = station_lookup.get(station_name)
if target_station_id is None:
logger.error(f"Station '{station_name}' not found in station list.")
sys.exit(1)
api = OpenDataAPI(api_token=api_key)
# sort the files in descending order and only retrieve the first file
params = {"maxKeys": 1, "orderBy": "created", "sorting": "desc"}
response = api.list_files(dataset_name, dataset_version, params)
if "error" in response:
logger.error(f"Unable to retrieve list of files: {response['error']}")
sys.exit(1)
latest_file = response["files"][0].get("filename")
logger.info(f"Latest file is: {latest_file}")
# fetch the download url and download the file
response = api.get_file_url(dataset_name, dataset_version, latest_file)
download_file_from_temporary_download_url(response["temporaryDownloadUrl"], latest_file)
downloaded_file = latest_file
json_output_file = f"{station_name.replace(' ', '_')}_output.json"
return convert_nc_to_json(downloaded_file, json_output_file, target_station_id)
def load_json(json_file):
with open(json_file, "r") as f:
data = json.load(f)
return data
if __name__ == "__main__":
    data = main()
    if data:
        print(json.dumps(data, indent=2, default=str))
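For completeness, once the script has written the JSON you can reload it later without hitting the API again. The filename below is just an example (it assumes a run with station_name = "De Bilt"), and the variable names inside depend on what the KNMI file actually contains:

import json

with open("De_Bilt_output.json") as f:
    data = json.load(f)
print(sorted(data.keys()))  # list the saved variables before using any of them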