diff --git a/test/cordex-cmip6/generate_plots_into_swift.py b/test/cordex-cmip6/generate_plots_into_swift.py
new file mode 100644
index 0000000000000000000000000000000000000000..7e56e19a12e9b0fedc25452270b464976ae792ed
--- /dev/null
+++ b/test/cordex-cmip6/generate_plots_into_swift.py
@@ -0,0 +1,68 @@
+from tqdm import tqdm
+from mytoken import *  # provides OS_STORAGE_URL and OS_AUTH_TOKEN for the DKRZ Swift object store
+import json
+import os
+import xarray as xr
+
+#https://swift.dkrz.de/v1/dkrz_475f8922-045d-43f4-b661-0b053074407e/test/no_ave.txt
+os.environ["OS_STORAGE_URL"]=OS_STORAGE_URL
+os.environ["OS_AUTH_TOKEN"]=OS_AUTH_TOKEN
+
+import fsspec
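+# fh: HTTP filesystem to read from the local xpublish server, fs: Swift filesystem to write the results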
+fh=fsspec.filesystem("http",timeout=9999999)
+fs=fsspec.filesystem("swift")
+swift_account_url=OS_STORAGE_URL.replace("https://","swift://").replace("/v1/","/")
+plotdir=swift_account_url+"/plots"
+print(plotdir)
+#fs.makedir(plotdir)  # the "plots" container has to be created beforehand, e.g. in the Swift web interface
+cloudmapper=fs.get_mapper(plotdir)
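+# the mapper behaves like a dict: assigning bytes to a key uploads an object into the container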
+
+kinds=["quadmesh","contourf"]
+color="coolwarm"
+times=["-1_0","-12_0"]
+
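+# the xpublish server started by host.py lists all published dataset ids under /datasets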
+host=os.environ["HOSTNAME"]
+baseurl="http://"+host+":9000/datasets"
+with fh.open(baseurl,"rb") as fp:
+    datasets=fp.read()
+
+cloudmapper["test.htm"]=datasets
+datasets=eval(datasets)
+print(datasets)
+chosen=0
+# example dataset id:
+#CORDEX.CMIP6.DD.EUR-12.CLMcom-DWD.ERA5.evaluation.r1i1p1f1.ICON-CLM-202407-1-1.v0-r0.1hr.v20240713
+hosted=datasets[chosen]
+version=datasets[chosen].split('.')[11]
+print(hosted)
+print(version)
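+# xpublish also exposes each dataset as a consolidated zarr store under <baseurl>/<dataset_id>/zarr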
+zarrurl='/'.join([baseurl,hosted])+"/zarr"
+ds=xr.open_zarr(
+        zarrurl,
+        consolidated=True,
+        decode_cf=True
+        )
+hostedtrunk='/'.join(hosted.split('.')[2:-1])
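+# for every plottable variable, request one plot per (kind, time range) pair from the plot endpoint
+# and store the returned HTML under a DRS-like key in the Swift "plots" container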
+for dv in tqdm(ds.data_vars):
+    if "bnds" in dv or "bounds" in dv or "rotated_lat" in dv or "vertices" in dv:
+        continue
+    print(dv)
+    for kind,time in zip(kinds,times):
+        plotname='_'.join(["hvplot",dv,hosted.split('.')[-2],kind,color,time])+".htm"
+        trunk='/'.join([hostedtrunk,dv,version])
+        entry='/'.join([trunk,plotname])
+        url='/'.join([baseurl,hosted,"plot",dv,kind,color,"lat/20_73/lon/-45_65/time",time])
+        print(entry+": "+url)
+        if entry in cloudmapper:
+            continue  # plot already stored
+        try:
+            cloudmapper[entry]=fh.cat(url)
+        except Exception as e:
+            print("Did not work for "+entry+": "+str(e))
+            continue
diff --git a/test/cordex-cmip6/generate_table_app.py b/test/cordex-cmip6/generate_table_app.py
new file mode 100644
index 0000000000000000000000000000000000000000..5da4f1493511cff78e11a2416cc42cf121ddadce
--- /dev/null
+++ b/test/cordex-cmip6/generate_table_app.py
@@ -0,0 +1,62 @@
+import pandas as pd
+from mytoken import *  # provides OS_STORAGE_URL and OS_AUTH_TOKEN
+import os
+import panel as pn
+pn.extension("tabulator")
+
+os.environ["OS_STORAGE_URL"]=OS_STORAGE_URL
+os.environ["OS_AUTH_TOKEN"]=OS_AUTH_TOKEN
+import fsspec
+fs=fsspec.filesystem("swift")
+swift_account_url=OS_STORAGE_URL.replace("https://","swift://").replace("/v1/","/")
+plotdir=swift_account_url+"/plots"
+#fs.makedir(plotdir)  # the "plots" container has to be created beforehand, e.g. in the Swift web interface
+cloudmapper=fs.get_mapper(plotdir)
+# all stored plot objects, excluding the table and test files
+plots=[p for p in cloudmapper.keys() if "tabu.htm" not in p and "test.htm" not in p]
+
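+# DRS: the path components of a stored plot key; FN: the "_"-separated parts of the plot file name;
+# "using" selects which of these become columns of the table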
+DRS=["activity_id","domain_id","institution_id","driving_source_id","driving_experiment_id",
+        "driving_variant_label","source_id","version_realisation","frequency","variable_id","version","plotname"]
+FN=["hvplot","variable_id","frequency","kind","color","times"]
+using=["plotname","frequency","variable_id","kind","color","times"]
+
+from bokeh.models import HTMLTemplateFormatter
+
+bokeh_formatters = {
+    "url": HTMLTemplateFormatter(template="<code><%= value %></code>")
+}
+
+def map_plot(entry):
+    """Turn a stored plot key into a row dict with the selected DRS facets and a clickable URL."""
+    resdict={}
+    for part,value in zip(DRS,entry.split('/')):
+        if part in using:
+            resdict[part]=value
+    # turn the swift:// key back into the https:// object URL
+    resdict["url"]=(plotdir+"/"+entry).replace("swift://","https://").replace("dkrz.de/","dkrz.de/v1/")
+    for part,value in zip(FN,resdict["plotname"].split('_')):
+        if part in using:
+            resdict[part]=value
+    del resdict["plotname"]
+    return resdict
+
+def make_clickable(val):
+    return '<a target="_blank" href="{}">{}</a>'.format(val, val)
+
+df=pd.DataFrame(list(map(map_plot,plots)))
+df["url"]=df["url"].apply(make_clickable)
+
+tabu = pn.widgets.Tabulator(
+    df,
+    show_index=False,
+    header_filters=True,
+    selectable=1,
+    widths=dict(url=200),
+    pagination="local",
+    formatters=bokeh_formatters
+)
+tabu.save("tabu.html")
diff --git a/test/cordex-cmip6/host.py b/test/cordex-cmip6/host.py
new file mode 100755
index 0000000000000000000000000000000000000000..38451a0d82b70a96bbcc83c393e9a957950e7f06
--- /dev/null
+++ b/test/cordex-cmip6/host.py
@@ -0,0 +1,76 @@
+import os
+from cloudify.plugins.geoanimation import *  # provides PlotPlugin
+from cloudify.utils.daskhelper import *  # provides get_dask_cluster / get_dask_client
+import xarray as xr
+import xpublish as xp
+import asyncio
+import nest_asyncio
+nest_asyncio.apply()
+import glob
+trunk="/work/bb1149/ESGF_Buff/"
+dset="CORDEX/CMIP6/DD/EUR-12/CLMcom-DWD/ERA5/evaluation/r1i1p1f1/ICON-CLM-202407-1-1/v0-r0/1hr/*/v20240713/*"
+dset="CORDEX/CMIP6/DD/EUR-12/CLMcom-DWD/ERA5/evaluation/r1i1p1f1/ICON-CLM-202407-1-1/v0-r0/mon/*/v20240713/*"
+testvar="ts"
+fn=sorted(glob.glob(trunk+dset.replace("/*/","/ts/")))[-1]
+timestamp=fn.split('/')[-1].split('_')[9]
+lastyear=timestamp[0:4]
+import pandas as pd
+
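+# preprocess for open_mfdataset: keep files whose variables carry the requested cell_methods "cm";
+# files that do not match are reduced to their first time step and shifted back by 30 minutes
+# (presumably so that their time stamp lines up with the interval means of the matching files)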
+def sortout(dss,cm):
+    l_iscm=any(
+            cm in dss[dv].attrs.get("cell_methods","default")
+            for dv in dss.data_vars
+            )
+    if l_iscm:
+        return dss
+    else:
+        dss=dss.isel(time=0)
+        dss["time"]=dss['time'] - pd.Timedelta(minutes=30)
+        return dss
+
+if __name__ == "__main__":  # This avoids infinite subprocess creation
+    #client = asyncio.get_event_loop().run_until_complete(get_dask_client())
+    import dask
+    dask.config.set({"array.slicing.split_large_chunks": False})
+    dask.config.set({"array.chunk-size": "100 MB"})
+    zarrcluster = asyncio.get_event_loop().run_until_complete(get_dask_cluster())
+    #client=Client(cluster)
+    os.environ["ZARR_ADDRESS"]=zarrcluster.scheduler._address
+    dsdict={}
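+    # only open files whose name contains the last year of the time series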
+    template=trunk+dset+str(lastyear)+"*"
+    if "/1hr" in dset:
+        for freq,cm in [("1hr","time: mean"),("1hrPt","time: point")]:
+            ds=xr.open_mfdataset(
+                template,
+                compat="override",
+                coords="minimal",
+                chunks="auto",
+                preprocess=lambda dss: sortout(dss,cm)
+                )
+            del ds["time_bnds"]
+            entry=dset.replace('1hr/*',freq).replace('/*','').replace('/','.')
+            print(entry)
+            dsdict[entry]=ds.chunk(time=1)
+    else:
+        ds=xr.open_mfdataset(
+                template,
+                compat="override",
+                coords="minimal",
+                chunks="auto",
+                )
+        del ds["time_bnds"]
+        entry=dset.replace('/*','').replace('/','.')
+        print(entry)
+        dsdict[entry]=ds.chunk(time=1)        
+
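+    # xpublish serves every entry of dsdict under /datasets/<dataset_id>;
+    # PlotPlugin adds the /plot routes requested by generate_plots_into_swift.py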
+    collection = xp.Rest(dsdict,cache_kws=dict(available_bytes=1000000000))
+    collection.register_plugin(PlotPlugin())
+    collection.serve(host="0.0.0.0", port=9000)
+