From 2e23919c06ceed5bc9c42340eff805b3b0dc7bae Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?Gr=C3=B6ne=2C=20Tjark=20Leon=20Raphael?=
 <tjark.leon.raphael.groene@uni-hamburg.de>
Date: Mon, 16 Jun 2025 22:33:51 +0200
Subject: [PATCH] maxwell_integrate_to_h5.py: write HDF5 output with silx dicttoh5

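Replace the pandas HDFStore based writer with a single call to
silx.io.dictdump.dicttoh5. The per-subdirectory results are collected
into a nested dict whose "@"-prefixed keys are written as HDF5
attributes, so the NXdata attributes (@signal, @axes, @errors) end up
on /entry/data and each integrated image gets its own
/entry/data/<filename> group holding q, I and dI datasets.

For reference, a minimal sketch of reading such a file back with the
matching h5todict helper; the file path and the "<image>.tif" key are
placeholders, not names produced by this script:

    from silx.io.dictdump import h5todict

    # h5todict returns the HDF5 tree as nested dicts of numpy arrays.
    tree = h5todict("<subdir_name>.h5")
    frame = tree["entry"]["data"]["<image>.tif"]
    q, intensity, err = frame["q"], frame["I"], frame["dI"]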
---
 maxwell_integrate_to_h5.py | 47 ++++++++++++++++++++++----------------
 1 file changed, 27 insertions(+), 20 deletions(-)

diff --git a/maxwell_integrate_to_h5.py b/maxwell_integrate_to_h5.py
index 35f14b7..51c817a 100644
--- a/maxwell_integrate_to_h5.py
+++ b/maxwell_integrate_to_h5.py
@@ -15,6 +15,7 @@ from watchdog.observers.polling import PollingObserver
 from watchdog.events import PatternMatchingEventHandler
 from multiprocessing.pool import ThreadPool as Pool
 import pandas as pd
+from silx.io.dictdump import dicttoh5
 
 
 
@@ -121,27 +122,33 @@ def integrate_ims_in_dir(path_im, path_int, dtype_im=".tif", dtype_int=".dat"):
             results_df = results_df.sort_values(by="filename", key=lambda col: col.str.lower())
             subdir_name = os.path.basename(os.path.normpath(subdir_path_int))
             results_df.to_csv(os.path.join(subdir_path_int, f"{subdir_name}.csv"), index=False)
-            results_df.to_hdf(os.path.join(subdir_path_int, f"{subdir_name}.h5"), key='entry/data', mode='w', format='table')
-
-            with pd.HDFStore(os.path.join(subdir_path_int, f"{subdir_name}.h5"), mode='w') as hdf_store:
-                hdf_store.put("entry/definition", pd.Series(["NXdata"]), format='table')
-                hdf_store.put("entry/title", pd.Series([subdir_name]), format='table')
-                
-                # Create the NXdata group with Nexus conventions
-                nxdata_group = {
-                    "@axes": "q",
-                    "@signal": "I",
-                    "@errors": "dI"
+
+            # Prepare data for HDF5 file using silx
+            hdf5_data = {
+                "entry": {
+                    "definition": "NXdata",
+                    "title": subdir_name,
+                    "data": {
+                        "@axes": "q",
+                        "@signal": "I",
+                        "@errors": "dI",
+                    }
+                }
+            }
+
+            for result in results_data:
+                filename_key = os.path.basename(result["filename"])
+                hdf5_data["entry"]["data"][filename_key] = {
+                    "q": result["q"].tolist(),
+                    "I": result["I"].tolist(),
+                    "dI": result["dI"].tolist()
                 }
-                hdf_store.put("entry/data/NXdata", pd.Series(nxdata_group), format='table')
-                
-                for result in results_data:
-                    filename_key = os.path.basename(result["filename"])
-                    hdf_store.put(f"entry/data/{filename_key}/q", pd.Series(np.array(result["q"], dtype=float)), format='table')
-                    hdf_store.put(f"entry/data/{filename_key}/I", pd.Series(np.array(result["I"], dtype=float)), format='table')
-                    hdf_store.put(f"entry/data/{filename_key}/dI", pd.Series(np.array(result["dI"], dtype=float)), format='table')
-
-            print(f"Results for subdirectory {subdir_name} saved to CSV and HDF5 files using Nexus conventions.")
+
+            # Save to HDF5 file using silx
+            hdf5_file_path = os.path.join(subdir_path_int, f"{subdir_name}.h5")
+            dicttoh5(hdf5_data, hdf5_file_path, mode="w")
+
+            print(f"Results for subdirectory {subdir_name} saved to CSV and HDF5 files using silx.")
             del results_df
         else:
             print(f"No images were integrated in subdirectory {subdir}. No results DataFrame created.")
-- 
GitLab