Commit 1385aeba authored by Gröne, Tjark Leon Raphael

Edit maxwell_integrate_to_h5.py

parent 2fe88a76
@@ -219,20 +219,20 @@ def integrate_ims_in_dir(path_im, path_int, dtype_im=".tif", dtype_int=".dat"):
 entry.attrs["NX_class"] = "NXentry"
-entry.create_dataset("time", data=results_metadata[idx-1]["dateString"].encode('utf-8'), dtype=h5py.string_dtype(encoding='utf-8'), chunks=False, compression="gzip")
+entry.create_dataset("time", data=results_metadata[idx-1]["dateString"].encode('utf-8'), dtype=h5py.string_dtype(encoding='utf-8'))
 if any(results_metadata[idx-1][key] for key in ["userComment1", "userComment2", "userComment3", "userComment4"]):
     comments = entry.create_group("comments")
     comments.attrs["NX_class"] = "NXcomments"
     if results_metadata[idx-1]["userComment1"]:
-        comments.create_dataset("userComment1", data=results_metadata[idx-1]["userComment1"].encode('utf-8'), chunks=False, dtype=h5py.string_dtype(encoding='utf-8'), compression="gzip")
+        comments.create_dataset("userComment1", data=results_metadata[idx-1]["userComment1"].encode('utf-8'), dtype=h5py.string_dtype(encoding='utf-8'), compression="gzip")
     if results_metadata[idx-1]["userComment2"]:
-        comments.create_dataset("userComment2", data=results_metadata[idx-1]["userComment2"].encode('utf-8'), chunks=False, dtype=h5py.string_dtype(encoding='utf-8'), compression="gzip")
+        comments.create_dataset("userComment2", data=results_metadata[idx-1]["userComment2"].encode('utf-8'), dtype=h5py.string_dtype(encoding='utf-8'), compression="gzip")
     if results_metadata[idx-1]["userComment3"]:
-        comments.create_dataset("userComment3", data=results_metadata[idx-1]["userComment3"].encode('utf-8'), chunks=False, dtype=h5py.string_dtype(encoding='utf-8'), compression="gzip")
+        comments.create_dataset("userComment3", data=results_metadata[idx-1]["userComment3"].encode('utf-8'), dtype=h5py.string_dtype(encoding='utf-8'), compression="gzip")
     if results_metadata[idx-1]["userComment4"]:
-        comments.create_dataset("userComment4", data=results_metadata[idx-1]["userComment4"].encode('utf-8'), chunks=False, dtype=h5py.string_dtype(encoding='utf-8'), compression="gzip")
+        comments.create_dataset("userComment4", data=results_metadata[idx-1]["userComment4"].encode('utf-8'), dtype=h5py.string_dtype(encoding='utf-8'), compression="gzip")
 # Instrument / Detector group
@@ -240,9 +240,9 @@ def integrate_ims_in_dir(path_im, path_int, dtype_im=".tif", dtype_int=".dat"):
 detector.attrs["NX_class"] = "NXdetector"
 chunk_size = min(len(result["I"]), 1000)
-detector.create_dataset("q", data=np.asarray(result["q"], dtype=np.float64), chunks=(chunk_size,), dtype=h5py.float64, compression="gzip")
+detector.create_dataset("q", data=np.asarray(result["q"], dtype=np.float64), chunks=(chunk_size,), dtype="f8", compression="gzip")
-detector.create_dataset("I", data=np.asarray(result["I"], dtype=np.float64), chunks=(chunk_size,), dtype=h5py.float64, compression="gzip")
+detector.create_dataset("I", data=np.asarray(result["I"], dtype=np.float64), chunks=(chunk_size,), dtype="f8", compression="gzip")
-detector.create_dataset("dI", data=np.asarray(result["dI"], dtype=np.float64), chunks=(chunk_size,), dtype=h5py.float64, compression="gzip")
+detector.create_dataset("dI", data=np.asarray(result["dI"], dtype=np.float64), chunks=(chunk_size,), dtype="f8", compression="gzip")
 # Handle missing or invalid metadata values with defaults
 width = results_metadata[idx-1].get("width", "").strip()
@@ -252,15 +252,15 @@ def integrate_ims_in_dir(path_im, path_int, dtype_im=".tif", dtype_int=".dat"):
 image_sequence_number = results_metadata[idx-1].get("imageSequenceNumber", "").strip()
 if width.isdigit():
-    detector.create_dataset("pixels width", data=np.asarray([int(width)], dtype=np.int64), chunks=False, dtype=h5py.int64, compression="gzip")
+    detector.create_dataset("pixels width", data=np.asarray([int(width)], dtype=np.int64), dtype="i4", compression="gzip")
 if height.isdigit():
-    detector.create_dataset("pixels height", data=np.asarray([int(height)], dtype=np.int64), chunks=False, dtype=h5py.int64, compression="gzip")
+    detector.create_dataset("pixels height", data=np.asarray([int(height)], dtype=np.int64), dtype="i4", compression="gzip")
 if exposure_time.isdigit():
-    detector.create_dataset("exposure time", data=np.asarray([float(exposure_time)], dtype=np.float64), chunks=False, dtype=h5py.float64, compression="gzip")
+    detector.create_dataset("exposure time", data=np.asarray([float(exposure_time)], dtype=np.float32), dtype="f4", compression="gzip")
 if summed_exposures.replace('.', '', 1).isdigit():
-    detector.create_dataset("summed exposures", data=np.asarray([int(summed_exposures)], dtype=np.int64), chunks=False, dtype=h5py.int64, compression="gzip")
+    detector.create_dataset("summed exposures", data=np.asarray([int(summed_exposures)], dtype=np.int64), dtype="i4", compression="gzip")
 if image_sequence_number.isdigit():
-    detector.create_dataset("image sequence number", data=np.asarray([int(image_sequence_number)], dtype=np.int64), chunks=False, dtype=h5py.int64, compression="gzip")
+    detector.create_dataset("image sequence number", data=np.asarray([int(image_sequence_number)], dtype=np.int64), dtype="i4", compression="gzip")
 # Add interpretation info (optional for PyMca)
 detector["I"].attrs["interpretation"] = "spectrum"
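The changed calls touch two h5py details: contiguous storage (chunks=False) cannot be combined with a gzip compression filter, and h5py exposes no h5py.float64/h5py.int64 attributes, so NumPy dtypes or dtype strings such as "f8"/"i4" are used instead. Below is a minimal sketch of the corrected pattern, assuming h5py >= 2.10 (for string_dtype) and NumPy; the file name and the example arrays are placeholders, not values from this script:

import h5py
import numpy as np

q = np.linspace(0.1, 10.0, 3000)            # placeholder q grid
I = np.random.default_rng(0).random(3000)   # placeholder intensities

with h5py.File("example.h5", "w") as f:
    entry = f.create_group("entry_0001")
    entry.attrs["NX_class"] = "NXentry"

    # Scalar string: written without chunks/compression, since filters
    # require chunked (non-scalar) storage.
    entry.create_dataset("time", data="2024-01-01T00:00:00".encode("utf-8"),
                         dtype=h5py.string_dtype(encoding="utf-8"))

    detector = entry.create_group("detector")
    detector.attrs["NX_class"] = "NXdetector"

    # 1-D numeric arrays: explicit chunk shape plus gzip, dtype given as a string.
    chunk_size = min(len(I), 1000)
    detector.create_dataset("q", data=q, chunks=(chunk_size,), dtype="f8", compression="gzip")
    detector.create_dataset("I", data=I, chunks=(chunk_size,), dtype="f8", compression="gzip")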