"""

:platform: Unix, Windows 
:last changed: 2024-01-23

.. moduleauthor:: Tjark Leon Raphael Gröne <tgroene@physnet.uni-hamburg.de>


This code imports integrated .chi data and processes it into the PDF .gr format.
"""

from backbone.PDF_generation_class import PDF_generation_class
from backbone.settings.routines.PDF_generation_routine import *
from copy import copy, deepcopy
import pathlib

rootdir = str(pathlib.Path(__file__).parent.resolve())

#----------------------------------------------------------------------------------------#
#                                                                                        #
#                                      Configurations                                    #
#                                                                                        #
#----------------------------------------------------------------------------------------#


# BG file name (directories for integrated data and metadata should be kept constant
# throughout the beamtime)
bg_sampleName = 'lk_zns_background_insitu_oaws_1_00001-00001' # file name
bg_path = r'C:\Users\admin\Nextcloud\MyData\bt_1222\in_situ_reactions\varex\data\Original\lk_zns_background_insitu_oaws_1_00001\\' # integrated data path
bg_tempPath = r'C:\Users\admin\Nextcloud\MyData\bt_1222\in_situ_reactions\varex\metadata\lk_zns_background_insitu_oaws_1_00001\\' # metadata path

# Measurement file name (directories for integrated data and metadata should be kept
# constant throughout the beamtime)
meas_sampleName = 'lk_zns_insitu_1_00001'  # file name
meas_path = r'C:\Users\admin\Nextcloud\MyData\bt_1222\in_situ_reactions\varex\data\Original\lk_zns_insitu_1_00001\\' # integrated data path
meas_tempPath =  r'C:\Users\admin\Nextcloud\MyData\bt_1222\in_situ_reactions\varex\metadata\lk_zns_insitu_1_00001\\' # metadata path

# path and temp_path could also be set to None if reload .mat files are used.
bg_Reload = None #rootdir + '/processed/' + meas_sampleName + '/Reload_files/bg_Reload/' + bg_sampleName + '.mat'
meas_Reload = None #rootdir + '/processed/' + meas_sampleName + '/Reload_files/meas_Reload/' + meas_sampleName + '.mat'

# Correction factors to subtract, as well as an empty-glass measurement, to counter
# deviations in cell manufacturing.
empty_glas_int = 0 # previously used values: -0.0009683967395722652, -0.01, -0.01*(1/120), 0.01, 0.014, 0.0210336
empty_glas = None #'C:/Users/admin/Nextcloud/Beamtimes/bt_0423/XRD_detailed/empty_ac_cell_inlet_180_10_Av120.chi'

scatter_supression = 0.0001 # Small correction to dampen deviations from the calculated BG subtraction factor.
# Currently this problem is addressed by looking at high-r Fourier noise instead,
# as this approach was not the best way to counter it.

# Metadata markers used to extract the counters (time and temperature) from the metadata files.
counter_PDF = {'time': 'dateString=',
               'temperature': 'userComment1="Temp: '}

energy = 101.5 # Energy in keV for the reference calculation from cif files.

# References can be loaded as whole sets of the phases you expect:
references = {  'ZnS Sph': {'path': rootdir + "/References/ZnS Sphalerite.cif", # change the reference name ('ZnS Sph') and path to access other references
                            'broadening': 0.1,  # gaussian sigma for broadening effects
                            'decay_rate': 0.8}, # an exponential decay can be added to match the data better (use 0 for no decay: exp(0)=1)
                'ZnS Wz':{'path': rootdir + "/References/ZnS Wurzit.cif",
                            'broadening': 0.1,
                            'decay_rate': 0.8},
                'ZnO cb': {'path': rootdir + "/References/ZnO Cubic.cif",
                            'broadening': 0.1,
                            'decay_rate': 0.8},
                'ZnO hex':{'path': rootdir + "/References/ZnO Hexagonal.cif",
                            'broadening': 0.1,
                            'decay_rate': 0.8},
                'Zn': {'path': rootdir + "/References/Zn.cif",
                            'broadening': 0.1,
                            'decay_rate': 0.8},} # you can add more if necessary; just add another three-line block


# Legacy note: prior to .cif I used .int files generated by Vesta for 100 keV

ends_with = '.dat' # this can differ between live integrations, but should stay the same for one beamtime

ramp_start_temp = 30 # set a little higher than the baseline temperature from which the measurement or BG started
plato_start_temp = 156 # temperature at which to stay, in °C
average = 240  # number of scans to average over
Qmaxinst = 21  # Qmax at which the mask sets in
FactorMax = 1.2 # maximum factor allowed for the BG subtraction
use_PDF_maximization = True # use the Monte Carlo PDF maximization strategy

check_phases_at_scan = 20 # choose: last scan processed by the live integration divided by the number of averaged scans

###### PDF params
Composition = 'ZnS' # material the PDF should focus on (NP material)
Qmaxinst_integrate = 21 # Q at which the XRD peaks dominate over the noise (Qmaxinst from xPDF)
Qmax = 16 # Qmax that the PDF will process
Qmin = 2.6 # minimal Q, normally almost always around 0.1
Rmin = 0.5 # minimal r to process the PDF from
Rmax = 30 # maximal r to process the PDF to
Rpoly = 1.7 # polynomial degree which affects the PDFgetX3 bg subtraction


# You can crop the data if necessary; anyhow, the BG subtraction discards pre-ramp
# and cooldown data if not changed in the options
startScan = 0
endScan = 1e10 # default setting for normal temperature ramps
#----------------------------------------------------------------------------------------#
#                                                                                        #
#                                    Process data set                                    #
#                                                                                        #
#----------------------------------------------------------------------------------------#


# Load the background data set.
bg = PDF_generation_class(
    path=bg_path,
    tempPath=bg_tempPath,
    sampleName=bg_sampleName,
    startScan=startScan,
    endScan=endScan,
    ends_with=ends_with,
    reload=bg_Reload,
    average=average,
    counter_PDF=counter_PDF,
)

# Determine where the ramp and the plateau start/end (here for the BG).
bg.getTempPoints(ramp_start_temp, plato_start_temp)

# Heat ranges are a dict of the temperature points just calculated.
# Sometimes the ramps don't match due to rounding errors; a small correction
# by +/- half the average may be needed.
bg_heat_ranges = {
    "ramp1_start": bg.rampstart_index,
    "ramp1_end": int(bg.platostart_index),
    "ramp2_start": 100000,
    "ramp2_end": 100000,
}

# Crop away the cooldown data points, as they are most often not needed.
endScan = bg.platoend_index

# Optionally save everything to a .mat file for the next run:
# bg.saveReload(rootdir + '/processed/' + meas_sampleName + '/Reload_files/bg_Reload/')

# Norm was previously used to normalize the data, but that alters the data too much;
# now it only crops Q to Qmaxinst. Smooth the TS/XRD data with a Whittaker-Eilers
# approach --> reference: https://github.com/mhvwerts/whittaker-eilers-smoother
bg.Filter('whittaker', 10000, 3)
# Save the files for the BG subtraction as .dat.
bg.outputDatFile(f"{rootdir}/processed/{meas_sampleName}/Dat_files/bg_Dat/")

# Extract the ramp/plateau scan lists from the class so they can be used later,
# when the sample (not the BG) is processed.
bg.getRampPlatoLists(bg_heat_ranges)
bg_plato_list = bg.plato_list
bg_ramp_list = bg.ramp_list

# Save everything done so far to a log file.
bg.PrintLogFile(f"{rootdir}/processed/{meas_sampleName}/Log_files/bg_Log/")

# Finish for the BG part: Now to the real sample
#-----------------------------------------------------------------------------------------#

# Load the actual measurement data set, now with the full PDF parameter set.
meas = PDF_generation_class(
    path=meas_path,
    tempPath=meas_tempPath,
    sampleName=meas_sampleName,
    startScan=startScan,
    endScan=endScan,
    ends_with=ends_with,
    reload=meas_Reload,
    without_dark=False,
    average=average,
    Composition=Composition,
    Qmax=Qmax,
    Qmaxinst=Qmaxinst,
    Qmin=Qmin,
    Rmax=Rmax,
    Rmin=Rmin,
    Rpoly=Rpoly,
    Rstep=0.01,
    counter_PDF=counter_PDF,
)

# Same approach as for the BG: locate the temperature points.
meas.getTempPoints(ramp_start_temp, plato_start_temp)
meas_heat_ranges = {
    "ramp1_start": meas.rampstart_index,
    "ramp1_end": meas.platostart_index,
    "ramp2_start": 100000,
    "ramp2_end": 100000,
}

# Crop away the cooldown again.
endScan = meas.platoend_index

# Optionally save to a .mat reload file:
# meas.saveReload(rootdir + '/processed/' + meas_sampleName + '/Reload_files/meas_Reload/')

# Also process and save the (still uncorrected) sample data as .dat files.
meas.outputDatFile(f"{rootdir}/processed/{meas_sampleName}/Dat_files/meas_type1_uncorrected_Dat/")
original = meas.I

# Finish loading the sample: Now to the BG subtraction and PDF generation
#-----------------------------------------------------------------------------------------#

# The fast version of the BG subtraction only finds a factor for which part of the data
# in a chosen region approaches 0. The more time-intensive approach additionally
# maximizes the PDF in a chosen region where the main peak of the data is expected.
# This works quite well, since under normal circumstances the PDF increases when the
# BG is subtracted better. The limits are that you need to define the region and
# decrease the resolution for faster convergence of the iterative Monte Carlo approach.
# This can sometimes lead to some oversubtraction in parts of the data set if the
# factor at lower resolution is higher than at higher resolution; but this, too, can
# be mitigated with some tricks in FT space.

# Background subtraction, filtering and baseline correction.
meas.getRampPlatoLists(meas_heat_ranges)
meas.bgSubtraction(
    bg_plato_list,
    bg_ramp_list,
    meas_heat_ranges,
    bg_sampleName,
    FactorMax=FactorMax,
    Qmaxinst=Qmaxinst,
    empty_glas_int=empty_glas_int,
    empty_glas=empty_glas,
    Qmin=Qmin,
    use_PDF_maximization=use_PDF_maximization,
    outputPath=f"{rootdir}/processed/{meas_sampleName}/PDF_files/meas_type1_uncorrected_PDF/",
    single_BG_file=r'C:/Users/admin/Nextcloud/MyData/bt_1222/in_situ_reactions/varex/data/Normed_and_Averaged/lk_zns_background_insitu_oaws_1_00001_002-n=None/lk_zns_background_insitu_oaws_1_00001-17040.chi',
)
meas.outputMatFile(f"{rootdir}/processed/{meas_sampleName}/Mat_files/meas_type1_uncorrected_Mat/")
meas.PrintLogFile(f"{rootdir}/processed/{meas_sampleName}/Log_files/meas_type1_uncorrected_Log/")


#~~~~~~~~~~~~~~~~~~ XRD Phase Check ~~~~~~~~~~~~~~~~~~~~~~~#
# For the phase check we want to plot the BG-subtracted data, so replace I with a
# deep copy of subtracted_I (a copy, so the filtering/baseline correction below does
# not modify meas.subtracted_I, which is reused later for the PDF generation).
# BUGFIX: the original `copy.deepcopy(...)` raised AttributeError at runtime, because
# `copy` is the shallow-copy *function* imported via `from copy import copy`, not the
# module; use `deepcopy` directly instead.
meas.I = deepcopy(meas.subtracted_I)

# With baseline correction and filtering the data is easier to interpret.
meas.baselineCorrection(40)
meas.Filter('whittaker', 20)

# Perform the phase check against the reference patterns configured above.
meas.checkPhases(check_phases_at_scan, references=references, savefig='png',
                 outputPath=f"{rootdir}/processed/{meas_sampleName}/XRD_Plots/meas_phase_check/",
                 maxQ=30, energy=energy)

# In addition, 2D and 1D plots of the data over time can be plotted.
meas.plot2dTemp(savefig='svg',
                outputPath=f"{rootdir}/processed/{meas_sampleName}/XRD_Plots/meas_2D_plot/")
meas.plot1dvsTime(step_width=2, maxQ=15, max_index=30, plot_temp=True, savefig='svg',
                  outputPath=f"{rootdir}/processed/{meas_sampleName}/XRD_Plots/meas_1D_plot/")
#~~~~~~~~~~~~~~~~~ XRD Phase Check end ~~~~~~~~~~~~~~~~~~~~~#



# Now we come to the PDF generation itself (apart from the one performed during the
# BG subtraction).
#-----------------------------------------------------------------------------------------#
# In the beginning I tried three types of approaches:
# 1. Give PDFgetX3 the sample file, the BG file and the subtraction factor (no corrections)
#       --> This is now implemented directly in the BG subtraction when using PDF
#           maximization for it.
# 2. Subtract the BG first, then give PDFgetX3 only the subtracted sample file
#       (This approach does not work that well, since PDFgetX3 accounts for the BG
#        subtraction scale and alters the PDF in a profitable manner; however, baseline
#        corrections are quite easy here.)
# 3. Do the corrections first, then account for the BG scale, subtract the corrections
#        from both BG and sample, and give PDFgetX3 both corrected files plus the scale
#       (This approach can be helpful, but in the end corrections matter less after the
#        FT than a good BG subtraction does.)

# The difference between Qmaxinst and Qmaxinst_integrate is that one is used as a mask
# for the BG subtraction and the other as the limit for PDFgetX3. They can often be the
# same, so the class keeps a single variable, which is set here:
Qmaxinst = Qmaxinst_integrate

# This assignment already happened in the phase check above, but it would be missing if
# that section were commented out (e.g. when the data set is already known and only the
# PDF should be reprocessed).
meas.I = meas.subtracted_I

# Create the .dat files for PDFgetX3 to consume.
meas.outputDatFile(f"{rootdir}/processed/{meas_sampleName}/Dat_files/meas_type2_corrected_prior_Dat/")