ATLAS Offline Software
avgData Namespace Reference

Functions

 tempCorr (Temp, Eg)
 averageData (m, lumi_df)
 main ()

Function Documentation

◆ averageData()

avgData.averageData(m, lumi_df)

Definition at line 21 of file avgData.py.

21 def averageData (m,lumi_df):
22
23     #home directory definition
24     homeDirectory = os.path.expanduser('/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/')
25
26     # Define path to folder
27     dataFolder = homeDirectory + "/IBLData/processedData/means_dat/"
28     inputFolder = homeDirectory + "/IBLData/processedData/"
29
30     if not os.path.exists(dataFolder):
31         os.mkdir(dataFolder)
32
33     # Time bins = every day
34     b = lumi_df["start"].drop_duplicates()
35
36     output_dict = pd.DataFrame({})
37
38     # Loop over lumi blocks
39     lumis = []
40     total_lumi = 0
41     for l in lumi_df["intlumi"]:
42         total_lumi += l/(10**9)
43         lumis += [ total_lumi ]
44
45     print(total_lumi)
46     lumi_df["totlumi"] = lumis
47
48     # tmp_dict = lumi_df.groupby(pd.cut(lumi_df["start"],bins=b),as_index=False).mean()
49     # tmp_dict.fillna(method='ffill',inplace=True)
50     output_dict["start"] = b
51     output_dict["intlumi"] = lumi_df["totlumi"]
52     output_dict.fillna(method='ffill',inplace=True)
53     times = [datetime.datetime.utcfromtimestamp(s) for s in b]
54
55     plt.scatter(times,output_dict["intlumi"],marker=".")
56     plt.title(m)
57     plt.savefig(dataFolder+"intlumi/"+m+"_time.png")
58     plt.close()
59
60     # Jennet gets these from https://twiki.cern.ch/twiki/bin/viewauth/Atlas/PixelConditionsRUN2
61     voltage_settings = []
62
63     volume = []
64     sensorSize_planar = 50*250*200*1E-12 #cm3
65     sensorSize_3D = 50*250*230*1E-12 #cm3
66
67     sensorsPerModule = 336*80
68
69     if m.endswith("M4"): # 3D module
70         for s in output_dict["start"]:
71             volume += [sensorSize_3D*sensorsPerModule*4]
72
73             if m == "LI_S11_A_M4":
74                 if s < time.mktime(datetime.date(2017,1,1).timetuple()):
75                     voltage_settings += [20.0]
76                 elif s < time.mktime(datetime.date(2018,1,1).timetuple()):
77                     voltage_settings += [30.0]
78                 else:
79                     voltage_settings += [20.0]
80                 continue
81             if m == "LI_S12_A_M4":
82                 if s < time.mktime(datetime.date(2017,1,1).timetuple()):
83                     voltage_settings += [20.0]
84                 elif s < time.mktime(datetime.date(2018,1,1).timetuple()):
85                     voltage_settings += [21.0]
86                 else:
87                     voltage_settings += [30.0]
88                 continue
89             if m == "LI_S13_A_M4":
90                 if s < time.mktime(datetime.date(2017,1,1).timetuple()):
91                     voltage_settings += [15.0]
92                 else:
93                     voltage_settings += [40.0]
94                 continue
95             if s < time.mktime(datetime.date(2017,1,1).timetuple()):
96                 voltage_settings += [20.0]
97             else:
98                 voltage_settings += [40.0]
99     else: # Planar module
100         for s in output_dict["start"]:
101             volume += [sensorSize_planar*sensorsPerModule*4]
102
103             if s < time.mktime(datetime.date(2016,9,16).timetuple()):
104                 voltage_settings += [80.0]
105             elif s < time.mktime(datetime.date(2017,1,1).timetuple()):
106                 voltage_settings += [150.0]
107             elif s < time.mktime(datetime.date(2017,11,7).timetuple()):
108                 voltage_settings += [350.0]
109             else:
110                 voltage_settings += [400.0]
111
112     output_dict["HV_VSet"] = voltage_settings
113     output_dict["volume"] = volume
114
115     dataTypes = ["PP4LV","TModule","ENV_TT","HV_VMeas","HV_IMeas"]
116
117     for dataType in dataTypes:
118
119         print ("Investigating " + dataType )
120
121         if not os.path.exists(dataFolder+dataType):
122             os.mkdir(dataFolder+dataType)
123
124         # DO THE AVERAGES
125         infile = inputFolder + dataType + "/" + m + ".ssv"
126         meas_header=["module_name","measurement_date","measurement_time","unix-timestamp",dataType]
127         meas_dict = pd.read_csv(infile, names=meas_header, delimiter=' ', skiprows=1)
128         output_dict[dataType] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType]
129
130         if dataType == "TModule" or dataType == "PP4LV" or dataType == "ENV_TT":
131             output_dict.fillna(method='ffill',inplace=True)
132
133         if dataType == "HV_VMeas":
134             output_dict["HV_VMeas_0"] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType]
135             output_dict.fillna(method='ffill',inplace=True)
136             output_dict["HV_VMeas_1"] = meas_dict.groupby(pd.cut(meas_dict["unix-timestamp"],bins=b),as_index=False).mean()[dataType]
137             output_dict.fillna(method='bfill',inplace=True)
138             output_dict["HV_VMeas"] = output_dict[["HV_VMeas_0","HV_VMeas_1"]].mean(axis=1)
139
140         output_dict.plot.scatter(x="intlumi",y=dataType,marker=".")
141         plt.title(m)
142         plt.savefig(dataFolder+dataType+"/"+m+".png")
143         plt.close()
144
145         plt.scatter(times,output_dict[dataType],marker=".")
146         plt.title(m)
147         plt.savefig(dataFolder+dataType+"/"+m+"_time.png")
148         plt.close()
149
150     # Take cooling pipe temp
151     # output_dict['TModule'] = np.where(output_dict['TModule'] < -20, output_dict['ENV_TT'], output_dict['TModule'])
152
153     plt.scatter(times,output_dict["TModule"],marker=".",s=1,label="TModule")
154     plt.scatter(times,output_dict["ENV_TT"],marker=".",s=1,label="ENV_TT")
155     plt.legend()
156     plt.title(m)
157     plt.savefig(m+".png")
158     plt.close()
159
160     saveFileName = dataFolder + m + "_nocuts.ssv"
161     if os.path.exists(saveFileName):
162         os.remove(saveFileName)
163     output_dict.to_csv(saveFileName,index=False)
164
165     output_dict.dropna(inplace=True)
166
167     # Veto
168     output_dict = output_dict[abs(output_dict["HV_VMeas"]-output_dict["HV_VSet"])<1.0]
169
170     # Correct
171     output_dict["I_Eg1.12"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.12) / row["volume"] for i, row in output_dict.iterrows() ]
172     output_dict["I_Eg1.21"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.21) / row["volume"] for i, row in output_dict.iterrows() ]
173     output_dict["I_Eg1.30"] = [ row["HV_IMeas"] * tempCorr(row["TModule"],1.30) / row["volume"] for i, row in output_dict.iterrows() ]
174
175     if not os.path.exists(dataFolder+"I_Eg1.12"):
176         os.mkdir(dataFolder+"I_Eg1.12")
177     output_dict.plot.scatter("intlumi","I_Eg1.12",marker=".")
178     plt.title(m)
179     plt.savefig(dataFolder+"I_Eg1.12/"+m+".png")
180     plt.close()
181
182     if not os.path.exists(dataFolder+"I_Eg1.21"):
183         os.mkdir(dataFolder+"I_Eg1.21")
184     output_dict.plot.scatter("intlumi","I_Eg1.21",marker=".")
185     plt.title(m)
186     plt.savefig(dataFolder+"I_Eg1.21/"+m+".png")
187     plt.close()
188
189     if not os.path.exists(dataFolder+"I_Eg1.30"):
190         os.mkdir(dataFolder+"I_Eg1.30")
191     output_dict.plot.scatter("intlumi","I_Eg1.30",marker=".")
192     plt.title(m)
193     plt.savefig(dataFolder+"I_Eg1.30/"+m+".png")
194     plt.close()
195
196     saveFileName = dataFolder + m + ".ssv"
197     if os.path.exists(saveFileName):
198         os.remove(saveFileName)
199     output_dict.to_csv(saveFileName,index=False)
200
201 # Begin script
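The core pattern of averageData() is: take the per-day "start" times from the luminosity table as bin edges, average each measurement stream inside those bins with groupby(pd.cut(...)), and forward-fill bins that received no measurements (for HV_VMeas a forward-filled and a backward-filled copy are averaged instead). A minimal, self-contained sketch of that binning-and-fill step, using made-up timestamps and values in place of the real .ssv inputs:

import pandas as pd

# Hypothetical measurement stream: unix timestamps and one quantity.
meas = pd.DataFrame({
    "unix-timestamp": [10, 20, 130, 260, 270],
    "HV_IMeas":       [1.0, 2.0, 3.0, 4.0, 5.0],
})

# Bin edges standing in for the daily "start" times taken from lumi_df.
bins = pd.Series([0, 100, 200, 300, 400])

# Mean per time bin, as in averageData(); bins with no entries give NaN ...
binned = meas.groupby(pd.cut(meas["unix-timestamp"], bins=bins)).mean()["HV_IMeas"]

# ... which is then forward-filled so every bin carries the last known value.
binned = binned.ffill()
print(binned)

The sketch uses Series.ffill(), which is equivalent to the fillna(method='ffill') calls in the listing above; the method= form is deprecated in recent pandas versions.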

◆ main()

avgData.main()

Definition at line 202 of file avgData.py.

202 def main():
203
204     infile_lumi = "/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/Lumi/runData.txt"
205     lumi_header=["run","fill","lb","start","len","0","1","lumiall","intlumi"]
206     lumi_df=pd.read_csv(infile_lumi, names=lumi_header, delimiter=' ', skiprows=0)
207
208     # lumi_df.drop_duplicates(subset='intlumi',keep='first',inplace=True)
209
210     input_module = sys.argv[1]
211     averageData(input_module,lumi_df)
212
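main() reads the run/luminosity table with a fixed header and takes the module name from the first command-line argument, so the script processes one module per invocation. A short sketch of the same call done programmatically; the import of avgData and the choice of module name (LI_S11_A_M4, one of the names handled explicitly in averageData) are illustrative assumptions:

import pandas as pd
import avgData  # assumes avgData.py is importable from the working directory

# Same luminosity table and header as in main()
lumi_header = ["run","fill","lb","start","len","0","1","lumiall","intlumi"]
lumi_df = pd.read_csv("/eos/atlas/user/j/jdickins/Pixel/LeakageCurrent/IBLData/processedData/Lumi/runData.txt",
                      names=lumi_header, delimiter=' ', skiprows=0)

# Average the data for a single module (example name only)
avgData.averageData("LI_S11_A_M4", lumi_df)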

◆ tempCorr()

avgData.tempCorr(Temp, Eg)

Definition at line 12 of file avgData.py.

12 def tempCorr(Temp,Eg):
13
14     kB = 8.617*pow(10,-5) # eV/K
15     Tref = 273.0 # Reference temperature in K
16     Temp = Temp + 273 # Convert to K
17
18     return pow(1.0*Tref/Temp,2)*np.exp((-0.5*Eg/kB)*(1.0/Tref - 1.0/Temp))
19
20 # Jennet shamelessly steals Nick's code for bookkeeping
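tempCorr() implements the usual temperature scaling of silicon-sensor leakage current. With the module temperature passed in degrees Celsius and converted to kelvin, the returned factor is

    tempCorr(T, Eg) = (Tref / T)^2 * exp( -(Eg / (2*kB)) * (1/Tref - 1/T) )

with Tref = 273.0 K and kB = 8.617e-5 eV/K as set in the listing. Under that model, multiplying a current measured at temperature T by this factor scales it to the reference temperature Tref (the factor is exactly 1 at T = Tref). This is how the I_Eg1.12, I_Eg1.21 and I_Eg1.30 columns in averageData() are built, with the three Eg values corresponding to different choices of the effective band-gap energy in eV.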