def parse_args():
    """Parse command-line options for building the GBTS linking table.

    Returns:
        argparse.Namespace with output_file, min_link_probability, eta_bins,
        no_symmetrize, write_inverse_linking_scheme, write_scheme_file and
        input_csv attributes.
    """
    parser = argparse.ArgumentParser(description="create GBTS linking table")
    parser.add_argument(
        "-o", "--output_file",
        default="binTables_ITK_RUN4.txt",
        help="output table for use by GBTS (default=%(default)s)")
    parser.add_argument(
        "-p", "--min-link-probability", type=float, default=0.01,
        help="minimum linking probability (default=%(default)s, LRT=0.001)")
    parser.add_argument(
        "-e", "--eta-bins", type=float, default=0.2,
        help="eta bin width (default=%(default)s)")
    parser.add_argument(
        "-a", "--no-symmetrize", action='store_true',
        help="do not symmetrize by adding missing layers on the other side")
    parser.add_argument(
        "-i", "--write-inverse-linking-scheme", action='store_true',
        help="write inverse linking scheme table")
    parser.add_argument(
        "-w", "--write-scheme-file",
        default="inverse_linking_scheme_ITK_RUN4.txt",
        help="inverse linking scheme table file name (default=%(default)s)")
    parser.add_argument(
        "input_csv", nargs="?", default="layer_connection_table.csv",
        help="input CSV layer connection table, output from Athena (default=%(default)s)")
    return parser.parse_args()
# Read the layer-connection table produced by Athena and drop weak links.
print(f"Read {args.input_csv}")

link_df = pd.read_csv(args.input_csv)
link_df_orig_len = len(link_df)

# Remove links whose linking probability falls below the configured cut.
link_df = link_df.drop(link_df[link_df['probability'] < args.min_link_probability].index)

print(f"Drop {link_df_orig_len-len(link_df)} ({link_df_orig_len}->{len(link_df)}) links with p<{args.min_link_probability}")

layer_from_idx = link_df['from'].values
layer_to_idx = link_df['to'].values

# All distinct layer identifiers appearing on either end of any surviving link.
layer_idx = np.unique(np.concatenate((layer_from_idx, layer_to_idx)))
# Create one Layer bookkeeping object per known layer id.
# NOTE(review): the enclosing loop header is missing from this extract —
# presumably `for id in layer_idx:`; `id` also shadows the builtin. TODO confirm.
layer_dict[id] = Layer(id)
# Series of layer ids used to count remaining targets per layer.
# NOTE(review): named dst_idx but reads the 'from' column — the GBTS linking
# direction may be inverted relative to the CSV; confirm against the
# surrounding (missing) counting loop.
dst_idx = link_df['from']
# Partition layers: those with no remaining targets are the first-stage
# sources; everything else is still a destination of some link.
src_dict = {k: v for k, v in layer_dict.items() if v.nTargets == 0}
dst_dict = {k: v for k, v in layer_dict.items() if v.nTargets > 0}
# Iteratively peel off linking stages: record the current sources, consume
# the links pointing at them, then re-partition by the remaining target
# counts until one side is exhausted.
# NOTE(review): this extract is missing the stage-loop header, the iteration
# counter init/increment, the inner `for d in dst_vec:` loop header and the
# loop-exit `break`; only the statements visible in the extract are kept,
# with placeholders where structure is missing. `iter` shadows the builtin
# but is kept since its definition lies in the missing lines.
print('Creating stages...')

print('Iteration ', iter, 'N src', len(src_dict), 'N dst', len(dst_dict))

src_layer_dict[iter] = []
for key, lay in src_dict.items():
    src_layer_dict[iter].append(key)
dst_layer_dict[iter] = []

for key, lay in src_dict.items():
    # Links that feed into this source layer.
    dst_df = link_df[link_df['to'] == key]
    dst_vec = dst_df['from']
    # NOTE(review): the loop header binding `d` (presumably
    # `for d in dst_vec:`) is missing from this extract — confirm.
    dst_dict[d].nTargets -= 1
    dst_layer_dict[iter].append(d)

# Re-partition: layers whose remaining target count dropped to zero become
# the sources of the next stage.
src_dict = {k: v for k, v in dst_dict.items() if v.nTargets == 0}
dst_dict = {k: v for k, v in dst_dict.items() if v.nTargets > 0}
dst_layer_dict[iter] = list(set(dst_layer_dict[iter]))

if len(src_dict) == 0 or len(dst_dict) == 0:
    pass  # NOTE(review): loop-exit body (likely `break`) missing from extract

print('Last iteration ', iter, 'N src', len(src_dict), 'N dst', len(dst_dict))
# Symmetrize: for each link seen on one detector side, schedule the mirrored
# link on the other side if it is not already present.
# NOTE(review): many lines are missing from this extract — the loops binding
# p1/p2, the found-flag logic, and the if/else branches selecting the 9xxxx
# vs 7xxxx volume prefixes. Only the visible statements are reproduced.
if not args.no_symmetrize:
    for stage, coll in pair_dict.items():
        # Layer ids appear to encode volume*10000 + layer-remainder; the
        # arithmetic below suggests volume 8 is the (self-symmetric) barrel
        # and 7/9 the two endcap sides — TODO confirm.
        src_vol = p1.src // 10000
        dst_vol = p1.dst // 10000
        # Barrel-to-barrel links have no mirror image to add.
        skip = src_vol == 8 and dst_vol == 8
        src_rem1 = p1.src % 10000
        dst_rem1 = p1.dst % 10000
        src_rem2 = p2.src % 10000
        dst_rem2 = p2.dst % 10000
        if src_rem1 == src_rem2 and dst_rem1 == dst_rem2:
            pass  # NOTE(review): body missing — presumably marks the mirror link as already present
        # NOTE(review): the conditionals choosing between the two prefixes
        # are missing; both alternative assignments appear in the original.
        new_src = 90000 + src_rem1
        new_src = 70000 + src_rem1
        new_dst = 90000 + dst_rem1
        new_dst = 70000 + dst_rem1
        missing_links.append([stage, new_src, new_dst])
# Add the mirrored links discovered above to their stage's pair collection.
for ml in missing_links:
    pair_dict[ml[0]].append(Pair(ml[1], ml[2], ml[0]))

# Total number of connections over all stages.
# NOTE(review): the `nConnsTotal = 0` initialization lies in lines missing
# from this extract — confirm it exists before this loop.
for i, coll in pair_dict.items():
    nConnsTotal += len(coll)
# Optionally dump the inverse linking scheme: a header line with the number
# of stages, then per stage its id and pair count followed by (src, dst)
# pairs. Use a context manager so the file is closed even on error (the
# extract shows no explicit close).
if args.write_inverse_linking_scheme:
    with open(args.write_scheme_file, 'w') as link_file:
        link_file.write('%d\n' % (len(pair_dict)))
        for i, coll in pair_dict.items():
            link_file.write('%d %d\n' % (i, len(coll)))
            # NOTE(review): the inner loop header binding `p` (presumably
            # `for p in coll:`) is missing from this extract — confirm.
            link_file.write('%d %d \n' % (p.src, p.dst))
# Write the GBTS binning table: a header with the total connection count and
# eta bin width, then one line per connection plus its bin-table payload.
print(f"Write output file {args.output_file} with {nConnsTotal} connections and eta bin width {args.eta_bins}")

# Context manager ensures the table file is flushed and closed (the extract
# shows no explicit close).
with open(args.output_file, 'w') as bin_table:
    bin_table.write(f"{nConnsTotal} {args.eta_bins}\n")
    for stage, coll in pair_dict.items():
        # NOTE(review): the `conn_counter` init/increment and the inner
        # `for p in coll:` loop header are missing from this extract; the
        # trailing constants (1 1 100 / 100) look like fixed bin payloads —
        # confirm against the GBTS reader.
        bin_table.write('%d %d %d %d 1 1 100\n' % (conn_counter, stage, p.src, p.dst))
        bin_table.write('100\n')