@@ -124,14 +124,14 @@ def normalize_with(mean, std):
     1.0,
 ]
 
-results = dict()
-
-# for vdepth in ventraldepths:
-#     results[vdepth] = dict()
-#     for bn in bottlenecks:
-#         results[vdepth][bn] = dict()
-results[n_bn] = dict()
-results[n_bn][rep] = dict()
+# results = dict()
+#
+# # for vdepth in ventraldepths:
+# #     results[vdepth] = dict()
+# #     for bn in bottlenecks:
+# #         results[vdepth][bn] = dict()
+# results[n_bn] = dict()
+# results[n_bn][rep] = dict()
 
 model = RetinalBottleneckModel(n_bn, 'resnet50', n_out=1000, n_inch=3, retina_kernel_size=7, transform=normalize)
 model.load_state_dict(torch.load(dir + model_file + '.pt'))
@@ -146,7 +146,7 @@ def normalize_with(mean, std):
 
 # results[vdepth][bn][run]["accuracy"] = accuracy(fmodel, images, labels)
 
-attack_success = np.zeros((len(attacks), len(epsilons), len(testset)), dtype=np.bool)
+attack_success = np.zeros((len(epsilons), len(testset)), dtype=np.bool)
 # for i, attack in enumerate(attacks):
 print(attack)
 idx = 0
@@ -156,14 +156,19 @@ def normalize_with(mean, std):
 
     _, _, success = attack(fmodel, images, labels, epsilons=epsilons)
     success_ = success.cpu().numpy()
-    attack_success[attack_index][:, idx:idx + len(labels)] = success_
+    attack_success[:, idx:idx + len(labels)] = success_
     idx = idx + len(labels)
 # print("")
 # for i, attack in enumerate(attacks):
-results[n_bn][rep][str(attack)] = (1.0 - attack_success[attack_index].mean(axis=-1)).tolist()
-
-robust_accuracy = 1.0 - attack_success.max(axis=0).mean(axis=-1)
-results[n_bn][rep]['robust_accuracy'] = robust_accuracy.tolist()
+import pickle
+with open(f'results-imagenet-linf-{n_bn}-{rep}-{attack_index}.p', 'wb') as f:
+    # Pickle the 'data' dictionary using the highest protocol available.
+    pickle.dump(attack_success, f)
 
-with open(f'results-imagenet-linf-{n_bn}-{rep}-{attack_index}.json', 'w') as fp:
-    json.dump(results, fp)
+# results[n_bn][rep][str(attack)] = (1.0 - attack_success[attack_index].mean(axis=-1)).tolist()
+#
+# robust_accuracy = 1.0 - attack_success.max(axis=0).mean(axis=-1)
+# results[n_bn][rep]['robust_accuracy'] = robust_accuracy.tolist()
+#
+# with open(f'results-imagenet-linf-{n_bn}-{rep}-{attack_index}.json', 'w') as fp:
+#     json.dump(results, fp)
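
A minimal sketch of how the per-job pickles written above could later be aggregated, assuming the file naming pattern used in this diff; n_bn, rep, n_attacks, and success_per_attack are placeholder names introduced here. It mirrors the per-attack accuracy and robust-accuracy computation that the commented-out JSON code used to perform.

# Sketch: recombine the pickled boolean success arrays offline.
import pickle
import numpy as np

n_bn, rep = 1, 0      # placeholder bottleneck size and repeat index
n_attacks = 3         # placeholder: number of attack_index jobs that were run

success_per_attack = []
for attack_index in range(n_attacks):
    with open(f'results-imagenet-linf-{n_bn}-{rep}-{attack_index}.p', 'rb') as f:
        # each array has shape (len(epsilons), len(testset)) with boolean entries
        success_per_attack.append(pickle.load(f))

attack_success = np.stack(success_per_attack)                      # (n_attacks, n_eps, n_images)
per_attack_accuracy = 1.0 - attack_success.mean(axis=-1)           # accuracy under each attack
robust_accuracy = 1.0 - attack_success.max(axis=0).mean(axis=-1)   # worst case over all attacks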