Commit 85b0ec8f authored by Matthijs's avatar Matthijs
Browse files

making use of the self.diagnostic variable

parent 714281e6
......@@ -58,7 +58,7 @@ class SimpleAlgorithm:
self.norm_truth = config['norm_truth']
if 'diagnostic' in config:
self.diagnostic = True
self.diagnostic = config['diagnostic']
else:
self.diagnostic = False
......@@ -102,7 +102,7 @@ class SimpleAlgorithm:
if hasattr(self, 'truth'):
Relerrs = change.copy()
# Different algorithms will have different quatities that one
# Different algorithms will have different quantities that one
# should monitor. Since we take a fixed point perspective, the main
# thing to monitor is the change in the iterates.
shadow = self.prox1.work(u)
......@@ -115,7 +115,7 @@ class SimpleAlgorithm:
config['u'] = u
u_new = self.evaluate(u)
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
# the next prox operation only gets used in the computation of
# the size of the gap. This extra step is not
# required in alternating projections, which makes RAAR
......@@ -132,7 +132,7 @@ class SimpleAlgorithm:
# tmp_change = (norm(u - u_new, 'fro') / norm_data) ** 2
tmp_change = phase_offset_compensated_norm(u, u_new, norm_type='fro', norm_factor=norm_data) ** 2
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
# For Douglas-Rachford, in general it is appropriate to monitor the
# SHADOWS of the iterates, since in the convex case these converge
# even for beta=1.
......@@ -157,24 +157,24 @@ class SimpleAlgorithm:
elif q == 1:
for j in range(self.product_space_dimension):
tmp_change = tmp_change + (norm(u[:, :, j] - u_new[:, :, j], 'fro') / norm_data) ** 2
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
# compute (||P_SP_Mx-P_Mx||/norm_data)^2:
tmp_gap = tmp_gap + (norm(u1[:, :, j] - u2[:, :, j], 'fro') / norm_data) ** 2
tmp_shadow = tmp_shadow + (norm(u2[:, :, j] - shadow[:, :, j], 'fro') / norm_data) ** 2
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
if hasattr(self, 'truth'):
z = u1[:, :, 0]
Relerrs[iter] = norm((self.truth - exp(-1j * angle(trace(self.truth.T.transpose() * z))) * z),'fro') / self.norm_truth
else:
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
if hasattr(self, 'truth'):
Relerrs[iter] = 0
for j in range(self.product_space_dimension):
for k in range(self.Nz):
tmp_change = tmp_change + (norm(u[:, :, k, j] - u_new[:, :, k, j], 'fro') / norm_data) ** 2
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
# compute (||P_Sx-P_Mx||/norm_data)^2:
tmp_gap = tmp_gap + (norm(u1[:, :, k, j] - u2[:, :, k, j], 'fro') / (norm_data)) ** 2
tmp_shadow = tmp_shadow + (
......@@ -185,7 +185,7 @@ class SimpleAlgorithm:
'fro') / self.norm_Truth
change[iter] = sqrt(tmp_change)
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
gap[iter] = sqrt(tmp_gap)
shadow_change[iter] = sqrt(tmp_shadow) # this is the Euclidean norm of the gap to
# the unregularized set. To monitor the Euclidean norm of the gap to the
......@@ -196,7 +196,7 @@ class SimpleAlgorithm:
# update
u = u_new
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
# For Douglas-Rachford, in general it is appropriate to monitor the
# SHADOWS of the iterates, since in the convex case these converge
# even for beta=1.
......@@ -219,7 +219,7 @@ class SimpleAlgorithm:
change = change[1:iter + 1]
output = {'u': u, 'u1': u1, 'u2': u2, 'iter': iter, 'change': change}
if 'diagnostic' in self.config and self.config['diagnostic']:
if self.diagnostic:
gap = gap[1:iter + 1]
shadow_change = shadow_change[1:iter + 1]
output['gap'] = gap
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment