Skip to content

Commit

Permalink
Print statement changes; currently looking at why there are issues with the current point not being a numpy array.
Browse files Browse the repository at this point in the history
  • Loading branch information
josephward committed Apr 8, 2024
1 parent 6f6e81c commit e699b9f
Show file tree
Hide file tree
Showing 3 changed files with 31 additions and 13 deletions.
Binary file not shown.
28 changes: 16 additions & 12 deletions rosplane_tuning/src/autotune/optimizer.py
Original file line number Diff line number Diff line change
Expand Up @@ -40,6 +40,7 @@ def __init__(self, initial_gains, optimization_params):
self.init_alpha = optimization_params['alpha']
self.tau = optimization_params['tau']
self.state = OptimizerState.FINDING_GRADIENT # Find the initial gradient from the starting gains
self.initial_gains = initial_gains

# Line Search Variables
self.k = 0
Expand Down Expand Up @@ -71,7 +72,10 @@ def get_optimiztion_status(self):
str: The status of the optimization algorithm.
"""
# Return with the final values, the best value found
return 'TODO: Status not implemented.'
if self.state == OptimizerState.TERMINATED:
return 'Optimization Terminated'
else:
return "Optimization in Progress"

def get_next_parameter_set(self, error):
"""
Expand Down Expand Up @@ -103,9 +107,9 @@ def get_next_parameter_set(self, error):
elif self.state == OptimizerState.SELECT_DIRECTION:

if self.k == 0:
new_gains = self.line_search(self.initial_gains, error, self.save_gains, self.new_phis)
new_gains = self.line_search(self.initial_gains, error, self.save_gains, self.save_phis)
else:
new_gains = self.line_search()
new_gains = self.line_search(self.current_gains, error, self.save_gains, self.save_phis)

return new_gains

Expand Down Expand Up @@ -187,9 +191,9 @@ def line_search(self, gains, phi, gainsh, phih):
self.p = -phi_prime/np.linalg.norm(phi_prime) + bk*prior_p

# Prepare for bracketing
self.OptimizerState = OptimizerState.BRACKETING
self.state = OptimizerState.BRACKETING
# Request phi2 and phi2+h
gains2 = gains + self.a_init*p
gains2 = gains + self.init_alpha*self.p
gains2h = [gain + 0.01 for gain in gains2]
new_gains = np.array([gains2, gains2h])

Expand Down Expand Up @@ -230,13 +234,13 @@ def bracketing(self, gains, gainsh, phi1, phi1_prime, gains2, gains2h, phi2, phi
# Request new point
alphap = self.interpolate(alpha1, alpha2)
gainsp = self.init_gains + alphap*self.p
new_gains = [self.save_gains[4], self.save_gains[5]]
new_gains = np.array([self.save_gains[4], self.save_gains[5]])
return new_gains

# Optimized
if abs(phi2_prime) <= -self.u2*phi1_prime:
self.state == OptimizerState.SELECT_DIRECTION
new_gains = self.init_gains + alpha2*self.p
new_gains = np.array([self.init_gains + alpha2*self.p])
self.current_gains = new_gains
return new_gains

Expand All @@ -249,7 +253,7 @@ def bracketing(self, gains, gainsh, phi1, phi1_prime, gains2, gains2h, phi2, phi
# Request new point
alphap = self.interpolate(alpha1, alpha2)
gainsp = self.init_gains + alphap*self.p
new_gains = [self.save_gains[4], self.save_gains[5]]
new_gains = np.array([self.save_gains[4], self.save_gains[5]])
return new_gains

# Needs more Bracketing
Expand All @@ -263,7 +267,7 @@ def bracketing(self, gains, gainsh, phi1, phi1_prime, gains2, gains2h, phi2, phi
gains1h = [gain + 0.01 for gain in gains1]
gains2h = [gain + 0.01 for gain in gains2]

new_gains = [gains1, gains1h, gains2, gains2h]
new_gains = np.array([gains1, gains1h, gains2, gains2h])
return new_gains

def interpolate(self, alpha1, alpha2):
Expand Down Expand Up @@ -301,14 +305,14 @@ def pinpointing(self, gains1, phi1, phi1_prime, gains2, gainsp, phip, phip_prime
phi2_prime = phip_prime
self.save_gains = np.array([gains1, None, gains2, None, gainsp, [gain + 0.01 for gain in gainsp]])
self.save_phis = np.array([phi1, phi1_prime, phi2, phi2_prime])
new_gains = [self.save_gains[4], self.save_gains[5]]
new_gains = np.array([self.save_gains[4], self.save_gains[5]])
return new_gains
else:
# Optimized
if abs(phip_prime) <= -self.u2*phi1_prime:
self.state == OptimizerState.SELECT_DIRECTION
alphastar = alphap
new_gains = self.init_gains + alphastar*self.p
new_gains = np.array([self.init_gains + alphastar*self.p])
return new_gains
# More parameterization needed
elif phip_prime*(alpha2 - alpha1) >= 0:
Expand All @@ -321,7 +325,7 @@ def pinpointing(self, gains1, phi1, phi1_prime, gains2, gainsp, phip, phip_prime

self.save_gains = np.array([gains1, None, gains2, None, gainsp, [gain + 0.01 for gain in gainsp]])
self.save_phis = np.array([phi1, phi1_prime, phi2, phi2_prime])
new_gains = [self.save_gains[4], self.save_gains[5]]
new_gains = np.array([self.save_gains[4], self.save_gains[5]])
return new_gains

# Check for failure criteria - the nuclear option
Expand Down
16 changes: 15 additions & 1 deletion rosplane_tuning/src/autotune/optimizer_tester.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,8 +7,12 @@
# Function to test the optimizer with
def function(x):
    """Matyas test function: f(x0, x1) = 0.26*(x0^2 + x1^2) - 0.48*x0*x1.

    Used as the objective for exercising the optimizer; its minimum is 0
    at the origin. `x` is an indexable pair of coordinates.
    """
    # Debug trace of each evaluation point (kept for tuning diagnostics).
    print("f", x)
    x0, x1 = x[0], x[1]
    quadratic_term = 0.26 * (x0 * x0 + x1 * x1)
    cross_term = 0.48 * x0 * x1
    return quadratic_term - cross_term

def gradient(x):
    """Analytic gradient of the Matyas function at point `x`.

    Returns a numpy array [df/dx0, df/dx1] for the indexable pair `x`.
    """
    x0, x1 = x[0], x[1]
    d_x0 = 0.52 * x0 - 0.48 * x1
    d_x1 = 0.52 * x1 - 0.48 * x0
    return np.array([d_x0, d_x1])

# Initialize optimizer
curr_points = np.array([[0, 5]]) # Initial point
Expand All @@ -22,22 +26,32 @@ def function(x):

# Run optimization
# Main driver loop: repeatedly evaluate the objective at the optimizer's
# requested points and feed the errors back until termination.
# NOTE(review): relies on `optimizer` and `curr_points` set up earlier in
# this script (not visible here) — presumably an Optimizer instance and an
# initial array of points; confirm against the setup section.
all_points = []  # accumulates every point the optimizer has visited
k = 0  # iteration counter (diagnostics only)
while not optimizer.optimization_terminated():
    print("Iteration ", k)
    # Print status
    print(optimizer.get_optimiztion_status())
    print(optimizer.state)

    # Calculate error (objective value) for each current point
    error = []
    # print(curr_points) # Testing
    for point in curr_points:
        error.append(function(point))
    error = np.array(error)

    # Pass errors to the optimizer; it returns the next set of points to evaluate
    print("CP", curr_points) # Testing
    # print("G", gradient(curr_points[0])) # Testing
    curr_points = optimizer.get_next_parameter_set(error)
    print("CP", curr_points) # Testing

    # Store points for later inspection/plotting
    for point in curr_points:
        all_points.append(point)

    # End iteration step
    k += 1
    print()
all_points = np.array(all_points)

print('Optimization terminated with status: {}'.format(optimizer.get_optimiztion_status()))
Expand Down

0 comments on commit e699b9f

Please sign in to comment.