Update 'algorithm/optimization/gradient.py'
This commit is contained in:
parent
0a8eb87534
commit
b35805d96b
|
@ -346,6 +346,30 @@ class gd2d_compete(object):
|
|||
display(self.compute_output)
|
||||
self.button_plot.on_click(self.plot)
|
||||
display(self.plot_output)
|
||||
|
||||
def compute_default(self, *args):
    """Run plain gradient descent on the 2-D expression taken from the widgets.

    Reads the start point (``wg_x0``, "a,b" text), learning rate (``wg_lr``),
    stopping tolerance (``wg_epsilon``) and iteration cap (``wg_max_iter``)
    from the ipywidgets controls, records every iterate in ``self.xn_list``
    and every gradient in ``self.df_list``, and prints either the solution
    or a failure notice into ``self.compute_output``.

    Returns:
        None in all cases; results are communicated via printed output
        and the ``xn_list`` / ``df_list`` attributes.
    """
    with self.compute_output:
        # Parse the "a,b"-style widget text into a float start point.
        xn = np.array(self.wg_x0.value.split(","), dtype=float)
        x1 = symbols("x1")
        x2 = symbols("x2")
        expr = sympify(self.wg_expr.value)
        # Differentiate ONCE, outside the loop: the symbolic partial
        # derivatives are loop-invariant, and re-deriving them on every
        # iteration (as before) wastes O(iterations) symbolic work.
        dexpr_dx1 = diff(expr, x1)
        dexpr_dx2 = diff(expr, x2)
        self.xn_list, self.df_list = [], []

        for n in tqdm(range(self.wg_max_iter.value)):
            # Numeric gradient at the current iterate.
            gradient = np.array([dexpr_dx1.subs(x1, xn[0]).subs(x2, xn[1]),
                                 dexpr_dx2.subs(x1, xn[0]).subs(x2, xn[1])],
                                dtype=float)
            self.xn_list.append(xn)
            self.df_list.append(gradient)
            # Converged when the Euclidean norm of the gradient is small.
            if np.linalg.norm(gradient, ord=2) < self.wg_epsilon.value:
                clear_output(wait=True)
                print("Found solution of {} after".format(expr), n, "iterations")
                print("x* = [{}, {}]".format(xn[0], xn[1]))
                return None
            # Descent step: x_{n+1} = x_n - lr * grad f(x_n).
            # (Creates a new array, so the value appended above is preserved.)
            xn = xn - self.wg_lr.value * gradient
        clear_output(wait=True)
        display("Exceeded maximum iterations. No solution found.")
        return None
|
||||
|
||||
def compute(self, *args):
|
||||
with self.compute_output:
|
||||
|
@ -358,16 +382,28 @@ class gd2d_compete(object):
|
|||
self.xn_p1_list.append(x0)
|
||||
direction_p0 = np.array(self.wg_direction_p0.value.split(","), dtype=float)
|
||||
direction_p1 = np.array(self.wg_direction_p1.value.split(","), dtype=float)
|
||||
|
||||
self.timer = self.timer + 1
|
||||
# calcualte next point position
|
||||
x0_p0 = self.xn_p0_list[self.timer-1] + self.wg_lr.value * direction_p0
|
||||
x0_p1 = self.xn_p1_list[self.timer-1] + self.wg_lr.value * direction_p1
|
||||
self.xn_p0_list.append(x0_p0)
|
||||
self.xn_p1_list.append(x0_p1)
|
||||
self.xn_p1_list.append(x0_p1)
|
||||
f = lambdify((x1, x2), expr, "numpy")
|
||||
|
||||
f_xn = f(np.array([x0_p0, x0_p1])[:, 0], np.array([x0_p0, x0_p1])[:, 1])
|
||||
|
||||
self.expr = expr
|
||||
self.x0_p0 = x0_p0
|
||||
self.x0_p1 = x0_p1
|
||||
gradient_p0 = np.array([diff(expr, x1).subs(x1, x0_p0[0]).subs(x2, x0_p0[1]),
|
||||
diff(expr, x2).subs(x1, x0_p0[0]).subs(x2, x0_p0[1])], dtype=float)
|
||||
gradient_p1 = np.array([diff(expr, x1).subs(x1, x0_p1[0]).subs(x2, x0_p1[1]),
|
||||
diff(expr, x2).subs(x1, x0_p1[0]).subs(x2, x0_p1[1])], dtype=float)
|
||||
|
||||
clear_output(wait=True)
|
||||
print("a1({}): {}, a2({}): {}".format(self.timer, np.append(x0_p0, f_xn[0]), self.timer,np.append(x0_p1, f_xn[1])))
|
||||
print("coordinate: a1({}): {}, a2({}): {}".format(self.timer, np.append(x0_p0, f_xn[0]), self.timer,np.append(x0_p1, f_xn[1])))
|
||||
print("gradient: a1({}): {}, a2({}): {}".format(self.timer, gradient_p0, self.timer,gradient_p1))
|
||||
|
||||
def plot(self, *args):
|
||||
with self.plot_output:
|
||||
|
|
Loading…
Reference in New Issue