当前位置: 代码迷 >> 综合 >> Leetcode 1515. Best Position for a Service Centre (python)
  详细解决方案

Leetcode 1515. Best Position for a Service Centre (python)

热度:81   发布时间:2023-11-26 06:24:02.0

题目

(图片:题目截图,未能从原文提取 / problem-statement image not extracted from the original page)

解法:gradient descent

还是第一次碰到用梯度下降做的题目。这个题目是没有办法找到精确解的,所以就能想到梯度下降了。这边梯度下降还得调一调参数,固定learning rate不太行,需要learning rate decay。也可以加momentum加快速度

with lr decay

class Solution:
    def getMinDistSum(self, positions: List[List[int]]) -> float:
        """Return the minimum possible sum of Euclidean distances from one
        service centre to all customer positions (the geometric median cost).

        Strategy: gradient descent with learning-rate decay. The objective
        (a sum of Euclidean norms) is convex, so descent from any start
        converges to the global optimum; a fixed learning rate either
        overshoots or stalls, hence the per-step decay.

        Args:
            positions: list of [x, y] customer coordinates.

        Returns:
            The minimal total distance as a float (0 for 0 or 1 customers).
        """
        n = len(positions)
        if n <= 1:
            # With at most one customer the centre sits on it: cost is 0.
            return 0

        def cal_grad(x, y):
            """Partial derivatives (d/dx, d/dy) of the total distance at (x, y)."""
            gx = gy = 0.0
            for xi, yi in positions:
                dist = ((x - xi) ** 2 + (y - yi) ** 2) ** 0.5
                if dist:
                    # The gradient is undefined exactly on a customer point;
                    # that term is skipped (its contribution is treated as 0).
                    gx += (x - xi) / dist
                    gy += (y - yi) / dist
            return gx, gy

        def cal_cost(x, y):
            """Total Euclidean distance from (x, y) to every position."""
            return sum(((x - xi) ** 2 + (y - yi) ** 2) ** 0.5
                       for xi, yi in positions)

        # Initialize at the centroid -- a cheap, good guess for the median.
        x = sum(p[0] for p in positions) / n
        y = sum(p[1] for p in positions) / n
        prev_cost = cal_cost(x, y)
        curr_cost = prev_cost          # pre-bind in case the loop exits early
        reduced_cost = float('inf')

        # Gradient descent with learning-rate decay; stop once an iteration
        # no longer changes the cost measurably.
        lr = 10
        decay = 1 - (10 ** -3)
        while abs(reduced_cost) > 1e-15:
            dx, dy = cal_grad(x, y)
            x -= lr * dx
            y -= lr * dy
            curr_cost = cal_cost(x, y)
            reduced_cost = prev_cost - curr_cost
            prev_cost = curr_cost
            lr *= decay
        return curr_cost

with lr decay and momentum

class Solution:
    def getMinDistSum(self, positions: List[List[int]]) -> float:
        """Return the minimum possible sum of Euclidean distances from one
        service centre to all customer positions (the geometric median cost).

        Strategy: gradient descent with momentum plus learning-rate decay.
        Momentum accumulates past gradients to speed up convergence, which
        allows a faster decay schedule than plain gradient descent.

        Args:
            positions: list of [x, y] customer coordinates.

        Returns:
            The minimal total distance as a float (0 for 0 or 1 customers).
        """
        n = len(positions)
        if n <= 1:
            # With at most one customer the centre sits on it: cost is 0.
            return 0

        def cal_grad(x, y):
            """Partial derivatives (d/dx, d/dy) of the total distance at (x, y)."""
            gx = gy = 0.0
            for xi, yi in positions:
                dist = ((x - xi) ** 2 + (y - yi) ** 2) ** 0.5
                if dist:
                    # The gradient is undefined exactly on a customer point;
                    # that term is skipped (its contribution is treated as 0).
                    gx += (x - xi) / dist
                    gy += (y - yi) / dist
            return gx, gy

        def cal_cost(x, y):
            """Total Euclidean distance from (x, y) to every position."""
            return sum(((x - xi) ** 2 + (y - yi) ** 2) ** 0.5
                       for xi, yi in positions)

        # Initialize at the centroid -- a cheap, good guess for the median.
        x = sum(p[0] for p in positions) / n
        y = sum(p[1] for p in positions) / n
        prev_cost = cal_cost(x, y)
        curr_cost = prev_cost          # pre-bind in case the loop exits early
        reduced_cost = float('inf')
        dx = 0.0                       # momentum accumulators (velocity)
        dy = 0.0

        # Gradient descent with momentum and learning-rate decay; stop once
        # an iteration no longer changes the cost measurably.
        lr = 1
        momentum = 0.9
        decay = 0.98
        while abs(reduced_cost) > 1e-15:
            gx, gy = cal_grad(x, y)
            # Velocity = fresh gradient + decayed memory of past gradients.
            dx = gx + momentum * dx
            dy = gy + momentum * dy
            x -= lr * dx
            y -= lr * dy
            curr_cost = cal_cost(x, y)
            reduced_cost = prev_cost - curr_cost
            prev_cost = curr_cost
            lr *= decay
        return curr_cost
  相关解决方案