Remove duplicates in an efficient way
This commit is contained in:
parent
75c3a94fbe
commit
bf7ca7f520
|
@ -39,6 +39,13 @@ def explore_solutions(solutions, data):
|
|||
return closest_elements.iloc[furthest_index]
|
||||
|
||||
|
||||
def remove_duplicates(current, previous, data):
    """Drop edges that connect the current point back to any already-visited point.

    Keeps only rows whose (source, destination) pair does NOT link ``current``
    with a member of ``previous`` in either direction.

    Parameters
    ----------
    current : scalar
        The point just added to the solution.
    previous : pandas.Series (or iterable)
        Points already included in the solution.
    data : pandas.DataFrame
        Edge table with at least ``source`` and ``destination`` columns.

    Returns
    -------
    pandas.DataFrame
        ``data`` with the duplicate-forming edges filtered out.
    """
    # Use the @-reference for `current` (like `previous`) instead of f-string
    # interpolation: interpolating the raw value breaks for string-typed
    # points and is an injection hazard in the query expression.
    data = data.query(
        "(source != @current or destination not in @previous)"
        " and (source not in @previous or destination != @current)"
    )
    return data
|
||||
|
||||
|
||||
def greedy_algorithm(n, m, data):
|
||||
solutions = DataFrame(columns=["point", "distance"])
|
||||
first_solution = get_first_solution(n, data)
|
||||
|
@ -46,6 +53,9 @@ def greedy_algorithm(n, m, data):
|
|||
for _ in range(m):
|
||||
element = explore_solutions(solutions, data)
|
||||
solutions = solutions.append(element)
|
||||
data = remove_duplicates(
|
||||
current=element["point"], previous=solutions["point"], data=data
|
||||
)
|
||||
return solutions
|
||||
|
||||
|
||||
|
|
Loading…
Reference in New Issue