Remove duplicates in an efficient way

This commit is contained in:
coolneng 2021-04-13 22:44:31 +02:00
parent 75c3a94fbe
commit bf7ca7f520
Signed by: coolneng
GPG Key ID: 9893DA236405AF57
1 changed file with 10 additions and 0 deletions

View File

@ -39,6 +39,13 @@ def explore_solutions(solutions, data):
return closest_elements.iloc[furthest_index]
def remove_duplicates(current, previous, data):
    """Filter out rows whose (source, destination) edge duplicates, in either
    direction, an edge touching the just-selected point.

    A row is dropped when it connects ``current`` with any point already in
    ``previous`` (regardless of which column holds which endpoint).

    :param current: the point most recently added to the solution
    :param previous: collection (list/Series) of previously selected points
    :param data: DataFrame with at least ``source`` and ``destination`` columns
    :return: the filtered DataFrame
    """
    # Reference both locals with pandas' @-syntax instead of f-string
    # interpolation: avoids query-string injection and also works when the
    # point identifiers are strings rather than numbers.
    data = data.query(
        "(source != @current or destination not in @previous) and (source not in @previous or destination != @current)"
    )
    return data
def greedy_algorithm(n, m, data):
solutions = DataFrame(columns=["point", "distance"])
first_solution = get_first_solution(n, data)
@ -46,6 +53,9 @@ def greedy_algorithm(n, m, data):
for _ in range(m):
element = explore_solutions(solutions, data)
solutions = solutions.append(element)
data = remove_duplicates(
current=element["point"], previous=solutions["point"], data=data
)
return solutions