diff --git a/chl/draw_func.py b/chl/draw_func.py
index 0e8a784..f2a1501 100755
--- a/chl/draw_func.py
+++ b/chl/draw_func.py
@@ -86,8 +86,8 @@ from math import sqrt, sin, cos, atan2, pi
import numpy as np
# XPRESS ENVIRONMENT
-os.environ['XPRESSDIR'] = "/opt/xpressmp_9.2.5"
-os.environ['XPRESS'] = "/opt/xpressmp_9.2.5/bin"
+os.environ['XPRESSDIR'] = "/opt/xpressmp_9.5.0"
+os.environ['XPRESS'] = "/opt/xpressmp_9.5.0/bin"
os.environ['LD_LIBRARY_PATH'] = os.environ['XPRESSDIR'] + \
"/lib:"+os.environ['LD_LIBRARY_PATH']
os.environ['DYLD_LIBRARY_PATH'] = os.environ['XPRESSDIR']+"/lib:"
@@ -326,13 +326,64 @@ def Availabilities(scenario_id,showSolution = "0"):
class Draw_Simulator:
- def __init__(self,algorithm, opponent_func, html_output, use_db=False):
+ def __init__(self,algorithm, opponent_func, html_output, use_db=False, use_chess=False):
+ self.chess = use_chess
+ self.blue_groups = []
+ self.red_groups = []
+ if self.chess:
+
+ # basepots {'D': [
+ # {'id': 47715, 'pot': 'D', 'name': 'Rouen Dragons', 'country': 'France', 'coeff': 2.0, 'lat': 49.443232, 'lon': 1.099971},
+ # {'id': 47747, 'pot': 'D', 'name': 'Fehérvár AV19', 'country': 'Austria', 'coeff': 3.0, 'lat': 47.1860262, 'lon': 18.4221358},
+ # {'id': 52169, 'pot': 'D', 'name': 'Sheffield Steelers', 'country': 'United Kingdom', 'coeff': 3.0, 'lat': 53.38112899999999, 'lon': -1.470085},
+ # {'id': 52170, 'pot': 'D', 'name': 'SønderjyskE Vojens', 'country': 'Denmark', 'coeff': 1.0, 'lat': 55.249489, 'lon': 9.3019649},
+ # {'id': 52171, 'pot': 'D', 'name': 'Unia Oświęcim', 'country': 'Poland', 'coeff': 1.0, 'lat': 50.0343982, 'lon': 19.2097782},
+ # {'id': 47741, 'pot': 'D', 'name': 'Storhamar Hamar', 'country': 'Norway', 'coeff': 1.0, 'lat': 60.794442, 'lon': 11.0442983}], 'C': [
+ # {'id': 52167, 'pot': 'C', 'name': 'KAC Klagenfurt', 'country': 'Austria', 'coeff': 3.0, 'lat': 46.625704, 'lon': 14.3137371},
+ # {'id': 47742, 'pot': 'C', 'name': 'Växjö Lakers', 'country': 'Sweden', 'coeff': 4.0, 'lat': 56.8790044, 'lon': 14.8058522},
+ # {'id': 47743, 'pot': 'C', 'name': 'Straubing Tigers', 'country': 'Germany', 'coeff': 4.0, 'lat': 48.8777333, 'lon': 12.5801538},
+ # {'id': 47755, 'pot': 'C', 'name': 'Sparta Prague', 'country': 'Czechia', 'coeff': 4.0, 'lat': 50.0755381, 'lon': 14.4378005},
+ # {'id': 52168, 'pot': 'C', 'name': 'Lausanne HC', 'country': 'Switzerland', 'coeff': 3.0, 'lat': 46.5196535, 'lon': 6.6322734},
+ # {'id': 47737, 'pot': 'C', 'name': 'Lahti Pelicans', 'country': 'Finland', 'coeff': 2.0, 'lat': 60.9826749, 'lon': 25.6612096}], 'B': [
+ # {'id': 52166, 'pot': 'B', 'name': 'Pinguins Bremerhaven', 'country': 'Germany', 'coeff': 4.0, 'lat': 53.5395845, 'lon': 8.5809424},
+ # {'id': 47761, 'pot': 'B', 'name': 'Red Bull Salzburg', 'country': 'Austria', 'coeff': 3.0, 'lat': 47.80949, 'lon': 13.05501},
+ # {'id': 47724, 'pot': 'B', 'name': 'Ilves Tampere', 'country': 'Finland', 'coeff': 4.0, 'lat': 61.4977524, 'lon': 23.7609535},
+ # {'id': 47735, 'pot': 'B', 'name': 'Dynamo Pardubice', 'country': 'Czechia', 'coeff': 4.0, 'lat': 50.0343092, 'lon': 15.7811994},
+ # {'id': 47740, 'pot': 'B', 'name': 'Färjestad Karlstad', 'country': 'Sweden', 'coeff': 4.0, 'lat': 59.4002601, 'lon': 13.5009352},
+ # {'id': 47717, 'pot': 'B', 'name': 'Fribourg-Gottéron', 'country': 'Switzerland', 'coeff': 4.0, 'lat': 46.8064773, 'lon': 7.1619719}], 'A': [
+ # {'id': 47721, 'pot': 'A', 'name': 'Eisbären Berlin', 'country': 'Germany', 'coeff': 4.0, 'lat': 52.5200066, 'lon': 13.404954},
+ # {'id': 47719, 'pot': 'A', 'name': 'ZSC Lions Zurich', 'country': 'Switzerland', 'coeff': 4.0, 'lat': 47.4124497, 'lon': 8.5578995},
+ # {'id': 47756, 'pot': 'A', 'name': 'Oceláři Třinec', 'country': 'Czechia', 'coeff': 4.0, 'lat': 49.677631, 'lon': 18.6707901},
+ # {'id': 47757, 'pot': 'A', 'name': 'Skellefteå AIK', 'country': 'Sweden', 'coeff': 4.0, 'lat': 64.750244, 'lon': 20.950917},
+ # {'id': 47725, 'pot': 'A', 'name': 'Tappara Tampere', 'country': 'Finland', 'coeff': 4.0, 'lat': 61.4977524, 'lon': 23.7609535},
+ # {'id': 47733, 'pot': 'A', 'name': 'Genève-Servette', 'country': 'Switzerland', 'coeff': 4.0, 'lat': 46.2043907, 'lon': 6.1431577}]}
+ self.blue_teams = [
+ 52169, 47717, 47719
+ ]
+
+ self.red_teams = [
+ 47755
+ ]
+
+
+
+
+ # self.blue_teams = [
+ # 47733,47721,52169,47719,47717,
+ # ]
+ # self.red_teams = [
+ # 47724,47755,47725,
+ # ]
+ else:
+ self.blue_teams = []
+ self.red_teams = []
+
self.pots = ['D','C','B','A']
if use_db:
- scenario_id = 9529
+ scenario_id = 12098
scenario = Scenario.objects.get(id=scenario_id)
self.basepots = {
@@ -354,50 +405,77 @@ class Draw_Simulator:
df_rank = pd.read_csv('rankings_2.0.csv')
for pot in self.basepots:
for team in self.basepots[pot]:
- ranking = df_rank[df_rank['TEAM'] == team['name']].iloc[0]
- team['coeff'] = ranking['RANK']
+ try:
+ ranking = df_rank[df_rank['TEAM'] == team['name']].iloc[0]
+ team['coeff'] = ranking['RANK']
+ except:
+ team['coeff'] = 0
else:
self.basepots = {
'A': [
+
{'id': 0, 'pot': 'A', 'name': 'Rögle Ängelholm', 'country': 'Sweden'},
+
{'id': 1, 'pot': 'A', 'name': 'Färjestad Karlstad', 'country': 'Sweden'},
+
{'id': 2, 'pot': 'A', 'name': 'EV Zug', 'country': 'Switzerland'},
+
{'id': 3, 'pot': 'A', 'name': 'Eisbären Berlin', 'country': 'Germany'},
+
{'id': 4, 'pot': 'A', 'name': 'Tappara Tampere', 'country': 'Finland'},
+
{'id': 5, 'pot': 'A', 'name': 'Oceláři Třinec',
'country': 'Czech Republic'},
],
'B': [
+
{'id': 6, 'pot': 'B', 'name': 'Red Bull Salzburg', 'country': 'Austria'},
+
{'id': 7, 'pot': 'B', 'name': 'Lulea Hockey', 'country': 'Sweden'},
+
{'id': 8, 'pot': 'B', 'name': 'Fribourg-Gottéron',
'country': 'Switzerland'},
+
{'id': 9, 'pot': 'B', 'name': 'Red Bull Munich', 'country': 'Germany'},
+
{'id': 10, 'pot': 'B', 'name': 'Jukurit Mikkeli', 'country': 'Finland'},
+
{'id': 11, 'pot': 'B', 'name': 'Mountfield HK',
'country': 'Czech Republic'},
],
'C': [
+
{'id': 12, 'pot': 'C', 'name': 'VS Villach',
'country': 'Austria'},
+
{'id': 13, 'pot': 'C', 'name': 'ZSC Lions Zürich',
'country': 'Switzerland'},
+
{'id': 14, 'pot': 'C', 'name': 'Grizzlys Wolfsburg', 'country': 'Germany'},
+
{'id': 15, 'pot': 'C', 'name': 'Ilves Tampere',
'country': 'Finland'},
+
{'id': 16, 'pot': 'C', 'name': 'Sparta Prague',
'country': 'Czech Republic'},
+
{'id': 17, 'pot': 'C', 'name': 'Fehérvár AV19',
'country': 'Austria'},
],
'D': [
+
{'id': 18, 'pot': 'D', 'name': 'Belfast Giants',
'country': 'United Kingdom'},
+
{'id': 19, 'pot': 'D', 'name': 'Grenoble', 'country': 'France'},
+
{'id': 20, 'pot': 'D', 'name': 'GKS Katowice', 'country': 'Poland'},
+
{'id': 21, 'pot': 'D', 'name': 'Aalborg Pirates', 'country': 'Denmark'},
+
{'id': 22, 'pot': 'D', 'name': 'Stavanger Oilers', 'country': 'Norway'},
+
{'id': 23, 'pot': 'D', 'name': 'Slovan Bratislava', 'country': 'Slovakia'},
]
}
@@ -683,6 +761,11 @@ class Draw_Simulator:
return opponents, homeGames, awayGames
+ def groups_1_24_martin_chess(self):
+ return self.groups_1_24_martin()
+
+
+
def groups_1_24_martin(self):
"""
1 groups of 24 teams
@@ -884,10 +967,17 @@ class Draw_Simulator:
model_xp.addConstraint(
xp.Sum([x_xp[t['id'], g] for t in self.teams if t['country'] == c]) <= 2)
+
+ if self.chess:
+ model_xp.addConstraint(xp.Sum(x_xp[t,g] for t in self.red_teams for g in self.blue_groups) <= 0)
+ model_xp.addConstraint(xp.Sum(x_xp[t,g] for t in self.blue_teams for g in self.red_groups) <= 0)
+
+
+
# do not play other countries more than 3 times
# for t1 in self.teams:
- # for c in ['FIN']:
+ # for c in ['Finland']:
# for (g1, p1), awayGames in self.awayGames.items():
# if p1 == t1['pot']:
# model_xp.addConstraint(
@@ -900,7 +990,7 @@ class Draw_Simulator:
# # model_xp.addConstraint(
# xp.Sum([x_xp[t['id'], g] for t in self.teams if t['country'] == c]) <= 2)
# for t in self.teams:
- # for c in ['FIN']:
+ # for c in ['Finland']:
# for t2 in self.teams_by_country[c]:
# for (g1, p1), awayGames in self.awayGames.items():
# if p1 == t1['pot']:
@@ -997,14 +1087,15 @@ class Draw_Simulator:
def simulate(self, nTimes):
-
+ tt = time.time()
if nTimes < 1:
return None
tmp_stats = defaultdict(lambda: {})
+
for n in range(nTimes):
- print(f"{self.opponent_func.__name__} - Simulation {n}")
+ print(f"{self.opponent_func.__name__} - Simulation {n} - in {time.time()-tt}")
ttt =time.time()
sol_dict = {
@@ -1018,6 +1109,8 @@ class Draw_Simulator:
total_model_time = 0
total_computations = 0
+ self.blue_groups = []
+ self.red_groups = []
feasible = False
for p in self.pots:
@@ -1039,6 +1132,13 @@ class Draw_Simulator:
total_model_time += modeltime
if feasible:
feasible = True
+ if self.chess and not self.blue_groups:
+ if r['id'] in self.blue_teams:
+ self.blue_groups = [1,3,5] if g % 2 == 1 else [2,4,6]
+ self.red_groups = [2,4,6] if g % 2 == 1 else [1,3,5]
+ elif r['id'] in self.red_teams:
+ self.blue_groups = [2,4,6] if g % 2 == 1 else [1,3,5]
+ self.red_groups = [1,3,5] if g % 2 == 1 else [2,4,6]
break
else:
# print("\tCONFLICT: skipping group {} for team {}".format(g,r))
@@ -1064,7 +1164,7 @@ class Draw_Simulator:
for ag in self.awayGames[g, p]:
t2 = sol_dict[ag[0]][ag[1]]
travel[t1['id']] += self.distance_matrix[t1['id'],t2['id']]
- if t2['country'] == 'FIN':
+ if t2['country'] == 'Switzerland':
travel_finland[t1['id']] += 1
for op in self.opponents[g,p]:
t2 = sol_dict[op[0]][op[1]]
@@ -1073,13 +1173,13 @@ class Draw_Simulator:
else:
coefficients[t1['id']] += abs(t1['coeff']-t2['coeff'])
visited_countries[t1['id']].add(t2['country'])
- if t2['country'] == 'FIN':
+ if t2['country'] == 'Switzerland':
visited_finland[t1['id']] += 1
- # blockings, breaks = self.create_schedule(sol_dict, n)
- blockings = defaultdict(lambda:0)
- breaks = defaultdict(lambda:0)
+ blockings, breaks = self.create_schedule(sol_dict, n)
+ # blockings = defaultdict(lambda:0)
+ # breaks = defaultdict(lambda:0)
@@ -1220,7 +1320,7 @@ class Draw_Simulator:
# print("\t",game[0],game[1],sol_dict[game[0]][game[1]])
# for game in self.awayGames[g, p]:
# print("\t",game[0],game[1],sol_dict[game[0]][game[1]])
- base_scenario_id = 9529
+ base_scenario_id = 12098
base_scenario = Scenario.objects.get(id=base_scenario_id)
scenario = copy_scenario(base_scenario,f" - {self.opponent_func.__name__} - {nSim}")
@@ -1282,7 +1382,13 @@ class Draw_Simulator:
sol += "\n"
sol += " | "
for g in self.groups:
- sol += f"{g} | "
+ if g in self.blue_groups:
+ color = 'blue'
+ elif g in self.red_groups:
+ color = 'red'
+ else:
+ color = 'white'
+ sol += f"{g} | "
sol += "
"
sol += "\n"
sol += "
\n"
@@ -1295,10 +1401,16 @@ class Draw_Simulator:
nFixed += 1
else:
color = 'lightyellow'
+ if sol_dict[g][p]['id'] in self.blue_teams:
+ text_color = 'blue'
+ elif sol_dict[g][p]['id'] in self.red_teams:
+ text_color = 'red'
+ else:
+ text_color = 'black'
tpot = sol_dict[g][p]['pot']
tname = sol_dict[g][p]['name']
tcountry = sol_dict[g][p]['country']
- sol += f"({tpot}){tname}({tcountry}) | "
+ sol += f"({tpot}){tname}({tcountry}) | "
sol += ""
sol += "\n"
sol += "\n"
@@ -1338,22 +1450,26 @@ funcs = [
# Draw_Simulator.groups_6_4,
# Draw_Simulator.groups_3_8,
Draw_Simulator.groups_1_24_martin,
- Draw_Simulator.groups_2_12_martin,
+ Draw_Simulator.groups_1_24_martin_chess,
+ # Draw_Simulator.groups_2_12_martin,
# Draw_Simulator.groups_2_12_stephan,
# Draw_Simulator.groups_1_24_stephan,
- Draw_Simulator.groups_3_8_stephan,
+ # Draw_Simulator.groups_3_8_stephan,
]
-scenario_id = 9529
+scenario_id = 12098
scenario = Scenario.objects.get(id=scenario_id)
# Scenario.objects.filter(base_scenario=scenario).delete()
stats = {}
for func in funcs:
- simulator = Draw_Simulator(algorithm='XP', opponent_func = func, html_output=True,use_db=True)
- nSim = 1000
+ if func == Draw_Simulator.groups_1_24_martin_chess:
+ simulator = Draw_Simulator(algorithm='XP', opponent_func = func, html_output=True,use_db=True,use_chess=True)
+ else:
+ simulator = Draw_Simulator(algorithm='XP', opponent_func = func, html_output=True,use_db=True)
+ nSim = 100
stats[func.__name__] = simulator.simulate(nSim)
@@ -1391,16 +1507,16 @@ sol += "Coe. | "
sol += "Block | "
sol += "No Travel | "
sol += "Countr. | "
-sol += "Play Finland 3x | "
-sol += "Travel Finland 3x | "
+sol += "Play Switzerland 3x | "
+sol += "Travel Switzerland 3x | "
for t in simulator.teams:
sol += "Trav. | "
sol += "Coe. | "
sol += "Block | "
sol += "No Travel | "
sol += "Countr. | "
- sol += "Play Finland 3x | "
- sol += "Travel Finland 3x | "
+ sol += "Play Switzerland 3x | "
+ sol += "Travel Switzerland 3x | "
sol += ""
sol += ""
sol += "| M | "
diff --git a/dfbnet/auswertung_sachsen.py b/dfbnet/auswertung_sachsen.py
new file mode 100644
index 0000000..a53dd4f
--- /dev/null
+++ b/dfbnet/auswertung_sachsen.py
@@ -0,0 +1,145 @@
+# %%
+
+import googlemaps
+
+from gmplot import GoogleMapPlotter
+import json
+import pandas as pd
+import ast
+import random
+import itertools
+import time
+
+
+# %%
+with open("data/previous_stats_road_distance.json", "r", encoding="utf-8") as f:
+ stats_previous_road = json.load(f)
+stats_previous_road = {ast.literal_eval(k) if k != "overall" else k: v for k, v in stats_previous_road.items()}
+
+with open("data/previous_stats_road_duration.json", "r", encoding="utf-8") as f:
+ stats_previous_duration = json.load(f)
+stats_previous_duration = {ast.literal_eval(k) if k != "overall" else k: v for k, v in stats_previous_duration.items()}
+
+with open("data/new_stats_road_distance.json", "r", encoding="utf-8") as f:
+ stats_new_road = json.load(f)
+stats_new_road = {ast.literal_eval(k) if k != "overall" else k: v for k, v in stats_new_road.items()}
+
+with open("data/new_stats_road_duration.json", "r", encoding="utf-8") as f:
+ stats_new_duration = json.load(f)
+stats_new_duration = {ast.literal_eval(k) if k != "overall" else k: v for k, v in stats_new_duration.items()}
+
+
+
+# %%
+
+
+print(stats_previous_road['overall'])
+print(stats_previous_duration['overall'])
+print(stats_new_road['overall'])
+print(stats_new_duration['overall'])
+
+overall_stats = {
+ "previous": {
+ "road": stats_previous_road['overall'],
+ "duration": stats_previous_duration['overall'],
+ "average_max_team_distance": sum([v['max_team_distance'] for k, v in stats_previous_road.items() if k != "overall"]) / len([v['max_team_distance'] for k, v in stats_previous_road.items() if k != "overall"])
+ },
+ "new": {
+ "road": stats_new_road['overall'],
+ "duration": stats_new_duration['overall'],
+ "average_max_team_distance": sum([v['max_team_distance'] for k, v in stats_new_road.items() if k != "overall"]) / len([v['max_team_distance'] for k, v in stats_new_road.items() if k != "overall"])
+ }
+}
+
+# %%
+
+import plotly.graph_objects as go
+
+
+# %%
+
+
+""" create bar plot for overall stats """
+fig = go.Figure()
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["previous"]["road"]['total_distance'], overall_stats["previous"]["duration"]['total_distance']],
+ name="Previous",
+ marker_color='rgb(55, 83, 109)'
+))
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["new"]["road"]['total_distance'], overall_stats["new"]["duration"]['total_distance']],
+ name="New",
+ marker_color='rgb(26, 118, 255)'
+))
+
+fig.update_layout(
+ title="Total Distances",
+ xaxis_title="",
+ yaxis_title="Distance in km / Time in m",
+ barmode='group'
+)
+
+fig.show()
+
+# %%
+
+
+""" create bar plot for overall stats """
+fig = go.Figure()
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration", "Max Team Distance"],
+ y=[overall_stats["previous"]["road"]['average_distance'], overall_stats["previous"]["duration"]['average_distance'], overall_stats["previous"]["average_max_team_distance"]],
+ name="Previous",
+ marker_color='rgb(55, 83, 109)'
+))
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration", "Max Team Distance"],
+ y=[overall_stats["new"]["road"]['average_distance'], overall_stats["new"]["duration"]['average_distance'], overall_stats["new"]["average_max_team_distance"]],
+ name="New",
+ marker_color='rgb(26, 118, 255)'
+))
+
+fig.update_layout(
+ title="Average Total Distance per Team",
+ xaxis_title="",
+ yaxis_title="Distance in km / Time in m",
+ barmode='group'
+)
+
+fig.show()
+
+
+# %%
+
+
+""" create bar plot for overall stats """
+fig = go.Figure()
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["previous"]["road"]['average_group_distance'], overall_stats["previous"]["duration"]['average_group_distance']],
+ name="Previous",
+ marker_color='rgb(55, 83, 109)'
+))
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["new"]["road"]['average_group_distance'], overall_stats["new"]["duration"]['average_group_distance']],
+ name="New",
+ marker_color='rgb(26, 118, 255)'
+))
+
+fig.update_layout(
+ title="Average Total Distance per Group",
+ xaxis_title="",
+ yaxis_title="Distance in km / Time in m",
+ barmode='group'
+)
+
+fig.show()
+
+
+
+
+
+# %%
diff --git a/dfbnet/competitions.py b/dfbnet/competitions.py
new file mode 100644
index 0000000..c05394a
--- /dev/null
+++ b/dfbnet/competitions.py
@@ -0,0 +1,59 @@
+import pandas as pd
+
+def get_teams_from_staffel(staffel=None):
+
+ dresden = pd.read_excel("data/beispiel_daten_dresden.xlsx")
+ # staffeln = dresden["STAFFEL"].unique()
+
+ if staffel:
+
+ teams_in_staffel = {}
+ teams = dresden[dresden["STAFFEL"] == staffel][
+ [
+ "MANNSCHAFT",
+ "MS_ART",
+ "SP_KLASSE",
+ "SCHLUESSEL_ZAHL",
+ "WUNSCH_WOCHENTAG",
+ "WUNSCH_TAG",
+ "WUNSCH_ZEIT",
+ "SPIELSTAETTE",
+ ]
+ ].to_dict(orient="records")
+
+ for t in teams:
+ if t['MANNSCHAFT'] not in teams_in_staffel:
+ teams_in_staffel[t['MANNSCHAFT']] = t
+
+
+ return teams_in_staffel
+
+ else:
+ staffeln = dresden["STAFFEL"].unique()
+ teams_in_staffel = {}
+ staffel_type = None
+ for staffel in staffeln:
+ find_duplicates = []
+ teams_in_staffel[staffel] = []
+ teams = dresden[dresden["STAFFEL"] == staffel][
+ [
+ "MANNSCHAFT",
+ "MS_ART",
+ "SP_KLASSE",
+ "SCHLUESSEL_ZAHL",
+ "WUNSCH_WOCHENTAG",
+ "WUNSCH_TAG",
+ "WUNSCH_ZEIT",
+ "SPIELSTAETTE",
+ ]
+ ].to_dict(orient="records")
+
+ for t in teams:
+ if t['MANNSCHAFT'] not in find_duplicates:
+ teams_in_staffel[staffel].append(t)
+ find_duplicates.append(t['MANNSCHAFT'])
+
+ return staffeln, teams_in_staffel
+
+
+
diff --git a/dfbnet/competitions_sachsen.py b/dfbnet/competitions_sachsen.py
new file mode 100644
index 0000000..afd0a06
--- /dev/null
+++ b/dfbnet/competitions_sachsen.py
@@ -0,0 +1,649 @@
+# %%
+
+
+import googlemaps
+
+from gmplot import GoogleMapPlotter
+import json
+import pandas as pd
+import ast
+import random
+import itertools
+
+
+# %%
+def convert_xlsx_to_json():
+ gmaps = googlemaps.Client(key="AIzaSyB76EhR4OqjdXHQUiTkHZC0Svx_7cPGqyU")
+
+ staffel = None
+
+ dresden = pd.read_excel("data/beispiel_daten_sachsen.xlsx")
+ # staffeln = dresden["STAFFEL"].unique()
+
+ unique_staffeln = list(
+ dresden[["MS_ART", "SP_KLASSE", "STAFFEL"]]
+ .drop_duplicates()
+ .itertuples(index=False, name=None)
+ )
+ competitions_dict = {s: {} for s in unique_staffeln}
+ teams_in_competition = {}
+ staffel_type = None
+ for art, klasse, staffel in unique_staffeln:
+ find_duplicates = []
+ teams_in_competition[(art, klasse, staffel)] = []
+ teams = dresden[
+ (dresden["MS_ART"] == art)
+ & (dresden["SP_KLASSE"] == klasse)
+ & (dresden["STAFFEL"] == staffel)
+ ][
+ [
+ "GEB_VEREIN",
+ "GEB_MS",
+ "MANNSCHAFT",
+ "MS_KEY",
+ "MS_ART",
+ "SP_KLASSE",
+ "STAFFEL",
+ "SCHLUESSEL_ZAHL",
+ "WUNSCH_WOCHENTAG",
+ "WUNSCH_TAG",
+ "WUNSCH_ZEIT",
+ "SPIELSTAETTE",
+ ]
+ ].to_dict(
+ orient="records"
+ )
+
+ for t in teams:
+ if t["MANNSCHAFT"] not in find_duplicates:
+ teams_in_competition[(art, klasse, staffel)].append(t)
+ find_duplicates.append(t["MANNSCHAFT"])
+ geocode_result = gmaps.geocode(
+ f"{t['GEB_VEREIN']} {t['MANNSCHAFT']} {t['SPIELSTAETTE']}"
+ )
+ latitude = 0
+ longitude = 0
+ if len(geocode_result) > 0:
+ location = geocode_result[0]["geometry"]["location"]
+ latitude = location["lat"]
+ longitude = location["lng"]
+
+ t["LATITUDE"] = latitude
+ t["LONGITUDE"] = longitude
+
+ competitions_dict[(art, klasse, staffel)]["teams"] = teams_in_competition[
+ (art, klasse, staffel)
+ ]
+ competitions_dict[(art, klasse, staffel)]["nTeams"] = len(
+ teams_in_competition[(art, klasse, staffel)]
+ )
+ competitions_dict[(art, klasse, staffel)]["art"] = art
+ competitions_dict[(art, klasse, staffel)]["klasse"] = klasse
+ competitions_dict[(art, klasse, staffel)]["staffel"] = staffel
+
+ competitions_dict_list_keys = {str(k): v for k, v in competitions_dict.items()}
+
+ with open("data/sachsen.json", "w", encoding="utf-8") as f:
+ json.dump(
+ competitions_dict_list_keys, f, ensure_ascii=False, indent=4, default=str
+ )
+
+
+# %%
+
+
+from math import sqrt, sin, cos, atan2, pi
+
+
+def degreesToRadians(degrees):
+ """Convert degrees to radians"""
+ return degrees * pi / 180
+
+
+def distanceInKmByGPS(lat1, lon1, lat2, lon2):
+ """Calculate the distance between two points in km"""
+ earthRadiusKm = 6371
+ dLat = degreesToRadians(lat2 - lat1)
+ dLon = degreesToRadians(lon2 - lon1)
+ lat1 = degreesToRadians(lat1)
+ lat2 = degreesToRadians(lat2)
+ a = sin(dLat / 2) * sin(dLat / 2) + sin(dLon / 2) * sin(dLon / 2) * cos(lat1) * cos(
+ lat2
+ )
+ c = 2 * atan2(sqrt(a), sqrt(1 - a))
+ return int(earthRadiusKm * c)
+
+
+# %%
+
+with open("data/sachsen.json", "r", encoding="utf-8") as f:
+ competitions = json.load(f)
+
+competitions = {ast.literal_eval(k): v for k, v in competitions.items()}
+
+# STAFFELN PRO ART UND KLASSE
+# ('Herren', 'Landesliga') 1
+# ('Herren', 'Landesklasse') 3
+# ('Frauen', 'Landesliga') 1
+# ('Frauen', 'Landesklasse') 3
+# ('A-Junioren', 'Landesliga') 1
+# ('A-Junioren', 'Landesklasse') 4
+# ('Herren', 'Kreisoberliga') 13
+# ('Herren', '1.Kreisliga (A)') 19
+# ('Herren', '2.Kreisliga (B)') 8
+# ('Herren', '3.Kreisliga (C)') 1
+# ('Herren', '1.Kreisklasse') 21
+# ('Herren', '2.Kreisklasse') 9
+# ('A-Junioren', 'Kreisoberliga') 10
+# ('A-Junioren', '1.Kreisliga (A)') 6
+# ('Frauen', 'Kreisoberliga') 4
+# ('Frauen', '1.Kreisliga (A)') 1
+# ('Frauen', '1.Kreisklasse') 3
+# ('B-Junioren', 'Landesliga') 1
+# ('B-Junioren', 'Landesklasse') 4
+# ('B-Junioren', 'Kreisoberliga') 13
+# ('B-Junioren', '1.Kreisliga (A)') 13
+# ('B-Junioren', '1.Kreisklasse') 1
+# ('C-Junioren', 'Landesliga') 1
+# ('C-Junioren', 'Landesklasse') 4
+# ('C-Junioren', 'Kreisoberliga') 16
+# ('C-Junioren', '1.Kreisliga (A)') 15
+# ('C-Junioren', '1.Kreisklasse') 9
+# ('D-Junioren', 'Landesliga') 1
+# ('D-Junioren', 'Landesklasse') 6
+# ('D-Junioren', 'Kreisoberliga') 16
+# ('D-Junioren', '1.Kreisliga (A)') 24
+# ('D-Junioren', '2.Kreisliga (B)') 8
+# ('D-Junioren', '3.Kreisliga (C)') 2
+# ('D-Junioren', '1.Kreisklasse') 33
+# ('D-Junioren', '2.Kreisklasse') 10
+# ('B-Juniorinnen', 'Landesliga') 1
+# ('B-Juniorinnen', 'Landesklasse') 2
+# ('C-Juniorinnen', 'Landesklasse') 3
+# ('D-Juniorinnen', 'Kreisoberliga') 1
+# ('Herren Ü35', 'Kreisoberliga') 4
+# ('Herren Ü35', '1.Kreisliga (A)') 3
+# ('Herren Ü35', '1.Kreisklasse') 3
+# ('Herren Ü35', '2.Kreisklasse') 1
+# ('Herren Ü40', '1.Kreisliga (A)') 5
+# ('Herren Ü40', '1.Kreisklasse') 1
+# ('Herren Ü50', '1.Kreisliga (A)') 1
+# ('Herren Ü50', '1.Kreisklasse') 1
+# ('Freizeitsport', '1.Kreisliga (A)') 3
+# ('Freizeitsport', '1.Kreisklasse') 2
+
+
+some_colors = [
+ "red",
+ "blue",
+ "green",
+ "yellow",
+ "purple",
+ "orange",
+ "pink",
+ "brown",
+ "black",
+ "white",
+ "gray",
+ "cyan",
+ "magenta",
+ "lime",
+ "indigo",
+ "violet",
+ "turquoise",
+ "gold",
+ "silver",
+ "beige",
+ "maroon",
+ "olive",
+ "navy",
+ "teal",
+ "coral",
+ "lavender",
+ "salmon",
+ "chocolate",
+ "crimson",
+ "aqua",
+ "ivory",
+ "khaki",
+ "plum",
+ "orchid",
+ "peru",
+ "tan",
+ "tomato",
+ "wheat",
+ "azure",
+ "mint",
+ "apricot",
+ "chartreuse",
+ "amber",
+ "fuchsia",
+ "jade",
+ "ruby",
+ "amethyst",
+ "rose",
+ "sapphire",
+ "cerulean",
+ "moss",
+ "denim",
+ "copper",
+ "peach",
+ "sand",
+ "pearl",
+ "mulberry",
+ "lemon",
+ "cream",
+ "ocher",
+ "brass",
+ "eggplant",
+ "cinnamon",
+ "mustard",
+ "rust",
+ "sienna",
+ "sepia",
+ "umber",
+ "limegreen",
+ "seagreen",
+ "forestgreen",
+ "dodgerblue",
+ "mediumslateblue",
+ "royalblue",
+ "firebrick",
+ "darkolivegreen",
+ "midnightblue",
+ "darkturquoise",
+ "lightcoral",
+ "palevioletred",
+ "hotpink",
+ "deeppink",
+ "darkkhaki",
+ "lightseagreen",
+ "darkslategray",
+ "slategray",
+ "lightsteelblue",
+ "skyblue",
+ "lightblue",
+ "powderblue",
+ "darkorange",
+ "lightsalmon",
+ "indianred",
+ "thistle",
+ "burlywood",
+ "mediumaquamarine",
+ "mediumorchid",
+ "mediumvioletred",
+ "papayawhip",
+ "moccasin",
+ "bisque",
+ "blanchedalmond",
+ "antiquewhite",
+ "mistyrose",
+ "lavenderblush",
+ "linen",
+ "snow",
+ "honeydew",
+ "palegreen",
+ "lightcyan",
+ "aliceblue",
+ "ghostwhite",
+ "whitesmoke",
+ "gainsboro",
+]
+
+latitude = 51.18292980165227
+longitude = 13.11435805600463
+gmap = GoogleMapPlotter(
+ latitude, longitude, 8, apikey="AIzaSyAPzFyMk3ZA0kL9TUlJ_kpV_IY56uBwdrc"
+)
+
+
+def random_color():
+ return "#{:06x}".format(random.randint(0, 0xFFFFFF))
+
+
+competition_details = {}
+color = None
+for staffel, attr in competitions.items():
+ if (staffel[0], staffel[1]) != ("Herren", "Kreisoberliga"):
+ # if (staffel[0], staffel[1]) != ('Herren', '1.Kreisklasse'):
+ continue
+ competitions[staffel]["distance"] = []
+
+ if (staffel[0], staffel[1]) not in competition_details:
+ competition_details[(staffel[0], staffel[1])] = {
+ "nStaffeln": 1,
+ "nTeams": 0,
+ "previous_distances": [],
+ "teams": [],
+ }
+ color = some_colors.pop(0)
+ else:
+ competition_details[(staffel[0], staffel[1])]["nStaffeln"] += 1
+ color = some_colors.pop(0)
+
+ latitudes = []
+ longitudes = []
+ markers_text = []
+
+ for team1 in attr["teams"]:
+ competition_details[(staffel[0], staffel[1])]["nTeams"] += 1
+ competition_details[(staffel[0], staffel[1])]["teams"].append(team1)
+ for team2 in attr["teams"]:
+ distance = 0
+ if team1["MANNSCHAFT"] != team2["MANNSCHAFT"]:
+ distance = distanceInKmByGPS(
+ team1["LATITUDE"],
+ team1["LONGITUDE"],
+ team2["LATITUDE"],
+ team2["LONGITUDE"],
+ )
+
+ competition_details[(staffel[0], staffel[1])][
+ "previous_distances"
+ ].append(distance)
+ competitions[staffel]["distance"].append(distance)
+ latitudes.append(team1["LATITUDE"])
+ longitudes.append(team1["LONGITUDE"])
+ markers_text.append(f"{team1['MANNSCHAFT']} @{team1['SPIELSTAETTE']}")
+
+ # Plot the points on the map
+ gmap.scatter(latitudes, longitudes, color=color, size=40, marker=False)
+ for (lat1, lon1), (lat2, lon2) in itertools.combinations(
+ zip(latitudes, longitudes), 2
+ ):
+ gmap.plot([lat1, lat2], [lon1, lon2], color=color, edge_width=2)
+ for lat, lon, text in zip(latitudes, longitudes, markers_text):
+ gmap.marker(lat, lon, title=text.replace('"', ""), color=color)
+
+ print(color, staffel, attr["nTeams"], sum(attr["distance"]))
+
+
+for competition, details in competition_details.items():
+ print(
+ competition,
+ details["nStaffeln"],
+ details["nTeams"],
+ sum(details["previous_distances"]),
+ )
+
+
+# Optionally, draw a line path connecting the points
+# gmap.plot(latitudes, longitudes, color='blue', edge_width=2.5)
+
+# Save the map to an HTML file
+gmap.draw("map_previous.html")
+
+# %%
+# for key, value in competition_details.items():
+# print(key,value['nStaffeln'])
+
+# %%
+
+"""" GENERATE ALL DISTANCES BETWEEN TEAMS """
+
+distance_between_teams = {}
+for competition, details in competition_details.items():
+ print(f"Calculating distances for {competition}")
+ for team1 in details["teams"]:
+ distance_between_teams[team1["MANNSCHAFT"]] = {}
+ for team2 in details["teams"]:
+ distance = 0
+ if team1["MANNSCHAFT"] != team2["MANNSCHAFT"]:
+ distance = distanceInKmByGPS(
+ team1["LATITUDE"],
+ team1["LONGITUDE"],
+ team2["LATITUDE"],
+ team2["LONGITUDE"],
+ )
+ distance_between_teams[team1["MANNSCHAFT"]][team2["MANNSCHAFT"]] = distance
+
+
+
+for comp, attr in competition_details.items():
+ teams = attr["teams"]
+
+print(teams)
+
+"""" RECLUSTERING THE COMPETITION INTO DIVISIONS """
+
+# from pulp import (
+# LpVariable,
+# LpProblem,
+# LpMinimize,
+# lpSum,
+# LpStatus,
+# value,
+# LpInteger,
+# XPRESS,
+# )
+
+# model = LpProblem("Cluster", LpMinimize)
+
+
+# """ x = 1 if team i is in same division as j, 0 otherwise """
+# x = {}
+
+# for team1 in teams:
+# for team2 in teams:
+# x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])] = LpVariable(
+# f"team_{team1['MANNSCHAFT']}_{team2['MANNSCHAFT']}",
+# lowBound=0,
+# upBound=1,
+# cat=LpInteger,
+# )
+
+# """ g = 1 if team i is i group j, 0 otherwise """
+# groups = range(1,20)
+
+# g = {}
+# for team in teams:
+# for group in groups:
+# g[(team["MANNSCHAFT"], group)] = LpVariable(
+# f"team_{team['MANNSCHAFT']}_{group}",
+# lowBound=0,
+# upBound=1,
+# cat=LpInteger,
+# )
+
+# """ Each team is in exactly one division """
+# for team1 in teams:
+# model += lpSum(g[(team["MANNSCHAFT"], group)] for group in groups) == 1
+
+
+# """ Each team is in same divisin as itself """
+# for team in teams:
+# model += x[(team["MANNSCHAFT"], team["MANNSCHAFT"])] == 1
+
+# """ Each team is in same division with at least 14 and at most 16 other teams"""
+# for team1 in teams:
+# model += (
+# lpSum(
+# x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])] for team2 in teams
+# )
+# >= 14
+# )
+# model += (
+# lpSum(
+# x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])] for team2 in teams
+# )
+# <= 16
+# )
+
+# """ no more than 16 teams in a division """
+# for group in groups:
+# model += lpSum(g[(team["MANNSCHAFT"], group)] for team in teams) <= 16
+
+# """ if team1 and team2 are paired, than they are in the same division """
+# for group in groups:
+# for team1 in teams:
+# for team2 in teams:
+# if team1["MANNSCHAFT"] != team2["MANNSCHAFT"]:
+# model += x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])] + g[(team1["MANNSCHAFT"], group)] <= 1 + g[(team2["MANNSCHAFT"], group)]
+# model += x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])] + g[(team2["MANNSCHAFT"], group)] <= 1 + g[(team1["MANNSCHAFT"], group)]
+
+
+# """ symmetry constraint """
+# for t1, t2 in x.keys():
+# model += x[(t1, t2)] == x[(t2, t1)]
+
+
+# """ MINIMIZE THE TRAVEL DISTANCE """
+# model += lpSum(
+# distance_between_teams[team1["MANNSCHAFT"]][team2["MANNSCHAFT"]]
+# * x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])]
+# for team1 in teams
+# for team2 in teams
+# )
+
+# model.solve(XPRESS(msg=1, gapRel=0.5))
+
+# new_cluster = 1
+
+# for group in groups:
+# for team in teams:
+# if value(g[(team['MANNSCHAFT'], group)]) > 0.9:
+# print(f"TEAM {team['MANNSCHAFT']} - {group}")
+
+# # for team1 in teams:
+# # for team2 in teams:
+# # if value(x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])]) == 1:
+
+# # print(
+# # f"SAME CLUSTER {team1['MANNSCHAFT']} - {team2['MANNSCHAFT']} - {value(g[(team1["MANNSCHAFT"], group)])}"
+# # )
+
+
+# # some_colors = [
+# # "red",
+# # "blue",
+# # "green",
+# # "yellow",
+# # "purple",
+# # "orange",
+# # "pink",
+# # "brown",
+# # "black",
+# # "white",
+# # "gray",
+# # "cyan",
+# # "magenta",
+# # "lime",
+# # "indigo",
+# # "violet",
+# # "turquoise",
+# # "gold",
+# # "silver",
+# # "beige",
+# # "maroon",
+# # "olive",
+# # "navy",
+# # "teal",
+# # "coral",
+# # "lavender",
+# # "salmon",
+# # "chocolate",
+# # "crimson",
+# # "aqua",
+# # "ivory",
+# # "khaki",
+# # "plum",
+# # "orchid",
+# # "peru",
+# # "tan",
+# # "tomato",
+# # "wheat",
+# # "azure",
+# # "mint",
+# # "apricot",
+# # "chartreuse",
+# # "amber",
+# # "fuchsia",
+# # "jade",
+# # "ruby",
+# # "amethyst",
+# # "rose",
+# # "sapphire",
+# # "cerulean",
+# # "moss",
+# # "denim",
+# # "copper",
+# # "peach",
+# # "sand",
+# # "pearl",
+# # "mulberry",
+# # "lemon",
+# # "cream",
+# # "ocher",
+# # "brass",
+# # "eggplant",
+# # "cinnamon",
+# # "mustard",
+# # "rust",
+# # "sienna",
+# # "sepia",
+# # "umber",
+# # "limegreen",
+# # "seagreen",
+# # "forestgreen",
+# # "dodgerblue",
+# # "mediumslateblue",
+# # "royalblue",
+# # "firebrick",
+# # "darkolivegreen",
+# # "midnightblue",
+# # "darkturquoise",
+# # "lightcoral",
+# # "palevioletred",
+# # "hotpink",
+# # "deeppink",
+# # "darkkhaki",
+# # "lightseagreen",
+# # "darkslategray",
+# # "slategray",
+# # "lightsteelblue",
+# # "skyblue",
+# # "lightblue",
+# # "powderblue",
+# # "darkorange",
+# # "lightsalmon",
+# # "indianred",
+# # "thistle",
+# # "burlywood",
+# # "mediumaquamarine",
+# # "mediumorchid",
+# # "mediumvioletred",
+# # "papayawhip",
+# # "moccasin",
+# # "bisque",
+# # "blanchedalmond",
+# # "antiquewhite",
+# # "mistyrose",
+# # "lavenderblush",
+# # "linen",
+# # "snow",
+# # "honeydew",
+# # "palegreen",
+# # "lightcyan",
+# # "aliceblue",
+# # "ghostwhite",
+# # "whitesmoke",
+# # "gainsboro",
+# # ]
+
+# # latitude = 51.18292980165227
+# # longitude = 13.11435805600463
+# # gmap = GoogleMapPlotter(
+# # latitude, longitude, 8, apikey="AIzaSyAPzFyMk3ZA0kL9TUlJ_kpV_IY56uBwdrc"
+# # )
+
+
+# # # Plot the points on the map
+# # gmap.scatter(latitudes, longitudes, color=color, size=40, marker=False)
+# # for (lat1, lon1), (lat2, lon2) in itertools.combinations(
+# # zip(latitudes, longitudes), 2
+# # ):
+# # gmap.plot([lat1, lat2], [lon1, lon2], color=color, edge_width=2)
+# # for lat, lon, text in zip(latitudes, longitudes, markers_text):
+# # gmap.marker(lat, lon, title=text.replace('"', ""), color=color)
+
+# %%
diff --git a/dfbnet/competitions_sachsen_new.py b/dfbnet/competitions_sachsen_new.py
new file mode 100644
index 0000000..0efbf66
--- /dev/null
+++ b/dfbnet/competitions_sachsen_new.py
@@ -0,0 +1,896 @@
+# %%
+
+from pulp import (
+ LpVariable,
+ LpProblem,
+ LpMinimize,
+ lpSum,
+ LpStatus,
+ value,
+ LpInteger,
+ LpContinuous,
+ XPRESS,
+)
+
+import googlemaps
+
+from gmplot import GoogleMapPlotter
+import json
+import pandas as pd
+import ast
+import random
+import itertools
+import time
+
+
+# %%
+def convert_xlsx_to_json():
+ gmaps = googlemaps.Client(key="AIzaSyB76EhR4OqjdXHQUiTkHZC0Svx_7cPGqyU")
+
+ staffel = None
+
+ dresden = pd.read_excel("data/beispiel_daten_sachsen.xlsx")
+ # staffeln = dresden["STAFFEL"].unique()
+
+ unique_staffeln = list(
+ dresden[["MS_ART", "SP_KLASSE", "STAFFEL"]]
+ .drop_duplicates()
+ .itertuples(index=False, name=None)
+ )
+ competitions_dict = {s: {} for s in unique_staffeln}
+ teams_in_competition = {}
+ staffel_type = None
+ for art, klasse, staffel in unique_staffeln:
+ find_duplicates = []
+ teams_in_competition[(art, klasse, staffel)] = []
+ teams = dresden[
+ (dresden["MS_ART"] == art)
+ & (dresden["SP_KLASSE"] == klasse)
+ & (dresden["STAFFEL"] == staffel)
+ ][
+ [
+ "GEB_VEREIN",
+ "GEB_MS",
+ "MANNSCHAFT",
+ "MS_KEY",
+ "MS_ART",
+ "SP_KLASSE",
+ "STAFFEL",
+ "SCHLUESSEL_ZAHL",
+ "WUNSCH_WOCHENTAG",
+ "WUNSCH_TAG",
+ "WUNSCH_ZEIT",
+ "SPIELSTAETTE",
+ ]
+ ].to_dict(
+ orient="records"
+ )
+
+ for t in teams:
+ if t["MANNSCHAFT"] not in find_duplicates:
+ teams_in_competition[(art, klasse, staffel)].append(t)
+ find_duplicates.append(t["MANNSCHAFT"])
+ geocode_result = gmaps.geocode(
+ f"{t['GEB_VEREIN']} {t['MANNSCHAFT']} {t['SPIELSTAETTE']}"
+ )
+ latitude = 0
+ longitude = 0
+ if len(geocode_result) > 0:
+ location = geocode_result[0]["geometry"]["location"]
+ latitude = location["lat"]
+ longitude = location["lng"]
+
+ t["LATITUDE"] = latitude
+ t["LONGITUDE"] = longitude
+
+ competitions_dict[(art, klasse, staffel)]["teams"] = teams_in_competition[
+ (art, klasse, staffel)
+ ]
+ competitions_dict[(art, klasse, staffel)]["nTeams"] = len(
+ teams_in_competition[(art, klasse, staffel)]
+ )
+ competitions_dict[(art, klasse, staffel)]["art"] = art
+ competitions_dict[(art, klasse, staffel)]["klasse"] = klasse
+ competitions_dict[(art, klasse, staffel)]["staffel"] = staffel
+
+ competitions_dict_list_keys = {str(k): v for k, v in competitions_dict.items()}
+
+ with open("data/sachsen.json", "w", encoding="utf-8") as f:
+ json.dump(
+ competitions_dict_list_keys, f, ensure_ascii=False, indent=4, default=str
+ )
+
+
+# %%
+
+
+from math import sqrt, sin, cos, atan2, pi
+
+
+def degreesToRadians(degrees):
+ """Convert degrees to radians"""
+ return degrees * pi / 180
+
+
+def distanceInKmByGPS(lat1, lon1, lat2, lon2):
+ """Calculate the distance between two points in km"""
+ earthRadiusKm = 6371
+ dLat = degreesToRadians(lat2 - lat1)
+ dLon = degreesToRadians(lon2 - lon1)
+ lat1 = degreesToRadians(lat1)
+ lat2 = degreesToRadians(lat2)
+ a = sin(dLat / 2) * sin(dLat / 2) + sin(dLon / 2) * sin(dLon / 2) * cos(lat1) * cos(
+ lat2
+ )
+ c = 2 * atan2(sqrt(a), sqrt(1 - a))
+ return int(earthRadiusKm * c)
+
+
+for metric in ["road_distance", "road_duration", "flight_distance"]:
+ print("\n\n#######################################################")
+ print(f"Calculating {metric}")
+ # %%
+ """ read csv and skip first row """
+ distance_dict = {}
+ with open("data/distances.csv", "r", encoding="utf-8") as f:
+ csv_distances = f.readlines()
+ for i, row in enumerate(csv_distances):
+ if i == 0:
+ continue
+ _, _, team1, team2, road_distance, road_duration, flight_distance = (
+ row.split(",")
+ )
+ distance_dict[(team1, team2)] = {
+ "road_distance": float(road_distance),
+ "road_duration": float(road_duration),
+ "flight_distance": float(flight_distance),
+ }
+
+ # %%
+
+ with open("data/sachsen.json", "r", encoding="utf-8") as f:
+ competitions = json.load(f)
+
+ competitions = {ast.literal_eval(k): v for k, v in competitions.items()}
+
+ # region
+ # STAFFELN PRO ART UND KLASSE
+ # ('Herren', 'Landesliga') 1
+ # ('Herren', 'Landesklasse') 3
+ # ('Frauen', 'Landesliga') 1
+ # ('Frauen', 'Landesklasse') 3
+ # ('A-Junioren', 'Landesliga') 1
+ # ('A-Junioren', 'Landesklasse') 4
+ # ('Herren', 'Kreisoberliga') 13
+ # ('Herren', '1.Kreisliga (A)') 19
+ # ('Herren', '2.Kreisliga (B)') 8
+ # ('Herren', '3.Kreisliga (C)') 1
+ # ('Herren', '1.Kreisklasse') 21
+ # ('Herren', '2.Kreisklasse') 9
+ # ('A-Junioren', 'Kreisoberliga') 10
+ # ('A-Junioren', '1.Kreisliga (A)') 6
+ # ('Frauen', 'Kreisoberliga') 4
+ # ('Frauen', '1.Kreisliga (A)') 1
+ # ('Frauen', '1.Kreisklasse') 3
+ # ('B-Junioren', 'Landesliga') 1
+ # ('B-Junioren', 'Landesklasse') 4
+ # ('B-Junioren', 'Kreisoberliga') 13
+ # ('B-Junioren', '1.Kreisliga (A)') 13
+ # ('B-Junioren', '1.Kreisklasse') 1
+ # ('C-Junioren', 'Landesliga') 1
+ # ('C-Junioren', 'Landesklasse') 4
+ # ('C-Junioren', 'Kreisoberliga') 16
+ # ('C-Junioren', '1.Kreisliga (A)') 15
+ # ('C-Junioren', '1.Kreisklasse') 9
+ # ('D-Junioren', 'Landesliga') 1
+ # ('D-Junioren', 'Landesklasse') 6
+ # ('D-Junioren', 'Kreisoberliga') 16
+ # ('D-Junioren', '1.Kreisliga (A)') 24
+ # ('D-Junioren', '2.Kreisliga (B)') 8
+ # ('D-Junioren', '3.Kreisliga (C)') 2
+ # ('D-Junioren', '1.Kreisklasse') 33
+ # ('D-Junioren', '2.Kreisklasse') 10
+ # ('B-Juniorinnen', 'Landesliga') 1
+ # ('B-Juniorinnen', 'Landesklasse') 2
+ # ('C-Juniorinnen', 'Landesklasse') 3
+ # ('D-Juniorinnen', 'Kreisoberliga') 1
+ # ('Herren Ü35', 'Kreisoberliga') 4
+ # ('Herren Ü35', '1.Kreisliga (A)') 3
+ # ('Herren Ü35', '1.Kreisklasse') 3
+ # ('Herren Ü35', '2.Kreisklasse') 1
+ # ('Herren Ü40', '1.Kreisliga (A)') 5
+ # ('Herren Ü40', '1.Kreisklasse') 1
+ # ('Herren Ü50', '1.Kreisliga (A)') 1
+ # ('Herren Ü50', '1.Kreisklasse') 1
+ # ('Freizeitsport', '1.Kreisliga (A)') 3
+ # ('Freizeitsport', '1.Kreisklasse') 2
+ # endregion
+
+ some_colors = [
+ "red",
+ "blue",
+ "green",
+ "yellow",
+ "purple",
+ "orange",
+ "pink",
+ "brown",
+ "black",
+ "white",
+ "gray",
+ "cyan",
+ "magenta",
+ "lime",
+ "indigo",
+ "violet",
+ "turquoise",
+ "gold",
+ "silver",
+ "beige",
+ "maroon",
+ "olive",
+ "navy",
+ "teal",
+ "coral",
+ "lavender",
+ "salmon",
+ "chocolate",
+ "crimson",
+ "aqua",
+ "ivory",
+ "khaki",
+ "plum",
+ "orchid",
+ "peru",
+ "tan",
+ "tomato",
+ "wheat",
+ "azure",
+ "mint",
+ "apricot",
+ "chartreuse",
+ "amber",
+ "fuchsia",
+ "jade",
+ "ruby",
+ "amethyst",
+ "rose",
+ "sapphire",
+ "cerulean",
+ "moss",
+ "denim",
+ "copper",
+ "peach",
+ "sand",
+ "pearl",
+ "mulberry",
+ "lemon",
+ "cream",
+ "ocher",
+ "brass",
+ "eggplant",
+ "cinnamon",
+ "mustard",
+ "rust",
+ "sienna",
+ "sepia",
+ "umber",
+ "limegreen",
+ "seagreen",
+ "forestgreen",
+ "dodgerblue",
+ "mediumslateblue",
+ "royalblue",
+ "firebrick",
+ "darkolivegreen",
+ "midnightblue",
+ "darkturquoise",
+ "lightcoral",
+ "palevioletred",
+ "hotpink",
+ "deeppink",
+ "darkkhaki",
+ "lightseagreen",
+ "darkslategray",
+ "slategray",
+ "lightsteelblue",
+ "skyblue",
+ "lightblue",
+ "powderblue",
+ "darkorange",
+ "lightsalmon",
+ "indianred",
+ "thistle",
+ "burlywood",
+ "mediumaquamarine",
+ "mediumorchid",
+ "mediumvioletred",
+ "papayawhip",
+ "moccasin",
+ "bisque",
+ "blanchedalmond",
+ "antiquewhite",
+ "mistyrose",
+ "lavenderblush",
+ "linen",
+ "snow",
+ "honeydew",
+ "palegreen",
+ "lightcyan",
+ "aliceblue",
+ "ghostwhite",
+ "whitesmoke",
+ "gainsboro",
+ ]
+
+ latitude = 51.18292980165227
+ longitude = 13.11435805600463
+ gmap = GoogleMapPlotter(
+ latitude, longitude, 8, apikey="AIzaSyAPzFyMk3ZA0kL9TUlJ_kpV_IY56uBwdrc"
+ )
+
+ def random_color():
+ return "#{:06x}".format(random.randint(0, 0xFFFFFF))
+
+ previous_statistics = {}
+
+ competition_details = {}
+ color = None
+ for staffel, attr in competitions.items():
+ if (staffel[0], staffel[1]) != ("Herren", "Kreisoberliga"):
+ # if (staffel[0], staffel[1]) != ('Herren', '1.Kreisklasse'):
+ continue
+ competitions[staffel]["distance"] = []
+
+ if (staffel[0], staffel[1]) not in competition_details:
+ competition_details[(staffel[0], staffel[1])] = {
+ "nStaffeln": 1,
+ "nTeams": 0,
+ "previous_distances": [],
+ "teams": [],
+ }
+ color = some_colors.pop(0)
+ else:
+ competition_details[(staffel[0], staffel[1])]["nStaffeln"] += 1
+ color = some_colors.pop(0)
+
+ latitudes = []
+ longitudes = []
+ markers_text = []
+
+ distance_for_team = {}
+ for team1 in attr["teams"]:
+ competition_details[(staffel[0], staffel[1])]["nTeams"] += 1
+ competition_details[(staffel[0], staffel[1])]["teams"].append(team1)
+ distance_for_team[team1["MANNSCHAFT"]] = []
+ for team2 in attr["teams"]:
+ distance = 0
+ if team1["MANNSCHAFT"] != team2["MANNSCHAFT"]:
+ distance = distance_dict[
+ (team1["MANNSCHAFT"], team2["MANNSCHAFT"])
+ ][metric]
+ competition_details[(staffel[0], staffel[1])][
+ "previous_distances"
+ ].append(distance)
+ competitions[staffel]["distance"].append(distance)
+ distance_for_team[team1["MANNSCHAFT"]].append(distance)
+ latitudes.append(team1["LATITUDE"])
+ longitudes.append(team1["LONGITUDE"])
+ markers_text.append(f"{team1['MANNSCHAFT']} @{team1['SPIELSTAETTE']}")
+
+ # Plot the points on the map
+ gmap.scatter(latitudes, longitudes, color=color, size=40, marker=False)
+ for (lat1, lon1), (lat2, lon2) in itertools.combinations(
+ zip(latitudes, longitudes), 2
+ ):
+ gmap.plot([lat1, lat2], [lon1, lon2], color=color, edge_width=2)
+ for lat, lon, text in zip(latitudes, longitudes, markers_text):
+ gmap.marker(lat, lon, title=text.replace('"', ""), color=color)
+
+ print(color, staffel, attr["nTeams"], sum(attr["distance"]))
+
+ previous_statistics[staffel] = {
+ "nTeams": attr["nTeams"],
+ "total_distance": sum(attr["distance"]),
+ "average_distance": sum(attr["distance"]) / attr["nTeams"],
+ "max_distance": max(attr["distance"]),
+ "min_distance": min(attr["distance"]),
+ "max_team": max(distance_for_team, key=lambda x: sum(distance_for_team[x])),
+ "max_team_distance": max(
+ [sum(distance_for_team[x]) for x in distance_for_team]
+ ),
+ "min_team": min(distance_for_team, key=lambda x: sum(distance_for_team[x])),
+ "min_team_distance": min(
+ [sum(distance_for_team[x]) for x in distance_for_team]
+ ),
+ }
+
+ """ GATHER SOME PREVIOUS STATISTICS """
+
+ for key, val in previous_statistics.items():
+ print(key, val)
+
+ """ add overall statistics """
+ for competition, details in competition_details.items():
+ print(
+ competition,
+ details["nStaffeln"],
+ details["nTeams"],
+ sum(details["previous_distances"]),
+ )
+ previous_statistics["overall"] = {
+ "nStaffeln": sum(
+ [details["nStaffeln"] for details in competition_details.values()]
+ ),
+ "nTeams": sum(
+ [details["nTeams"] for details in competition_details.values()]
+ ),
+ "total_distance": sum(
+ [
+ sum(details["previous_distances"])
+ for details in competition_details.values()
+ ]
+ ),
+ "average_distance": sum(
+ [
+ sum(details["previous_distances"])
+ for details in competition_details.values()
+ ]
+ )
+ / sum([details["nTeams"] for details in competition_details.values()]),
+ "max_distance": max(
+ [
+ max(details["previous_distances"])
+ for details in competition_details.values()
+ ]
+ ),
+ "min_distance": min(
+ [
+ min(details["previous_distances"])
+ for details in competition_details.values()
+ ]
+ ),
+ "average_group_distance": sum(
+ [
+ sum(details["previous_distances"])
+ for details in competition_details.values()
+ ]
+ )
+ / sum([details["nStaffeln"] for details in competition_details.values()]),
+ }
+
+ previous_statistics_str_keys = {str(k): v for k, v in previous_statistics.items()}
+
+ with open(f"data/previous_stats_{metric}.json", "w", encoding="utf-8") as f:
+ json.dump(
+ previous_statistics_str_keys, f, ensure_ascii=False, indent=4, default=str
+ )
+
+ # Optionally, draw a line path connecting the points
+ # gmap.plot(latitudes, longitudes, color='blue', edge_width=2.5)
+
+ # Save the map to an HTML file
+ gmap.draw(f"map_previous_{metric}.html")
+ # %%
+ # for key, value in competition_details.items():
+ # print(key,value['nStaffeln'])
+
+ # %%
+
+ """" GENERATE ALL DISTANCES BETWEEN TEAMS """
+
+ distance_between_teams = {}
+ for competition, details in competition_details.items():
+ print(f"Calculating distances for {competition}")
+ for team1 in details["teams"]:
+ distance_between_teams[team1["MANNSCHAFT"]] = {}
+ for team2 in details["teams"]:
+ distance = 0
+ if team1["MANNSCHAFT"] != team2["MANNSCHAFT"]:
+ distance = distance_dict[
+ (team1["MANNSCHAFT"], team2["MANNSCHAFT"])
+ ][metric]
+ distance_between_teams[team1["MANNSCHAFT"]][
+ team2["MANNSCHAFT"]
+ ] = distance
+
+ for comp, attr in competition_details.items():
+ teams = attr["teams"]
+
+ """" RECLUSTERING THE COMPETITION INTO DIVISIONS """
+
+ model = LpProblem("Cluster", LpMinimize)
+
+ """ x = 1 if team i is in same division as j, 0 otherwise """
+ x = {}
+
+ for team1 in teams:
+ for team2 in teams:
+ x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])] = LpVariable(
+ f"team_{team1['MANNSCHAFT']}_{team2['MANNSCHAFT']}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+ """ g = 1 if team i is i group j, 0 otherwise """
+ groups = range(1, 14)
+
+ g = {}
+ for team in teams:
+ for group in groups:
+ g[(team["MANNSCHAFT"], group)] = LpVariable(
+ f"team_{team['MANNSCHAFT']}_{group}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+ """ Each team is in exactly one division """
+ for team in teams:
+ model += lpSum(g[(team["MANNSCHAFT"], group)] for group in groups) == 1
+
+ """ Each team is in same divisin as itself """
+ for team in teams:
+ model += x[(team["MANNSCHAFT"], team["MANNSCHAFT"])] == 1
+
+ """ Each team is in same division with at least 14 and at most 16 other teams"""
+
+ for team1 in teams:
+ model += (
+ lpSum(x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])] for team2 in teams)
+ >= 14
+ )
+ model += (
+ lpSum(x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])] for team2 in teams)
+ <= 16
+ )
+
+ if False:
+ """ no more than 16 teams in a division """
+ for group in groups:
+ model += lpSum(g[(team["MANNSCHAFT"], group)] for team in teams) <= 16
+
+ """ use each group / at least one team per group """
+ for group in groups:
+ model += lpSum(g[(team["MANNSCHAFT"], group)] for team in teams) >= 1
+
+ model += lpSum(g[(team["MANNSCHAFT"], 1)] for team in teams) == 14
+ model += lpSum(g[(team["MANNSCHAFT"], 2)] for team in teams) == 14
+ model += lpSum(g[(team["MANNSCHAFT"], 3)] for team in teams) == 16
+ model += lpSum(g[(team["MANNSCHAFT"], 4)] for team in teams) == 14
+ model += lpSum(g[(team["MANNSCHAFT"], 5)] for team in teams) == 14
+ model += lpSum(g[(team["MANNSCHAFT"], 6)] for team in teams) == 14
+ model += lpSum(g[(team["MANNSCHAFT"], 7)] for team in teams) == 16
+ model += lpSum(g[(team["MANNSCHAFT"], 8)] for team in teams) == 16
+ model += lpSum(g[(team["MANNSCHAFT"], 9)] for team in teams) == 14
+ model += lpSum(g[(team["MANNSCHAFT"], 10)] for team in teams) == 14
+ model += lpSum(g[(team["MANNSCHAFT"], 11)] for team in teams) == 15
+ model += lpSum(g[(team["MANNSCHAFT"], 12)] for team in teams) == 16
+ model += lpSum(g[(team["MANNSCHAFT"], 13)] for team in teams) == 14
+
+
+ """ if team1 and team2 are paired, than they are in the same division """
+ for group in groups:
+ for team1 in teams:
+ for team2 in teams:
+ if team1["MANNSCHAFT"] != team2["MANNSCHAFT"]:
+ model += (
+ x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])]
+ + g[(team1["MANNSCHAFT"], group)]
+ <= 1 + g[(team2["MANNSCHAFT"], group)]
+ )
+ model += (
+ x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])]
+ + g[(team2["MANNSCHAFT"], group)]
+ <= 1 + g[(team1["MANNSCHAFT"], group)]
+ )
+
+ """ symmetry constraint """
+ for t1, t2 in x.keys():
+ model += x[(t1, t2)] == x[(t2, t1)]
+
+ """ MINIMIZE THE TRAVEL DISTANCE """
+ model += lpSum(
+ distance_between_teams[team1["MANNSCHAFT"]][team2["MANNSCHAFT"]]
+ * x[(team1["MANNSCHAFT"], team2["MANNSCHAFT"])]
+ for team1 in teams
+ for team2 in teams
+ )
+
+ model.solve(XPRESS(msg=1, gapRel=0.01, timeLimit=1))
+
+ localsearch_time = 3600*2
+
+ start_time = time.time()
+
+ while time.time() - start_time < localsearch_time:
+
+ used_groups = [
+ group
+ for group in groups
+ if sum(value(g[(team["MANNSCHAFT"], group)]) for team in teams) > 0.9
+ ]
+
+ group_size = random.randint(2, 6)
+ opt_groups = random.sample(used_groups, group_size)
+ opt_stalltime = group_size * 25
+
+ print(f"Time: {time.time() - start_time}")
+ print("Optimizing groups", opt_groups)
+
+ for key in x.keys():
+ x[key].setInitialValue(value(x[key]))
+ x[key].lowBound = value(x[key])
+
+ for key in g.keys():
+ g[key].setInitialValue(value(g[key]))
+ if key[1] in opt_groups:
+ g[key].lowBound = 0
+ for (t1,t2) in x.keys():
+ if t1 == key[0] or t2 == key[0]:
+ x[(t1,t2)].lowBound = 0
+ else:
+ g[key].lowBound = value(g[key])
+
+ model.solve(
+ XPRESS(
+ msg=1,
+ gapRel=0.01,
+ warmStart=True,
+ options=[f"MAXSTALLTIME={opt_stalltime}"],
+ )
+ )
+
+ some_colors = [
+ "red",
+ "blue",
+ "green",
+ "yellow",
+ "purple",
+ "orange",
+ "pink",
+ "brown",
+ "black",
+ "white",
+ "gray",
+ "cyan",
+ "magenta",
+ "lime",
+ "indigo",
+ "violet",
+ "turquoise",
+ "gold",
+ "silver",
+ "beige",
+ "maroon",
+ "olive",
+ "navy",
+ "teal",
+ "coral",
+ "lavender",
+ "salmon",
+ "chocolate",
+ "crimson",
+ "aqua",
+ "ivory",
+ "khaki",
+ "plum",
+ "orchid",
+ "peru",
+ "tan",
+ "tomato",
+ "wheat",
+ "azure",
+ "mint",
+ "apricot",
+ "chartreuse",
+ "amber",
+ "fuchsia",
+ "jade",
+ "ruby",
+ "amethyst",
+ "rose",
+ "sapphire",
+ "cerulean",
+ "moss",
+ "denim",
+ "copper",
+ "peach",
+ "sand",
+ "pearl",
+ "mulberry",
+ "lemon",
+ "cream",
+ "ocher",
+ "brass",
+ "eggplant",
+ "cinnamon",
+ "mustard",
+ "rust",
+ "sienna",
+ "sepia",
+ "umber",
+ "limegreen",
+ "seagreen",
+ "forestgreen",
+ "dodgerblue",
+ "mediumslateblue",
+ "royalblue",
+ "firebrick",
+ "darkolivegreen",
+ "midnightblue",
+ "darkturquoise",
+ "lightcoral",
+ "palevioletred",
+ "hotpink",
+ "deeppink",
+ "darkkhaki",
+ "lightseagreen",
+ "darkslategray",
+ "slategray",
+ "lightsteelblue",
+ "skyblue",
+ "lightblue",
+ "powderblue",
+ "darkorange",
+ "lightsalmon",
+ "indianred",
+ "thistle",
+ "burlywood",
+ "mediumaquamarine",
+ "mediumorchid",
+ "mediumvioletred",
+ "papayawhip",
+ "moccasin",
+ "bisque",
+ "blanchedalmond",
+ "antiquewhite",
+ "mistyrose",
+ "lavenderblush",
+ "linen",
+ "snow",
+ "honeydew",
+ "palegreen",
+ "lightcyan",
+ "aliceblue",
+ "ghostwhite",
+ "whitesmoke",
+ "gainsboro",
+ ]
+
+ latitude = 51.18292980165227
+ longitude = 13.11435805600463
+ gmap = GoogleMapPlotter(
+ latitude, longitude, 8, apikey="AIzaSyAPzFyMk3ZA0kL9TUlJ_kpV_IY56uBwdrc"
+ )
+
+ new_statistics = {}
+
+ f = open(f"data/new_solution_{metric}.csv", "w")
+
+ for group in groups:
+ print(f"GROUP {group}")
+ new_statistics[group] = {
+ "nTeams": 0,
+ "total_distance": 0,
+ "average_distance": 0,
+ "max_distance": 0,
+ "min_distance": 0,
+ "max_team": "",
+ "max_team_distance": 0,
+ "min_team": "",
+ "min_team_distance": 0,
+ }
+
+ latitudes = []
+ longitudes = []
+ markers_text = []
+
+ color = some_colors.pop(0)
+
+ for team in teams:
+ if value(g[(team["MANNSCHAFT"], group)]) > 0.9:
+ print(f"TEAM {team['MANNSCHAFT']} - {group}")
+ f.write(f"{team['MANNSCHAFT']},{group}\n")
+
+ latitudes.append(team["LATITUDE"])
+ longitudes.append(team["LONGITUDE"])
+ markers_text.append(f"{team['MANNSCHAFT']} @{team['SPIELSTAETTE']}")
+ new_statistics[group]["nTeams"] += 1
+
+ for t1, t2 in x.keys():
+ if t1 == team["MANNSCHAFT"] and value(x[(t1, t2)]) > 0.9:
+ new_statistics[group][
+ "total_distance"
+ ] += distance_between_teams[t1][t2]
+
+ new_statistics[group]["average_distance"] = new_statistics[group][
+ "total_distance"
+ ] / (max(new_statistics[group]["nTeams"], 1))
+
+ teams_in_group = [
+ team["MANNSCHAFT"]
+ for team in teams
+ if value(g[(team["MANNSCHAFT"], group)]) > 0.9
+ ]
+ new_statistics[group]["max_distance"] = max(
+ [
+ distance_between_teams[t1][t2]
+ for t1 in teams_in_group
+ for t2 in teams_in_group
+ ],
+ default=0,
+ )
+ new_statistics[group]["min_distance"] = min(
+ [
+ distance_between_teams[t1][t2]
+ for t1 in teams_in_group
+ for t2 in teams_in_group
+ ],
+ default=0,
+ )
+ new_statistics[group]["max_team"] = max(
+ teams_in_group,
+ key=lambda x: sum([distance_between_teams[x][t2] for t2 in teams_in_group]),
+ default=0,
+ )
+ new_statistics[group]["max_team_distance"] = sum(
+ [
+ distance_between_teams[new_statistics[group]["max_team"]][t2]
+ for t2 in teams_in_group
+ ]
+ )
+ new_statistics[group]["min_team"] = min(
+ teams_in_group,
+ key=lambda x: sum([distance_between_teams[x][t2] for t2 in teams_in_group]),
+ default=0,
+ )
+ new_statistics[group]["min_team_distance"] = sum(
+ [
+ distance_between_teams[new_statistics[group]["min_team"]][t2]
+ for t2 in teams_in_group
+ ]
+ )
+
+ # Plot the points on the map
+ gmap.scatter(latitudes, longitudes, color=color, size=40, marker=False)
+ for (lat1, lon1), (lat2, lon2) in itertools.combinations(
+ zip(latitudes, longitudes), 2
+ ):
+ gmap.plot([lat1, lat2], [lon1, lon2], color=color, edge_width=2)
+ for lat, lon, text in zip(latitudes, longitudes, markers_text):
+ gmap.marker(lat, lon, title=text.replace('"', ""), color=color)
+
+ f.close()
+
+ gmap.draw(f"map_new_{metric}.html")
+
+ new_statistics["overall"] = {
+ "nGroups": len(
+ [group for group in groups if new_statistics[group]["nTeams"] > 0]
+ ),
+ "nTeams": sum([new_statistics[group]["nTeams"] for group in groups]),
+ "total_distance": sum(
+ [new_statistics[group]["total_distance"] for group in groups]
+ ),
+ "average_distance": sum(
+ [new_statistics[group]["total_distance"] for group in groups]
+ )
+ / sum([new_statistics[group]["nTeams"] for group in groups]),
+ "max_distance": max(
+ [new_statistics[group]["max_distance"] for group in groups]
+ ),
+ "min_distance": min(
+ [new_statistics[group]["min_distance"] for group in groups]
+ ),
+ }
+ new_statistics["overall"]["average_group_distance"] = (
+ new_statistics["overall"]["total_distance"]
+ / new_statistics["overall"]["nGroups"]
+ )
+
+ new_statistics_str_keys = {str(k): v for k, v in new_statistics.items()}
+
+ with open(f"data/new_stats_{metric}.json", "w", encoding="utf-8") as f:
+ json.dump(new_statistics_str_keys, f, ensure_ascii=False, indent=4, default=str)
diff --git a/dfbnet/dfbnet.py b/dfbnet/dfbnet.py
new file mode 100644
index 0000000..78dce37
--- /dev/null
+++ b/dfbnet/dfbnet.py
@@ -0,0 +1,102 @@
+# %%
+import json
+from datetime import datetime, time, date
+from serializers import ProblemSerializer
+
+
+from competitions import get_teams_from_staffel
+from schluesselzahlen import get_schluesselzahlen
+from rahmentermine import get_rahmentermine
+
+
+PROJECT_PATH = '/home/md/Work/ligalytics/leagues_stable/'
+import os, sys
+sys.path.insert(0, PROJECT_PATH)
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "leagues.settings")
+os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
+
+""" read json """
+with open("data/competitions.json", "r", encoding="utf-8") as f:
+ competitions = json.load(f)
+
+prob = ProblemSerializer(data=competitions)
+prob.is_valid()
+
+# %%
+""" Create pulp model for solving a schedule for a given set of teams and rahmentermine """
+
+
+rounds1 = list(range(1, len(rahmentermine)//2+1))
+rounds2 = list(range(len(rahmentermine)//2+1, len(rahmentermine)+1))
+rounds = rounds1 + rounds2
+
+
+
+
+
+# %%
+
+from pulp import LpVariable, LpProblem, LpMinimize, lpSum, LpStatus, value, LpInteger, XPRESS
+
+model = LpProblem("Spielplan", LpMinimize)
+
+# Create a variable for each team and each rahmentermin
+x = {}
+for team, attr in teams.items():
+ for round in rounds:
+ x[(team, round)] = LpVariable(
+ f"team_{attr['MANNSCHAFT']}_{round}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+# Create home variables """
+home = {}
+for team, attr in teams.items():
+ for round in rounds:
+ home[(team, round)] = LpVariable(
+ f"home_{attr['MANNSCHAFT']}_{round}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+# Create pattern variables
+assignPattern = {}
+for team, attr in teams.items():
+ for p in pattern:
+ assignPattern[(team, p)] = LpVariable(
+ f"pattern_{attr['MANNSCHAFT']}_{p}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+""" Each team exactly one pattern """
+for team in teams:
+ model += (lpSum(assignPattern[(team, p)] for p in pattern) == 1, f"team_{team}_one_pattern")
+
+""" Patterns cannot be used more than once """
+for p in pattern:
+ model += (lpSum(assignPattern[(team, p)] for team in teams) <= 1, f"pattern_{p}_used_once")
+
+""" Couple patterns with home variables """
+for round in rounds1:
+ for team, attr in teams.items():
+ model += (lpSum(assignPattern[(team, p)] for p in pattern if pattern[p][round-1] == "H") == home[(team, round)], f"coupling_pattern_home_{attr['MANNSCHAFT']}_{round}")
+
+# %%
+model.solve(XPRESS(msg=1))
+
+
+""" print patterns """
+for team, attr in teams.items():
+ for p in pattern:
+ if value(assignPattern[(team, p)]) == 1:
+ print(f"{attr['MANNSCHAFT']} is assigned pattern {p}")
+
+
+
+
+
diff --git a/dfbnet/dfbnet_tojson.py b/dfbnet/dfbnet_tojson.py
new file mode 100644
index 0000000..2fe6e6c
--- /dev/null
+++ b/dfbnet/dfbnet_tojson.py
@@ -0,0 +1,70 @@
+# %%
+from datetime import datetime, time, date
+
+from competitions import get_teams_from_staffel
+from schluesselzahlen import get_schluesselzahlen
+from rahmentermine import get_rahmentermine
+from spielstaetten import get_venues
+
+# staffel = "Brandible Stadtliga B"
+# teams = get_teams_from_staffel(staffel)
+def datetime_serializer(obj):
+ if isinstance(obj, datetime) or isinstance(obj, date) or isinstance(obj, time):
+ return obj.isoformat() # or use obj.strftime("%Y-%m-%d %H:%M:%S")
+ raise TypeError("Type not serializable")
+
+staffeln_raw, teams = get_teams_from_staffel()
+
+
+staffeln = []
+divisions = []
+court_names = []
+venues = []
+
+for s in staffeln_raw:
+ if len(teams[s]) < 4 or len(teams[s]) % 2 == 1:
+ # if len(teams[s]) > 0:
+ # print(s, teams[s][0]['MS_ART'], len(teams[s]))
+ # else:
+ # print(s, "no teams")
+ print("Incompatible", s)
+ continue
+
+ nTeams = len(teams[s])
+ pattern, opponent = get_schluesselzahlen(nTeams)
+
+ ms_art = teams[s][0]['MS_ART']
+ if ms_art in ["A-Junioren","B-Junioren","C-Junioren","D-Junioren","D-Juniorinnen"]:
+ ms_art = "Junioren A-D"
+ elif ms_art in ["Herren Ü50"]:
+ ms_art = "Senioren Ü50"
+ elif ms_art in ["Herren Ü35"]:
+ ms_art = "Senioren Ü35"
+
+ rahmentermine = get_rahmentermine(ms_art, nTeams)
+
+ if not rahmentermine:
+ print("No rahmentermine for", s, ms_art, nTeams)
+ continue
+ divisions.append({
+ "name": s,
+ "teams": teams[s],
+ "nTeams": nTeams,
+ "ms_art": ms_art,
+ "pattern": pattern,
+ "opponent": opponent,
+ "rahmentermine": rahmentermine
+ })
+ # for t in teams[s]:
+ # if not t['SPIELSTAETTE'] in court_names:
+ # courts += [{"name":t['SPIELSTAETTE']}]
+ # court_names += [t['SPIELSTAETTE']]
+
+venues = get_venues()
+
+
+""" dump json """
+import json
+with open("data/competitions.json", "w", encoding="utf-8") as f:
+ json.dump({'divisions':divisions,'venues':venues}, f, default=datetime_serializer, ensure_ascii=False, indent=4)
+# %%
diff --git a/dfbnet/generate_distances.py b/dfbnet/generate_distances.py
new file mode 100644
index 0000000..3b39e22
--- /dev/null
+++ b/dfbnet/generate_distances.py
@@ -0,0 +1,116 @@
+# %%
+
+
+import googlemaps
+
+from gmplot import GoogleMapPlotter
+import json
+import pandas as pd
+import ast
+import random
+import itertools
+import time
+
+
+
+from math import sqrt, sin, cos, atan2, pi
+
+
+
+gmaps = googlemaps.Client(key='AIzaSyB76EhR4OqjdXHQUiTkHZC0Svx_7cPGqyU')
+
+
+def degreesToRadians(degrees):
+ """ Convert degrees to radians """
+ return degrees * pi / 180
+
+
+def distanceInKmByGPS(lat1, lon1, lat2, lon2):
+ """ Calculate the distance between two points in km """
+ earthRadiusKm = 6371
+ dLat = degreesToRadians(lat2 - lat1)
+ dLon = degreesToRadians(lon2 - lon1)
+ lat1 = degreesToRadians(lat1)
+ lat2 = degreesToRadians(lat2)
+ a = sin(dLat / 2) * sin(dLat / 2) + sin(dLon / 2) * \
+ sin(dLon / 2) * cos(lat1) * cos(lat2)
+ c = 2 * atan2(sqrt(a), sqrt(1 - a))
+ return int(earthRadiusKm * c)
+
+# %%
+
+with open('data/sachsen.json', 'r', encoding='utf-8') as f:
+ competitions = json.load(f)
+
+competitions = {ast.literal_eval(k): v for k, v in competitions.items()}
+
+competition_details = {}
+color = None
+
+teams_in_competition = {}
+
+for staffel ,attr in competitions.items():
+ if (staffel[0],staffel[1]) not in competition_details:
+ competition_details[(staffel[0],staffel[1])] = {
+ 'nStaffeln': 1,
+ 'nTeams': 0,
+ 'distances': {},
+ 'teams': []
+ }
+ else:
+ competition_details[(staffel[0],staffel[1])]['nStaffeln'] += 1
+
+
+
+ for team in attr['teams']:
+ if team['MANNSCHAFT'] not in competition_details[(staffel[0],staffel[1])]['teams']:
+ competition_details[(staffel[0],staffel[1])]['teams'].append(team)
+
+csv_file = 'data/distances.csv'
+
+
+f = open(csv_file, 'w')
+f.write('art,klasse,team1,team2,road_distance,road_duration,flight_distance\n')
+
+
+for competition, details in competition_details.items():
+ print(f"Calculating distances for {competition}")
+ for team1 in details['teams']:
+ details['distances'][team1['MANNSCHAFT']] = {}
+ for team2 in details['teams']:
+ if team1 != team2:
+ start = team1
+ end = team2
+ c = 0
+ while(c <= 1):
+ try:
+ road_distance = gmaps.distance_matrix((start['LATITUDE'],start['LONGITUDE']),(end['LATITUDE'],end['LONGITUDE']))['rows'][0]['elements'][0]['distance']['value']
+ road_duration = gmaps.distance_matrix((start['LATITUDE'],start['LONGITUDE']),(end['LATITUDE'],end['LONGITUDE']))['rows'][0]['elements'][0]['duration']['value']
+ c = 3
+ except:
+ road_distance = 0
+ road_duration = 0
+ print(f"Error with {team1['MANNSCHAFT']} and {team2['MANNSCHAFT']}")
+ time.sleep(3)
+ c += 1
+ road_distance = round(road_distance*0.001)
+ road_duration = round(road_duration*0.016666)
+
+ flight_distance = distanceInKmByGPS(team1['LATITUDE'], team1['LONGITUDE'], team2['LATITUDE'], team2['LONGITUDE'])
+
+ # details['distances'][team1['MANNSCHAFT']][team2['MANNSCHAFT']] = {
+ # 'road_distance': road_distance,
+ # 'road_duration': road_duration,
+ # 'flight_distance': flight_distance
+ # }
+ # print(f"Distance between {team1['MANNSCHAFT']} and {team2['MANNSCHAFT']} calculated", details['distances'][team1['MANNSCHAFT']][team2['MANNSCHAFT']])
+ f.write(f"{competition[0]},{competition[1]},{team1['MANNSCHAFT']},{team2['MANNSCHAFT']},{road_distance},{road_duration},{flight_distance}\n")
+
+
+f.close()
+
+# matrix_str_keys = {str(k): v for k, v in competition_details.items()}
+# with open('data/sachsen_matrix.json', 'w', encoding='utf-8') as f:
+# json.dump(matrix_str_keys, f, ensure_ascii=False, indent=4, default=str)
+# %%
+# %%
diff --git a/dfbnet/generate_distances_new.py b/dfbnet/generate_distances_new.py
new file mode 100644
index 0000000..f236e25
--- /dev/null
+++ b/dfbnet/generate_distances_new.py
@@ -0,0 +1,116 @@
+# %%
+
+
+import googlemaps
+
+from gmplot import GoogleMapPlotter
+import json
+import pandas as pd
+import ast
+import random
+import itertools
+import time
+
+
+
+from math import sqrt, sin, cos, atan2, pi
+
+
+
+gmaps = googlemaps.Client(key='AIzaSyB76EhR4OqjdXHQUiTkHZC0Svx_7cPGqyU')
+
+
+def degreesToRadians(degrees):
+ """ Convert degrees to radians """
+ return degrees * pi / 180
+
+
+def distanceInKmByGPS(lat1, lon1, lat2, lon2):
+ """ Calculate the distance between two points in km """
+ earthRadiusKm = 6371
+ dLat = degreesToRadians(lat2 - lat1)
+ dLon = degreesToRadians(lon2 - lon1)
+ lat1 = degreesToRadians(lat1)
+ lat2 = degreesToRadians(lat2)
+ a = sin(dLat / 2) * sin(dLat / 2) + sin(dLon / 2) * \
+ sin(dLon / 2) * cos(lat1) * cos(lat2)
+ c = 2 * atan2(sqrt(a), sqrt(1 - a))
+ return int(earthRadiusKm * c)
+
+# %%
+
+with open('data/sachsen.json', 'r', encoding='utf-8') as f:
+ competitions = json.load(f)
+
+competitions = {ast.literal_eval(k): v for k, v in competitions.items()}
+
+competition_details = {}
+color = None
+
+teams_in_competition = {}
+
+for staffel ,attr in competitions.items():
+ if (staffel[0],staffel[1]) not in competition_details:
+ competition_details[(staffel[0],staffel[1])] = {
+ 'nStaffeln': 1,
+ 'nTeams': 0,
+ 'distances': {},
+ 'teams': []
+ }
+ else:
+ competition_details[(staffel[0],staffel[1])]['nStaffeln'] += 1
+
+
+
+ for team in attr['teams']:
+ if team['MANNSCHAFT'] not in competition_details[(staffel[0],staffel[1])]['teams']:
+ competition_details[(staffel[0],staffel[1])]['teams'].append(team)
+
+csv_file = 'data/distances_2.csv'
+
+
+f = open(csv_file, 'w')
+f.write('art,klasse,team1,team2,road_distance,road_duration,flight_distance\n')
+
+
+for competition, details in sorted(competition_details.items(), key=lambda x:x[0], reverse=True):
+ print(f"Calculating distances for {competition}")
+ for team1 in details['teams']:
+ details['distances'][team1['MANNSCHAFT']] = {}
+ for team2 in details['teams']:
+ if team1 != team2:
+ start = team1
+ end = team2
+ c = 0
+ while(c <= 1):
+ try:
+ road_distance = gmaps.distance_matrix((start['LATITUDE'],start['LONGITUDE']),(end['LATITUDE'],end['LONGITUDE']))['rows'][0]['elements'][0]['distance']['value']
+ road_duration = gmaps.distance_matrix((start['LATITUDE'],start['LONGITUDE']),(end['LATITUDE'],end['LONGITUDE']))['rows'][0]['elements'][0]['duration']['value']
+ c = 3
+ except:
+ road_distance = 0
+ road_duration = 0
+ print(f"Error with {team1['MANNSCHAFT']} and {team2['MANNSCHAFT']}")
+ time.sleep(3)
+ c += 1
+ road_distance = round(road_distance*0.001)
+ road_duration = round(road_duration*0.016666)
+
+ flight_distance = distanceInKmByGPS(team1['LATITUDE'], team1['LONGITUDE'], team2['LATITUDE'], team2['LONGITUDE'])
+
+ # details['distances'][team1['MANNSCHAFT']][team2['MANNSCHAFT']] = {
+ # 'road_distance': road_distance,
+ # 'road_duration': road_duration,
+ # 'flight_distance': flight_distance
+ # }
+ # print(f"Distance between {team1['MANNSCHAFT']} and {team2['MANNSCHAFT']} calculated", details['distances'][team1['MANNSCHAFT']][team2['MANNSCHAFT']])
+ f.write(f"{competition[0]},{competition[1]},{team1['MANNSCHAFT']},{team2['MANNSCHAFT']},{road_distance},{road_duration},{flight_distance}\n")
+
+
+f.close()
+
+# matrix_str_keys = {str(k): v for k, v in competition_details.items()}
+# with open('data/sachsen_matrix.json', 'w', encoding='utf-8') as f:
+# json.dump(matrix_str_keys, f, ensure_ascii=False, indent=4, default=str)
+# %%
+# %%
diff --git a/dfbnet/kmeans_sachsen.py b/dfbnet/kmeans_sachsen.py
new file mode 100644
index 0000000..38e70a1
--- /dev/null
+++ b/dfbnet/kmeans_sachsen.py
@@ -0,0 +1,665 @@
+# %%
+
+from pulp import (
+ LpVariable,
+ LpProblem,
+ LpMinimize,
+ lpSum,
+ LpStatus,
+ value,
+ LpInteger,
+ LpContinuous,
+ XPRESS,
+)
+
+import googlemaps
+
+from gmplot import GoogleMapPlotter
+import json
+import pandas as pd
+import ast
+import random
+import itertools
+import time
+import os
+
+from scipy.cluster.vq import kmeans, vq
+
+import numpy as np
+import matplotlib.pyplot as plt
+
+
+os.environ["XPRESSDIR"] = "/opt/xpressmp_9.5.0"
+os.environ["XPRESS"] = "/opt/xpressmp_9.5.0/bin"
+os.environ["LD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
+os.environ["DYLD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
+os.environ["SHLIB_PATH"] = os.environ["XPRESSDIR"] + "/lib"
+os.environ["LIBPATH"] = os.environ["XPRESSDIR"] + "/lib"
+os.environ["PYTHONPATH"] = os.environ["XPRESSDIR"] + "/lib"
+os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprs.jar"
+os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprb.jar" + os.pathsep + os.environ["CLASSPATH"]
+os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprm.jar" + os.pathsep + os.environ["CLASSPATH"]
+os.environ["PATH"] = os.environ["XPRESSDIR"] + "/bin" + os.pathsep + os.environ["PATH"]
+
+
+
+# %%
+
+
+from math import sqrt, sin, cos, atan2, pi
+
+
+def degreesToRadians(degrees):
+ """Convert degrees to radians"""
+ return degrees * pi / 180
+
+
+def distanceInKmByGPS(lat1, lon1, lat2, lon2):
+ """Calculate the distance between two points in km"""
+ earthRadiusKm = 6371
+ dLat = degreesToRadians(lat2 - lat1)
+ dLon = degreesToRadians(lon2 - lon1)
+ lat1 = degreesToRadians(lat1)
+ lat2 = degreesToRadians(lat2)
+ a = sin(dLat / 2) * sin(dLat / 2) + sin(dLon / 2) * sin(dLon / 2) * cos(lat1) * cos(
+ lat2
+ )
+ c = 2 * atan2(sqrt(a), sqrt(1 - a))
+ return int(earthRadiusKm * c)
+
+
+def random_color():
+ return "#{:06x}".format(random.randint(0, 0xFFFFFF))
+
+# %%
+
+with open("data/sachsen.json", "r", encoding="utf-8") as f:
+ competitions = json.load(f)
+
+competitions = {ast.literal_eval(k): v for k, v in competitions.items()}
+
+# %%
+
+# region
+# STAFFELN PRO ART UND KLASSE
+# ('Herren', 'Landesliga') 1
+# ('Herren', 'Landesklasse') 3
+# ('Frauen', 'Landesliga') 1
+# ('Frauen', 'Landesklasse') 3
+# ('A-Junioren', 'Landesliga') 1
+# ('A-Junioren', 'Landesklasse') 4
+# ('Herren', 'Kreisoberliga') 13
+# ('Herren', '1.Kreisliga (A)') 19
+# ('Herren', '2.Kreisliga (B)') 8
+# ('Herren', '3.Kreisliga (C)') 1
+# ('Herren', '1.Kreisklasse') 21
+# ('Herren', '2.Kreisklasse') 9
+# ('A-Junioren', 'Kreisoberliga') 10
+# ('A-Junioren', '1.Kreisliga (A)') 6
+# ('Frauen', 'Kreisoberliga') 4
+# ('Frauen', '1.Kreisliga (A)') 1
+# ('Frauen', '1.Kreisklasse') 3
+# ('B-Junioren', 'Landesliga') 1
+# ('B-Junioren', 'Landesklasse') 4
+# ('B-Junioren', 'Kreisoberliga') 13
+# ('B-Junioren', '1.Kreisliga (A)') 13
+# ('B-Junioren', '1.Kreisklasse') 1
+# ('C-Junioren', 'Landesliga') 1
+# ('C-Junioren', 'Landesklasse') 4
+# ('C-Junioren', 'Kreisoberliga') 16
+# ('C-Junioren', '1.Kreisliga (A)') 15
+# ('C-Junioren', '1.Kreisklasse') 9
+# ('D-Junioren', 'Landesliga') 1
+# ('D-Junioren', 'Landesklasse') 6
+# ('D-Junioren', 'Kreisoberliga') 16
+# ('D-Junioren', '1.Kreisliga (A)') 24
+# ('D-Junioren', '2.Kreisliga (B)') 8
+# ('D-Junioren', '3.Kreisliga (C)') 2
+# ('D-Junioren', '1.Kreisklasse') 33
+# ('D-Junioren', '2.Kreisklasse') 10
+# ('B-Juniorinnen', 'Landesliga') 1
+# ('B-Juniorinnen', 'Landesklasse') 2
+# ('C-Juniorinnen', 'Landesklasse') 3
+# ('D-Juniorinnen', 'Kreisoberliga') 1
+# ('Herren Ü35', 'Kreisoberliga') 4
+# ('Herren Ü35', '1.Kreisliga (A)') 3
+# ('Herren Ü35', '1.Kreisklasse') 3
+# ('Herren Ü35', '2.Kreisklasse') 1
+# ('Herren Ü40', '1.Kreisliga (A)') 5
+# ('Herren Ü40', '1.Kreisklasse') 1
+# ('Herren Ü50', '1.Kreisliga (A)') 1
+# ('Herren Ü50', '1.Kreisklasse') 1
+# ('Freizeitsport', '1.Kreisliga (A)') 3
+# ('Freizeitsport', '1.Kreisklasse') 2
+# endregion
+
+competition_details = {}
+color = None
+for staffel, attr in competitions.items():
+ # if (staffel[0], staffel[1]) != ('Herren', 'Kreisoberliga'):
+ # continue
+ competitions[staffel]["distance"] = []
+
+ if (staffel[0], staffel[1]) not in competition_details:
+ competition_details[(staffel[0], staffel[1])] = {
+ "nStaffeln": 1,
+ "nTeams": len(attr["teams"]),
+ "teams": attr["teams"],
+ "group_sizes": [len(attr["teams"])],
+ "clusters": {},
+ }
+ else:
+ competition_details[(staffel[0], staffel[1])]["nStaffeln"] += 1
+ competition_details[(staffel[0], staffel[1])]["group_sizes"].append(len(attr["teams"]))
+ competition_details[(staffel[0], staffel[1])]["nTeams"] += len(attr["teams"])
+ competition_details[(staffel[0], staffel[1])]["teams"] += attr["teams"]
+
+
+""" GENERATE ALL DISTANCES BETWEEN TEAMS """
+
+distance_between_teams = {}
+for competition, details in competition_details.items():
+
+# competition = ('Herren', 'Kreisoberliga')
+# details = competition_details[competition]
+
+
+ print(f"Calculating distances for {competition}")
+ for id, team1 in enumerate(details["teams"]):
+ team1['ID'] = id
+ distance_between_teams[team1["MANNSCHAFT"]] = {}
+ for team2 in details["teams"]:
+ distance = 0
+ if team1["MANNSCHAFT"] != team2["MANNSCHAFT"]:
+ distance = distanceInKmByGPS(
+ team1["LATITUDE"],
+ team1["LONGITUDE"],
+ team2["LATITUDE"],
+ team2["LONGITUDE"],
+ )
+ distance_between_teams[team1["MANNSCHAFT"]][
+ team2["MANNSCHAFT"]
+ ] = distance
+
+ teams = details["teams"]
+ # print("Number of teams", len(teams))
+
+
+ locations = []
+ for team in teams:
+ locations.append([team["LATITUDE"], team["LONGITUDE"]])
+
+ data = np.array(locations)
+
+ k = details['nStaffeln']
+
+ # print("Number of groups", k)
+
+ centroids, _ = kmeans(data, k)
+ cluster_labels, _ = vq(data, centroids)
+
+ # print("Initial centroids", len(centroids), centroids)
+ for diff in range(len(centroids), k):
+ centroids = np.append(centroids, [[0, 0]], axis=0)
+
+
+
+    """ RECLUSTERING THE COMPETITION INTO DIVISIONS """
+ improvement = True
+ it = 0
+ last_objective = False
+ while(improvement):
+ it += 1
+ print("Iteration", it)
+
+ model = LpProblem(f"KMeans_{it}", LpMinimize)
+
+ """ x = 1 if team i is in same division as j, 0 otherwise """
+ x = {}
+
+ """ g = 1 if team i is i group j, 0 otherwise """
+ groups = range(1, k+1)
+
+
+ g = {}
+ for team in teams:
+ for group in groups:
+ g[(team["MANNSCHAFT"], group)] = LpVariable(
+ f"team_{team['ID']}_{group}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+ """ Each team is in exactly one division """
+ for team in teams:
+ model += lpSum(g[(team["MANNSCHAFT"], group)] for group in groups) == 1
+
+ for group, group_size in enumerate(details["group_sizes"]):
+ # print(group+1, group_size)
+ model += lpSum(g[(team["MANNSCHAFT"], group+1)] for team in teams) == group_size
+
+ """ MINIMIZE THE DISTANCE TO THE CLUSTER CENTROID """
+ model += lpSum(g[team["MANNSCHAFT"], group] * distanceInKmByGPS(
+ team["LATITUDE"],
+ team["LONGITUDE"],
+ centroids[group - 1][0],
+ centroids[group - 1][1],
+ ) for team in teams for group in groups
+ )
+
+ """ write the model to a file """
+ # model.writeLP(f"kmeans/kmeans_{competition}_{it}.lp")
+
+ model.solve(XPRESS(msg=0, gapRel=0.01))
+
+ if last_objective:
+ if last_objective <= value(model.objective):
+ improvement = False
+ last_objective = value(model.objective)
+
+
+ """ recompute the centroids """
+ centroids = []
+ for group in groups:
+ latitudes = []
+ longitudes = []
+ for team in teams:
+ if value(g[(team["MANNSCHAFT"], group)]) > 0.9:
+ latitudes.append(team["LATITUDE"])
+ longitudes.append(team["LONGITUDE"])
+ centroids.append([np.mean(latitudes), np.mean(longitudes)])
+
+
+ clusters = {k: [] for k in range(1, len(groups)+1)}
+ augmented_teams = []
+ for group in groups:
+ for team in teams:
+ if value(g[(team["MANNSCHAFT"], group)]) > 0.9:
+ clusters[group].append(team)
+
+ competition_details[competition]["clusters"] = clusters
+
+ some_colors = [
+ "red",
+ "blue",
+ "green",
+ "yellow",
+ "purple",
+ "orange",
+ "pink",
+ "brown",
+ "black",
+ "white",
+ "gray",
+ "cyan",
+ "magenta",
+ "lime",
+ "indigo",
+ "violet",
+ "turquoise",
+ "gold",
+ "silver",
+ "beige",
+ "maroon",
+ "olive",
+ "navy",
+ "teal",
+ "coral",
+ "lavender",
+ "salmon",
+ "chocolate",
+ "crimson",
+ "aqua",
+ "ivory",
+ "khaki",
+ "plum",
+ "orchid",
+ "peru",
+ "tan",
+ "tomato",
+ "wheat",
+ "azure",
+ "mint",
+ "apricot",
+ "chartreuse",
+ "amber",
+ "fuchsia",
+ "jade",
+ "ruby",
+ "amethyst",
+ "rose",
+ "sapphire",
+ "cerulean",
+ "moss",
+ "denim",
+ "copper",
+ "peach",
+ "sand",
+ "pearl",
+ "mulberry",
+ "lemon",
+ "cream",
+ "ocher",
+ "brass",
+ "eggplant",
+ "cinnamon",
+ "mustard",
+ "rust",
+ "sienna",
+ "sepia",
+ "umber",
+ "limegreen",
+ "seagreen",
+ "forestgreen",
+ "dodgerblue",
+ "mediumslateblue",
+ "royalblue",
+ "firebrick",
+ "darkolivegreen",
+ "midnightblue",
+ "darkturquoise",
+ "lightcoral",
+ "palevioletred",
+ "hotpink",
+ "deeppink",
+ "darkkhaki",
+ "lightseagreen",
+ "darkslategray",
+ "slategray",
+ "lightsteelblue",
+ "skyblue",
+ "lightblue",
+ "powderblue",
+ "darkorange",
+ "lightsalmon",
+ "indianred",
+ "thistle",
+ "burlywood",
+ "mediumaquamarine",
+ "mediumorchid",
+ "mediumvioletred",
+ "papayawhip",
+ "moccasin",
+ "bisque",
+ "blanchedalmond",
+ "antiquewhite",
+ "mistyrose",
+ "lavenderblush",
+ "linen",
+ "snow",
+ "honeydew",
+ "palegreen",
+ "lightcyan",
+ "aliceblue",
+ "ghostwhite",
+ "whitesmoke",
+ "gainsboro",
+ ]
+
+ latitude = 51.18292980165227
+ longitude = 13.11435805600463
+ gmap = GoogleMapPlotter(
+ latitude, longitude, 8, apikey="AIzaSyAPzFyMk3ZA0kL9TUlJ_kpV_IY56uBwdrc"
+ )
+
+ aggregated_distance = 0
+ distance_for_team = {}
+ for cluster, teamslist in clusters.items():
+ latitudes = []
+ longitudes = []
+ markers_text = []
+ color = some_colors.pop(0)
+ cluster_distance = 0
+ for team1 in teamslist:
+ distance_for_team[team1["MANNSCHAFT"]] = []
+ for team2 in teamslist:
+ distance = 0
+ if team1["MANNSCHAFT"] != team2["MANNSCHAFT"]:
+ distance = distance_between_teams[team1["MANNSCHAFT"]][team2["MANNSCHAFT"]]
+ cluster_distance += distance
+ aggregated_distance += distance
+ distance_for_team[team1["MANNSCHAFT"]].append(distance)
+ latitudes.append(team1["LATITUDE"])
+ longitudes.append(team1["LONGITUDE"])
+ markers_text.append(f"{team1['MANNSCHAFT']} @{team1['SPIELSTAETTE']}")
+
+ # Plot the points on the map
+ gmap.scatter(latitudes, longitudes, color=color, size=40, marker=False)
+ for (lat1, lon1), (lat2, lon2) in itertools.combinations(
+ zip(latitudes, longitudes), 2
+ ):
+ gmap.plot([lat1, lat2], [lon1, lon2], color=color, edge_width=2)
+ for lat, lon, text in zip(latitudes, longitudes, markers_text):
+ gmap.marker(lat, lon, title=text.replace('"', ""), color=color)
+
+ print(cluster, len(teamslist), cluster_distance, aggregated_distance, color)
+
+ gmap.draw(f"kmeans/map_mip_{competition}.html")
+
+
+# %%
+
+""" DUMP THE COMPETITIONS """
+from datetime import datetime, time, date
+
+# from competitions import get_teams_from_staffel
+from schluesselzahlen import get_schluesselzahlen
+from rahmentermine import get_rahmentermine
+from spielstaetten import get_venues
+
+# staffel = "Brandible Stadtliga B"
+# teams = get_teams_from_staffel(staffel)
+def datetime_serializer(obj):
+ if isinstance(obj, datetime) or isinstance(obj, date) or isinstance(obj, time):
+ return obj.isoformat() # or use obj.strftime("%Y-%m-%d %H:%M:%S")
+ raise TypeError("Type not serializable")
+
+
+
+staffeln = []
+divisions = []
+courts = []
+court_names = []
+venues = []
+
+for competition, details in competition_details.items():
+
+ for cluster, cluster_teams in details['clusters'].items():
+ print(f"Processing {competition} {cluster}")
+ nTeams = len(cluster_teams) + len(cluster_teams) % 2
+ pattern, opponent = get_schluesselzahlen(nTeams)
+
+ teams = cluster_teams
+
+ ms_art = teams[0]['MS_ART']
+ if ms_art in ["A-Junioren","B-Junioren","C-Junioren","D-Junioren","D-Juniorinnen"]:
+ ms_art = "Junioren A-D"
+ elif ms_art in ["Herren Ü50"]:
+ ms_art = "Senioren Ü50"
+ elif ms_art in ["Herren Ü35"]:
+ ms_art = "Senioren Ü35"
+
+ rahmentermine = get_rahmentermine(ms_art, nTeams)
+
+ if not rahmentermine:
+ print("No rahmentermine for", competition, cluster, ms_art, nTeams)
+ continue
+ divisions.append({
+ "name": f"{competition[0]} {competition[1]} {cluster}",
+ "teams": teams,
+ "nTeams": nTeams,
+ "ms_art": ms_art,
+ "pattern": pattern,
+ "opponent": opponent,
+ "rahmentermine": rahmentermine
+ })
+ for t in teams:
+ if not t['SPIELSTAETTE'] in court_names:
+ # courts += [{"name":t['SPIELSTAETTE']}]
+
+ venues.append({
+ "SB_SPST_ID": len(venues)+1,
+ "SB_SPST_GEBIET_REF": len(venues)+1,
+ "SB_SPST_NAME": t['SPIELSTAETTE'],
+ "SB_SPST_TYP_REF": 1,
+ "SB_SPST_ZUSTAND_REF": 1,
+ "SB_SPST_PLATZ_NR": "(null)",
+ "SB_SPST_FLUTLICHT": "t",
+ "SB_SPST_SPIELE_PARALLEL_MAX": 3,
+ "SB_SPST_ANSTOSSZEIT_VON": "08:30:00",
+ "SB_SPST_ANSTOSSZEIT_BIS": "20:30:00",
+ "SB_SPST_ANZ_UMKLEIDEN": 5,
+ "SB_SPST_MITTAGSPAUSE_VON": "(null)",
+ "SB_SPST_MITTAGSPAUSE_BIS": "(null)",
+ "SB_SPST_GROESSE_REF": 1,
+ "SB_SPST_SPIELE_TAG_MAX": 20,
+ "SB_SPST_ANZ_TORE": 6,
+ "SB_SPST_SPIELE_ABSTAND": 0,
+ "latitude": t["LATITUDE"],
+ "longitude": t["LONGITUDE"],
+ })
+
+ court_names += [t['SPIELSTAETTE']]
+
+
+
+""" dump json """
+import json
+with open("kmeans/competitions.json", "w", encoding="utf-8") as f:
+ json.dump({'divisions':divisions,'venues':venues}, f, default=datetime_serializer, ensure_ascii=False, indent=4)
+
+
+
+
+# # %%
+
+# %%
+with open("kmeans/competitions.json", "r", encoding="utf-8") as f:
+ data = json.load(f)
+
+
+from pulp import LpVariable, LpProblem, LpMinimize, lpSum, LpStatus, value, LpInteger, XPRESS
+model = LpProblem("Spielplan", LpMinimize)
+x = {}
+home = {}
+assignPattern = {}
+
+
+divisions = data['divisions']
+venues = data['venues']
+max_rounds = 0
+
+team_id = 0
+
+for division_id, division in enumerate(divisions):
+ division['id'] = division_id
+
+ rahmentermine = division['rahmentermine']
+ teams = division['teams']
+ pattern = division['pattern']
+ opponent = division['opponent']
+ nTeams = division['nTeams']
+ ms_art = division['ms_art']
+
+ for t in teams:
+ t['id'] = team_id
+ team_id += 1
+
+
+ # %%
+ """ Create pulp model for solving a schedule for a given set of teams and rahmentermine """
+
+
+ rounds1 = list(range(1, len(rahmentermine)//2+1))
+ rounds2 = list(range(len(rahmentermine)//2+1, len(rahmentermine)+1))
+ rounds = rounds1 + rounds2
+ max_rounds = max(max_rounds, len(rounds1))
+
+ # %%
+
+ # Create a variable for each team and each rahmentermin
+ for team in teams:
+ for round in rounds:
+ x[(team['id'], round)] = LpVariable(
+ f"team_{team['id']}_{round}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+ # Create home variables """
+ for team in teams:
+ for round in rounds:
+ home[(team['id'], round)] = LpVariable(
+ f"home_{team['id']}_{round}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+ # Create pattern variables
+ for team in teams:
+ for p in pattern:
+ assignPattern[(team['id'], p)] = LpVariable(
+ f"pattern_{team['id']}_{p}",
+ lowBound=0,
+ upBound=1,
+ cat=LpInteger,
+ )
+
+ """ Each team exactly one pattern """
+ for team in teams:
+ model += (lpSum(assignPattern[(team['id'], p)] for p in pattern) == 1, f"team_{team['id']}_one_pattern")
+
+ # if team['SPIELSTAETTE'].strip() not in [venue['SB_SPST_NAME'] for venue in venues]:
+ # print(f"Venue {team['SPIELSTAETTE']} not found in venues")
+ # exit()
+ # else:
+ # print(f"Venue {team['SPIELSTAETTE']} found in venues")
+
+ """ Patterns cannot be used more than once """
+ for p in pattern:
+ model += (lpSum(assignPattern[(team['id'], p)] for team in teams) <= 1, f"pattern_{p}_used_once_in_division_{division['id']}")
+
+ """ Couple patterns with home variables """
+ for round in rounds1:
+ for team in teams:
+ model += (lpSum(assignPattern[(team['id'], p)] for p in pattern if pattern[p][round-1] == "H") == home[(team['id'], round)], f"coupling_pattern_home_{team['id']}_{round}")
+
+
+
+model.solve(XPRESS(msg=1))
+
+
+
+csv_file = open("kmeans/schedule.csv", "w")
+csv_file.write("round,venue,day,division,hometeam,awayteam,homepattern,awaypattern,wunschtag,wunschzeit\n")
+""" print patterns """
+for round in range(1,max_rounds+1):
+ for venue in venues:
+ print(f"Round {round} at {venue['SB_SPST_NAME']}")
+ for division in divisions:
+ if division['nTeams'] <= round:
+ continue
+ for team in division['teams']:
+
+
+ if team['SPIELSTAETTE'] == venue['SB_SPST_NAME']:
+ if value(home.get((team['id'], round),0)) == 1:
+ p1 = [p for p in division['pattern'] if assignPattern[(team['id'], p)].varValue == 1][0]
+ p2 = 0
+ o = None
+ for t2 in division['teams']:
+ p2 = [p for p in division['pattern'] if assignPattern[(t2['id'], p)].varValue == 1][0]
+ # print(round,division['nTeams'])
+ if int(p2) == int(division['opponent'][p1][round-1]):
+ o = t2['MANNSCHAFT']
+ break
+ # print(f"{round} ({team['SPIELSTAETTE']}, {venue['SB_SPST_SPIELE_TAG_MAX']}): {division['name']} - {team['MANNSCHAFT']} - {p1} - {team['WUNSCH_TAG']} - {team['WUNSCH_ZEIT']} vs {o} - {p2}")
+ if o:
+                        csv_file.write(f"{round},{team['SPIELSTAETTE'].replace(',', ' ')},{venue['SB_SPST_SPIELE_TAG_MAX']},{division['name']},{team['MANNSCHAFT']},{o},{p1},{p2},{team['WUNSCH_TAG']},{team['WUNSCH_ZEIT']}\n")
+csv_file.close()
+
+
+
diff --git a/dfbnet/rahmentermine.py b/dfbnet/rahmentermine.py
new file mode 100644
index 0000000..6a2f700
--- /dev/null
+++ b/dfbnet/rahmentermine.py
@@ -0,0 +1,48 @@
+import pandas as pd
+
+
+def get_rahmentermine(MS_ART, nTeams):
+
+ """ LOAD RAHMENTERMINPLAN FROM CSV"""
+ rahmentermine = pd.read_csv("data/rahmentermine.csv")
+ cols = list(rahmentermine.columns)
+
+ # %%
+
+ rahmentermine['From'] = pd.to_datetime(rahmentermine['From'], format='%d/%m').dt.date
+ rahmentermine['From'] = rahmentermine['From'].apply(lambda x: x.replace(year=2025) if x.month < 7 else x.replace(year=2024))
+ rahmentermine['To'] = pd.to_datetime(rahmentermine['To'], format='%d/%m').dt.date
+ rahmentermine['To'] = rahmentermine['To'].apply(lambda x: x.replace(year=2025) if x.month < 7 else x.replace(year=2024))
+
+ # %%
+
+ """ REMOVE ALL ENTRIES THAT ARE NO REAL MATCHDAYS"""
+ for col in cols:
+ if col in ['From', 'To']:
+ continue
+ rahmentermine[col] = pd.to_numeric(rahmentermine[col], errors='coerce')
+ rahmentermine[col] = rahmentermine[col].apply(lambda x: int(x) if pd.notna(x) and x == int(x) else '')
+
+ # %%
+
+ """ CREATE DICTIONARY WITH MATCHDAYS"""
+ termine = {}
+ for col in cols:
+ colname = col.split('.')[0]
+ if col in ['From', 'To']:
+ continue
+ if rahmentermine[col][0] != nTeams:
+ continue
+ termine[(colname,nTeams)] = {}
+
+ for row in rahmentermine.iterrows():
+ if row[0] == 0:
+ continue
+ if row[1][col] != "":
+ termine[(colname,nTeams)][row[1][col]] = [row[1]['From'], row[1]['To']]
+
+ return termine.get((MS_ART,nTeams), [])
+
+
+
+
\ No newline at end of file
diff --git a/dfbnet/schluesselzahlen.py b/dfbnet/schluesselzahlen.py
new file mode 100644
index 0000000..a595ebf
--- /dev/null
+++ b/dfbnet/schluesselzahlen.py
@@ -0,0 +1,136 @@
+rahmenplan = {}
+
+rahmenplan[4] = [
+[(1,3),(1,2),(2,3)],
+[(4,2),(3,4),(4,1)],
+]
+
+
+rahmenplan[6] = [
+[(1,5), (2,6), (1,3), (1,2), (2,5)],
+[(3,2), (4,1), (4,2), (3,6), (4,3)],
+[(6,4), (5,3), (6,5), (5,4), (6,1)],
+]
+
+rahmenplan[8] = [
+[(1,7), (2,3), (1,5), (2,6), (1,3), (1,2), (2,7)],
+[(3,4), (4,8), (3,7), (4,1), (4,2), (3,8), (4,5)],
+[(5,2), (6,1), (6,4), (5,3), (6,7), (5,6), (6,3)],
+[(8,6), (7,5), (8,2), (7,8), (8,5), (7,4), (8,1)],
+]
+
+rahmenplan[10] = [
+[(1,9), (2,5), (1,7), (2,10), (1,5), (2,6), (1,3), (1,2), (2,9)],
+[(3,6), (4,3), (3,2), (4,8), (3,7), (4,1), (4,2), (3,10), (4,7)],
+[(5,4), (6,10), (5,9), (6,1), (6,4), (5,3), (6,9), (5,8), (6,5)],
+[(7,2), (8,1), (8,6), (7,5), (8,2), (7,10), (8,7), (7,6), (8,3)],
+[(10,8), (9,7), (10,4), (9,3), (10,9), (9,8), (10,5), (9,4), (10,1)],
+]
+
+rahmenplan[12] = [
+[(1,11), (2,7), (1,9), (2,3), (1,7), (2,10), (1,5), (2,6), (1,3), (1,2), (2,11)],
+[(3,8), (4,5), (3,4), (4,12), (3,11), (4,8), (3,7), (4,1), (4,2), (3,12), (4,9)],
+[(5,6), (6,3), (5,2), (6,10), (5,9), (6,1), (6,4), (5,3), (6,11), (5,10), (6,7)],
+[(7,4), (8,12), (7,11), (8,1), (8,6), (7,5), (8,2), (7,12), (8,9), (7,8), (8,5)],
+[(9,2), (10,1), (10,8), (9,7), (10,4), (9,3), (10,11), (9,10), (10,7), (9,6), (10,3)],
+[(12,10), (11,9), (12,6), (11,5), (12,2), (11,12), (12,9), (11,8), (12,5), (11,4), (12,1)],
+]
+
+rahmenplan[14] = [
+[(1,13), (2,9), (1,11), (2,5), (1,9), (2,14), (1,7), (2,10), (1,5), (2,6), (1,3), (1,2), (2,13)],
+[(3,10), (4,7), (3,6), (4,3), (3,2), (4,12), (3,11), (4,8), (3,7), (4,1), (4,2), (3,14), (4,11)],
+[(5,8), (6,5), (5,4), (6,14), (5,13), (6,10), (5,9), (6,1), (6,4), (5,3), (6,13), (5,12), (6,9)],
+[(7,6), (8,3), (7,2), (8,12), (7,11), (8,1), (8,6), (7,5), (8,2), (7,14), (8,11), (7,10), (8,7)],
+[(9,4), (10,14), (9,13), (10,1), (10,8), (9,7), (10,4), (9,3), (10,13), (9,12), (10,9), (9,8), (10,5)],
+[(11,2), (12,1), (12,10), (11,9), (12,6), (11,5), (12,2), (11,14), (12,11), (11,10), (12,7), (11,6), (12,3)],
+[(14,12), (13,11), (14,8), (13,7), (14,4), (13,3), (14,13), (13,12), (14,9), (13,8), (14,5), (13,4), (14,1)],
+]
+
+rahmenplan[16] = [
+[(1,15), (2,11), (1,13), (2,7), (1,11), (2,3), (1,9), (2,14), (1,7), (2,10), (1,5), (2,6), (1,3), (1,2), (2,15)],
+[(3,12), (4,9), (3,8), (4,5), (3,4), (4,16), (3,15), (4,12), (3,11), (4,8), (3,7), (4,1), (4,2), (3,16), (4,13)],
+[(5,10), (6,7), (5,6), (6,3), (5,2), (6,14), (5,13), (6,10), (5,9), (6,1), (6,4), (5,3), (6,15), (5,14), (6,11)],
+[(7,8), (8,5), (7,4), (8,16), (7,15), (8,12), (7,11), (8,1), (8,6), (7,5), (8,2), (7,16), (8,13), (7,12), (8,9)],
+[(9,6), (10,3), (9,2), (10,14), (9,13), (10,1), (10,8), (9,7), (10,4), (9,3), (10,15), (9,14), (10,11), (9,10), (10,7)],
+[(11,4), (12,16), (11,15), (12,1), (12,10), (11,9), (12,6), (11,5), (12,2), (11,16), (12,13), (11,12), (12,9), (11,8), (12,5)],
+[(13,2), (14,1), (14,12), (13,11), (14,8), (13,7), (14,4), (13,3), (14,15), (13,14), (14,11), (13,10), (14,7), (13,6), (14,3)],
+[(16,14), (15,13), (16,10), (15,9), (16,6), (15,5), (16,2), (15,16), (16,13), (15,12), (16,9), (15,8), (16,5), (15,4), (16,1)],
+]
+
+rahmenplan[18] = [
+[(1,17), (2,13), (1,15), (2,9), (1,13), (2,5), (1,11), (2,18), (1,9), (2,14), (1,7), (2,10), (1,5), (2,6), (1,3), (1,2), (2,17)],
+[(3,14), (4,11), (3,10), (4,7), (3,6), (4,3), (3,2), (4,16), (3,15), (4,12), (3,11), (4,8), (3,7), (4,1), (4,2), (3,18), (4,15)],
+[(5,12), (6,9), (5,8), (6,5), (5,4), (6,18), (5,17), (6,14), (5,13), (6,10), (5,9), (6,1), (6,4), (5,3), (6,17), (5,16), (6,13)],
+[(7,10), (8,7), (7,6), (8,3), (7,2), (8,16), (7,15), (8,12), (7,11), (8,1), (8,6), (7,5), (8,2), (7,18), (8,15), (7,14), (8,11)],
+[(9,8), (10,5), (9,4), (10,18), (9,17), (10,14), (9,13), (10,1), (10,8), (9,7), (10,4), (9,3), (10,17), (9,16), (10,13), (9,12), (10,9)],
+[(11,6), (12,3), (11,2), (12,16), (11,15), (12,1), (12,10), (11,9), (12,6), (11,5), (12,2), (11,18), (12,15), (11,14), (12,11), (11,10), (12,7)],
+[(13,4), (14,18), (13,17), (14,1), (14,12), (13,11), (14,8), (13,7), (14,4), (13,3), (14,17), (13,16), (14,13), (13,12), (14,9), (13,8), (14,5)],
+[(15,2), (16,1), (16,14), (15,13), (16,10), (15,9), (16,6), (15,5), (16,2), (15,18), (16,15), (15,14), (16,11), (15,10), (16,7), (15,6), (16,3)],
+[(18,16), (17,15), (18,12), (17,11), (18,8), (17,7), (18,4), (17,3), (18,17), (17,16), (18,13), (17,12), (18,9), (17,8), (18,5), (17,4), (18,1)],
+]
+
+rahmenplan[20] = [
+[(1,19), (2,15), (1,17), (2,11), (1,15), (2,7), (1,13), (2,3), (1,11), (2,18), (1,9), (2,14), (1,7), (2,10), (1,5), (2,6), (1,3), (1,2), (2,19)],
+[(3,16), (4,13), (3,12), (4,9), (3,8), (4,5), (3,4), (4,20), (3,19), (4,16), (3,15), (4,12), (3,11), (4,8), (3,7), (4,1), (4,2), (3,20), (4,17)],
+[(5,14), (6,11), (5,10), (6,7), (5,6), (6,3), (5,2), (6,18), (5,17), (6,14), (5,13), (6,10), (5,9), (6,1), (6,4), (5,3), (6,19), (5,18), (6,15)],
+[(7,12), (8,9), (7,8), (8,5), (7,4), (8,20), (7,19), (8,16), (7,15), (8,12), (7,11), (8,1), (8,6), (7,5), (8,2), (7,20), (8,17), (7,16), (8,13)],
+[(9,10), (10,7), (9,6), (10,3), (9,2), (10,18), (9,17), (10,14), (9,13), (10,1), (10,8), (9,7), (10,4), (9,3), (10,19), (9,18), (10,15), (9,14), (10,11)],
+[(11,8), (12,5), (11,4), (12,20), (11,19), (12,16), (11,15), (12,1), (12,10), (11,9), (12,6), (11,5), (12,2), (11,20), (12,17), (11,16), (12,13), (11,12), (12,9)],
+[(13,6), (14,3), (13,2), (14,18), (13,17), (14,1), (14,12), (13,11), (14,8), (13,7), (14,4), (13,3), (14,19), (13,18), (14,15), (13,14), (14,11), (13,10), (14,7)],
+[(15,4), (16,20), (15,19), (16,1), (16,14), (15,13), (16,10), (15,9), (16,6), (15,5), (16,2), (15,20), (16,17), (15,16), (16,13), (15,12), (16,9), (15,8), (16,5)],
+[(17,2), (18,1), (18,16), (17,15), (18,12), (17,11), (18,8), (17,7), (18,4), (17,3), (18,19), (17,18), (18,15), (17,14), (18,11), (17,10), (18,7), (17,6), (18,3)],
+[(20,18), (19,17), (20,14), (19,13), (20,10), (19,9), (20,6), (19,5), (20,2), (19,20), (20,17), (19,16), (20,13), (19,12), (20,9), (19,8), (20,5), (19,4), (20,1)],
+]
+
+rahmenplan[22] = [
+[(1,21), (2,17), (1,19), (2,13), (1,17), (2,9), (1,15), (2,5), (1,13), (2,22), (1,11), (2,18), (1,9), (2,14), (1,7), (2,10), (1,5), (2,6), (1,3), (1,2), (2,21)],
+[(3,18), (4,15), (3,14), (4,11), (3,10), (4,7), (3,6), (4,3), (3,2), (4,20), (3,19), (4,16), (3,15), (4,12), (3,11), (4,8), (3,7), (4,1), (4,2), (3,22), (4,19)],
+[(5,16), (6,13), (5,12), (6,9), (5,8), (6,5), (5,4), (6,22), (5,21), (6,18), (5,17), (6,14), (5,13), (6,10), (5,9), (6,1), (6,4), (5,3), (6,21), (5,20), (6,17)],
+[(7,14), (8,11), (7,10), (8,7), (7,6), (8,3), (7,2), (8,20), (7,19), (8,16), (7,15), (8,12), (7,11), (8,1), (8,6), (7,5), (8,2), (7,22), (8,19), (7,18), (8,15)],
+[(9,12), (10,9), (9,8), (10,5), (9,4), (10,22), (9,21), (10,18), (9,17), (10,14), (9,13), (10,1), (10,8), (9,7), (10,4), (9,3), (10,21), (9,20), (10,17), (9,16), (10,13)],
+[(11,10), (12,7), (11,6), (12,3), (11,2), (12,20), (11,19), (12,16), (11,15), (12,1), (12,10), (11,9), (12,6), (11,5), (12,2), (11,22), (12,19), (11,18), (12,15), (11,14), (12,11)],
+[(13,8), (14,5), (13,4), (14,22), (13,21), (14,18), (13,17), (14,1), (14,12), (13,11), (14,8), (13,7), (14,4), (13,3), (14,21), (13,20), (14,17), (13,16), (14,13), (13,12), (14,9)],
+[(15,6), (16,3), (15,2), (16,20), (15,19), (16,1), (16,14), (15,13), (16,10), (15,9), (16,6), (15,5), (16,2), (15,22), (16,19), (15,18), (16,15), (15,14), (16,11), (15,10), (16,7)],
+[(17,4), (18,22), (17,21), (18,1), (18,16), (17,15), (18,12), (17,11), (18,8), (17,7), (18,4), (17,3), (18,21), (17,20), (18,17), (17,16), (18,13), (17,12), (18,9), (17,8), (18,5)],
+[(19,2), (20,1), (20,18), (19,17), (20,14), (19,13), (20,10), (19,9), (20,6), (19,5), (20,2), (19,22), (20,19), (19,18), (20,15), (19,14), (20,11), (19,10), (20,7), (19,6), (20,3)],
+[(22,20), (21,19), (22,16), (21,15), (22,12), (21,11), (22,8), (21,7), (22,4), (21,3), (22,21), (21,20), (22,17), (21,16), (22,13), (21,12), (22,9), (21,8), (22,5), (21,4), (22,1)],
+]
+
+rahmenplan[24] = [
+[(1,23), (2,19), (1,21), (2,15), (1,19), (2,11), (1,17), (2,7), (1,15), (2,3), (1,13), (2,22), (1,11), (2,18), (1,9), (2,14), (1,7), (2,10), (1,5), (2,6), (1,3), (1,2), (2,23)],
+[(3,20), (4,17), (3,16), (4,13), (3,12), (4,9), (3,8), (4,5), (3,4), (4,24), (3,23), (4,20), (3,19), (4,16), (3,15), (4,12), (3,11), (4,8), (3,7), (4,1), (4,2), (3,24), (4,21)],
+[(5,18), (6,15), (5,14), (6,11), (5,10), (6,7), (5,6), (6,3), (5,2), (6,22), (5,21), (6,18), (5,17), (6,14), (5,13), (6,10), (5,9), (6,1), (6,4), (5,3), (6,23), (5,22), (6,19)],
+[(7,16), (8,13), (7,12), (8,9), (7,8), (8,5), (7,4), (8,24), (7,23), (8,20), (7,19), (8,16), (7,15), (8,12), (7,11), (8,1), (8,6), (7,5), (8,2), (7,24), (8,21), (7,20), (8,17)],
+[(9,14), (10,11), (9,10), (10,7), (9,6), (10,3), (9,2), (10,22), (9,21), (10,18), (9,17), (10,14), (9,13), (10,1), (10,8), (9,7), (10,4), (9,3), (10,23), (9,22), (10,19), (9,18), (10,15)],
+[(11,12), (12,9), (11,8), (12,5), (11,4), (12,24), (11,23), (12,20), (11,19), (12,16), (11,15), (12,1), (12,10), (11,9), (12,6), (11,5), (12,2), (11,24), (12,21), (11,20), (12,17), (11,16), (12,13)],
+[(13,10), (14,7), (13,6), (14,3), (13,2), (14,22), (13,21), (14,18), (13,17), (14,1), (14,12), (13,11), (14,8), (13,7), (14,4), (13,3), (14,23), (13,22), (14,19), (13,18), (14,15), (13,14), (14,11)],
+[(15,8), (16,5), (15,4), (16,24), (15,23), (16,20), (15,19), (16,1), (16,14), (15,13), (16,10), (15,9), (16,6), (15,5), (16,2), (15,24), (16,21), (15,20), (16,17), (15,16), (16,13), (15,12), (16,9)],
+[(17,6), (18,3), (17,2), (18,22), (17,21), (18,1), (18,16), (17,15), (18,12), (17,11), (18,8), (17,7), (18,4), (17,3), (18,23), (17,22), (18,19), (17,18), (18,15), (17,14), (18,11), (17,10), (18,7)],
+[(19,4), (20,24), (19,23), (20,1), (20,18), (19,17), (20,14), (19,13), (20,10), (19,9), (20,6), (19,5), (20,2), (19,24), (20,21), (19,20), (20,17), (19,16), (20,13), (19,12), (20,9), (19,8), (20,5)],
+[(21,2), (22,1), (22,20), (21,19), (22,16), (21,15), (22,12), (21,11), (22,8), (21,7), (22,4), (21,3), (22,23), (21,22), (22,19), (21,18), (22,15), (21,14), (22,11), (21,10), (22,7), (21,6), (22,3)],
+[(24,22), (23,21), (24,18), (23,17), (24,14), (23,13), (24,10), (23,9), (24,6), (23,5), (24,2), (23,24), (24,21), (23,20), (24,17), (23,16), (24,13), (23,12), (24,9), (23,8), (24,5), (23,4), (24,1)],
+]
+
+
+def get_schluesselzahlen(nTeams):
+ schluessel = {md:[] for md in range(1,nTeams)}
+ for row in rahmenplan[nTeams]:
+ for md,(t1,t2) in enumerate(row):
+ schluessel[nTeams-(md+1)].append((t1,t2))
+
+ pattern = {
+ i: [] for i in range(1,nTeams+1)
+ }
+ opponent = {
+ i: [] for i in range(1,nTeams+1)
+ }
+
+
+ for md in schluessel:
+ for (t1,t2) in schluessel[md]:
+ pattern[t1].append("H")
+ pattern[t2].append("A")
+ opponent[t1].append(t2)
+ opponent[t2].append(t1)
+
+ return pattern, opponent
diff --git a/dfbnet/serializers.py b/dfbnet/serializers.py
new file mode 100644
index 0000000..ee9261e
--- /dev/null
+++ b/dfbnet/serializers.py
@@ -0,0 +1,23 @@
+from rest_framework import serializers
+
+
+""" DATA SERIALIZERS """
+
+class TeamsSerializer(serializers.Serializer):
+ id = serializers.CharField()
+ name = serializers.CharField()
+ division = serializers.IntegerField()
+
+
+class DivisionsSerializer(serializers.Serializer):
+ id = serializers.IntegerField()
+ name = serializers.CharField()
+ teams = TeamsSerializer(many=True)
+ nTeams = serializers.IntegerField()
+ ms_art = serializers.CharField()
+ patterns = serializers.DictField()
+ opponents = serializers.DictField()
+
+
+class ProblemSerializer(serializers.Serializer):
+ divisions = DivisionsSerializer(many=True)
\ No newline at end of file
diff --git a/dfbnet/simulation_1/auswertung_sachsen.py b/dfbnet/simulation_1/auswertung_sachsen.py
new file mode 100644
index 0000000..5c6549f
--- /dev/null
+++ b/dfbnet/simulation_1/auswertung_sachsen.py
@@ -0,0 +1,143 @@
+# %%
+
+import googlemaps
+
+from gmplot import GoogleMapPlotter
+import json
+import pandas as pd
+import ast
+import random
+import itertools
+import time
+
+
+# %%
+with open("data/previous_stats_road_distance.json", "r", encoding="utf-8") as f:
+ stats_previous_road = json.load(f)
+stats_previous_road = {ast.literal_eval(k) if k != "overall" else k: v for k, v in stats_previous_road.items()}
+
+with open("data/previous_stats_road_duration.json", "r", encoding="utf-8") as f:
+ stats_previous_duration = json.load(f)
+stats_previous_duration = {ast.literal_eval(k) if k != "overall" else k: v for k, v in stats_previous_duration.items()}
+
+with open("data/new_stats_road_distance.json", "r", encoding="utf-8") as f:
+ stats_new_road = json.load(f)
+stats_new_road = {ast.literal_eval(k) if k != "overall" else k: v for k, v in stats_new_road.items()}
+
+with open("data/new_stats_road_duration.json", "r", encoding="utf-8") as f:
+ stats_new_duration = json.load(f)
+stats_new_duration = {ast.literal_eval(k) if k != "overall" else k: v for k, v in stats_new_duration.items()}
+
+
+
+# %%
+
+
+print(stats_previous_road['overall'])
+print(stats_previous_duration['overall'])
+print(stats_new_road['overall'])
+print(stats_new_duration['overall'])
+
+overall_stats = {
+ "previous": {
+ "road": stats_previous_road['overall'],
+ "duration": stats_previous_duration['overall']
+ },
+ "new": {
+ "road": stats_new_road['overall'],
+ "duration": stats_new_duration['overall']
+ }
+}
+
+# %%
+
+import plotly.graph_objects as go
+
+
+# %%
+
+
+""" create bar plot for overall stats """
+fig = go.Figure()
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+    y=[overall_stats["previous"]["road"]['total_distance'], overall_stats["previous"]["duration"]['total_distance']],  # NOTE(review): duration stats indexed with 'total_distance' — confirm the duration JSON really uses this key name
+ name="Previous",
+ marker_color='rgb(55, 83, 109)'
+))
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["new"]["road"]['total_distance'], overall_stats["new"]["duration"]['total_distance']],
+ name="New",
+ marker_color='rgb(26, 118, 255)'
+))
+
+fig.update_layout(
+ title="Total Distances",
+ xaxis_title="",
+ yaxis_title="Distance in km / Time in m",
+ barmode='group'
+)
+
+fig.show()
+
+# %%
+
+
+""" create bar plot for overall stats """
+fig = go.Figure()
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["previous"]["road"]['average_distance'], overall_stats["previous"]["duration"]['average_distance']],
+ name="Previous",
+ marker_color='rgb(55, 83, 109)'
+))
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["new"]["road"]['average_distance'], overall_stats["new"]["duration"]['average_distance']],
+ name="New",
+ marker_color='rgb(26, 118, 255)'
+))
+
+fig.update_layout(
+ title="Average Distances per Team",
+ xaxis_title="",
+ yaxis_title="Distance in km / Time in m",
+ barmode='group'
+)
+
+fig.show()
+
+
+# %%
+
+
+""" create bar plot for overall stats """
+fig = go.Figure()
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["previous"]["road"]['average_group_distance'], overall_stats["previous"]["duration"]['average_group_distance']],
+ name="Previous",
+ marker_color='rgb(55, 83, 109)'
+))
+fig.add_trace(go.Bar(
+ x=["Distance", "Duration"],
+ y=[overall_stats["new"]["road"]['average_group_distance'], overall_stats["new"]["duration"]['average_group_distance']],
+ name="New",
+ marker_color='rgb(26, 118, 255)'
+))
+
+fig.update_layout(
+ title="Average Distance per Group",
+ xaxis_title="",
+ yaxis_title="Distance in km / Time in m",
+ barmode='group'
+)
+
+fig.show()
+
+
+
+
+
+# %%
diff --git a/dfbnet/spielstaetten.py b/dfbnet/spielstaetten.py
new file mode 100644
index 0000000..ef8b162
--- /dev/null
+++ b/dfbnet/spielstaetten.py
@@ -0,0 +1,30 @@
+# %%
+import os
+import pandas as pd
+import googlemaps
+
+gmaps = googlemaps.Client(key=os.environ["GOOGLE_MAPS_API_KEY"])  # SECURITY: API key moved out of source; the committed key must be revoked/rotated
+
+""" LOAD SPIELSTAETTEN FROM CSV"""
+
+def get_venues():
+ venues = pd.read_excel("data/beispieldaten_spielstaetten.xlsx")
+
+ venues = venues.to_dict(orient='records')
+
+
+ for v in venues:
+ geocode_result = gmaps.geocode("Dresden "+v['SB_SPST_NAME'])
+ latitude =0
+ longitude =0
+ if len(geocode_result)>0:
+ location = geocode_result[0]['geometry']['location']
+ latitude = location['lat']
+ longitude = location['lng']
+
+ v['latitude'] = latitude
+ v['longitude'] = longitude
+
+ return venues
+
+# %%
diff --git a/dfbnet/stats_sachsen.py b/dfbnet/stats_sachsen.py
new file mode 100644
index 0000000..d8cf5a3
--- /dev/null
+++ b/dfbnet/stats_sachsen.py
@@ -0,0 +1,106 @@
+# %%
+
+from pulp import (
+ LpVariable,
+ LpProblem,
+ LpMinimize,
+ lpSum,
+ LpStatus,
+ value,
+ LpInteger,
+ LpContinuous,
+ XPRESS,
+)
+
+import googlemaps
+
+from gmplot import GoogleMapPlotter
+import json
+import pandas as pd
+import ast
+import random
+import itertools
+import time
+
+
+
+with open("data/sachsen.json", "r", encoding="utf-8") as f:
+ competitions = json.load(f)
+
+competitions = {ast.literal_eval(k): v for k, v in competitions.items()}
+
+# region
+# STAFFELN PRO ART UND KLASSE
+# ('Herren', 'Landesliga') 1
+# ('Herren', 'Landesklasse') 3
+# ('Frauen', 'Landesliga') 1
+# ('Frauen', 'Landesklasse') 3
+# ('A-Junioren', 'Landesliga') 1
+# ('A-Junioren', 'Landesklasse') 4
+# ('Herren', 'Kreisoberliga') 13
+# ('Herren', '1.Kreisliga (A)') 19
+# ('Herren', '2.Kreisliga (B)') 8
+# ('Herren', '3.Kreisliga (C)') 1
+# ('Herren', '1.Kreisklasse') 21
+# ('Herren', '2.Kreisklasse') 9
+# ('A-Junioren', 'Kreisoberliga') 10
+# ('A-Junioren', '1.Kreisliga (A)') 6
+# ('Frauen', 'Kreisoberliga') 4
+# ('Frauen', '1.Kreisliga (A)') 1
+# ('Frauen', '1.Kreisklasse') 3
+# ('B-Junioren', 'Landesliga') 1
+# ('B-Junioren', 'Landesklasse') 4
+# ('B-Junioren', 'Kreisoberliga') 13
+# ('B-Junioren', '1.Kreisliga (A)') 13
+# ('B-Junioren', '1.Kreisklasse') 1
+# ('C-Junioren', 'Landesliga') 1
+# ('C-Junioren', 'Landesklasse') 4
+# ('C-Junioren', 'Kreisoberliga') 16
+# ('C-Junioren', '1.Kreisliga (A)') 15
+# ('C-Junioren', '1.Kreisklasse') 9
+# ('D-Junioren', 'Landesliga') 1
+# ('D-Junioren', 'Landesklasse') 6
+# ('D-Junioren', 'Kreisoberliga') 16
+# ('D-Junioren', '1.Kreisliga (A)') 24
+# ('D-Junioren', '2.Kreisliga (B)') 8
+# ('D-Junioren', '3.Kreisliga (C)') 2
+# ('D-Junioren', '1.Kreisklasse') 33
+# ('D-Junioren', '2.Kreisklasse') 10
+# ('B-Juniorinnen', 'Landesliga') 1
+# ('B-Juniorinnen', 'Landesklasse') 2
+# ('C-Juniorinnen', 'Landesklasse') 3
+# ('D-Juniorinnen', 'Kreisoberliga') 1
+# ('Herren Ü35', 'Kreisoberliga') 4
+# ('Herren Ü35', '1.Kreisliga (A)') 3
+# ('Herren Ü35', '1.Kreisklasse') 3
+# ('Herren Ü35', '2.Kreisklasse') 1
+# ('Herren Ü40', '1.Kreisliga (A)') 5
+# ('Herren Ü40', '1.Kreisklasse') 1
+# ('Herren Ü50', '1.Kreisliga (A)') 1
+# ('Herren Ü50', '1.Kreisklasse') 1
+# ('Freizeitsport', '1.Kreisliga (A)') 3
+# ('Freizeitsport', '1.Kreisklasse') 2
+# endregion
+
+previous_statistics = {}
+
+competition_details = {}
+color = None
+for staffel, attr in competitions.items():
+ competitions[staffel]["distance"] = []
+
+ if (staffel[0], staffel[1]) not in competition_details:
+ competition_details[(staffel[0], staffel[1])] = {
+ "nStaffeln": 1,
+ "nTeams": 0,
+ "previous_distances": [],
+ "teams": [],
+ }
+ else:
+ competition_details[(staffel[0], staffel[1])]["nStaffeln"] += 1
+
+
+for key, val in competitions.items():
+ print(key, val)
+
+# %%
diff --git a/fastapi/mock/Dockerfile b/fastapi/mock/Dockerfile
new file mode 100644
index 0000000..2edb0f3
--- /dev/null
+++ b/fastapi/mock/Dockerfile
@@ -0,0 +1,6 @@
+FROM python:3.13
+WORKDIR /www
+COPY requirements.txt requirements.txt
+RUN pip install --no-cache-dir --upgrade -r requirements.txt
+COPY app api
+CMD ["uvicorn", "api.main:app", "--host", "0.0.0.0", "--port", "4321", "--reload"]
\ No newline at end of file
diff --git a/fastapi/mock/app/__init__.py b/fastapi/mock/app/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/fastapi/mock/app/main.py b/fastapi/mock/app/main.py
new file mode 100644
index 0000000..2688b42
--- /dev/null
+++ b/fastapi/mock/app/main.py
@@ -0,0 +1,89 @@
+from fastapi import FastAPI, Depends, HTTPException, Security
+from fastapi.security.api_key import APIKeyHeader
+
+from fastapi.openapi.models import APIKey
+from fastapi.openapi.utils import get_openapi
+
+
+from pydantic import BaseModel
+from typing import Dict
+
+
+app = FastAPI()
+
+
+
+API_KEY = "your-secret-api-key"
+API_KEY_NAME = "X-API-Key"
+
+api_key_header = APIKeyHeader(name=API_KEY_NAME, auto_error=False)
+
+def get_api_key(api_key: str = Depends(api_key_header)):
+ if api_key == API_KEY:
+ return api_key
+ else:
+ raise HTTPException(
+ status_code=403,
+ detail="Invalid or missing API Key",
+ )
+
+
+def custom_openapi():
+ if app.openapi_schema:
+ return app.openapi_schema
+ openapi_schema = get_openapi(
+ title="FastAPI with API Key Authentication",
+ version="1.0.0",
+ description="API documentation with API Key authentication",
+ routes=app.routes,
+ )
+ openapi_schema["components"]["securitySchemes"] = {
+ "APIKeyHeader": {
+ "type": "apiKey",
+ "name": API_KEY_NAME,
+ "in": "header",
+ }
+ }
+ openapi_schema["security"] = [{"APIKeyHeader": []}]
+ app.openapi_schema = openapi_schema
+ return app.openapi_schema
+
+app.openapi = custom_openapi
+
+
+
+
+# In-memory "database"
+items_db: Dict[int, dict] = {}
+
+class Item(BaseModel):
+ name: str
+    description: str | None = None  # was `str = None`: implicit-Optional is rejected by Pydantic v2 (errors at model definition)
+ price: float
+
+@app.get("/items3/{item_id}", response_model=Item, dependencies=[Depends(get_api_key)])
+async def get_item(item_id: int):
+ item = items_db.get(item_id)
+ if not item:
+ raise HTTPException(status_code=404, detail="Item not found")
+ return item
+
+@app.post("/items/", response_model=Item, status_code=201)
+async def create_item(item_id: int, item: Item):
+ if item_id in items_db:
+ raise HTTPException(status_code=400, detail="Item ID already exists")
+ items_db[item_id] = item.dict()
+ return item
+
+@app.put("/items/{item_id}", response_model=Item)
+async def update_item(item_id: int, item: Item):
+ if item_id not in items_db:
+ raise HTTPException(status_code=404, detail="Item not found")
+ items_db[item_id] = item.dict()
+ return item
+
+@app.delete("/items/{item_id}", status_code=204)
+async def delete_item(item_id: int):
+ if item_id not in items_db:
+ raise HTTPException(status_code=404, detail="Item not found")
+ del items_db[item_id]
\ No newline at end of file
diff --git a/fastapi/mock/docker-compose.yml b/fastapi/mock/docker-compose.yml
new file mode 100644
index 0000000..6c5e771
--- /dev/null
+++ b/fastapi/mock/docker-compose.yml
@@ -0,0 +1,14 @@
+version: "3"  # NOTE: top-level `version` is obsolete in the Compose v2 specification and can be removed
+services:
+ api:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ restart: unless-stopped
+ container_name: fastapi-mock
+ ports:
+ - "4321:4321"
+ user: "1002:1002"
+ environment:
+ - WATCHFILES_FORCE_POLLING=true # This is needed for hot reloading to work on windows (fix watch feature) for python
+ - WATCHPACK_POLLING=true # This is needed for hot reloading to work on windows (fix watch feature)