This commit is contained in:
martin 2024-10-15 10:41:25 +02:00
parent c583dd4d7e
commit 33ba6b819d
17 changed files with 3116 additions and 88 deletions

View File

@ -877,6 +877,38 @@ class Draw_Simulator:
if p2 == t2['pot']: if p2 == t2['pot']:
model_xp.addConstraint( model_xp.addConstraint(
x_xp[t1['id'], g1] + x_xp[t2['id'], g2] <= 1) x_xp[t1['id'], g1] + x_xp[t2['id'], g2] <= 1)
# no more than 2 teams from same country in one group
for c in self.countries:
for g in self.groups:
model_xp.addConstraint(
xp.Sum([x_xp[t['id'], g] for t in self.teams if t['country'] == c]) <= 2)
# do not play other countries more than 3 times
# for t1 in self.teams:
# for c in ['FIN']:
# for (g1, p1), awayGames in self.awayGames.items():
# if p1 == t1['pot']:
# model_xp.addConstraint(
# xp.Sum([x_xp[t2['id'], g2] for (g2, p2) in awayGames for t2 in self.teams if p2 == t2['pot'] and t2['country'] == c]) <= 3)
# print(t1['name'],t1['country'],t1['pot'],[t2['name'] for (g2, p2) in awayGames for t2 in self.teams if p2 == t2['pot'] and t2['country'] == c])
# for (g2, p2) in awayGames:
# if p2 == t2['pot']:
# model_xp.addConstraint(
# x_xp[t1['id'], g1] + x_xp[t2['id'], g2] <= 1)
# # model_xp.addConstraint(
# xp.Sum([x_xp[t['id'], g] for t in self.teams if t['country'] == c]) <= 2)
# for t in self.teams:
# for c in ['FIN']:
# for t2 in self.teams_by_country[c]:
# for (g1, p1), awayGames in self.awayGames.items():
# if p1 == t1['pot']:
# for (g2, p2) in awayGames:
# if p2 == t2['pot']:
# print(t['name'],t['country'], t2['name'], t2['country'])
# model_xp.addConstraint(
# xp.Sum([x_xp[t2['id'], g] for (g2, p2) in awayGames]) <= 3)
# add fixations # add fixations
nFixed = 0 nFixed = 0
@ -1024,6 +1056,7 @@ class Draw_Simulator:
coefficients = defaultdict(lambda:0) coefficients = defaultdict(lambda:0)
visited_countries = defaultdict(lambda:set({})) visited_countries = defaultdict(lambda:set({}))
visited_finland = defaultdict(lambda:0) visited_finland = defaultdict(lambda:0)
travel_finland = defaultdict(lambda:0)
if not infeasible: if not infeasible:
for g in self.groups: for g in self.groups:
for p in self.pots: for p in self.pots:
@ -1031,6 +1064,8 @@ class Draw_Simulator:
for ag in self.awayGames[g, p]: for ag in self.awayGames[g, p]:
t2 = sol_dict[ag[0]][ag[1]] t2 = sol_dict[ag[0]][ag[1]]
travel[t1['id']] += self.distance_matrix[t1['id'],t2['id']] travel[t1['id']] += self.distance_matrix[t1['id'],t2['id']]
if t2['country'] == 'FIN':
travel_finland[t1['id']] += 1
for op in self.opponents[g,p]: for op in self.opponents[g,p]:
t2 = sol_dict[op[0]][op[1]] t2 = sol_dict[op[0]][op[1]]
if self.opponent_func.__name__ == 'groups_6_4': if self.opponent_func.__name__ == 'groups_6_4':
@ -1046,7 +1081,6 @@ class Draw_Simulator:
blockings = defaultdict(lambda:0) blockings = defaultdict(lambda:0)
breaks = defaultdict(lambda:0) breaks = defaultdict(lambda:0)
tmp_stats[n] = { tmp_stats[n] = {
@ -1061,6 +1095,7 @@ class Draw_Simulator:
'blockings':blockings, 'blockings':blockings,
'breaks':breaks, 'breaks':breaks,
'visited_finland':visited_finland, 'visited_finland':visited_finland,
'travel_finland':travel_finland,
} }
with open(f'json/{self.opponent_func.__name__}_{n}.json', 'w') as f: with open(f'json/{self.opponent_func.__name__}_{n}.json', 'w') as f:
@ -1114,7 +1149,7 @@ class Draw_Simulator:
visited_finland = { visited_finland = {
t['id']: { t['id']: {
'sum': np.sum([1 for s in tmp_stats.values() if s['visited_finland'][t['id']] >= 4]), 'sum': np.sum([1 for s in tmp_stats.values() if s['visited_finland'][t['id']] >= 3]),
} }
for t in self.teams for t in self.teams
} }
@ -1122,6 +1157,17 @@ class Draw_Simulator:
'sum': np.sum([visited_finland[t['id']]['sum'] for t in self.teams]), 'sum': np.sum([visited_finland[t['id']]['sum'] for t in self.teams]),
} }
travel_finland = {
t['id']: {
'sum': np.sum([1 for s in tmp_stats.values() if s['travel_finland'][t['id']] >= 3]),
}
for t in self.teams
}
travel_finland['total'] = {
'sum': np.sum([travel_finland[t['id']]['sum'] for t in self.teams]),
}
blockings = { blockings = {
t['id']: { t['id']: {
'mean': round(np.mean([s['blockings'][t['id']] for s in tmp_stats.values()]),3), 'mean': round(np.mean([s['blockings'][t['id']] for s in tmp_stats.values()]),3),
@ -1153,6 +1199,7 @@ class Draw_Simulator:
'coefficient_stats':coefficient_stats, 'coefficient_stats':coefficient_stats,
'visited_countries':visited_countries, 'visited_countries':visited_countries,
'visited_finland':visited_finland, 'visited_finland':visited_finland,
'travel_finland':travel_finland,
'blockings':blockings, 'blockings':blockings,
'breaks':breaks, 'breaks':breaks,
} }
@ -1328,14 +1375,14 @@ sol += "<table style='border:1px solid black'>\n"
sol += "<thead>\n" sol += "<thead>\n"
sol += f"<tr><td rowspan='4'>n={nSim}</td>" sol += f"<tr><td rowspan='4'>n={nSim}</td>"
sol += f"<td colspan='{len(simulator.pots)}'>Conflicts</td>" sol += f"<td colspan='{len(simulator.pots)}'>Conflicts</td>"
sol += f"<td colspan='9' rowspan='2'>Total</td>" sol += f"<td colspan='10' rowspan='2'>Total</td>"
sol += f"<td colspan='{7*len(simulator.teams)}'>Teams</td>" sol += f"<td colspan='{8*len(simulator.teams)}'>Teams</td>"
sol += "</tr>" sol += "</tr>"
sol += "<tr>" sol += "<tr>"
for p in simulator.pots: for p in simulator.pots:
sol += f"<td rowspan='3'>{p}</td>" sol += f"<td rowspan='3'>{p}</td>"
for t in simulator.teams: for t in simulator.teams:
sol+= f"<td colspan='8'>{t['name']} ({t['country']})</td>" sol+= f"<td colspan='9'>{t['name']} ({t['country']})</td>"
sol += "</tr>" sol += "</tr>"
sol += "<tr>" sol += "<tr>"
sol += "<td colspan='1' rowspan='2'>Cfl.</td>" sol += "<td colspan='1' rowspan='2'>Cfl.</td>"
@ -1344,14 +1391,16 @@ sol += "<td colspan='2'>Coe.</td>"
sol += "<td colspan='1' rowspan='2'>Block</td>" sol += "<td colspan='1' rowspan='2'>Block</td>"
sol += "<td colspan='1' rowspan='2'>No Travel</td>" sol += "<td colspan='1' rowspan='2'>No Travel</td>"
sol += "<td colspan='1' rowspan='2'>Countr.</td>" sol += "<td colspan='1' rowspan='2'>Countr.</td>"
sol += "<td colspan='1' rowspan='2'>Finland.</td>" sol += "<td colspan='1' rowspan='2'>Play Finland 3x</td>"
sol += "<td colspan='1' rowspan='2'>Travel Finland 3x</td>"
for t in simulator.teams: for t in simulator.teams:
sol += "<td colspan='2' rowspan='1'>Trav.</td>" sol += "<td colspan='2' rowspan='1'>Trav.</td>"
sol += "<td colspan='2' rowspan='1'>Coe.</td>" sol += "<td colspan='2' rowspan='1'>Coe.</td>"
sol += "<td colspan='1' rowspan='2'>Block</td>" sol += "<td colspan='1' rowspan='2'>Block</td>"
sol += "<td colspan='1' rowspan='2'>No Travel</td>" sol += "<td colspan='1' rowspan='2'>No Travel</td>"
sol += "<td colspan='1' rowspan='2'>Countr.</td>" sol += "<td colspan='1' rowspan='2'>Countr.</td>"
sol += "<td colspan='1' rowspan='2'>Finland.</td>" sol += "<td colspan='1' rowspan='2'>Play Finland 3x</td>"
sol += "<td colspan='1' rowspan='2'>Travel Finland 3x</td>"
sol += "</tr>" sol += "</tr>"
sol += "<tr>" sol += "<tr>"
sol += "<td>M</td>" sol += "<td>M</td>"
@ -1384,6 +1433,7 @@ for func in funcs:
sol += f"<td>{stats[func.__name__]['breaks']['total']['sum']}</td>" sol += f"<td>{stats[func.__name__]['breaks']['total']['sum']}</td>"
sol += f"<td>{stats[func.__name__]['visited_countries']['total']['mean']}</td>" sol += f"<td>{stats[func.__name__]['visited_countries']['total']['mean']}</td>"
sol += f"<td>{stats[func.__name__]['visited_finland']['total']['sum']}</td>" sol += f"<td>{stats[func.__name__]['visited_finland']['total']['sum']}</td>"
sol += f"<td>{stats[func.__name__]['travel_finland']['total']['sum']}</td>"
for t in simulator.teams: for t in simulator.teams:
tmean = stats[func.__name__]['travel_stats'][t['id']]['mean'] tmean = stats[func.__name__]['travel_stats'][t['id']]['mean']
tstd = stats[func.__name__]['travel_stats'][t['id']]['std'] tstd = stats[func.__name__]['travel_stats'][t['id']]['std']
@ -1393,6 +1443,7 @@ for func in funcs:
brmean = stats[func.__name__]['breaks'][t['id']]['mean'] brmean = stats[func.__name__]['breaks'][t['id']]['mean']
visited = stats[func.__name__]['visited_countries'][t['id']]['mean'] visited = stats[func.__name__]['visited_countries'][t['id']]['mean']
visited_finland = stats[func.__name__]['visited_finland'][t['id']]['sum'] visited_finland = stats[func.__name__]['visited_finland'][t['id']]['sum']
travel_finland = stats[func.__name__]['travel_finland'][t['id']]['sum']
color = Draw_Simulator.heatmap_color_for(abs(tmean-ttmean)/ttmean) color = Draw_Simulator.heatmap_color_for(abs(tmean-ttmean)/ttmean)
sol += f"<td style='background-color:{color}'>{tmean}</td>" sol += f"<td style='background-color:{color}'>{tmean}</td>"
color = Draw_Simulator.heatmap_color_for(abs(tstd-ttstd)/ttstd) color = Draw_Simulator.heatmap_color_for(abs(tstd-ttstd)/ttstd)
@ -1405,6 +1456,7 @@ for func in funcs:
sol += f"<td >{brmean}</td>" sol += f"<td >{brmean}</td>"
sol += f"<td >{visited}</td>" sol += f"<td >{visited}</td>"
sol += f"<td >{visited_finland}</td>" sol += f"<td >{visited_finland}</td>"
sol += f"<td >{travel_finland}</td>"
sol += "</tr>" sol += "</tr>"
sol += "</tbody>\n" sol += "</tbody>\n"

View File

@ -0,0 +1,266 @@
# %%
# Important header + Xpress setup.
# Notebook-style script (cells delimited by "# %%") that bootstraps a
# standalone Django environment so the project ORM can be used outside the
# web server, then points it at a local PostgreSQL instance.
# NOTE(review): this header is duplicated verbatim in a sibling script —
# consider extracting it into a shared bootstrap module.
"""
##############################################################################################################
"""
PROJECT_PATH = '/home/md/Work/ligalytics/leagues_stable/'
import os, sys
sys.path.insert(0, PROJECT_PATH)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "leagues.settings")
# Allow ORM calls from async-unsafe contexts (e.g. a Jupyter kernel).
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
from leagues import settings
# settings.DATABASES['default']['NAME'] = PROJECT_PATH+'/db.sqlite3'
# Override the project database with a local PostgreSQL instance.
# NOTE(review): database credentials are hard-coded and committed to VCS —
# move them to environment variables / a secrets store and rotate the password.
settings.DATABASES['default']['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
settings.DATABASES['default']['HOST'] = '0.0.0.0'
settings.DATABASES['default']['PORT'] = '5432'
settings.DATABASES['default']['USER'] = 'postgres'
settings.DATABASES['default']['PASSWORD'] = 'secret123'
settings.DATABASES['default']['NAME'] = 'mypgsqldb'
settings.DATABASES['default']['ATOMIC_REQUESTS'] = False
settings.DATABASES['default']['AUTOCOMMIT'] = True
settings.DATABASES['default']['CONN_MAX_AGE'] = 0
settings.DATABASES['default']['CONN_HEALTH_CHECKS'] = False
settings.DATABASES['default']['OPTIONS'] = {}
# FICO Xpress runtime environment (library/search paths for the optimizer).
os.environ["XPRESSDIR"] = "/opt/xpressmp"
os.environ["XPRESS"] = "/opt/xpressmp/bin"
os.environ["LD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["DYLD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["SHLIB_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["LIBPATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["PYTHONPATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprs.jar"
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprb.jar" + os.pathsep + os.environ["CLASSPATH"]
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprm.jar" + os.pathsep + os.environ["CLASSPATH"]
os.environ["PATH"] = os.environ["XPRESSDIR"] + "/bin" + os.pathsep + os.environ["PATH"]
import django
django.setup()
import csv
"""
##############################################################################################################
"""
# %%
from scheduler.models import *
from referees.models import *
# Scenario id is hard-coded for this one-off data-fixing session.
scenario = Scenario.objects.get(id=10991)
season= scenario.season
# Day.objects.filter(season=scenario.season).delete()
# Bulk-assign Denmark as the country of every Location in this season.
# NOTE(review): this mutates data unconditionally on every run — confirm intended.
Location.objects.filter(season=season).update(country = Country.objects.get(season=season,name='Denmark'))
# %%
# %%
# %%
# %%
# %%
# %%
# with open('referees/fixtures/metalligen2425/2425_arenas.csv','r') as f:
# reader = csv.reader(f)
# next(reader)
# for row in reader:
# print(row)
# latitude,longitude = row[3].replace(',','.').split(' ')
# print(latitude,longitude)
# Location.objects.get_or_create(season=scenario.season, name=row[1], city=row[4], latitude=latitude, longitude=longitude)
# # %%
# Role.objects.get_or_create(season=scenario.season,name='Referee',order=0,min_required=2,max_required=2)
# Role.objects.get_or_create(season=scenario.season,name='Linesperson',order=1,min_required=2,max_required=2)
# Category.objects.get_or_create(season=scenario.season,name='A',order=0)
# Category.objects.get_or_create(season=scenario.season,name='B',order=1)
# Category.objects.get_or_create(season=scenario.season,name='C',order=2)
# # %%
# import googlemaps
# SECURITY(review): a real Google Maps API key is committed below (even though
# the code is commented out) — revoke it and load from an environment variable.
# gmaps = googlemaps.Client(
#     key='AIzaSyB76EhR4OqjdXHQUiTkHZC0Svx_7cPGqyU')
# with open('referees/fixtures/metalligen2425/2425_officials.csv','r') as f:
# reader = csv.reader(f)
# next(reader)
# for row in reader:
# print(row)
# id = row[0]
# name = row[1]
# zip_code = row[2]
# city = row[3]
# region = row[4]
# role= row[5]
# category = row[6]
# target_games = row[7]
# geocode_result = gmaps.geocode(city+" "+zip_code)
# if len(geocode_result) > 0:
# location = geocode_result[0]['geometry']['location']
# print("\t", location)
# else:
# location = {'lat': 0, 'lng': 0}
# print("\t", "NOT FOUND")
# delegate, created = Delegate.objects.get_or_create(season=scenario.season,
# category= Category.objects.filter(season=scenario.season,name=category).first(),
# name= name,
# latitude= location['lat'],
# longitude= location['lng'],
# )
# location, created = Location.objects.get_or_create(season=scenario.season,
# name=city,
# city=city,
# type=3,
# latitude=location['lat'],
# longitude=location['lng'])
# delegate.location = location
# delegate.save()
# Delegate.objects.filter(season=scenario.season).update(country=Country.objects.get(season=scenario.season,name='Denmark'))
# %%
# competition, created = Competition.objects.get_or_create(season=season,name='Metalligen 2425')
# # %%
# RefGame.objects.filter(scenario=scenario).delete()
# with open('referees/fixtures/metalligen2425/2425_schedule.csv','r') as f:
# reader = csv.reader(f)
# next(reader)
# for row in reader:
# # print(row)
# day = datetime.datetime.strptime(row[2],'%m/%d/%Y')
# date, created = Day.objects.get_or_create(
# season=season,
# date=day,
# day=day.strftime("%d-%m-%Y")
# )
# time = row[4]
# timeslot, created = TimeSlot.objects.get_or_create(
# season=season,
# name=time,
# start=time,
# )
# gameID = row[6]
# homeTeam = Team.objects.filter(season=season,name=row[7]).first()
# awayTeam = Team.objects.filter(season=season,name=row[8]).first()
# if not homeTeam and awayTeam:
# print("ERROR: ", row)
# continue
# arena = Location.objects.filter(season=season,name=row[9]).first()
# RefGame.objects.get_or_create(
# homeTeam=homeTeam,
# awayTeam=awayTeam,
# scenario=scenario,
# timeslot=timeslot,
# day=date,
# location=arena,
# competition=competition,
# )
# # %%
# Day.objects.filter(season=season).update(round=1)
# %%
# teams = Team.objects.filter(season=season)
# firstday = Day.objects.filter(season=season).order_by('date').first()
# lastday = Day.objects.filter(season=season).order_by('date').last()
# print(firstday.date, lastday.date)
# # %%
# RefWish.objects.filter(scenario=scenario).delete()
# with open('referees/fixtures/metalligen2425/2425_officials.csv','r') as f:
# reader = csv.reader(f)
# next(reader)
# for row in reader:
# print(row)
# id = row[0]
# name = row[1]
# zip_code = row[2]
# city = row[3]
# region = row[4]
# role= row[5]
# category = row[6]
# target_games = row[7]
# delegate = Delegate.objects.filter(season=season,name=name).first()
# wish = RefWish.objects.create(
# scenario=scenario,
# startFirstDay=True,
# endLastDay=True,
# day=firstday,
# day2=lastday,
# minGames=target_games,
# maxGames=target_games,
# reason=f'Target {target_games} Games for {delegate.name} ({delegate.category.name})',
# prio='A'
# )
# wish.teams.add(*teams)
# wish.delegates.add(delegate)
# # # %%
# for d in Day.objects.filter(season=season):
# d.day = d.date.strftime("%Y-%m-%d")
# d.save()
# # date=day,
# # day=day.strftime("%d-%m-%Y")
# # %%
# for r in Requirement.objects.filter(season=season):
# print(r.competition,r.role,r.classification)
# # %%
# game = RefGame.objects.filter(scenario=scenario).first()
# # %%
# for a in Assignment.objects.filter(game=game):
# print(a.game,a.role,a.delegate)
# # %%
# Delegate.objects.filter(season=season,roles__isnull=True).delete()
# # %%
# %%

View File

@ -0,0 +1,270 @@
# %%
# Important header + Xpress setup.
# Notebook-style script (cells delimited by "# %%") that bootstraps a
# standalone Django environment, then runs the referee-assignment optimizer
# for one hard-coded scenario.
# NOTE(review): this header is duplicated verbatim in a sibling script —
# consider extracting it into a shared bootstrap module.
"""
##############################################################################################################
"""
PROJECT_PATH = '/home/md/Work/ligalytics/leagues_stable/'
import os, sys
sys.path.insert(0, PROJECT_PATH)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "leagues.settings")
# Allow ORM calls from async-unsafe contexts (e.g. a Jupyter kernel).
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
from leagues import settings
# settings.DATABASES['default']['NAME'] = PROJECT_PATH+'/db.sqlite3'
# Override the project database with a local PostgreSQL instance.
# NOTE(review): database credentials are hard-coded and committed to VCS —
# move them to environment variables / a secrets store and rotate the password.
settings.DATABASES['default']['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
settings.DATABASES['default']['HOST'] = '0.0.0.0'
settings.DATABASES['default']['PORT'] = '5432'
settings.DATABASES['default']['USER'] = 'postgres'
settings.DATABASES['default']['PASSWORD'] = 'secret123'
settings.DATABASES['default']['NAME'] = 'mypgsqldb'
settings.DATABASES['default']['ATOMIC_REQUESTS'] = False
settings.DATABASES['default']['AUTOCOMMIT'] = True
settings.DATABASES['default']['CONN_MAX_AGE'] = 0
settings.DATABASES['default']['CONN_HEALTH_CHECKS'] = False
settings.DATABASES['default']['OPTIONS'] = {}
# FICO Xpress runtime environment (library/search paths for the optimizer).
os.environ["XPRESSDIR"] = "/opt/xpressmp"
os.environ["XPRESS"] = "/opt/xpressmp/bin"
os.environ["LD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["DYLD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["SHLIB_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["LIBPATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["PYTHONPATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprs.jar"
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprb.jar" + os.pathsep + os.environ["CLASSPATH"]
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprm.jar" + os.pathsep + os.environ["CLASSPATH"]
os.environ["PATH"] = os.environ["XPRESSDIR"] + "/bin" + os.pathsep + os.environ["PATH"]
import django
django.setup()
import csv
"""
##############################################################################################################
"""
# %%
from scheduler.models import *
from referees.models import *
# Scenario id is hard-coded for this one-off run.
scenario = Scenario.objects.get(id=10991)
season= scenario.season
# Day.objects.filter(season=scenario.season).delete()
# %%
from referees.optimize import optimize_wales
# Run the optimizer for this scenario; arguments appear to be
# (scenario_id, acting user name, some boolean flag) — TODO confirm the
# flag's meaning against optimize_wales' signature.
optimize_wales(scenario.id, "user_name", True)
# %%
# %%
# %%
# %%
# %%
# %%
# with open('referees/fixtures/metalligen2425/2425_arenas.csv','r') as f:
# reader = csv.reader(f)
# next(reader)
# for row in reader:
# print(row)
# latitude,longitude = row[3].replace(',','.').split(' ')
# print(latitude,longitude)
# Location.objects.get_or_create(season=scenario.season, name=row[1], city=row[4], latitude=latitude, longitude=longitude)
# # %%
# Role.objects.get_or_create(season=scenario.season,name='Referee',order=0,min_required=2,max_required=2)
# Role.objects.get_or_create(season=scenario.season,name='Linesperson',order=1,min_required=2,max_required=2)
# Category.objects.get_or_create(season=scenario.season,name='A',order=0)
# Category.objects.get_or_create(season=scenario.season,name='B',order=1)
# Category.objects.get_or_create(season=scenario.season,name='C',order=2)
# # %%
# import googlemaps
# SECURITY(review): a real Google Maps API key is committed below (even though
# the code is commented out) — revoke it and load from an environment variable.
# gmaps = googlemaps.Client(
#     key='AIzaSyB76EhR4OqjdXHQUiTkHZC0Svx_7cPGqyU')
# with open('referees/fixtures/metalligen2425/2425_officials.csv','r') as f:
# reader = csv.reader(f)
# next(reader)
# for row in reader:
# print(row)
# id = row[0]
# name = row[1]
# zip_code = row[2]
# city = row[3]
# region = row[4]
# role= row[5]
# category = row[6]
# target_games = row[7]
# geocode_result = gmaps.geocode(city+" "+zip_code)
# if len(geocode_result) > 0:
# location = geocode_result[0]['geometry']['location']
# print("\t", location)
# else:
# location = {'lat': 0, 'lng': 0}
# print("\t", "NOT FOUND")
# delegate, created = Delegate.objects.get_or_create(season=scenario.season,
# category= Category.objects.filter(season=scenario.season,name=category).first(),
# name= name,
# latitude= location['lat'],
# longitude= location['lng'],
# )
# location, created = Location.objects.get_or_create(season=scenario.season,
# name=city,
# city=city,
# type=3,
# latitude=location['lat'],
# longitude=location['lng'])
# delegate.location = location
# delegate.save()
# Delegate.objects.filter(season=scenario.season).update(country=Country.objects.get(season=scenario.season,name='Denmark'))
# %%
# competition, created = Competition.objects.get_or_create(season=season,name='Metalligen 2425')
# # %%
# RefGame.objects.filter(scenario=scenario).delete()
# with open('referees/fixtures/metalligen2425/2425_schedule.csv','r') as f:
# reader = csv.reader(f)
# next(reader)
# for row in reader:
# # print(row)
# day = datetime.datetime.strptime(row[2],'%m/%d/%Y')
# date, created = Day.objects.get_or_create(
# season=season,
# date=day,
# day=day.strftime("%d-%m-%Y")
# )
# time = row[4]
# timeslot, created = TimeSlot.objects.get_or_create(
# season=season,
# name=time,
# start=time,
# )
# gameID = row[6]
# homeTeam = Team.objects.filter(season=season,name=row[7]).first()
# awayTeam = Team.objects.filter(season=season,name=row[8]).first()
# if not homeTeam and awayTeam:
# print("ERROR: ", row)
# continue
# arena = Location.objects.filter(season=season,name=row[9]).first()
# RefGame.objects.get_or_create(
# homeTeam=homeTeam,
# awayTeam=awayTeam,
# scenario=scenario,
# timeslot=timeslot,
# day=date,
# location=arena,
# competition=competition,
# )
# # %%
# Day.objects.filter(season=season).update(round=1)
# %%
# teams = Team.objects.filter(season=season)
# firstday = Day.objects.filter(season=season).order_by('date').first()
# lastday = Day.objects.filter(season=season).order_by('date').last()
# print(firstday.date, lastday.date)
# # %%
# RefWish.objects.filter(scenario=scenario).delete()
# with open('referees/fixtures/metalligen2425/2425_officials.csv','r') as f:
# reader = csv.reader(f)
# next(reader)
# for row in reader:
# print(row)
# id = row[0]
# name = row[1]
# zip_code = row[2]
# city = row[3]
# region = row[4]
# role= row[5]
# category = row[6]
# target_games = row[7]
# delegate = Delegate.objects.filter(season=season,name=name).first()
# wish = RefWish.objects.create(
# scenario=scenario,
# startFirstDay=True,
# endLastDay=True,
# day=firstday,
# day2=lastday,
# minGames=target_games,
# maxGames=target_games,
# reason=f'Target {target_games} Games for {delegate.name} ({delegate.category.name})',
# prio='A'
# )
# wish.teams.add(*teams)
# wish.delegates.add(delegate)
# # # %%
# for d in Day.objects.filter(season=season):
# d.day = d.date.strftime("%Y-%m-%d")
# d.save()
# # date=day,
# # day=day.strftime("%d-%m-%Y")
# # %%
# for r in Requirement.objects.filter(season=season):
# print(r.competition,r.role,r.classification)
# # %%
# game = RefGame.objects.filter(scenario=scenario).first()
# # %%
# for a in Assignment.objects.filter(game=game):
# print(a.game,a.role,a.delegate)
# # %%
# Delegate.objects.filter(season=season,roles__isnull=True).delete()
# # %%
# %%

View File

@ -235,16 +235,16 @@ def simulate_draws(filename,n):
from multiprocessing import Pool, cpu_count
n = sys.maxsize n = sys.maxsize
pool = Pool() pool = Pool()
result = {} result = {}
answer = {} answer = {}
# n_threads = cpu_count()
n_threads = 8 n_threads = 8
for cpu in range(n_threads): for cpu in range(n_threads):
result[cpu] = pool.apply_async(simulate_draws, args=(f'thread_{cpu}_HA_2country', n,)) result[cpu] = pool.apply_async(simulate_draws, args=(f'thread_{cpu}', n,))
for cpu in range(n_threads): for cpu in range(n_threads):
answer[cpu] = result[cpu].get() answer[cpu] = result[cpu].get()

View File

@ -0,0 +1,910 @@
# A rudimentary solver for nonconvex MIQCQP problems
#
# (C) Fair Isaac Corp., 1983-2023
import xpress as xp
import sys
import math
import numpy as np
# Violation/feasibility tolerance used when comparing auxiliaries to products.
eps = 1e-5
# Tags for the kind of cut/branching action generated — presumably consumed by
# callback code later in this file (usage not visible in this chunk).
TYPE_OA = 1
TYPE_SECANT = 2
TYPE_MCCORMICK = 3
TYPE_BOUNDREDUCE = 4
# Original column types ('B'/'I'/'C'); populated by create_prob().
var_type = []
# NumPy arrays for faster elaboration: for each auxiliary y = x_i * x_j,
# Aux_i/Aux_j hold the factor indices and Aux_ind the auxiliary's column
# index. All three are filled by create_prob().
Aux_i = None
Aux_j = None
Aux_ind = None
# Compute bounds on bilinear term
def bdprod(lb1, ub1, lb2, ub2):
    """
    Return (lower, upper) bounds of the product x*y given
    lb1 <= x <= ub1 and lb2 <= y <= ub2.

    A bilinear product over a box attains its extrema at the box corners,
    so the bounds are simply the min and max of the four corner products.
    """
    assert(lb1 <= ub1)
    assert(lb2 <= ub2)
    corners = (lb1 * lb2, lb1 * ub2, ub1 * lb2, ub1 * ub2)
    lo = min(corners)
    hi = max(corners)
    # Clamp to the solver's notion of infinity.
    lo = max(-xp.infinity, lo)
    hi = min(xp.infinity, hi)
    return (lo, hi)
# Return auxiliary variable
def addaux(aux, p, i, j, lb, ub, vtype):
    """
    Create the auxiliary variable y_ij standing in for the product
    x_i * x_j, register it under aux[i, j], and return it.
    """
    # Derive the auxiliary's bounds from the bounds of its factors.
    if i != j:
        # Genuine bilinear term: bounds come from the box corners.
        lo, hi = bdprod(lb[i], ub[i], lb[j], ub[j])
    elif lb[i] >= 0:
        # Square of a nonnegative variable.
        lo, hi = lb[i]**2, ub[i]**2
    elif ub[i] <= 0:
        # Square of a nonpositive variable.
        lo, hi = ub[i]**2, lb[i]**2
    else:
        # Variable straddles zero: the square ranges from 0 to the max square.
        lo, hi = 0, max([lb[i]**2, ub[i]**2])
    if (lo >= xp.infinity) or (hi <= -xp.infinity):
        print("inconsistent bounds on {0} {1}".format(i, j))
        exit(-1)
    # The auxiliary inherits integrality from its factors: binary*binary is
    # binary, integer-ish*integer-ish is integer, anything else continuous.
    if vtype[i] == 'B' and vtype[j] == 'B':
        t = xp.binary
    elif vtype[i] in ('B', 'I') and vtype[j] in ('B', 'I'):
        t = xp.integer
    else:
        t = xp.continuous
    # Keep the bounds inside a large finite box.
    bigU = 1e8
    lo = max(lo, -bigU)
    hi = min(hi, bigU)
    aux[i, j] = xp.var(lb=lo, ub=hi, vartype=t,
                       name='aux_{0}_{1}'.format(
                           p.getVariable(i).name.split(' ')[0],
                           p.getVariable(j).name.split(' ')[0]))
    return aux[i, j]
# De-quadratify quadratic constraint/objective
def convQaux(p, aux, mstart, ind, coef, row, lb, ub, vtype):
    """
    Converts a quadratic objective/row into a linear one by replacing
    bilinear terms with an auxiliary variable.

    p      -- the xpress problem being reformulated (mutated in place)
    aux    -- dict mapping (i, j) -> auxiliary variable y_ij (extended here)
    mstart, ind, coef -- the Q matrix in compressed (column-start) form
    row    -- constraint row index, or a negative value for the objective
    lb, ub -- per-column bounds used to bound new auxiliaries
    vtype  -- per-column types used to type new auxiliaries
    """
    rcols = []
    rrows = []
    rcoef = []
    # Walk the compressed quadratic matrix: column i pairs with each ind[j]
    # in its slice [mstart[i], mstart[i+1]).
    for i,__ms in enumerate(mstart[:-1]):
        for j in range(mstart[i], mstart[i+1]):
            J = p.getIndex(ind[j])
            # Create (or reuse) the auxiliary that stands in for x_i * x_J.
            if (i, J) not in aux.keys():
                y = addaux(aux, p, i, J, lb, ub, vtype)
                p.addVariable(y)
            else:
                y = aux[i, J]
            if row < 0: # objective
                # Halve objective coefficients — presumably because Xpress
                # stores the objective as (1/2) x'Qx; TODO confirm.
                mult = .5
            else:
                mult = 1
            if i != J:
                # Off-diagonal products appear once in the triangular storage
                # but represent both (i,J) and (J,i), hence the factor 2.
                coe = 2 * mult * coef[j]
            else:
                coe = mult * coef[j]
            if row < 0:
                # Objective: move the coefficient onto the auxiliary directly.
                p.chgobj([y], [coe])
            else:
                # Constraint: batch the linear coefficients for one chgmcoef call.
                rcols.append(y)
                rrows.append(row)
                rcoef.append(coe)
    if row >= 0:
        # This is a quadratic constraint, not the objective function.
        # Add linear coefficients for newly introduced variables.
        p.chgmcoef(rrows, rcols, rcoef)
        # Remove quadratic matrix.
        p.delqmatrix(row)
    else:
        # Objective: remove the quadratic part by zeroing every Q entry.
        # Expand the compressed column starts into an explicit row-index list.
        indI = []
        for i in range(len(mstart) - 1):
            indI.extend([i] * (mstart[i+1] - mstart[i]))
        # Set all quadratic elements to zero.
        p.chgmqobj(indI, ind, [0] * mstart[-1])
# Create problem from filename and reformulate it
def create_prob(filename):
    """
    Read a (possibly nonconvex) MIQCQP from *filename* and reformulate it:
    every bilinear/quadratic term x_i*x_j is replaced by an auxiliary
    variable y_ij linked to its factors via McCormick inequalities.

    Side effects: populates the module globals var_type, Aux_i, Aux_j and
    Aux_ind for fast violation checks in the solver callbacks.

    Returns (p, aux) — the linearized problem and the (i, j) -> y_ij map.
    """
    global var_type, Aux_i, Aux_j, Aux_ind
    # Read file, then linearize by replacing all bilinear terms with
    # auxiliary variables.
    p = xp.problem()
    p.read(filename)
    n = p.attributes.cols
    m = p.attributes.rows
    # Get original variables' bounds.
    lb = []
    ub = []
    p.getlb(lb, 0, n-1)
    p.getub(ub, 0, n-1)
    # Normalize bounds so that we get meaningful McCormick constraints:
    # replace unbounded variables with a large artificial box.
    btype = []
    bind = []
    bnd = []
    art_bound = 1e5
    for i, b in enumerate(lb):
        if b <= -xp.infinity / 2:
            btype.append('L')
            bind.append(i)
            bnd.append(-art_bound)
            lb[i] = -art_bound
    for i, b in enumerate(ub):
        if b >= xp.infinity / 2:
            btype.append('U')
            bind.append(i)
            bnd.append(art_bound)
            ub[i] = art_bound
    p.chgbounds(bind, btype, bnd)
    # Get original variables' types.
    vtype = []
    p.getcoltype(vtype, 0, n-1)
    x = p.getVariable()
    aux = {} # Dictionary containing the map (x_i,x_j) --> y_ij
    # Read quadratic objective, replace it with its linearization.
    # First call with maxcoefs=0 only queries the size of the Q matrix.
    size = p.getmqobj(start=None, colind=None, objqcoef=None,
                      maxcoefs=0, first=0, last=n-1)
    if size:
        # Objective is also quadratic.
        mstart = []
        ind = []
        obj = []
        # Read Q matrix of objective.
        size = p.getmqobj(mstart, ind, obj, size, 0, n-1)
        # Add auxiliaries if necessary (row = -1 marks the objective).
        convQaux(p, aux, mstart, ind, obj, -1, lb, ub, vtype)
    # Do the same operation on all quadratic rows.
    for i in range(m):
        # Get size of the row's Q matrix (query-only call).
        size = p.getqrowqmatrix(row=i, start=None, colind=None,
                                rowqcoef=None, maxcoefs=0, first=0, last=n-1)
        if size == 0:
            continue
        mstart = []
        ind = []
        coef = []
        # Read Q matrix of this row.
        size = p.getqrowqmatrix(i, mstart, ind, coef, size, 0, n-1)
        # Add auxiliaries if necessary.
        convQaux(p, aux, mstart, ind, coef, i, lb, ub, vtype)
    # Problem is now linear. Add the four McCormick envelope inequalities for
    # each auxiliary (skipping those that would involve infinite bounds).
    p.addConstraint(
        [aux[i, j] >= lb[j]*x[i] + lb[i]*x[j] - lb[i] * lb[j]
         for (i, j) in aux.keys() if max(-lb[i], -lb[j]) < xp.infinity],
        [aux[i, j] >= ub[j]*x[i] + ub[i]*x[j] - ub[i] * ub[j]
         for (i, j) in aux.keys() if max(ub[i], ub[j]) < xp.infinity],
        [aux[i, j] <= ub[j]*x[i] + lb[i]*x[j] - lb[i] * ub[j]
         for (i, j) in aux.keys() if max(-lb[i], ub[j]) < xp.infinity],
        [aux[i, j] <= lb[j]*x[i] + ub[i]*x[j] - ub[i] * lb[j]
         for (i, j) in aux.keys() if max(ub[i], -lb[j]) < xp.infinity])
    # Make sure the quadratic variables and the auxiliary variables
    # are not touched by the presolver.
    securecols = list(aux.values())
    secureorig = set()
    for i, j in aux.keys():
        secureorig.add(i)
        secureorig.add(j)
    securecols += list(secureorig)
    p.loadsecurevecs(rowind=None, colind=securecols)
    var_type = vtype
    # Create numpy vectors containing i, j, and aux(i,j) for vectorized
    # violation checks in the callbacks.
    Aux_i = np.array([i[0] for i in aux.keys()])
    Aux_j = np.array([i[1] for i in aux.keys()])
    Aux_ind = np.array([p.getIndex(i) for i in aux.values()])
    return p, aux
def getCBbounds(prob, n):
    """
    Map the current node's (presolved) variable bounds back into the
    original variable space of size n; columns removed by presolve keep
    (-infinity, +infinity).
    """
    pres_lb, pres_ub = [], []
    # Bounds of the presolved node problem.
    prob.getlb(pres_lb)
    prob.getub(pres_ub)
    # colmap[k] gives the original column index of presolved column k,
    # or -1 if the column has no original counterpart.
    rowmap, colmap = [], []
    prob.getpresolvemap(rowmap, colmap)
    olb = [-xp.infinity] * n
    oub = [xp.infinity] * n
    for orig_col, (l, u) in zip(colmap, zip(pres_lb, pres_ub)):
        if orig_col != -1:
            olb[orig_col] = l
            oub[orig_col] = u
    return olb, oub
# Add rows to branching object
def addrowzip(prob, bo, side, sign, rhs, ind, coef):
    """
    Strip zero coefficients from a branching row, presolve it, and attach
    the result to branch *side* of branching object *bo* — as a simple
    bound change if the presolved row has a single nonzero, as a full row
    otherwise (an empty presolved row is dropped).
    """
    # Keep only the nonzero entries (indices and coefficients in lockstep).
    ind2 = [ix for ix, c in zip(ind, coef) if c != 0]
    coe2 = [c for c in coef if c != 0]
    # Translate the row into the presolved space.
    ind3, coe3 = [], []
    rhs3, status = prob.presolverow(sign, ind2, coe2, rhs, 100, ind3, coe3)
    if len(ind3) == 1:
        # Single nonzero: express the row as a variable bound. A >= row
        # ('G') becomes a lower bound ('L'), anything else an upper bound;
        # a negative coefficient flips the bound direction.
        sign = 'L' if sign == 'G' else 'U'
        if coe3[0] < 0:
            sign = 'U' if sign == 'L' else 'L'
        bo.addbounds(side, [sign], [ind3[0]], [rhs3/coe3[0]])
    elif len(ind3) > 1:
        bo.addrows(side, [sign], [rhs3], [0, len(ind3)], ind3, coe3)
# Define callback functions: one for checking if a solution is
# feasible (apart from linearity, all auxiliaries must be equal to
# their respective product); one for adding new McCormick inequalities
# for changed bounds; and finally, one for branching as we might have
# to branch on continuous variables.
def cbbranch(prob, aux, branch):
    """Branch callback. Receives branch in input and, if it finds
    continuous branches that are violated, adds them.

    Returns either the incoming `branch` object unchanged, or a new
    two-branch object on the most violated product term, with updated
    McCormick/outer-approximation rows attached to each branch.
    """
    sol = []
    # Only act once the problem is presolved (bit 128 of presolvestate)
    if (prob.attributes.presolvestate & 128) == 0:
        return branch
    # Retrieve node solution; if unavailable, leave branching unchanged
    try:
        prob.getlpsol(x=sol)
    except:
        return branch
    # Bounds mapped back to the original variable space
    lb, ub = getCBbounds(prob, len(sol))
    assert(len(lb) == len(ub))
    assert(len(sol) == len(lb))
    x = prob.getVariable()  # presolved variables
    rowmap = []
    colmap = []
    prob.getpresolvemap(rowmap, colmap)
    # Inverse of colmap: original column index -> presolved column index.
    # NOTE(review): colmap entries can be -1 for columns dropped by the
    # presolver (getCBbounds guards for this); here a -1 would write
    # invcolmap[-1] and clobber the last entry — confirm colmap has no
    # -1 at this callback point.
    invcolmap = [-1 for _ in lb]
    for i, m in enumerate(colmap):
        invcolmap[m] = i
    # make sure all dimensions match
    assert (len(lb) == len(ub))
    assert (len(sol) == len(lb))
    assert (len(invcolmap) == len(lb))
    # Check if all auxiliaries are equal to their respective bilinear
    # term. If so, we have a feasible solution
    sol = np.array(sol)
    discr = sol[Aux_ind] - sol[Aux_i] * sol[Aux_j]
    # For square terms only y > x**2 needs branching (y < x**2 is
    # handled by outer-approximation cuts), so clip the negative part
    discr[Aux_i == Aux_j] = np.maximum(0, discr[Aux_i == Aux_j])
    maxdiscind = np.argmax(np.abs(discr))
    if abs(discr[maxdiscind]) < eps:
        return branch
    # Branch on the most violated product term y = x_i * x_j
    i, j = Aux_i[maxdiscind], Aux_j[maxdiscind]
    yind = prob.getIndex(aux[i, j])
    if i == j:
        # Test of violation is done on the original
        # space. However, the problem variables are scrambled with invcolmap
        if sol[i] > lb[i] + eps and \
           sol[i] < ub[i] - eps and \
           sol[yind] > sol[i]**2 + eps and \
           sol[yind] - lb[i]**2 <= (ub[i] + lb[i]) * (sol[i] - lb[i]) - eps:
            # Can't separate, must branch. Otherwise OA or secant
            # cut separated above should be enough
            brvarind = invcolmap[i]
            brpoint = sol[i]
            brvar = x[brvarind]
            brleft = brpoint
            brright = brpoint
            assert(brvarind >= 0)
            # Discrete variables branch on floor/ceil of the LP value
            if brvar.vartype in [xp.integer, xp.binary]:
                brleft = math.floor(brpoint + 1e-5)
                brright = math.ceil(brpoint - 1e-5)
            b = xp.branchobj(prob, isoriginal=False)
            b.addbranches(2)
            addrowzip(prob, b, 0, 'L', brleft, [i], [1])
            addrowzip(prob, b, 1, 'G', brright, [i], [1])
            # New variable bounds are not enough, add new McCormick
            # inequalities for y = x**2: suppose x0,y0 are the current
            # solution values for x,y, yp = x0**2 and xu,yu = xu**2 are their
            # upper bound, and similar for lower bound. Then these two
            # rows must be added, one for each branch:
            #
            # y - yp <= (yl-yp)/(xl-x0) * (x - x0) <===>
            # (yl-yp)/(xl-x0) * x - y >= (yl-yp)/(xl-x0) * x0 - yp
            #
            # y - yp <= (yu-yp)/(xu-x0) * (x - x0) <===>
            # (yu-yp)/(xu-x0) * x - y >= (yu-yp)/(xu-x0) * x0 - yp
            #
            # Obviously do this only for finite bounds
            ypl = brleft**2
            ypr = brright**2
            if lb[i] > -1e7 and sol[i] > lb[i] + eps:
                yl = lb[i]**2
                coeff = (yl - ypl) / (lb[i] - sol[i])
                if coeff != 0:
                    addrowzip(prob, b, 0, 'G', coeff*sol[i] - ypl,
                              [i, yind], [coeff, -1])
            if ub[i] < 1e7 and sol[i] < ub[i] - eps:
                yu = ub[i]**2
                coeff = (yu - ypr) / (ub[i] - sol[i])
                if coeff != 0:
                    addrowzip(prob, b, 1, 'G', coeff*sol[i] - ypr,
                              [i, yind], [coeff, -1])
            return b
    else:
        # Bilinear term with i != j: prepare per-branch bound copies
        # (suffix 0 = "down" branch, 1 = "up" branch)
        lbi0, ubi0 = lb[i], ub[i]
        lbi1, ubi1 = lb[i], ub[i]
        lbj0, ubj0 = lb[j], ub[j]
        lbj1, ubj1 = lb[j], ub[j]
        # No cut violated, must branch. Pick the variable whose value
        # is relatively farthest from its bounds.
        if min(sol[i] - lb[i], ub[i] - sol[i]) / (1 + ub[i] - lb[i]) > \
           min(sol[j] - lb[j], ub[j] - sol[j]) / (1 + ub[j] - lb[j]):
            lbi1 = sol[i]
            ubi0 = sol[i]
            brvar = i
        else:
            lbj1 = sol[j]
            ubj0 = sol[j]
            brvar = j
        alpha = 0.2  # NOTE(review): unused — leftover from a planned branching-point blend?
        brvarind = invcolmap[brvar]
        brpoint = sol[brvar]
        brleft = brpoint
        brright = brpoint
        if x[brvarind].vartype in [xp.integer, xp.binary]:
            brleft = math.floor(brpoint + 1e-5)
            brright = math.ceil(brpoint - 1e-5)
        b = xp.branchobj(prob, isoriginal=False)
        b.addbranches(2)
        addrowzip(prob, b, 0, 'L', brleft, [brvar], [1])
        addrowzip(prob, b, 1, 'G', brright, [brvar], [1])
        # As for the i==j case, the variable branch is
        # insufficient, so add updated McCormick inequalities.
        # There are two McCormick inequalities per changed bound:
        #
        # y >= lb[j] * x[i] + lb[i] * x[j] - lb[j] * lb[i] ---> add to branch 1
        # y >= ub[j] * x[i] + ub[i] * x[j] - ub[j] * ub[i] ---> add to branch 0
        # y <= lb[j] * x[i] + ub[i] * x[j] - lb[j] * ub[i] ---> add to branch 1 if x[brvarind] == j, 0 if x[brvarind] == i
        # y <= ub[j] * x[i] + lb[i] * x[j] - ub[j] * lb[i] ---> add to branch 1 if x[brvarind] == i, 0 if x[brvarind] == j
        addrowzip(prob, b, 0, 'G', - ubi0 * ubj0, [yind, i, j], [1, -ubj0, -ubi0])
        addrowzip(prob, b, 1, 'G', - lbi1 * lbj1, [yind, i, j], [1, -lbj1, -lbi1])
        # NOTE(review): brvarind is a *presolved* index while i is an
        # original-space index; this test presumably means `brvar == i`
        # — confirm against the comment above.
        if brvarind == i:
            addrowzip(prob, b, 0, 'L', - lbj0 * ubi0, [yind, i, j], [1, -lbj0, -ubi0])
            addrowzip(prob, b, 1, 'L', - ubj1 * lbi1, [yind, i, j], [1, -ubj1, -lbi1])
        else:
            addrowzip(prob, b, 0, 'L', - ubj0 * lbi0, [yind, i, j], [1, -ubj0, -lbi0])
            addrowzip(prob, b, 1, 'L', - lbj1 * ubi1, [yind, i, j], [1, -lbj1, -ubi1])
        return b
    # If no branching rule was found, return none
    return branch
# Callback for checking a solution. Returns tuple (refuse, cutoff)
# where refuse=1 if solution is deemed infeasible and cutoff is the
# actual value of the solution if deemed feasible
def cbchecksol(prob, aux, soltype, cutoff):
    """
    Callback for checking if solution is truly feasible. The optimizer
    already has checked integrality; we verify that every auxiliary
    variable equals its bilinear product.

    Returns (refuse, cutoff): refuse is 1 to reject the solution,
    0 to accept it; the cutoff value is passed through unchanged.
    """
    global Aux_i, Aux_j, Aux_ind
    # Only meaningful once the problem is presolved (bit 128)
    if (prob.attributes.presolvestate & 128) == 0:
        return (1, cutoff)
    sol = []
    # Retrieve node solution; if unavailable, reject the solution
    try:
        prob.getlpsol(x=sol)
    except Exception:  # was a bare `except:` — don't swallow KeyboardInterrupt/SystemExit
        return (1, cutoff)
    sol = np.array(sol)
    # Reject if any auxiliary deviates from its bilinear product by
    # more than eps
    refuse = 1 if np.max(np.abs(sol[Aux_i] * sol[Aux_j] - sol[Aux_ind])) > eps else 0
    # Return with refuse != 0 if solution is rejected, 0 otherwise;
    # and same cutoff
    return (refuse, cutoff)
def cbfindsol(prob, aux):
    """Callback for finding a feasible solution: rounds the discrete
    variables of the node LP solution, recomputes every auxiliary as
    the product of its two factors, and submits the result via
    addmipsol. Always returns 0.
    """
    if (prob.attributes.presolvestate & 128) == 0:
        return 0
    sol = []
    try:
        prob.getlpsol(x=sol)
    except Exception:  # was a bare `except:` — don't swallow KeyboardInterrupt/SystemExit
        return 0
    xnew = sol[:]
    # Round integer/binary variables to the nearest integer when they
    # are fractional beyond the MIP tolerance.
    # BUGFIX: the original condition `t == 'I' or t == 'B' and <check>`
    # parsed as `t == 'I' or (t == 'B' and <check>)`, so the
    # fractionality guard was bypassed for 'I' variables.
    miptol = prob.controls.miptol
    for i, t in enumerate(var_type):
        if t in ('I', 'B') and \
           xnew[i] > math.floor(xnew[i] + miptol) + miptol:
            xnew[i] = math.floor(xnew[i] + .5)
    # Make the candidate consistent: each auxiliary must equal the
    # product of its two original variables.
    for i, j in aux.keys():
        yind = prob.getIndex(aux[i, j])
        xnew[yind] = xnew[i] * xnew[j]
    prob.addmipsol(xnew)
    return 0
# Callback for adding cuts. Can use addcuts(). Checks feasibility of
# the Y=xx' equation and attempts at adding Outer Approximation,
# secant, or McCormick inequalities.
def cbaddmccormickcuts(prob, aux, sol):
    """
    Callback to add tighter McCormick inequalities arising from
    tighter lower/upper bounds on the original variables.

    Cuts are collected as (type, sense, rhs, indices, coeffs) tuples,
    presolved one by one, and added with addcuts. Always returns 0.
    """
    lb, ub = getCBbounds(prob, len(sol))
    cuts = []
    # For each auxiliary y = x_i * x_j, check which relaxation
    # inequality the node solution violates and separate it
    for i, j in aux.keys():
        yind = prob.getIndex(aux[i, j])
        if i == j:
            # Separate quadratic term
            if sol[yind] < sol[i]**2 - eps and \
               abs(sol[i]) < xp.infinity / 2:
                # Find the right point for separation, which should be
                # minimum-distance point from the current solution
                # (sol[i], sol[yind]) to the region (y >= x**2).
                #
                # For the square function, this amounts to solving a
                # "depressed cubic" equation (depressed as the square
                # term has zero coefficient)
                #
                # 4x^3 + (2-4y0) x - 2x0 = 0
                #
                # Solve this with a few iterations of Newton's method. @todo
                xk = sol[i]
                #for _ in range(5):
                #    xk -= (4*xk**3 + 2*(1-2*sol[yind])*xk - 2*sol[i]) / (12*xk**2 + 2*(1 - 2*sol[yind]))
                ox = xk
                oy = ox ** 2  # NOTE(review): currently unused
                # Add Outer Approximation cut y >= xs^2 + 2xs*(x-xs)
                # <===> y - 2xs*x >= -xs^2
                cuts.append((TYPE_OA, 'G', - ox**2, [yind, i],
                             [1, -2*ox]))
            # Otherwise, check if secant can be of help: y0 - xl**2 >
            # (xu**2 - xl**2) / (xu - xl) * (x0 - xl)
            elif sol[yind] > sol[i]**2 + eps and \
                 sol[yind] - lb[i]**2 > (ub[i] + lb[i]) * (sol[i] - lb[i]) \
                 + eps and abs(lb[i] + ub[i]) < xp.infinity / 2:
                cuts.append((TYPE_SECANT, 'L',
                             lb[i]**2 - (ub[i] + lb[i]) * lb[i],
                             [yind, i], [1, - (lb[i] + ub[i])]))
        elif abs(sol[yind] - sol[i]*sol[j]) > eps:
            # Separate bilinear term, where i != j. There might be at
            # least one cut violated. Each McCormick inequality is
            # added only when the bounds it uses are finite.
            if sol[yind] < lb[j]*sol[i] + lb[i]*sol[j] - lb[i]*lb[j] - eps:
                if lb[i] > -xp.infinity / 2 and lb[j] > -xp.infinity / 2:
                    cuts.append((TYPE_MCCORMICK, 'G', - lb[i] * lb[j],
                                 [yind, i, j], [1, -lb[j], -lb[i]]))
            elif sol[yind] < ub[j]*sol[i] + ub[i]*sol[j] - ub[i]*ub[j] - eps:
                if ub[i] < xp.infinity / 2 and ub[j] < xp.infinity / 2:
                    cuts.append((TYPE_MCCORMICK, 'G', - ub[i] * ub[j],
                                 [yind, i, j], [1, -ub[j], -ub[i]]))
            elif sol[yind] > lb[j]*sol[i] + ub[i]*sol[j] - ub[i]*lb[j] + eps:
                if ub[i] < xp.infinity / 2 and lb[j] > -xp.infinity / 2:
                    cuts.append((TYPE_MCCORMICK, 'L', - ub[i] * lb[j],
                                 [yind, i, j], [1, -lb[j], -ub[i]]))
            elif sol[yind] > ub[j]*sol[i] + lb[i]*sol[j] - lb[i]*ub[j] + eps:
                if lb[i] > -xp.infinity / 2 and ub[j] < xp.infinity / 2:
                    cuts.append((TYPE_MCCORMICK, 'L', - lb[i] * ub[j],
                                 [yind, i, j], [1, -ub[j], -lb[i]]))
    # Done creating cuts. Add them to the problem (after presolving
    # each row into the presolved space)
    for (t, s, r, I, C) in cuts: # cuts might be the empty list
        mcolsp, dvalp = [], []
        drhsp, status = prob.presolverow(s, I, C, r, prob.attributes.cols,
                                         mcolsp, dvalp)
        if status >= 0:
            prob.addcuts([t], [s], [drhsp], [0, len(mcolsp)], mcolsp, dvalp)
    return 0
def cbboundreduce(prob, aux, sol):
    """
    Callback to reduce bounds that might have been propagated through
    the problem.

    Returns 1 when the propagated bounds prove the node infeasible,
    0 otherwise. Tightened bounds are applied via chgbounds when the
    presolved row has a single column, otherwise added as cuts.
    """
    cuts = []
    lb, ub = getCBbounds(prob, len(sol))
    # Check if bounds on original variables can be reduced based on
    # bounds on auxiliary ones. The other direction is already taken
    # care of by McCormick and tangent/secant cuts.
    feastol = prob.controls.feastol
    for (i,j),a in aux.items():
        auxind = prob.getIndex(a)
        lbi = lb[i]
        ubi = ub[i]
        lba = lb[auxind]
        uba = ub[auxind]
        if i == j: # check if upper bound is tight w.r.t. bounds on
                   # x[i]
            # Forward propagation: from new independent variable
            # bounds, infer new bound for dependent variable y = x^2.
            if uba > max(lbi**2, ubi**2) + feastol:
                cuts.append((TYPE_BOUNDREDUCE, 'L', max(lbi**2, ubi**2), [auxind], [1]))
            # When x is sign-fixed, y has a positive lower bound too
            if lbi > 0 and lba < lbi**2 - feastol:
                cuts.append((TYPE_BOUNDREDUCE, 'G', lbi**2, [auxind], [1]))
            elif ubi < 0 and lba < ubi**2 - feastol:
                cuts.append((TYPE_BOUNDREDUCE, 'G', ubi**2, [auxind], [1]))
            # Backward propagation: y = x^2 with negative upper bound
            # is infeasible; otherwise ub(y) caps |x| at sqrt(ub(y))
            if uba < -feastol:
                return 1 # infeasible node
            else:
                if uba < lbi**2 - feastol:
                    if lbi > 0:
                        return 1 # infeasible node
                    else:
                        cuts.append((TYPE_BOUNDREDUCE, 'G', -math.sqrt(uba), [i], [1]))
                if uba < ubi**2 - feastol:
                    if ubi < - feastol:
                        return 1
                    else:
                        cuts.append((TYPE_BOUNDREDUCE, 'L', math.sqrt(uba), [i], [1]))
                # lb(y) > 0 with x > 0 pushes lb(x) up to sqrt(lb(y))
                if lba > prob.controls.feastol and lbi > 0 and lbi**2 < lba - feastol:
                    cuts.append((TYPE_BOUNDREDUCE, 'G', math.sqrt(lba), [i], [1]))
        else:
            # Bilinear term: interval product gives valid bounds on aux
            tlb, tub = bdprod(lb[i], ub[i], lb[j], ub[j])
            if lba < tlb - feastol:
                cuts.append((TYPE_BOUNDREDUCE, 'G', tlb, [auxind], [1]))
            if uba > tub + feastol:
                cuts.append((TYPE_BOUNDREDUCE, 'L', tub, [auxind], [1]))
            # For simplicity let's just assume lower bounds are nonnegative
            lbj = lb[j]
            ubj = ub[j]
            if lbj >= 0 and lbi >= 0:
                # Backward propagation from bounds on y = x_i * x_j to
                # each factor (valid only in the nonnegative orthant)
                if lbi*ubj < lba - feastol:
                    cuts.append((TYPE_BOUNDREDUCE, 'G', lba / ubj, [i], [1]))
                if lbj*ubi < lba - feastol:
                    cuts.append((TYPE_BOUNDREDUCE, 'G', lba / ubi, [j], [1]))
                if lbi*ubj > uba + feastol:
                    cuts.append((TYPE_BOUNDREDUCE, 'L', uba / lbi, [j], [1]))
                if lbj*ubi > uba + feastol:
                    cuts.append((TYPE_BOUNDREDUCE, 'L', uba / lbj, [i], [1]))
    # Done creating cuts. Add them to the problem
    for (t, s, r, I, C) in cuts: # cuts might be the empty list
        mcolsp, dvalp = [], []
        drhsp, status = prob.presolverow(s, I, C, r, prob.attributes.cols,
                                         mcolsp, dvalp)
        if status >= 0:
            if len(mcolsp) == 0:
                continue
            elif len(mcolsp) == 1:
                # Single-column row: apply as a bound change instead
                if s == 'G':
                    btype = 'L'
                elif s == 'L':
                    btype = 'U'
                else: # don't want to add an equality bound reduction
                    continue
                assert(dvalp[0] > 0)
                prob.chgbounds(mcolsp,[btype],[drhsp/dvalp[0]])
            else:
                prob.addcuts([t], [s], [drhsp], [0, len(mcolsp)], mcolsp, dvalp)
    return 0
def cbaddsdpcuts(prob, aux, sol):
    # Placeholder for SDP-based cuts; currently a no-op. Returns 0
    # ("no action"), matching the other separator callbacks.
    return 0
def cbaddcuts(prob, aux):
    """Optimal-node callback: run bound reduction, then McCormick
    separation, then the (stub) SDP separator on the node LP solution.

    The separators are chained with `or`, so if bound reduction proves
    the node infeasible (returns 1) the more expensive separators are
    skipped. Returns nonzero to prune the node, 0 otherwise.
    """
    sol = []
    # Only act once the problem is presolved (bit 128 of presolvestate)
    if (prob.attributes.presolvestate & 128) == 0:
        return 0
    try:
        prob.getlpsol(x=sol)
    except Exception:  # was a bare `except:` — don't swallow KeyboardInterrupt/SystemExit
        return 0
    retval = \
        cbboundreduce(prob, aux, sol) or \
        cbaddmccormickcuts(prob, aux, sol) or \
        cbaddsdpcuts(prob, aux, sol)
    return retval
def solveprob(p, aux):
    """Attach the callbacks to problem `p`, run branch-and-bound, and
    report either the failure status or the solution together with any
    remaining product-constraint violations.
    """
    # Wire the callbacks into the relevant points of the BB search
    p.addcbpreintsol(cbchecksol, aux, 1)
    # p.addcboptnode(cbfindsol, aux, 3)
    p.addcboptnode(cbaddcuts, aux, 3)
    p.addcbchgbranchobject(cbbranch, aux, 1)
    # Start branch-and-bound
    p.mipoptimize()
    solved = p.attributes.solstatus in [xp.SolStatus.OPTIMAL,
                                        xp.SolStatus.FEASIBLE]
    if not solved:
        print("Solve status:", p.attributes.solvestatus.name)
        print("Solution status:", p.attributes.solstatus.name)
    else:
        sol = p.getSolution()
        print("Solution:", sol)
        # Count auxiliaries that still deviate from their product
        nviol = 0
        for (i, j), auxvar in aux.items():
            y = p.getIndex(auxvar)
            gap = abs(sol[y] - sol[i] * sol[j])
            if gap > 1e-8:
                nviol += 1
                print("Violation ({0},{1},{2}): ".format(i, j, y), gap)
        print(nviol, 'Violations')
# main script: read a quadratic instance from an MPS/LP file given on
# the command line, build the linearized problem, and solve it
if __name__ == "__main__":
    if len(sys.argv) < 2:
        print("Needs an argument: mps or lp file with quadratic instance")
        # sys.exit instead of exit(): the `exit` builtin comes from
        # site.py and may be missing under some interpreters/launchers
        sys.exit(-1)
    p, aux = create_prob(sys.argv[1])
    # p.controls.timelimit = 120
    # p.controls.maxnode = 40
    # p.controls.threads = 1
    # p.controls.callbackfrommasterthread = 1
    solveprob(p, aux)

View File

@ -167,6 +167,9 @@ def check_feasible(fixed_games):
tt =time.time() tt =time.time()
tt =time.time()
model.write(f'draw_{time.time()}.lp')
model.solve() model.solve()
comp_time = time.time()-tt comp_time = time.time()-tt
@ -224,9 +227,10 @@ def check_feasible_fix(fixed_games):
reset_bounds = [key for key in x.keys() if key not in fixed_games] reset_bounds = [key for key in x.keys() if key not in fixed_games]
model.chgbounds([x[key] for key in fixed_games],['L' for _ in range(len(fixed_games))],[1 for _ in range(len(fixed_games))]) model.chgbounds([x[key] for key in fixed_games],['L' for _ in range(len(fixed_games))],[1 for _ in range(len(fixed_games))])
model.chgbounds([x[key] for key in reset_bounds],['L' for _ in range(len(reset_bounds))],[0 for _ in range(len(reset_bounds))]) model.chgbounds([x[key] for key in reset_bounds],['L' for _ in range(len(reset_bounds))],[0.0 for _ in range(len(reset_bounds))])
tt =time.time() tt =time.time()
model.write(f'draw_{time.time()}.lp')
model.solve() model.solve()
comp_time = time.time()-tt comp_time = time.time()-tt
@ -296,7 +300,8 @@ def check_feasible_fix(fixed_games):
def simulate_draws(filename,n): def simulate_draws(filename,n):
counter = 0
terminate = 0
print("RUNNING ASYNC",filename) print("RUNNING ASYNC",filename)
for i in range(1, n): for i in range(1, n):
@ -347,8 +352,13 @@ def simulate_draws(filename,n):
n_computations += 1 n_computations += 1
tt = time.time() tt = time.time()
# check, comp_time = check_feasible_pulp(fixed_games+[new_game]) # check, comp_time = check_feasible_pulp(fixed_games+[new_game])
# check, comp_time = check_feasible_fix(fixed_games+[new_game]) check, comp_time = check_feasible_fix(fixed_games+[new_game])
check, comp_time = check_feasible(fixed_games+[new_game]) if terminate:
exit()
if not check:
terminate = 1
counter += 1
# check, comp_time = check_feasible(fixed_games+[new_game])
check_time += time.time()-tt check_time += time.time()-tt
total_comp_time += comp_time total_comp_time += comp_time
if check: if check:

View File

@ -1,3 +1,4 @@
# %%
import os, sys import os, sys
from dotenv import load_dotenv from dotenv import load_dotenv
load_dotenv() load_dotenv()
@ -65,7 +66,7 @@ from django.template.loader import render_to_string
from qualifiers.models import * from qualifiers.models import *
from common.models import GlobalTeam, GlobalCountry from common.models import GlobalTeam, GlobalCountry
from scheduler.models import Season, Scenario, Team, DayObj, CountryClash, Country from scheduler.models import Season, Scenario, Team, DayObj, CountryClash, Country, GameRequirement
from qualifiers.draws import groupTeams, optimize_inversions4 from qualifiers.draws import groupTeams, optimize_inversions4
from scheduler.solver.tasks.optimize import optimize from scheduler.solver.tasks.optimize import optimize
@ -107,7 +108,7 @@ solver = SOLVER
# partial_solution = optimize(task=None, s2=s2, user_name=user_name, user_is_staff=user_is_staff, # partial_solution = optimize(task=None, s2=s2, user_name=user_name, user_is_staff=user_is_staff,
# runMode=runMode, localsearch_time=localsearch_time, RUN_ENV=RUN_ENV, solver=SOLVER) # runMode=runMode, localsearch_time=localsearch_time, RUN_ENV=RUN_ENV, solver=SOLVER)
# %%
@ -342,6 +343,9 @@ for file in os.listdir('counterexample'):
opponents_from_same_country[a][h.country] += 1 opponents_from_same_country[a][h.country] += 1
GameRequirement.objects.create(scenario=scenario,team1=h,team2=a,number=1)
exit()
for t in teams: for t in teams:
for p in range(1,5): for p in range(1,5):

View File

@ -134,7 +134,7 @@ def create_graph(pot=None):
nx.draw_networkx_edge_labels( nx.draw_networkx_edge_labels(
sub, pos=pos, edge_labels={e: "" for e in list(sub.edges())}, font_color='black', font_size=6 sub, pos=pos, edge_labels={e: "" for e in list(sub.edges())}, font_color='black', font_size=6
) )
nx.write_gexf(sub, "test.gexf") nx.write_gexf(sub, "9704.gexf")
distance = {} distance = {}

View File

@ -71,7 +71,7 @@ from qualifiers.helpers import import_globals
# %% # %%
scenario = Scenario.objects.get(id=7) scenario = Scenario.objects.get(id=10310)
# import_globals(scenario.season.id) # import_globals(scenario.season.id)
# teams = scenario.season.scheduler_teams.all() # teams = scenario.season.scheduler_teams.all()

View File

@ -0,0 +1,456 @@
# %%
PROJECT_PATH = '/home/md/Work/ligalytics/leagues_stable/'
import os, sys
sys.path.insert(0, PROJECT_PATH)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "leagues.settings")
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
from leagues import settings
# settings.DATABASES['default']['NAME'] = PROJECT_PATH+'/db.sqlite3'
settings.DATABASES['default']['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
settings.DATABASES['default']['HOST'] = '0.0.0.0'
settings.DATABASES['default']['PORT'] = '5432'
settings.DATABASES['default']['USER'] = 'postgres'
settings.DATABASES['default']['PASSWORD'] = 'secret123'
settings.DATABASES['default']['NAME'] = 'mypgsqldb'
settings.DATABASES['default']['ATOMIC_REQUESTS'] = False
settings.DATABASES['default']['AUTOCOMMIT'] = True
settings.DATABASES['default']['CONN_MAX_AGE'] = 0
settings.DATABASES['default']['CONN_HEALTH_CHECKS'] = False
settings.DATABASES['default']['OPTIONS'] = {}
os.environ["XPRESSDIR"] = "/opt/xpressmp"
os.environ["XPRESS"] = "/opt/xpressmp/bin"
os.environ["LD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["DYLD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["SHLIB_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["LIBPATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["PYTHONPATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprs.jar"
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprb.jar" + os.pathsep + os.environ["CLASSPATH"]
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprm.jar" + os.pathsep + os.environ["CLASSPATH"]
os.environ["PATH"] = os.environ["XPRESSDIR"] + "/bin" + os.pathsep + os.environ["PATH"]
import django
django.setup()
from django.shortcuts import HttpResponseRedirect
from django.http import HttpResponse, JsonResponse
from django.utils import timezone
from django.urls import reverse
from django.core.files.storage import FileSystemStorage
from django.core.mail import send_mail
from django_tex.shortcuts import render_to_pdf
from celery.result import AsyncResult
import googlemaps
import timeit
import random
import json
import builtins as __builtin__
import csv
from leagues.celery import celery
from leagues.settings import EMAIL_DEFAULT_FROM, EMAIL_DEFAULT_TO
from leagues.settings import RUN_ENV, INSTANCE, DEBUG
from common.tasks import log_telegram
from common.functions import *
from scheduler.models import *
from scheduler.helpers import *
from scheduler.widgets import widget_context_kpis
from scheduler.solver.optimizer import optimize_2phases, optimize_sequentially
import scheduler.solver.optimizer as so
from draws.solver import optimize_draws
import time as timer
from qualifiers.helpers import import_globals
# %%
scenario = Scenario.objects.get(id=10312)
teams = scenario.season.scheduler_teams.all()
# %%
from draws.models import SuperGroup,Draw
base_draw = Draw.objects.filter(season=scenario.season).first()
supergroups = SuperGroup.objects.filter(draw=base_draw)
GameRequirement.objects.filter(scenario=scenario).delete()
for supergroup in supergroups:
games = optimize_draws.simulate_draws(supergroup.id, 1,report=False)
gamereqs = []
top_games = []
eng_top = []
fra_top = []
ger_top = []
ita_top = []
esp_top = []
for g in games:
team1 = teams.get(id=g[0])
team2 = teams.get(id=g[1])
attractivity = team1.attractivity*team2.attractivity
if attractivity >= 20:
top_games.append((g[0],g[1],attractivity))
if team1.countryObj.shortname == "ENG" and team2.countryObj != team1.countryObj and team2.attractivity >= 3 or \
team2.countryObj.shortname == "ENG" and team1.countryObj != team2.countryObj and team1.attractivity >= 3:
eng_top.append((g[0],g[1],attractivity))
if team1.countryObj.shortname == "FRA" and team2.countryObj != team1.countryObj and team2.attractivity >= 3 or \
team2.countryObj.shortname == "FRA" and team1.countryObj != team2.countryObj and team1.attractivity >= 3:
fra_top.append((g[0],g[1],attractivity))
if team1.countryObj.shortname == "GER" and team2.countryObj != team1.countryObj and team2.attractivity >= 3 or \
team2.countryObj.shortname == "GER" and team1.countryObj != team2.countryObj and team1.attractivity >= 3:
ger_top.append((g[0],g[1],attractivity))
if team1.countryObj.shortname == "ITA" and team2.countryObj != team1.countryObj and team2.attractivity >= 3 or \
team2.countryObj.shortname == "ITA" and team1.countryObj != team2.countryObj and team1.attractivity >= 3:
ita_top.append((g[0],g[1],attractivity))
if team1.countryObj.shortname == "ESP" and team2.countryObj != team1.countryObj and team2.attractivity >= 3 or \
team2.countryObj.shortname == "ESP" and team1.countryObj != team2.countryObj and team1.attractivity >= 3:
esp_top.append((g[0],g[1],attractivity))
gamereqs.append(GameRequirement(scenario=scenario, team1=teams.get(id=g[0]), team2=teams.get(id=g[1]), number=1))
GameRequirement.objects.bulk_create(gamereqs)
encgroups = EncGroup.objects.filter(scenario=scenario)
Encounter.objects.filter(scenario=scenario).delete()
for g in sorted(top_games, key=lambda x: x[2], reverse=True):
enc = Encounter(scenario=scenario,encounterGroup=encgroups.get(name="Top Games"))
enc.save()
enc.homeTeams.add(g[0])
enc.awayTeams.add(g[1])
eng_top = sorted(eng_top, key=lambda x: x[2], reverse=True)[:8]
for g in eng_top:
enc = Encounter(scenario=scenario,encounterGroup=encgroups.get(name="England top games"))
enc.save()
enc.homeTeams.add(g[0])
enc.awayTeams.add(g[1])
fra_top = sorted(fra_top, key=lambda x: x[2], reverse=True)[:8]
for g in fra_top:
enc = Encounter(scenario=scenario,encounterGroup=encgroups.get(name="France top games"))
enc.save()
enc.homeTeams.add(g[0])
enc.awayTeams.add(g[1])
ger_top = sorted(ger_top, key=lambda x: x[2], reverse=True)[:8]
for g in ger_top:
enc = Encounter(scenario=scenario,encounterGroup=encgroups.get(name="German top games"))
enc.save()
enc.homeTeams.add(g[0])
enc.awayTeams.add(g[1])
ita_top = sorted(ita_top, key=lambda x: x[2], reverse=True)[:8]
for g in ita_top:
enc = Encounter(scenario=scenario,encounterGroup=encgroups.get(name="Italian top games"))
enc.save()
enc.homeTeams.add(g[0])
enc.awayTeams.add(g[1])
esp_top = sorted(esp_top, key=lambda x: x[2], reverse=True)[:8]
for g in esp_top:
enc = Encounter(scenario=scenario,encounterGroup=encgroups.get(name="Spain top games"))
enc.save()
enc.homeTeams.add(g[0])
enc.awayTeams.add(g[1])
# adjust distribution of topgames
EncWish.objects.filter(scenario=scenario,reason="ITA top games should not be scheduled more than 60% of the time on the same weekday").update(minGames=int(len(ita_top)*0.4),maxGames=int(len(ita_top)*0.6+0.5))
EncWish.objects.filter(scenario=scenario,reason="ENG top games should not be scheduled more than 60% of the time on the same weekday").update(minGames=int(len(eng_top)*0.4),maxGames=int(len(eng_top)*0.6+0.5))
EncWish.objects.filter(scenario=scenario,reason="FRA top games should not be scheduled more than 60% of the time on the same weekday").update(minGames=int(len(fra_top)*0.4),maxGames=int(len(fra_top)*0.6+0.5))
EncWish.objects.filter(scenario=scenario,reason="ESP top games should not be scheduled more than 60% of the time on the same weekday").update(minGames=int(len(esp_top)*0.4),maxGames=int(len(esp_top)*0.6+0.5))
EncWish.objects.filter(scenario=scenario,reason="GER top games should not be scheduled more than 60% of the time on the same weekday").update(minGames=int(len(ger_top)*0.4),maxGames=int(len(ger_top)*0.6+0.5))
exit()
# %%
t_dict = {
"Getafe C.F.": 87960,
"FC Shkupi": 2606993,
"Maccabi Tel-Aviv FC": 57477,
"Malatyaspor": 78058,
"RCD Espanyol de Barcelona": 54189,
"Tottenham Hotspur": 1652,
"Trabzonspor A.Ş.": 52731,
"AS Monaco": 50023,
"Fotbal Club FCSB": 50065,
"FK Radnički Niš": 52734,
"F91 Dudelange": 59028,
"Progrès Niederkorn": 52311,
"IFK Norrköping": 50099,
"FC Levadia Tallinn": 77482,
"LASK Linz": 63405,
"GKS Piast Gliwice": 2600545,
"MKS Cracovia Kraków": 88133,
"Sabail": 2608569,
"RC Strasbourg": 59857,
"FK Makedonija Skopje": 64395,
"PFC Arsenal Tula": 2606414,
"U Craiova 1948 Club Sportiv": 64503,
"VfL Borussia Mönchengladbach": 52757,
"PFC Lokomotiv Plovdiv 1926": 57466,
"Football Club Zenit": 52826,
"Lechia Gdańsk": 52763,
"Hapoel Beer-Sheva FC": 59340,
"SP La Fiorita": 64508,
"KF Feronikeli": 2608281,
"Cardiff MUFC": 59337,
"Barry Town UFC": 53069,
"Beşiktaş": 50157,
"Mons Calpe Sports Club": 2608011,
"FC St.Gallen 1879": 51151,
"FC Zimbru Chişinău": 59036,
"Hapoel Be'er Sheva FC": 59340,
"PFC Arda Kardzhali 1924": 2610060,
"FC Torpedo-Belaz Zhodino": 79551,
"Panevezys": 2608927,
"Ballkani": 2609552,
"Dnipro-1": 2609193,
"Union Saint-Gilloise": 64125,
"Omonoia FC": 50077,
"R. Charleroi SC": 52886,
"Manchester City FC": 52919,
"Manchester United FC": 52682,
"Barry Town United FC": 53069,
"FC CSKA 1948": 2610262,
"Sabah": 2609356,
"FC Bologna": 52969,
"FC Dynamo Brest": 64374,
"Kisvarda Master Good": 2605958,
"FC Bologna": 52969,
"Beşiktaş": 50157,
"Omonoia FC": 50077,
"WKS Śląsk Wrocław": 52655,
"FC Ararat Yerevan": 59318,
"FCB Magpies": 2606279,
"KKS Lech Poznań": 64227,
"Sumgait FC": 2603921,
"KF Dukagjini": 2608311,
"FC Haka Valkeakoski": 52802,
"FC Hegelmann Litauen": 2607108,
"FCV Farul": 2604753,
"FC Petrocub Hîncești": 2607112,
"Kolos Kovalivka": 2609189,
"St. Patrick Athletic FC": 50133,
"Manchester United FC": 52682,
"Trabzonspor AS": 52731,
"İstanbul Basaksehir FK": 2600288,
"Borussia VfL 1900 Mönchengladbach": 52757,
}
for t in teams:
gt = t.global_id
if not gt:
gt = GlobalTeam.objects.filter(name=t.name).first()
if not gt:
gt = GlobalTeam.objects.filter(fame_id=t_dict.get(t.name)).first()
if not gt:
print(t.name)
t.name = gt.name
t.global_id = gt
t.external_id = gt.fame_id
t.save()
# %%
# %%
new_teams = []
teams.update(active=False)
with open('calendar_juneteams/ucl_teams.csv', newline='') as csvfile:
reader = csv.reader(csvfile)
next(reader, None)
for row in reader:
team_name = row[1].split("(")[0].strip()
if row[0] != "":
# print(row)
team_pos = int(row[0])
team_name = row[1].split("(")[0].strip()
team_coeff = row[2]
t = teams.filter(name=team_name).first()
gt = GlobalTeam.objects.filter(name=team_name).first()
if not t:
print(f"Team {team_name} not found")
if not gt:
print(f"Global team {team_name} not found")
else:
t = Team.objects.create(
season=scenario.season,
name=team_name,
shortname=gt.shortname,
position=team_pos,
latitude=gt.latitude,
longitude=gt.longitude,
countryObj=Country.objects.get(season=scenario.season,shortname=gt.country.uefa),
active=True
)
if not t:
print("Could not create team", team_name)
continue
t.global_id = gt
t.external_id = gt.fame_id
t.active=True
t.position = team_pos
if team_pos <= 9:
t.pot = 1
elif team_pos <= 18:
t.pot = 2
elif team_pos <= 27:
t.pot = 3
else:
t.pot = 4
t.save()
# %%
CET_minus_1 = ['ENG','POR','SCO','ISL','FRO']
CET_plus_1 = ['TUR','GRE','ROU','LTU','FIN','EST','MDA','CYP','AZE','ISR','UKR']
for i in range(1,5):
conf = Conference.objects.get_or_create(scenario=scenario,name=f"UCL-Pot {i}")
conf[0].teams.clear()
for i in range(1,6):
conf = Conference.objects.get_or_create(scenario=scenario,name=f"Global Coeff {i}")
conf[0].teams.clear()
conf = Conference.objects.get_or_create(scenario=scenario,name=f"Domestic Coeff {i}")
conf[0].teams.clear()
conf = Conference.objects.get_or_create(scenario=scenario,name="CET")
conf[0].teams.clear()
conf = Conference.objects.get_or_create(scenario=scenario,name="CET-1")
conf[0].teams.clear()
conf = Conference.objects.get_or_create(scenario=scenario,name="CET+1")
conf[0].teams.clear()
# for conf in Conference.objects.filter(scenario=scenario).exclude(name__in=['HARD Constraints','SOFT Constraints']):
# conf[0].teams.clear()
# conf.collapseInView = True
# conf.save()
# Team.objects.filter(season=scenario.season).update(active=False)
# for t in new_teams:
# team_name = t[1].split('(')[0].strip()
# team_country = t[1].split('(')[1].split(')')[0].strip()
# # abbreviation = t[2]
# global_coeff = t[4]
# domestic_coeff = t[5]
# pot = int(t[3].split(' ')[1].strip())
# pos = int(t[0])
# competition = "UCL"
# teamObj = Team.objects.filter(season=scenario.season,name=team_name)
# if teamObj:
# pass
# else:
# print(t,"->", team_name)
# gteam = GlobalTeam.objects.filter(name=team_name)
# if gteam:
# teamObj = Team.objects.create(season=scenario.season,
# name=team_name,
# attractivity=global_coeff+0.1*domestic_coeff,
# position=pos,
# pot=pot,
# latitude=gteam.first().latitude,
# longitude=gteam.first().longitude,
# country=gteam.first().country,
# active=True)
# print("\tCreated team from global", team_name)
# teamObj = Team.objects.filter(season=scenario.season,name=team_name)
# else:
# print("\tTeam not found", team_name)
# continue
for team in Team.objects.filter(season=scenario.season,active=True):
teamObj = Team.objects.filter(id=team.id)
competition = "UCL"
global_coeff = int(team.attractivity)
domestic_coeff = int(team.attractivity*10)%10
team_country = team.countryObj.shortname
Conference.objects.filter(scenario=scenario,name=competition).first().teams.add(teamObj.first())
Conference.objects.filter(scenario=scenario,name=f"{competition}-Pot {team.pot}").first().teams.add(teamObj.first())
if global_coeff in range(1,6):
Conference.objects.filter(scenario=scenario,name=f"Global Coeff {global_coeff}").first().teams.add(teamObj.first())
if domestic_coeff in range(1,6):
Conference.objects.filter(scenario=scenario,name=f"Domestic Coeff {domestic_coeff}").first().teams.add(teamObj.first())
if team_country in CET_minus_1:
Conference.objects.filter(scenario=scenario,name="CET-1").first().teams.add(teamObj.first())
elif team_country in CET_plus_1:
Conference.objects.filter(scenario=scenario,name="CET+1").first().teams.add(teamObj.first())
else:
Conference.objects.filter(scenario=scenario,name="CET").first().teams.add(teamObj.first())
teamObj.update(coefficient=5-team.pot)
for conf in Conference.objects.filter(scenario=scenario):
for t in conf.teams.filter(active=False):
conf.teams.remove(t)
for haw in HAWish.objects.filter(scenario=scenario):
for t in haw.teams.filter(active=False):
haw.teams.remove(t)
for enc in EncWish.objects.filter(scenario=scenario):
for t in enc.teams1.filter(active=False):
enc.teams1.remove(t)
for t in enc.teams2.filter(active=False):
enc.teams1.remove(t)
for pair in Pairing.objects.filter(scenario=scenario):
if pair.team1.active==False or pair.team2.active==False:
pair.active=False
pair.save()
# %%
# %%

View File

@ -0,0 +1,519 @@
# %%
# Bootstrap a standalone Django shell for the leagues project: point settings
# at the local Postgres instance, configure the FICO Xpress solver environment,
# then import the project modules (which require django.setup() first).
PROJECT_PATH = '/home/md/Work/ligalytics/leagues_stable/'
import os, sys
sys.path.insert(0, PROJECT_PATH)
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "leagues.settings")
os.environ["DJANGO_ALLOW_ASYNC_UNSAFE"] = "true"
from leagues import settings
# settings.DATABASES['default']['NAME'] = PROJECT_PATH+'/db.sqlite3'
# NOTE(review): hard-coded database credentials -- acceptable only on a local
# dev box; move to environment variables before sharing this script.
settings.DATABASES['default']['ENGINE'] = 'django.db.backends.postgresql_psycopg2'
settings.DATABASES['default']['HOST'] = '0.0.0.0'
settings.DATABASES['default']['PORT'] = '5432'
settings.DATABASES['default']['USER'] = 'postgres'
settings.DATABASES['default']['PASSWORD'] = 'secret123'
settings.DATABASES['default']['NAME'] = 'mypgsqldb'
settings.DATABASES['default']['ATOMIC_REQUESTS'] = False
settings.DATABASES['default']['AUTOCOMMIT'] = True
settings.DATABASES['default']['CONN_MAX_AGE'] = 0
settings.DATABASES['default']['CONN_HEALTH_CHECKS'] = False
settings.DATABASES['default']['OPTIONS'] = {}
# FICO Xpress installation paths (library/search paths for the solver).
os.environ["XPRESSDIR"] = "/opt/xpressmp"
os.environ["XPRESS"] = "/opt/xpressmp/bin"
os.environ["LD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["DYLD_LIBRARY_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["SHLIB_PATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["LIBPATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["PYTHONPATH"] = os.environ["XPRESSDIR"] + "/lib"
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprs.jar"
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprb.jar" + os.pathsep + os.environ["CLASSPATH"]
os.environ["CLASSPATH"] = os.environ["XPRESSDIR"] + "/lib/xprm.jar" + os.pathsep + os.environ["CLASSPATH"]
os.environ["PATH"] = os.environ["XPRESSDIR"] + "/bin" + os.pathsep + os.environ["PATH"]
import django
django.setup()
# Project imports below require a configured Django environment.
from django.shortcuts import HttpResponseRedirect
from django.http import HttpResponse, JsonResponse
from django.utils import timezone
from django.urls import reverse
from django.core.files.storage import FileSystemStorage
from django.core.mail import send_mail
from django_tex.shortcuts import render_to_pdf
from celery.result import AsyncResult
import googlemaps
import timeit
import random
import json
import builtins as __builtin__
import csv
from leagues.celery import celery
from leagues.settings import EMAIL_DEFAULT_FROM, EMAIL_DEFAULT_TO
from leagues.settings import RUN_ENV, INSTANCE, DEBUG
from common.tasks import log_telegram
from common.functions import *
from scheduler.models import *
from scheduler.helpers import *
from scheduler.widgets import widget_context_kpis
from scheduler.solver.optimizer import optimize_2phases, optimize_sequentially
import scheduler.solver.optimizer as so
from draws.solver import optimize_draws
import time as timer
from qualifiers.helpers import import_globals
# %%
# Target scheduling scenario (hard-coded id) and its season's team queryset.
scenario = Scenario.objects.get(id=10313)
teams = scenario.season.scheduler_teams.all()
# %%
# Simulate one league-phase draw per supergroup, persist the resulting game
# requirements, and derive "top game" encounters (overall blockbusters plus the
# 8 most attractive cross-border games per big-five country).
from draws.models import SuperGroup,Draw

# Country code -> Encounter group name. The EncWish "reason" strings below are
# keyed by the country code itself.
_COUNTRY_TOP = {
    "ENG": "England top games",
    "FRA": "France top games",
    "GER": "German top games",
    "ITA": "Italian top games",
    "ESP": "Spain top games",
}

base_draw = Draw.objects.filter(season=scenario.season).first()
supergroups = SuperGroup.objects.filter(draw=base_draw)
GameRequirement.objects.filter(scenario=scenario).delete()
for supergroup in supergroups:
    games = optimize_draws.simulate_draws(supergroup.id, 1, report=False)
    # NOTE(review): these accumulators reset per supergroup, so the encounter
    # creation below only reflects the LAST supergroup -- confirm this is
    # intended (it is harmless when the draw has a single supergroup).
    gamereqs = []
    top_games = []
    country_top = {c: [] for c in _COUNTRY_TOP}
    for g in games:
        team1 = teams.get(id=g[0])
        team2 = teams.get(id=g[1])
        attractivity = team1.attractivity*team2.attractivity
        if attractivity >= 20:  # overall blockbuster threshold
            top_games.append((g[0], g[1], attractivity))
        # A game is a "top game" for country c when one side is from c and the
        # foreign opponent has attractivity >= 3.
        for c in country_top:
            if (team1.countryObj.shortname == c and team2.countryObj != team1.countryObj and team2.attractivity >= 3) or \
               (team2.countryObj.shortname == c and team1.countryObj != team2.countryObj and team1.attractivity >= 3):
                country_top[c].append((g[0], g[1], attractivity))
        gamereqs.append(GameRequirement(scenario=scenario, team1=team1, team2=team2, number=1))
    GameRequirement.objects.bulk_create(gamereqs)

encgroups = EncGroup.objects.filter(scenario=scenario)
Encounter.objects.filter(scenario=scenario).delete()

def _create_encounters(group_name, game_list):
    """Create one Encounter in group_name per (home_id, away_id, score) triple."""
    group = encgroups.get(name=group_name)
    for home, away, _score in game_list:
        enc = Encounter(scenario=scenario, encounterGroup=group)
        enc.save()
        enc.homeTeams.add(home)
        enc.awayTeams.add(away)

_create_encounters("Top Games", sorted(top_games, key=lambda x: x[2], reverse=True))
for c, group_name in _COUNTRY_TOP.items():
    # Keep only the 8 most attractive top games for this country, then cap how
    # often they may share a weekday (40%-60% distribution window).
    best = sorted(country_top[c], key=lambda x: x[2], reverse=True)[:8]
    _create_encounters(group_name, best)
    EncWish.objects.filter(
        scenario=scenario,
        reason=f"{c} top games should not be scheduled more than 60% of the time on the same weekday",
    ).update(minGames=int(len(best)*0.4), maxGames=int(len(best)*0.6+0.5))
exit()
# %%
# Manual mapping: scheduler team name -> GlobalTeam fame_id, used as a fallback
# when a team cannot be matched by FK or by exact name. Some clubs appear under
# two spellings on purpose (e.g. "Hapoel Beer-Sheva FC" / "Hapoel Be'er Sheva FC",
# "Trabzonspor A.Ş." / "Trabzonspor AS") and map to the same id.
# fixed: removed four literally duplicated keys (identical values), which Python
# silently collapses anyway: "FC Bologna", "Beşiktaş", "Omonoia FC",
# "Manchester United FC".
t_dict = {
    "Getafe C.F.": 87960,
    "FC Shkupi": 2606993,
    "Maccabi Tel-Aviv FC": 57477,
    "Malatyaspor": 78058,
    "RCD Espanyol de Barcelona": 54189,
    "Tottenham Hotspur": 1652,
    "Trabzonspor A.Ş.": 52731,
    "AS Monaco": 50023,
    "Fotbal Club FCSB": 50065,
    "FK Radnički Niš": 52734,
    "F91 Dudelange": 59028,
    "Progrès Niederkorn": 52311,
    "IFK Norrköping": 50099,
    "FC Levadia Tallinn": 77482,
    "LASK Linz": 63405,
    "GKS Piast Gliwice": 2600545,
    "MKS Cracovia Kraków": 88133,
    "Sabail": 2608569,
    "RC Strasbourg": 59857,
    "FK Makedonija Skopje": 64395,
    "PFC Arsenal Tula": 2606414,
    "U Craiova 1948 Club Sportiv": 64503,
    "VfL Borussia Mönchengladbach": 52757,
    "PFC Lokomotiv Plovdiv 1926": 57466,
    "Football Club Zenit": 52826,
    "Lechia Gdańsk": 52763,
    "Hapoel Beer-Sheva FC": 59340,
    "SP La Fiorita": 64508,
    "KF Feronikeli": 2608281,
    "Cardiff MUFC": 59337,
    "Barry Town UFC": 53069,
    "Beşiktaş": 50157,
    "Mons Calpe Sports Club": 2608011,
    "FC St.Gallen 1879": 51151,
    "FC Zimbru Chişinău": 59036,
    "Hapoel Be'er Sheva FC": 59340,
    "PFC Arda Kardzhali 1924": 2610060,
    "FC Torpedo-Belaz Zhodino": 79551,
    "Panevezys": 2608927,
    "Ballkani": 2609552,
    "Dnipro-1": 2609193,
    "Union Saint-Gilloise": 64125,
    "Omonoia FC": 50077,
    "R. Charleroi SC": 52886,
    "Manchester City FC": 52919,
    "Manchester United FC": 52682,
    "Barry Town United FC": 53069,
    "FC CSKA 1948": 2610262,
    "Sabah": 2609356,
    "FC Bologna": 52969,
    "FC Dynamo Brest": 64374,
    "Kisvarda Master Good": 2605958,
    "WKS Śląsk Wrocław": 52655,
    "FC Ararat Yerevan": 59318,
    "FCB Magpies": 2606279,
    "KKS Lech Poznań": 64227,
    "Sumgait FC": 2603921,
    "KF Dukagjini": 2608311,
    "FC Haka Valkeakoski": 52802,
    "FC Hegelmann Litauen": 2607108,
    "FCV Farul": 2604753,
    "FC Petrocub Hîncești": 2607112,
    "Kolos Kovalivka": 2609189,
    "St. Patrick Athletic FC": 50133,
    "Trabzonspor AS": 52731,
    "Sepsi OSK Sfantu Gheorghe": 2608575,
    "Raków Czestochowa": 60566,
    "FK TSC Bačka Topola": 2610140,
    "İstanbul Başakşehir FK": 2600288,
}
# Link every scheduler team to its GlobalTeam: first via the existing FK, then
# by exact name, finally through the manual fame_id mapping in t_dict.
for t in teams:
    gt = t.global_id
    if not gt:
        gt = GlobalTeam.objects.filter(name=t.name).first()
    if not gt:
        gt = GlobalTeam.objects.filter(fame_id=t_dict.get(t.name)).first()
    if not gt:
        # fixed: report the unmatched team and skip it; previously the code
        # fell through and crashed with AttributeError on `gt.name` below.
        print(t.name)
        continue
    # Normalize the local record to the global one.
    t.name = gt.name
    t.global_id = gt
    t.external_id = gt.fame_id
    t.save()
# %%
# %%
new_teams = []
uel_teams = []  # collected Team ids of this season's UEL participants
# Deactivate everything first; teams present in the CSV are re-activated below.
teams.update(active=False)
with open('calendar_juneteams/uel_teams.csv', newline='') as csvfile:
    reader = csv.reader(csvfile)
    next(reader, None)  # skip header row
    for row in reader:
        # row layout: position, "Name (extra)", coefficient
        team_name = row[1].split("(")[0].strip()
        if row[0] != "":
            # print(row)
            team_pos = int(row[0])
            team_name = row[1].split("(")[0].strip()
            team_coeff = row[2]
            # Prefer an existing season team that already has a logo.
            t = teams.filter(name=team_name).order_by('-logo').first()
            gt = GlobalTeam.objects.filter(name=team_name).first()
            if not t:
                print(f"Team {team_name} not found")
                if not gt:
                    print(f"Global team {team_name} not found")
                else:
                    # Create the season team from the global record.
                    t = Team.objects.create(
                        season=scenario.season,
                        name=team_name,
                        shortname=gt.shortname,
                        position=team_pos,
                        latitude=gt.latitude,
                        longitude=gt.longitude,
                        countryObj=Country.objects.get(season=scenario.season,shortname=gt.country.uefa),
                        active=True
                    )
                    print("Created team", team_name)
            if gt:
                t.global_id = gt
                t.external_id = gt.fame_id
                t.shortname = gt.shortname
            # NOTE(review): if neither t nor gt was found, t is still None and
            # the lines below raise AttributeError -- confirm that is intended.
            t.active=True
            t.position = team_pos
            # UEL pots: 9 teams per pot, pots 1-4.
            if team_pos <= 9:
                t.pot = 1
            elif team_pos <= 18:
                t.pot = 2
            elif team_pos <= 27:
                t.pot = 3
            else:
                t.pot = 4
            t.save()
            uel_teams.append(t.id)
new_teams = []
uecl_teams= []  # collected Team ids of this season's UECL participants
# Deactivation already happened in the UEL import above.
# teams.update(active=False)
with open('calendar_juneteams/uecl_teams.csv', newline='') as csvfile:
    reader = csv.reader(csvfile)
    next(reader, None)  # skip header row
    for row in reader:
        # row layout: position, "Name (extra)", coefficient
        team_name = row[1].split("(")[0].strip()
        if row[0] != "":
            # print(row)
            team_pos = int(row[0])
            team_name = row[1].split("(")[0].strip()
            team_coeff = row[2]
            # Prefer an existing season team that already has a logo.
            t = teams.filter(name=team_name).order_by('-logo').first()
            gt = GlobalTeam.objects.filter(name=team_name).first()
            if not t:
                print(f"Team {team_name} not found")
                if not gt:
                    print(f"Global team {team_name} not found")
                else:
                    # Create the season team from the global record.
                    t = Team.objects.create(
                        season=scenario.season,
                        name=team_name,
                        shortname=gt.shortname,
                        position=team_pos,
                        latitude=gt.latitude,
                        longitude=gt.longitude,
                        countryObj=Country.objects.get(season=scenario.season,shortname=gt.country.uefa),
                        active=True
                    )
                    print("Created team", team_name)
            if gt:
                t.global_id = gt
                t.external_id = gt.fame_id
                t.shortname = gt.shortname
            # NOTE(review): if neither t nor gt was found, t is still None and
            # the lines below raise AttributeError -- confirm that is intended.
            t.active=True
            t.position = team_pos
            # UECL pots: 6 teams per pot, pots 1-6.
            if team_pos <= 6:
                t.pot = 1
            elif team_pos <= 12:
                t.pot = 2
            elif team_pos <= 18:
                t.pot = 3
            elif team_pos <= 24:
                t.pot = 4
            elif team_pos <= 30:
                t.pot = 5
            else:
                t.pot = 6
            t.save()
            uecl_teams.append(t.id)
# %%
# Countries whose local time differs from CET, for the timezone conferences.
CET_minus_1 = ['ENG','POR','SCO','ISL','FRO']
CET_plus_1 = ['TUR','GRE','ROU','LTU','FIN','EST','MDA','CYP','AZE','ISR','UKR']
# Create (or reset) every conference bucket used by the assignment loop below;
# get_or_create returns an (object, created) tuple, hence conf[0].
conf = Conference.objects.get_or_create(scenario=scenario,name=f"UEL")
conf[0].teams.clear()
conf = Conference.objects.get_or_create(scenario=scenario,name=f"UECL")
conf[0].teams.clear()
for i in range(1,5):   # UEL pots 1-4
    conf = Conference.objects.get_or_create(scenario=scenario,name=f"UEL-Pot {i}")
    conf[0].teams.clear()
for i in range(1,7):   # UECL pots 1-6
    conf = Conference.objects.get_or_create(scenario=scenario,name=f"UECL-Pot {i}")
    conf[0].teams.clear()
for i in range(1,6):   # coefficient bands 1-5
    conf = Conference.objects.get_or_create(scenario=scenario,name=f"Global Coeff {i}")
    conf[0].teams.clear()
    conf = Conference.objects.get_or_create(scenario=scenario,name=f"Domestic Coeff {i}")
    conf[0].teams.clear()
conf = Conference.objects.get_or_create(scenario=scenario,name="CET")
conf[0].teams.clear()
conf = Conference.objects.get_or_create(scenario=scenario,name="CET-1")
conf[0].teams.clear()
conf = Conference.objects.get_or_create(scenario=scenario,name="CET+1")
conf[0].teams.clear()
# Assign every active team to its competition's conference buckets
# (competition, pot, coefficient band, CET timezone group).
for team in Team.objects.filter(season=scenario.season,active=True):
    teamObj = Team.objects.filter(id=team.id)
    if team.id in uel_teams:
        competition = "UEL"
    elif team.id in uecl_teams:
        competition = "UECL"
    else:
        # fixed: previously only printed and fell through, reusing the stale
        # `competition` value of the previous iteration (or raising NameError
        # on the first one); skip unmatched teams instead.
        print("Team not found in any competition")
        continue
    # attractivity packs two digits: integer part = global coefficient band,
    # first decimal digit = domestic band (assumed encoding -- TODO confirm).
    global_coeff = int(team.attractivity)
    domestic_coeff = int(team.attractivity*10)%10
    team_country = team.countryObj.shortname
    Conference.objects.filter(scenario=scenario,name=competition).first().teams.add(teamObj.first())
    Conference.objects.filter(scenario=scenario,name=f"{competition}-Pot {team.pot}").first().teams.add(teamObj.first())
    if global_coeff in range(1,6):
        Conference.objects.filter(scenario=scenario,name=f"Global Coeff {global_coeff}").first().teams.add(teamObj.first())
    if domestic_coeff in range(1,6):
        Conference.objects.filter(scenario=scenario,name=f"Domestic Coeff {domestic_coeff}").first().teams.add(teamObj.first())
    if team_country in CET_minus_1:
        Conference.objects.filter(scenario=scenario,name="CET-1").first().teams.add(teamObj.first())
    elif team_country in CET_plus_1:
        Conference.objects.filter(scenario=scenario,name="CET+1").first().teams.add(teamObj.first())
    else:
        Conference.objects.filter(scenario=scenario,name="CET").first().teams.add(teamObj.first())
    # Coefficient descends with pot: UEL pots 1-4 -> 4-1, UECL pots 1-6 -> 6-1.
    if competition == "UEL":
        teamObj.update(coefficient=5-team.pot)
    else:
        teamObj.update(coefficient=7-team.pot)
# Strip inactive teams out of every conference, home/away wish and encounter
# wish, and deactivate pairings that reference an inactive team.
for conf in Conference.objects.filter(scenario=scenario):
    for t in conf.teams.filter(active=False):
        conf.teams.remove(t)
for haw in HAWish.objects.filter(scenario=scenario):
    for t in haw.teams.filter(active=False):
        haw.teams.remove(t)
for enc in EncWish.objects.filter(scenario=scenario):
    for t in enc.teams1.filter(active=False):
        enc.teams1.remove(t)
    for t in enc.teams2.filter(active=False):
        # fixed: was enc.teams1.remove(t) -- inactive teams found on the
        # teams2 side were removed from the wrong relation, leaving stale
        # inactive entries in teams2.
        enc.teams2.remove(t)
for pair in Pairing.objects.filter(scenario=scenario):
    if pair.team1.active==False or pair.team2.active==False:
        pair.active=False
        pair.save()
from draws.models import SuperGroup
# Mirror each competition's conference membership into its draw SuperGroup.
for comp_name in ("UEL", "UECL"):
    supergroup = SuperGroup.objects.filter(draw__season=scenario.season).get(name=comp_name)
    supergroup.teams.clear()
    members = Conference.objects.get(scenario=scenario, name=comp_name).teams.all()
    for member in members:
        supergroup.teams.add(member)
# %%
# Drop all blockings attached to this scenario.
Blocking.objects.filter(scenario=scenario).delete()
# %%

View File

@ -1,71 +0,0 @@
# %%
# Ad-hoc client: POST a small set of fixtures to the draw-checker endpoint.
import requests
# NOTE(review): hard-coded API keys below -- move to environment variables or a
# secrets store, and rotate them if this file has ever been shared.
prod_url = 'https://uefadigitalapi.developer.azure-api.net'
prod_primary_key = '7dfa861240aa40f8a834990c24f1a66d'
prod_secondary_key = '4451dcc1ad4f41b2aa6af96cc5a1256a'
pre_url = 'https://uefadigitalapipre.developer.azure-api.net'
pre_primary_key = '1decf93425944f8b9e6dc7226a3b8477'
pre_secondary_key = '14771f5c67b74836a59f777cb543cc0f'
# %%
# r=requests.get("https://api.digital.uefa.com/comp/v2/competitions/1/seasons", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
# # %%
# r.json()
# # %%
# r=requests.get("https://api.pre.digital.uefa.com/comp/v2/competitions/1/seasons", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"14771f5c67b74836a59f777cb543cc0f"})
# # %%
# r.json()
# # %%
# Payload: the 8 fixtures of one team (ids are UEFA team ids).
data = {
    "season": "UCL 24/25",
    "games": [
        {
            "home": 50051,
            "away": 52682
        },
        {
            "home": 52747,
            "away": 50051
        },
        {
            "home": 50051,
            "away": 50030
        },
        {
            "home": 52758,
            "away": 50051
        },
        {
            "home": 50051,
            "away": 50031
        },
        {
            "home": 52336,
            "away": 50051
        },
        {
            "home": 50051,
            "away": 50050
        },
        {
            "home": 77977,
            "away": 50051
        }
    ]
}
# %%
# r=requests.post("http://localhost:8000/api/uefa/checker/",
# NOTE(review): hard-coded bearer token -- same secrets-handling concern as above.
r=requests.post("https://compute.asolvo.de/api/uefa/teams/",
                headers={"Authorization": "R6v1e9Q5W8aS3b7C4x2KpZqL9yFmXnDz"},
                json=data)
# %%
r.json()
# %%

View File

@ -0,0 +1,83 @@
# %%
# Ad-hoc Azure Blob Storage maintenance for the draw app: list (and, when the
# commented section is enabled, upload/tag/delete) blobs in the "ucl" container.
from azure.storage.blob import BlobServiceClient
from azure.core.exceptions import AzureError
import zipfile
import os
# File extension -> blob "category" tag.
category_dict = {
    "zip": "CheckerCalls",
    "txt": "Matchups",
    "json": "Checkers",
}
# %%
container_name = "ucl"
season = "UCL 2024/25"
blobEndpoint = "https://aedrawappprd.blob.core.windows.net/"
# NOTE(review): long-lived SAS token with broad permissions hard-coded here --
# store it outside the source and rotate if this file has been shared.
sas_token = "sv=2022-11-02&ss=bfqt&srt=sco&sp=rwdlacupiytfx&se=2027-08-30T23:13:30Z&st=2024-07-31T15:13:30Z&spr=https&sig=zwvSe6mt%2FdKLSoKK1oPUTBhXroukhUIwtSQod3h6oHk%3D"
blob_service_client = BlobServiceClient(account_url=blobEndpoint, credential=sas_token)
container_client = blob_service_client.get_container_client(container=container_name)
# files = os.listdir("uefa_blob")
# for file in files:
#     extension = file.split(".")[-1]
#     category = category_dict[extension]
#     tags = {
#         "uploader": "Asolvo",
#         "competition": container_name,
#         "season": season,
#         "category": category,
#         "draw": "Rehearsal",
#     }
#     filename = f"uefa_blob/{file}"
#     with open(file=filename, mode="rb") as data:
#         try :
#             blob_client = container_client.upload_blob(name=file, data=data, tags=tags)
#             print("Upload succeeded", file)
#         except AzureError as az_error :
#             print(f'Error message: {az_error.message}')
# filename = 'test.txt'
# fileName = 'foobar'
# container_client.delete_blob(blob="test.txt")
# %%
# List checker-simulation blobs from a specific date.
for name in container_client.list_blob_names():
    if "asolvo_draw_simulation_checker" in name and "20240829" in name:
        print(name)
        # for key,val in container_client.get_blob_client(blob=name).get_blob_tags().items():
        #     print(f'\t{key} : {val}')
# %%
# %%
# %%
import requests
import json
# url = f'https://aedrawapp-appservice-we-prd.azurewebsites.net/uefa/club_competitions/draws'
# headers = { "Content-Type": "application/json", "Authorization": "9ec57d80ec0087b6fb8045b78b4556f91453de43392670597acc221a19c0efd0"}
# todo = ({ "simNumber": 1, "team_to_check": "Test", "pos": 1, "games" : [ {"home":1, "away":2 }], "feasible":True, "comment":""})
# checkerCompareTodos = [todo]
# j=0
# theseCalls=[ { 'season': "UCL 2024/25", "pots" : [1,2,3,4] ,'games': ch['games'], "checkid" : 100*j+i} for i,ch in enumerate(checkerCompareTodos[j*100:j*100+100])]
# r = requests.post(f'{url}/checker/', headers=headers, data=json.dumps(theseCalls) , verify=False, timeout=1000)
# # %%
# r.text
# %%
# %%

View File

@ -0,0 +1,42 @@
# %%
# Ad-hoc client for the court-optimization API: submit a task from a local
# JSON payload, then poll its status by task_id.
import requests
import json
# %%
""" read problem data from json file """
filename= "data.json"
with open(filename) as json_file:
    data = json.load(json_file)
# optimize_courts(data,RUN_ENV='local')
# %%
""" create a new optimization task """
# NOTE(review): hard-coded API token -- keep out of version control.
# res = requests.post("https://optimization.ligalytics.com/api/court/task/",
res = requests.post("http://localhost:8000/api/court/task/",
                    headers={"Authorization": "3d9802e40000463b877bb26255915ca8"},
                    json=data)
print(res.json())
# %%
""" receive task_id """
task_id = res.json()['task_id']
# print(res.json())
# %%
""" get the task status """
# res = requests.get(f"https://optimization.ligalytics.com/api/court/task?task_id={task_id}",
res = requests.get(f"http://localhost:8000/api/court/task?task_id={task_id}",
                   headers={"Authorization": "3d9802e40000463b877bb26255915ca8"},
                   json=[])
""" print the task result """
print(res.json())
# %%

View File

@ -0,0 +1,252 @@
# %%
import requests
import time
# prod_url = 'https://uefadigitalapi.developer.azure-api.net'
# prod_primary_key = '7dfa861240aa40f8a834990c24f1a66d'
# prod_secondary_key = '4451dcc1ad4f41b2aa6af96cc5a1256a'
# pre_url = 'https://uefadigitalapipre.developer.azure-api.net'
# pre_primary_key = '1decf93425944f8b9e6dc7226a3b8477'
# pre_secondary_key = '14771f5c67b74836a59f777cb543cc0f'
# %%
# r=requests.get("https://api.digital.uefa.com/comp/v2/competitions/1/seasons", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
# # %%
# r.json()
# # %%
# r=requests.get("https://api.pre.digital.uefa.com/comp/v2/competitions/1/seasons", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"14771f5c67b74836a59f777cb543cc0f"})
# # %%
# r.json()
# # %%
# One full simulated league-phase fixture list as {home, away} UEFA team-id
# pairs (unused by the request below unless swapped in for data["games"]).
games = [{'home': 52919, 'away': 52747},
         {'home': 52919, 'away': 50030},
         {'home': 52919, 'away': 52709},
         {'home': 52919, 'away': 50113},
         {'home': 52714, 'away': 52919},
         {'home': 50064, 'away': 52919},
         {'home': 50031, 'away': 52919},
         {'home': 50123, 'away': 52919},
         {'home': 50037, 'away': 7889},
         {'home': 50037, 'away': 50124},
         {'home': 50037, 'away': 52336},
         {'home': 50037, 'away': 50067},
         {'home': 52682, 'away': 50037},
         {'home': 50136, 'away': 50037},
         {'home': 52749, 'away': 50037},
         {'home': 52748, 'away': 50037},
         {'home': 7889, 'away': 50080},
         {'home': 7889, 'away': 52758},
         {'home': 7889, 'away': 50058},
         {'home': 7889, 'away': 50050},
         {'home': 50124, 'away': 7889},
         {'home': 52336, 'away': 7889},
         {'home': 50067, 'away': 7889},
         {'home': 50051, 'away': 50138},
         {'home': 50051, 'away': 52707},
         {'home': 50051, 'away': 50069},
         {'home': 50051, 'away': 60609},
         {'home': 52747, 'away': 50051},
         {'home': 50147, 'away': 50051},
         {'home': 52973, 'away': 50051},
         {'home': 59324, 'away': 50051},
         {'home': 52747, 'away': 52280},
         {'home': 52747, 'away': 50031},
         {'home': 52747, 'away': 50123},
         {'home': 50030, 'away': 52747},
         {'home': 52816, 'away': 52747},
         {'home': 77977, 'away': 52747},
         {'home': 52682, 'away': 50147},
         {'home': 52682, 'away': 52973},
         {'home': 52682, 'away': 52277},
         {'home': 50080, 'away': 52682},
         {'home': 2603790, 'away': 52682},
         {'home': 50062, 'away': 52682},
         {'home': 60609, 'away': 52682},
         {'home': 50080, 'away': 50136},
         {'home': 50080, 'away': 52749},
         {'home': 50080, 'away': 52748},
         {'home': 52758, 'away': 50080},
         {'home': 50058, 'away': 50080},
         {'home': 50050, 'away': 50080},
         {'home': 50138, 'away': 52714},
         {'home': 50138, 'away': 2603790},
         {'home': 50138, 'away': 50062},
         {'home': 50138, 'away': 59324},
         {'home': 52707, 'away': 50138},
         {'home': 50069, 'away': 50138},
         {'home': 52277, 'away': 50138},
         {'home': 52714, 'away': 50064},
         {'home': 52714, 'away': 52816},
         {'home': 52714, 'away': 77977},
         {'home': 52280, 'away': 52714},
         {'home': 52709, 'away': 52714},
         {'home': 50113, 'away': 52714},
         {'home': 52758, 'away': 50124},
         {'home': 52758, 'away': 52749},
         {'home': 52758, 'away': 50050},
         {'home': 50136, 'away': 52758},
         {'home': 50058, 'away': 52758},
         {'home': 52748, 'away': 52758},
         {'home': 50124, 'away': 50136},
         {'home': 50124, 'away': 50058},
         {'home': 50124, 'away': 50067},
         {'home': 52336, 'away': 50124},
         {'home': 50050, 'away': 50124},
         {'home': 2603790, 'away': 52707},
         {'home': 2603790, 'away': 50062},
         {'home': 2603790, 'away': 52277},
         {'home': 50147, 'away': 2603790},
         {'home': 52973, 'away': 2603790},
         {'home': 60609, 'away': 2603790},
         {'home': 50147, 'away': 52973},
         {'home': 50147, 'away': 60609},
         {'home': 52707, 'away': 50147},
         {'home': 50069, 'away': 50147},
         {'home': 59324, 'away': 50147},
         {'home': 50136, 'away': 52336},
         {'home': 50136, 'away': 52748},
         {'home': 52749, 'away': 50136},
         {'home': 50067, 'away': 50136},
         {'home': 50064, 'away': 52280},
         {'home': 50064, 'away': 52709},
         {'home': 50064, 'away': 50113},
         {'home': 50030, 'away': 50064},
         {'home': 50031, 'away': 50064},
         {'home': 50123, 'away': 50064},
         {'home': 52280, 'away': 50030},
         {'home': 52280, 'away': 52816},
         {'home': 52280, 'away': 77977},
         {'home': 52709, 'away': 52280},
         {'home': 50113, 'away': 52280},
         {'home': 52707, 'away': 50069},
         {'home': 52707, 'away': 59324},
         {'home': 50062, 'away': 52707},
         {'home': 52277, 'away': 52707},
         {'home': 50030, 'away': 50031},
         {'home': 50030, 'away': 50123},
         {'home': 52816, 'away': 50030},
         {'home': 77977, 'away': 50030},
         {'home': 52816, 'away': 52709},
         {'home': 52816, 'away': 77977},
         {'home': 50031, 'away': 52816},
         {'home': 50113, 'away': 52816},
         {'home': 52749, 'away': 50058},
         {'home': 52749, 'away': 50067},
         {'home': 52336, 'away': 52749},
         {'home': 52748, 'away': 52749},
         {'home': 50058, 'away': 52336},
         {'home': 50058, 'away': 52748},
         {'home': 50050, 'away': 50058},
         {'home': 52336, 'away': 50050},
         {'home': 50067, 'away': 52336},
         {'home': 50062, 'away': 52973},
         {'home': 50062, 'away': 52277},
         {'home': 50069, 'away': 50062},
         {'home': 60609, 'away': 50062},
         {'home': 52973, 'away': 50069},
         {'home': 52973, 'away': 60609},
         {'home': 59324, 'away': 52973},
         {'home': 50069, 'away': 59324},
         {'home': 52277, 'away': 50069},
         {'home': 52709, 'away': 50031},
         {'home': 52709, 'away': 50113},
         {'home': 50123, 'away': 52709},
         {'home': 50031, 'away': 50123},
         {'home': 77977, 'away': 50031},
         {'home': 50123, 'away': 50113},
         {'home': 77977, 'away': 50123},
         {'home': 52748, 'away': 50050},
         {'home': 50067, 'away': 52748},
         {'home': 50050, 'away': 50067},
         {'home': 60609, 'away': 59324},
         {'home': 52277, 'away': 60609},
         {'home': 59324, 'away': 52277},
         {'home': 50113, 'away': 77977}]
# Checker payload: season label plus the 8 fixtures of one team; swap in the
# full `games` list above via the commented key to check a whole draw.
data = {
    "season": "UCL 24/25",
    "games": [
        {
            "home": 50051,
            "away": 52682
        },
        {
            "home": 52747,
            "away": 50051
        },
        {
            "home": 50051,
            "away": 50030
        },
        {
            "home": 52758,
            "away": 50051
        },
        {
            "home": 50051,
            "away": 50031
        },
        {
            "home": 52336,
            "away": 50051
        },
        {
            "home": 50051,
            "away": 50050
        },
        {
            "home": 77977,
            "away": 50051
        }
    ]
    # "games": games,
}
# %%
# POST the payload to the draws/teams checker and time the round trip.
tt = time.time()
# r=requests.post("http://192.168.188.68:8000/api/uefa/v1/checker/",
# r=requests.post("http://optimiza/draws/teams/",
import requests
# NOTE(review): hard-coded bearer tokens in this cell and below -- move to
# environment variables / a secrets store.
r=requests.post("https://optimization.ligalytics.com/draws/teams/",
                headers={"Authorization": "R6v1e9Q5W8aS3b7C4x2KpZqL9yFmXnDz"},
                json=data)
print(r)
print(r.json())
print(time.time()-tt)
# %%
# List court-optimization tasks on a local server.
r=requests.get("http://localhost:8000/api/court/tasks2/",
               headers={"Authorization": "test"},
               json=[])
print(r)
r.json()
# %%
# Submit a court-optimization task from a local JSON payload.
import requests
import json
filename= "data.json"
with open(filename) as json_file:
    data = json.load(json_file)
# %%
res = requests.post("https://optimization.ligalytics.com/api/court/task/",
                    headers={"Authorization": "5f34a8d917c6b927f4ab0393c7e12d35"},
                    json=data)
task_id = res.json()['task_id']
# %%
# Trigger a draw-schedule run for a test season.
data = {
    'season': "UWCL 24/25 Test",
}
r=requests.post(f"https://optimization.ligalytics.com/draws/schedule/",
                headers={"Authorization": "R6v1e9Q5W8aS3b7C4x2KpZqL9yFmXnDz"},
                json=data)
print(r.json())
# %%
# %%

View File

@ -0,0 +1,196 @@
# %%
# Exploration script for the UEFA digital API: competitions, seasons, teams,
# matchdays, coefficients and matches.
import requests
import json
# NOTE(review): hard-coded subscription keys -- keep out of version control
# and rotate if this file has been shared.
prod_url = 'https://uefadigitalapi.developer.azure-api.net'
prod_primary_key = '7dfa861240aa40f8a834990c24f1a66d'
prod_secondary_key = '4451dcc1ad4f41b2aa6af96cc5a1256a'
pre_url = 'https://uefadigitalapipre.developer.azure-api.net'
pre_primary_key = '1decf93425944f8b9e6dc7226a3b8477'
pre_secondary_key = '14771f5c67b74836a59f777cb543cc0f'
""" GET ALL COMPETITIONS """
# %%
r=requests.get("https://api.digital.uefa.com/comp/v2/competitions", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
result = r.json()
# NOTE(review): the loop variable shadows the response object `r` above.
for r in result:
    print(r['id'],r['metaData']['name'])
# %%
""" All seasons for a single competition """
# competitionId = 1 # Champions League
# competitionId = 14 # Europa League
competitionId = 2014
r=requests.get(f"https://api.digital.uefa.com/comp/v2/competitions/{competitionId}/seasons", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
result = r.json()
# for r in result:
#     print(r['id'],r['name'])
result
# %%
""" Single Season """
competitionId = 1 # Champions League
seasonYear = 2025
r=requests.get(f"https://api.digital.uefa.com/comp/v2/competitions/{competitionId}/seasons/{seasonYear}", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
r.json()
# %%
""" Competition Structure by filter """
competitionId = 1 # Champions League
seasonYears = 2025
r=requests.get(f"https://api.digital.uefa.com/comp/v2/competition-structure?competitionId={competitionId}&seasonYears={seasonYears}", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
r.json()
# %%
""" Team by filter or ids """
# https://api.digital.uefa.com/comp/v2/teams[?teamIds][&roundIds][&competitionId][&seasonYear][&phase][&associationId][&teamType][&offset][&limit]
competitionId = 1 # Champions League
# competitionId = 14 # Europa League
# competitionId = 2014 # Europa League
seasonYear = 2025
r=requests.get(f"https://api.digital.uefa.com/comp/v2/teams?competitionId={competitionId}&seasonYear={seasonYear}&phase=ALL&offset=1&limit=200", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
result = r.json()
# write json file
json.dump(result, open('uefa_teams.json','w'))
# print(result)
# for r in result:
#     print(r['id'],r['translations']['displayOfficialName']['EN'],r['teamCode'])
#     print("\t",r)
# %%
""" Matchdays by filter """
# https://api.digital.uefa.com/comp/v2/matchdays[?competitionId][&seasonYear][&matchdayIds]
competitionId = 1 # Champions League
seasonYear = 2025
r=requests.get(f"https://api.digital.uefa.com/comp/v2/matchdays?competitionId={competitionId}&seasonYear={seasonYear}", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
result = r.json()
result
# %%
""" Latest coefficient """
seasonYear = 2024
coeff_res = requests.get(url=f"https://api.digital.uefa.com/comp/v2/coefficients?coefficientType=MEN_CLUB&coefficientRange=OVERALL&seasonYear={seasonYear}&pagesize=500", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
for coeff_json in coeff_res.json()['data']['members']:
    print(coeff_json['member']['displayOfficialName'], coeff_json['overallRanking'])
# %%%
# %%
# %%
r=requests.get("https://api.digital.uefa.com/comp/v2/competitions/1/seasons", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
# %%
r.json()
# %%
# r=requests.get("https://api.pre.digital.uefa.com/comp/v2/competitions/1/seasons", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"14771f5c67b74836a59f777cb543cc0f"})
# # %%
# r.json()
# # %%
# TEAM by filter or ids
# https://api.digital.uefa.com/comp/v2/teams[?teamIds][&roundIds][&competitionId][&seasonYear][&phase][&associationId][&teamType][&offset][&limit]
r=requests.get("https://api.digital.uefa.com/comp/v2/teams?teamIds=50037", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
# %%
r.json()
# %%
r=requests.get("https://api.digital.uefa.com/comp/v2/stadiums?limit=1&offset=1", headers={"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"})
# %%
r.json()
# %%
# Team ids (and the same list as a comma-joined query string) for coefficient lookups.
list_teams = [52919,50080,52973,50031,50123,52749,59324,77977,52277,52682,50138,50069,50136,50050,50058,50062,52709,52816,52707,7889,52714,52280,52748,50067,50124,60609,50113,50147,2603790,52336,50037,50030,50064,52758,52747,50051]
get_teams = "52919,50080,52973,50031,50123,52749,59324,77977,52277,52682,50138,50069,50136,50050,50058,50062,52709,52816,52707,7889,52714,52280,52748,50067,50124,60609,50113,50147,2603790,52336,50037,50030,50064,52758,52747,50051"
uefa_coefficients_url = 'https://api.digital.uefa.com/comp/v2/coefficients?coefficientType=MEN_CLUB&coefficientRange=SEASONAL&seasonYear=2024&memberId='
uefa_teams_url = 'https://api.digital.uefa.com/comp/v2/teams?teamIds='+get_teams
headers = {"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"}
# %%
res = requests.get(uefa_teams_url, headers=headers)
# %%
res.json()
# %%
# One coefficient request per team (N+1 calls -- fine for exploration).
for team in res.json():
    print(team['id'],team['translations']['displayOfficialName']['EN'],team['teamCode'])
    coefficient = requests.get(uefa_coefficients_url+str(team['id']), headers=headers)
    print(coefficient.json()['data']['members'][0]['seasonRankings'])
# %%
# res.json()
# # %%
# for r in res.json()['data']['members']:
#     print(r['member']['id'],r['overallRanking']['position'],)
# # %%
uefa_match_url = "https://api.digital.uefa.com/match/v5/matches?offset=0&limit=10&order=DESC&competitionId=1&seasonYear=2024&seasonsRange=ALL&phase=ALL"
headers = {"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"}
# %%
res = requests.get(uefa_match_url, headers=headers)
# %%
res.json()
# # %%
### TEAMS
# GET https://api.digital.uefa.com/comp/v2/teams?competitionId=1&seasonYear=2024&phase=ALL&offset=1&limit=500 HTTP/1.1
# %%
""" POSITIONS """
uefa_api_prefix = 'https://api.digital.uefa.com/comp/v2'
seasonYear = 2025
uefa_coefficients_url = f'{uefa_api_prefix}/coefficients?coefficientType=MEN_CLUB&coefficientRange=OVERALL&seasonYear={seasonYear}&pagesize=500'
headers = {"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":"7dfa861240aa40f8a834990c24f1a66d"}
coeff_res = requests.get(uefa_coefficients_url, headers=headers)
changed= []
for coeff_json in coeff_res.json()['data']['members']:
    print(coeff_json['member']['id'], coeff_json['overallRanking']['totalValue'])
# %%

View File

@ -0,0 +1,39 @@
# %%
# Dump all finished matches of a season to uefa_match.json, then query the
# livescore endpoint for the same competition.
import requests
import json
# NOTE(review): hard-coded subscription keys -- move to environment variables
# or a secrets store, and rotate if this file has been shared.
prod_url = 'https://uefadigitalapi.developer.azure-api.net'
prod_primary_key = '9b849959b7aa40b9b1f966254c22fc6e'
prod_secondary_key = '116b2865175141e6955e6c83fe4651ae'
pre_url = 'https://uefadigitalapipre.developer.azure-api.net'
pre_primary_key = 'b6bdd59422614fa9932c03fb16666067'
pre_secondary_key = '4e61efa534f94f38a4c96ba9bc00f444'
headers = {"Cache-Control":"no-cache","Ocp-Apim-Subscription-Key":prod_primary_key}
# %%
competitionId = 1 # Champions League
seasonYear = 2025
uefa_match_url = f"https://api.digital.uefa.com/match/v5/matches?offset=0&limit=500&status=FINISHED&order=DESC&competitionId={competitionId}&seasonYear={seasonYear}&seasonsRange=ALL&phase=ALL"
res = requests.get(uefa_match_url, headers=headers)
# fixed: close the output file deterministically (was json.dump(..., open(...))).
with open("uefa_match.json", "w") as out_file:
    json.dump(res.json(), out_file)
# # %%
# %%
"""
Match live score
Try it
Provide details about the score of a collection of matches given a number of conditions.
"""
# fixed: the query string must start with '?', not '&' -- the previous URL
# ("livescore&competitionId=...") was malformed and dropped the filter.
uefa_match_url = f"https://api.digital.uefa.com/match/v5/livescore?competitionId={competitionId}"
res = requests.get(uefa_match_url, headers=headers)
res.json()
# %%