 )
 from kernelbot.env import env
 from kernelbot.ui.misc import ConfirmationView, DeleteConfirmationModal, GPUSelectionView
-from libkernelbot.consts import GitHubGPU, ModalGPU, SubmissionMode, get_gpu_by_name
+from libkernelbot.consts import GitHubGPU, ModalGPU, get_gpu_by_name
 from libkernelbot.leaderboard_db import LeaderboardDoesNotExist, LeaderboardItem, SubmissionItem
-from libkernelbot.submission import compute_score
-from libkernelbot.task import LeaderboardDefinition, make_task_definition
+from libkernelbot.task import LeaderboardDefinition, LeaderboardTask, make_task_definition
 from libkernelbot.utils import (
     KernelBotError,
     setup_logging,
@@ -401,7 +400,7 @@ async def _submit_milestones(
     leaderboard_item = db.get_leaderboard(leaderboard_name)
     milestones = db.get_leaderboard_milestones(leaderboard_item["id"])
 
-    task: "LeaderboardTask" = leaderboard_item["task"]
+    task: LeaderboardTask = leaderboard_item["task"]
 
     # ok, submit all that are missing
     submit_tasks = []
@@ -410,50 +409,6 @@ async def _submit_milestones(
     reporters = MultiProgressReporterDiscord(interaction)
     await reporters.show(f"Milestone runs for {leaderboard_name}")
 
-    async def submit_milestone(milestone, gpu, reporter):
-        result = await backend.submit_leaderboard(
-            -1,
-            milestone["code"],
-            "milestone.py",
-            gpu,
-            reporter,
-            task,
-            SubmissionMode.LEADERBOARD,
-            None,
-        )
-
-        # we do not allow milestone runs to fail
-        if not result.success:
-            logger.error(f"Milestone run failed: {result}")
-            raise KernelBotError(f"Milestone run failed: {result.error}")
-
-        for key, value in result.runs.items():
-            if not value.run.success or not value.run.passed:
-                logger.error(f"Milestone run {key} failed: {value}")
-                raise KernelBotError(f"Milestone run {key} failed.")
-
-        with backend.db as db:
-            for key, value in result.runs.items():
-                # Only store LB runs in the database;
-                # we still want to run test/benchmark to validate
-                # that the code actually passes, but for all other
-                # purposes we only need the leaderboard run
-                if key != SubmissionMode.LEADERBOARD.value:
-                    continue
-
-                db.create_submission_run(
-                    milestone=milestone["id"],
-                    start=value.start,
-                    end=value.end,
-                    mode=key,
-                    runner=gpu.name,
-                    score=compute_score(result, task, -1),
-                    secret=False,
-                    compilation=value.compilation,
-                    result=value.run,
-                    system=result.system,
-                )
-
     if gpus is None:
         gpus = leaderboard_item["gpu_types"]
 
@@ -479,8 +434,9 @@ async def submit_milestone(milestone, gpu, reporter):
                continue
 
            submit_tasks.append(
-                submit_milestone(
+                backend.submit_milestone_run(
                    milestone,
+                    task,
                    get_gpu_by_name(gpu),
                    reporters.add_run(f"Milestone {milestone['name']} on {gpu}"),
                )
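
The inline submit_milestone coroutine is dropped from the cog and replaced by a call to backend.submit_milestone_run, with the leaderboard task now passed explicitly. The backend method itself is not part of this hunk; the following is only a sketch of what it presumably contains, assuming the deleted logic above moved over largely unchanged. The method name and its four arguments come from the new call site; everything else is reconstructed from the removed lines rather than from the actual backend source.

# Sketch (assumption): backend-side equivalent of the removed cog helper.
async def submit_milestone_run(self, milestone, task, gpu, reporter):
    result = await self.submit_leaderboard(
        -1,
        milestone["code"],
        "milestone.py",
        gpu,
        reporter,
        task,
        SubmissionMode.LEADERBOARD,
        None,
    )

    # milestone runs are not allowed to fail
    if not result.success:
        raise KernelBotError(f"Milestone run failed: {result.error}")
    for key, value in result.runs.items():
        if not value.run.success or not value.run.passed:
            raise KernelBotError(f"Milestone run {key} failed.")

    with self.db as db:
        for key, value in result.runs.items():
            # only the leaderboard run is stored; test/benchmark runs
            # exist just to validate that the code passes
            if key != SubmissionMode.LEADERBOARD.value:
                continue
            db.create_submission_run(
                milestone=milestone["id"],
                start=value.start,
                end=value.end,
                mode=key,
                runner=gpu.name,
                score=compute_score(result, task, -1),
                secret=False,
                compilation=value.compilation,
                result=value.run,
                system=result.system,
            )

Consistent with this move, SubmissionMode and compute_score disappear from the cog's imports at the top of the diff, since the only code in this module that used them is the deleted helper.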