AI Driving Olympics

Submission 683

Submission: 683
Competing: yes
Challenge: aido1_LF1_r3-v3
User: PolyProgrammist
Date submitted:
Complete: complete
Result: success
Jobs: step1-simulation: 13322, step2-scoring: 13334, step3-videos: 13338, step4-viz: 13340
User label: Random execution
Admin priority: 50
User priority: 50

Visualizations from job 13340 (step4-viz) and videos from job 13338 (step3-videos). Click the images to see detailed statistics about the episode.

Evaluation jobs for this submission

Job ID | submission | user | user label | challenge | step | status | up to date | evaluator | date started | date completed | duration | message
13340 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step4-viz | success | yes | - | 0:01:01 | (hidden)
driven_lanedir_median: 0.4588238764648154
deviation-center-line_median: 0.08387633757269329
in-drivable-lane_median: 0


other stats
deviation-center-line_max: 0.16214663633975826
deviation-center-line_mean: 0.08224980242295568
deviation-center-line_min: 0
deviation-heading_max: 0.4614232131257285
deviation-heading_mean: 0.15896474455912815
deviation-heading_median: 0.11816017424377774
deviation-heading_min: 0
driven_any_max: 1.0925087447738568
driven_any_mean: 0.6866789261664665
driven_any_median: 0.687610151486988
driven_any_min: 0.4068017245624871
driven_lanedir_max: 0.7159536385390499
driven_lanedir_mean: 0.4528439481215714
driven_lanedir_min: 0
in-drivable-lane_max: 2.6333333333333337
in-drivable-lane_mean: 0.5333333333333334
in-drivable-lane_min: 0
per-episodes details: {"ep000": {"driven_any": 1.0925087447738568, "driven_lanedir": 0, "in-drivable-lane": 2.6333333333333337, "deviation-heading": 0, "deviation-center-line": 0}, "ep001": {"driven_any": 0.4068017245624871, "driven_lanedir": 0.4028082593757336, "in-drivable-lane": 0, "deviation-heading": 0.13829485546634993, "deviation-center-line": 0.08387633757269329}, "ep002": {"driven_any": 0.7848694397437411, "driven_lanedir": 0.7159536385390499, "in-drivable-lane": 0.033333333333333326, "deviation-heading": 0.4614232131257285, "deviation-center-line": 0.0656286104829346}, "ep003": {"driven_any": 0.4616045702652595, "driven_lanedir": 0.4588238764648154, "in-drivable-lane": 0, "deviation-heading": 0.11816017424377774, "deviation-center-line": 0.09959742771939224}, "ep004": {"driven_any": 0.687610151486988, "driven_lanedir": 0.6866339662282586, "in-drivable-lane": 0, "deviation-heading": 0.0769454799597845, "deviation-center-line": 0.16214663633975826}}
13338 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step3-videos | success | yes | - | 0:00:38 | (hidden)
other stats
videos: 1
13334 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step2-scoring | success | yes | - | 0:00:22 | (hidden)
survival_time_median: 1.700000000000002


other stats
episodes details: {"ep000": {"nsteps": 80, "reward": -13.10959108150564, "good_angle": 1.1303105967825406, "survival_time": 2.666666666666667, "traveled_tiles": 2, "valid_direction": 0.8999999999999981}, "ep001": {"nsteps": 32, "reward": -30.874048120807856, "good_angle": 0.021143003410554143, "survival_time": 1.0666666666666669, "traveled_tiles": 2, "valid_direction": 0}, "ep002": {"nsteps": 58, "reward": -16.969534671640602, "good_angle": 0.4629848321157944, "survival_time": 1.933333333333336, "traveled_tiles": 2, "valid_direction": 0.8666666666666694}, "ep003": {"nsteps": 35, "reward": -28.229826492922648, "good_angle": 0.014716235462878669, "survival_time": 1.166666666666667, "traveled_tiles": 1, "valid_direction": 0}, "ep004": {"nsteps": 51, "reward": -19.31553161224606, "good_angle": 0.007535142035184924, "survival_time": 1.700000000000002, "traveled_tiles": 3, "valid_direction": 0}}
good_angle_max: 1.1303105967825406
good_angle_mean: 0.3273379619613906
good_angle_median: 0.021143003410554143
good_angle_min: 0.007535142035184924
reward_max: -13.10959108150564
reward_mean: -21.69970639582456
reward_median: -19.31553161224606
reward_min: -30.874048120807856
survival_time_max: 2.666666666666667
survival_time_mean: 1.7066666666666677
survival_time_min: 1.0666666666666669
traveled_tiles_max: 3
traveled_tiles_mean: 2
traveled_tiles_median: 2
traveled_tiles_min: 1
valid_direction_max: 0.8999999999999981
valid_direction_mean: 0.3533333333333335
valid_direction_median: 0
valid_direction_min: 0
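Note (an inference from the numbers above, not stated on this page): survival_time appears to equal nsteps / 30, i.e. the simulator advances at 30 steps per second. A quick check of that relation against ep000 and ep004 in the episodes details above, with the 30 Hz rate an explicit assumption:

# Assumed 30 Hz step rate; verifies survival_time == nsteps / 30 for ep000 and ep004.
STEPS_PER_SECOND = 30
assert abs(80 / STEPS_PER_SECOND - 2.666666666666667) < 1e-9
assert abs(51 / STEPS_PER_SECOND - 1.700000000000002) < 1e-9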
13322 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step1-simulation | success | yes | - | 0:03:15 | (hidden)
other stats
simulation-passed: 1
7156 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step4-viz | success | yes | - | 0:02:35 | (hidden)
driven_lanedir_median: 1.11976834341689
deviation-center-line_median: 0.5737459406644397
in-drivable-lane_median: 0.29999999999999893


other stats
deviation-center-line_max: 0.908204191178218
deviation-center-line_mean: 0.5481847924042389
deviation-center-line_min: 0.1991546181946653
deviation-heading_max: 3.691894122516046
deviation-heading_mean: 1.4407653851820534
deviation-heading_median: 0.837739463253585
deviation-heading_min: 0.17848144019358284
driven_any_max: 1.8552811847987651
driven_any_mean: 1.1021064768261375
driven_any_median: 1.1802121081416013
driven_any_min: 0.3567746062291797
driven_lanedir_max: 1.8536665441363724
driven_lanedir_mean: 0.9810180387720512
driven_lanedir_min: 0.13986396232039222
in-drivable-lane_max: 0.7333333333333307
in-drivable-lane_mean: 0.32666666666666594
in-drivable-lane_min: 0
per-episodes details: {"ep000": {"driven_any": 0.3567746062291797, "driven_lanedir": 0.13986396232039222, "in-drivable-lane": 0.6, "deviation-heading": 2.0288186211042474, "deviation-center-line": 0.1991546181946653}, "ep001": {"driven_any": 0.5128492001152548, "driven_lanedir": 0.5121899304077624, "in-drivable-lane": 0, "deviation-heading": 0.17848144019358284, "deviation-center-line": 0.4032591231709126}, "ep002": {"driven_any": 1.1802121081416013, "driven_lanedir": 1.11976834341689, "in-drivable-lane": 0.29999999999999893, "deviation-heading": 0.837739463253585, "deviation-center-line": 0.6565600888129588}, "ep003": {"driven_any": 1.8552811847987651, "driven_lanedir": 1.8536665441363724, "in-drivable-lane": 0, "deviation-heading": 0.4668932788428053, "deviation-center-line": 0.908204191178218}, "ep004": {"driven_any": 1.6054152848458865, "driven_lanedir": 1.2796014135788387, "in-drivable-lane": 0.7333333333333307, "deviation-heading": 3.691894122516046, "deviation-center-line": 0.5737459406644397}}
7152 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step3-videos | success | yes | - | 0:00:47 | (hidden)
other stats
videos: 1
7151 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step2-scoring | success | yes | - | 0:00:20 | (hidden)
survival_time_median: 8.466666666666647


other stats
episodes details: {"ep000": {"nsteps": 78, "reward": -13.07956148016577, "good_angle": 1.2428026350857364, "survival_time": 2.6000000000000005, "traveled_tiles": 1, "valid_direction": 2.166666666666667}, "ep001": {"nsteps": 111, "reward": -9.58020701011022, "good_angle": 0.009460689892145484, "survival_time": 3.6999999999999966, "traveled_tiles": 1, "valid_direction": 0}, "ep002": {"nsteps": 254, "reward": -4.598547526818561, "good_angle": 0.9412499476619224, "survival_time": 8.466666666666647, "traveled_tiles": 3, "valid_direction": 2.466666666666659}, "ep003": {"nsteps": 400, "reward": -2.844978074319661, "good_angle": 0.16332608919792543, "survival_time": 13.333333333333297, "traveled_tiles": 4, "valid_direction": 0.5666666666666647}, "ep004": {"nsteps": 346, "reward": -3.5109905174373504, "good_angle": 20.418845791177297, "survival_time": 11.533333333333305, "traveled_tiles": 3, "valid_direction": 3.9999999999999862}}
good_angle_max: 20.418845791177297
good_angle_mean: 4.555137030603005
good_angle_median: 0.9412499476619224
good_angle_min: 0.009460689892145484
reward_max: -2.844978074319661
reward_mean: -6.722856921770313
reward_median: -4.598547526818561
reward_min: -13.07956148016577
survival_time_max: 13.333333333333297
survival_time_mean: 7.926666666666648
survival_time_min: 2.6000000000000005
traveled_tiles_max: 4
traveled_tiles_mean: 2.4
traveled_tiles_median: 3
traveled_tiles_min: 1
valid_direction_max: 3.9999999999999862
valid_direction_mean: 1.8399999999999956
valid_direction_median: 2.166666666666667
valid_direction_min: 0
7141 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step1-simulation | success | no | - | 0:02:02 | (hidden)
other stats
simulation-passed: 1
6682 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step1-simulation | aborted | no | - | 0:00:01
Uncaught exception:
Traceback (most recent call last):
  File "/project/src/duckietown_challenges_runner/runner.py", line 353, in go_
    os.makedirs(wd)
  File "/usr/lib/python2.7/os.py", line 157, in makedirs
    mkdir(name, mode)
OSError: [Errno 28] No space left on device: '/tmp/duckietown/DT18/evaluator/executions/aido1_LF1_r3-v3/submission683/step1-simulation-nutonomy-P50-2110-job6682'
(hidden)
6681 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step1-simulation | aborted | no | - | 0:00:00
Uncaught exception:
Traceback (most recent call last):
  File "/project/src/duckietown_challenges_runner/runner.py", line 353, in go_
    os.makedirs(wd)
  File "/usr/lib/python2.7/os.py", line 157, in makedirs
    mkdir(name, mode)
OSError: [Errno 28] No space left on device: '/tmp/duckietown/DT18/evaluator/executions/aido1_LF1_r3-v3/submission683/step1-simulation-nutonomy-P50-2570-job6681'
(hidden)
6481 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step1-simulation | aborted | no | - | 0:00:00
Uncaught exception:
Traceback (most recent call last):
  File "/project/src/duckietown_challenges_runner/runner.py", line 353, in go_
    os.makedirs(wd)
  File "/usr/lib/python2.7/os.py", line 157, in makedirs
    mkdir(name, mode)
OSError: [Errno 28] No space left on device: '/tmp/duckietown/DT18/evaluator/executions/aido1_LF1_r3-v3/submission683/step1-simulation-nutonomy-P50-2210-job6481'
(hidden)
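The three aborted jobs above all failed at the same point: runner.py calls os.makedirs for the per-job working directory under /tmp/duckietown and the evaluator machine had no free disk space (OSError, errno 28). As an illustration only, and not the actual runner.py behaviour, here is a sketch of how such a step could check free space first and turn ENOSPC into a clearer error; the function name and the 1 GiB threshold are made up.

import errno
import os

def makedirs_with_space_check(wd, min_free_bytes=1 << 30):
    # Probe the closest existing ancestor so os.statvfs has a valid path to query.
    probe = wd
    while not os.path.exists(probe):
        parent = os.path.dirname(probe) or "."
        if parent == probe:
            break
        probe = parent
    st = os.statvfs(probe)
    free_bytes = st.f_bavail * st.f_frsize
    if free_bytes < min_free_bytes:
        raise RuntimeError("only %d bytes free under %s; refusing to create %s"
                           % (free_bytes, probe, wd))
    try:
        os.makedirs(wd)
    except OSError as e:
        # Report a full disk explicitly instead of letting the exception go uncaught.
        if e.errno == errno.ENOSPC:
            raise RuntimeError("no space left on device while creating %s" % wd)
        raise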
5375 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step4-viz | success | no | - | 0:01:08 | (hidden)
other stats
videos: 1
5374 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step3-videos | success | no | - | 0:00:56 | (hidden)
other stats
videos: 1
5373 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step2-scoring | success | no | - | 0:02:24 | (hidden)
survival_time_median: 9.099999999999978


other stats
episodes details: {"ep000": {"nsteps": 171, "reward": -7.906903265163913, "good_angle": 23.167319904536384, "survival_time": 5.6999999999999895, "traveled_tiles": 2, "valid_direction": 3.9666666666666544}, "ep001": {"nsteps": 383, "reward": -3.2070510478580236, "good_angle": 16.473773167522435, "survival_time": 12.766666666666632, "traveled_tiles": 4, "valid_direction": 4.033333333333319}, "ep002": {"nsteps": 500, "reward": -0.006893814329640009, "good_angle": 0.49274114419548326, "survival_time": 16.666666666666654, "traveled_tiles": 5, "valid_direction": 1.2333333333333645}, "ep003": {"nsteps": 235, "reward": -6.704229172239912, "good_angle": 19.915258517632104, "survival_time": 7.833333333333315, "traveled_tiles": 3, "valid_direction": 3.099999999999989}, "ep004": {"nsteps": 273, "reward": -5.652174131810583, "good_angle": 5.997302694493859, "survival_time": 9.099999999999978, "traveled_tiles": 3, "valid_direction": 3.09999999999999}}
good_angle_max: 23.167319904536384
good_angle_mean: 13.209279085676052
good_angle_median: 16.473773167522435
good_angle_min: 0.49274114419548326
reward_max: -0.006893814329640009
reward_mean: -4.695450286280414
reward_median: -5.652174131810583
reward_min: -7.906903265163913
survival_time_max: 16.666666666666654
survival_time_mean: 10.413333333333314
survival_time_min: 5.6999999999999895
traveled_tiles_max: 5
traveled_tiles_mean: 3.4
traveled_tiles_median: 3
traveled_tiles_min: 2
valid_direction_max: 4.033333333333319
valid_direction_mean: 3.0866666666666633
valid_direction_median: 3.09999999999999
valid_direction_min: 1.2333333333333645
5372 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step2-scoring | success | no | - | 0:02:17 | (hidden)
survival_time_median: 9.099999999999978


other stats
episodes details: {"ep000": {"nsteps": 171, "reward": -7.906903265163913, "good_angle": 23.167319904536384, "survival_time": 5.6999999999999895, "traveled_tiles": 2, "valid_direction": 3.9666666666666544}, "ep001": {"nsteps": 383, "reward": -3.2070510478580236, "good_angle": 16.473773167522435, "survival_time": 12.766666666666632, "traveled_tiles": 4, "valid_direction": 4.033333333333319}, "ep002": {"nsteps": 500, "reward": -0.006893814329640009, "good_angle": 0.49274114419548326, "survival_time": 16.666666666666654, "traveled_tiles": 5, "valid_direction": 1.2333333333333645}, "ep003": {"nsteps": 235, "reward": -6.704229172239912, "good_angle": 19.915258517632104, "survival_time": 7.833333333333315, "traveled_tiles": 3, "valid_direction": 3.099999999999989}, "ep004": {"nsteps": 273, "reward": -5.652174131810583, "good_angle": 5.997302694493859, "survival_time": 9.099999999999978, "traveled_tiles": 3, "valid_direction": 3.09999999999999}}
good_angle_max: 23.167319904536384
good_angle_mean: 13.209279085676052
good_angle_median: 16.473773167522435
good_angle_min: 0.49274114419548326
reward_max: -0.006893814329640009
reward_mean: -4.695450286280414
reward_median: -5.652174131810583
reward_min: -7.906903265163913
survival_time_max: 16.666666666666654
survival_time_mean: 10.413333333333314
survival_time_min: 5.6999999999999895
traveled_tiles_max: 5
traveled_tiles_mean: 3.4
traveled_tiles_median: 3
traveled_tiles_min: 2
valid_direction_max: 4.033333333333319
valid_direction_mean: 3.0866666666666633
valid_direction_median: 3.09999999999999
valid_direction_min: 1.2333333333333645
5371 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step1-simulation | success | no | - | 0:02:05 | (hidden)
other stats
simulation-passed: 1
5370 | 683 | PolyProgrammist | Random execution | aido1_LF1_r3-v3 | step1-simulation | success | no | - | 0:02:08 | (hidden)
other stats
simulation-passed: 1