58248 | LFv-sim | success | yes | | | 0:25:19 | | Artefacts hidden. | driven_lanedir_consec_median | 6.031820652770268 | survival_time_median | 49.9999999999993 | deviation-center-line_median | 2.890422675124041 | in-drivable-lane_median | 3.4499999999998687 |
other stats:

| metric | min | median | mean | max |
| agent_compute-ego0 | 0.01213098763426972 | 0.012284028484818316 | 0.012276380741659668 | 0.012406478362732317 |
| complete-iteration | 0.15429852268876482 | 0.17495263801231964 | 0.17552726892415405 | 0.1979052769832122 |
| deviation-center-line | 0.4697271456881783 | 2.890422675124041 | 2.5023341170384743 | 3.758763972217636 |
| deviation-heading | 2.0595615196778807 | 9.078803948993649 | 8.260324347972649 | 12.82412797422542 |
| driven_any | 3.185204759909409 | 9.015676038053552 | 8.223934584604503 | 11.679181502401502 |
| driven_lanedir | 1.4175505869527705 | 8.41635427673622 | 7.387417596565012 | 11.29941124583484 |
| driven_lanedir_consec | 1.4175505869527705 | 6.031820652770268 | 5.146472905121384 | 7.104699727992231 |
| get_duckie_state | 2.056769515095876e-06 | 2.22553352004613e-06 | 2.202856139607719e-06 | 2.30358800324274e-06 |
| get_robot_state | 0.003565780933086689 | 0.003668067316877763 | 0.003676816666338264 | 0.003805351098510844 |
| get_state_dump | 0.004552084971696902 | 0.0045920555990656385 | 0.004611894442696353 | 0.004711381600957231 |
| get_ui_image | 0.02554368158859774 | 0.030282848125690973 | 0.030095626159444507 | 0.03427312679779835 |
| in-drivable-lane | 0.70000000000001 | 3.4499999999998687 | 4.774999999999968 | 11.500000000000124 |
| set_robot_commands | 0.0020778521513327573 | 0.00215436455686477 | 0.002163355012966205 | 0.002266838786802522 |
| sim_compute_performance-ego0 | 0.0019014523579524113 | 0.0019507061879117456 | 0.001969638538597157 | 0.002075689420612726 |
| sim_compute_sim_state | 0.006277977874336592 | 0.010336286396716514 | 0.009563543509074592 | 0.01130362336852875 |
| sim_render-ego0 | 0.003664032006875063 | 0.003721845251222553 | 0.003737529157253709 | 0.003842394119694668 |
| step_physics | 0.09404513798188806 | 0.10497559567113408 | 0.10734703522877112 | 0.1253918115909283 |
| survival_time | 19.45000000000014 | 49.9999999999993 | 44.862499999999365 | 59.99999999999873 |

simulation-passed | 1

per-episodes | details:
LF-norm-loop-000-ego0: {"driven_any": 11.679181502401502, "get_ui_image": 0.028943001281014093, "step_physics": 0.10325940344156968, "survival_time": 59.99999999999873, "driven_lanedir": 11.29941124583484, "get_state_dump": 0.004711381600957231, "get_robot_state": 0.003805351098510844, "sim_render-ego0": 0.003842394119694668, "get_duckie_state": 2.30358800324274e-06, "in-drivable-lane": 0.70000000000001, "deviation-heading": 11.522751178524354, "agent_compute-ego0": 0.012376527206586064, "complete-iteration": 0.17183745850333565, "set_robot_commands": 0.002266838786802522, "deviation-center-line": 3.751344232044408, "driven_lanedir_consec": 6.253869021466859, "sim_compute_sim_state": 0.010468882386829336, "sim_compute_performance-ego0": 0.002075689420612726}
LF-norm-zigzag-000-ego0: {"driven_any": 3.185204759909409, "get_ui_image": 0.03427312679779835, "step_physics": 0.1253918115909283, "survival_time": 19.45000000000014, "driven_lanedir": 1.4175505869527705, "get_state_dump": 0.004552084971696902, "get_robot_state": 0.003565780933086689, "sim_render-ego0": 0.003664032006875063, "get_duckie_state": 2.1659410916841942e-06, "in-drivable-lane": 11.500000000000124, "deviation-heading": 2.0595615196778807, "agent_compute-ego0": 0.012191529763050568, "complete-iteration": 0.1979052769832122, "set_robot_commands": 0.0020778521513327573, "deviation-center-line": 0.4697271456881783, "driven_lanedir_consec": 1.4175505869527705, "sim_compute_sim_state": 0.010203690406603691, "sim_compute_performance-ego0": 0.0019014523579524113}
LF-norm-techtrack-000-ego0: {"driven_any": 7.531352203733553, "get_ui_image": 0.03162269497036785, "step_physics": 0.10669178790069848, "survival_time": 39.999999999999865, "driven_lanedir": 6.810000834221187, "get_state_dump": 0.004589244816335995, "get_robot_state": 0.0036260673913467542, "sim_render-ego0": 0.0036889935254157706, "get_duckie_state": 2.056769515095876e-06, "in-drivable-lane": 4.89999999999978, "deviation-heading": 6.634856719462939, "agent_compute-ego0": 0.012406478362732317, "complete-iteration": 0.17806781752130363, "set_robot_commands": 0.0021248685286732647, "deviation-center-line": 2.029501118203675, "driven_lanedir_consec": 5.809772284073675, "sim_compute_sim_state": 0.01130362336852875, "sim_compute_performance-ego0": 0.0019284964500741568}
LF-norm-small_loop-000-ego0: {"driven_any": 10.499999872373552, "get_ui_image": 0.02554368158859774, "step_physics": 0.09404513798188806, "survival_time": 59.99999999999873, "driven_lanedir": 10.022707719251253, "get_state_dump": 0.0045948663817952816, "get_robot_state": 0.003710067242408771, "sim_render-ego0": 0.003754696977029335, "get_duckie_state": 2.285125948408065e-06, "in-drivable-lane": 1.9999999999999576, "deviation-heading": 12.82412797422542, "agent_compute-ego0": 0.01213098763426972, "complete-iteration": 0.15429852268876482, "set_robot_commands": 0.002183860585056276, "deviation-center-line": 3.758763972217636, "driven_lanedir_consec": 7.104699727992231, "sim_compute_sim_state": 0.006277977874336592, "sim_compute_performance-ego0": 0.001972915925749335}
| No reset possible |
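The aggregate rows above (min / median / mean / max for each metric) follow directly from the per-episodes block. A minimal sketch of that aggregation in Python, assuming the per-episodes JSON has been parsed into a dict named per_episodes; this is only an illustration, not the evaluator's actual code:

```python
import statistics

def aggregate(per_episodes: dict) -> dict:
    """Collect each metric across episodes and compute min/median/mean/max."""
    by_metric: dict = {}
    for episode_stats in per_episodes.values():
        for name, value in episode_stats.items():
            by_metric.setdefault(name, []).append(value)
    return {
        name: {
            "min": min(values),
            "median": statistics.median(values),
            "mean": statistics.mean(values),
            "max": max(values),
        }
        for name, values in by_metric.items()
    }

# Example: the headline score driven_lanedir_consec_median is the median over
# the four episodes: median(6.2539, 1.4176, 5.8098, 7.1047) ~= 6.0318.
```

With four episodes, statistics.median averages the two middle values, which is why the medians above do not coincide with any single episode's value.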
58246 | LFv-sim | host-error | yes | | | 0:31:11 | Uncaught exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/duckietown_challenges_runner/docker_compose.py", line 59, in get_services_id
raise ZValueError(container_ids=container_ids, services=services, res=res, names=names)
zuper_commons.types.exceptions.ZValueError:
│ container_ids: [791a9a3ef6aceb8d2aba4ccff4f9b1973088d56feedaba8354b8b2b169111171,
│ e4a7c844e8f516a7dddb16e05f69cf540c45ebec690951b763316664626eb17a]
│ services: dict[3]
│ │ evaluator:
│ │ dict[7]
│ │ │ image: docker.io/andreacensi/aido5-lf-sim-validation-lfv-sim-evaluator@sha256:6d0af9441525e1ed05be582f41e00fc178083c86797d28cd1a255c7025d0fd50
│ │ │ environment:
│ │ │ dict[10]
│ │ │ │ experiment_manager_parameters:
│ │ │ │ |episodes_per_scenario: 1
│ │ │ │ |episode_length_s: 60.0
│ │ │ │ |min_episode_length_s: 0.0
│ │ │ │ |seed: 888
│ │ │ │ |physics_dt: 0.05
│ │ │ │ |max_failures: 2
│ │ │ │ |fifo_dir: /fifos
│ │ │ │ |sim_in: /fifos/simulator-in
│ │ │ │ |sim_out: /fifos/simulator-out
│ │ │ │ |sm_in: /fifos/scenario_maker-in
│ │ │ │ |sm_out: /fifos/scenario_maker-out
│ │ │ │ |timeout_initialization: 120
│ │ │ │ |timeout_regular: 120
│ │ │ │ |port: 10123
│ │ │ │ |scenarios:
│ │ │ │ |- /scenarios
│ │ │ │ |
│ │ │ │ challenge_name: aido5-LF-sim-validation
│ │ │ │ challenge_step_name: LFv-sim
│ │ │ │ submission_id: 9312
│ │ │ │ submitter_name: jeromelabonte
│ │ │ │ SUBMISSION_CONTAINER: docker.io/jeromelabonte/aido-submissions:2020_10_25_18_51_25@sha256:04c7e311c0eaf6eb7705d26af59a86294addff2361cf6d4c9fb9c8ea981aa3c6
│ │ │ │ username: ubuntu
│ │ │ │ uid: 0
│ │ │ │ USER: ubuntu
│ │ │ │ HOME: /fake-home/ubuntu
│ │ │ ports: [10123]
│ │ │ labels: {org.duckietown.created_by_runner: true, org.duckietown.runner_name: nogpu-prod-07_27d69f530ab0}
│ │ │ user: 0:0
│ │ │ volumes:
│ │ │ [
│ │ │ /tmp/duckietown/aido5-LF-sim-validation/submission9312/LFv-sim-nogpu-prod-07_27d69f530ab0-job58246-a-wd:/challenges:rw,
│ │ │ /tmp/duckietown/aido5-LF-sim-validation/submission9312/LFv-sim-nogpu-prod-07_27d69f530ab0-job58246-a-fifos:/fifos:rw,
│ │ │ /tmp/duckietown/dt-challenges-runner/20_12_03_22_29_20-39644/fake-ubuntu-home:/fake-home/ubuntu:rw]
│ │ │ networks: {evaluation: {aliases: [evaluation]} }
│ │ simulator:
│ │ dict[6]
│ │ │ image: docker.io/duckietown/challenge-aido_lf-simulator-gym@sha256:c0096866077db3574e425d40603d8f5fc8ebbd164da7c0578df94ff4ede58d95
│ │ │ environment:
│ │ │ dict[12]
│ │ │ │ AIDONODE_CONFIG:
│ │ │ │ |env_constructor: Simulator
│ │ │ │ |env_parameters:
│ │ │ │ | max_steps: 500001 # we don't want the gym to reset itself
│ │ │ │ | domain_rand: 0
│ │ │ │ | camera_width: 640
│ │ │ │ | camera_height: 480
│ │ │ │ | distortion: true
│ │ │ │ | num_tris_distractors: 0
│ │ │ │ | color_ground: [0, 0.3, 0] # green
│ │ │ │ | enable_leds: true
│ │ │ │ |
│ │ │ │ AIDONODE_DATA_IN: /fifos/simulator-in
│ │ │ │ AIDONODE_DATA_OUT: fifo:/fifos/simulator-out
│ │ │ │ challenge_name: aido5-LF-sim-validation
│ │ │ │ challenge_step_name: LFv-sim
│ │ │ │ submission_id: 9312
│ │ │ │ submitter_name: jeromelabonte
│ │ │ │ SUBMISSION_CONTAINER: docker.io/jeromelabonte/aido-submissions:2020_10_25_18_51_25@sha256:04c7e311c0eaf6eb7705d26af59a86294addff2361cf6d4c9fb9c8ea981aa3c6
│ │ │ │ username: ubuntu
│ │ │ │ uid: 0
│ │ │ │ USER: ubuntu
│ │ │ │ HOME: /fake-home/ubuntu
│ │ │ labels: {org.duckietown.created_by_runner: true, org.duckietown.runner_name: nogpu-prod-07_27d69f530ab0}
│ │ │ user: 0:0
│ │ │ volumes:
│ │ │ [
│ │ │ /tmp/duckietown/aido5-LF-sim-validation/submission9312/LFv-sim-nogpu-prod-07_27d69f530ab0-job58246-a-wd:/challenges:rw,
│ │ │ /tmp/duckietown/aido5-LF-sim-validation/submission9312/LFv-sim-nogpu-prod-07_27d69f530ab0-job58246-a-fifos:/fifos:rw,
│ │ │ /tmp/duckietown/dt-challenges-runner/20_12_03_22_29_20-39644/fake-ubuntu-home:/fake-home/ubuntu:rw]
│ │ │ networks: {evaluation: {aliases: [evaluation]} }
│ │ solution-ego0:
│ │ dict[6]
│ │ │ image: docker.io/jeromelabonte/aido-submissions@sha256:04c7e311c0eaf6eb7705d26af59a86294addff2361cf6d4c9fb9c8ea981aa3c6
│ │ │ environment:
│ │ │ dict[13]
│ │ │ │ AIDONODE_NAME: ego0
│ │ │ │ AIDONODE_DATA_IN: /fifos/ego0-in
│ │ │ │ AIDO_REQUIRE_GPU: 1
│ │ │ │ AIDONODE_DATA_OUT: fifo:/fifos/ego0-out
│ │ │ │ challenge_name: aido5-LF-sim-validation
│ │ │ │ challenge_step_name: LFv-sim
│ │ │ │ submission_id: 9312
│ │ │ │ submitter_name: jeromelabonte
│ │ │ │ SUBMISSION_CONTAINER: docker.io/jeromelabonte/aido-submissions:2020_10_25_18_51_25@sha256:04c7e311c0eaf6eb7705d26af59a86294addff2361cf6d4c9fb9c8ea981aa3c6
│ │ │ │ username: ubuntu
│ │ │ │ uid: 0
│ │ │ │ USER: ubuntu
│ │ │ │ HOME: /fake-home/ubuntu
│ │ │ labels: {org.duckietown.created_by_runner: true, org.duckietown.runner_name: nogpu-prod-07_27d69f530ab0}
│ │ │ user: 0:0
│ │ │ volumes:
│ │ │ [
│ │ │ /tmp/duckietown/aido5-LF-sim-validation/submission9312/LFv-sim-nogpu-prod-07_27d69f530ab0-job58246-a-wd:/challenges:rw,
│ │ │ /tmp/duckietown/aido5-LF-sim-validation/submission9312/LFv-sim-nogpu-prod-07_27d69f530ab0-job58246-a-fifos:/fifos:rw,
│ │ │ /tmp/duckietown/dt-challenges-runner/20_12_03_22_29_20-39644/fake-ubuntu-home:/fake-home/ubuntu:rw]
│ │ │ networks: {evaluation: {aliases: [evaluation]} }
│ res: dict[2]
│ │ evaluator: 791a9a3ef6aceb8d2aba4ccff4f9b1973088d56feedaba8354b8b2b169111171
│ │ solution-ego0: e4a7c844e8f516a7dddb16e05f69cf540c45ebec690951b763316664626eb17a
│ names: dict[2]
│ │ 791a9a3ef6aceb8d2aba4ccff4f9b1973088d56feedaba8354b8b2b169111171: nogpu-prod-07_27d69f530ab0-job58246-666783_evaluator_1
│ │ e4a7c844e8f516a7dddb16e05f69cf540c45ebec690951b763316664626eb17a: nogpu-prod-07_27d69f530ab0-job58246-666783_solution-ego0_1
The above exception was the direct cause of the following exception:
Traceback (most recent call last):
File "/usr/local/lib/python3.8/dist-packages/duckietown_challenges_runner/runner.py", line 745, in get_cr
cr = run_single(
File "/usr/local/lib/python3.8/dist-packages/duckietown_challenges_runner/runner.py", line 959, in run_single
write_logs(wd, project, services=config["services"])
File "/usr/local/lib/python3.8/dist-packages/duckietown_challenges_runner/docker_compose.py", line 120, in write_logs
services2id: Dict[ServiceName, ContainerID] = get_services_id(wd, project, services)
File "/usr/local/lib/python3.8/dist-packages/duckietown_challenges_runner/docker_compose.py", line 63, in get_services_id
raise DockerComposeFail(msg, output=output.decode(), names=names) from e
duckietown_challenges_runner.docker_compose.DockerComposeFail: Cannot get process ids
│ output: |791a9a3ef6aceb8d2aba4ccff4f9b1973088d56feedaba8354b8b2b169111171
│ |e4a7c844e8f516a7dddb16e05f69cf540c45ebec690951b763316664626eb17a
│ |
│ names: dict[2]
│ │ 791a9a3ef6aceb8d2aba4ccff4f9b1973088d56feedaba8354b8b2b169111171: nogpu-prod-07_27d69f530ab0-job58246-666783_evaluator_1
│ │ e4a7c844e8f516a7dddb16e05f69cf540c45ebec690951b763316664626eb17a: nogpu-prod-07_27d69f530ab0-job58246-666783_solution-ego0_1
| Artefacts hidden. | | No reset possible |
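For context on the failure above: the compose project defines three services (evaluator, simulator, solution-ego0), but res and names contain container IDs for only two of them (evaluator and solution-ego0), so the simulator container is missing from the mapping; that mismatch is apparently what makes get_services_id raise the ZValueError, which write_logs then wraps as DockerComposeFail ("Cannot get process ids"). As a hypothetical illustration (not the runner's implementation; the project-name argument and helper name are assumptions), one way to map compose service names to container IDs is via the labels docker-compose sets on the containers it starts:

```python
import json
import subprocess
from typing import Dict

def services_to_container_ids(project: str) -> Dict[str, str]:
    """Map docker-compose service names to container IDs using compose labels.

    Illustrative sketch only: relies on the com.docker.compose.project and
    com.docker.compose.service labels that docker-compose adds to containers.
    """
    out = subprocess.check_output(
        ["docker", "ps", "-a",
         "--filter", f"label=com.docker.compose.project={project}",
         "--format", "{{.ID}}"],
        text=True,
    )
    mapping: Dict[str, str] = {}
    for short_id in out.split():
        info = json.loads(
            subprocess.check_output(["docker", "inspect", short_id], text=True)
        )[0]
        service = info["Config"]["Labels"].get("com.docker.compose.service")
        if service:
            mapping[service] = info["Id"]  # full 64-character container ID
    return mapping

# A missing key in the result (e.g. no "simulator" entry) would indicate that the
# service's container was never created or was already gone when logs were collected.
```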