From 04747e3e8b5e0cd23888f1793a887f7229734dc3 Mon Sep 17 00:00:00 2001
From: josiah_wong <84cremebrule@gmail.com>
Date: Sat, 22 Oct 2022 15:51:44 -0700
Subject: [PATCH] igibson --> omnigibson
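
Mechanical rename of the iGibson branding and the `igibson` package
references to OmniGibson / `omnigibson` across CI workflows, docs, Docker
files, configs, examples, tests, and source. The exact commands used for
this pass are not recorded here; a rename of this shape could be
approximated with something like the following (illustrative sketch only,
assuming GNU sed; the real pattern list was likely broader, e.g. the
ig_dataset -> og_dataset data paths):

    # illustrative only; not the exact command used for this commit
    git grep -lz -e iGibson -e igibson -e ig_dataset \
        | xargs -0 sed -i \
            -e 's/iGibson/OmniGibson/g' \
            -e 's/igibson/omnigibson/g' \
            -e 's/ig_dataset/og_dataset/g'

Files that already use the new name should be excluded from such a pass to
avoid doubled substitutions.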
---
.github/workflows/build-containers.yml | 2 +-
.github/workflows/docs.yml | 2 +-
.github/workflows/examples-as-test.yml | 20 +-
.github/workflows/release.yml | 6 +-
.github/workflows/sync-repos.yml | 8 +-
.github/workflows/tests.yml | 30 +--
.gitignore | 8 +-
.gitmodules | 20 +-
MANIFEST.in | 6 +-
README.md | 48 ++---
clean.sh | 2 +-
docker/.env | 2 +-
docker/omnigibson/Dockerfile | 14 +-
docker/omnigibson/build_docker.sh | 2 +-
docker/omnigibson/run_docker.sh | 4 +-
docs/acknowledgements.md | 2 +-
docs/assets.md | 4 +-
docs/conf.py | 10 +-
docs/dataset.md | 46 ++---
docs/environments.md | 44 ++--
docs/examples.md | 14 +-
docs/extended_states.md | 22 +-
docs/index.rst | 2 +-
docs/installation.md | 96 ++++-----
docs/intro.md | 26 +--
docs/issues.md | 4 +-
docs/learning_framework.md | 8 +-
docs/objects.md | 56 +++---
docs/overview.md | 14 +-
docs/physics_engine.md | 10 +-
docs/projects.md | 10 +-
docs/quickstart.md | 26 +--
docs/renderer.md | 20 +-
docs/robots.md | 36 ++--
docs/ros_integration.md | 58 +++---
docs/sampling.md | 16 +-
docs/scenes.md | 66 +++---
docs/simulators.md | 20 +-
docs/tests.md | 2 +-
docs/viewer.md | 8 +-
docs/virtual_reality.md | 32 +--
igibson/.gitignore | 2 +-
igibson/__init__.py | 40 ++--
igibson/action_primitives/__init__.py | 6 +-
.../action_primitive_set_base.py | 6 +-
.../behavior_discrete_action_primitives.py | 14 +-
.../starter_semantic_action_primitives.py | 26 +--
igibson/app_omni.py | 8 +-
igibson/app_omni_public.py | 6 +-
.../configs/apps/omni.isaac.sim.python.kit | 8 +-
.../apps/public/omni.isaac.sim.python.kit | 8 +-
.../configs/behavior_full_observability.yaml | 2 +-
.../behavior_full_observability_fetch.yaml | 2 +-
igibson/configs/behavior_vr.yaml | 2 +-
igibson/configs/controllers/dd.yaml | 2 +-
igibson/configs/controllers/ik.yaml | 2 +-
igibson/configs/controllers/joint.yaml | 2 +-
.../controllers/multi_finger_gripper.yaml | 2 +-
igibson/configs/controllers/null_gripper.yaml | 2 +-
igibson/configs/fetch_motion_planning.yaml | 2 +-
igibson/configs/fetch_rearrangement.yaml | 2 +-
igibson/configs/turtlebot_dynamic_nav.yaml | 2 +-
igibson/controllers/__init__.py | 14 +-
igibson/controllers/controller_base.py | 2 +-
igibson/controllers/dd_controller.py | 2 +-
igibson/controllers/ik_controller.py | 10 +-
igibson/controllers/joint_controller.py | 6 +-
.../multi_finger_gripper_controller.py | 6 +-
igibson/controllers/null_joint_controller.py | 2 +-
igibson/envs/__init__.py | 2 +-
igibson/envs/behavior_mp_env.py | 22 +-
igibson/envs/env_base.py | 52 ++---
igibson/envs/igibson_ray_env.py | 8 +-
igibson/envs/parallel_env.py | 10 +-
igibson/examples/README.md | 12 +-
.../environments/behavior_env_demo.py | 8 +-
.../examples/environments/config_selector.py | 16 +-
.../environments/navigation_env_demo.py | 10 +-
igibson/examples/example_selector.py | 10 +-
.../learning/demo_collection_example.py | 26 +--
.../learning/demo_replaying_example.py | 34 ++--
.../examples/learning/ray_rllib_example.py | 34 ++--
.../learning/stable_baselines3_example.py | 14 +-
.../examples/object_states/attachment_demo.py | 22 +-
.../examples/object_states/cleaning_demo.py | 14 +-
.../object_states/cleaning_demo_simple.py | 22 +-
.../heated_source_or_sink_demo.py | 14 +-
.../object_states/heated_state_demo.py | 16 +-
.../object_state_texture_demo.py | 18 +-
.../object_states/sample_kinematics_demo.py | 22 +-
.../sliceable_demo_with_reset.py | 20 +-
.../object_states/temperature_demo.py | 26 +--
igibson/examples/objects/draw_bounding_box.py | 12 +-
igibson/examples/objects/highlight_objects.py | 6 +-
.../examples/objects/load_object_selector.py | 16 +-
igibson/examples/objects/visualize_object.py | 26 +--
.../generate_additional_visual_channels.py | 14 +-
.../generate_colored_dense_pointcloud.py | 10 +-
.../generate_lidar_colored_pointcloud.py | 8 +-
.../observations/generate_lidar_velodyne.py | 14 +-
.../generate_optical_and_scene_flow.py | 16 +-
.../observations/generate_topdown_semseg.py | 10 +-
.../renderer/mesh_renderer_example.py | 6 +-
.../renderer/mesh_renderer_gpu_example.py | 6 +-
.../renderer/mesh_renderer_pano_example.py | 8 +-
.../renderer/mesh_renderer_pbr_example.py | 10 +-
.../renderer/mesh_renderer_simple_example.py | 4 +-
.../renderer_settings_example.py | 6 +-
.../examples/robots/advanced/ik_example.py | 36 ++--
.../examples/robots/all_robots_visualizer.py | 22 +-
.../examples/robots/grasping_mode_example.py | 20 +-
.../robots/motion_planning_example.py | 16 +-
.../examples/robots/robot_control_example.py | 10 +-
.../examples/ros/igibson-ros/CMakeLists.txt | 10 +-
igibson/examples/ros/igibson-ros/README.md | 2 +-
.../launch/turtlebot_gmapping.launch | 6 +-
.../launch/turtlebot_gt_navigation.launch | 30 +--
.../launch/turtlebot_hector_mapping.launch | 6 +-
.../launch/turtlebot_navigation.launch | 26 +--
.../launch/turtlebot_navigation_noisy.launch | 28 +--
.../igibson-ros/launch/turtlebot_rgbd.launch | 6 +-
igibson/examples/ros/igibson-ros/package.xml | 6 +-
.../ros/igibson-ros/turtlebot/turtlebot.urdf | 46 ++---
.../ros/igibson-ros/turtlebot_rgbd.py | 8 +-
igibson/examples/scenes/g_scene_selector.py | 12 +-
igibson/examples/scenes/ig_scene_selector.py | 16 +-
.../scenes/scene_object_rand_example.py | 6 +-
.../scenes/scene_partial_loading_example.py | 6 +-
igibson/examples/scenes/scene_selector.py | 12 +-
.../scenes/scene_texture_rand_example.py | 6 +-
.../examples/scenes/scene_tour_video_gen.py | 26 +--
.../scenes/scenescan2igibson/README.md | 10 +-
igibson/examples/scenes/stadium_example.py | 10 +-
.../scenes/traversability_map_example.py | 2 +-
.../simulator/sim_save_load_example.py | 10 +-
igibson/examples/vr/data_save_replay/vr_sr.py | 26 +--
.../vr/in_development/vr_body_tracker_test.py | 8 +-
.../vr/in_development/vr_button_mapping.py | 6 +-
.../vr/in_development/vr_cleaning_demo.py | 24 +--
.../in_development/vr_hand_dex_benchmark.py | 50 ++---
.../in_development/vr_hand_speed_benchmark.py | 30 +--
.../vr/in_development/vr_playground.py | 32 +--
.../vr/in_development/vr_sample_hud.py | 24 +--
.../vr/in_development/vr_tracker_test.py | 6 +-
igibson/examples/vr/muvr/muvr_demo.py | 32 +--
.../robot_embodiment/vr_demo_robot_explore.py | 20 +-
.../vr_demo_robot_grasping.py | 22 +-
igibson/examples/vr/test/muvr_lag_test.py | 4 +-
.../examples/vr/test/vr_condition_switch.py | 22 +-
igibson/examples/vr/test/vr_hand_geom_vis.py | 2 +-
.../examples/vr/test/vr_overlay_color_test.py | 24 +--
.../vr/test/vr_scroll_wrap_text_test.py | 24 +--
igibson/examples/vr/vr_gaze_test.py | 24 +--
igibson/examples/vr/vr_simple_demo.py | 26 +--
igibson/examples/web_ui/sampling_ui.py | 26 +--
igibson/examples/web_ui/templates/demo.html | 4 +-
igibson/examples/web_ui/templates/index.html | 4 +-
igibson/examples/web_ui/web_ui.py | 38 ++--
igibson/external/motion/__init__.py | 2 +-
.../motion/motion_planners/__init__.py | 2 +-
.../motion/motion_planners/discrete.py | 2 +-
.../external/motion/motion_planners/graph.py | 2 +-
.../motion/motion_planners/lazy_prm.py | 8 +-
.../motion/motion_planners/multi_rrt.py | 2 +-
.../external/motion/motion_planners/prm.py | 2 +-
.../external/motion/motion_planners/rrt.py | 2 +-
.../motion/motion_planners/rrt_connect.py | 2 +-
.../motion/motion_planners/rrt_star.py | 2 +-
.../motion/motion_planners/smoothing.py | 2 +-
.../motion/motion_planners/star_roadmap.py | 2 +-
.../external/motion/motion_planners/utils.py | 2 +-
igibson/global_config.yaml | 4 +-
igibson/macros.py | 18 +-
igibson/maps/segmentation_map.py | 10 +-
igibson/maps/traversable_map.py | 2 +-
igibson/metrics/agent_metric.py | 4 +-
igibson/metrics/dataset_metric.py | 2 +-
igibson/metrics/disarrangement_metric.py | 12 +-
igibson/metrics/gaze_metric.py | 2 +-
igibson/metrics/task_metric.py | 2 +-
igibson/object_states/__init__.py | 62 +++---
igibson/object_states/aabb.py | 4 +-
igibson/object_states/adjacency.py | 8 +-
igibson/object_states/attachment.py | 10 +-
igibson/object_states/burnt.py | 6 +-
igibson/object_states/cleaning_tool.py | 22 +-
igibson/object_states/contact_bodies.py | 2 +-
igibson/object_states/cooked.py | 6 +-
igibson/object_states/dirty.py | 8 +-
igibson/object_states/factory.py | 6 +-
igibson/object_states/filled.py | 12 +-
igibson/object_states/fluid_sink.py | 6 +-
igibson/object_states/fluid_source.py | 6 +-
igibson/object_states/frozen.py | 6 +-
igibson/object_states/heat_source_or_sink.py | 18 +-
igibson/object_states/heated.py | 6 +-
igibson/object_states/inside.py | 20 +-
igibson/object_states/kinematics.py | 8 +-
igibson/object_states/max_temperature.py | 4 +-
igibson/object_states/memoization.py | 6 +-
igibson/object_states/next_to.py | 10 +-
igibson/object_states/object_state_base.py | 2 +-
igibson/object_states/on_floor.py | 14 +-
igibson/object_states/on_top.py | 14 +-
igibson/object_states/open.py | 4 +-
igibson/object_states/pose.py | 2 +-
igibson/object_states/robot_related_states.py | 12 +-
igibson/object_states/room_states.py | 2 +-
igibson/object_states/sliced.py | 14 +-
igibson/object_states/slicer.py | 8 +-
igibson/object_states/soaked.py | 18 +-
igibson/object_states/temperature.py | 12 +-
igibson/object_states/toggle.py | 10 +-
igibson/object_states/touching.py | 6 +-
igibson/object_states/under.py | 12 +-
igibson/object_states/water_sink.py | 6 +-
igibson/object_states/water_source.py | 6 +-
igibson/objects/__init__.py | 14 +-
igibson/objects/controllable_object.py | 8 +-
igibson/objects/dataset_object.py | 26 +--
igibson/objects/light_object.py | 8 +-
igibson/objects/multi_object_wrappers.py | 6 +-
igibson/objects/object_base.py | 18 +-
igibson/objects/primitive_object.py | 12 +-
igibson/objects/stateful_object.py | 22 +-
igibson/objects/usd_object.py | 6 +-
igibson/prims/__init__.py | 14 +-
igibson/prims/cloth_prim.py | 4 +-
igibson/prims/entity_prim.py | 18 +-
igibson/prims/geom_prim.py | 2 +-
igibson/prims/joint_prim.py | 16 +-
igibson/prims/material_prim.py | 18 +-
igibson/prims/prim_base.py | 2 +-
igibson/prims/rigid_prim.py | 12 +-
igibson/prims/xform_prim.py | 8 +-
igibson/renderer_settings/__init__.py | 2 +-
igibson/renderer_settings/common_settings.py | 2 +-
.../path_tracing_settings.py | 2 +-
.../post_processing_settings.py | 2 +-
.../renderer_settings/real_time_settings.py | 2 +-
.../renderer_settings/renderer_settings.py | 8 +-
igibson/reward_functions/__init__.py | 10 +-
igibson/reward_functions/collision_reward.py | 2 +-
igibson/reward_functions/point_goal_reward.py | 2 +-
igibson/reward_functions/potential_reward.py | 2 +-
.../reward_functions/reaching_goal_reward.py | 4 +-
.../reward_functions/reward_function_base.py | 2 +-
igibson/robots/__init__.py | 28 +--
igibson/robots/active_camera_robot.py | 4 +-
igibson/robots/ant.py | 8 +-
igibson/robots/behavior_robot.py | 14 +-
igibson/robots/fetch.py | 24 +--
igibson/robots/freight.py | 8 +-
igibson/robots/husky.py | 8 +-
igibson/robots/jr2.py | 14 +-
igibson/robots/legacy/humanoid_robot.py | 2 +-
igibson/robots/legacy/minitaur_robot.py | 4 +-
igibson/robots/legacy/quadrotor_robot.py | 2 +-
igibson/robots/locobot.py | 8 +-
igibson/robots/locomotion_robot.py | 6 +-
igibson/robots/manipulation_robot.py | 28 +--
igibson/robots/robot_base.py | 24 +--
igibson/robots/tiago.py | 30 +--
igibson/robots/turtlebot.py | 8 +-
igibson/robots/two_wheel_robot.py | 4 +-
igibson/scenes/__init__.py | 10 +-
igibson/scenes/empty_scene.py | 4 +-
.../scenes/interactive_traversable_scene.py | 40 ++--
igibson/scenes/scene_base.py | 18 +-
igibson/scenes/static_traversable_scene.py | 12 +-
igibson/scenes/traversable_scene.py | 6 +-
igibson/sensors/__init__.py | 12 +-
igibson/sensors/dropout_sensor_noise.py | 2 +-
igibson/sensors/scan_sensor.py | 8 +-
igibson/sensors/sensor_base.py | 6 +-
igibson/sensors/sensor_noise_base.py | 2 +-
igibson/sensors/vision_sensor.py | 12 +-
igibson/simulator.py | 34 ++--
igibson/systems/__init__.py | 6 +-
igibson/systems/macro_particle_system.py | 20 +-
igibson/systems/micro_particle_system.py | 20 +-
igibson/systems/particle_system_base.py | 4 +-
igibson/systems/system_base.py | 4 +-
igibson/tasks/__init__.py | 14 +-
igibson/tasks/bddl_backend.py | 2 +-
igibson/tasks/behavior_task.py | 90 ++++-----
igibson/tasks/dummy_task.py | 8 +-
igibson/tasks/furniture_closing_task.py | 24 +--
.../tasks/point_navigation_obstacle_task.py | 30 +--
igibson/tasks/point_navigation_task.py | 40 ++--
igibson/tasks/point_reaching_task.py | 6 +-
igibson/tasks/task_base.py | 6 +-
igibson/termination_conditions/__init__.py | 14 +-
igibson/termination_conditions/falling.py | 2 +-
.../termination_conditions/max_collision.py | 2 +-
igibson/termination_conditions/point_goal.py | 6 +-
.../termination_conditions/predicate_goal.py | 2 +-
.../termination_conditions/reaching_goal.py | 4 +-
.../termination_condition_base.py | 2 +-
igibson/termination_conditions/timeout.py | 2 +-
igibson/transition_rules.py | 18 +-
igibson/utils/asset_utils.py | 188 +++++++++---------
.../utils/behavior_robot_planning_utils.py | 16 +-
igibson/utils/config_utils.py | 6 +-
igibson/utils/constants.py | 18 +-
igibson/utils/control_utils.py | 2 +-
igibson/utils/git_utils.py | 10 +-
igibson/utils/log_utils.py | 8 +-
igibson/utils/motion_planning_utils.py | 54 ++---
igibson/utils/object_state_utils.py | 38 ++--
igibson/utils/physx_utils.py | 2 +-
igibson/utils/processing_utils.py | 2 +-
igibson/utils/python_utils.py | 4 +-
igibson/utils/registry_utils.py | 4 +-
igibson/utils/render_utils.py | 6 +-
igibson/utils/sampling_utils.py | 24 +--
igibson/utils/sim_utils.py | 32 +--
igibson/utils/ui_utils.py | 8 +-
igibson/utils/usd_utils.py | 26 +--
igibson/utils/vision_utils.py | 2 +-
igibson/wrappers/__init__.py | 6 +-
igibson/wrappers/action_primitive_wrapper.py | 6 +-
igibson/wrappers/log_wrapper.py | 6 +-
igibson/wrappers/wrapper_base.py | 16 +-
pyproject.toml | 48 ++---
refactor_scripts/convert_cloth.py | 8 +-
refactor_scripts/import_all_objects.py | 6 +-
refactor_scripts/import_all_scene_assets.py | 8 +-
.../import_all_urdf_and_metadata.py | 6 +-
refactor_scripts/import_cloth.py | 16 +-
refactor_scripts/import_collision_mesh.py | 14 +-
refactor_scripts/import_metadata.py | 22 +-
refactor_scripts/import_scene_and_objects.py | 6 +-
.../import_scene_task_templates.py | 20 +-
refactor_scripts/import_scene_template.py | 16 +-
refactor_scripts/import_single_object.py | 2 +-
refactor_scripts/import_urdfs_from_scene.py | 16 +-
.../preprocess_ig2_building_assets.py | 12 +-
.../preprocess_urdf_for_metalinks.py | 6 +-
refactor_scripts/test_ag_cloth.py | 12 +-
refactor_scripts/test_full_loading.py | 24 +--
refactor_scripts/test_single_model.py | 10 +-
setup.py | 6 +-
.../benchmark/benchmark_interactive_scene.py | 18 +-
.../benchmark_interactive_scene_rendering.py | 16 +-
tests/benchmark/benchmark_object_count.py | 10 +-
tests/benchmark/benchmark_static_scene.py | 10 +-
tests/create_tests_of_examples.py | 8 +-
tests/test_binding.py | 8 +-
.../test_determinism_against_same_version.py | 4 +-
tests/test_igibson_env.py | 14 +-
tests/test_igsdf_scene_importing.py | 4 +-
tests/test_motion_planning.py | 12 +-
tests/test_object.py | 18 +-
tests/test_of_example_template.txt | 2 +-
tests/test_particles.py | 14 +-
tests/test_pbr.py | 16 +-
tests/test_render.py | 10 +-
tests/test_render_tensor.py | 8 +-
tests/test_robot.py | 8 +-
tests/test_scene_importing.py | 14 +-
tests/test_scene_saving_loading.py | 20 +-
tests/test_sensors.py | 16 +-
tests/test_simulator.py | 8 +-
tests/test_states.py | 76 +++----
tests/test_transition_rules.py | 12 +-
366 files changed, 2497 insertions(+), 2497 deletions(-)
diff --git a/.github/workflows/build-containers.yml b/.github/workflows/build-containers.yml
index dce4bed3d..c61034b79 100644
--- a/.github/workflows/build-containers.yml
+++ b/.github/workflows/build-containers.yml
@@ -7,7 +7,7 @@ on:
workflow_dispatch:
env:
- REGISTRY_USER: igibson
+ REGISTRY_USER: omnigibson
IMAGE_REGISTRY: docker.io
REGISTRY_PASSWORD: ${{ secrets.REGISTRY_PASSWORD }}
diff --git a/.github/workflows/docs.yml b/.github/workflows/docs.yml
index 12d4a1c8e..9379f386c 100644
--- a/.github/workflows/docs.yml
+++ b/.github/workflows/docs.yml
@@ -6,7 +6,7 @@ jobs:
build-docs:
name: Build and deploy documentation
runs-on: ubuntu-latest
- if: github.repository == 'StanfordVL/iGibson' && github.ref == 'refs/heads/master'
+ if: github.repository == 'StanfordVL/OmniGibson' && github.ref == 'refs/heads/master'
steps:
- name: Checkout code
diff --git a/.github/workflows/examples-as-test.yml b/.github/workflows/examples-as-test.yml
index e382fa40b..ee47f6c2b 100644
--- a/.github/workflows/examples-as-test.yml
+++ b/.github/workflows/examples-as-test.yml
@@ -15,14 +15,14 @@ concurrency:
jobs:
test:
runs-on: [self-hosted, linux, gpu]
- if: github.repository == 'StanfordVL/iGibson-dev'
+ if: github.repository == 'StanfordVL/OmniGibson-dev'
steps:
- name: Checkout source
uses: actions/checkout@v2
with:
submodules: true
- path: igibson
+ path: omnigibson
- name: Add CUDA to env
run: echo "/usr/local/cuda/bin" >> $GITHUB_PATH
@@ -34,19 +34,19 @@ jobs:
architecture: x64
- name: Install dev requirements
- working-directory: igibson
+ working-directory: omnigibson
run: pip install -r requirements-dev.txt
- name: Install additional dev requirements
- working-directory: igibson
+ working-directory: omnigibson
run: pip install -r tests/requirements-tests.txt
- name: Install
- working-directory: igibson
+ working-directory: omnigibson
run: pip install -e .
- name: Uninstall pip bddl
- working-directory: igibson
+ working-directory: omnigibson
run: pip uninstall -y bddl
- name: Checkout BDDL
@@ -64,15 +64,15 @@ jobs:
run: pip install -e .
- name: Link Dataset
- working-directory: igibson
- run: ln -s /scr/ig-data igibson/data
+ working-directory: omnigibson
+ run: ln -s /scr/ig-data omnigibson/data
- name: Create tests of examples
- working-directory: igibson
+ working-directory: omnigibson
run: python tests/create_tests_of_examples.py
- name: Run tests
- working-directory: igibson
+ working-directory: omnigibson
run: pytest /tmp/tests_of_examples
- name: Remove Files
diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml
index fbb9e34ca..6eb3811b5 100644
--- a/.github/workflows/release.yml
+++ b/.github/workflows/release.yml
@@ -9,7 +9,7 @@ jobs:
build:
runs-on: "ubuntu-latest"
steps:
- - name: Checkout iGibson source
+ - name: Checkout OmniGibson source
uses: actions/checkout@master
with:
submodules: true
@@ -24,7 +24,7 @@ jobs:
build
--user
- name: Remove unnecessary files to reduce file size
- run: rm -r igibson/render/openvr/samples
+ run: rm -r omnigibson/render/openvr/samples
- name: Build a binary wheel and a source tarball
run: >-
python -m
@@ -33,7 +33,7 @@ jobs:
--outdir dist/
.
- name: Publish a Python distribution to PyPI
- if: github.repository == 'StanfordVL/iGibson' && startsWith(github.ref, 'refs/tags')
+ if: github.repository == 'StanfordVL/OmniGibson' && startsWith(github.ref, 'refs/tags')
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
diff --git a/.github/workflows/sync-repos.yml b/.github/workflows/sync-repos.yml
index de70e0b3c..b6fac7250 100644
--- a/.github/workflows/sync-repos.yml
+++ b/.github/workflows/sync-repos.yml
@@ -1,4 +1,4 @@
-name: Sync iGibson-dev/master to iGibson/master
+name: Sync OmniGibson-dev/master to OmniGibson/master
on:
workflow_dispatch:
@@ -12,7 +12,7 @@ jobs:
if: github.ref == 'refs/heads/master'
steps:
- uses: actions/checkout@v2
- - name: Sync iGibson-dev/master to iGibson/master
- if: github.repository == 'StanfordVL/iGibson-dev' && startsWith(github.ref, 'refs/tags')
+ - name: Sync OmniGibson-dev/master to OmniGibson/master
+ if: github.repository == 'StanfordVL/OmniGibson-dev' && startsWith(github.ref, 'refs/tags')
run:
- git push https://$PERSONAL_ACCESS_TOKEN:x-oauth-basic@github.com/StanfordVL/iGibson.git master:master
+ git push https://$PERSONAL_ACCESS_TOKEN:x-oauth-basic@github.com/StanfordVL/OmniGibson.git master:master
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index ed62ab758..e0d292db4 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -9,14 +9,14 @@ concurrency:
jobs:
test:
runs-on: [self-hosted, linux, gpu]
- if: github.repository == 'StanfordVL/iGibson-dev'
+ if: github.repository == 'StanfordVL/OmniGibson-dev'
steps:
- name: Checkout source
uses: actions/checkout@v2
with:
submodules: true
- path: igibson
+ path: omnigibson
- name: Add CUDA to env
run: echo "/usr/local/cuda/bin" >> $GITHUB_PATH
@@ -28,15 +28,15 @@ jobs:
architecture: x64
- name: Install dev requirements
- working-directory: igibson
+ working-directory: omnigibson
run: pip install -r requirements-dev.txt
- name: Install
- working-directory: igibson
+ working-directory: omnigibson
run: pip install -e .
- name: Uninstall pip bddl
- working-directory: igibson
+ working-directory: omnigibson
run: pip uninstall -y bddl
- name: Checkout BDDL
@@ -54,33 +54,33 @@ jobs:
run: pip install -e .
- name: Link Dataset
- working-directory: igibson
- run: ln -s /scr/ig-data igibson/data
+ working-directory: omnigibson
+ run: ln -s /scr/ig-data omnigibson/data
# The below method of checking out ig-dataset is currently unused due to poor speed.
# - name: Create data directory
- # run: mkdir -p igibson/igibson/data
+ # run: mkdir -p omnigibson/omnigibson/data
#
- # - name: Checkout ig_dataset
+ # - name: Checkout og_dataset
# uses: actions/checkout@v2
# with:
- # repository: StanfordVL/ig_dataset
+ # repository: StanfordVL/og_dataset
# token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} # PAT is required since this is a different repo
- # path: igibson/igibson/data/ig_dataset
+ # path: omnigibson/omnigibson/data/og_dataset
# submodules: recursive
# lfs: true
#
- # - name: Checkout ig_assets
+ # - name: Checkout og_assets
# uses: actions/checkout@v2
# with:
- # repository: StanfordVL/ig_assets
+ # repository: StanfordVL/og_assets
# token: ${{ secrets.PERSONAL_ACCESS_TOKEN }} # PAT is required since this is a different repo
- # path: igibson/igibson/data/assets
+ # path: omnigibson/omnigibson/data/assets
# submodules: recursive
# lfs: true
- name: Run tests
- working-directory: igibson
+ working-directory: omnigibson
run: pytest
- name: Upload coverage to Codecov
diff --git a/.gitignore b/.gitignore
index 8efbad282..e56993018 100644
--- a/.gitignore
+++ b/.gitignore
@@ -74,12 +74,12 @@ build
dist
# Directories used for QC pipeline
-igibson/utils/data_utils/mesh_decimation/collision
-igibson/utils/data_utils/mesh_decimation/visual
-igibson/utils/data_utils/mesh_decimation/final_videos
+omnigibson/utils/data_utils/mesh_decimation/collision
+omnigibson/utils/data_utils/mesh_decimation/visual
+omnigibson/utils/data_utils/mesh_decimation/final_videos
# libcryptopp
-igibson/render/mesh_renderer/libcryptopp.so.8.6
+omnigibson/render/mesh_renderer/libcryptopp.so.8.6
# Coverage
.coverage
diff --git a/.gitmodules b/.gitmodules
index 5856df639..9a77f3648 100644
--- a/.gitmodules
+++ b/.gitmodules
@@ -1,15 +1,15 @@
-[submodule "igibson/render/pybind11"]
- path = igibson/render/pybind11
+[submodule "omnigibson/render/pybind11"]
+ path = omnigibson/render/pybind11
url = https://github.com/pybind/pybind11.git
-[submodule "igibson/render/glfw"]
- path = igibson/render/glfw
+[submodule "omnigibson/render/glfw"]
+ path = omnigibson/render/glfw
url = https://github.com/glfw/glfw
-[submodule "igibson/render/glm"]
- path = igibson/render/glm
+[submodule "omnigibson/render/glm"]
+ path = omnigibson/render/glm
url = https://github.com/g-truc/glm
-[submodule "igibson/render/openvr"]
- path = igibson/render/openvr
+[submodule "omnigibson/render/openvr"]
+ path = omnigibson/render/openvr
url = https://github.com/ValveSoftware/openvr
-[submodule "igibson/render/cryptopp"]
- path = igibson/render/cryptopp
+[submodule "omnigibson/render/cryptopp"]
+ path = omnigibson/render/cryptopp
url = https://github.com/fxia22/cryptopp
diff --git a/MANIFEST.in b/MANIFEST.in
index 72880ccc1..20c98ff62 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1,7 +1,7 @@
include LICENSE
-graft igibson
-prune igibson/data
-prune igibson/render/openvr/samples
+graft omnigibson
+prune omnigibson/data
+prune omnigibson/render/openvr/samples
global-exclude *.py[co]
\ No newline at end of file
diff --git a/README.md b/README.md
index cf99216f7..35483895a 100644
--- a/README.md
+++ b/README.md
@@ -1,22 +1,22 @@
-# iGibson: A Simulation Environment to train Robots in Large Realistic Interactive Scenes
+# OmniGibson: A Simulation Environment to train Robots in Large Realistic Interactive Scenes
-
+
-iGibson is a simulation environment providing fast visual rendering and physics simulation based on Bullet. iGibson is equipped with fifteen fully interactive high quality scenes, hundreds of large 3D scenes reconstructed from real homes and offices, and compatibility with datasets like CubiCasa5K and 3D-Front, providing 8000+ additional interactive scenes. Some of the features of iGibson include domain randomization, integration with motion planners and easy-to-use tools to collect human demonstrations. With these scenes and features, iGibson allows researchers to train and evaluate robotic agents that use visual signals to solve navigation and manipulation tasks such as opening doors, picking up and placing objects, or searching in cabinets.
+OmniGibson is a simulation environment providing fast visual rendering and physics simulation based on Bullet. OmniGibson is equipped with fifteen fully interactive high quality scenes, hundreds of large 3D scenes reconstructed from real homes and offices, and compatibility with datasets like CubiCasa5K and 3D-Front, providing 8000+ additional interactive scenes. Some of the features of OmniGibson include domain randomization, integration with motion planners and easy-to-use tools to collect human demonstrations. With these scenes and features, OmniGibson allows researchers to train and evaluate robotic agents that use visual signals to solve navigation and manipulation tasks such as opening doors, picking up and placing objects, or searching in cabinets.
### Latest Updates
-[8/9/2021] Major update to iGibson to reach iGibson 2.0, for details please refer to our [arxiv preprint](https://arxiv.org/abs/2108.03272).
+[8/9/2021] Major update to OmniGibson to reach OmniGibson 2.0, for details please refer to our [arxiv preprint](https://arxiv.org/abs/2108.03272).
-- iGibson 2.0 supports object states, including temperature, wetness level, cleanliness level, and toggled and sliced states, necessary to cover a wider range of tasks.
-- iGibson 2.0 implements a set of predicate logic functions that map the simulator states to logic states like Cooked or Soaked.
-- iGibson 2.0 includes a virtual reality (VR) interface to immerse humans in its scenes to collect demonstrations.
+- OmniGibson 2.0 supports object states, including temperature, wetness level, cleanliness level, and toggled and sliced states, necessary to cover a wider range of tasks.
+- OmniGibson 2.0 implements a set of predicate logic functions that map the simulator states to logic states like Cooked or Soaked.
+- OmniGibson 2.0 includes a virtual reality (VR) interface to immerse humans in its scenes to collect demonstrations.
-[12/1/2020] Major update to iGibson to reach iGibson 1.0, for details please refer to our [arxiv preprint](https://arxiv.org/abs/2012.02924).
+[12/1/2020] Major update to OmniGibson to reach OmniGibson 1.0, for details please refer to our [arxiv preprint](https://arxiv.org/abs/2012.02924).
-- Release of iGibson dataset that includes 15 fully interactive scenes and 500+ object models annotated with materials and physical attributes on top of [existing 3D articulated models](https://cs.stanford.edu/~kaichun/partnet/).
+- Release of OmniGibson dataset that includes 15 fully interactive scenes and 500+ object models annotated with materials and physical attributes on top of [existing 3D articulated models](https://cs.stanford.edu/~kaichun/partnet/).
- Compatibility to import [CubiCasa5K](https://github.com/CubiCasa/CubiCasa5k) and [3D-Front](https://tianchi.aliyun.com/specials/promotion/alibaba-3d-scene-dataset) scene descriptions leading to more than 8000 extra interactive scenes!
-- New features in iGibson: Physically based rendering, 1-beam and 16-beam LiDAR, domain randomization, motion planning integration, tools to collect human demos and more!
+- New features in OmniGibson: Physically based rendering, 1-beam and 16-beam LiDAR, domain randomization, motion planning integration, tools to collect human demos and more!
- Code refactoring, better class structure and cleanup.
[05/14/2020] Added dynamic light support :flashlight:
@@ -24,11 +24,11 @@ iGibson is a simulation environment providing fast visual rendering and physics
[04/28/2020] Added support for Mac OSX :computer:
### Citation
-If you use iGibson or its assets and models, consider citing the following publication:
+If you use OmniGibson or its assets and models, consider citing the following publication:
```
-@misc{li2021igibson,
- title={iGibson 2.0: Object-Centric Simulation for Robot Learning of Everyday Household Tasks},
+@misc{li2021omnigibson,
+ title={OmniGibson 2.0: Object-Centric Simulation for Robot Learning of Everyday Household Tasks},
author={Chengshu Li and Fei Xia and Roberto Mart\'in-Mart\'in and Michael Lingelbach and Sanjana Srivastava and Bokui Shen and Kent Vainio and Cem Gokmen and Gokul Dharan and Tanish Jain and Andrey Kurenkov and Karen Liu and Hyowon Gweon and Jiajun Wu and Li Fei-Fei and Silvio Savarese},
year={2021},
eprint={2108.03272},
@@ -38,8 +38,8 @@ If you use iGibson or its assets and models, consider citing the following publi
```
```
-@inproceedings{shen2021igibson,
- title={iGibson 1.0: a Simulation Environment for Interactive Tasks in Large Realistic Scenes},
+@inproceedings{shen2021omnigibson,
+ title={OmniGibson 1.0: a Simulation Environment for Interactive Tasks in Large Realistic Scenes},
author={Bokui Shen and Fei Xia and Chengshu Li and Roberto Mart\'in-Mart\'in and Linxi Fan and Guanzhi Wang and Claudia P\'erez-D'Arpino and Shyamal Buch and Sanjana Srivastava and Lyne P. Tchapmi and Micael E. Tchapmi and Kent Vainio and Josiah Wong and Li Fei-Fei and Silvio Savarese},
booktitle={2021 IEEE/RSJ International Conference on Intelligent Robots and Systems (IROS)},
year={2021},
@@ -49,30 +49,30 @@ If you use iGibson or its assets and models, consider citing the following publi
```
### Documentation
-The documentation for iGibson can be found here: [iGibson Documentation](http://svl.stanford.edu/igibson/docs/). It includes installation guide (including data download instructions), quickstart guide, code examples, and APIs.
+The documentation for OmniGibson can be found here: [OmniGibson Documentation](http://svl.stanford.edu/omnigibson/docs/). It includes installation guide (including data download instructions), quickstart guide, code examples, and APIs.
-If you want to know more about iGibson, you can also check out [our webpage](http://svl.stanford.edu/igibson), [iGibson 2.0 arxiv preprint](https://arxiv.org/abs/2108.03272) and [iGibson 1.0 arxiv preprint](https://arxiv.org/abs/2012.02924).
+If you want to know more about OmniGibson, you can also check out [our webpage](http://svl.stanford.edu/omnigibson), [OmniGibson 2.0 arxiv preprint](https://arxiv.org/abs/2108.03272) and [OmniGibson 1.0 arxiv preprint](https://arxiv.org/abs/2012.02924).
### Dowloading the Dataset of 3D Scenes
-For instructions to install iGibson and download dataset, you can visit [installation guide](http://svl.stanford.edu/igibson/docs/installation.html) and [dataset download guide](http://svl.stanford.edu/igibson/docs/dataset.html).
+For instructions to install OmniGibson and download dataset, you can visit [installation guide](http://svl.stanford.edu/omnigibson/docs/installation.html) and [dataset download guide](http://svl.stanford.edu/omnigibson/docs/dataset.html).
-There are other datasets we link to iGibson. We include support to use CubiCasa5K and 3DFront scenes, adding up more than 10000 extra interactive scenes to use in iGibson! Check our [documentation](https://github.com/StanfordVL/iGibson/tree/master/igibson/utils/data_utils/ext_scene) on how to use those.
+There are other datasets we link to OmniGibson. We include support to use CubiCasa5K and 3DFront scenes, adding up more than 10000 extra interactive scenes to use in OmniGibson! Check our [documentation](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/utils/data_utils/ext_scene) on how to use those.
-We also maintain compatibility with datasets of 3D reconstructed large real-world scenes (homes and offices) that you can download and use with iGibson. For Gibson Dataset and Stanford 2D-3D-Semantics Dataset, please fill out this [form](https://forms.gle/36TW9uVpjrE1Mkf9A). For Matterport3D Dataset, please fill in this [form](http://dovahkiin.stanford.edu/matterport/public/MP_TOS.pdf) and send it to [matterport3d@googlegroups.com](mailto:matterport3d@googlegroups.com). Please put "use with iGibson simulator" in your email. Check our [dataset download guide](http://svl.stanford.edu/igibson/docs/dataset.html) for more details.
+We also maintain compatibility with datasets of 3D reconstructed large real-world scenes (homes and offices) that you can download and use with OmniGibson. For Gibson Dataset and Stanford 2D-3D-Semantics Dataset, please fill out this [form](https://forms.gle/36TW9uVpjrE1Mkf9A). For Matterport3D Dataset, please fill in this [form](http://dovahkiin.stanford.edu/matterport/public/MP_TOS.pdf) and send it to [matterport3d@googlegroups.com](mailto:matterport3d@googlegroups.com). Please put "use with OmniGibson simulator" in your email. Check our [dataset download guide](http://svl.stanford.edu/omnigibson/docs/dataset.html) for more details.
-### Using iGibson with VR
-If you want to use iGibson VR interface, please visit the [VR guide (TBA)].
+### Using OmniGibson with VR
+If you want to use OmniGibson VR interface, please visit the [VR guide (TBA)].
### Contributing
-This is the github repository for iGibson (pip package `igibson`) 2.0 release. (For iGibson 1.0, please use `1.0` branch.) Bug reports, suggestions for improvement, as well as community developments are encouraged and appreciated. Please, consider creating an issue or sending us an email.
+This is the github repository for OmniGibson (pip package `omnigibson`) 2.0 release. (For OmniGibson 1.0, please use `1.0` branch.) Bug reports, suggestions for improvement, as well as community developments are encouraged and appreciated. Please, consider creating an issue or sending us an email.
The support for our previous version of the environment, Gibson, can be found in the [following repository](http://github.com/StanfordVL/GibsonEnv/).
### Acknowledgments
-iGibson uses code from a few open source repositories. Without the efforts of these folks (and their willingness to release their implementations under permissable copyleft licenses), iGibson would not be possible. We thanks these authors for their efforts!
+OmniGibson uses code from a few open source repositories. Without the efforts of these folks (and their willingness to release their implementations under permissible copyleft licenses), OmniGibson would not be possible. We thank these authors for their efforts!
- Syoyo Fujita: [tinyobjloader](https://github.com/syoyo/tinyobjloader)
- Erwin Coumans: [egl_example](https://github.com/erwincoumans/egl_example)
diff --git a/clean.sh b/clean.sh
index a48556807..53df35964 100755
--- a/clean.sh
+++ b/clean.sh
@@ -1,4 +1,4 @@
#!/bin/sh
rm -rf build
-rm -rf igibson/render/mesh_renderer/build/
+rm -rf omnigibson/render/mesh_renderer/build/
diff --git a/docker/.env b/docker/.env
index d98efa377..0877b2800 100644
--- a/docker/.env
+++ b/docker/.env
@@ -1,3 +1,3 @@
REGISTRY=docker.io
-REPO=igibson
+REPO=omnigibson
VERSION=v2.0.4
diff --git a/docker/omnigibson/Dockerfile b/docker/omnigibson/Dockerfile
index b5f317cbd..bc9de1a90 100644
--- a/docker/omnigibson/Dockerfile
+++ b/docker/omnigibson/Dockerfile
@@ -11,31 +11,31 @@ RUN apt-get update && apt-get install -y \
RUN curl -Ls https://micro.mamba.pm/api/micromamba/linux-64/latest | tar -xvj -C / bin/micromamba
ENV MAMBA_ROOT_PREFIX /micromamba
-RUN micromamba create -n igibson -c conda-forge python=3.7
+RUN micromamba create -n omnigibson -c conda-forge python=3.7
RUN micromamba shell init --shell=bash --prefix=/micromamba
-ENV PATH /micromamba/envs/igibson/bin:$PATH
+ENV PATH /micromamba/envs/omnigibson/bin:$PATH
RUN echo "source /isaac-sim/setup_conda_env.sh" >> /root/.bashrc
WORKDIR /
ENV GITHUB_USER mjlbach
ENV GITHUB_TOKEN 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
-ENV GITHUB_REPOSITORY stanfordvl/iGibson3.git
+ENV GITHUB_REPOSITORY stanfordvl/OmniGibson3.git
RUN git clone https://${GITHUB_USER}:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}
ENV GITHUB_REPOSITORY stanfordvl/bddl-dev.git
ENV GITHUB_TOKEN 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
RUN git clone https://${GITHUB_USER}:${GITHUB_TOKEN}@github.com/${GITHUB_REPOSITORY}
-WORKDIR /iGibson3
+WORKDIR /OmniGibson3
RUN pip install -e .
-RUN micromamba install -n igibson -c conda-forge opencv
+RUN micromamba install -n omnigibson -c conda-forge opencv
WORKDIR /bddl-dev
RUN pip install -e .
-WORKDIR /iGibson3/igibson/examples/
+WORKDIR /OmniGibson3/omnigibson/examples/
-RUN sed -i "s/gm.HEADLESS = False/gm.HEADLESS = True/" /iGibson3/igibson/macros.py
+RUN sed -i "s/gm.HEADLESS = False/gm.HEADLESS = True/" /OmniGibson3/omnigibson/macros.py
ENTRYPOINT bash --login
diff --git a/docker/omnigibson/build_docker.sh b/docker/omnigibson/build_docker.sh
index 3fbba9b67..6175b93ad 100644
--- a/docker/omnigibson/build_docker.sh
+++ b/docker/omnigibson/build_docker.sh
@@ -1 +1 @@
-docker build -t omnigibson:latest .
+docker build -t omnigibson:latest .
diff --git a/docker/omnigibson/run_docker.sh b/docker/omnigibson/run_docker.sh
index a0d9d4381..6dffb34bc 100644
--- a/docker/omnigibson/run_docker.sh
+++ b/docker/omnigibson/run_docker.sh
@@ -1,3 +1,3 @@
-export OMNIGIBSON_DATA_PATH=/path/to/omnigibson_data
+export OMNIGIBSON_DATA_PATH=/path/to/omnigibson_data
# To run with GUI: edit macros.py and add -v /tmp/.X11-unix:/tmp/.X11-unix to container launch command
-docker run -e DISPLAY -v $OMNIGIBSON_DATA_PATH:/iGibson3/igibson/data --rm -it omnigibson
+docker run -e DISPLAY -v $OMNIGIBSON_DATA_PATH:/OmniGibson3/omnigibson/data --rm -it omnigibson
diff --git a/docs/acknowledgements.md b/docs/acknowledgements.md
index de67656da..ad3ec0a5b 100644
--- a/docs/acknowledgements.md
+++ b/docs/acknowledgements.md
@@ -1,7 +1,7 @@
Acknowledgments
================
-iGibson uses code from a few open source repositories. Without the efforts of these folks (and their willingness to release their implementations under permissable copyleft licenses), iGibson would not be possible. We thanks these authors for their efforts!
+OmniGibson uses code from a few open source repositories. Without the efforts of these folks (and their willingness to release their implementations under permissible copyleft licenses), OmniGibson would not be possible. We thank these authors for their efforts!
- Syoyo Fujita: [tinyobjloader](https://github.com/syoyo/tinyobjloader)
- Erwin Coumans: [egl_example](https://github.com/erwincoumans/egl_example)
diff --git a/docs/assets.md b/docs/assets.md
index abe3cba49..0ef984c38 100644
--- a/docs/assets.md
+++ b/docs/assets.md
@@ -2,10 +2,10 @@
## Introduction
-Assets includes necessary files for constructing a scene in iGibson simulator. The files include robot models, interactive objects, articulated objects and mesh files for tests. These files are too large to include in a version control system so we distribute them separately. The assets file can be downloaded to the path set in `your_installation_path/igibson/global_config.yaml` (default to `your_installation_path/igibson/data/assets`) with running
+Assets includes necessary files for constructing a scene in OmniGibson simulator. The files include robot models, interactive objects, articulated objects and mesh files for tests. These files are too large to include in a version control system so we distribute them separately. The assets file can be downloaded to the path set in `your_installation_path/omnigibson/global_config.yaml` (default to `your_installation_path/omnigibson/data/assets`) with running
```bash
-python -m igibson.utils.assets_utils --download_assets
+python -m omnigibson.utils.assets_utils --download_assets
```
The folder structure will look like below (in the future we might add more models):
diff --git a/docs/conf.py b/docs/conf.py
index ed8f39c42..d520fa63c 100644
--- a/docs/conf.py
+++ b/docs/conf.py
@@ -24,18 +24,18 @@
from recommonmark.parser import CommonMarkParser
sys.path.append(os.path.abspath("../"))
-import igibson
+import omnigibson
-project = "iGibson"
+project = "OmniGibson"
copyright = "Stanford University 2018-2021"
author = "Bokui Shen*, Fei Xia*, Chengshu Li*, Roberto Martín-Martín*, Linxi Fan, Guanzhi Wang, Shyamal Buch, Claudia DArpino, Sanjana Srivastava, Lyne P. Tchapmi, Micael E. Tchapmi, Kent Vainio, Li Fei-Fei, Silvio Savarese (*Equal Contribution)"
-github_doc_root = "https://github.com/StanfordVL/iGibson"
+github_doc_root = "https://github.com/StanfordVL/OmniGibson"
# The short X.Y version
-version = igibson.__version__
+version = omnigibson.__version__
# The full version, including alpha/beta/rc tags
-release = igibson.__version__
+release = omnigibson.__version__
sys.path.insert(0, os.path.abspath("../"))
sys.path.insert(0, os.path.abspath("../../"))
diff --git a/docs/dataset.md b/docs/dataset.md
index b8a9c30df..85fe59484 100644
--- a/docs/dataset.md
+++ b/docs/dataset.md
@@ -2,45 +2,45 @@
In this page you will find information about:
-- [How to download iGibson 2.0 scenes and the BEHAVIOR Dataset of Objects](#download-igibson-2-0-scenes-and-the-behavior-dataset-of-objects)
-- [How to download iGibson 1.0 scenes](#download-igibson-1-0-scenes)
+- [How to download OmniGibson 2.0 scenes and the BEHAVIOR Dataset of Objects](#download-omnigibson-2-0-scenes-and-the-behavior-dataset-of-objects)
+- [How to download OmniGibson 1.0 scenes](#download-omnigibson-1-0-scenes)
- [How to download Gibson and Stanford 2D-3D-Semantics scenes](#download-gibson-and-stanford-2d-3d-semantics-scenes)
- [How to download Matterport3D scenes](#download-matterport3d-scenes)
-## Download iGibson 2.0 Scenes and the BEHAVIOR Dataset of Objects
+## Download OmniGibson 2.0 Scenes and the BEHAVIOR Dataset of Objects
What will you download?
-- **iGibson 2.0 Dataset of Scenes**: New versions of the fully interactive scenes, more densely populated with objects.
-- **BEHAVIOR Object Dataset**: Dataset of object models annotated with physical and semantic properties. The 3D models are free to use within iGibson 2.0 for BEHAVIOR (due to artists' copyright, models are encrypted and allowed only to be used with iGibson 2.0). You can download a bundle of the iGibson 2.0 dataset of scenes and the BEHAVIOR dataset of objects here.
+- **OmniGibson 2.0 Dataset of Scenes**: New versions of the fully interactive scenes, more densely populated with objects.
+- **BEHAVIOR Object Dataset**: Dataset of object models annotated with physical and semantic properties. The 3D models are free to use within OmniGibson 2.0 for BEHAVIOR (due to artists' copyright, models are encrypted and allowed only to be used with OmniGibson 2.0). You can download a bundle of the OmniGibson 2.0 dataset of scenes and the BEHAVIOR dataset of objects here.
To download both in a bundle, you need to follow the following steps:
- Fill out the license agreement in this [form](https://docs.google.com/forms/d/e/1FAIpQLScPwhlUcHu_mwBqq5kQzT2VRIRwg_rJvF0IWYBk_LxEZiJIFg/viewform)
-- You will receive a key (igibson.key). Move it into the subfolder of the iGibson repository that contains the dataset, for example, iGibson/igibson/data
-- Download the behavior data bundle (ig_dataset) from [here](https://storage.googleapis.com/gibson_scenes/behavior_data_bundle.zip)
-- Unzip ig_dataset into the folder: `unzip behavior_data_bundle.zip -d iGibson/igibson/data`
+- You will receive a key (omnigibson.key). Move it into the subfolder of the OmniGibson repository that contains the dataset, for example, OmniGibson/omnigibson/data
+- Download the behavior data bundle (og_dataset) from [here](https://storage.googleapis.com/gibson_scenes/behavior_data_bundle.zip)
+- Unzip og_dataset into the folder: `unzip behavior_data_bundle.zip -d OmniGibson/omnigibson/data`
-After this process, you will be able to sample and use the scenes and objects in iGibson, for example, to evaluate your embodied AI solutions in the [BEHAVIOR benchmark](https://behavior.stanford.edu/).
+After this process, you will be able to sample and use the scenes and objects in OmniGibson, for example, to evaluate your embodied AI solutions in the [BEHAVIOR benchmark](https://behavior.stanford.edu/).
-## Download iGibson 1.0 Scenes
+## Download OmniGibson 1.0 Scenes
What will you download?
-- **iGibson 1.0 Dataset of Scenes**: We annotated fifteen 3D reconstructions of real-world scans and converted them into fully interactive scene models. In this process, we respect the original object-instance layout and object-category distribution. The object models are extended from open-source datasets ([ShapeNet Dataset](https://www.shapenet.org/), [Motion Dataset](http://motiondataset.zbuaa.com/), [SAPIEN Dataset](https://sapien.ucsd.edu/)) enriched with annotations of material and dynamic properties.
+- **OmniGibson 1.0 Dataset of Scenes**: We annotated fifteen 3D reconstructions of real-world scans and converted them into fully interactive scene models. In this process, we respect the original object-instance layout and object-category distribution. The object models are extended from open-source datasets ([ShapeNet Dataset](https://www.shapenet.org/), [Motion Dataset](http://motiondataset.zbuaa.com/), [SAPIEN Dataset](https://sapien.ucsd.edu/)) enriched with annotations of material and dynamic properties.
The following image shows the fifteen fully interactive scenes:
-![placeholder.jpg](images/ig_scene.png)
+![placeholder.jpg](images/og_scene.png)
-To download the dataset, you need to first configure where the dataset is to be stored. You can change it in `your_installation_path/igibson/global_config.yaml` (default and recommended: `ig_dataset: your_installation_path/igibson/data/ig_dataset`). iGibson scenes can be downloaded with one single line:
+To download the dataset, you need to first configure where the dataset is to be stored. You can change it in `your_installation_path/omnigibson/global_config.yaml` (default and recommended: `og_dataset: your_installation_path/omnigibson/data/og_dataset`). OmniGibson scenes can be downloaded with one single line:
```bash
-python -m igibson.utils.assets_utils --download_ig_dataset
+python -m omnigibson.utils.assets_utils --download_og_dataset
```
-If the script fails to work, you can download from this [direct link](https://storage.googleapis.com/gibson_scenes/ig_dataset.tar.gz) and extract to `your_installation_path/igibson/data/ig_dataset`.
+If the script fails to work, you can download from this [direct link](https://storage.googleapis.com/gibson_scenes/og_dataset.tar.gz) and extract to `your_installation_path/omnigibson/data/og_dataset`.
-A description of the file structure and format of the files in the dataset can be found [here](https://github.com/StanfordVL/iGibson/tree/master/igibson/utils/data_utils).
+A description of the file structure and format of the files in the dataset can be found [here](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/utils/data_utils).
-**Cubicasa / 3D Front Dataset Support:** We provide support for Cubicasa and 3D Front Dataset providing more than 10000 additional scenes (with less furniture than our fifteen scenes). To import them into iGibson, follow the instructions [here](https://github.com/StanfordVL/iGibson/tree/master/igibson/utils/data_utils/ext_scene).
+**Cubicasa / 3D Front Dataset Support:** We provide support for Cubicasa and 3D Front Dataset providing more than 10000 additional scenes (with less furniture than our fifteen scenes). To import them into OmniGibson, follow the instructions [here](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/utils/data_utils/ext_scene).
## Download Gibson and Stanford 2D-3D-Semantics scenes
@@ -52,15 +52,15 @@ Files included in the dataset:
- All scenes, 572 scenes (108GB): gibson_v2_all.tar.gz
- 4+ partition, 106 scenes, with textures better packed (2.6GB): gibson_v2_4+.tar.gz
-- Stanford 2D-3D-Semantics, 7 scenes (1.4GB): 2d3ds_for_igibson.zip
+- Stanford 2D-3D-Semantics, 7 scenes (1.4GB): 2d3ds_for_omnigibson.zip
-We have updated these datasets to be used with iGibson so that users can keep developing and studying pure navigation solutions. The following link will bring you to a license agreement and then to a downloading URL: [form](https://forms.gle/36TW9uVpjrE1Mkf9A)
+We have updated these datasets to be used with OmniGibson so that users can keep developing and studying pure navigation solutions. The following link will bring you to a license agreement and then to a downloading URL: [form](https://forms.gle/36TW9uVpjrE1Mkf9A)
After filling in the agreement, you will obtain a downloading `URL`.
-You can download the data manually and store it in the path set in `your_installation_path/igibson/global_config.yaml` (default and recommended: `g_dataset: your_installation_path/igibson/data/g_dataset`).
+You can download the data manually and store it in the path set in `your_installation_path/omnigibson/global_config.yaml` (default and recommended: `g_dataset: your_installation_path/omnigibson/data/g_dataset`).
Alternatively, you can run a single command to download the dataset, decompress, and place it in the correct folder:
```bash
-python -m igibson.utils.assets_utils --download_dataset URL
+python -m omnigibson.utils.assets_utils --download_dataset URL
```
The Gibson Environment Dataset consists of 572 models and 1440 floors. We cover a diverse set of models including households, offices, hotels, venues, museums, hospitals, construction sites, etc. A diverse set of visualization of all spaces in Gibson can be seen [here](http://gibsonenv.stanford.edu/database/).
@@ -103,8 +103,8 @@ For the maps, each pixel represents 0.01m, and the center of the image correspon
What will you download?
- Matterport3D Dataset: 90 scenes (3.2GB)
-Please fill in this [form](http://dovahkiin.stanford.edu/matterport/public/MP_TOS.pdf) and send it to [matterport3d@googlegroups.com](mailto:matterport3d@googlegroups.com). Please put "use with iGibson simulator" in your email.
+Please fill in this [form](http://dovahkiin.stanford.edu/matterport/public/MP_TOS.pdf) and send it to [matterport3d@googlegroups.com](mailto:matterport3d@googlegroups.com). Please put "use with OmniGibson simulator" in your email.
-You'll then recieve a python script via email in response. Run `python download_mp.py --task_data igibson -o .` with the received script to download the data (3.2GB). Afterwards, move each of the scenes to the path set in `your_installation_path/igibson/global_config.yaml` (default and recommended: `g_dataset: your_installation_path/igibson/data/g_dataset`).
+You'll then receive a python script via email in response. Run `python download_mp.py --task_data omnigibson -o .` with the received script to download the data (3.2GB). Afterwards, move each of the scenes to the path set in `your_installation_path/omnigibson/global_config.yaml` (default and recommended: `g_dataset: your_installation_path/omnigibson/data/g_dataset`).
Reference: [Matterport3D webpage](https://niessner.github.io/Matterport/).
\ No newline at end of file
diff --git a/docs/environments.md b/docs/environments.md
index 9045e00d7..b33f406b4 100644
--- a/docs/environments.md
+++ b/docs/environments.md
@@ -4,13 +4,13 @@
We provide **Environments** that follow the [OpenAI gym](https://github.com/openai/gym) interface for applications such as reinforcement learning algorithms. Generally speaking, an **Environment** instantiates **Scene**, **Object** and **Robot** and import them into its **Simulator**. An **Environment** also instantiates a list of **Sensors** (usually as part of the observation space) and a **Task**, which further includes a list of **Reward Functions** and **Termination Conditions**.
-Most of the code can be found here: [igibson/envs/igibson_env.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/envs/igibson_env.py).
+Most of the code can be found here: [omnigibson/envs/omnigibson_env.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/envs/omnigibson_env.py).
#### Sensors
We provide different types of sensors as lightweight wrappers around the renderer. Currently we support RGB, surface normal, segmentation, 3D point cloud, depth map, optical flow, and scene flow, and also 1-beam and 16-beam LiDAR signals. Additionally, we provide a sensor noise model with random dropout (currently only for depth map and 1-beam LiDAR) to simulate real-world sensor noise. The amount of noise can be controlled by `depth_noise_rate` and `scan_noise_rate` in the config files. Contribution of more noise models is most welcome.
-Most of the code can be found in [igibson/sensors](https://github.com/StanfordVL/iGibson/tree/master/igibson/sensors).
+Most of the code can be found in [omnigibson/sensors](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/sensors).
#### Tasks
@@ -27,7 +27,7 @@ We provide a few Embodied AI tasks.
- ReachingRandomTask
- RoomRearrangementTask
-Most of the code can be found in [igibson/tasks](https://github.com/StanfordVL/iGibson/tree/master/igibson/tasks).
+Most of the code can be found in [omnigibson/tasks](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/tasks).
#### Reward Functions
@@ -38,7 +38,7 @@ We provide a few common reward functions for robotics tasks.
- PotentialReward
- CollisionReward
-Most of the code can be found in [igibson/reward_functions](https://github.com/StanfordVL/iGibson/tree/master/igibson/reward_functions).
+Most of the code can be found in [omnigibson/reward_functions](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/reward_functions).
#### Termination Conditions
@@ -49,16 +49,16 @@ We provide a few common termination conditions for robotics tasks.
- MaxCollision
- Timeout
- OutOfBound
-Most of the code can be found in [igibson/termination_conditions](https://github.com/StanfordVL/iGibson/tree/master/igibson/termination_conditions).
+Most of the code can be found in [omnigibson/termination_conditions](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/termination_conditions).
#### Configs
-To instantiate an **Environment**, we first need to create a YAML config file. It will specify parameters for the **Environment** (e.g. robot type, action frequency, etc), the **Sensors** (e.g. sensor types, image resolution, noise rate, etc), the **Task** (e.g. task type, goal distance range, etc), the **Reward Functions** (e.g. reward types, reward scale, etc) and the **Termination Conditions** (e.g. goal convergence threshold, time limit, etc). Exapmles of config files can be found here: [configs](https://github.com/StanfordVL/iGibson/tree/master/igibson/configs).
+To instantiate an **Environment**, we first need to create a YAML config file. It will specify parameters for the **Environment** (e.g. robot type, action frequency, etc), the **Sensors** (e.g. sensor types, image resolution, noise rate, etc), the **Task** (e.g. task type, goal distance range, etc), the **Reward Functions** (e.g. reward types, reward scale, etc) and the **Termination Conditions** (e.g. goal convergence threshold, time limit, etc). Examples of config files can be found here: [configs](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/configs).
-Here is one example: [configs/turtlebot_nav.yaml](https://github.com/StanfordVL/iGibson/blob/master/igibson/configs/turtlebot_nav.yaml)
+Here is one example: [configs/turtlebot_nav.yaml](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/configs/turtlebot_nav.yaml)
```yaml
# scene
-scene: igibson
+scene: omnigibson
scene_id: Rs_int
build_graph: true
load_texture: true
@@ -138,8 +138,8 @@ Parameters of this config file are explained below:
| Attribute | Example Value | Expalanation |
| ----------| ------------- | ------------ |
-| scene | igibson | which type of scene: [empty, stadium, gibson, igibson] |
-| scene_id | Rs_int | scene_id for the gibson or igibson scene |
+| scene | omnigibson | which type of scene: [empty, stadium, gibson, omnigibson] |
+| scene_id | Rs_int | scene_id for the gibson or omnigibson scene |
| build_graph | true | whether to build a traversability graph for the building scene |
| load_texture | true | whether to load texture into MeshRenderer. Can be set to false if RGB is not needed |
| pybullet_load_texture | true | whether to load texture into PyBullet, for debugging purposes only |
@@ -175,7 +175,7 @@ Parameters of this config file are explained below:
| depth_low | 0.8 | lower bound of the valid range of the depth camera |
| depth_high | 3.5 | upper bound of the valid range of the depth camera |
| n_horizontal_rays | 228 | number of horizontal rays to simulate for the LiDAR |
-| n_vertical_beams | 1 | number of vertical beams to simulate for the LiDAR. Currently iGibson only supports n_vertical_beams == 1 |
+| n_vertical_beams | 1 | number of vertical beams to simulate for the LiDAR. Currently OmniGibson only supports n_vertical_beams == 1 |
| laser_linear_range | 5.6 | upper bound of the valid range of the LiDAR |
| laser_angular_range | 240.0 | angular range of the LiDAR (in degrees) |
| min_laser_dist | 0.05 | lower bound of the valid range of the LiDAR |
@@ -188,14 +188,14 @@ Parameters of this config file are explained below:
### Examples
#### Static Environments
-In this example, we show how to instantiate `iGibsonEnv` and how to step through the environment. At the beginning of each episode, we need to call `env.reset()`. Then we need to call `env.step(action)` to step through the environment and retrieve `(state, reward, done, info)`.
+In this example, we show how to instantiate `OmniGibsonEnv` and how to step through the environment. At the beginning of each episode, we need to call `env.reset()`. Then we need to call `env.step(action)` to step through the environment and retrieve `(state, reward, done, info)`.
- `state`: a Python dictionary of observations, e.g. `state['rgb']` will be an H x W x 3 numpy array that represents the current image
- `reward`: a scalar that represents the current reward
- `done`: a boolean that indicates whether the episode should terminate
- `info`: a Python dictionary for bookkeeping purposes
-The code can be found here: [igibson/examples/environments/env_nonint_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/environments/env_nonint_example.py).
+The code can be found here: [omnigibson/examples/environments/env_nonint_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/environments/env_nonint_example.py).
-If the execution fails with segfault 11, you may need to reduce texture scaling in the config file (igibson/configs/turtlebot_static_nav.yaml) to avoid out-of-memory error.
+If the execution fails with segfault 11, you may need to reduce texture scaling in the config file (omnigibson/configs/turtlebot_static_nav.yaml) to avoid an out-of-memory error.
```python
import logging
@@ -204,15 +204,15 @@ from sys import platform
import yaml
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
-from igibson.render.profiler import Profiler
-from igibson.utils.asset_utils import download_assets, download_demo_data
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+from omnigibson.render.profiler import Profiler
+from omnigibson.utils.asset_utils import download_assets, download_demo_data
def main():
"""
- Creates an iGibson environment from a config file with a turtlebot in Rs (not interactive).
+ Creates an OmniGibson environment from a config file with a turtlebot in Rs (not interactive).
It steps the environment 100 times with random actions sampled from the action space,
using the Gym interface, resetting it 10 times.
"""
@@ -220,12 +220,12 @@ def main():
# If they have not been downloaded before, download assets and Rs Gibson (non-interactive) models
download_assets()
download_demo_data()
- config_filename = os.path.join(igibson.example_config_path, "turtlebot_static_nav.yaml")
+ config_filename = os.path.join(omnigibson.example_config_path, "turtlebot_static_nav.yaml")
config_data = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
# Reduce texture scale for Mac.
if platform == "darwin":
config_data["texture_scale"] = 0.5
- env = iGibsonEnv(config_file=config_data, mode="gui_interactive")
+ env = OmniGibsonEnv(config_file=config_data, mode="gui_interactive")
for j in range(10):
logging.info("Resetting environment")
env.reset()
@@ -244,4 +244,4 @@ if __name__ == "__main__":
```
#### Interactive Environments
-In this example, we show how to instantiate `iGibsobEnv` with a fully interactive scene `Rs_int`. In this scene, the robot can interact with all the objects in the scene (chairs, tables, couches, etc). The code can be found here: [igibson/examples/environments/env_int_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/env_int_example.py).
+In this example, we show how to instantiate `OmniGibsonEnv` with the fully interactive scene `Rs_int`. In this scene, the robot can interact with all the objects in the scene (chairs, tables, couches, etc.). The code can be found here: [omnigibson/examples/environments/env_int_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/environments/env_int_example.py).
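+As a rough sketch (not the referenced file itself), the interactive environment is created just like in the static example above, only with an interactive config such as `turtlebot_nav.yaml`; the commented `load_object_categories` shortcut is borrowed from the objects example further below:
+
+```python
+import os
+
+import yaml
+
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+
+config_filename = os.path.join(omnigibson.example_config_path, "turtlebot_nav.yaml")
+config_data = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
+# Optionally restrict the object categories that get loaded to speed up startup, e.g.:
+# config_data["load_object_categories"] = []
+
+env = OmniGibsonEnv(config_file=config_data, mode="gui_interactive")
+state = env.reset()
+for _ in range(100):
+    state, reward, done, info = env.step(env.action_space.sample())
+    if done:
+        state = env.reset()
+env.close()
+```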
diff --git a/docs/examples.md b/docs/examples.md
index 5d67aa725..8e1133b7d 100644
--- a/docs/examples.md
+++ b/docs/examples.md
@@ -1,21 +1,21 @@
# Code Examples
-In this section, we include a description of most of the code examples you can find under `igibson/examples/`.
+In this section, we include a description of most of the code examples you can find under `omnigibson/examples/`.
There may be new examples since we wrote this, so check that folder!
-If you are interested in just getting started as an end-user, you only need check out `igibson/examples/environments`.
+If you are interested in just getting started as an end-user, you only need to check out `omnigibson/examples/environments`.
-If you are looking for examples of BEHAVIOR, the benchmark of household activities that uses iGibson, please check the BEHAVIOR repository at https://github.com/StanfordVL/behavior.
+If you are looking for examples of BEHAVIOR, the benchmark of household activities that uses OmniGibson, please check the BEHAVIOR repository at https://github.com/StanfordVL/behavior.
-- environments: how to instantiate iGibson environments with interactive or static scenes, optionally with a scene selector.
+- environments: how to instantiate OmniGibson environments with interactive or static scenes, optionally with a scene selector.
- learning: how to train RL policies for robot navigation using stable baselines 3, and how to save and replay demos of agents for imitation learning.
- objects: how to create, load, and place objects at predefined locations or using a logic sampler (e.g. onTop(A, B)), how to change texture as a function of temperature, and how to generate the minimum-volume bounding boxes of objects.
- object_states: how to change various object states, including dusty, stained, (water sources) toggled on, (cleaning tool) soaked, sliced, and temperature, and how to save and reload object states.
- observations: how to generate different observation modalities such as RGB, depth, LiDAR, segmentation, etc.
- renderer: how to use the renderer directly, without the physics engine.
- robots: how to (keyboard) control robots with differential drive controllers, IK controllers and sampling-based motion planners.
-- ros: how to run ROS with iGibson as if it is the real world.
+- ros: how to run ROS with OmniGibson as if it is the real world.
- scenes: how to load interactive and non-interactive scenes, how to use domain randomization (of object models and/or texture), and how to create a tour video of the scenes.
-- vr: how to use iGibson with VR.
-- web_ui: how to start a web server that hosts iGibson environments.
+- vr: how to use OmniGibson with VR.
+- web_ui: how to start a web server that hosts OmniGibson environments.
diff --git a/docs/extended_states.md b/docs/extended_states.md
index c6c89cb5a..ee4cc7c71 100644
--- a/docs/extended_states.md
+++ b/docs/extended_states.md
@@ -2,9 +2,9 @@
### Extended States
-iGibson 2.0 is an object-oriented simulator: the simulator maintains and updates a list of objects with properties that change over-time.
+OmniGibson 2.0 is an object-oriented simulator: the simulator maintains and updates a list of objects with properties that change over time.
Object classes are organized following the WordNet hierarchy.
-Different to other simulators, iGibson 2.0 is not limited to kinematic properties of the objects (pose, velocity, acceleration, joint configurations for articulated objects), but it includes also a set of additional object-properties that we call "extended states".
+Unlike other simulators, OmniGibson 2.0 is not limited to the kinematic properties of objects (pose, velocity, acceleration, joint configurations for articulated objects); it also includes a set of additional object properties that we call "extended states".
The extended states are:
#### Temperature
@@ -19,23 +19,23 @@ Integer value per object. It changes when in contact with a water dropplet.
#### Cleanliness (Dustiness and Stain Level)
-In iGibson 2.0, objects can be initialized with visible dust or stain particles on its surface. The number of particles at initialization corresponds to a 100% level of dustiness, d, or stains, s, as we assume that dust/stain particles cannot be generated after initialization. As particles are cleaned, the level decreases proportionally to the number of particles removed, reaching a level of 0% when all particles have been cleaned.
+In OmniGibson 2.0, objects can be initialized with visible dust or stain particles on their surfaces. The number of particles at initialization corresponds to a 100% level of dustiness, d, or stains, s, as we assume that dust/stain particles cannot be generated after initialization. As particles are cleaned, the level decreases proportionally to the number of particles removed, reaching a level of 0% when all particles have been cleaned.
![extstates3.jpg](images/extstates3.jpg)
#### Toggled State
-Some object categories in iGibson 2.0 can be toggled on and off. iGibson 2.0 maintains and updates an internal binary functional state for those objects.
+Some object categories in OmniGibson 2.0 can be toggled on and off. OmniGibson 2.0 maintains and updates an internal binary functional state for those objects.
![extstates4a.jpg](images/extstates4a.jpg)
#### Sliced State
Many cooking activities require the agent to slice objects, e.g. food items.
-In iGibson 2.0 some objects can be sliced into two halves and the state of the object will be updated.
+In OmniGibson 2.0 some objects can be sliced into two halves and the state of the object will be updated.
![extstates4b.jpg](images/extstates4b.jpg)
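+As a hedged illustration (assuming the per-object `states` mapping and the `object_states` module follow the pattern used by the `examples/object_states` demos; `obj` stands for any object already imported into the simulator), extended states can be read and written like this:
+
+```python
+from omnigibson import object_states
+
+# Scalar extended states such as temperature expose get_value/set_value.
+if object_states.Temperature in obj.states:
+    obj.states[object_states.Temperature].set_value(80.0)  # heat the object
+    print(obj.states[object_states.Temperature].get_value())
+
+# Binary extended states such as the toggled state are accessed the same way.
+if object_states.ToggledOn in obj.states:
+    print(obj.states[object_states.ToggledOn].get_value())
+```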
## Logic States
-Based on the kinematic and extended states, iGibson 2.0 implements a list of logic predicates, checking conditions to indicate if a logic state is True or False.
-The logic states implemented and checked by iGibson 2.0 include:
+Based on the kinematic and extended states, OmniGibson 2.0 implements a list of logic predicates, checking conditions to indicate if a logic state is True or False.
+The logic states implemented and checked by OmniGibson 2.0 include:
- InsideOf(o1,o2): Object o1 is inside of object o2 if we can find two orthogonal axes crossing at o1 center of mass that intersect o2 collision mesh in both directions.
- OnTopOf(o1,o2): Object o1 is on top of object o2 if o2 is in the list of objects InSameNegativeVerticalAxisObjs(o1) and o2 is in the list of objects InSamePositiveVerticalAxisObjs(o1) and InContactWith(o1, o2), where InSamePositive/NegativeVerticalAxisObjs(o1) is the list of objects in the same positive/negative vertical axis as o1 and InContactWith(o1, o2) is whether the two objects are in physical contact.
- NextTo(o1,o2): Object o1 is next to object o2 if o2 is in the list of objects InSameHorizontalPlaneObjs(o1) and l2(o1, o2) < tNextTo , where InSameHorizontalPlaneObjs(o1) is a list of objects in the same horizontal plane as o1, l2 is the L2 distance between the closest points of the two objects, and tNextTo is a distance threshold that is proportional to the average size of the two objects.
@@ -49,8 +49,8 @@ The logic states implemented and checked by iGibson 2.0 include:
- Soaked(o): The wetness level w of the object o is over a threshold wsoaked, i.e., w ≥ wsoaked. The default value for the threshold is wsoaked = 1 (the object is soaked if it absorbs one or more droplets), a value that can be adapted per object category and model.
- Dusty(o): The dustiness level d of the object o is over a threshold ddusty, i.e., d > ddusty. The default value for the threshold is ddusty = 0.5 (half of the dust particles have been cleaned), a value that can be adapted per object category and model.
- Stained(o): The stain level s of the object o is over a threshold sstained, i.e., s > sstained. The default value for the threshold is sstained = 0.5 (half of the stain particles have been cleaned), a value that can be adapted per object category and model.
-- ToggledOn(o): Object o is toggled on or off. It is a direct query of the iGibson 2.0 objects’ extended state TS, the toggled state.
-- Sliced(o): Object o is sliced or not. It is a direct access of the iGibson 2.0 objects’ extended state SS, the sliced state.
+- ToggledOn(o): Object o is toggled on or off. It is a direct query of the OmniGibson 2.0 objects’ extended state TS, the toggled state.
+- Sliced(o): Object o is sliced or not. It is a direct access of the OmniGibson 2.0 objects’ extended state SS, the sliced state.
- InFoVOfAgent(o): Object o is in the field of view of the agent, i.e., at least one pixel of the image acquired by the agent’s onboard sensors corresponds to the surface of o.
- InHandOfAgent(o): Object o is grasped by the agent’s hands (i.e. assistive grasping is activated on that object).
- InReachOfAgent(o): Object o is within dreach = 2 meters away from the agent.
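+A similarly hedged sketch of querying these predicates through the per-object state interface (the class names `OnTop`, `Inside` and `Sliced` are assumptions to verify against the `object_states` module; `apple`, `table` and `fridge` stand for objects already imported into the scene):
+
+```python
+from omnigibson import object_states
+
+# Binary (kinematic) predicates take the other object as an argument.
+print(apple.states[object_states.OnTop].get_value(table))    # OnTopOf(apple, table)
+print(apple.states[object_states.Inside].get_value(fridge))  # InsideOf(apple, fridge)
+
+# Unary predicates are queried without arguments.
+print(apple.states[object_states.Sliced].get_value())
+```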
@@ -59,9 +59,9 @@ The logic states implemented and checked by iGibson 2.0 include:
We do not annotate all objects with all extended states.
Objects are annotated with the extended states that allow querying meaningful and useful logic states from them, e.g. temperature to query if food is cooked (or frozen or burned).
-This doesn't mean a table does not have temperature, but it is not relevant for most robotics activities simulated in iGibson 2.0.
+This doesn't mean a table does not have temperature, but it is not relevant for most robotics activities simulated in OmniGibson 2.0.
We annotate object classes with meaningful logic states that apply to them, and, possibly, the parameters to check these states.
-The list of classes and states can be found in the BDDL repository ([list of object characteristics](https://github.com/sanjanasrivastava/BDDL/blob/master/utils/synsets_to_filtered_properties_pruned_igibson.json)).
+The list of classes and states can be found in the BDDL repository ([list of object characteristics](https://github.com/sanjanasrivastava/BDDL/blob/master/utils/synsets_to_filtered_properties_pruned_omnigibson.json)).
The characteristics that can be annotated include (additional extended state and parameters in parentheses):
(e.g., the temperature is not necessary/relevant for non-food categories for most tasks of interest).
diff --git a/docs/index.rst b/docs/index.rst
index f26d580ed..ed211ce3d 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -3,7 +3,7 @@
You can adapt this file completely to your liking, but it should at least
contain the root `toctree` directive.
-Welcome to iGibson's documentation!
+Welcome to OmniGibson's documentation!
==================================================
.. toctree::
diff --git a/docs/installation.md b/docs/installation.md
index b0d963b40..d16b39619 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -1,5 +1,5 @@
# Installation
-There are two steps to install iGibson, the Interactive Gibson Environment, on your computer.
+There are two steps to install OmniGibson, the Interactive Gibson Environment, on your computer.
First, you need to install the simulation environment. This may require installing additional dependencies. Then, you need to download the assets: models of the robotic agents, the interactive objects and 3D reconstructed real-world large environments for your agents to train.
@@ -33,7 +33,7 @@ Other system configurations may work, but we haven't tested them extensively and
### Installing dependencies in Linux machines
-As most Python packages, we recommend to install iGibson in a virtual environment.
+As with most Python packages, we recommend installing OmniGibson in a virtual environment.
We suggest using Conda instead of a standard virtual environment.
To set up Anaconda with the right dependencies, run the following as your user account (**not as root/superuser**):
@@ -46,15 +46,15 @@ rm Miniconda-latest-Linux-x86_64.sh
# Add conda to your PATH
echo "export PATH=$HOME/.miniconda/bin:$PATH" >> .bashrc
-# Update conda and create a virtual environment for iGibson
+# Update conda and create a virtual environment for OmniGibson
conda update -y conda
-conda create -y -n igibson python=3.8
-conda activate igibson
+conda create -y -n omnigibson python=3.8
+conda activate omnigibson
```
Careful, this may change your GPU drivers!
-There are several system dependencies to correctly run iGibson on Linux, mostly related to Nvidia drivers and Cuda.
+There are several system dependencies required to run OmniGibson correctly on Linux, mostly related to Nvidia drivers and CUDA.
In case your system is a clean Ubuntu 20.04, you can run the following commands as root/superuser to install all required dependencies:
@@ -69,7 +69,7 @@ apt-get update && apt-get install -y --no-install-recommends \
echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/cuda.list && \
echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list
-# The following cuda libraries are required to compile igibson
+# The following cuda libraries are required to compile omnigibson
apt-get update && apt-get update && apt-get install -y --no-install-recommends \
nvidia-headless-470 \
cuda-cudart-11-1=11.1.74-1 \
@@ -77,7 +77,7 @@ apt-get update && apt-get update && apt-get install -y --no-install-recommends \
cuda-command-line-tools-11-1=11.1.1-1 \
cuda-libraries-dev-11-1=11.1.1-1 \
-# For building and running igibson
+# For building and running omnigibson
apt-get update && apt-get install -y --no-install-recommends \
cmake \
git \
@@ -98,7 +98,7 @@ apt-get update && apt-get install -y --no-install-recommends \
echo "deb https://developer.download.nvidia.com/compute/cuda/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/cuda.list && \
echo "deb https://developer.download.nvidia.com/compute/machine-learning/repos/ubuntu2004/x86_64 /" > /etc/apt/sources.list.d/nvidia-ml.list
-# The following cuda libraries are required to compile igibson
+# The following cuda libraries are required to compile omnigibson
apt-get update && apt-get update && apt-get install -y --no-install-recommends \
xserver-xorg-video-nvidia-470 \
cuda-cudart-11-1=11.1.74-1 \
@@ -106,7 +106,7 @@ apt-get update && apt-get update && apt-get install -y --no-install-recommends \
cuda-command-line-tools-11-1=11.1.1-1 \
cuda-libraries-dev-11-1=11.1.1-1 \
-# For building and running igibson
+# For building and running omnigibson
apt-get update && apt-get install -y --no-install-recommends \
cmake \
git \
@@ -119,7 +119,7 @@ apt-get update && apt-get install -y --no-install-recommends \
-By default, iGibson builds with CUDA support which requires that `nvcc` is on your path (or CUDA 11 is symlinked to `/usr/local/cuda` from `/usr/local/cuda-11.1`). Cmake uses `nvcc` to find the CUDA libraries and headers when building iGibson. Add the following to your shell rc (`.bashrc`, `.zshrc`, etc.) and re-login to your shell (`exec bash`, `exec zsh`, etc.):
+By default, OmniGibson builds with CUDA support, which requires that `nvcc` is on your path (or CUDA 11 is symlinked to `/usr/local/cuda` from `/usr/local/cuda-11.1`). CMake uses `nvcc` to find the CUDA libraries and headers when building OmniGibson. Add the following to your shell rc (`.bashrc`, `.zshrc`, etc.) and re-login to your shell (`exec bash`, `exec zsh`, etc.):
```bash
export PATH=/usr/local/cuda-11.1/bin:$PATH
```
@@ -134,73 +134,73 @@ Cuda compilation tools, release 11.1, V11.1.105
Build cuda_11.1.TC455_06.29190527_0
```
-In case you want to build without CUDA support (used for the "rendering to GPU tensor" feature), you will have to set `USE_CUDA` to `False` in `iGibson/igibson/render/CMakeLists.txt`.
+In case you want to build without CUDA support (used for the "rendering to GPU tensor" feature), you will have to set `USE_CUDA` to `False` in `OmniGibson/omnigibson/render/CMakeLists.txt`.
-### Installing iGibson
+### Installing OmniGibson
We provide 3 methods to install the simulator.
#### 1. pip
-iGibson's simulator can be installed as a python package using pip:
+OmniGibson's simulator can be installed as a python package using pip:
```bash
-pip install igibson # This step takes about 4 minutes
+pip install omnigibson # This step takes about 4 minutes
# run the demo
-python -m igibson.examples.environments.env_nonint_example
+python -m omnigibson.examples.environments.env_nonint_example
```
#### 2. Docker image
-Docker provides an easy way to reproduce the development environment across platforms without manually installing the software dependencies. We have prepared docker images that contain everything you need to get started with iGibson.
+Docker provides an easy way to reproduce the development environment across platforms without manually installing the software dependencies. We have prepared docker images that contain everything you need to get started with OmniGibson.
First, install Docker from the [official website](https://www.docker.com/). Please make sure that the docker version is at least v19.0 to enable native GPU support.
-Next, download our pre-built images with the script in the `iGibson` repo:
+Next, download our pre-built images with the script in the `OmniGibson` repo:
```
-cd iGibson
+cd OmniGibson
./docker/pull-images.sh
```
Two images will be downloaded:
-* `igibson/igibson:latest`: smaller image, but does not support GUI.
-* `igibson/igibson-gui:latest`: supports GUI and remote desktop access via VNC.
+* `omnigibson/omnigibson:latest`: smaller image, but does not support GUI.
+* `omnigibson/omnigibson-gui:latest`: supports GUI and remote desktop access via VNC.
We also provide scripts to build the images from scratch:
```
# image without GUI:
-cd iGibson/docker/base
+cd OmniGibson/docker/base
./build.sh
# image with GUI and VNC:
-cd iGibson/docker/headless-gui
+cd OmniGibson/docker/headless-gui
./build.sh
```
#### 3. Compile from source
-Alternatively, iGibson can be compiled from source: [iGibson GitHub Repo](https://github.com/StanfordVL/iGibson). First, you need to install anaconda following the guide on [their website](https://www.anaconda.com/).
+Alternatively, OmniGibson can be compiled from source: [OmniGibson GitHub Repo](https://github.com/StanfordVL/OmniGibson). First, you need to install anaconda following the guide on [their website](https://www.anaconda.com/).
```bash
-git clone https://github.com/StanfordVL/iGibson --recursive
-cd iGibson
+git clone https://github.com/StanfordVL/OmniGibson --recursive
+cd OmniGibson
# if you didn't create the conda environment before:
-conda create -y -n igibson python=3.8
-conda activate igibson
+conda create -y -n omnigibson python=3.8
+conda activate omnigibson
pip install -e . # This step takes about 4 minutes
```
-We recommend the third method if you plan to modify iGibson in your project. If you plan to use it as it is to train navigation and manipulation agents, the pip installation or docker image should meet your requirements.
+We recommend the third method if you plan to modify OmniGibson in your project. If you plan to use it as it is to train navigation and manipulation agents, the pip installation or docker image should meet your requirements.
Note: If you are not using conda, you will need the system packages python3-dev (header files to build Python extensions) and python3-opencv (provides opencv and its dependencies).
### The SVL pybullet fork
-To optimize and accelerate physics simulation, we use a custom version of pybullet in iGibson. This is installed automatically if you install iGibson in a fresh conda environment, but if you already have a regular pybullet, you should manually remove it and install our fork as follows (otherwise your 'pip install -e .' will fail):
+To optimize and accelerate physics simulation, we use a custom version of pybullet in OmniGibson. This is installed automatically if you install OmniGibson in a fresh conda environment, but if you already have a regular pybullet installed, you should manually remove it and install our fork as follows (otherwise `pip install -e .` will fail):
```bash
pip uninstall pybullet
@@ -211,59 +211,59 @@ pip install pybullet-svl
Once the environment has been installed, we need to download the assets to enable the simulation including models of the robotic agents, objects, 3D scenes, etc. This process requires three simple steps.
-First, we need to configure where the iGibson's assets are going to be stored. The desired path should be indicated in `your_installation_path/igibson/global_config.yaml`. The default place to store the data is:
+First, we need to configure where OmniGibson's assets are going to be stored. The desired path should be indicated in `your_installation_path/omnigibson/global_config.yaml`. The default place to store the data is:
```bash
-assets_path: your_installation_path/igibson/data/assets
-g_dataset_path: your_installation_path/igibson/data/g_dataset
-ig_dataset_path: your_installation_path/igibson/data/ig_dataset
-threedfront_dataset_path: your_installation_path/igibson/data/threedfront_dataset
-cubicasa_dataset_path: your_installation_path/igibson/data/assetscubicasa_dataset
+assets_path: your_installation_path/omnigibson/data/assets
+g_dataset_path: your_installation_path/omnigibson/data/g_dataset
+og_dataset_path: your_installation_path/omnigibson/data/og_dataset
+threedfront_dataset_path: your_installation_path/omnigibson/data/threedfront_dataset
+cubicasa_dataset_path: your_installation_path/omnigibson/data/cubicasa_dataset
```
In case you prefer to store the assets in a different location, you can run the command:
```bash
-python -m igibson.utils.assets_utils --change_data_path
+python -m omnigibson.utils.assets_utils --change_data_path
```
-Second, we need to download the robot models and some small objects from the assets bundle [here](https://storage.googleapis.com/gibson_scenes/assets_igibson.tar.gz) and unpack it in the assets folder. More easily, this process can be automatically done by executing the command:
+Second, we need to download the robot models and some small objects from the assets bundle [here](https://storage.googleapis.com/gibson_scenes/assets_omnigibson.tar.gz) and unpack it in the assets folder. More easily, this process can be done automatically by executing the command:
```bash
-python -m igibson.utils.assets_utils --download_assets
+python -m omnigibson.utils.assets_utils --download_assets
```
-Third, we need to download datasets of scenes (Gibson or iGibson), and, possibly, the BEHAVIOR Datasets of Object Models.
-For interactive tasks, you need to download iGibson 2.0 Scenes and the BEHAVIOR Dataset of Objects, or iGibson 1.0 Scenes. They include several fully interactive scenes and hundreds of 3D objects to use with our simulator.
+Third, we need to download datasets of scenes (Gibson or OmniGibson), and, possibly, the BEHAVIOR Datasets of Object Models.
+For interactive tasks, you need to download OmniGibson 2.0 Scenes and the BEHAVIOR Dataset of Objects, or OmniGibson 1.0 Scenes. They include several fully interactive scenes and hundreds of 3D objects to use with our simulator.
For navigation tasks, you could use the interactive scenes, but we also provide back-compatibility to the Gibson Dataset, Stanford 2D-3D-Semantics Dataset, and Matterport3D Dataset that include non-interactive scene models.
Follow the detailed instructions [here](dataset.md) to download the aforementioned datasets.
-Alternatively, to avoid downloading an entire dataset before you can test iGibson's functionalities, we provide a single [high quality small non-interactive scene (R's)](https://storage.googleapis.com/gibson_scenes/Rs.tar.gz) for demo and testing purposes.
+Alternatively, to avoid downloading an entire dataset before you can test OmniGibson's functionalities, we provide a single [high-quality, small non-interactive scene (Rs)](https://storage.googleapis.com/gibson_scenes/Rs.tar.gz) for demo and testing purposes.
To download this demo data, run:
```bash
-python -m igibson.utils.assets_utils --download_demo_data
+python -m omnigibson.utils.assets_utils --download_demo_data
```
## Examples
We provide multiple examples to get you started!
-Check the folder [igibson/examples](https://github.com/StanfordVL/iGibson/tree/master/igibson/examples) and the description [here](examples.md).
+Check the folder [omnigibson/examples](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/examples) and the description [here](examples.md).
After installing the code and downloading the demo data, you should be able to try out a simple robot navigation demo executing:
```bash
-python -m igibson.examples.environments.env_nonint_example
+python -m omnigibson.examples.environments.env_nonint_example
```
## Testing
-To test iGibson installation, you can run
+To test your OmniGibson installation, you can run
```bash
python
->> import igibson
+>>> import omnigibson
```
For a full suite of tests and benchmarks, you can refer to [tests](tests.md) for more details.
(For Mac users) Some tests will fail as they require an Nvidia GPU.
## Uninstalling
-Uninstalling iGibson is easy: `pip uninstall igibson`
+Uninstalling OmniGibson is easy: `pip uninstall omnigibson`
diff --git a/docs/intro.md b/docs/intro.md
index 9b931978d..770236f68 100644
--- a/docs/intro.md
+++ b/docs/intro.md
@@ -1,20 +1,20 @@
-# iGibson: the Interactive Gibson Environment
+# OmniGibson: the Interactive Gibson Environment
### Large Scale Interactive Simulation Environments for Robot Learning
-iGibson, the Interactive Gibson Environment, is a simulation environment providing fast visual rendering and physics simulation (based on Bullet).
+OmniGibson, the Interactive Gibson Environment, is a simulation environment providing fast visual rendering and physics simulation (based on Bullet).
It is packed with a dataset with hundreds of large 3D environments reconstructed from real homes and offices, and interactive objects that can be pushed and actuated.
-iGibson allows researchers to train and evaluate robotic agents that use RGB images and/or other visual sensors to solve indoor (interactive) navigation and mobile manipulation tasks such as opening doors, picking and placing objects, or searching for objects.
-With the latest extension, iGibson 2.0 supports new types of [object state changes](extended_states.md) (cook, soak, slice, freeze, etc), that can enable new types of simulated activities!
-iGibson implements all features required to evaluate AI solutions in the BEHAVIOR benchmark: [sampling logic activity descriptions](sampling.md), [checking logic states](extended_states.md), connecting to the BEHAVIOR dataset of 3D objects and evaluating BEHAVIOR metrics (information about the [dataset](https://stanfordvl.github.io/behavior/objects.html) and the [metrics](https://stanfordvl.github.io/behavior/metrics.html) can be found in the documentation of the BEHAVIOR repository).
+OmniGibson allows researchers to train and evaluate robotic agents that use RGB images and/or other visual sensors to solve indoor (interactive) navigation and mobile manipulation tasks such as opening doors, picking and placing objects, or searching for objects.
+With the latest extension, OmniGibson 2.0 supports new types of [object state changes](extended_states.md) (cook, soak, slice, freeze, etc), that can enable new types of simulated activities!
+OmniGibson implements all features required to evaluate AI solutions in the BEHAVIOR benchmark: [sampling logic activity descriptions](sampling.md), [checking logic states](extended_states.md), connecting to the BEHAVIOR dataset of 3D objects and evaluating BEHAVIOR metrics (information about the [dataset](https://stanfordvl.github.io/behavior/objects.html) and the [metrics](https://stanfordvl.github.io/behavior/metrics.html) can be found in the documentation of the BEHAVIOR repository).
### Citation
-If you use iGibson or its assets and models, consider citing the following publications:
+If you use OmniGibson or its assets and models, consider citing the following publications:
```
-@inproceedings{shen2021igibson,
- title={iGibson 1.0: a Simulation Environment for Interactive Tasks in Large Realistic Scenes},
+@inproceedings{shen2021omnigibson,
+ title={iGibson 1.0: a Simulation Environment for Interactive Tasks in Large Realistic Scenes},
author={Bokui Shen and Fei Xia and Chengshu Li and Roberto Mart\'in-Mart\'in and Linxi Fan and Guanzhi Wang and Claudia Pérez-D'Arpino and Shyamal Buch and Sanjana Srivastava and Lyne P. Tchapmi and Micael E. Tchapmi and Kent Vainio and Josiah Wong and Li Fei-Fei and Silvio Savarese},
booktitle={2021 IEEE/RSJ International Conference on Intelligent Robots and Systems},
year={2021},
@@ -24,8 +24,8 @@ If you use iGibson or its assets and models, consider citing the following publi
```
```
-@misc{li2021igibson,
- title={iGibson 2.0: Object-Centric Simulation for Robot Learning of Everyday Household Tasks},
+@misc{li2021omnigibson,
+ title={iGibson 2.0: Object-Centric Simulation for Robot Learning of Everyday Household Tasks},
author={Chengshu Li and Fei Xia and Roberto Martín-Martín and Michael Lingelbach and Sanjana Srivastava and Bokui Shen and Kent Vainio and Cem Gokmen and Gokul Dharan and Tanish Jain and Andrey Kurenkov and Karen Liu and Hyowon Gweon and Jiajun Wu and Li Fei-Fei and Silvio Savarese},
year={2021},
eprint={2108.03272},
@@ -35,9 +35,9 @@ If you use iGibson or its assets and models, consider citing the following publi
```
### Code Release
-The GitHub repository of iGibson can be found [here](https://github.com/StanfordVL/iGibson). Bug reports, suggestions for improvement, as well as community developments are encouraged and appreciated.
+The GitHub repository of OmniGibson can be found [here](https://github.com/StanfordVL/OmniGibson). Bug reports, suggestions for improvement, as well as community developments are encouraged and appreciated.
### Documentation
-The documentation for iGibson can be found [here](http://svl.stanford.edu/igibson/docs/). It includes installation guide (including data download instructions), quickstart guide, code examples, and APIs.
+The documentation for OmniGibson can be found [here](http://svl.stanford.edu/omnigibson/docs/). It includes installation guide (including data download instructions), quickstart guide, code examples, and APIs.
-If you want to know more about iGibson, you can also check out [our webpage](http://svl.stanford.edu/igibson), the [iGibson 2.0 arxiv preprint](https://arxiv.org/abs/2108.03272) and the [iGibson 1.0 arxiv preprint](https://arxiv.org/abs/2012.02924).
+If you want to know more about OmniGibson, you can also check out [our webpage](http://svl.stanford.edu/omnigibson), the [OmniGibson 2.0 arxiv preprint](https://arxiv.org/abs/2108.03272) and the [OmniGibson 1.0 arxiv preprint](https://arxiv.org/abs/2012.02924).
diff --git a/docs/issues.md b/docs/issues.md
index 80754cae2..0a8d8181b 100644
--- a/docs/issues.md
+++ b/docs/issues.md
@@ -7,7 +7,7 @@
2. Is libegl1 installed? You can determine this by `apt list --installed | grep libegl1`
3. Are openGL libraries visible? You can do so by
`export LD_LIBRARY_PATH=/usr/lib/nvidia-:$LD_LIBRARY_PATH`
-4. There are two ways of setting up openGL library, if the current installation doesn't work, you can try to install with USE_GLAD set to FALSE in [here](https://github.com/StanfordVL/iGibson/blob/master/igibson/render/CMakeLists.txt)
+4. There are two ways of setting up openGL library, if the current installation doesn't work, you can try to install with USE_GLAD set to FALSE in [here](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/render/CMakeLists.txt)
5. If you want to render in headless mode, make sure the `$DISPLAY` environment variable is unset; otherwise you might get the error `Failed to EGL with glad`, because EGL is sensitive to the `$DISPLAY` environment variable.
It is a good idea to run `ldconfig -p | grep EGL` and you should be able to see `libEGL_nvidia` installed.
@@ -27,4 +27,4 @@ Make sure EGL is not linked to mesa, because in order for gibson to work, linkin
The EGL setup part is borrowed from Erwin Coumans [egl_example](https://github.com/erwincoumans/egl_example). It would be informative to see if that repository can run on your machine.
### Other issues
-For other issues, please submit an issue in our [github repository](https://github.com/StanfordVL/iGibson/issues).
+For other issues, please submit an issue in our [github repository](https://github.com/StanfordVL/OmniGibson/issues).
diff --git a/docs/learning_framework.md b/docs/learning_framework.md
index 0c6e9d245..01e8f7dde 100644
--- a/docs/learning_framework.md
+++ b/docs/learning_framework.md
@@ -2,13 +2,13 @@
### Overview
-iGibson can be used with any learning framework that accommodates OpenAI gym interface. Feel free to use your favorite ones.
+OmniGibson can be used with any learning framework that accommodates the OpenAI Gym interface. Feel free to use your favorite one.
### Examples
#### TF-Agents
-In this example, we show an environment wrapper of [TF-Agents](https://github.com/tensorflow/agents) for iGibson and an example training code for [SAC agent](https://arxiv.org/abs/1801.01290). The code can be found in [our fork of TF-Agents](https://github.com/StanfordVL/agents/): [agents/blob/igibson/tf_agents/environments/suite_gibson.py](https://github.com/StanfordVL/agents/blob/igibson/tf_agents/environments/suite_gibson.py) and [agents/blob/igibson/tf_agents/agents/sac/examples/v1/train_single_env.sh](https://github.com/StanfordVL/agents/blob/igibson/tf_agents/agents/sac/examples/v1/train_single_env.sh).
+In this example, we show an environment wrapper of [TF-Agents](https://github.com/tensorflow/agents) for OmniGibson and example training code for a [SAC agent](https://arxiv.org/abs/1801.01290). The code can be found in [our fork of TF-Agents](https://github.com/StanfordVL/agents/): [agents/blob/omnigibson/tf_agents/environments/suite_gibson.py](https://github.com/StanfordVL/agents/blob/omnigibson/tf_agents/environments/suite_gibson.py) and [agents/blob/omnigibson/tf_agents/agents/sac/examples/v1/train_single_env.sh](https://github.com/StanfordVL/agents/blob/omnigibson/tf_agents/agents/sac/examples/v1/train_single_env.sh).
```python
def load(config_file,
@@ -20,8 +20,8 @@ def load(config_file,
gym_env_wrappers=(),
env_wrappers=(),
spec_dtype_map=None):
- config_file = os.path.join(os.path.dirname(igibson.__file__), config_file)
- env = iGibsonEnv(config_file=config_file,
+ config_file = os.path.join(os.path.dirname(omnigibson.__file__), config_file)
+ env = OmniGibsonEnv(config_file=config_file,
scene_id=model_id,
mode=env_mode,
action_timestep=action_timestep,
diff --git a/docs/objects.md b/docs/objects.md
index f434b129c..c29eaad43 100644
--- a/docs/objects.md
+++ b/docs/objects.md
@@ -12,15 +12,15 @@ We provide a wide variety of **Objects** that can be imported into the **Simulat
- `Cube`
- `VisualMarker`
-Typically, they take in the name or the path of an object (in `igibson.assets_path`) and provide a `load` function that be invoked externally (usually by `import_object` and `import_object` of `Simulator`). The `load` function imports the object into PyBullet. Some **Objects** (e.g. `ArticulatedObject`) also provide APIs to get and set the object pose.
+Typically, they take in the name or the path of an object (in `omnigibson.assets_path`) and provide a `load` function that can be invoked externally (usually by `import_object` of `Simulator`). The `load` function imports the object into PyBullet. Some **Objects** (e.g. `ArticulatedObject`) also provide APIs to get and set the object pose.
-Most of the code can be found here: [igibson/objects](https://github.com/StanfordVL/iGibson/blob/master/igibson/objects).
+Most of the code can be found here: [omnigibson/objects](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/objects).
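+As a short, hedged sketch of that flow (the specific YCB model name is only an illustrative choice), an object can be created and handed to the **Simulator**, which invokes its `load` function:
+
+```python
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.scenes.empty_scene import EmptyScene
+from omnigibson.simulator import Simulator
+
+s = Simulator(mode="headless")
+s.import_scene(EmptyScene())
+
+# import_object calls the object's `load` function and adds it to PyBullet and the renderer.
+obj = YCBObject("003_cracker_box")
+s.import_object(obj)
+obj.set_position([0, 0, 0.5])
+
+for _ in range(100):
+    s.step()
+s.disconnect()
+```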
### BEHAVIOR Dataset of Objects
We use a new dataset of everyday objects, the BEHAVIOR Dataset of Objects. In total, we curate 1217 object models across 391 object categories, to support 100 BEHAVIOR activities. The categories range from food items to tableware, from home decorations to office supplies, and from apparel to cleaning tools.
-To maintain high visual realism, all object models include material information (metallic, roughness) that can be rendered by iGibson 2.0 renderer. To maintain high physics realism, object models are annotated with size, mass, center of mass, moment of inertia, and also stable orientations. The collision mesh is a simplified version of the visual mesh, obtained with a convex decomposition using the VHACD algorithm. Object models with a shape close to a box are annotated with a primitive box collision mesh, much more efficient and robust for collision checking.
+To maintain high visual realism, all object models include material information (metallic, roughness) that can be rendered by the OmniGibson 2.0 renderer. To maintain high physics realism, object models are annotated with size, mass, center of mass, moment of inertia, and stable orientations. The collision mesh is a simplified version of the visual mesh, obtained with a convex decomposition using the VHACD algorithm. Object models with a shape close to a box are annotated with a primitive box collision mesh, which is much more efficient and robust for collision checking.
All models in the BEHAVIOR Dataset are organized following the WordNet hierarchy, associating them with synsets. This structure allows us to define properties for all models of the same category, but it also facilitates more general sampling of activity instances fulfilling initial conditions such as onTop(fruit, table), which can be achieved using any model within the fruit branch of WordNet.
@@ -35,7 +35,7 @@ OBJECT_NAME
│
└───shape
│ └───visual
-│ │ │ # Directory containing visual meshes (vm) of the object. Used for iGibson's rendering. Encrypted
+│ │ │ # Directory containing visual meshes (vm) of the object. Used for OmniGibson's rendering. Encrypted
│ │ │ # All objs are UV mapped onto the same texture, linked by default.mtl. All faces are triangles.
│ │ │ vm1.encrypted.obj
│ │ │ vm2.encrypted.obj
@@ -43,7 +43,7 @@ OBJECT_NAME
│ │ │ default.mtl (links the geometry to the texture files)
│ │
│ └───collision
-│ │ │ # Directory containing collision meshes (cm) of the objects. Used for iGibson's physics simulation.
+│ │ │ # Directory containing collision meshes (cm) of the objects. Used for OmniGibson's physics simulation.
│ │ │ # Each obj represents a unique link of the object.
│ │ │ # For example, link_1_cm.obj represents the collision mesh of link_1.
│ │ │ cm1.obj
@@ -84,14 +84,14 @@ OBJECT_NAME
-### Adding other objects to iGibson
-We provide detailed instructions and scripts to import your own objects (non-articulated) into iGibson.
+### Adding other objects to OmniGibson
+We provide detailed instructions and scripts to import your own objects (non-articulated) into OmniGibson.
-Instruction can be found here: [External Objects](https://github.com/StanfordVL/iGibson/blob/master/igibson/utils/data_utils/ext_object).
+Instructions can be found here: [External Objects](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/utils/data_utils/ext_object).
### Examples
-In this example, we import a few objects into iGibson. The code can be found here: [igibson/examples/objects/load_objects.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/objects/load_objects.py).
+In this example, we import a few objects into OmniGibson. The code can be found here: [omnigibson/examples/objects/load_objects.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/objects/load_objects.py).
```python
import logging
@@ -101,18 +101,18 @@ from sys import platform
import numpy as np
import yaml
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
-from igibson.objects.usd_object import URDFObject
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.robots.turtlebot import Turtlebot
-from igibson.scenes.empty_scene import EmptyScene
-from igibson.scenes.gibson_indoor_scene import StaticIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.asset_utils import get_ig_avg_category_specs, get_ig_category_path, get_ig_model_path
-from igibson.utils.object_state_utils import let_user_pick, parse_config
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+from omnigibson.objects.usd_object import URDFObject
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.robots.turtlebot import Turtlebot
+from omnigibson.scenes.empty_scene import EmptyScene
+from omnigibson.scenes.gibson_indoor_scene import StaticIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.asset_utils import get_og_avg_category_specs, get_og_category_path, get_og_model_path
+from omnigibson.utils.object_state_utils import let_user_pick, parse_config
def main():
@@ -129,7 +129,7 @@ def main():
type_of_scene = let_user_pick(scene_options) - 1
if type_of_scene == 0: # Empty
- config = parse_config(os.path.join(igibson.example_config_path, "turtlebot_static_nav.yaml"))
+ config = parse_config(os.path.join(omnigibson.example_config_path, "turtlebot_static_nav.yaml"))
settings = MeshRendererSettings(enable_shadow=False, msaa=False, texture_scale=0.5)
s = Simulator(mode="gui_interactive", image_width=512, image_height=512, rendering_settings=settings)
scene = EmptyScene(render_floor_plane=True, floor_plane_rgba=[0.6, 0.6, 0.6, 1])
@@ -141,7 +141,7 @@ def main():
s.import_object(turtlebot)
elif type_of_scene == 1: # iG
- config_filename = os.path.join(igibson.example_config_path, "turtlebot_nav.yaml")
+ config_filename = os.path.join(omnigibson.example_config_path, "turtlebot_nav.yaml")
config_data = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
config_data["load_object_categories"] = [] # Uncomment this line to accelerate loading with only the building
config_data["visible_target"] = False
@@ -149,11 +149,11 @@ def main():
# Reduce texture scale for Mac.
if platform == "darwin":
config_data["texture_scale"] = 0.5
- env = iGibsonEnv(config_file=config_data, mode="gui_interactive")
+ env = OmniGibsonEnv(config_file=config_data, mode="gui_interactive")
s = env.simulator
elif type_of_scene == 2: # Gibson
- config = parse_config(os.path.join(igibson.example_config_path, "turtlebot_static_nav.yaml"))
+ config = parse_config(os.path.join(omnigibson.example_config_path, "turtlebot_static_nav.yaml"))
settings = MeshRendererSettings(enable_shadow=False, msaa=False)
# Reduce texture scale for Mac.
if platform == "darwin":
@@ -188,7 +188,7 @@ def main():
}
# Load the specs of the object categories, e.g., common scaling factor
- avg_category_spec = get_ig_avg_category_specs()
+ avg_category_spec = get_og_avg_category_specs()
scene_objects = {}
try:
@@ -200,7 +200,7 @@ def main():
scene_objects[category] = 1
# Get the path for all models of this category
- category_path = get_ig_category_path(category)
+ category_path = get_og_category_path(category)
# If the specific model is given, we use it. If not, we select one randomly
if "model" in obj:
@@ -209,7 +209,7 @@ def main():
model = np.random.choice(os.listdir(category_path))
# Create the full path combining the path for all models and the name of the model
- model_path = get_ig_model_path(category, model)
+ model_path = get_og_model_path(category, model)
filename = os.path.join(model_path, model + ".urdf")
# Create a unique name for the object instance
diff --git a/docs/overview.md b/docs/overview.md
index 182921208..f1c6c8669 100644
--- a/docs/overview.md
+++ b/docs/overview.md
@@ -1,16 +1,16 @@
# Overview of Modules
-Next, we will give an overview of iGibson and briefly explain the different modules in our system.
+Next, we will give an overview of OmniGibson and briefly explain the different modules in our system.
![quickstart.png](images/overview.png)
First of all, we have **Datasets** and **Assets**.
-**Datasets** include the 3D reconstructed real-world environments (iGibson Dataset), and 3D models of objects (BEHAVIOR Dataset of Objects).
+**Datasets** include the 3D reconstructed real-world environments (OmniGibson Dataset), and 3D models of objects (BEHAVIOR Dataset of Objects).
**Assets** contain models of robots and some additional 3D objects.
The download guide for the assets can be found [here](installation.html#downloading-the-assets).
More info can be found in the sections [Datasets](dataset.md) and [Assets](assets.md).
Next, we have **Renderer** and **PhysicsEngine**.
-These are the two pillars that ensure the visual and physics fidelity of iGibson.
+These are the two pillars that ensure the visual and physics fidelity of OmniGibson.
We developed our own renderer that supports customizable camera configuration, physics-based rendering (PBR) and various image modalities, and renders at lightning speed.
We use the open-sourced [PyBullet](http://www.pybullet.org/) as our underlying physics engine.
It can simulate rigid body collision and joint actuation for robots and articulated objects in an accurate and efficient manner.
@@ -18,9 +18,9 @@ Since we are using MeshRenderer for rendering and PyBullet for physics simulatio
Our code has already handled this for you. More info can be found here: [Renderer](renderer.md) and [PhysicsEngine](physics_engine.md).
Furthermore, we have **Scene**, **Object**, **Robot**, and **Simulator**.
-**Scene** loads 3D scene meshes from `igibson.g_dataset_path, igibson.ig_dataset_path` and loads/holds the list of objects associated with an interactive scene.
-**Object** loads interactable objects from `igibson.assets_path`.
-**Robot** loads robots from `igibson.assets_path`.
+**Scene** loads 3D scene meshes from `omnigibson.g_dataset_path, omnigibson.og_dataset_path` and loads/holds the list of objects associated with an interactive scene.
+**Object** loads interactable objects from `omnigibson.assets_path`.
+**Robot** loads robots from `omnigibson.assets_path`.
**Simulator** maintains an instance of **Renderer** and **PhysicsEngine** and provides APIs to import **Scene**, **Object** and **Robot** into both of them and keep them synchronized at all times.
More info can be found here: [Scene](./scenes.md), [Object](./objects.md), [Robot](./robots.md), and [Simulator](simulators.md).
@@ -32,7 +32,7 @@ It also provides task-specific reset functions and task-relevant observation def
More info can be found here: [Environment](environments.md).
Finally, any learning framework (e.g. RL, IL) or planning and control framework (e.g. ROS) can be used with **Environment** as long as they accommodate the OpenAI gym interface.
-We provide tight integration with **ROS** that allows for evaluation and visualization of, say, ROS Navigation Stack, in iGibson.
+We provide tight integration with **ROS** that allows for evaluation and visualization of, say, ROS Navigation Stack, in OmniGibson.
More info can be found here: [Learning Framework](learning_framework.md) and [ROS](ros_integration.md).
We highly recommend you go through each of the Modules below for more details and code examples.
diff --git a/docs/physics_engine.md b/docs/physics_engine.md
index c417c092a..42cb4140c 100644
--- a/docs/physics_engine.md
+++ b/docs/physics_engine.md
@@ -12,8 +12,8 @@ In this example, we import a scene, a robot and an object into PyBullet and step
```python
-from igibson.utils.asset_utils import get_scene_path, get_texture_file
-import igibson
+from omnigibson.utils.asset_utils import get_scene_path, get_texture_file
+import omnigibson
import os
import sys
@@ -45,12 +45,12 @@ def main():
baseVisualShapeIndex=visual_id)
# Load robots
- turtlebot_urdf = os.path.join(igibson.assets_path, 'models/turtlebot/turtlebot.urdf')
+ turtlebot_urdf = os.path.join(omnigibson.assets_path, 'models/turtlebot/turtlebot.urdf')
robot_id = p.loadURDF(turtlebot_urdf, flags=p.URDF_USE_MATERIAL_COLORS_FROM_MTL)
# Load objects
- obj_visual_filename = os.path.join(igibson.assets_path, 'models/ycb/002_master_chef_can/textured_simple.obj')
- obj_collision_filename = os.path.join(igibson.assets_path,
+ obj_visual_filename = os.path.join(omnigibson.assets_path, 'models/ycb/002_master_chef_can/textured_simple.obj')
+ obj_collision_filename = os.path.join(omnigibson.assets_path,
'models/ycb/002_master_chef_can/textured_simple_vhacd.obj')
collision_id = p.createCollisionShape(p.GEOM_MESH,
fileName=obj_collision_filename,
diff --git a/docs/projects.md b/docs/projects.md
index 3f8bef046..b32df6149 100644
--- a/docs/projects.md
+++ b/docs/projects.md
@@ -1,7 +1,7 @@
-Projects using Gibson/iGibson
+Projects using Gibson/OmniGibson
===================================
-It is exciting to see people using Gibson Environment in embodied AI research. Here is a list of projects using Gibson v1 or iGibson:
+It is exciting to see people using Gibson Environment in embodied AI research. Here is a list of projects using Gibson v1 or OmniGibson:
- K. Chen, J. P. de Vicente, G. Sepulveda, F. Xia, A. Soto, M. Vazquez, and S. Savarese. [A behavioral approach to visual navigation with graph localization networks](https://arxiv.org/pdf/1903.00445.pdf). In RSS, 2019.
- Hirose, Noriaki, et al. [Deep Visual MPC-Policy Learning for Navigation.](https://arxiv.org/pdf/1903.02749.pdf) arXiv preprint arXiv:1903.02749 (2019). IROS 2019.
@@ -30,11 +30,11 @@ These papers tested policies trained in Gibson v1 on real robots in the physical
- Hirose, Noriaki, et al. [Deep Visual MPC-Policy Learning for Navigation.](https://arxiv.org/pdf/1903.02749.pdf) arXiv preprint arXiv:1903.02749 (2019). IROS 2019.
-If you use Gibson, iGibson or their assets, please consider citing the following papers for iGibson, the Interactive Gibson Environment:
+If you use Gibson, OmniGibson or their assets, please consider citing the following papers for OmniGibson, the Interactive Gibson Environment:
```
-@article{shenigibson,
- title={iGibson, a Simulation Environment for Interactive Tasks in Large Realistic Scenes},
+@article{shenomnigibson,
+ title={iGibson, a Simulation Environment for Interactive Tasks in Large Realistic Scenes},
author={Shen*, Bokui and Xia*, Fei and Li*, Chengshu and Mart{\'i}n-Mart{\'i}n*, Roberto and Fan, Linxi and Wang, Guanzhi and Buch, Shyamal and D’Arpino, Claudia and Srivastava, Sanjana and Tchapmi, Lyne P and Vainio, Kent and Fei-Fei, Li and Savarese, Silvio},
journal={arXiv preprint arXiv:2012.02924},
year={2020}
diff --git a/docs/quickstart.md b/docs/quickstart.md
index c4f937ecd..7e314499d 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -1,13 +1,13 @@
# Quickstart
-## iGibson in Action
-Assume you finished installation and assets downloading. Let's get our hands dirty and see iGibson in action.
+## OmniGibson in Action
+Assuming you have finished the installation and downloaded the assets, let's get our hands dirty and see OmniGibson in action.
```bash
-python -m igibson.examples.environments.env_nonint_example
+python -m omnigibson.examples.environments.env_nonint_example
```
-If the execution fails with segfault 11, you may need to reduce texture scaling in the config file (igibson/configs/turtlebot_static_nav.yaml) to avoid out-of-memory error.
+If the execution fails with segfault 11, you may need to reduce the texture scaling in the config file (omnigibson/configs/turtlebot_static_nav.yaml) to avoid an out-of-memory error.
You should see something like this. If you are on Mac OS X, you will only see the two small windows.
![quickstart.png](images/quickstart.png)
@@ -22,11 +22,11 @@ That's it!
## Using Docker and remote GUI access via VNC
-If you go the docker route, please first pull our pre-built images (see the installation guide). After downloading, run `docker images`, and you should see `igibson/igibson:latest` and `igibson/igibson-gui:latest`.
+If you go the docker route, please first pull our pre-built images (see the installation guide). After downloading, run `docker images`, and you should see `omnigibson/omnigibson:latest` and `omnigibson/omnigibson-gui:latest`.
On a headless server (such as a Google Cloud or AWS instance), run
```
-cd iGibson
+cd OmniGibson
./docker/headless-gui/run.sh
# run a GUI example after the container command line prompt shows:
python simulator_example.py
@@ -52,12 +52,12 @@ python benchmark.py
## Benchmarks
-Performance is a big designing focus for iGibson. We provide a few scripts to benchmark the rendering and physics
-simulation framerate in iGibson.
+Performance is a major design focus for OmniGibson. We provide a few scripts to benchmark the rendering and physics
+simulation framerate in OmniGibson.
### Benchmark static scene (Gibson scenes)
```bash
-python -m igibson.test.benchmark.benchmark_static_scene
+python -m omnigibson.test.benchmark.benchmark_static_scene
```
You will see output similar to:
@@ -74,10 +74,10 @@ Rendering normal, resolution 512, render_to_tensor False: 265.70666134193806 fps
```
-### Benchmark physics simulation in interactive scenes (iGibson scene)
+### Benchmark physics simulation in interactive scenes (OmniGibson scene)
```bash
-python -m igibson.test.benchmark.benchmark_interactive_scene
+python -m omnigibson.test.benchmark.benchmark_interactive_scene
```
It will generate a report like below:
@@ -87,10 +87,10 @@ It will generate a report like below:
### Benchmark rendering in interactive scenes
-To run a comprehensive benchmark for all rendering in all iGibson scenes, you can excute the following command:
+To run a comprehensive benchmark for all rendering in all OmniGibson scenes, you can execute the following command:
```bash
-python -m igibson.test.benchmark.benchmark_interactive_scene_rendering
+python -m omnigibson.test.benchmark.benchmark_interactive_scene_rendering
```
It benchmarks two use cases, one for training visual RL agents (low resolution, shadow mapping off), another one for
diff --git a/docs/renderer.md b/docs/renderer.md
index ec6ce23e7..27baa6b94 100644
--- a/docs/renderer.md
+++ b/docs/renderer.md
@@ -2,13 +2,13 @@
### Overview
-We developed our own MeshRenderer that supports customizable camera configuration and various image modalities, and renders at a lightening speed. Specifically, you can specify image width, height and vertical field of view in the constructor of `class MeshRenderer`. Then you can call `renderer.render(modes=('rgb', 'normal', 'seg', '3d', 'optical_flow', 'scene_flow'))` to retrieve the images. Currently we support six different image modalities: RGB, surface normal, segmentation, 3D point cloud (z-channel can be extracted as depth map), optical flow, and scene flow. We also support two types of LiDAR sensors: 1-beam and 16-beam (like Velodyne VLP-16). Most of the code can be found in [igibson/render](https://github.com/StanfordVL/iGibson/tree/master/igibson/render).
+We developed our own MeshRenderer that supports customizable camera configuration and various image modalities, and renders at lightning speed. Specifically, you can specify image width, height and vertical field of view in the constructor of `class MeshRenderer`. Then you can call `renderer.render(modes=('rgb', 'normal', 'seg', '3d', 'optical_flow', 'scene_flow'))` to retrieve the images. Currently we support six different image modalities: RGB, surface normal, segmentation, 3D point cloud (z-channel can be extracted as depth map), optical flow, and scene flow. We also support two types of LiDAR sensors: 1-beam and 16-beam (like Velodyne VLP-16). Most of the code can be found in [omnigibson/render](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/render).
### Examples
#### Simple Example
-In this example, we render an iGibson scene with a few lines of code. The code can be found in [igibson/examples/renderer/mesh_renderer_simple_example.py ](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/renderer/mesh_renderer_simple_example.py).
+In this example, we render an OmniGibson scene with a few lines of code. The code can be found in [omnigibson/examples/renderer/mesh_renderer_simple_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/renderer/mesh_renderer_simple_example.py).
```
import logging
@@ -18,8 +18,8 @@ import sys
import cv2
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
-from igibson.utils.assets_utils import get_scene_path
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
+from omnigibson.utils.assets_utils import get_scene_path
def main():
@@ -66,9 +66,9 @@ For `Rs` scene, the rendering results will look like this:
In this example, we show an interactive demo of MeshRenderer.
```bash
-python -m igibson.examples.renderer.mesh_renderer_example
+python -m omnigibson.examples.renderer.mesh_renderer_example
```
-You may translate the camera by pressing "WASD" on your keyboard and rotate the camera by dragging your mouse. Press `Q` to exit the rendering loop. The code can be found in [igibson/examples/renderer/mesh_renderer_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/renderer/mesh_renderer_example.py).
+You may translate the camera by pressing "WASD" on your keyboard and rotate the camera by dragging your mouse. Press `Q` to exit the rendering loop. The code can be found in [omnigibson/examples/renderer/mesh_renderer_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/renderer/mesh_renderer_example.py).
#### PBR (Physics-Based Rendering) Example
@@ -77,7 +77,7 @@ You can test the physically based renderer with the PBR demo. You can render any
obj files in the folder.
```bash
-python -m igibson.examples.renderer.mesh_renderer_example_pbr /objects/sink/sink_1/shape/visual
+python -m omnigibson.examples.renderer.mesh_renderer_example_pbr /objects/sink/sink_1/shape/visual
```
![pbr_renderer.png](images/pbr_render.png)
@@ -86,14 +86,14 @@ You will get a nice rendering of the sink, and should see the metal parts have s
#### Velodyne VLP-16 Example
-In this example, we show a demo of 16-beam Velodyne VLP-16 LiDAR placed on top of a virtual Turtlebot. The code can be found in [igibson/examples/observations/generate_lidar_velodyne.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/observations/generate_lidar_velodyne.py).
+In this example, we show a demo of a 16-beam Velodyne VLP-16 LiDAR placed on top of a virtual Turtlebot. The code can be found in [omnigibson/examples/observations/generate_lidar_velodyne.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/observations/generate_lidar_velodyne.py).
The Velodyne VLP-16 LiDAR visualization will look like this:
![lidar_velodyne.png](images/lidar_velodyne.png)
#### Render to PyTorch Tensors
-In this example, we show that MeshRenderer can directly render into a PyTorch tensor to maximize efficiency. PyTorch installation is required (otherwise, iGibson does not depend on PyTorch). The code can be found in [igibson/examples/renderer/mesh_renderer_gpu_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/renderer/mesh_renderer_gpu_example.py).
+In this example, we show that MeshRenderer can directly render into a PyTorch tensor to maximize efficiency. A PyTorch installation is required for this example (OmniGibson does not otherwise depend on PyTorch). The code can be found in [omnigibson/examples/renderer/mesh_renderer_gpu_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/renderer/mesh_renderer_gpu_example.py).
#### About the 3D Image
@@ -136,4 +136,4 @@ seg = (seg[:, :, 0:1] * MAX_CLASS_COUNT).astype(np.int32)
ins_seg = (ins_seg[:, :, 0:1] * MAX_INSTANCE_COUNT).astype(np.int32)
```
-This transformation is directly performed if the segmentation is accessed through a `VisionSensor` (e.g., as part of the iGibsonEnv) using the method `get_seg` and `get_ins_seg`.
+This transformation is performed automatically if the segmentation is accessed through a `VisionSensor` (e.g., as part of the OmniGibsonEnv) using the methods `get_seg` and `get_ins_seg`.
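As a concrete illustration of the transformation above, here is a small sketch that recovers integer IDs from the normalized float images returned by the renderer. It assumes the renderer exposes an `ins_seg` mode and that `MAX_CLASS_COUNT`/`MAX_INSTANCE_COUNT` live in `omnigibson.utils.constants`; both are assumptions, so adapt the imports to your checkout.

```python
import numpy as np

from omnigibson.utils.constants import MAX_CLASS_COUNT, MAX_INSTANCE_COUNT  # assumed location

# `renderer` is a MeshRenderer with a scene already loaded (see the simple
# example earlier on this page).
seg_f, ins_seg_f = renderer.render(modes=("seg", "ins_seg"))

# The renderer returns values normalized to [0, 1]; scale them back to IDs.
seg = (seg_f[:, :, 0:1] * MAX_CLASS_COUNT).astype(np.int32)
ins_seg = (ins_seg_f[:, :, 0:1] * MAX_INSTANCE_COUNT).astype(np.int32)
```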
diff --git a/docs/robots.md b/docs/robots.md
index 9a7dcd5ef..e82986e33 100644
--- a/docs/robots.md
+++ b/docs/robots.md
@@ -1,11 +1,11 @@
# Robots
## Overview
-We provide a wide variety of **Robots** that can be imported in iGibson.
+We provide a wide variety of **Robots** that can be imported in OmniGibson.
To quickly see all of them, you can play the example:
```
-python -m igibson.examples.robots.all_robots_visualizer
+python -m omnigibson.examples.robots.all_robots_visualizer
```
Below, we list the fully-supported robots:
@@ -20,7 +20,7 @@ Below, we list the fully-supported robots:
| JackRabbot | `TwoWheeledRobot` `ManipulationRobot` | 2 & 7 | [Stanford Project Link](http://cvgl.stanford.edu/projects/jackrabbot/) | Base: {Torque, Velocity, Position, Differential Drive} Arm: {Torque, Velocity, Position, Inverse Kinematics} |
| LocoBot | `TwoWheeledRobot` | 2 | [ROS](http://wiki.ros.org/locobot), [Manufacturer](https://www.trossenrobotics.com/locobot-pyrobot-ros-rover.aspx) | Base: {Torque, Velocity, Position, Differential Drive} |
-Typically, these robot classes take in the URDF file or MuJoCo XML file of an robot (in `igibson.assets_path`) and provide a `load` function that be invoked externally (usually by `import_object` of `Simulator`). The `load` function imports the robot into PyBullet.
+Typically, these robot classes take in the URDF file or MuJoCo XML file of a robot (in `omnigibson.assets_path`) and provide a `load` function that can be invoked externally (usually by `import_object` of `Simulator`). The `load` function imports the robot into PyBullet.
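As a minimal sketch of that flow, the snippet below instantiates a robot and lets the `Simulator` invoke its `load` function via `import_object`; the simulator mode and step count are illustrative assumptions, and the full, authoritative example lives on the Simulator page.

```python
from omnigibson.robots.turtlebot import Turtlebot
from omnigibson.scenes.stadium_scene import StadiumScene
from omnigibson.simulator import Simulator

s = Simulator(mode="headless")  # mode name is an assumption
scene = StadiumScene()
s.import_scene(scene)           # calls scene.load under the hood

robot = Turtlebot()
s.import_object(robot)          # calls robot.load under the hood

for _ in range(100):
    s.step()
s.disconnect()
```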
### Robot Class Hierarchy
All robot classes inherit from `BaseRobot`, which provides the core interface for all Robot classes. From `BaseRobot`, there are additional abstract subclasses from which a robot can inherit from:
@@ -37,20 +37,20 @@ Note that because these abstract classes describe different aspects of a robot's
Each abstract robot class implements useful functions for controlling and accessing robot properties. For example, `ManipulationRobot` contains functionalities to query the state of the arms, and implements multiple grasping modes, including some simplified grasping modes like "sticky mitten" that could be used for researchers less interested on grasp-control and/or focused on task planning. For creating new robot classes to import custom robots, it is highly recommended to follow our robot hierarchy, to best leverage the features designed in our abstract classes.
-How are robot parameters specified? Each abstract robot class expects certain kwargs, which are optionally extended for specific robot classes. While default values (seen in each respective robot class) are loaded at runtime, these can also be easily overridden by specifying these kwargs in the constructor or in the config file that you pass into the iGibson environment constructor. The set of modifiable arguments and expected robot config structure for each robot can be found in [igibson/configs/robots](https://github.com/StanfordVL/iGibson/blob/master/igibson/configs/robots). For description of what each specific keyword argument corresponds to, please see the respective robot's class docstring.
+How are robot parameters specified? Each abstract robot class expects certain kwargs, which are optionally extended for specific robot classes. While default values (seen in each respective robot class) are loaded at runtime, they can also easily be overridden by specifying these kwargs in the constructor or in the config file that you pass into the OmniGibson environment constructor. The set of modifiable arguments and the expected robot config structure for each robot can be found in [omnigibson/configs/robots](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/configs/robots). For a description of what each specific keyword argument corresponds to, please see the respective robot's class docstring.
Our examples contain multiple demonstrations on how to specify, create and initialize robots, either through config files and the Environment API, or manually with the constructors.
### Robot Control
-iGibson v2.0 implements modular controllers that can be assigned to specific components of the robot. The controller classes can be found in [igibson/controllers](https://github.com/StanfordVL/iGibson/blob/master/igibson/controllers). They include very generic controllers, such as `JointController`, and some more morphology-specific controllers, such as `DifferentialDriveController` (designed to control a two-wheeled robot) and `InverseKinematicsController` (designed to control a robot arm in Cartesian space using an inverse kinematics solver to find the right arm configuration).
+OmniGibson v2.0 implements modular controllers that can be assigned to specific components of the robot. The controller classes can be found in [omnigibson/controllers](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/controllers). They include very generic controllers, such as `JointController`, and some more morphology-specific controllers, such as `DifferentialDriveController` (designed to control a two-wheeled robot) and `InverseKinematicsController` (designed to control a robot arm in Cartesian space using an inverse kinematics solver to find the right arm configuration).
Robots requests controllers of specific types based on the abstract classes they derive from. For example, a robot inheriting from `TwoWheeledRobot` requires loading a controller for the robot's `base`, and must be a `JointController` or `DifferentialDriveController`. A robot inheriting from `ManipulationRobot` requires loading a controller for each of the robot's `arm`s (`JointController` or `InverseKinematicsController`), and corresponding `gripper` (`JointController`, `MultiFingerGripperController`, `NullJointController`).
-How are controller parameters specified? Each abstract robot class implements default controller configurations for each supported controller, which are automatically loaded at runtime (you can see the default configs directly in the abstract class source code, e.g.: the `InverseKinematicsController` defaults in [manipulation_robot.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/robots/manipulation_robot.py)). However, you can easily override these defaults and set specific parameters in your config file that you pass into the iGibson environment constructor. The set of modifiable arguments and expected controller config structure for each controller can be found in [igibson/configs/controllers](https://github.com/StanfordVL/iGibson/blob/master/igibson/configs/controllers). For description of what each specific keyword argument corresponds to, please see the respective controller's class docstring.
+How are controller parameters specified? Each abstract robot class implements default controller configurations for each supported controller, which are automatically loaded at runtime (you can see the default configs directly in the abstract class source code, e.g., the `InverseKinematicsController` defaults in [manipulation_robot.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/robots/manipulation_robot.py)). However, you can easily override these defaults and set specific parameters in the config file that you pass into the OmniGibson environment constructor. The set of modifiable arguments and the expected controller config structure for each controller can be found in [omnigibson/configs/controllers](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/configs/controllers). For a description of what each specific keyword argument corresponds to, please see the respective controller's class docstring.
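To make the override mechanism concrete, here is a hedged sketch of overriding controller choices when constructing a robot directly in Python. The module path and the exact keyword names (`controller_config`, `name`, and the `arm_0`/`gripper_0` group keys) are assumptions patterned after the config files in omnigibson/configs/controllers; the class docstrings remain the authoritative reference.

```python
from omnigibson.robots.fetch import Fetch  # module path is an assumption

# Override only the entries we care about; anything unspecified falls back
# to the robot class's default controller configuration.
controller_config = {
    "arm_0": {
        "name": "InverseKinematicsController",
    },
    "gripper_0": {
        "name": "MultiFingerGripperController",
    },
}

robot = Fetch(controller_config=controller_config)
```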
We also include an example demo script showcasing controlling different robots. Please see [Robot Control Example](robots.md#robot-control-example) or run:
```
-python -m igibson.examples.robots.robot_control_example
+python -m omnigibson.examples.robots.robot_control_example
```
You can select the robot, the controllers, the input to the controllers (random or teleop with the keyboard) and the scene, and test them.
@@ -76,28 +76,28 @@ The reference frame of each body part is shown below.
## Examples
-We provide multiple examples showcasing our robots' functionality, described below. These examples, together with the provided config files, should help you getting started with all robots and controllers. All of these examples can be found in [igibson/examples/robots](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/robots)
+We provide multiple examples showcasing our robots' functionality, described below. These examples, together with the provided config files, should help you get started with all robots and controllers. All of these examples can be found in [omnigibson/examples/robots](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/robots).
### Visualizing Robots Example
-In [all_robots_visualizer.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/robots/all_robots_visualizer.py), we iterate over all of our supported robots, loading them into a scene and applying random actions for a few seconds. This demo allows you to visualize all the robots and their corresponding DOFs in our iGibson GUI.
+In [all_robots_visualizer.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/robots/all_robots_visualizer.py), we iterate over all of our supported robots, loading them into a scene and applying random actions for a few seconds. This demo allows you to visualize all the robots and their corresponding DOFs in our OmniGibson GUI.
### Inverse Kinematics Example
-In [ik_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/robots/ik_example.py), we showcase using pybullet's built-in Inverse Kinematics (IK) control in an interactive way. We load Fetch and a visual marker in the pybullet GUI. You can then move a visual marker in the GUI, and apply IK to cause Fetch's arm to converge towards the marker.
+In [ik_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/robots/ik_example.py), we showcase using pybullet's built-in Inverse Kinematics (IK) control in an interactive way. We load Fetch and a visual marker in the pybullet GUI. You can then move a visual marker in the GUI, and apply IK to cause Fetch's arm to converge towards the marker.
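Under the hood, the demo boils down to a single pybullet IK call followed by position control on the arm joints. A hedged sketch of that core step is below; the end-effector link index and target position are placeholders, and `robot_id` is assumed to come from an earlier `p.loadURDF` call.

```python
import pybullet as p

EEF_LINK_INDEX = 19            # placeholder: Fetch's gripper link index may differ
target_pos = [0.6, 0.0, 0.9]   # placeholder target in world coordinates

# IK returns target positions for every movable joint of the robot.
joint_targets = p.calculateInverseKinematics(robot_id, EEF_LINK_INDEX, target_pos)

# Drive the movable joints toward the IK solution with position control.
movable_joints = [
    j for j in range(p.getNumJoints(robot_id))
    if p.getJointInfo(robot_id, j)[2] != p.JOINT_FIXED
]
for joint_index, q in zip(movable_joints, joint_targets):
    p.setJointMotorControl2(robot_id, joint_index, p.POSITION_CONTROL, targetPosition=q)
```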
### Motion Planning Example
-In [motion_planning_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/robots/motion_planning_example.py), we showcase using our motion planning module in an interactive way. We load Fetch into an empty building (`Rs_int` scene). You can interact with the GUI to set navigation and manipulation targets, which Fetch will converge to using our motion planner.
+In [motion_planning_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/robots/motion_planning_example.py), we showcase using our motion planning module in an interactive way. We load Fetch into an empty building (`Rs_int` scene). You can interact with the GUI to set navigation and manipulation targets, which Fetch will converge to using our motion planner.
### Robot Control Example
-In [robot_control_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/robots/robot_control_example.py), we showcase using our controllers to control our different robots. You can choose a robot, and specific set of controllers to control the robot, and then either deploy random actions or directly teleoperate the robot using your keyboard.
+In [robot_control_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/robots/robot_control_example.py), we showcase using our controllers to control our different robots. You can choose a robot and a specific set of controllers to control it, and then either deploy random actions or directly teleoperate the robot using your keyboard.
## Legacy Robots
-We also include robots originally supported in Gibson / iGibson v1.0, but have not been ported to iGibson v2.0.
-They are included in [igibson/robots/legacy](https://github.com/StanfordVL/iGibson/blob/master/igibson/robots/legacy)
-in their original unaltered state, and are not expected to work out of the box with the current iGibson environments.
-Interested users should consider modifying those robot class logics to be compatible with iGibson v2.0, or their own
-repositories. We are happy to accept community PR contributions standardizing these robot classes with iGibson v2.0!
+We also include robots that were originally supported in Gibson / OmniGibson v1.0 but have not been ported to OmniGibson v2.0.
+They are included in [omnigibson/robots/legacy](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/robots/legacy)
+in their original, unaltered state, and are not expected to work out of the box with the current OmniGibson environments.
+Interested users should consider adapting those robot classes to be compatible with OmniGibson v2.0, either here or in their own
+repositories. We are happy to accept community PR contributions standardizing these robot classes with OmniGibson v2.0!
-Below, we list the legacy robots that can be found in iGibson:
+Below, we list the legacy robots that can be found in OmniGibson:
| Agent Name | DOF | Information | Controller |
|:-------------: | :-------------: |:-------------: |:-------------|
diff --git a/docs/ros_integration.md b/docs/ros_integration.md
index 544b7682c..0681d08e2 100644
--- a/docs/ros_integration.md
+++ b/docs/ros_integration.md
@@ -6,13 +6,13 @@ Introduction
[ROS](http://www.ros.org) is a set of well-engineered software libraries for building robotics applications. It includes a wide variety of packages, from low level drivers to efficient implementations of state of the art algorithms. As we strive to build intelligent agents and transfer them to real-world (on a real robot), we need to take advantage of ROS packages to complete the robot application pipeline.
-There are three key applications of integrating iGibson with ROS.
+There are three key applications of integrating OmniGibson with ROS.
- Benchmark existing algorithms in a controlled realistic simulation environment. This allows for comparing learning-based methods with classical methods in simulation environments.
-- Comparing robots in simulation with robots in the real world. In simulation, iGibson can simulate sensors of a robot and publish as messages. In the real world, a real robot publish sensor messages from onboard sensors. Therefore, it is possible to only change the message subscribed and benchmark the performance of downstream applications. This helps locate domain gap and debug algorithms.
+- Comparing robots in simulation with robots in the real world. In simulation, OmniGibson can simulate a robot's sensors and publish their readings as messages. In the real world, a real robot publishes sensor messages from its onboard sensors. Therefore, it is possible to change only the messages subscribed to and benchmark the performance of downstream applications. This helps locate the domain gap and debug algorithms.
- Using ROS functions in simulation, such as many motion planning implementations.
-The possibility of using iGibson with ROS is unlimited. As a starter, we provide an example of integrating iGibson with ROS for navigation. This is a ROS package integrates iGibson Env with ROS navigation stack. It follows the same node topology and topics as `turtlebot_navigation` package. As shown below, so after a policy is trained in iGibson, it requires minimal changes to deploy onto a real turtlebot.
+The possibilities of using OmniGibson with ROS are unlimited. As a starting point, we provide an example of integrating OmniGibson with ROS for navigation: a ROS package that integrates the OmniGibson Env with the ROS navigation stack. It follows the same node topology and topics as the `turtlebot_navigation` package, as shown below, so after a policy is trained in OmniGibson, it requires minimal changes to deploy onto a real Turtlebot.
![](images/node_topo.jpg)
@@ -22,33 +22,33 @@ Environment Setup
## Preparation
1. Install ROS: in this package, we use navigation stack from ROS kinetic. Please follow the [instructions](http://wiki.ros.org/kinetic/Installation/Ubuntu).
-2. Install iGibson **from source** following [installation guide](installation.md) in **python2.7**. Note that ROS only supports `python2.7` at the moment, so you need to create python2.7 virtual environment to install iGibson instead of python3.x.
+2. Install OmniGibson **from source** following the [installation guide](installation.md) in **python2.7**. Note that ROS only supports `python2.7` at the moment, so you need to create a python2.7 virtual environment to install OmniGibson instead of python3.x.
```bash
-git clone https://github.com/StanfordVL/iGibson --recursive
-cd iGibson
+git clone https://github.com/StanfordVL/OmniGibson --recursive
+cd OmniGibson
-conda create -n py2-igibson python=2.7 anaconda # we support python 2.7, 3.5, 3.6, 3.7, 3.8
-source activate py2-igibson
+conda create -n py2-omnigibson python=2.7 anaconda # we support python 2.7, 3.5, 3.6, 3.7, 3.8
+source activate py2-omnigibson
pip install -e . # This step takes about 4 minutes
-source deactivate # This step is important because we will NOT use /envs/py2-igibson/bin/python
+source deactivate # This step is important because we will NOT use /envs/py2-omnigibson/bin/python
```
3. If you use anaconda for setting up python environment, some tweaks of `PATH` and `PYTHONPATH` variable are required to avoid conflict. In particular:
1. For `PATH`: conda related needs to be removed from `PATH`
```bash
echo $PATH | grep -oP "[^:;]+" | grep conda ## Remove these paths from $PATH
```
- 2. For `PYTHONPATH`: `/usr/lib/python2.7/dist-packages/`, `/opt/ros/kinetic/lib/python2.7/dist-packages`(ROS python libraries), `/envs/py2-igibson/lib/python2.7/site-packages`(iGibson dependencies) and `` need to be in `PYTHONPATH` **in this exact order**.
+ 2. For `PYTHONPATH`: `/usr/lib/python2.7/dist-packages/`, `/opt/ros/kinetic/lib/python2.7/dist-packages` (ROS Python libraries), `/envs/py2-omnigibson/lib/python2.7/site-packages` (OmniGibson dependencies) and `` need to be in `PYTHONPATH` **in this exact order**.
4. Create `catkin_ws` folder
```bash
mkdir -p ~/catkin_ws
```
-5. Soft-link `igibson-ros` folder to your `catkin_ws/src` and run `catkin_make` to index `igibson-ros` package.
+5. Soft-link the `omnigibson-ros` folder to your `catkin_ws/src` and run `catkin_make` to index the `omnigibson-ros` package.
```bash
-cd /igibson
-ln -s $PWD/examples/ros/igibson-ros/ ~/catkin_ws/src/
+cd /omnigibson
+ln -s $PWD/examples/ros/omnigibson-ros/ ~/catkin_ws/src/
cd ~/catkin_ws && catkin_make
```
-5. Install `igibson-ros` dependencies:
+6. Install `omnigibson-ros` dependencies:
```bash
cd ~/catkin_ws
rosdep install --from-paths src --ignore-src -r -y
@@ -57,14 +57,14 @@ rosdep install --from-paths src --ignore-src -r -y
## Sanity check
```bash
-which python # Should give /usr/bin/python, NOT /envs/py2-igibson/bin/python
-python -c 'import igibson, rospy, rospkg' # Should run without errors
+which python # Should give /usr/bin/python, NOT /envs/py2-omnigibson/bin/python
+python -c 'import omnigibson, rospy, rospkg' # Should run without errors
```
Running
----------------
-In order to run iGibson+ROS examples, you will need to perform the following steps:
+In order to run OmniGibson+ROS examples, you will need to perform the following steps:
1. Prepare ROS environment
```bash
@@ -74,11 +74,11 @@ source ~/catkin_ws/devel/setup.bash
2. Repeat Step 3 from Preparation: sanitize `PATH` and `PYTHONPATH`
3. Here are some of the examples that you can run, including gmapping, hector mapping and navigation.
```bash
-roslaunch igibson-ros turtlebot_rgbd.launch # Bare minimal bringup example
-roslaunch igibson-ros turtlebot_gmapping.launch # Run gmapping
-roslaunch igibson-ros turtlebot_hector_mapping.launch # Run hector mapping
-roslaunch igibson-ros turtlebot_navigation.launch # Run the navigation stack, we have provided the map
-roslaunch igibson-ros turtlebot_gt_navigation.launch # Run the navigation stack with ground truth localization
+roslaunch omnigibson-ros turtlebot_rgbd.launch # Bare minimal bringup example
+roslaunch omnigibson-ros turtlebot_gmapping.launch # Run gmapping
+roslaunch omnigibson-ros turtlebot_hector_mapping.launch # Run hector mapping
+roslaunch omnigibson-ros turtlebot_navigation.launch # Run the navigation stack, we have provided the map
+roslaunch omnigibson-ros turtlebot_gt_navigation.launch # Run the navigation stack with ground truth localization
```
@@ -100,13 +100,13 @@ Publishes:
| Topic name | Type | Usage|
|:------------------:|:---------------------------:|:---:|
-|`/gibson_ros/camera/depth/camera_info`|`sensor_msgs/CameraInfo`|Camera parameters used in iGibson, same for depth and rgb|
-|`/gibson_ros/camera/rgb/image`|`sensor_msgs/Image`|RGB image captured in iGibson|
-|`/gibson_ros/camera/rgb/depth`|`sensor_msgs/Image`|Depth image captured in iGibson, in meters, with dtype being float32|
-|`/gibson_ros/camera/rgb/depth_raw`|`sensor_msgs/Image`|Depth image captured in iGibson, mimic raw depth data captured with OpenNI cameras, with dtype being uint16, see more [here](http://www.ros.org/reps/rep-0118.html)|
-|`/gibson_ros/lidar/points`|`sensor_msgs/PointCloud2`|1-beam LiDAR scan captured in iGibson, in meters, with dtype being float32|
-|`/odom`|`nav_msgs/Odometry` |The pose of `base_footprint` in `odom` frame, generated with groudtruth pose in iGibson|
-|`/ground_truth_odom`|`nav_msgs/Odometry`|The pose of `base_footprint` in `world` frame, generated with groudtruth pose in iGibson|
+|`/gibson_ros/camera/depth/camera_info`|`sensor_msgs/CameraInfo`|Camera parameters used in OmniGibson, same for depth and rgb|
+|`/gibson_ros/camera/rgb/image`|`sensor_msgs/Image`|RGB image captured in OmniGibson|
+|`/gibson_ros/camera/rgb/depth`|`sensor_msgs/Image`|Depth image captured in OmniGibson, in meters, with dtype being float32|
+|`/gibson_ros/camera/rgb/depth_raw`|`sensor_msgs/Image`|Depth image captured in OmniGibson, mimicking raw depth data captured with OpenNI cameras, with dtype being uint16, see more [here](http://www.ros.org/reps/rep-0118.html)|
+|`/gibson_ros/lidar/points`|`sensor_msgs/PointCloud2`|1-beam LiDAR scan captured in OmniGibson, in meters, with dtype being float32|
+|`/odom`|`nav_msgs/Odometry`|The pose of `base_footprint` in the `odom` frame, generated with the ground-truth pose in OmniGibson|
+|`/ground_truth_odom`|`nav_msgs/Odometry`|The pose of `base_footprint` in the `world` frame, generated with the ground-truth pose in OmniGibson|
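To quickly verify that these topics are being published, a minimal rospy subscriber can be used; the topic name below is taken from the table above, while the node name and callback are illustrative.

```python
import rospy
from sensor_msgs.msg import Image


def on_rgb(msg):
    rospy.loginfo("received %dx%d RGB image", msg.width, msg.height)


rospy.init_node("omnigibson_rgb_listener")  # node name is illustrative
rospy.Subscriber("/gibson_ros/camera/rgb/image", Image, on_rgb)
rospy.spin()
```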
Subscribes:
diff --git a/docs/sampling.md b/docs/sampling.md
index 7a4496317..d2beaadcc 100644
--- a/docs/sampling.md
+++ b/docs/sampling.md
@@ -1,28 +1,28 @@
# Sampling Scene Instances
-The [logic states](extended_states.md) implemented in iGibson since the version 2.0 provide a mechanism that facilitates the generation of simulated scenes to study and develop robotic solutions.
-Users of iGibson can specify the desired configuration of the environment in the logic language [BDDL](https://github.com/StanfordVL/bddl#readme), part of the [BEHAVIOR benchmark](behavior.stanford.edu).
+The [logic states](extended_states.md) implemented in OmniGibson since version 2.0 provide a mechanism that facilitates the generation of simulated scenes to study and develop robotic solutions.
+Users of OmniGibson can specify the desired configuration of the environment in the logic language [BDDL](https://github.com/StanfordVL/bddl#readme), part of the [BEHAVIOR benchmark](behavior.stanford.edu).
This language has similarities to the Planning Domain Definition Language (PDDL), allowing researchers to define scenes in logic-semantic manner (e.g., objects on top, next, inside of others) instead of the time consuming and tedious work of specifying manually their positions.
-Given a scene definition in BDDL, iGibson provides the functionalities to sample compliant instances of the logic description to be used in simulation.
+Given a scene definition in BDDL, OmniGibson provides the functionalities to sample compliant instances of the logic description to be used in simulation.
The image below shows an example of different instances sampled from the same logic description (three books on a table).
![sampling2.gif](images/sampling2.gif)
-The first step to generate a new activity in iGibson is to create its BDDL description.
+The first step to generate a new activity in OmniGibson is to create its BDDL description.
Please, follow the instructions [here](https://behavior.stanford.edu/activity-annotation) to create your own BDDL description using our online web interface, or modify some of the existing descriptions included as part of BEHAVIOR (see [here](https://github.com/StanfordVL/bddl/tree/master/bddl/activity_definitions)).
The next step is to download and install [BDDL](https://github.com/StanfordVL/bddl). Place your own BDDL description under `bddl/activity_definitions//problem0.bddl`.
Then you can select a *vanilla scene* to instantiate the BDDL description on.
-We provide 15 scenes as part of the iGibson dataset, furnished and with clutter objects to be specialized for multiple activities. See the available scenes in `ig_dataset/scenes` or [here](http://svl.stanford.edu/igibson/docs/dataset.html).
+We provide 15 scenes as part of the OmniGibson dataset, furnished and populated with clutter objects so that they can be specialized for multiple activities. See the available scenes in `og_dataset/scenes` or [here](http://svl.stanford.edu/omnigibson/docs/dataset.html).
-With the BDDL description and a list of scene names, iGibson can generate valid scene instances of the description with the following script:
+With the BDDL description and a list of scene names, OmniGibson can generate valid scene instances of the description with the following script:
```
-python -m igibson.utils.data_utils.sampling_task.sampling_saver --task --task_id 0 --scenes ...
+python -m omnigibson.utils.data_utils.sampling_task.sampling_saver --task --task_id 0 --scenes ...
```
The script will sample possible poses of object models of the indicated classes until all conditions are fulfilled.
-The result will be stored as `ig_dataset/scenes//urdf/_task__0_0.urdf`, a description of the scene with additional objects that fulfill the initial conditions in the BDDL description.
+The result will be stored as `og_dataset/scenes//urdf/_task__0_0.urdf`, a description of the scene with additional objects that fulfill the initial conditions in the BDDL description.
The user should ensure that the definition is sampleable in the given scene. Otherwise, after a certain number of sampling attempts, the script will fail and return.
We recommend to use the BEHAVIOR Dataset of 3D objects to get access to hundreds of object models to create new activities.
diff --git a/docs/scenes.md b/docs/scenes.md
index 0f7363db5..467b92e9a 100644
--- a/docs/scenes.md
+++ b/docs/scenes.md
@@ -3,8 +3,8 @@
### Overview
We provide four types of scenes.
- `EmptyScene` and `StadiumScene`: they are simple scenes with flat grounds and no obstacles, useful for debugging purposes.
-- `StaticIndoorScene`: it loads static 3D scenes from `igibson.g_dataset_path`.
-- `InteractiveIndoorScene`: it loads fully interactive 3D scenes from `igibson.ig_dataset_path`.
+- `StaticIndoorScene`: it loads static 3D scenes from `omnigibson.g_dataset_path`.
+- `InteractiveIndoorScene`: it loads fully interactive 3D scenes from `omnigibson.og_dataset_path`.
Typically, they take in the `scene_id` of a scene and provide a `load` function that be invoked externally (usually by `import_scene` of the `Simulator`).
@@ -21,35 +21,35 @@ In addition to everything mentioned above, the `load` function of `InteractiveIn
- provides partial scene loading functionality: 1) only load objects of certain categories, 2) only load objects in certain room types, 3) only load objects in certain room instances.
- provides APIs for changing the state of articulated objects (e.g. open all "fridges" and "ovens" in the scene)
-Most of the code can be found here: [igibson/scenes](https://github.com/StanfordVL/iGibson/blob/master/igibson/scenes).
+Most of the code can be found here: [omnigibson/scenes](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/scenes).
-### Adding other scenes to iGibson
-We provide detailed instructions and scripts to import scenes from the following sources into iGibson:
+### Adding other scenes to OmniGibson
+We provide detailed instructions and scripts to import scenes from the following sources into OmniGibson:
1. [CubiCasa5k](https://github.com/CubiCasa/CubiCasa5k): A Dataset and an Improved Multi-Task Model for Floorplan Image Analysis. (Kalervo, Ahti, et al.)
2. [3D-FRONT](https://tianchi.aliyun.com/specials/promotion/alibaba-3d-scene-dataset): 3D Furnished Rooms with layOuts and semaNTics. (Fu, Huanl, et al.)
-Instruction can be found here: [External Scenes](https://github.com/StanfordVL/iGibson/blob/master/igibson/utils/data_utils/ext_scene).
+Instructions can be found here: [External Scenes](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/utils/data_utils/ext_scene).
### Examples
#### Stadium Scenes
-In this example, we import a simple stadium scene that is good for debugging. The code can be found here: [igibson/examples/scenes/stadium_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/scenes/stadium_example.py).
+In this example, we import a simple stadium scene that is good for debugging. The code can be found here: [omnigibson/examples/scenes/stadium_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/scenes/stadium_example.py).
```python
import logging
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.scenes.stadium_scene import StadiumScene
-from igibson.simulator import Simulator
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.scenes.stadium_scene import StadiumScene
+from omnigibson.simulator import Simulator
def main():
"""
Loads the Stadium scene
- This scene is default in pybullet but is not really useful in iGibson
+ This scene is default in pybullet but is not really useful in OmniGibson
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
@@ -74,7 +74,7 @@ The stadium scene looks like this:
#### Static Building Scenes
-In this example, we import a static scene, and then randomly sample a pair of locations in the scene and compuete the shortest path between them. The code can be found here: [igibson/examples/scenes/g_scene_selector.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/scenes/g_scene_selector.py).
+In this example, we import a static scene, and then randomly sample a pair of locations in the scene and compute the shortest path between them. The code can be found here: [omnigibson/examples/scenes/g_scene_selector.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/scenes/g_scene_selector.py).
```python
import logging
@@ -82,13 +82,13 @@ from sys import platform
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.scenes.gibson_indoor_scene import StaticIndoorScene
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.asset_utils import get_available_g_scenes
-from igibson.utils.object_state_utils import let_user_pick
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.scenes.gibson_indoor_scene import StaticIndoorScene
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.asset_utils import get_available_g_scenes
+from omnigibson.utils.object_state_utils import let_user_pick
def main():
@@ -138,7 +138,7 @@ if __name__ == "__main__":
#### Interactive Building Scenes
-In this example, we import a fully interactive scene, and randomly sample points given a room type such as "living_room". This can be useful for tasks that require the robot to always be spawned in certain room types. We support fifteen such scenes right now as part of the new iGibson Dataset. The code can be found here: [igibson/examples/scenes/ig_scene_selector.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/scenes/ig_scene_selector.py).
+In this example, we import a fully interactive scene, and randomly sample points given a room type such as "living_room". This can be useful for tasks that require the robot to always be spawned in certain room types. We support fifteen such scenes right now as part of the new OmniGibson Dataset. The code can be found here: [omnigibson/examples/scenes/og_scene_selector.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/scenes/og_scene_selector.py).
Note that all objects in these scenes can be interacted realistically.
![scene_interactive.png](images/scene_interactive.png)
@@ -149,12 +149,12 @@ from sys import platform
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.asset_utils import get_available_ig_scenes
-from igibson.utils.object_state_utils import let_user_pick
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.asset_utils import get_available_og_scenes
+from omnigibson.utils.object_state_utils import let_user_pick
def main():
@@ -164,8 +164,8 @@ def main():
Shows how to sample points in the scene by room type and how to compute geodesic distance and the shortest path
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
- available_ig_scenes = get_available_ig_scenes()
- scene_id = available_ig_scenes[let_user_pick(available_ig_scenes) - 1]
+ available_og_scenes = get_available_og_scenes()
+ scene_id = available_og_scenes[let_user_pick(available_og_scenes) - 1]
settings = MeshRendererSettings(enable_shadow=True, msaa=False)
if platform == "darwin":
settings.texture_scale = 0.5
@@ -207,23 +207,23 @@ if __name__ == "__main__":
```
##### Texture Randomization
-In this example, we demonstrate material/texture randomization functionality of `InteractiveIndoorScene`. The goal is to randomize the material, texture and dynamic properties of all scene objects by calling `scene.randomize_texture` on-demand. The code can be found here: [igibson/examples/scenes/scene_texture_rand_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/scenes/scene_texture_rand_example.py ).
+In this example, we demonstrate the material/texture randomization functionality of `InteractiveIndoorScene`. The goal is to randomize the material, texture and dynamic properties of all scene objects by calling `scene.randomize_texture` on demand. The code can be found here: [omnigibson/examples/scenes/scene_texture_rand_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/scenes/scene_texture_rand_example.py).
The randomized materials in the `ExternalView` window should look like this.
![scene_interactive_texture_rand](images/scene_interactive_texture_rand.png)
##### Object Randomization
-In this example, we demonstrate object randomization functionality of `InteractiveIndoorScene`. The goal is to randomize the object models while maintaining their poses and categories. Note that when object models are randomized, there is no guarantee that they have no collisions or the fixed, articulated objects can extend their joints without collision. We provide `scene.check_scene_quality` functionality to check scene quality and you should do object model re-sampling if this function returns `False`. An alternative way (recommended) is to use randoml object model configuration that we provide (10 for each scenes) which guarantees scene quality, by passing in `object_randomization_idx=[0-9]`. Finally, object randomization can be expensive because the new object models need to be loaded to the simulator each time, so we recommend only using it occasionally (e.g. every 1000 training episodes). The code can be found here: [igibson/examples/scenes/scene_texture_rand_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/scenes/scene_texture_rand_example.py).
+In this example, we demonstrate the object randomization functionality of `InteractiveIndoorScene`. The goal is to randomize the object models while maintaining their poses and categories. Note that when object models are randomized, there is no guarantee that they are collision-free or that fixed, articulated objects can extend their joints without collision. We provide the `scene.check_scene_quality` function to check scene quality, and you should re-sample object models if it returns `False`. An alternative (recommended) approach is to use the random object model configurations that we provide (10 for each scene), which guarantee scene quality, by passing in `object_randomization_idx=[0-9]`. Finally, object randomization can be expensive because the new object models need to be loaded into the simulator each time, so we recommend only using it occasionally (e.g. every 1000 training episodes). The code can be found here: [omnigibson/examples/scenes/scene_texture_rand_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/scenes/scene_texture_rand_example.py).
The randomized object models in the `ExternalView` window should look like this.
![scene_interactive_object_rand](images/scene_interactive_object_rand.png)
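A hedged sketch of how texture randomization might be driven from a training loop is shown below; `scene.randomize_texture` is the call named above, while the constructor keyword names, scene id, and episode counts are illustrative assumptions.

```python
from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
from omnigibson.simulator import Simulator

s = Simulator(mode="headless")  # mode name is an assumption
scene = InteractiveIndoorScene(
    "Rs_int",
    texture_randomization=True,  # keyword name is an assumption
    object_randomization=False,  # keyword name is an assumption
)
s.import_scene(scene)

for episode in range(10):
    if episode % 5 == 0:
        scene.randomize_texture()  # cheap enough to call every few episodes
    for _ in range(100):
        s.step()
s.disconnect()
```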
##### Partial Scene Loading
-In this example, we demonstrate partial scene loading functionality of `InteractiveIndoorScene`. Specifically in this example we only load "chairs" in "living rooms". This can be useful for tasks that only require certain object categories or rooms. The code can be found here: [igibson/examples/scenes/scene_partial_loading_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/scenes/scene_partial_loading_example.py).
+In this example, we demonstrate partial scene loading functionality of `InteractiveIndoorScene`. Specifically in this example we only load "chairs" in "living rooms". This can be useful for tasks that only require certain object categories or rooms. The code can be found here: [omnigibson/examples/scenes/scene_partial_loading_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/scenes/scene_partial_loading_example.py).
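A hedged sketch of what such a partial load could look like is below; the keyword names `load_object_categories` and `load_room_types` are assumptions based on the description above, so consult the linked example for the exact API.

```python
from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
from omnigibson.simulator import Simulator

s = Simulator(mode="headless")  # mode name is an assumption
scene = InteractiveIndoorScene(
    "Rs_int",
    load_object_categories=["chair"],  # only load chairs...
    load_room_types=["living_room"],   # ...and only in living rooms
)
s.import_scene(scene)
```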
#### Visualize Traversability Map
-In this example, we visuliaze the traversability map of a scene. We use this map to build an internal traversability graph for each floor so that we can compute the shortest path between two locations, and place robots and objects at valid locations inside the scene. The code can be found here: [igibson/examples/robots/trav_map_vis_example.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/examples/robots/trav_map_vis_example.py).
+In this example, we visualize the traversability map of a scene. We use this map to build an internal traversability graph for each floor so that we can compute the shortest path between two locations, and place robots and objects at valid locations inside the scene. The code can be found here: [omnigibson/examples/robots/trav_map_vis_example.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/examples/robots/trav_map_vis_example.py).
The traversability map of the scene `Rs` looks like this:
![trav_map_vis](images/trav_map_vis.png)
diff --git a/docs/simulators.md b/docs/simulators.md
index f690467b4..ded3e8fa1 100644
--- a/docs/simulators.md
+++ b/docs/simulators.md
@@ -6,26 +6,26 @@
Some key functions are the following:
- `load`: initialize PyBullet physics engine and MeshRenderer
-- `import_{scene, ig_scene}`: import the scene into PyBullet by calling `scene.load`, and then import it into MeshRenderer by calling `self.renderer.add_instance_group`. If `InteractiveIndoorScene` is imported using `import_scene`, all objects in the scene are also imported.
+- `import_{scene, og_scene}`: import the scene into PyBullet by calling `scene.load`, and then import it into MeshRenderer by calling `self.renderer.add_instance_group`. If `InteractiveIndoorScene` is imported using `import_scene`, all objects in the scene are also imported.
- `import_{object, articulated_object, robot}`: import the object, articulated object and robot into the simulator in a similar manner
- `sync`: synchronize the poses of the dynamic objects (including the robots) between PyBullet and MeshRenderer. Specifically, it calls `update_position` for each object, in which it retrieve the object's pose in PyBullet, and then update its pose accordingly in MeshRenderer.
-If `Simulator` uses `gui` mode, by default it will also maintain a `Viewer`, which essentially is a virtual camera in the scene that can render images. More info about the `Viewer` can be found here: [igibson/render/viewer.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/render/viewer.py). Notably, you can press `h` in the `ExternalView` window to show the help menu for mouse/keyboard control.
+If `Simulator` uses `gui` mode, by default it will also maintain a `Viewer`, which is essentially a virtual camera in the scene that can render images. More info about the `Viewer` can be found here: [omnigibson/render/viewer.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/render/viewer.py). Notably, you can press `h` in the `ExternalView` window to show the help menu for mouse/keyboard control.
-Most of the code can be found here: [igibson/simulator.py](https://github.com/StanfordVL/iGibson/blob/master/igibson/simulator.py).
+Most of the code can be found here: [omnigibson/simulator.py](https://github.com/StanfordVL/OmniGibson/blob/master/omnigibson/simulator.py).
### Examples
In this example, we import a `StaticIndoorScene`, a `Turtlebot`, and ten `YCBObject` into the simulator.
```python
-from igibson.robots.turtlebot import Turtlebot
-from igibson.simulator import Simulator
-from igibson.scenes.gibson_indoor_scene import StaticIndoorScene
-from igibson.objects.ycb_object import YCBObject
-from igibson.utils.config_utils import parse_config
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.robots.turtlebot import Turtlebot
+from omnigibson.simulator import Simulator
+from omnigibson.scenes.gibson_indoor_scene import StaticIndoorScene
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.utils.config_utils import parse_config
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
import numpy as np
-from igibson.render.profiler import Profiler
+from omnigibson.render.profiler import Profiler
from IPython import embed
diff --git a/docs/tests.md b/docs/tests.md
index be3f66706..078803c3c 100644
--- a/docs/tests.md
+++ b/docs/tests.md
@@ -1,6 +1,6 @@
# Tests and Examples
### Tests
-We provide tests in [this directory](https://github.com/StanfordVL/iGibson/tree/master/tests) in our repository.
+We provide tests in [this directory](https://github.com/StanfordVL/OmniGibson/tree/master/tests) in our repository.
You can run them executing:
```bash
pytest --ignore disabled --ignore benchmark
diff --git a/docs/viewer.md b/docs/viewer.md
index 0335485ac..4fcabced6 100644
--- a/docs/viewer.md
+++ b/docs/viewer.md
@@ -2,16 +2,16 @@
### Overview
-We developed an easy-to-use iGibson-human interface called **Viewer** for users to inspect and interact with our scenes and objects. The Viewer will automatically pop up if you use `gui_non_interactive` or `gui_interactive` mode in `Simulator`.
+We developed an easy-to-use human interface for OmniGibson called **Viewer** that lets users inspect and interact with our scenes and objects. The Viewer will automatically pop up if you use `gui_non_interactive` or `gui_interactive` mode in `Simulator`.
To play with an example of viewer, you can run the following command:
```bash
-python -m igibson.examples.environments.env_int_example
+python -m omnigibson.examples.environments.env_int_example
```
or
```bash
-python -m igibson.examples.environments.env_nonint_example
+python -m omnigibson.examples.environments.env_nonint_example
```
after downloading the Rs_int or Rs scenes (interactive or non-interactive, see [the installation instructions](installation.md))
![viewer.png](images/viewer.png)
@@ -45,4 +45,4 @@ Mouse control in planning mode
In manipulation and planning modes, a visual indicator will be visualized in the `Viewer` to assist control (e.g. the blue sphere at the bottom in the image above).
-The code can be found in [igibson/render/viewer.py](https://github.com/StanfordVL/iGibson/tree/master/igibson/render/viewer.py).
+The code can be found in [omnigibson/render/viewer.py](https://github.com/StanfordVL/OmniGibson/tree/master/omnigibson/render/viewer.py).
diff --git a/docs/virtual_reality.md b/docs/virtual_reality.md
index 29b6259c2..14ba80adc 100644
--- a/docs/virtual_reality.md
+++ b/docs/virtual_reality.md
@@ -9,26 +9,26 @@ The HTC Vive Pro Eye tracking driver is not available for Linux. You must have t
2. (optional) if you plan to use eye tracking on Windows, create a [vive developer account](https://hub.vive.com/sso/login) then download and install the [SRAnipal runtime](https://developer.vive.com/resources/vive-sense/sdk/vive-eye-and-facial-tracking-sdk/). Note you should [calibrate](https://developer.vive.com/us/support/sdk/category_howto/how-to-calibrate-eye-tracking.html) the Vive eye tracker before each recording session.
-3. Ensure you have installed iGibson according to the installation [instructions](http://svl.stanford.edu/igibson/docs/installation.html#installation-method).
+3. Ensure you have installed OmniGibson according to the installation [instructions](http://svl.stanford.edu/omnigibson/docs/installation.html#installation-method).
* Note: On Windows VR support is enabled by default. On Linux, you must install with an additional environmental variable `USE_VR=TRUE pip install -e .`. You must also have addition development headers installed: on Ubuntu `sudo apt install xorg-dev` and on Centos/Fedora: `sudo dnf install libXinerama-devel libXi-devel libXrandr-devel libXcursor-devel`.
### VR examples
We have several examples showing how to use our VR interface:
-* vr demo files: `igibson/examples/vr`
+* vr demo files: `omnigibson/examples/vr`
-* multi-user VR (experimental): `igibson/examples/vr/muvr`
+* multi-user VR (experimental): `omnigibson/examples/vr/muvr`
-* benchmark: `igibson/examples/vr/in_development`
+* benchmark: `omnigibson/examples/vr/in_development`
- `vr_hand_dex_benchmark.py` -- Demonstrates various challenging manipulation tasks
- `vr_hand_speed_benchmark.py` -- Demonstrates various tasks to assess user speed at working in VR
-* data_save_replay: `igibson/examples/vr/data_save_replay`
+* data_save_replay: `omnigibson/examples/vr/data_save_replay`
- This folder demonstrates how to save VR recordings and "replay" them to reproduce the recorded trajectory
-* robot_embodiment: `igibson/examples/vr/robot_embodiment`
+* robot_embodiment: `omnigibson/examples/vr/robot_embodiment`
- This folder demonstrates the VR interface to the Fetch robot (WIP)
Benchmarks:
@@ -38,7 +38,7 @@ such as putting objects away into specific containers/cabinets. Please see the c
### VR config and button mapping:
-You can find the global vr settings in the `vr_config.yaml` in the igibson folder. We highly recommend reading through the default config as it is heavily documented. The three most crucial settings are **current_device**, **use_tracked_body**, and **torso_tracker_serial**.
+You can find the global VR settings in `vr_config.yaml` in the omnigibson folder. We highly recommend reading through the default config, as it is heavily documented. The three most crucial settings are **current_device**, **use_tracked_body**, and **torso_tracker_serial**.
* `current_device`: determines which of the `device_settings` keys will be used, and is used to set options specific to Oculus or the HTC Vive. The currently available keys, as seen in `device_settings` are `HTC_VIVE_PRO_EYE` and `OCULUS`
* `use_tracked_body`: determines if we will use [HTC Vive Trackers](https://developer.vive.com/us/support/sdk/category_howto/how-to-calibrate-eye-tracking.html) to track the body instead of inferring body position from the headset position.
@@ -52,26 +52,26 @@ Some additional options you may be interested in changing:
* `relative_movement_device` (default 'hmd'): which device to use to control touchpad movement direction (can be any VR device). You should not need to change this.
* `movement_speed` (default 0.01): how fast you move when using the touchpad. This number has been calibrated carefully, however feel free to change it if you want to speed up/slow down.
-We recommend looking at `igibson/render/mesh_renderer/mesh_renderer_vr.py` to see the VrSettings class which reads from `vr_config.yaml`. A single VrSettings object is created and passed in to the `Simulator` constructor.
+We recommend looking at `omnigibson/render/mesh_renderer/mesh_renderer_vr.py` to see the VrSettings class, which reads from `vr_config.yaml`. A single VrSettings object is created and passed to the `Simulator` constructor.
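As a rough illustration of that pattern, a minimal sketch follows; the VrSettings import matches the examples in this patch, but the `Simulator` keyword argument name is an assumption, not the documented API:

```python
from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
from omnigibson.simulator import Simulator

# VrSettings reads its values from vr_config.yaml; a single instance is
# created and handed to the Simulator constructor.
vr_settings = VrSettings()
sim = Simulator(vr_settings=vr_settings)  # keyword name is illustrative
```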
-Note(optional): If you are using a device not already mapped, please run `igibson/examples/vr/in_development/vr_button_mapping.py` to figure out which physical controller buttons correspond to which indices in OpenVR.
+Note (optional): If you are using a device that is not already mapped, please run `omnigibson/examples/vr/in_development/vr_button_mapping.py` to figure out which physical controller buttons correspond to which indices in OpenVR.
### Mirroring the VR view on the monitor
-iGibson VR utilizes Steam VR's built-in companion window to visualize what the user sees in their headset. To open this window:
+OmniGibson VR utilizes Steam VR's built-in companion window to visualize what the user sees in their headset. To open this window:
* launch Steam VR
* click on the hamburger menu in the top-left corner of the VR status menu (the dark blue window with icons for the VR devices)
* then click "Display VR View" button.
From this view, you can change which eye you are looking at (or look at both), and can make the window fullscreen.
-Note that this window will be black until an application is running, and the headset is detected by the lighthouse sensors. We also support a custom-build companion window that can run in iGibson - this can be enabled in the vr_config file, described below (although it is off by default).
+Note that this window will be black until an application is running and the headset is detected by the lighthouse sensors. We also support a custom-built companion window that can run in OmniGibson - this can be enabled in the vr_config file, described above (although it is off by default).
Note: Press ESCAPE to force the fullscreen rendering window to close during program execution (although fullscreen is disabled by default)
### Contributing
-* Most VR functions can be found in `igibson/simulator.py`
-* The BehaviorRobot is located in `igibson/robots/behavior_robot.py`
-* VR utility functions are found in `igibson/utils/vr_utils.py`
-* The VR renderer can be found in `igibson/render/mesh_renderer.py`
-* The underlying VR C++ code (querying controller states from openvr, renderer for VR) can be found in `igibson/render/cpp/vr_mesh_renderer{.cpp,.h}
+* Most VR functions can be found in `omnigibson/simulator.py`
+* The BehaviorRobot is located in `omnigibson/robots/behavior_robot.py`
+* VR utility functions are found in `omnigibson/utils/vr_utils.py`
+* The VR renderer can be found in `omnigibson/render/mesh_renderer.py`
+* The underlying VR C++ code (querying controller states from openvr, renderer for VR) can be found in `omnigibson/render/cpp/vr_mesh_renderer{.cpp,.h}`
diff --git a/igibson/.gitignore b/igibson/.gitignore
index 84b4e4ef9..e2713222d 100644
--- a/igibson/.gitignore
+++ b/igibson/.gitignore
@@ -2,6 +2,6 @@
data
assets
dataset
-ig_dataset
+og_dataset
render/coord.npy
render/model.pth
\ No newline at end of file
diff --git a/igibson/__init__.py b/igibson/__init__.py
index 54b1c291d..f094c74da 100644
--- a/igibson/__init__.py
+++ b/igibson/__init__.py
@@ -4,8 +4,8 @@
import yaml
import builtins
-# TODO: Need to fix somehow -- igibson gets imported first BEFORE we can actually modify the macros
-from igibson.macros import gm
+# TODO: Need to fix somehow -- omnigibson gets imported first BEFORE we can actually modify the macros
+from omnigibson.macros import gm
builtins.ISAAC_LAUNCHED_FROM_JUPYTER = (
os.getenv("ISAAC_JUPYTER_KERNEL") is not None
@@ -36,10 +36,10 @@
g_dataset_path = os.path.expanduser(g_dataset_path)
if "IGIBSON_DATASET_PATH" in os.environ:
- ig_dataset_path = os.environ["IGIBSON_DATASET_PATH"]
+ og_dataset_path = os.environ["IGIBSON_DATASET_PATH"]
else:
- ig_dataset_path = global_config["ig_dataset_path"]
-ig_dataset_path = os.path.expanduser(ig_dataset_path)
+ og_dataset_path = global_config["og_dataset_path"]
+og_dataset_path = os.path.expanduser(og_dataset_path)
if "3DFRONT_DATASET_PATH" in os.environ:
threedfront_dataset_path = os.environ["3DFRONT_DATASET_PATH"]
@@ -65,8 +65,8 @@
assets_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), assets_path)
if not os.path.isabs(g_dataset_path):
g_dataset_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), g_dataset_path)
-if not os.path.isabs(ig_dataset_path):
- ig_dataset_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), ig_dataset_path)
+if not os.path.isabs(og_dataset_path):
+ og_dataset_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), og_dataset_path)
if not os.path.isabs(threedfront_dataset_path):
threedfront_dataset_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), threedfront_dataset_path)
if not os.path.isabs(cubicasa_dataset_path):
@@ -74,13 +74,13 @@
if not os.path.isabs(key_path):
key_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), key_path)
-logging.info("Importing iGibson (igibson module)")
+logging.info("Importing OmniGibson (omnigibson module)")
logging.info("Assets path: {}".format(assets_path))
logging.info("Gibson Dataset path: {}".format(g_dataset_path))
-logging.info("iG Dataset path: {}".format(ig_dataset_path))
+logging.info("iG Dataset path: {}".format(og_dataset_path))
logging.info("3D-FRONT Dataset path: {}".format(threedfront_dataset_path))
logging.info("CubiCasa5K Dataset path: {}".format(cubicasa_dataset_path))
-logging.info("iGibson Key path: {}".format(key_path))
+logging.info("OmniGibson Key path: {}".format(key_path))
example_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "examples")
example_config_path = os.path.join(os.path.dirname(os.path.realpath(__file__)), "configs")
@@ -91,8 +91,8 @@
# whether to enable debugging mode for object sampling
debug_sampling = False
-# Finally, we must create the igibson application
-from igibson.app_omni import OmniApp
+# Finally, we must create the omnigibson application
+from omnigibson.app_omni import OmniApp
# Create app as a global reference so any submodule can access it
app = OmniApp(
@@ -104,19 +104,19 @@
# Next import must be simulator
sim = None
-from igibson.simulator import Simulator
+from omnigibson.simulator import Simulator
# Create simulator (this is a singleton so it's okay that it's global)
sim = Simulator()
-# Import any remaining items we want to access directly from the main igibson import
-from igibson.envs import Environment
-from igibson.scenes import REGISTERED_SCENES
-from igibson.robots import REGISTERED_ROBOTS
-from igibson.controllers import REGISTERED_CONTROLLERS
-from igibson.tasks import REGISTERED_TASKS
+# Import any remaining items we want to access directly from the main omnigibson import
+from omnigibson.envs import Environment
+from omnigibson.scenes import REGISTERED_SCENES
+from omnigibson.robots import REGISTERED_ROBOTS
+from omnigibson.controllers import REGISTERED_CONTROLLERS
+from omnigibson.tasks import REGISTERED_TASKS
-# Define convenience function for shutting down iGibson cleanly
+# Define convenience function for shutting down OmniGibson cleanly
def shutdown():
app.close()
exit(0)
diff --git a/igibson/action_primitives/__init__.py b/igibson/action_primitives/__init__.py
index 7ce83205f..ca6d10c16 100644
--- a/igibson/action_primitives/__init__.py
+++ b/igibson/action_primitives/__init__.py
@@ -1,3 +1,3 @@
-from igibson.action_primitives.action_primitive_set_base import BaseActionPrimitiveSet, REGISTERED_PRIMITIVE_SETS
-from igibson.action_primitives.behavior_discrete_action_primitives import BehaviorActionPrimitives
-# from igibson.action_primitives.starter_semantic_action_primitives import StarterSemanticActionPrimitives
+from omnigibson.action_primitives.action_primitive_set_base import BaseActionPrimitiveSet, REGISTERED_PRIMITIVE_SETS
+from omnigibson.action_primitives.behavior_discrete_action_primitives import BehaviorActionPrimitives
+# from omnigibson.action_primitives.starter_semantic_action_primitives import StarterSemanticActionPrimitives
diff --git a/igibson/action_primitives/action_primitive_set_base.py b/igibson/action_primitives/action_primitive_set_base.py
index a160ff088..62461f60b 100644
--- a/igibson/action_primitives/action_primitive_set_base.py
+++ b/igibson/action_primitives/action_primitive_set_base.py
@@ -4,9 +4,9 @@
from future.utils import with_metaclass
-from igibson.robots import BaseRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveTraversableScene
-from igibson.tasks.task_base import BaseTask
+from omnigibson.robots import BaseRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveTraversableScene
+from omnigibson.tasks.task_base import BaseTask
REGISTERED_PRIMITIVE_SETS = {}
diff --git a/igibson/action_primitives/behavior_discrete_action_primitives.py b/igibson/action_primitives/behavior_discrete_action_primitives.py
index c3e7a9bcd..092d8cc9a 100644
--- a/igibson/action_primitives/behavior_discrete_action_primitives.py
+++ b/igibson/action_primitives/behavior_discrete_action_primitives.py
@@ -8,12 +8,12 @@
import numpy as np
# import pybullet as p
-from igibson.action_primitives.action_primitive_set_base import ActionPrimitiveError, BaseActionPrimitiveSet
-from igibson.controllers import ControlType, JointController
-from igibson.object_states.pose import Pose
-from igibson.robots.manipulation_robot import IsGraspingState
-from igibson.utils.motion_planning_utils import MotionPlanner
-from igibson.utils.transform_utils import mat2euler, quat2mat
+from omnigibson.action_primitives.action_primitive_set_base import ActionPrimitiveError, BaseActionPrimitiveSet
+from omnigibson.controllers import ControlType, JointController
+from omnigibson.object_states.pose import Pose
+from omnigibson.robots.manipulation_robot import IsGraspingState
+from omnigibson.utils.motion_planning_utils import MotionPlanner
+from omnigibson.utils.transform_utils import mat2euler, quat2mat
logger = logging.getLogger(__name__)
logger.setLevel(logging.ERROR)
@@ -285,7 +285,7 @@
# # [5, 'cabinet.n.01_1'], # push
# ] # * 4
-# /home/robot/Desktop/behavior/iGibson-dev-jk/igibson/examples/robots/log_dir_his/20220510-001432_putting_away_Halloween_decorations_discrete_rgb_accumReward_m0.01
+# /home/robot/Desktop/behavior/OmniGibson-dev-jk/omnigibson/examples/robots/log_dir_his/20220510-001432_putting_away_Halloween_decorations_discrete_rgb_accumReward_m0.01
# wo vis operation
action_list_putting_away_Halloween_decorations_v2 = [
[0, "cabinet.n.01_1"], # move
diff --git a/igibson/action_primitives/starter_semantic_action_primitives.py b/igibson/action_primitives/starter_semantic_action_primitives.py
index 5ec3f39d2..3cdda3d05 100644
--- a/igibson/action_primitives/starter_semantic_action_primitives.py
+++ b/igibson/action_primitives/starter_semantic_action_primitives.py
@@ -16,23 +16,23 @@
# import pybullet as p
from scipy.spatial.transform import Rotation
-from igibson import object_states
-from igibson.action_primitives.action_primitive_set_base import ActionPrimitiveError, BaseActionPrimitiveSet
-# from igibson.external.pybullet_tools.utils import set_joint_position
-from igibson.object_states.on_floor import RoomFloor
-from igibson.utils.object_state_utils import get_center_extent, sample_kinematics
-# from igibson.objects.articulated_object import URDFObject
-# from igibson.objects.object_base import BaseObject
-# from igibson.robots import BaseRobot, behavior_robot
-# from igibson.robots.behavior_robot import DEFAULT_BODY_OFFSET_FROM_FLOOR, BehaviorRobot
-# from igibson.tasks.behavior_task import BehaviorTask
-# from igibson.utils.behavior_robot_motion_planning_utils import (
+from omnigibson import object_states
+from omnigibson.action_primitives.action_primitive_set_base import ActionPrimitiveError, BaseActionPrimitiveSet
+# from omnigibson.external.pybullet_tools.utils import set_joint_position
+from omnigibson.object_states.on_floor import RoomFloor
+from omnigibson.utils.object_state_utils import get_center_extent, sample_kinematics
+# from omnigibson.objects.articulated_object import URDFObject
+# from omnigibson.objects.object_base import BaseObject
+# from omnigibson.robots import BaseRobot, behavior_robot
+# from omnigibson.robots.behavior_robot import DEFAULT_BODY_OFFSET_FROM_FLOOR, BehaviorRobot
+# from omnigibson.tasks.behavior_task import BehaviorTask
+# from omnigibson.utils.behavior_robot_motion_planning_utils import (
# get_pose3d_hand_collision_fn,
# plan_base_motion_br,
# plan_hand_motion_br,
# )
-# from igibson.utils.grasp_planning_utils import get_grasp_poses_for_object, get_grasp_position_for_open
-# from igibson.utils.utils import restoreState
+# from omnigibson.utils.grasp_planning_utils import get_grasp_poses_for_object, get_grasp_position_for_open
+# from omnigibson.utils.utils import restoreState
MAX_STEPS_FOR_HAND_MOVE = 100
MAX_STEPS_FOR_HAND_MOVE_WHEN_OPENING = 30
diff --git a/igibson/app_omni.py b/igibson/app_omni.py
index fb3c16bba..666365328 100644
--- a/igibson/app_omni.py
+++ b/igibson/app_omni.py
@@ -16,7 +16,7 @@
import carb
import omni.kit.app
import builtins
-from igibson.macros import gm, create_module_macros
+from omnigibson.macros import gm, create_module_macros
# Create settings for this module
@@ -176,8 +176,8 @@ def _load_omni_extensions(self, launch_config=None, experience=""):
:param launch_config: dict, settings for generating this app
:param experience: str, path to extension settings file for this app
"""
- # Load igibson module now to prevent circular imports
- import igibson
+ # Load omnigibson module now to prevent circular imports
+ import omnigibson
# Sanity check to see if any extra omniverse modules are loaded
# Warn users if so because this will usually cause issues.
@@ -221,7 +221,7 @@ def _load_omni_extensions(self, launch_config=None, experience=""):
# Override settings from input config
self.config = self.DEFAULT_LAUNCHER_CONFIG
if experience == "":
- experience = f'{igibson.root_path}/configs/apps/omni.isaac.sim.python.kit'
+ experience = f'{omnigibson.root_path}/configs/apps/omni.isaac.sim.python.kit'
self.config.update({"experience": experience})
if launch_config is not None:
self.config.update(launch_config)
diff --git a/igibson/app_omni_public.py b/igibson/app_omni_public.py
index 60993dbbd..8d446efd4 100644
--- a/igibson/app_omni_public.py
+++ b/igibson/app_omni_public.py
@@ -162,8 +162,8 @@ def _load_omni_extensions(self, launch_config=None, experience=""):
:param launch_config: dict, settings for generating this app
:param experience: str, path to extension settings file for this app
"""
- # Load igibson module now to prevent circular imports
- import igibson
+ # Load omnigibson module now to prevent circular imports
+ import omnigibson
# Sanity check to see if any extra omniverse modules are loaded
# Warn users if so because this will usually cause issues.
@@ -207,7 +207,7 @@ def _load_omni_extensions(self, launch_config=None, experience=""):
# Override settings from input config
self.config = self.DEFAULT_LAUNCHER_CONFIG
if experience == "":
- experience = f'{igibson.root_path}/configs/apps/public/omni.isaac.sim.python.kit'
+ experience = f'{omnigibson.root_path}/configs/apps/public/omni.isaac.sim.python.kit'
self.config.update({"experience": experience})
if launch_config is not None:
self.config.update(launch_config)
diff --git a/igibson/configs/apps/omni.isaac.sim.python.kit b/igibson/configs/apps/omni.isaac.sim.python.kit
index 95f294563..c6ba821f6 100644
--- a/igibson/configs/apps/omni.isaac.sim.python.kit
+++ b/igibson/configs/apps/omni.isaac.sim.python.kit
@@ -1,13 +1,13 @@
[package]
-title = "Isaac Sim Python + iGibson"
-description = "A minimal app for use with iGibson"
+title = "Isaac Sim Python + OmniGibson"
+description = "A minimal app for use with OmniGibson"
version = "2022.1.0"
# That makes it browsable in UI with "experience" filter
keywords = ["experience", "app", "usd"]
[settings]
-app.name = "iGibson"
+app.name = "OmniGibson"
app.version = "2022.1.0"
[dependencies]
@@ -92,7 +92,7 @@ persistent = false
dev_build = false
[settings.app.window]
-title = "iGibson"
+title = "OmniGibson"
# width = 1700
# height = 900
# x = -1
diff --git a/igibson/configs/apps/public/omni.isaac.sim.python.kit b/igibson/configs/apps/public/omni.isaac.sim.python.kit
index 28d4bccd7..6dff6316c 100644
--- a/igibson/configs/apps/public/omni.isaac.sim.python.kit
+++ b/igibson/configs/apps/public/omni.isaac.sim.python.kit
@@ -1,13 +1,13 @@
[package]
-title = "Isaac Sim Python + iGibson"
-description = "A minimal app for use with iGibson"
+title = "Isaac Sim Python + OmniGibson"
+description = "A minimal app for use with OmniGibson"
version = "2021.2.1"
# That makes it browsable in UI with "experience" filter
keywords = ["experience", "app", "usd"]
[settings]
-app.name = "iGibson"
+app.name = "OmniGibson"
app.version = "2021.2.1"
[dependencies]
@@ -76,7 +76,7 @@ persistent = false
dev_build = false
[settings.app.window]
-title = "iGibson"
+title = "OmniGibson"
# width = 1700
# height = 900
# x = -1
diff --git a/igibson/configs/behavior_full_observability.yaml b/igibson/configs/behavior_full_observability.yaml
index c20d882f5..a4b6ed090 100644
--- a/igibson/configs/behavior_full_observability.yaml
+++ b/igibson/configs/behavior_full_observability.yaml
@@ -1,7 +1,7 @@
# TODO: Refactor!
## scene
-#scene: igibson
+#scene: omnigibson
#scene_id: Beechwood_1_int
#clutter: false
#build_graph: true
diff --git a/igibson/configs/behavior_full_observability_fetch.yaml b/igibson/configs/behavior_full_observability_fetch.yaml
index ee8b7e744..e5a2243dc 100644
--- a/igibson/configs/behavior_full_observability_fetch.yaml
+++ b/igibson/configs/behavior_full_observability_fetch.yaml
@@ -1,7 +1,7 @@
# TODO: Verify works!
## scene
-#scene: igibson
+#scene: omnigibson
#scene_id: Beechwood_1_int
#clutter: false
#build_graph: true
diff --git a/igibson/configs/behavior_vr.yaml b/igibson/configs/behavior_vr.yaml
index 7f7200792..605c92c00 100644
--- a/igibson/configs/behavior_vr.yaml
+++ b/igibson/configs/behavior_vr.yaml
@@ -1,7 +1,7 @@
# TODO: Refactor!
## scene
-#scene: igibson
+#scene: omnigibson
#scene_id: Beechwood_1_int
#clutter: false
#build_graph: true
diff --git a/igibson/configs/controllers/dd.yaml b/igibson/configs/controllers/dd.yaml
index a109e7944..486fb1c2b 100644
--- a/igibson/configs/controllers/dd.yaml
+++ b/igibson/configs/controllers/dd.yaml
@@ -1,5 +1,5 @@
# Example Differential Drive control config (shown for base control)
-# See igibson/controllers/dd_controller for docstring of arguments
+# See omnigibson/controllers/dd_controller for docstring of arguments
# Arguments below are the arguments that should be specified by external user (other kwargs
# used in constructor are generated automatically at runtime)
robot:
diff --git a/igibson/configs/controllers/ik.yaml b/igibson/configs/controllers/ik.yaml
index 0db6cb0bf..69db50157 100644
--- a/igibson/configs/controllers/ik.yaml
+++ b/igibson/configs/controllers/ik.yaml
@@ -1,5 +1,5 @@
# Example IK config (shown for arm control)
-# See igibson/controllers/ik_controller for docstring of arguments
+# See omnigibson/controllers/ik_controller for docstring of arguments
# Arguments below are the arguments that should be specified by external user (other kwargs
# used in constructor are generated automatically at runtime)
robot:
diff --git a/igibson/configs/controllers/joint.yaml b/igibson/configs/controllers/joint.yaml
index faf0af3fd..665ad5a9a 100644
--- a/igibson/configs/controllers/joint.yaml
+++ b/igibson/configs/controllers/joint.yaml
@@ -1,5 +1,5 @@
# Example Joint control config (shown for arm control)
-# See igibson/controllers/joint_controller for docstring of arguments
+# See omnigibson/controllers/joint_controller for docstring of arguments
# Arguments below are the arguments that should be specified by external user (other kwargs
# used in constructor are generated automatically at runtime)
robot:
diff --git a/igibson/configs/controllers/multi_finger_gripper.yaml b/igibson/configs/controllers/multi_finger_gripper.yaml
index d1260bbfa..298195559 100644
--- a/igibson/configs/controllers/multi_finger_gripper.yaml
+++ b/igibson/configs/controllers/multi_finger_gripper.yaml
@@ -1,5 +1,5 @@
# Example Multi Finger Gripper control config (shown for arm control)
-# See igibson/controllers/parallel_jaw_gripper_controller for docstring of arguments
+# See omnigibson/controllers/parallel_jaw_gripper_controller for docstring of arguments
# Arguments below are the arguments that should be specified by external user (other kwargs
# used in constructor are generated automatically at runtime)
robot:
diff --git a/igibson/configs/controllers/null_gripper.yaml b/igibson/configs/controllers/null_gripper.yaml
index c59e34e84..caf867b1f 100644
--- a/igibson/configs/controllers/null_gripper.yaml
+++ b/igibson/configs/controllers/null_gripper.yaml
@@ -1,5 +1,5 @@
# Example Null Gripper control config (shown for gripper control)
-# See igibson/controllers/null_gripper_controller for docstring of arguments
+# See omnigibson/controllers/null_gripper_controller for docstring of arguments
# Arguments below are the arguments that should be specified by external user (other kwargs
# used in constructor are generated automatically at runtime)
robot:
diff --git a/igibson/configs/fetch_motion_planning.yaml b/igibson/configs/fetch_motion_planning.yaml
index ea2f53ff8..a8f381f34 100644
--- a/igibson/configs/fetch_motion_planning.yaml
+++ b/igibson/configs/fetch_motion_planning.yaml
@@ -1,7 +1,7 @@
# TODO: Refactor!
## scene
-#scene: igibson
+#scene: omnigibson
#scene_id: Rs_int
#build_graph: true
#load_texture: true
diff --git a/igibson/configs/fetch_rearrangement.yaml b/igibson/configs/fetch_rearrangement.yaml
index a26a00ea4..964cfc808 100644
--- a/igibson/configs/fetch_rearrangement.yaml
+++ b/igibson/configs/fetch_rearrangement.yaml
@@ -1,7 +1,7 @@
# TODO: Refactor!
## scene
-#scene: igibson
+#scene: omnigibson
#scene_id: Rs_int
#build_graph: true
#load_texture: true
diff --git a/igibson/configs/turtlebot_dynamic_nav.yaml b/igibson/configs/turtlebot_dynamic_nav.yaml
index 66d31957e..b8b9260a7 100644
--- a/igibson/configs/turtlebot_dynamic_nav.yaml
+++ b/igibson/configs/turtlebot_dynamic_nav.yaml
@@ -1,7 +1,7 @@
# TODO: Refactor!
## scene
-#scene: igibson
+#scene: omnigibson
#scene_id: Rs_int
#build_graph: true
#load_texture: true
diff --git a/igibson/controllers/__init__.py b/igibson/controllers/__init__.py
index ac720d9ea..8e4122fa2 100644
--- a/igibson/controllers/__init__.py
+++ b/igibson/controllers/__init__.py
@@ -1,4 +1,4 @@
-from igibson.controllers.controller_base import (
+from omnigibson.controllers.controller_base import (
REGISTERED_CONTROLLERS,
REGISTERED_LOCOMOTION_CONTROLLERS,
REGISTERED_MANIPULATION_CONTROLLERS,
@@ -8,12 +8,12 @@
ManipulationController,
GripperController,
)
-from igibson.controllers.dd_controller import DifferentialDriveController
-from igibson.controllers.ik_controller import InverseKinematicsController
-from igibson.controllers.joint_controller import JointController
-from igibson.controllers.multi_finger_gripper_controller import MultiFingerGripperController
-from igibson.controllers.null_joint_controller import NullJointController
-from igibson.utils.python_utils import assert_valid_key
+from omnigibson.controllers.dd_controller import DifferentialDriveController
+from omnigibson.controllers.ik_controller import InverseKinematicsController
+from omnigibson.controllers.joint_controller import JointController
+from omnigibson.controllers.multi_finger_gripper_controller import MultiFingerGripperController
+from omnigibson.controllers.null_joint_controller import NullJointController
+from omnigibson.utils.python_utils import assert_valid_key
def create_controller(name, **kwargs):
diff --git a/igibson/controllers/controller_base.py b/igibson/controllers/controller_base.py
index 162825537..f1505daf8 100644
--- a/igibson/controllers/controller_base.py
+++ b/igibson/controllers/controller_base.py
@@ -3,7 +3,7 @@
import numpy as np
-from igibson.utils.python_utils import classproperty, assert_valid_key, Serializable, Registerable, Recreatable
+from omnigibson.utils.python_utils import classproperty, assert_valid_key, Serializable, Registerable, Recreatable
# Global dicts that will contain mappings
REGISTERED_CONTROLLERS = OrderedDict()
diff --git a/igibson/controllers/dd_controller.py b/igibson/controllers/dd_controller.py
index 73eeaef49..015cbd079 100644
--- a/igibson/controllers/dd_controller.py
+++ b/igibson/controllers/dd_controller.py
@@ -1,6 +1,6 @@
import numpy as np
-from igibson.controllers import ControlType, LocomotionController
+from omnigibson.controllers import ControlType, LocomotionController
class DifferentialDriveController(LocomotionController):
diff --git a/igibson/controllers/ik_controller.py b/igibson/controllers/ik_controller.py
index 3fbe5b762..d0508a32d 100644
--- a/igibson/controllers/ik_controller.py
+++ b/igibson/controllers/ik_controller.py
@@ -1,10 +1,10 @@
import numpy as np
-import igibson.utils.transform_utils as T
-from igibson.controllers import ControlType, ManipulationController
-from igibson.utils.processing_utils import MovingAverageFilter
-from igibson.utils.control_utils import IKSolver
-from igibson.utils.python_utils import assert_valid_key
+import omnigibson.utils.transform_utils as T
+from omnigibson.controllers import ControlType, ManipulationController
+from omnigibson.utils.processing_utils import MovingAverageFilter
+from omnigibson.utils.control_utils import IKSolver
+from omnigibson.utils.python_utils import assert_valid_key
# Different modes
IK_MODE_COMMAND_DIMS = {
diff --git a/igibson/controllers/joint_controller.py b/igibson/controllers/joint_controller.py
index e1e193bed..22a374ae0 100644
--- a/igibson/controllers/joint_controller.py
+++ b/igibson/controllers/joint_controller.py
@@ -1,9 +1,9 @@
import numpy as np
-from igibson.controllers import IsGraspingState, ControlType, LocomotionController, ManipulationController, \
+from omnigibson.controllers import IsGraspingState, ControlType, LocomotionController, ManipulationController, \
GripperController
-from igibson.utils.python_utils import assert_valid_key
-import igibson.utils.transform_utils as T
+from omnigibson.utils.python_utils import assert_valid_key
+import omnigibson.utils.transform_utils as T
class JointController(LocomotionController, ManipulationController, GripperController):
diff --git a/igibson/controllers/multi_finger_gripper_controller.py b/igibson/controllers/multi_finger_gripper_controller.py
index f0edb6c1b..c93387d5b 100644
--- a/igibson/controllers/multi_finger_gripper_controller.py
+++ b/igibson/controllers/multi_finger_gripper_controller.py
@@ -1,8 +1,8 @@
import numpy as np
-from igibson.macros import create_module_macros
-from igibson.controllers import IsGraspingState, ControlType, GripperController
-from igibson.utils.python_utils import assert_valid_key
+from omnigibson.macros import create_module_macros
+from omnigibson.controllers import IsGraspingState, ControlType, GripperController
+from omnigibson.utils.python_utils import assert_valid_key
VALID_MODES = {
"binary",
diff --git a/igibson/controllers/null_joint_controller.py b/igibson/controllers/null_joint_controller.py
index 091ccfe1a..1616a9c90 100644
--- a/igibson/controllers/null_joint_controller.py
+++ b/igibson/controllers/null_joint_controller.py
@@ -1,6 +1,6 @@
import numpy as np
-from igibson.controllers import JointController
+from omnigibson.controllers import JointController
class NullJointController(JointController):
diff --git a/igibson/envs/__init__.py b/igibson/envs/__init__.py
index 845a7d5bd..8cc3c969a 100644
--- a/igibson/envs/__init__.py
+++ b/igibson/envs/__init__.py
@@ -1 +1 @@
-from igibson.envs.env_base import Environment
+from omnigibson.envs.env_base import Environment
diff --git a/igibson/envs/behavior_mp_env.py b/igibson/envs/behavior_mp_env.py
index 3bcafcfad..34abe9c3d 100644
--- a/igibson/envs/behavior_mp_env.py
+++ b/igibson/envs/behavior_mp_env.py
@@ -8,15 +8,15 @@
# import numpy as np
#
#
-# from igibson import object_states
-# from igibson.envs.behavior_env import BehaviorEnv
-# # from igibson.external.pybullet_tools.utils import CIRCULAR_LIMITS
-# from igibson.object_states.on_floor import RoomFloor
-# from igibson.object_states.utils import sample_kinematics
-# from igibson.objects.usd_object import URDFObject
-# from igibson.robots.behavior_robot import BRBody, BREye, BRHand
-# from igibson.utils.behavior_robot_planning_utils import dry_run_base_plan, plan_base_motion_br, plan_hand_motion_br
-# from igibson.utils.utils import restoreState
+# from omnigibson import object_states
+# from omnigibson.envs.behavior_env import BehaviorEnv
+# # from omnigibson.external.pybullet_tools.utils import CIRCULAR_LIMITS
+# from omnigibson.object_states.on_floor import RoomFloor
+# from omnigibson.object_states.utils import sample_kinematics
+# from omnigibson.objects.usd_object import URDFObject
+# from omnigibson.robots.behavior_robot import BRBody, BREye, BRHand
+# from omnigibson.utils.behavior_robot_planning_utils import dry_run_base_plan, plan_base_motion_br, plan_hand_motion_br
+# from omnigibson.utils.utils import restoreState
#
#
# class ActionPrimitives(IntEnum):
@@ -60,7 +60,7 @@
#
# class BehaviorMPEnv(BehaviorEnv):
# """
-# iGibson Environment (OpenAI Gym interface)
+# OmniGibson Environment (OpenAI Gym interface)
# """
#
# def __init__(
@@ -520,7 +520,7 @@
# parser.add_argument(
# "--config",
# "-c",
-# default="igibson/examples/configs/behavior.yaml",
+# default="omnigibson/examples/configs/behavior.yaml",
# help="which config file to use [default: use yaml files in examples/configs]",
# )
# parser.add_argument(
diff --git a/igibson/envs/env_base.py b/igibson/envs/env_base.py
index f54792f52..981951c40 100644
--- a/igibson/envs/env_base.py
+++ b/igibson/envs/env_base.py
@@ -2,15 +2,15 @@
import logging
from collections import OrderedDict
-import igibson as ig
-from igibson.macros import gm, create_module_macros
-from igibson.robots import REGISTERED_ROBOTS
-from igibson.tasks import REGISTERED_TASKS
-from igibson.scenes import REGISTERED_SCENES
-from igibson.utils.gym_utils import GymObservable
-from igibson.utils.sim_utils import get_collisions
-from igibson.utils.config_utils import parse_config
-from igibson.utils.python_utils import assert_valid_key, merge_nested_dicts, create_class_from_registry_and_config,\
+import omnigibson as og
+from omnigibson.macros import gm, create_module_macros
+from omnigibson.robots import REGISTERED_ROBOTS
+from omnigibson.tasks import REGISTERED_TASKS
+from omnigibson.scenes import REGISTERED_SCENES
+from omnigibson.utils.gym_utils import GymObservable
+from omnigibson.utils.sim_utils import get_collisions
+from omnigibson.utils.config_utils import parse_config
+from omnigibson.utils.python_utils import assert_valid_key, merge_nested_dicts, create_class_from_registry_and_config,\
Serializable, Recreatable
@@ -78,11 +78,11 @@ def __init__(
merge_nested_dicts(base_dict=self.config, extra_dict=parse_config(config), inplace=True)
# Set the simulator settings
- ig.sim.set_simulation_dt(physics_dt=physics_timestep, rendering_dt=action_timestep)
- ig.sim.viewer_width = self.render_config["viewer_width"]
- ig.sim.viewer_height = self.render_config["viewer_height"]
- ig.sim.vertical_fov = self.render_config["vertical_fov"]
- ig.sim.device = device
+ og.sim.set_simulation_dt(physics_dt=physics_timestep, rendering_dt=action_timestep)
+ og.sim.viewer_width = self.render_config["viewer_width"]
+ og.sim.viewer_height = self.render_config["viewer_height"]
+ og.sim.vertical_fov = self.render_config["vertical_fov"]
+ og.sim.device = device
# Load this environment
self.load()
@@ -240,8 +240,8 @@ def _load_task(self):
cls_type_descriptor="task",
)
- assert ig.sim.is_stopped(), "sim should be stopped when load_task starts"
- ig.sim.play()
+ assert og.sim.is_stopped(), "sim should be stopped when load_task starts"
+ og.sim.play()
# Load task. Should load additional task-relevant objects and configure the scene into its default initial state
self._task.load(env=self)
@@ -249,7 +249,7 @@ def _load_task(self):
# Update the initial scene state
self._scene.update_initial_state()
- ig.sim.stop()
+ og.sim.stop()
def _load_scene(self):
"""
@@ -262,7 +262,7 @@ def _load_scene(self):
cfg=self.scene_config,
cls_type_descriptor="scene",
)
- ig.sim.import_scene(scene)
+ og.sim.import_scene(scene)
# Save scene internally
self._scene = scene
@@ -288,7 +288,7 @@ def _load_robots(self):
cls_type_descriptor="robot",
)
# Import the robot into the simulator
- ig.sim.import_object(robot)
+ og.sim.import_object(robot)
def _load_observation_space(self):
# Grab robot(s) and task obs spaces
@@ -323,7 +323,7 @@ def load(self):
self._load_robots()
self._load_task()
- ig.sim.play()
+ og.sim.play()
self.reset()
# Load the obs / action spaces
@@ -352,7 +352,7 @@ def close(self):
"""
Clean up the environment and shut down the simulation.
"""
- ig.sim.close()
+ og.sim.close()
def get_obs(self):
"""
@@ -458,7 +458,7 @@ def step(self, action):
robot.apply_action(action_dict[robot.name])
# Run simulation step
- ig.sim.step()
+ og.sim.step()
# Grab collisions and store internally
if gm.ENABLE_GLOBAL_CONTACT_REPORTING:
@@ -502,7 +502,7 @@ def randomize_domain(self):
self.reload_model_object_randomization()
if self._texture_randomization_freq is not None:
if self._current_episode % self._texture_randomization_freq == 0:
- ig.sim.scene.randomize_texture()
+ og.sim.scene.randomize_texture()
def _reset_variables(self):
"""
@@ -519,8 +519,8 @@ def reset(self):
Reset episode.
"""
# Stop and restart the simulation
- ig.sim.stop()
- ig.sim.play()
+ og.sim.stop()
+ og.sim.play()
# Do any domain randomization
self.randomize_domain()
@@ -536,7 +536,7 @@ def reset(self):
self._reset_variables()
# Run a single simulator step to make sure we can grab updated observations
- ig.sim.step()
+ og.sim.step()
# Grab and return observations
obs = self.get_obs()
diff --git a/igibson/envs/igibson_ray_env.py b/igibson/envs/igibson_ray_env.py
index fea8da62a..6ddf35e07 100644
--- a/igibson/envs/igibson_ray_env.py
+++ b/igibson/envs/igibson_ray_env.py
@@ -1,7 +1,7 @@
# TODO: Refactor!
# """
-# Example showing how to wrap the iGibson class using ray for low-level environment control.
+# Example showing how to wrap the OmniGibson class using ray for low-level environment control.
# Multiple environments are only supported on Linux. If issues arise, please ensure torch/numpy
# are installed *without* MKL support.
# """
@@ -10,13 +10,13 @@
#
# import ray
#
-# from igibson.envs.igibson_env import iGibsonEnv
+# from omnigibson.envs.omnigibson_env import OmniGibsonEnv
#
# ray.init()
#
#
# @ray.remote
-# class iGibsonRayEnv(iGibsonEnv):
+# class OmniGibsonRayEnv(OmniGibsonEnv):
# def sample_action_space(self):
# return self.action_space.sample()
#
@@ -36,7 +36,7 @@
# )
# args = parser.parse_args()
#
-# env = iGibsonRayEnv.remote(
+# env = OmniGibsonRayEnv.remote(
# config_file=args.config, mode=args.mode, action_timestep=1.0 / 10.0, physics_timestep=1.0 / 40.0
# )
#
diff --git a/igibson/envs/parallel_env.py b/igibson/envs/parallel_env.py
index 147fc4fd2..5f7de6a12 100644
--- a/igibson/envs/parallel_env.py
+++ b/igibson/envs/parallel_env.py
@@ -8,11 +8,11 @@
#
# import numpy as np
#
-# import igibson
-# from igibson.envs.igibson_env import iGibsonEnv
+# import omnigibson
+# from omnigibson.envs.omnigibson_env import OmniGibsonEnv
#
#
-# class ParallelNavEnv(iGibsonEnv):
+# class ParallelNavEnv(OmniGibsonEnv):
# """Batch together environments and simulate them in external processes.
# The environments are created in external processes by calling the provided
# callables. This can be an environment class, or a function creating the
@@ -245,10 +245,10 @@
#
#
# if __name__ == "__main__":
-# config_filename = os.path.join(os.path.dirname(igibson.__file__), "..", "tests", "test.yaml")
+# config_filename = os.path.join(os.path.dirname(omnigibson.__file__), "..", "tests", "test.yaml")
#
# def load_env():
-# return iGibsonEnv(config_file=config_filename, mode="headless")
+# return OmniGibsonEnv(config_file=config_filename, mode="headless")
#
# parallel_env = ParallelNavEnv([load_env] * 2, blocking=False)
#
diff --git a/igibson/examples/README.md b/igibson/examples/README.md
index 685523a5d..f6351899c 100644
--- a/igibson/examples/README.md
+++ b/igibson/examples/README.md
@@ -1,19 +1,19 @@
### Code Examples
-The following examples illustrate the use of iGibson.
+The following examples illustrate the use of OmniGibson.
If you are interested in just getting started as an end-user, you only need to check out `./environments`.
-If you are looking for examples of BEHAVIOR, the benchmark of household activities that uses iGibson, please check the BEHAVIOR repository at https://github.com/StanfordVL/behavior.
+If you are looking for examples of BEHAVIOR, the benchmark of household activities that uses OmniGibson, please check the BEHAVIOR repository at https://github.com/StanfordVL/behavior.
-- environments: how to instantiate iGibson environments with interactive or static scenes, optionally with a scene selector.
+- environments: how to instantiate OmniGibson environments with interactive or static scenes, optionally with a scene selector.
- learning: how to train RL policies for robot navigation using stable baselines 3, and how to save and replay demos of agents for imitation learning.
- objects: how to create, load, and place objects to predefined locations or using a logic sampler (e.g. onTop(A, B)), how to change texture as a function of the temperature, and how to generate the minimum volume bounding boxes of objects.
- object_states: how to change various object states, including dusty, stained, (water sources) toggled on, (cleaning tool) soaked, sliced, and temperature, and how to save and reload object states.
- observations: how to generate different observation modalities such as RGB, depth, LiDAR, segmentation, etc.
- renderer: how to use the renderer directly, without the physics engine.
- robots: how to (keyboard) control robots with differential drive controllers, IK controllers and sampling-based motion planners.
-- ros: how to run ROS with iGibson as if it is the real world.
+- ros: how to run ROS with OmniGibson as if it is the real world.
- scenes: how to load interactive and non-interactive scenes, how to use domain randomization (of object models and/or texture), and how to create a tour video of the scenes.
-- vr: how to use iGibson with VR.
-- web_ui: how to start a web server that hosts iGibson environments.
+- vr: how to use OmniGibson with VR.
+- web_ui: how to start a web server that hosts OmniGibson environments.
diff --git a/igibson/examples/environments/behavior_env_demo.py b/igibson/examples/environments/behavior_env_demo.py
index f9c81ecc6..ec753a114 100644
--- a/igibson/examples/environments/behavior_env_demo.py
+++ b/igibson/examples/environments/behavior_env_demo.py
@@ -3,7 +3,7 @@
import yaml
-import igibson as ig
+import omnigibson as og
def main(random_selection=False, headless=False, short_exec=False):
@@ -16,14 +16,14 @@ def main(random_selection=False, headless=False, short_exec=False):
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
# Load the pre-selected configuration
- config_filename = os.path.join(ig.example_config_path, "fetch_behavior.yaml")
+ config_filename = os.path.join(og.example_config_path, "fetch_behavior.yaml")
cfg = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
# Load the environment
- env = ig.Environment(configs=cfg)
+ env = og.Environment(configs=cfg)
# Allow user to move camera more easily
- ig.sim.enable_viewer_camera_teleoperation()
+ og.sim.enable_viewer_camera_teleoperation()
# Run a simple loop and reset periodically
max_iterations = 10 if not short_exec else 1
diff --git a/igibson/examples/environments/config_selector.py b/igibson/examples/environments/config_selector.py
index 0267def9d..6d7ffb387 100644
--- a/igibson/examples/environments/config_selector.py
+++ b/igibson/examples/environments/config_selector.py
@@ -3,9 +3,9 @@
import yaml
-import igibson as ig
-from igibson.utils.asset_utils import folder_is_hidden
-from igibson.utils.ui_utils import choose_from_options
+import omnigibson as og
+from omnigibson.utils.asset_utils import folder_is_hidden
+from omnigibson.utils.ui_utils import choose_from_options
def main(random_selection=False, headless=False, short_exec=False):
@@ -17,24 +17,24 @@ def main(random_selection=False, headless=False, short_exec=False):
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
# Grab all configs and choose one to load
- ig_config_path = ig.example_config_path
+ og_config_path = og.example_config_path
available_configs = sorted(
[
f
- for f in os.listdir(ig_config_path)
- if (not folder_is_hidden(f) and os.path.isfile(os.path.join(ig_config_path, f)))
+ for f in os.listdir(og_config_path)
+ if (not folder_is_hidden(f) and os.path.isfile(os.path.join(og_config_path, f)))
]
)
config_id = choose_from_options(options=available_configs, name="config file", random_selection=random_selection)
logging.info("Using config file " + config_id)
- config_filename = os.path.join(ig.example_config_path, config_id)
+ config_filename = os.path.join(og.example_config_path, config_id)
config = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
# Uncomment the following line to accelerate loading with only the building
# config["scene"]["load_object_categories"] = ["floors", "walls", "ceilings"]
# Load the environment
- env = ig.Environment(configs=config)
+ env = og.Environment(configs=config)
max_iterations = 10 if not short_exec else 1
for j in range(max_iterations):
diff --git a/igibson/examples/environments/navigation_env_demo.py b/igibson/examples/environments/navigation_env_demo.py
index 24e52aa31..12e1303f3 100644
--- a/igibson/examples/environments/navigation_env_demo.py
+++ b/igibson/examples/environments/navigation_env_demo.py
@@ -3,8 +3,8 @@
import yaml
-import igibson as ig
-from igibson.utils.ui_utils import choose_from_options
+import omnigibson as og
+from omnigibson.utils.ui_utils import choose_from_options
def main(random_selection=False, headless=False, short_exec=False):
@@ -24,7 +24,7 @@ def main(random_selection=False, headless=False, short_exec=False):
}
scene_type = choose_from_options(options=scene_options, name="scene type", random_selection=random_selection)
config_name = "turtlebot_nav" if scene_type == "InteractiveTraversableScene" else "turtlebot_static_nav"
- config_filename = os.path.join(ig.example_config_path, f"{config_name}.yaml")
+ config_filename = os.path.join(og.example_config_path, f"{config_name}.yaml")
config = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
# If the scene type is interactive, also check if we want to quick load or full load the scene
@@ -38,10 +38,10 @@ def main(random_selection=False, headless=False, short_exec=False):
config["scene"]["load_object_categories"] = ["floors", "walls", "ceilings"]
# Load the environment
- env = ig.Environment(configs=config)
+ env = og.Environment(configs=config)
# Allow user to move camera more easily
- ig.sim.enable_viewer_camera_teleoperation()
+ og.sim.enable_viewer_camera_teleoperation()
# Run a simple loop and reset periodically
max_iterations = 10 if not short_exec else 1
diff --git a/igibson/examples/example_selector.py b/igibson/examples/example_selector.py
index c11865dae..c397c51c3 100644
--- a/igibson/examples/example_selector.py
+++ b/igibson/examples/example_selector.py
@@ -5,8 +5,8 @@
from multiprocessing import Process
from collections import OrderedDict
-import igibson.examples as examples
-from igibson.utils.ui_utils import choose_from_options
+import omnigibson.examples as examples
+from omnigibson.utils.ui_utils import choose_from_options
TIMEOUT = 4
@@ -72,7 +72,7 @@ def main():
elif help_demo == 3:
print("Toggle the test mode to execute examples (short, headless versions)")
else:
- module_help = importlib.import_module("igibson.examples." + examples_list[help_demo])
+ module_help = importlib.import_module("omnigibson.examples." + examples_list[help_demo])
print(module_help.main.__doc__)
input("Press enter")
elif selected_demo == 1:
@@ -92,7 +92,7 @@ def main():
continue
print("Executing " + examples_list[idx])
- i = importlib.import_module("igibson.examples." + examples_list[idx])
+ i = importlib.import_module("omnigibson.examples." + examples_list[idx])
if test_mode:
p = Process(
target=i.main,
@@ -137,7 +137,7 @@ def main():
print(
"Executing " + examples_list[selected_demo] + " " + ["in interactive mode", "in test mode"][test_mode]
)
- i = importlib.import_module("igibson.examples." + examples_list[selected_demo])
+ i = importlib.import_module("omnigibson.examples." + examples_list[selected_demo])
i.main(random_selection=test_mode, headless=test_mode, short_exec=test_mode)
diff --git a/igibson/examples/learning/demo_collection_example.py b/igibson/examples/learning/demo_collection_example.py
index e4731c102..0aa6ff445 100644
--- a/igibson/examples/learning/demo_collection_example.py
+++ b/igibson/examples/learning/demo_collection_example.py
@@ -7,11 +7,11 @@
import numpy as np
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.utils.log_utils import IGLogWriter
-from igibson.utils.config_utils import parse_config
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.utils.log_utils import IGLogWriter
+from omnigibson.utils.config_utils import parse_config
def main(random_selection=False, headless=False, short_exec=False):
@@ -73,7 +73,7 @@ def parse_args():
choices=scene_choices,
default="Rs_int",
nargs="?",
- help="Scene name/ID matching iGibson interactive scenes.",
+ help="Scene name/ID matching OmniGibson interactive scenes.",
)
parser.add_argument(
"--task",
@@ -114,7 +114,7 @@ def parse_args():
parser.add_argument(
"--config",
help="which config file to use [default: use yaml files in examples/configs]",
- default=os.path.join(igibson.example_config_path, "behavior_vr.yaml"),
+ default=os.path.join(omnigibson.example_config_path, "behavior_vr.yaml"),
required=False,
)
return parser.parse_args()
@@ -129,17 +129,17 @@ def collect_demo(
disable_save=False,
disable_scene_cache=False,
profile=False,
- config_file=os.path.join(igibson.example_config_path, "behavior_vr.yaml"),
+ config_file=os.path.join(omnigibson.example_config_path, "behavior_vr.yaml"),
short_exec=False,
):
""" """
# HDR files for PBR rendering
- hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
- hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+ hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+ hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
- background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+ background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
# Rendering settings
rendering_settings = MeshRendererSettings(
@@ -162,7 +162,7 @@ def collect_demo(
config["instance_id"] = instance_id
config["online_sampling"] = disable_scene_cache
config["load_clutter"] = True
- env = iGibsonEnv(
+ env = OmniGibsonEnv(
config_file=config,
mode="headless",
action_timestep=1 / 30.0,
diff --git a/igibson/examples/learning/demo_replaying_example.py b/igibson/examples/learning/demo_replaying_example.py
index 629b6e7c2..399f9e34b 100644
--- a/igibson/examples/learning/demo_replaying_example.py
+++ b/igibson/examples/learning/demo_replaying_example.py
@@ -7,13 +7,13 @@
import h5py
import numpy as np
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.utils.git_utils import project_git_info
-from igibson.utils.log_utils import IGLogReader, IGLogWriter
-from igibson.utils.config_utils import parse_config, parse_str_config
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.utils.git_utils import project_git_info
+from omnigibson.utils.log_utils import IGLogReader, IGLogWriter
+from omnigibson.utils.config_utils import parse_config, parse_str_config
def main(random_selection=False, headless=False, short_exec=False):
@@ -86,7 +86,7 @@ def parse_args():
parser.add_argument(
"--config",
help="which config file to use [default: use yaml files in examples/configs]",
- default=os.path.join(igibson.example_config_path, "behavior_vr.yaml"),
+ default=os.path.join(omnigibson.example_config_path, "behavior_vr.yaml"),
)
return parser.parse_args()
@@ -98,7 +98,7 @@ def replay_demo(
frame_save_path=None,
verbose=True,
mode="headless",
- config_file=os.path.join(igibson.example_config_path, "behavior_vr.yaml"),
+ config_file=os.path.join(omnigibson.example_config_path, "behavior_vr.yaml"),
start_callbacks=[],
step_callbacks=[],
end_callbacks=[],
@@ -121,23 +121,23 @@ def replay_demo(
mode, the demo will be replayed with simple robot view.
@param config_file: environment config file
@param start_callback: A callback function that will be called immediately before starting to replay steps. Should
- take two arguments: iGibsonEnv and IGLogReader
+ take two arguments: OmniGibsonEnv and IGLogReader
@param step_callback: A callback function that will be called immediately following each replayed step. Should
- take two arguments: iGibsonEnv and IGLogReader
+ take two arguments: OmniGibsonEnv and IGLogReader
@param end_callback: A callback function that will be called when replay has finished. Should
- take two arguments: iGibsonEnv and IGLogReader
+ take two arguments: OmniGibsonEnv and IGLogReader
@param profile: Whether the replay should be profiled, with profiler output to stdout.
@param image_size: The image size that should be used by the renderer.
@param use_pb_gui: display the interactive pybullet gui (for debugging)
@return if disable_save is True, returns None. Otherwise, returns a boolean indicating if replay was deterministic.
"""
# HDR files for PBR rendering
- hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
- hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+ hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+ hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
- background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+ background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
# VR rendering settings
rendering_setting = MeshRendererSettings(
@@ -207,7 +207,7 @@ def replay_demo(
config["image_height"] = image_size[1]
config["online_sampling"] = False
- env = iGibsonEnv(
+ env = OmniGibsonEnv(
config_file=config,
mode=mode,
action_timestep=render_timestep,
diff --git a/igibson/examples/learning/ray_rllib_example.py b/igibson/examples/learning/ray_rllib_example.py
index d3c64aa08..7d176c5d6 100644
--- a/igibson/examples/learning/ray_rllib_example.py
+++ b/igibson/examples/learning/ray_rllib_example.py
@@ -1,5 +1,5 @@
"""
-Example showing how to wrap the iGibson class using ray for rllib.
+Example showing how to wrap the OmniGibson class using ray for rllib.
Multiple environments are only supported on Linux. If issues arise, please ensure torch/numpy
are installed *without* MKL support.
@@ -25,8 +25,8 @@
from ray.rllib.models.torch.torch_modelv2 import TorchModelV2
from ray.tune.registry import register_env
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
# ray.init(local_mode=True)
ray.init(ignore_reinit_error=True)
@@ -98,7 +98,7 @@ def forward(self, x):
return x
-class iGibsonPPOModel(TorchModelV2, nn.Module):
+class OmniGibsonPPOModel(TorchModelV2, nn.Module):
"""Example of a PyTorch custom model that just delegates to a fc-net."""
def __init__(self, obs_space, action_space, num_outputs, model_config, name):
@@ -138,7 +138,7 @@ def value_function(self):
return self._value_out
-class iGibsonRLLibEnv(iGibsonEnv):
+class OmniGibsonRLLibEnv(OmniGibsonEnv):
def __init__(self, env_config):
super().__init__(
config_file=env_config["config_file"],
@@ -161,7 +161,7 @@ def main(random_selection=False, headless=False, short_exec=False):
parser.add_argument(
"--config",
"-c",
- default=os.path.join(igibson.root_path, "configs", "turtlebot_nav.yaml"),
+ default=os.path.join(omnigibson.root_path, "configs", "turtlebot_nav.yaml"),
help="which config file to use [default: use yaml files in examples/configs]",
)
parser.add_argument(
@@ -176,7 +176,7 @@ def main(random_selection=False, headless=False, short_exec=False):
)
parser.add_argument("--restore_checkpoint", default=None, help="Checkpoint to force restore")
parser.add_argument(
- "--exp_name", default="my_igibson_run", help="which mode for simulation (default: headless)"
+ "--exp_name", default="my_omnigibson_run", help="which mode for simulation (default: headless)"
)
parser.add_argument(
"--mode",
@@ -194,21 +194,21 @@ def main(random_selection=False, headless=False, short_exec=False):
exp_name = args.exp_name
mode = args.mode
else:
- config = os.path.join(igibson.root_path, "configs", "turtlebot_nav.yaml")
+ config = os.path.join(omnigibson.root_path, "configs", "turtlebot_nav.yaml")
local_dir = None
resume = None
restore_checkpoint = None
- exp_name = "my_igibson_run"
+ exp_name = "my_omnigibson_run"
mode = "headless"
- ModelCatalog.register_custom_model("iGibsonPPOModel", iGibsonPPOModel)
- register_env("iGibsonEnv", lambda c: iGibsonRLLibEnv(c))
+ ModelCatalog.register_custom_model("OmniGibsonPPOModel", OmniGibsonPPOModel)
+ register_env("OmniGibsonEnv", lambda c: OmniGibsonRLLibEnv(c))
# Note, some things you may want to change
# See: https://docs.ray.io/en/master/rllib-training.html#common-parameters for more details
# num_gpus -- number of GPUs used for the driver (trainer)
# num_cpus_per_driver -- number of cpus used on the driver (trainer)
# num_workers -- defines the number of workers collecting iG trials
- # num_envs_per_worker -- number of iGibson instances per worker
+ # num_envs_per_worker -- number of OmniGibson instances per worker
# num_cpus_per_worker -- number of CPUs used on each worker
# remote_worker_envs -- this parallelizes the data collection loop *on the worker* into its own process with IPC overhead, not recommended
# train_batch_size -- the total timesteps per SGD (affects RAM usage)
@@ -217,7 +217,7 @@ def main(random_selection=False, headless=False, short_exec=False):
#
# train_batch_size should be divisible by sgd_minibatch_size **and** (rollout_fragment_length * num_workers)
# Good config: train_batch_size = 1000, rollout_fragment_length = 200, num_workers = 5, sgd_minibatch_size = the largest value your GPU can support that divides train_batch_size (500 for a Titan X)
- config_filename = os.path.join(igibson.example_config_path, config)
+ config_filename = os.path.join(omnigibson.example_config_path, config)
config_data = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
config_data["image_width"] = 160
@@ -228,7 +228,7 @@ def main(random_selection=False, headless=False, short_exec=False):
short_exec = True # Force a short training process. Remove this for real training
config_data["texture_scale"] = 0.5
config = {
- "env": "iGibsonEnv",
+ "env": "OmniGibsonEnv",
"env_config": {
"config_file": config_data,
"mode": mode,
@@ -245,13 +245,13 @@ def main(random_selection=False, headless=False, short_exec=False):
"train_batch_size": 1000 if not short_exec else 10,
"sgd_minibatch_size": 100 if not short_exec else 1,
"model": {
- "custom_model": "iGibsonPPOModel",
+ "custom_model": "OmniGibsonPPOModel",
},
"framework": "torch",
}
else: # Server or Linux
config = {
- "env": "iGibsonEnv",
+ "env": "OmniGibsonEnv",
"env_config": {
"config_file": config_data,
"mode": mode,
@@ -268,7 +268,7 @@ def main(random_selection=False, headless=False, short_exec=False):
"train_batch_size": 1000 if not short_exec else 10,
"sgd_minibatch_size": 100 if not short_exec else 1,
"model": {
- "custom_model": "iGibsonPPOModel",
+ "custom_model": "OmniGibsonPPOModel",
},
"framework": "torch",
}
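For context on the divisibility note in the hunk above, the following is a minimal, self-contained sketch (not taken from the repository) that restates the constraint with the example numbers quoted in the comments; the dictionary keys simply mirror the ones already used in this file and nothing here prescribes real training settings.

    # Example numbers quoted in the comments above.
    num_workers = 5
    rollout_fragment_length = 200
    train_batch_size = 1000
    sgd_minibatch_size = 500  # largest divisor of train_batch_size that fits on the GPU in the example

    # train_batch_size must be divisible by sgd_minibatch_size and by
    # rollout_fragment_length * num_workers (1000 % 500 == 0 and 1000 % (200 * 5) == 0).
    assert train_batch_size % sgd_minibatch_size == 0
    assert train_batch_size % (rollout_fragment_length * num_workers) == 0

    ppo_config = {
        "env": "OmniGibsonEnv",  # name registered via register_env in this file
        "num_workers": num_workers,
        "rollout_fragment_length": rollout_fragment_length,
        "train_batch_size": train_batch_size,
        "sgd_minibatch_size": sgd_minibatch_size,
        "model": {"custom_model": "OmniGibsonPPOModel"},
        "framework": "torch",
    }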
diff --git a/igibson/examples/learning/stable_baselines3_example.py b/igibson/examples/learning/stable_baselines3_example.py
index cd260dc77..a4dfbff6d 100644
--- a/igibson/examples/learning/stable_baselines3_example.py
+++ b/igibson/examples/learning/stable_baselines3_example.py
@@ -2,8 +2,8 @@
import os
from typing import Callable
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
try:
import gym
@@ -108,9 +108,9 @@ def main(random_selection=False, headless=False, short_exec=False):
# Function callback to create environments
def make_env(rank: int, seed: int = 0) -> Callable:
- def _init() -> iGibsonEnv:
- env = iGibsonEnv(
- config_file=os.path.join(igibson.example_config_path, config_file),
+ def _init() -> OmniGibsonEnv:
+ env = OmniGibsonEnv(
+ config_file=os.path.join(omnigibson.example_config_path, config_file),
mode="headless",
action_timestep=1 / 10.0,
physics_timestep=1 / 120.0,
@@ -126,8 +126,8 @@ def _init() -> iGibsonEnv:
env = VecMonitor(env)
# Create a new environment for evaluation
- eval_env = iGibsonEnv(
- config_file=os.path.join(igibson.example_config_path, config_file),
+ eval_env = OmniGibsonEnv(
+ config_file=os.path.join(omnigibson.example_config_path, config_file),
mode="headless",
action_timestep=1 / 10.0,
physics_timestep=1 / 120.0,
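The hunks above rename the environment factory used with Stable-Baselines3. As a rough, self-contained sketch of that factory pattern (assuming stable-baselines3's standard SubprocVecEnv/VecMonitor/PPO API and the OmniGibsonEnv constructor arguments shown in the diff; the config file name and policy choice are placeholders):

    import os

    from stable_baselines3 import PPO
    from stable_baselines3.common.vec_env import SubprocVecEnv, VecMonitor

    import omnigibson
    from omnigibson.envs.omnigibson_env import OmniGibsonEnv


    def make_env(rank: int, seed: int = 0, config_file: str = "turtlebot_nav.yaml"):
        # Each callable builds one headless OmniGibsonEnv; SubprocVecEnv invokes it in a subprocess.
        def _init() -> OmniGibsonEnv:
            env = OmniGibsonEnv(
                config_file=os.path.join(omnigibson.example_config_path, config_file),
                mode="headless",
                action_timestep=1 / 10.0,
                physics_timestep=1 / 120.0,
            )
            env.seed(seed + rank)  # assumes the env follows the gym.Env seeding interface
            return env

        return _init


    if __name__ == "__main__":
        env = VecMonitor(SubprocVecEnv([make_env(i) for i in range(2)]))
        model = PPO("MultiInputPolicy", env, verbose=1)  # placeholder policy; the example uses a custom extractor
        model.learn(total_timesteps=1_000)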
diff --git a/igibson/examples/object_states/attachment_demo.py b/igibson/examples/object_states/attachment_demo.py
index b4962e61e..331743bad 100644
--- a/igibson/examples/object_states/attachment_demo.py
+++ b/igibson/examples/object_states/attachment_demo.py
@@ -1,19 +1,19 @@
import numpy as np
-import igibson as ig
-from igibson import object_states
-from igibson.macros import gm
-from igibson.objects import DatasetObject, LightObject
-from igibson.utils.ui_utils import choose_from_options
+import omnigibson as og
+from omnigibson import object_states
+from omnigibson.macros import gm
+from omnigibson.objects import DatasetObject, LightObject
+from omnigibson.utils.ui_utils import choose_from_options
def setup_scene_for_abilities(abilities1, abilities2):
# Make sure simulation is stopped
- ig.sim.stop()
+ og.sim.stop()
# Recreate the environment (this will automatically override the old environment instance)
# We load the default config, which is simply an EmptyScene with no objects loaded in by default
- env = ig.Environment(configs=f"{ig.example_config_path}/default_cfg.yaml") #, physics_timestep=1/120., action_timestep=1/60.)
+ env = og.Environment(configs=f"{og.example_config_path}/default_cfg.yaml") #, physics_timestep=1/120., action_timestep=1/60.)
objs = [None, None]
abilities_arr = [abilities1, abilities2]
@@ -27,7 +27,7 @@ def setup_scene_for_abilities(abilities1, abilities2):
radius=0.01,
intensity=5000,
)
- ig.sim.import_object(light)
+ og.sim.import_object(light)
light.set_position(np.array([0, 0, 1.0]))
for idx, (obj_category, obj_model) in enumerate((("apple", "00_0"), ("fridge", "12252"))):
@@ -39,11 +39,11 @@ def setup_scene_for_abilities(abilities1, abilities2):
name=f"{name}",
abilities=abilities_arr[idx],
)
- ig.sim.import_object(objs[idx])
+ og.sim.import_object(objs[idx])
objs[idx].set_position_orientation(position=position_arr[idx])
# Set viewer camera pose
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([-0.972333, -2.0899 , 1.0654 ]),
orientation=np.array([ 0.60682517, -0.24656188, -0.28443909, 0.70004632]),
)
@@ -66,7 +66,7 @@ def demo_sticky_attachment():
# Obj1 moves towards obj2 and they are attached together.
obj1.set_linear_velocity(velocity=np.array([3.0, 0, 3.0]))
for i in range(200):
- ig.sim.step()
+ og.sim.step()
assert obj1.states[object_states.StickyAttachment].get_value(obj2)
# Apply a large force to obj1 but the two objects cannot move much because obj2 is heavy.
diff --git a/igibson/examples/object_states/cleaning_demo.py b/igibson/examples/object_states/cleaning_demo.py
index a1a6f5f8b..cba21c5f4 100644
--- a/igibson/examples/object_states/cleaning_demo.py
+++ b/igibson/examples/object_states/cleaning_demo.py
@@ -1,10 +1,10 @@
import logging
import numpy as np
-import igibson as ig
-from igibson import object_states
-from igibson.macros import gm
-from igibson.objects import PrimitiveObject
+import omnigibson as og
+from omnigibson import object_states
+from omnigibson.macros import gm
+from omnigibson.objects import PrimitiveObject
def main(random_selection=False, headless=False, short_exec=False):
@@ -27,7 +27,7 @@ def main(random_selection=False, headless=False, short_exec=False):
}
}
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Load a cleaning tool (a block with the ability to be soaked and is a cleaning tool)
block = PrimitiveObject(
@@ -38,7 +38,7 @@ def main(random_selection=False, headless=False, short_exec=False):
rgba=[0.5, 1.0, 1.0, 1.0],
abilities={"soakable": {}, "cleaningTool": {}},
)
- ig.sim.import_object(block)
+ og.sim.import_object(block)
block.set_position([-1.4, 3.0, 1.5])
# Set everything that can go dirty and activate the water sources
@@ -60,7 +60,7 @@ def main(random_selection=False, headless=False, short_exec=False):
obj.states[object_states.ToggledOn].set_value(True)
# Set the camera to be in a good position
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([-0.825556, 2.42499 , 1.04104 ]),
orientation=np.array([0.56919735, 0.09896035, 0.13981109, 0.80416049]),
)
diff --git a/igibson/examples/object_states/cleaning_demo_simple.py b/igibson/examples/object_states/cleaning_demo_simple.py
index bd0abf945..a7f06966e 100644
--- a/igibson/examples/object_states/cleaning_demo_simple.py
+++ b/igibson/examples/object_states/cleaning_demo_simple.py
@@ -2,10 +2,10 @@
import numpy as np
-import igibson as ig
-from igibson import object_states
-from igibson.objects import DatasetObject
-from igibson.macros import gm
+import omnigibson as og
+from omnigibson import object_states
+from omnigibson.objects import DatasetObject
+from omnigibson.macros import gm
def main(random_selection=False, headless=False, short_exec=False):
@@ -27,7 +27,7 @@ def main(random_selection=False, headless=False, short_exec=False):
}
}
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Load sink ON
sink = DatasetObject(
@@ -38,7 +38,7 @@ def main(random_selection=False, headless=False, short_exec=False):
scale=np.array([0.8, 0.8, 0.8]),
abilities={"toggleable": {}, "waterSource": {}, "waterSink": {}},
)
- ig.sim.import_object(sink)
+ og.sim.import_object(sink)
sink.set_position([1, 1, 0.8])
# Load cleaning tool
@@ -51,7 +51,7 @@ def main(random_selection=False, headless=False, short_exec=False):
avg_obj_dims=avg,
fit_avg_dim_volume=True,
)
- ig.sim.import_object(brush)
+ og.sim.import_object(brush)
brush.set_position([0, -2, 0.4])
# Load table with dust
@@ -63,7 +63,7 @@ def main(random_selection=False, headless=False, short_exec=False):
scale=np.array([0.8, 0.8, 0.8]),
abilities={"dustyable": {}},
)
- ig.sim.import_object(desk)
+ og.sim.import_object(desk)
desk.set_position([1, -2, 0.4])
# Load a bowl with stains
@@ -75,7 +75,7 @@ def main(random_selection=False, headless=False, short_exec=False):
scale=np.array([0.8, 0.8, 0.8]),
abilities={"dustyable": {}, "stainable": {}},
)
- ig.sim.import_object(bowl)
+ og.sim.import_object(bowl)
# Take a sim step to make sure everything is initialized properly, and then sanity check the initial state
env.step(np.array([])) # Empty action since no robots are in the scene
@@ -87,7 +87,7 @@ def main(random_selection=False, headless=False, short_exec=False):
# Take a step, and save the state
env.step(np.array([]))
- initial_state = ig.sim.dump_state()
+ initial_state = og.sim.dump_state()
# Main simulation loop.
max_steps = 1000
@@ -114,7 +114,7 @@ def main(random_selection=False, headless=False, short_exec=False):
logging.info("Reset because max steps")
# Reset to the initial state
- ig.sim.load_state(initial_state)
+ og.sim.load_state(initial_state)
iteration += 1
diff --git a/igibson/examples/object_states/heated_source_or_sink_demo.py b/igibson/examples/object_states/heated_source_or_sink_demo.py
index c22a4cacd..997771794 100644
--- a/igibson/examples/object_states/heated_source_or_sink_demo.py
+++ b/igibson/examples/object_states/heated_source_or_sink_demo.py
@@ -1,8 +1,8 @@
import numpy as np
-import igibson as ig
-from igibson import object_states
-from igibson.macros import gm
-from igibson.objects import DatasetObject
+import omnigibson as og
+from omnigibson import object_states
+from omnigibson.macros import gm
+from omnigibson.objects import DatasetObject
def main():
@@ -17,10 +17,10 @@ def main():
}
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Set camera to appropriate viewing pose
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([-0.0792399, -1.30104, 1.51981]),
orientation=np.array([0.54897692, 0.00110359, 0.00168013, 0.83583509]),
)
@@ -39,7 +39,7 @@ def main():
assert object_states.ToggledOn in stove.states
# Import this object into the simulator, and take a step to initialize the object
- ig.sim.import_object(stove)
+ og.sim.import_object(stove)
stove.set_position(np.array([0, 0, 0.4]))
env.step(np.array([]))
diff --git a/igibson/examples/object_states/heated_state_demo.py b/igibson/examples/object_states/heated_state_demo.py
index 7fa1ebe87..30734ac71 100644
--- a/igibson/examples/object_states/heated_state_demo.py
+++ b/igibson/examples/object_states/heated_state_demo.py
@@ -1,8 +1,8 @@
import numpy as np
-import igibson as ig
-from igibson import object_states
-from igibson.macros import gm
-from igibson.objects import DatasetObject, LightObject
+import omnigibson as og
+from omnigibson import object_states
+from omnigibson.macros import gm
+from omnigibson.objects import DatasetObject, LightObject
def main():
@@ -17,10 +17,10 @@ def main():
}
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Set camera to appropriate viewing pose
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([ 0.182103, -2.07295 , 0.14017 ]),
orientation=np.array([0.77787037, 0.00267566, 0.00216149, 0.62841535]),
)
@@ -33,7 +33,7 @@ def main():
radius=0.01,
intensity=1e5,
)
- ig.sim.import_object(light)
+ og.sim.import_object(light)
light.set_position(np.array([-2.0, -2.0, 1.0]))
env.step(np.array([]))
@@ -56,7 +56,7 @@ def main():
)
# Make sure the bowls can be heated
assert object_states.Heated in obj.states
- ig.sim.import_object(obj)
+ og.sim.import_object(obj)
obj.set_position(np.array([x, 0, 0]))
objs.append(obj)
diff --git a/igibson/examples/object_states/object_state_texture_demo.py b/igibson/examples/object_states/object_state_texture_demo.py
index 869ea808b..d3934e252 100644
--- a/igibson/examples/object_states/object_state_texture_demo.py
+++ b/igibson/examples/object_states/object_state_texture_demo.py
@@ -1,9 +1,9 @@
import numpy as np
-import igibson as ig
-from igibson import object_states
-from igibson.objects import DatasetObject, LightObject
-from igibson.macros import gm, macros
-from igibson.systems import WaterSystem
+import omnigibson as og
+from omnigibson import object_states
+from omnigibson.objects import DatasetObject, LightObject
+from omnigibson.macros import gm, macros
+from omnigibson.systems import WaterSystem
def main():
@@ -19,10 +19,10 @@ def main():
}
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Set camera to appropriate viewing pose
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([ 1.7789 , -1.68822, 1.13551]),
orientation=np.array([0.57065614, 0.20331904, 0.267029 , 0.74947212]),
)
@@ -35,7 +35,7 @@ def main():
radius=0.01,
intensity=1e5,
)
- ig.sim.import_object(light)
+ og.sim.import_object(light)
light.set_position(np.array([-2.0, -2.0, 1.0]))
env.step(np.array([]))
@@ -56,7 +56,7 @@ def main():
assert object_states.ToggledOn in obj.states
# Add the object and take a step to make sure the cabinet is fully initialized
- ig.sim.import_object(obj)
+ og.sim.import_object(obj)
obj.set_position(np.array([0, 0, 0.55]))
env.step(np.array([]))
diff --git a/igibson/examples/object_states/sample_kinematics_demo.py b/igibson/examples/object_states/sample_kinematics_demo.py
index b44f967bf..3f37febad 100644
--- a/igibson/examples/object_states/sample_kinematics_demo.py
+++ b/igibson/examples/object_states/sample_kinematics_demo.py
@@ -3,10 +3,10 @@
import numpy as np
-import igibson as ig
-from igibson import object_states
-from igibson.macros import gm
-from igibson.objects import DatasetObject
+import omnigibson as og
+from omnigibson import object_states
+from omnigibson.macros import gm
+from omnigibson.objects import DatasetObject
def main(random_selection=False, headless=False, short_exec=False):
@@ -29,7 +29,7 @@ def main(random_selection=False, headless=False, short_exec=False):
}
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Sample microwave and boxes
sample_boxes_on_shelf(env)
@@ -55,7 +55,7 @@ def sample_microwave_plates_apples(env):
model="46380",
# scale=2.0,
)
- ig.sim.import_object(cabinet)
+ og.sim.import_object(cabinet)
z_offset = -cabinet.aabb_center[2] + cabinet.aabb_extent[2] / 2
cabinet.set_position(np.array([1.0, 0, z_offset]))
env.step(np.array([])) # One step is needed for the object to be fully initialized
@@ -71,7 +71,7 @@ def sample_microwave_plates_apples(env):
model="7128",
scale=0.5,
)
- ig.sim.import_object(microwave)
+ og.sim.import_object(microwave)
env.step(np.array([])) # One step is needed for the object to be fully initialized
assert microwave.states[object_states.OnTop].set_value(cabinet, True, use_ray_casting_method=True)
assert microwave.states[object_states.Open].set_value(True)
@@ -89,7 +89,7 @@ def sample_microwave_plates_apples(env):
model="plate_000",
bounding_box=np.array([0.25, 0.25, 0.05]),
)
- ig.sim.import_object(plate)
+ og.sim.import_object(plate)
env.step(np.array([])) # One step is needed for the object to be fully initialized
# Put the 1st plate in the microwave
@@ -112,7 +112,7 @@ def sample_microwave_plates_apples(env):
category="apple",
model="00_0",
)
- ig.sim.import_object(apple)
+ og.sim.import_object(apple)
env.step(np.array([])) # One step is needed for the object to be fully initialized
assert apple.states[object_states.OnTop].set_value(plate, True, use_ray_casting_method=True)
logging.info("Apple %d loaded and placed." % j)
@@ -128,7 +128,7 @@ def sample_boxes_on_shelf(env):
model="1170df5b9512c1d92f6bce2b7e6c12b7",
bounding_box=np.array([1.0, 0.4, 2.0]),
)
- ig.sim.import_object(shelf)
+ og.sim.import_object(shelf)
z_offset = -shelf.aabb_center[2] + shelf.aabb_extent[2] / 2
shelf.set_position(np.array([-1.0, 0, z_offset]))
env.step(np.array([])) # One step is needed for the object to be fully initialized
@@ -145,7 +145,7 @@ def sample_boxes_on_shelf(env):
model="cracker_box_000",
bounding_box=np.array([0.2, 0.05, 0.3]),
)
- ig.sim.import_object(box)
+ og.sim.import_object(box)
env.step(np.array([])) # One step is needed for the object to be fully initialized
box.states[object_states.Inside].set_value(shelf, True, use_ray_casting_method=True)
logging.info(f"Box {i} placed.")
diff --git a/igibson/examples/object_states/sliceable_demo_with_reset.py b/igibson/examples/object_states/sliceable_demo_with_reset.py
index a2396efeb..5bf3cb5cb 100644
--- a/igibson/examples/object_states/sliceable_demo_with_reset.py
+++ b/igibson/examples/object_states/sliceable_demo_with_reset.py
@@ -4,13 +4,13 @@
import numpy as np
-from igibson import object_states
-from igibson.objects.usd_object import URDFObject
-from igibson.objects.multi_object_wrappers import ObjectGrouper, ObjectMultiplexer
-from igibson.scenes.empty_scene import EmptyScene
-from igibson.simulator import Simulator
-from igibson.utils.asset_utils import get_ig_model_path
-# from igibson.utils.utils import restoreState
+from omnigibson import object_states
+from omnigibson.objects.usd_object import URDFObject
+from omnigibson.objects.multi_object_wrappers import ObjectGrouper, ObjectMultiplexer
+from omnigibson.scenes.empty_scene import EmptyScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.asset_utils import get_og_model_path
+# from omnigibson.utils.utils import restoreState
def main(random_selection=False, headless=False, short_exec=False):
@@ -34,7 +34,7 @@ def main(random_selection=False, headless=False, short_exec=False):
s.import_scene(scene)
# Load a desk
- model_path = os.path.join(get_ig_model_path("breakfast_table", "19203"), "19203.urdf")
+ model_path = os.path.join(get_og_model_path("breakfast_table", "19203"), "19203.urdf")
desk = URDFObject(
filename=model_path, category="breakfast_table", name="19898", scale=np.array([0.8, 0.8, 0.8]), abilities={}
)
@@ -42,7 +42,7 @@ def main(random_selection=False, headless=False, short_exec=False):
desk.set_position([0, 0, 0.4])
# Create a URDF object of an apple without loading it into the simulator
- model_path = os.path.join(get_ig_model_path("apple", "00_0"), "00_0.urdf")
+ model_path = os.path.join(get_og_model_path("apple", "00_0"), "00_0.urdf")
whole_obj = URDFObject(model_path, name="00_0", category="apple", scale=np.array([1.0, 1.0, 1.0]))
object_parts = []
@@ -53,7 +53,7 @@ def main(random_selection=False, headless=False, short_exec=False):
# Scale the offset accordingly
part_pos = part["pos"] * whole_obj.scale
part_orn = part["orn"]
- part_model_path = get_ig_model_path(part_category, part_model)
+ part_model_path = get_og_model_path(part_category, part_model)
part_filename = os.path.join(part_model_path, part_model + ".urdf")
part_obj_name = whole_obj.name + "_part_{}".format(i)
part_obj = URDFObject(
diff --git a/igibson/examples/object_states/temperature_demo.py b/igibson/examples/object_states/temperature_demo.py
index 289a33fb3..0144b958b 100644
--- a/igibson/examples/object_states/temperature_demo.py
+++ b/igibson/examples/object_states/temperature_demo.py
@@ -2,10 +2,10 @@
import numpy as np
-import igibson as ig
-from igibson import object_states
-from igibson.macros import gm
-from igibson.objects import DatasetObject, LightObject
+import omnigibson as og
+from omnigibson import object_states
+from omnigibson.macros import gm
+from omnigibson.objects import DatasetObject, LightObject
def main(random_selection=False, headless=False, short_exec=False):
@@ -29,10 +29,10 @@ def main(random_selection=False, headless=False, short_exec=False):
}
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1 / 60., physics_timestep=1 / 60.)
+ env = og.Environment(configs=cfg, action_timestep=1 / 60., physics_timestep=1 / 60.)
# Set camera to appropriate viewing pose
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([ 0.46938863, -3.97887141, 1.64106008]),
orientation=np.array([0.63311689, 0.00127259, 0.00155577, 0.77405359]),
)
@@ -45,7 +45,7 @@ def main(random_selection=False, headless=False, short_exec=False):
radius=0.01,
intensity=1e5,
)
- ig.sim.import_object(light)
+ og.sim.import_object(light)
light.set_position(np.array([-2.0, -2.0, 1.0]))
# Load stove ON
@@ -55,7 +55,7 @@ def main(random_selection=False, headless=False, short_exec=False):
category="stove",
model="101943",
)
- ig.sim.import_object(stove)
+ og.sim.import_object(stove)
stove.set_position([0, 0, 0.65])
# Load microwave ON
@@ -66,7 +66,7 @@ def main(random_selection=False, headless=False, short_exec=False):
model="7128",
scale=0.25,
)
- ig.sim.import_object(microwave)
+ og.sim.import_object(microwave)
microwave.set_position([2.5, 0, 0.094])
# Load oven ON
@@ -76,7 +76,7 @@ def main(random_selection=False, headless=False, short_exec=False):
category="oven",
model="7120",
)
- ig.sim.import_object(oven)
+ og.sim.import_object(oven)
oven.set_position([-1.25, 0, 0.80])
# Load tray
@@ -87,7 +87,7 @@ def main(random_selection=False, headless=False, short_exec=False):
model="tray_000",
scale=0.15,
)
- ig.sim.import_object(tray)
+ og.sim.import_object(tray)
tray.set_position([0, 0, 1.24])
# Load fridge
@@ -103,7 +103,7 @@ def main(random_selection=False, headless=False, short_exec=False):
}
},
)
- ig.sim.import_object(fridge)
+ og.sim.import_object(fridge)
fridge.set_position([1.25, 0, 0.80])
# Load 5 apples
@@ -115,7 +115,7 @@ def main(random_selection=False, headless=False, short_exec=False):
category="apple",
model="00_0",
)
- ig.sim.import_object(apple)
+ og.sim.import_object(apple)
apple.set_position([0, i * 0.05, 1.65])
apples.append(apple)
diff --git a/igibson/examples/objects/draw_bounding_box.py b/igibson/examples/objects/draw_bounding_box.py
index 96a7c0642..176e3c098 100644
--- a/igibson/examples/objects/draw_bounding_box.py
+++ b/igibson/examples/objects/draw_bounding_box.py
@@ -2,8 +2,8 @@
import matplotlib.pyplot as plt
import numpy as np
-import igibson as ig
-from igibson.objects import DatasetObject
+import omnigibson as og
+from omnigibson.objects import DatasetObject
from omni.isaac.synthetic_utils.visualization import colorize_bboxes
@@ -28,10 +28,10 @@ def main(random_selection=False, headless=False, short_exec=False):
}
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Set camera to appropriate viewing pose
- cam = ig.sim.viewer_camera
+ cam = og.sim.viewer_camera
cam.set_position_orientation(
position=np.array([-4.62785 , -0.418575, 0.933943]),
orientation=np.array([ 0.52196595, -0.4231939 , -0.46640436, 0.5752612 ]),
@@ -50,7 +50,7 @@ def main(random_selection=False, headless=False, short_exec=False):
model="09_0",
scale=[3.0, 5.0, 2.0],
)
- ig.sim.import_object(banana)
+ og.sim.import_object(banana)
banana.set_position_orientation(
position=np.array([-0.906661, -0.545106, 0.136824]),
orientation=np.array([0, 0, 0.76040583, -0.6494482 ]),
@@ -62,7 +62,7 @@ def main(random_selection=False, headless=False, short_exec=False):
category="door",
model="8930",
)
- ig.sim.import_object(door)
+ og.sim.import_object(door)
door.set_position_orientation(
position=np.array([-2.0, 0, 0.70000001]),
orientation=np.array([0, 0, -0.38268343, 0.92387953]),
diff --git a/igibson/examples/objects/highlight_objects.py b/igibson/examples/objects/highlight_objects.py
index d2617a013..25f2cc383 100644
--- a/igibson/examples/objects/highlight_objects.py
+++ b/igibson/examples/objects/highlight_objects.py
@@ -3,7 +3,7 @@
"""
import logging
import numpy as np
-import igibson as ig
+import omnigibson as og
def main(random_selection=False, headless=False, short_exec=False):
@@ -23,10 +23,10 @@ def main(random_selection=False, headless=False, short_exec=False):
}
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1 / 60., physics_timestep=1 / 60.)
+ env = og.Environment(configs=cfg, action_timestep=1 / 60., physics_timestep=1 / 60.)
# Grab all window objects
- windows = ig.sim.scene.object_registry("category", "window")
+ windows = og.sim.scene.object_registry("category", "window")
# Step environment while toggling window highlighting
i = 0
diff --git a/igibson/examples/objects/load_object_selector.py b/igibson/examples/objects/load_object_selector.py
index f4c770905..22a2261db 100644
--- a/igibson/examples/objects/load_object_selector.py
+++ b/igibson/examples/objects/load_object_selector.py
@@ -1,13 +1,13 @@
import logging
import numpy as np
-import igibson as ig
-from igibson.objects import DatasetObject
-from igibson.utils.asset_utils import (
+import omnigibson as og
+from omnigibson.objects import DatasetObject
+from omnigibson.utils.asset_utils import (
get_all_object_categories,
- get_ig_avg_category_specs,
+ get_og_avg_category_specs,
get_object_models_of_category,
)
-from igibson.utils.ui_utils import choose_from_options
+from omnigibson.utils.ui_utils import choose_from_options
def main(random_selection=False, headless=False, short_exec=False):
@@ -33,7 +33,7 @@ def main(random_selection=False, headless=False, short_exec=False):
cfg["scene"]["scene_model"] = "Adrian"
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1 / 60., physics_timestep=1 / 60.)
+ env = og.Environment(configs=cfg, action_timestep=1 / 60., physics_timestep=1 / 60.)
# Select a category to load
available_obj_categories = get_all_object_categories()
@@ -44,7 +44,7 @@ def main(random_selection=False, headless=False, short_exec=False):
obj_model = choose_from_options(options=available_obj_models, name="object model", random_selection=random_selection)
# Load the specs of the object categories, e.g., common scaling factor
- avg_category_spec = get_ig_avg_category_specs()
+ avg_category_spec = get_og_avg_category_specs()
# Create and load this object into the simulator
obj = DatasetObject(
@@ -55,7 +55,7 @@ def main(random_selection=False, headless=False, short_exec=False):
bounding_box=avg_category_spec.get(obj_category),
fit_avg_dim_volume=True,
)
- ig.sim.import_object(obj)
+ og.sim.import_object(obj)
obj.set_position(np.array([0.5, -0.5, 1.01]))
# Step through the environment
diff --git a/igibson/examples/objects/visualize_object.py b/igibson/examples/objects/visualize_object.py
index 8942b4055..6c6d6944b 100644
--- a/igibson/examples/objects/visualize_object.py
+++ b/igibson/examples/objects/visualize_object.py
@@ -3,20 +3,20 @@
import numpy as np
-import igibson as ig
-from igibson.objects import USDObject, LightObject
-from igibson.utils.asset_utils import (
+import omnigibson as og
+from omnigibson.objects import USDObject, LightObject
+from omnigibson.utils.asset_utils import (
get_all_object_categories,
get_object_models_of_category,
)
-from igibson.utils.ui_utils import choose_from_options
-import igibson.utils.transform_utils as T
+from omnigibson.utils.ui_utils import choose_from_options
+import omnigibson.utils.transform_utils as T
def main(random_selection=False, headless=False, short_exec=False):
"""
Visualizes an object as specified by its USD path, @usd_path. If None is specified, will instead
- result in an object selection from iGibson's object dataset
+ result in an object selection from OmniGibson's object dataset
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
@@ -42,10 +42,10 @@ def main(random_selection=False, headless=False, short_exec=False):
}
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Set camera to appropriate viewing pose
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([-0.00913503, -1.95750906, 1.36407314]),
orientation=np.array([0.63468727, 0.02012955, 0.02448817, 0.77211864]),
)
@@ -58,7 +58,7 @@ def main(random_selection=False, headless=False, short_exec=False):
radius=0.01,
intensity=1e5,
)
- ig.sim.import_object(light0)
+ og.sim.import_object(light0)
light0.set_position(np.array([-2.0, -2.0, 2.0]))
light1 = LightObject(
@@ -68,7 +68,7 @@ def main(random_selection=False, headless=False, short_exec=False):
radius=0.01,
intensity=1e5,
)
- ig.sim.import_object(light1)
+ og.sim.import_object(light1)
light1.set_position(np.array([-2.0, 2.0, 2.0]))
# Make sure we have a valid usd path
@@ -81,7 +81,7 @@ def main(random_selection=False, headless=False, short_exec=False):
available_obj_models = get_object_models_of_category(obj_category)
obj_model = choose_from_options(options=available_obj_models, name="object model", random_selection=random_selection)
- usd_path = f"{ig.ig_dataset_path}/objects/{obj_category}/{obj_model}/usd/{obj_model}.usd"
+ usd_path = f"{og.og_dataset_path}/objects/{obj_category}/{obj_model}/usd/{obj_model}.usd"
# Import the desired object
obj = USDObject(
@@ -90,7 +90,7 @@ def main(random_selection=False, headless=False, short_exec=False):
usd_path=usd_path,
visual_only=True,
)
- ig.sim.import_object(obj)
+ og.sim.import_object(obj)
# Standardize the scale of the object so it fits in a [1,1,1] box
extents = obj.aabb_extent
@@ -102,7 +102,7 @@ def main(random_selection=False, headless=False, short_exec=False):
obj.set_position(center_offset)
# Allow the user to easily move the camera around
- ig.sim.enable_viewer_camera_teleoperation()
+ og.sim.enable_viewer_camera_teleoperation()
# Rotate the object in place
max_steps = 100 if short_exec else 10000
diff --git a/igibson/examples/observations/generate_additional_visual_channels.py b/igibson/examples/observations/generate_additional_visual_channels.py
index d57491264..8a382aa67 100644
--- a/igibson/examples/observations/generate_additional_visual_channels.py
+++ b/igibson/examples/observations/generate_additional_visual_channels.py
@@ -6,11 +6,11 @@
import numpy as np
import yaml
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
-from igibson.render.profiler import Profiler
-from igibson.utils.constants import MAX_CLASS_COUNT, MAX_INSTANCE_COUNT
-from igibson.utils.vision_utils import randomize_colors, segmentation_to_rgb
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+from omnigibson.render.profiler import Profiler
+from omnigibson.utils.constants import MAX_CLASS_COUNT, MAX_INSTANCE_COUNT
+from omnigibson.utils.vision_utils import randomize_colors, segmentation_to_rgb
def main(random_selection=False, headless=False, short_exec=False):
@@ -19,7 +19,7 @@ def main(random_selection=False, headless=False, short_exec=False):
Loads Rs_int (interactive) with some objects and renders depth, normals, semantic and instance segmentation
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
- config_filename = os.path.join(igibson.example_config_path, "turtlebot_nav.yaml")
+ config_filename = os.path.join(omnigibson.example_config_path, "turtlebot_nav.yaml")
config_data = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
# Only load a few objects
# config_data["load_object_categories"] = [
@@ -36,7 +36,7 @@ def main(random_selection=False, headless=False, short_exec=False):
# Reduce texture scale for Mac.
if platform == "darwin":
config_data["texture_scale"] = 0.5
- env = iGibsonEnv(config_file=config_data, mode="gui_interactive" if not headless else "headless")
+ env = OmniGibsonEnv(config_file=config_data, mode="gui_interactive" if not headless else "headless")
if not headless:
# Set a better viewing direction
diff --git a/igibson/examples/observations/generate_colored_dense_pointcloud.py b/igibson/examples/observations/generate_colored_dense_pointcloud.py
index 78fa79e65..81427ba1e 100644
--- a/igibson/examples/observations/generate_colored_dense_pointcloud.py
+++ b/igibson/examples/observations/generate_colored_dense_pointcloud.py
@@ -7,8 +7,8 @@
import yaml
from mpl_toolkits.mplot3d import Axes3D
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
def main(random_selection=False, headless=False, short_exec=False):
@@ -20,15 +20,15 @@ def main(random_selection=False, headless=False, short_exec=False):
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
- # Create iGibsonEnvironment with the Fetch rearrangement config
+ # Create an OmniGibsonEnv environment with the Fetch rearrangement config
mode = "headless"
- config = os.path.join(igibson.example_config_path, "fetch_rearrangement.yaml")
+ config = os.path.join(omnigibson.example_config_path, "fetch_rearrangement.yaml")
config_data = yaml.load(open(config, "r"), Loader=yaml.FullLoader)
# Reduce texture scale for Mac.
if platform == "darwin":
config_data["texture_scale"] = 0.5
scene_id = "Rs_int"
- nav_env = iGibsonEnv(
+ nav_env = OmniGibsonEnv(
config_file=config_data, mode=mode, scene_id=scene_id, action_timestep=1.0 / 120.0, physics_timestep=1.0 / 120.0
)
diff --git a/igibson/examples/observations/generate_lidar_colored_pointcloud.py b/igibson/examples/observations/generate_lidar_colored_pointcloud.py
index 51add4e10..c2f5e7a13 100644
--- a/igibson/examples/observations/generate_lidar_colored_pointcloud.py
+++ b/igibson/examples/observations/generate_lidar_colored_pointcloud.py
@@ -7,8 +7,8 @@
import yaml
from mpl_toolkits.mplot3d import Axes3D
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
def get_lidar_sampling_pattern():
@@ -116,12 +116,12 @@ def main(random_selection=False, headless=False, short_exec=False):
# Create environment
mode = "headless"
scene_id = "Rs_int"
- config = os.path.join(igibson.example_config_path, "fetch_rearrangement.yaml")
+ config = os.path.join(omnigibson.example_config_path, "fetch_rearrangement.yaml")
config_data = yaml.load(open(config, "r"), Loader=yaml.FullLoader)
# Reduce texture scale for Mac.
if platform == "darwin":
config_data["texture_scale"] = 0.5
- nav_env = iGibsonEnv(
+ nav_env = OmniGibsonEnv(
config_file=config_data, mode=mode, scene_id=scene_id, action_timestep=1.0 / 120.0, physics_timestep=1.0 / 120.0
)
diff --git a/igibson/examples/observations/generate_lidar_velodyne.py b/igibson/examples/observations/generate_lidar_velodyne.py
index 8dd146097..554196019 100644
--- a/igibson/examples/observations/generate_lidar_velodyne.py
+++ b/igibson/examples/observations/generate_lidar_velodyne.py
@@ -4,12 +4,12 @@
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
-import igibson
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.robots.turtlebot import Turtlebot
-from igibson.scenes.gibson_indoor_scene import StaticIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.config_utils import parse_config
+import omnigibson
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.robots.turtlebot import Turtlebot
+from omnigibson.scenes.gibson_indoor_scene import StaticIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.config_utils import parse_config
def main(random_selection=False, headless=False, short_exec=False):
@@ -19,7 +19,7 @@ def main(random_selection=False, headless=False, short_exec=False):
It plots the velodyne point cloud with matplotlib
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
- config = parse_config(os.path.join(igibson.example_config_path, "turtlebot_static_nav.yaml"))
+ config = parse_config(os.path.join(omnigibson.example_config_path, "turtlebot_static_nav.yaml"))
settings = MeshRendererSettings(enable_shadow=False, msaa=False, texture_scale=0.01)
s = Simulator(mode="headless", image_width=256, image_height=256, rendering_settings=settings)
diff --git a/igibson/examples/observations/generate_optical_and_scene_flow.py b/igibson/examples/observations/generate_optical_and_scene_flow.py
index 5f2be3857..c6e71cc7e 100644
--- a/igibson/examples/observations/generate_optical_and_scene_flow.py
+++ b/igibson/examples/observations/generate_optical_and_scene_flow.py
@@ -6,12 +6,12 @@
import numpy as np
import yaml
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.profiler import Profiler
-from igibson.utils.constants import MAX_CLASS_COUNT, MAX_INSTANCE_COUNT
-from igibson.utils.vision_utils import randomize_colors, segmentation_to_rgb
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.profiler import Profiler
+from omnigibson.utils.constants import MAX_CLASS_COUNT, MAX_INSTANCE_COUNT
+from omnigibson.utils.vision_utils import randomize_colors, segmentation_to_rgb
FLOW_SCALING_FACTOR = 500
@@ -36,7 +36,7 @@ def main(random_selection=False, headless=True, short_exec=True):
Loads Rs_int (interactive) with some objects and renders optical and scene flow
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
- config_filename = os.path.join(igibson.example_config_path, "turtlebot_static_nav.yaml")
+ config_filename = os.path.join(omnigibson.example_config_path, "turtlebot_static_nav.yaml")
config_data = yaml.load(open(config_filename, "r"), Loader=yaml.FullLoader)
config_data["vertical_fov"] = 90
@@ -46,7 +46,7 @@ def main(random_selection=False, headless=True, short_exec=True):
# Reduce texture scale for Mac.
if platform == "darwin":
config_data["texture_scale"] = 0.5
- env = iGibsonEnv(config_file=config_data, mode="gui_interactive" if not headless else "headless")
+ env = OmniGibsonEnv(config_file=config_data, mode="gui_interactive" if not headless else "headless")
if not headless:
# Set a better viewing direction
diff --git a/igibson/examples/observations/generate_topdown_semseg.py b/igibson/examples/observations/generate_topdown_semseg.py
index 306bbad16..e1e796bdb 100644
--- a/igibson/examples/observations/generate_topdown_semseg.py
+++ b/igibson/examples/observations/generate_topdown_semseg.py
@@ -3,11 +3,11 @@
import cv2
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.constants import MAX_CLASS_COUNT
-from igibson.utils.vision_utils import segmentation_to_rgb
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.constants import MAX_CLASS_COUNT
+from omnigibson.utils.vision_utils import segmentation_to_rgb
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/renderer/mesh_renderer_example.py b/igibson/examples/renderer/mesh_renderer_example.py
index bd1cb4f36..ac163eca1 100644
--- a/igibson/examples/renderer/mesh_renderer_example.py
+++ b/igibson/examples/renderer/mesh_renderer_example.py
@@ -5,9 +5,9 @@
import cv2
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
-from igibson.render.profiler import Profiler
-from igibson.utils.asset_utils import get_scene_path
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
+from omnigibson.render.profiler import Profiler
+from omnigibson.utils.asset_utils import get_scene_path
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/renderer/mesh_renderer_gpu_example.py b/igibson/examples/renderer/mesh_renderer_gpu_example.py
index 515d1ee2f..3d91885c0 100644
--- a/igibson/examples/renderer/mesh_renderer_gpu_example.py
+++ b/igibson/examples/renderer/mesh_renderer_gpu_example.py
@@ -6,9 +6,9 @@
import matplotlib.pyplot as plt
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_tensor import MeshRendererG2G
-from igibson.render.profiler import Profiler
-from igibson.utils.asset_utils import get_scene_path
+from omnigibson.render.mesh_renderer.mesh_renderer_tensor import MeshRendererG2G
+from omnigibson.render.profiler import Profiler
+from omnigibson.utils.asset_utils import get_scene_path
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/renderer/mesh_renderer_pano_example.py b/igibson/examples/renderer/mesh_renderer_pano_example.py
index 2229f6fb9..83c8adf80 100644
--- a/igibson/examples/renderer/mesh_renderer_pano_example.py
+++ b/igibson/examples/renderer/mesh_renderer_pano_example.py
@@ -5,10 +5,10 @@
import cv2
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.utils.asset_utils import get_scene_path
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.utils.asset_utils import get_scene_path
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/renderer/mesh_renderer_pbr_example.py b/igibson/examples/renderer/mesh_renderer_pbr_example.py
index 00dabd967..2de10585f 100644
--- a/igibson/examples/renderer/mesh_renderer_pbr_example.py
+++ b/igibson/examples/renderer/mesh_renderer_pbr_example.py
@@ -5,10 +5,10 @@
import cv2
import numpy as np
-import igibson
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
+import omnigibson
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
def load_obj_np(filename_obj, normalization=False, texture_size=4, texture_wrapping="REPEAT", use_bilinear=True):
@@ -57,7 +57,7 @@ def main(random_selection=False, headless=False, short_exec=False):
if len(sys.argv) > 1 and not (random_selection and headless and short_exec):
model_path = sys.argv[1]
else:
- model_path = os.path.join(igibson.ig_dataset_path, "scenes", "Rs_int", "shape", "visual")
+ model_path = os.path.join(omnigibson.og_dataset_path, "scenes", "Rs_int", "shape", "visual")
settings = MeshRendererSettings(msaa=True, enable_shadow=True)
renderer = MeshRenderer(width=512, height=512, vertical_fov=70, rendering_settings=settings)
renderer.set_light_position_direction([0, 0, 10], [0, 0, 0])
diff --git a/igibson/examples/renderer/mesh_renderer_simple_example.py b/igibson/examples/renderer/mesh_renderer_simple_example.py
index 96a4a985c..811e0ec05 100644
--- a/igibson/examples/renderer/mesh_renderer_simple_example.py
+++ b/igibson/examples/renderer/mesh_renderer_simple_example.py
@@ -5,8 +5,8 @@
import cv2
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
-from igibson.utils.asset_utils import get_scene_path
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRenderer
+from omnigibson.utils.asset_utils import get_scene_path
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/renderer_settings/renderer_settings_example.py b/igibson/examples/renderer_settings/renderer_settings_example.py
index 61317eb7e..be70c860d 100644
--- a/igibson/examples/renderer_settings/renderer_settings_example.py
+++ b/igibson/examples/renderer_settings/renderer_settings_example.py
@@ -1,6 +1,6 @@
-from igibson.renderer_settings.renderer_settings import RendererSettings
-from igibson.scenes.empty_scene import EmptyScene
-from igibson.simulator import Simulator
+from omnigibson.renderer_settings.renderer_settings import RendererSettings
+from omnigibson.scenes.empty_scene import EmptyScene
+from omnigibson.simulator import Simulator
# Create simulator and import scene.
sim = Simulator()
diff --git a/igibson/examples/robots/advanced/ik_example.py b/igibson/examples/robots/advanced/ik_example.py
index 14bbf1af9..8f34be546 100644
--- a/igibson/examples/robots/advanced/ik_example.py
+++ b/igibson/examples/robots/advanced/ik_example.py
@@ -5,12 +5,12 @@
import numpy as np
from collections import OrderedDict
-import igibson as ig
-from igibson.objects import PrimitiveObject
-from igibson.robots import Fetch
-from igibson.scenes import EmptyScene
-from igibson.sensors import VisionSensor
-from igibson.utils.control_utils import IKSolver
+import omnigibson as og
+from omnigibson.objects import PrimitiveObject
+from omnigibson.robots import Fetch
+from omnigibson.scenes import EmptyScene
+from omnigibson.sensors import VisionSensor
+from omnigibson.utils.control_utils import IKSolver
import carb
import omni
@@ -22,7 +22,7 @@ def main(random_selection=False, headless=False, short_exec=False):
Minimal example of usage of inverse kinematics solver
This example showcases how to construct your own IK functionality using omniverse's native lula library
- without explicitly utilizing all of iGibson's class abstractions, and also showcases how to manipulate
+ without explicitly utilizing all of OmniGibson's class abstractions, and also showcases how to manipulate
the simulator at a lower-level than the main Environment entry point.
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
@@ -45,10 +45,10 @@ def main(random_selection=False, headless=False, short_exec=False):
# Import scene and robot (Fetch)
scene = EmptyScene()
- ig.sim.import_scene(scene)
+ og.sim.import_scene(scene)
# Update the viewer camera's pose so that it points towards the robot
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([4.32248, -5.74338, 6.85436]),
orientation=np.array([0.39592, 0.13485, 0.29286, 0.85982]),
)
@@ -69,14 +69,14 @@ def main(random_selection=False, headless=False, short_exec=False):
}
}
)
- ig.sim.import_object(robot)
+ og.sim.import_object(robot)
# Set robot base at the origin
robot.set_position_orientation(np.array([0, 0, 0]), np.array([0, 0, 0, 1]))
# At least one simulation step while the simulator is playing must occur for the robot (or in general, any object)
# to be fully initialized after it is imported into the simulator
- ig.sim.play()
- ig.sim.step()
+ og.sim.play()
+ og.sim.step()
# Make sure none of the joints are moving
robot.keep_still()
@@ -104,7 +104,7 @@ def execute_ik(pos, quat=None, max_iter=100):
robot.set_joint_positions(joint_pos, indices=control_idx, target=True)
else:
logging.info("EE position not reachable.")
- ig.sim.step()
+ og.sim.step()
if programmatic_pos or headless:
# Sanity check IK using pre-defined hardcoded positions
@@ -122,12 +122,12 @@ def execute_ik(pos, quat=None, max_iter=100):
visual_only=True,
rgba=[1.0, 0, 0, 1.0],
)
- ig.sim.import_object(marker)
+ og.sim.import_object(marker)
# Get initial EE position and set marker to that location
command = robot.get_eef_position()
marker.set_position(command)
- ig.sim.step()
+ og.sim.step()
# Setup callbacks for grabbing keyboard inputs from omni
exit_now = False
@@ -151,7 +151,7 @@ def keyboard_event_handler(event, *args, **kwargs):
if delta_cmd is not None:
command = command + delta_cmd
marker.set_position(command)
- ig.sim.step()
+ og.sim.step()
# Callback must return True if valid
return True
@@ -167,10 +167,10 @@ def keyboard_event_handler(event, *args, **kwargs):
# Loop until the user requests an exit
while not exit_now:
- ig.sim.step()
+ og.sim.step()
# Always shut the simulation down cleanly at the end
- ig.app.close()
+ og.app.close()
def input_to_xyz_delta_command(inp, delta=0.01):
diff --git a/igibson/examples/robots/all_robots_visualizer.py b/igibson/examples/robots/all_robots_visualizer.py
index 1a9123fbd..01229e3d9 100644
--- a/igibson/examples/robots/all_robots_visualizer.py
+++ b/igibson/examples/robots/all_robots_visualizer.py
@@ -2,9 +2,9 @@
import numpy as np
-import igibson as ig
-from igibson.robots import REGISTERED_ROBOTS
-from igibson.scenes import EmptyScene
+import omnigibson as og
+from omnigibson.robots import REGISTERED_ROBOTS
+from omnigibson.scenes import EmptyScene
def main(random_selection=False, headless=False, short_exec=False):
@@ -20,7 +20,7 @@ def main(random_selection=False, headless=False, short_exec=False):
}
}
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Iterate over all robots and demo their motion
for robot_name, robot_cls in REGISTERED_ROBOTS.items():
@@ -30,11 +30,11 @@ def main(random_selection=False, headless=False, short_exec=False):
name=robot_name,
obs_modalities=[], # We're just moving robots around so don't load any observation modalities
)
- ig.sim.import_object(robot)
+ og.sim.import_object(robot)
# At least one step is always needed while sim is playing for any imported object to be fully initialized
- ig.sim.play()
- ig.sim.step()
+ og.sim.play()
+ og.sim.step()
# Reset robot and make sure it's not moving
robot.reset()
@@ -46,14 +46,14 @@ def main(random_selection=False, headless=False, short_exec=False):
if not headless:
# Set viewer in front facing robot
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([4.32248, -5.74338, 6.85436]),
orientation=np.array([0.39592, 0.13485, 0.29286, 0.85982]),
)
# Hold still briefly so viewer can see robot
for _ in range(100):
- ig.sim.step()
+ og.sim.step()
# Then apply random actions for a bit
for _ in range(30):
@@ -62,8 +62,8 @@ def main(random_selection=False, headless=False, short_exec=False):
env.step(action)
# Re-import the scene
- ig.sim.stop()
- ig.sim.import_scene(EmptyScene())
+ og.sim.stop()
+ og.sim.import_scene(EmptyScene())
# Always shut down the environment cleanly at the end
env.close()
diff --git a/igibson/examples/robots/grasping_mode_example.py b/igibson/examples/robots/grasping_mode_example.py
index 1830e65f3..0011b2272 100644
--- a/igibson/examples/robots/grasping_mode_example.py
+++ b/igibson/examples/robots/grasping_mode_example.py
@@ -11,10 +11,10 @@
import numpy as np
-import igibson as ig
-from igibson.objects import DatasetObject, PrimitiveObject
-from igibson.utils.asset_utils import get_ig_avg_category_specs, get_ig_category_path, get_ig_model_path
-from igibson.utils.ui_utils import choose_from_options, KeyboardRobotController
+import omnigibson as og
+from omnigibson.objects import DatasetObject, PrimitiveObject
+from omnigibson.utils.asset_utils import get_og_avg_category_specs, get_og_category_path, get_og_model_path
+from omnigibson.utils.ui_utils import choose_from_options, KeyboardRobotController
GRASPING_MODES = OrderedDict(
sticky="Sticky Mitten - Objects are magnetized when they touch the fingers and a CLOSE command is given",
@@ -47,7 +47,7 @@ def main(random_selection=False, headless=False, short_exec=False):
cfg = OrderedDict(scene=scene_cfg, robots=[robot0_cfg])
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Load objects (1 table)
objects_to_load = {
@@ -86,9 +86,9 @@ def main(random_selection=False, headless=False, short_exec=False):
name=obj_name,
**obj_cfg["init_kwargs"],
)
- ig.sim.import_object(obj)
+ og.sim.import_object(obj)
obj.set_position_orientation(**obj_cfg["init_pose"])
- ig.sim.step_physics()
+ og.sim.step_physics()
# Now load a box on the table
box = PrimitiveObject(
@@ -98,9 +98,9 @@ def main(random_selection=False, headless=False, short_exec=False):
rgba=[1.0, 0, 0, 1.0],
size=0.05,
)
- ig.sim.import_object(box)
+ og.sim.import_object(box)
box.set_position(np.array([0.53, -0.1, 0.97]))
- ig.sim.step_physics()
+ og.sim.step_physics()
# Reset the robot
robot = env.robots[0]
@@ -109,7 +109,7 @@ def main(random_selection=False, headless=False, short_exec=False):
robot.keep_still()
# Update the simulator's viewer camera's pose so it points towards the robot
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([-2.39951, 2.26469, 2.66227]),
orientation=np.array([-0.23898481, 0.48475231, 0.75464013, -0.37204802]),
)
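As a loose, self-contained sketch of how the renamed helpers in the hunks above fit together (choose_from_options and the GRASPING_MODES entry come from this example; the "grasping_mode" robot-config key and the scene/robot type strings are assumptions made only for illustration):

    from collections import OrderedDict

    import omnigibson as og
    from omnigibson.utils.ui_utils import choose_from_options

    GRASPING_MODES = OrderedDict(
        sticky="Sticky Mitten - Objects are magnetized when they touch the fingers and a CLOSE command is given",
    )

    # Let the user pick a grasping mode (or sample one when running non-interactively).
    grasping_mode = choose_from_options(options=GRASPING_MODES, name="grasping mode", random_selection=False)

    # Hypothetical config: the "grasping_mode" key and the type strings are assumptions for illustration.
    scene_cfg = OrderedDict(type="EmptyScene")
    robot0_cfg = OrderedDict(type="Fetch", obs_modalities=[], grasping_mode=grasping_mode)
    cfg = OrderedDict(scene=scene_cfg, robots=[robot0_cfg])

    env = og.Environment(configs=cfg, action_timestep=1 / 60., physics_timestep=1 / 60.)
    env.close()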
diff --git a/igibson/examples/robots/motion_planning_example.py b/igibson/examples/robots/motion_planning_example.py
index e812d8ff0..0e75dea3b 100644
--- a/igibson/examples/robots/motion_planning_example.py
+++ b/igibson/examples/robots/motion_planning_example.py
@@ -9,11 +9,11 @@
# import numpy as np
# import yaml
#
-# import igibson
-# from igibson import object_states
-# from igibson.envs.igibson_env import iGibsonEnv
-# from igibson.utils.constants import ViewerMode
-# from igibson.utils.motion_planning_wrapper import MotionPlanningWrapper
+# import omnigibson
+# from omnigibson import object_states
+# from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+# from omnigibson.utils.constants import ViewerMode
+# from omnigibson.utils.motion_planning_wrapper import MotionPlanningWrapper
#
#
# def print_mp_info():
@@ -65,7 +65,7 @@
# ] # Uncomment this line to accelerate loading with only the building
# config_data["load_room_types"] = ["living_room"]
# config_data["hide_robot"] = False
-# env = iGibsonEnv(
+# env = OmniGibsonEnv(
# config_file=config_data,
# mode="gui_interactive" if not headless else "headless",
# action_timestep=1.0 / 120.0,
@@ -185,7 +185,7 @@
# parser.add_argument(
# "--config",
# "-c",
-# default=os.path.join(igibson.example_config_path, "fetch_motion_planning.yaml"),
+# default=os.path.join(omnigibson.example_config_path, "fetch_motion_planning.yaml"),
# help="which config file to use [default: use yaml files in examples/configs]",
# )
# parser.add_argument(
@@ -199,7 +199,7 @@
# config = args.config
# programmatic_actions = args.programmatic_actions
# else:
-# config = os.path.join(igibson.example_config_path, "fetch_motion_planning.yaml")
+# config = os.path.join(omnigibson.example_config_path, "fetch_motion_planning.yaml")
# programmatic_actions = True
# run_example(config, programmatic_actions, headless, short_exec)
#
diff --git a/igibson/examples/robots/robot_control_example.py b/igibson/examples/robots/robot_control_example.py
index 29ca71b7f..f066f63b4 100644
--- a/igibson/examples/robots/robot_control_example.py
+++ b/igibson/examples/robots/robot_control_example.py
@@ -8,9 +8,9 @@
import numpy as np
-import igibson as ig
-from igibson.robots import REGISTERED_ROBOTS
-from igibson.utils.ui_utils import choose_from_options, KeyboardRobotController
+import omnigibson as og
+from omnigibson.robots import REGISTERED_ROBOTS
+from omnigibson.utils.ui_utils import choose_from_options, KeyboardRobotController
CONTROL_MODES = OrderedDict(
@@ -88,7 +88,7 @@ def main(random_selection=False, headless=False, short_exec=False):
cfg = OrderedDict(scene=scene_cfg, robots=[robot0_cfg])
# Create the environment
- env = ig.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
+ env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
# Choose robot controller to use
robot = env.robots[0]
@@ -105,7 +105,7 @@ def main(random_selection=False, headless=False, short_exec=False):
robot.reload_controllers(controller_config=controller_config)
# Update the simulator's viewer camera's pose so it points towards the robot
- ig.sim.viewer_camera.set_position_orientation(
+ og.sim.viewer_camera.set_position_orientation(
position=np.array([1.46949, -3.97358, 2.21529]),
orientation=np.array([0.56829048, 0.09569975, 0.13571846, 0.80589577]),
)
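The robot-control example follows the same pattern; a sketch of how the renamed pieces fit together, assuming `REGISTERED_ROBOTS` maps robot names to classes as in the import above. The scene/robot config keys and the controller configuration contents are assumptions for illustration only.

```python
from collections import OrderedDict

import numpy as np

import omnigibson as og
from omnigibson.robots import REGISTERED_ROBOTS
from omnigibson.utils.ui_utils import choose_from_options

# Let the user pick a robot by name from the registry.
robot_name = choose_from_options(options=list(REGISTERED_ROBOTS.keys()), name="robot", random_selection=False)

scene_cfg = OrderedDict(type="Scene")        # assumed minimal scene config
robot0_cfg = OrderedDict(type=robot_name)    # assumed robot config key
cfg = OrderedDict(scene=scene_cfg, robots=[robot0_cfg])

env = og.Environment(configs=cfg, action_timestep=1/60., physics_timestep=1/60.)
robot = env.robots[0]

# Swap in a different controller configuration; the contents are robot-specific and assumed here.
controller_config = {"base": {"name": "DifferentialDriveController"}}
robot.reload_controllers(controller_config=controller_config)

# Point the viewer camera at the robot.
og.sim.viewer_camera.set_position_orientation(
    position=np.array([1.47, -3.97, 2.22]),
    orientation=np.array([0.568, 0.096, 0.136, 0.806]),
)
```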
diff --git a/igibson/examples/ros/igibson-ros/CMakeLists.txt b/igibson/examples/ros/igibson-ros/CMakeLists.txt
index c066b4690..fc7aba9de 100644
--- a/igibson/examples/ros/igibson-ros/CMakeLists.txt
+++ b/igibson/examples/ros/igibson-ros/CMakeLists.txt
@@ -1,5 +1,5 @@
cmake_minimum_required(VERSION 2.8.3)
-project(igibson-ros)
+project(omnigibson-ros)
## Compile as C++11, supported in ROS Kinetic and newer
# add_compile_options(-std=c++11)
@@ -104,7 +104,7 @@ find_package(catkin REQUIRED COMPONENTS
## DEPENDS: system dependencies of this project that dependent projects also need
catkin_package(
# INCLUDE_DIRS include
-# LIBRARIES igibson-ros
+# LIBRARIES omnigibson-ros
# CATKIN_DEPENDS roscpp rospy std_msgs
# DEPENDS system_lib
)
@@ -122,7 +122,7 @@ include_directories(
## Declare a C++ library
# add_library(${PROJECT_NAME}
-# src/${PROJECT_NAME}/igibson-ros.cpp
+# src/${PROJECT_NAME}/omnigibson-ros.cpp
# )
## Add cmake target dependencies of the library
@@ -133,7 +133,7 @@ include_directories(
## Declare a C++ executable
## With catkin_make all packages are built within a single CMake context
## The recommended prefix ensures that target names across packages don't collide
-# add_executable(${PROJECT_NAME}_node src/igibson-ros_node.cpp)
+# add_executable(${PROJECT_NAME}_node src/omnigibson-ros_node.cpp)
## Rename C++ executable without prefix
## The above recommended prefix causes long target names, the following renames the
@@ -190,7 +190,7 @@ include_directories(
#############
## Add gtest based cpp test target and link libraries
-# catkin_add_gtest(${PROJECT_NAME}-test test/test_igibson-ros.cpp)
+# catkin_add_gtest(${PROJECT_NAME}-test test/test_omnigibson-ros.cpp)
# if(TARGET ${PROJECT_NAME}-test)
# target_link_libraries(${PROJECT_NAME}-test ${PROJECT_NAME})
# endif()
diff --git a/igibson/examples/ros/igibson-ros/README.md b/igibson/examples/ros/igibson-ros/README.md
index 5c6936efe..8b56fdfc4 100644
--- a/igibson/examples/ros/igibson-ros/README.md
+++ b/igibson/examples/ros/igibson-ros/README.md
@@ -1 +1 @@
-Please refer to [our documentation](http://svl.stanford.edu/igibson/docs/ros_integration.html).
\ No newline at end of file
+Please refer to [our documentation](http://svl.stanford.edu/omnigibson/docs/ros_integration.html).
\ No newline at end of file
diff --git a/igibson/examples/ros/igibson-ros/launch/turtlebot_gmapping.launch b/igibson/examples/ros/igibson-ros/launch/turtlebot_gmapping.launch
index 09c701970..68f1c1a21 100644
--- a/igibson/examples/ros/igibson-ros/launch/turtlebot_gmapping.launch
+++ b/igibson/examples/ros/igibson-ros/launch/turtlebot_gmapping.launch
@@ -5,19 +5,19 @@
-
+
-
+
-
+
diff --git a/igibson/examples/ros/igibson-ros/launch/turtlebot_gt_navigation.launch b/igibson/examples/ros/igibson-ros/launch/turtlebot_gt_navigation.launch
index 3437e87b0..8a98fa12d 100644
--- a/igibson/examples/ros/igibson-ros/launch/turtlebot_gt_navigation.launch
+++ b/igibson/examples/ros/igibson-ros/launch/turtlebot_gt_navigation.launch
@@ -5,25 +5,25 @@
-
+
-
+
-
+
-
+
@@ -39,7 +39,7 @@
-
+ $(arg map_file)

@@ -48,17 +48,17 @@
-
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
diff --git a/igibson/examples/ros/igibson-ros/launch/turtlebot_hector_mapping.launch b/igibson/examples/ros/igibson-ros/launch/turtlebot_hector_mapping.launch
index e77152304..52c3f56c1 100644
--- a/igibson/examples/ros/igibson-ros/launch/turtlebot_hector_mapping.launch
+++ b/igibson/examples/ros/igibson-ros/launch/turtlebot_hector_mapping.launch
@@ -5,12 +5,12 @@
-
+
-
+
@@ -18,7 +18,7 @@
-
+
diff --git a/igibson/examples/ros/igibson-ros/launch/turtlebot_navigation.launch b/igibson/examples/ros/igibson-ros/launch/turtlebot_navigation.launch
index 2b6af6106..4e5f0419a 100644
--- a/igibson/examples/ros/igibson-ros/launch/turtlebot_navigation.launch
+++ b/igibson/examples/ros/igibson-ros/launch/turtlebot_navigation.launch
@@ -5,12 +5,12 @@
-
+
-
+
@@ -18,7 +18,7 @@
-
+
@@ -32,7 +32,7 @@
-
+
@@ -47,17 +47,17 @@
-
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
diff --git a/igibson/examples/ros/igibson-ros/launch/turtlebot_navigation_noisy.launch b/igibson/examples/ros/igibson-ros/launch/turtlebot_navigation_noisy.launch
index c87f081be..bd352830d 100644
--- a/igibson/examples/ros/igibson-ros/launch/turtlebot_navigation_noisy.launch
+++ b/igibson/examples/ros/igibson-ros/launch/turtlebot_navigation_noisy.launch
@@ -5,22 +5,22 @@
-
+
-
+
-
+
-
+
@@ -42,7 +42,7 @@
-
+
@@ -56,17 +56,17 @@
-
+
-
-
-
-
-
-
-
-
+
+
+
+
+
+
+
+
diff --git a/igibson/examples/ros/igibson-ros/launch/turtlebot_rgbd.launch b/igibson/examples/ros/igibson-ros/launch/turtlebot_rgbd.launch
index 81eda221b..e23597553 100644
--- a/igibson/examples/ros/igibson-ros/launch/turtlebot_rgbd.launch
+++ b/igibson/examples/ros/igibson-ros/launch/turtlebot_rgbd.launch
@@ -7,15 +7,15 @@
-
+
-
+
-
+
diff --git a/igibson/examples/ros/igibson-ros/package.xml b/igibson/examples/ros/igibson-ros/package.xml
index 234a402cd..08980c717 100644
--- a/igibson/examples/ros/igibson-ros/package.xml
+++ b/igibson/examples/ros/igibson-ros/package.xml
@@ -1,8 +1,8 @@
-  <name>igibson-ros</name>
+  <name>omnigibson-ros</name>
   <version>0.0.0</version>
-  <description>The igibson-ros package</description>
+  <description>The omnigibson-ros package</description>
@@ -19,7 +19,7 @@
-
+
diff --git a/igibson/examples/ros/igibson-ros/turtlebot/turtlebot.urdf b/igibson/examples/ros/igibson-ros/turtlebot/turtlebot.urdf
index 54794c905..192a40792 100644
--- a/igibson/examples/ros/igibson-ros/turtlebot/turtlebot.urdf
+++ b/igibson/examples/ros/igibson-ros/turtlebot/turtlebot.urdf
@@ -21,7 +21,7 @@
-
+
@@ -68,7 +68,7 @@
-
+
@@ -99,7 +99,7 @@
-
+
@@ -330,7 +330,7 @@
-
+
@@ -354,7 +354,7 @@
-
+
@@ -378,7 +378,7 @@
-
+
@@ -402,7 +402,7 @@
-
+
@@ -426,7 +426,7 @@
-
+
@@ -450,7 +450,7 @@
-
+
@@ -474,7 +474,7 @@
-
+
@@ -498,7 +498,7 @@
-
+
@@ -522,7 +522,7 @@
-
+
@@ -546,7 +546,7 @@
-
+
@@ -570,7 +570,7 @@
-
+
@@ -594,7 +594,7 @@
-
+
@@ -618,7 +618,7 @@
-
+
@@ -642,7 +642,7 @@
-
+
@@ -666,7 +666,7 @@
-
+
@@ -690,7 +690,7 @@
-
+
@@ -715,7 +715,7 @@
-
+
@@ -740,7 +740,7 @@
-
+
@@ -764,7 +764,7 @@
-
+
@@ -812,7 +812,7 @@
-
+
diff --git a/igibson/examples/ros/igibson-ros/turtlebot_rgbd.py b/igibson/examples/ros/igibson-ros/turtlebot_rgbd.py
index 7225039e4..d46460646 100755
--- a/igibson/examples/ros/igibson-ros/turtlebot_rgbd.py
+++ b/igibson/examples/ros/igibson-ros/turtlebot_rgbd.py
@@ -14,14 +14,14 @@
from sensor_msgs.msg import PointCloud2
from std_msgs.msg import Header
-from igibson.envs.igibson_env import iGibsonEnv
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
class SimNode:
def __init__(self):
- rospy.init_node("igibson_sim")
+ rospy.init_node("omnigibson_sim")
rospack = rospkg.RosPack()
- path = rospack.get_path("igibson-ros")
+ path = rospack.get_path("omnigibson-ros")
config_filename = os.path.join(path, "turtlebot_rgbd.yaml")
self.cmdx = 0.0
@@ -41,7 +41,7 @@ def __init__(self):
self.bridge = CvBridge()
self.br = tf.TransformBroadcaster()
- self.env = iGibsonEnv(
+ self.env = OmniGibsonEnv(
config_file=config_filename, mode="headless", action_timestep=1 / 30.0
) # assume a 30Hz simulation
self.env.reset()
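Condensed, the renamed ROS node boils down to the following setup (a sketch; the publisher and TF-broadcast wiring from the original file is omitted):

```python
import os

import rospkg
import rospy

from omnigibson.envs.omnigibson_env import OmniGibsonEnv

# Resolve the package-relative config exactly as the node above does.
rospy.init_node("omnigibson_sim")
rospack = rospkg.RosPack()
path = rospack.get_path("omnigibson-ros")
config_filename = os.path.join(path, "turtlebot_rgbd.yaml")

# Headless simulation stepped at 30 Hz.
env = OmniGibsonEnv(config_file=config_filename, mode="headless", action_timestep=1 / 30.0)
env.reset()
```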
diff --git a/igibson/examples/scenes/g_scene_selector.py b/igibson/examples/scenes/g_scene_selector.py
index 804fc9987..d296b8d9b 100644
--- a/igibson/examples/scenes/g_scene_selector.py
+++ b/igibson/examples/scenes/g_scene_selector.py
@@ -3,12 +3,12 @@
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.scenes.gibson_indoor_scene import StaticIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.asset_utils import get_available_g_scenes
-from igibson.utils.ui_utils import choose_from_options
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.scenes.gibson_indoor_scene import StaticIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.asset_utils import get_available_g_scenes
+from omnigibson.utils.ui_utils import choose_from_options
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/scenes/ig_scene_selector.py b/igibson/examples/scenes/ig_scene_selector.py
index e11c041ce..296095adb 100644
--- a/igibson/examples/scenes/ig_scene_selector.py
+++ b/igibson/examples/scenes/ig_scene_selector.py
@@ -3,12 +3,12 @@
import numpy as np
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.asset_utils import get_available_ig_scenes
-from igibson.utils.ui_utils import choose_from_options
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.asset_utils import get_available_og_scenes
+from omnigibson.utils.ui_utils import choose_from_options
def main(random_selection=False, headless=False, short_exec=False):
@@ -18,8 +18,8 @@ def main(random_selection=False, headless=False, short_exec=False):
Shows how to sample points in the scene by room type and how to compute geodesic distance and the shortest path
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
- available_ig_scenes = get_available_ig_scenes()
- scene_id = choose_from_options(options=available_ig_scenes, name="ig scene", random_selection=random_selection)
+ available_og_scenes = get_available_og_scenes()
+ scene_id = choose_from_options(options=available_og_scenes, name="og scene", random_selection=random_selection)
settings = MeshRendererSettings(enable_shadow=True, msaa=False)
if platform == "darwin":
settings.texture_scale = 0.5
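Put together, the renamed scene-selection flow looks roughly like this. The `get_available_og_scenes`, `choose_from_options`, and `MeshRendererSettings` calls mirror the hunk above; the `Simulator` and `InteractiveIndoorScene` constructor arguments are assumptions.

```python
from sys import platform

from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
from omnigibson.simulator import Simulator
from omnigibson.utils.asset_utils import get_available_og_scenes
from omnigibson.utils.ui_utils import choose_from_options

# Pick one of the available OmniGibson scenes interactively (or randomly).
available_og_scenes = get_available_og_scenes()
scene_id = choose_from_options(options=available_og_scenes, name="og scene", random_selection=False)

# Renderer settings mirror the example; halve texture resolution on macOS.
settings = MeshRendererSettings(enable_shadow=True, msaa=False)
if platform == "darwin":
    settings.texture_scale = 0.5

# Constructor keyword arguments below are assumptions.
s = Simulator(mode="headless", rendering_settings=settings)
scene = InteractiveIndoorScene(scene_id)
s.import_scene(scene)
```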
diff --git a/igibson/examples/scenes/scene_object_rand_example.py b/igibson/examples/scenes/scene_object_rand_example.py
index 091b56f6b..f24316873 100644
--- a/igibson/examples/scenes/scene_object_rand_example.py
+++ b/igibson/examples/scenes/scene_object_rand_example.py
@@ -1,9 +1,9 @@
import logging
from sys import platform
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/scenes/scene_partial_loading_example.py b/igibson/examples/scenes/scene_partial_loading_example.py
index 5611eca35..e307c4ecc 100644
--- a/igibson/examples/scenes/scene_partial_loading_example.py
+++ b/igibson/examples/scenes/scene_partial_loading_example.py
@@ -1,9 +1,9 @@
import logging
from sys import platform
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/scenes/scene_selector.py b/igibson/examples/scenes/scene_selector.py
index 91543c080..6a2df84b8 100644
--- a/igibson/examples/scenes/scene_selector.py
+++ b/igibson/examples/scenes/scene_selector.py
@@ -1,7 +1,7 @@
import logging
-import igibson as ig
-from igibson.utils.asset_utils import get_available_g_scenes, get_available_ig_scenes
-from igibson.utils.ui_utils import choose_from_options
+import omnigibson as og
+from omnigibson.utils.asset_utils import get_available_g_scenes, get_available_og_scenes
+from omnigibson.utils.ui_utils import choose_from_options
def main(random_selection=False, headless=False, short_exec=False):
@@ -20,7 +20,7 @@ def main(random_selection=False, headless=False, short_exec=False):
scene_type = choose_from_options(options=scene_options, name="scene type", random_selection=random_selection)
# Choose the scene model to load
- scenes = get_available_ig_scenes() if scene_type == "InteractiveTraversableScene" else get_available_g_scenes()
+ scenes = get_available_og_scenes() if scene_type == "InteractiveTraversableScene" else get_available_g_scenes()
scene_model = choose_from_options(options=scenes, name="scene model", random_selection=random_selection)
print(f"scene model: {scene_model}")
@@ -50,10 +50,10 @@ def main(random_selection=False, headless=False, short_exec=False):
cfg["scene"]["load_object_categories"] = ["floors", "walls", "ceilings"]
# Load the environment
- env = ig.Environment(configs=cfg)
+ env = og.Environment(configs=cfg)
# Allow user to move camera more easily
- ig.sim.enable_viewer_camera_teleoperation()
+ og.sim.enable_viewer_camera_teleoperation()
# Run a simple loop and reset periodically
max_iterations = 10 if not short_exec else 1
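For reference, a sketch of the partial-loading and camera-teleoperation pattern used here, assuming `og.Environment`, `og.sim.enable_viewer_camera_teleoperation`, and `og.sim.step_physics` behave as shown in this patch. The `"type"` and `"scene_model"` config keys are assumptions.

```python
import omnigibson as og
from omnigibson.utils.asset_utils import get_available_og_scenes
from omnigibson.utils.ui_utils import choose_from_options

scene_model = choose_from_options(options=get_available_og_scenes(), name="scene model", random_selection=False)

# Only load the structural categories to speed things up.
cfg = {
    "scene": {
        "type": "InteractiveTraversableScene",   # assumed type string
        "scene_model": scene_model,              # assumed key name
        "load_object_categories": ["floors", "walls", "ceilings"],
    }
}
env = og.Environment(configs=cfg)

# Free-fly camera controls for inspecting the scene.
og.sim.enable_viewer_camera_teleoperation()

# Step physics for a while, then reset, a few times over.
for _ in range(10):
    for _ in range(100):
        og.sim.step_physics()
    env.reset()
```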
diff --git a/igibson/examples/scenes/scene_texture_rand_example.py b/igibson/examples/scenes/scene_texture_rand_example.py
index bd1401693..bd62880eb 100644
--- a/igibson/examples/scenes/scene_texture_rand_example.py
+++ b/igibson/examples/scenes/scene_texture_rand_example.py
@@ -1,9 +1,9 @@
import logging
from sys import platform
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/scenes/scene_tour_video_gen.py b/igibson/examples/scenes/scene_tour_video_gen.py
index 1eab04a42..5dd5f5911 100644
--- a/igibson/examples/scenes/scene_tour_video_gen.py
+++ b/igibson/examples/scenes/scene_tour_video_gen.py
@@ -8,12 +8,12 @@
import numpy as np
from PIL import Image
-import igibson
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.asset_utils import get_ig_scene_path
+import omnigibson
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.asset_utils import get_og_scene_path
def main(random_selection=False, headless=False, short_exec=False):
@@ -49,12 +49,12 @@ def main(random_selection=False, headless=False, short_exec=False):
object_rand = False
# hdr_texture1 = os.path.join(
- # igibson.ig_dataset_path, 'scenes', 'background', 'photo_studio_01_2k.hdr')
- hdr_texture1 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
- hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
- light_map = os.path.join(get_ig_scene_path(scene_name), "layout", "floor_lighttype_0.png")
+ # omnigibson.og_dataset_path, 'scenes', 'background', 'photo_studio_01_2k.hdr')
+ hdr_texture1 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
+ hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+ light_map = os.path.join(get_og_scene_path(scene_name), "layout", "floor_lighttype_0.png")
- background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+ background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
settings = MeshRendererSettings(
env_texture_filename=hdr_texture1,
@@ -76,8 +76,8 @@ def main(random_selection=False, headless=False, short_exec=False):
s.import_scene(scene)
# Load trajectory path
- traj_path = os.path.join(get_ig_scene_path(scene_name), "misc", "tour_cam_trajectory.txt")
- save_dir = os.path.join(get_ig_scene_path(scene_name), save_dir)
+ traj_path = os.path.join(get_og_scene_path(scene_name), "misc", "tour_cam_trajectory.txt")
+ save_dir = os.path.join(get_og_scene_path(scene_name), save_dir)
os.makedirs(save_dir, exist_ok=True)
tmp_dir = os.path.join(save_dir, "tmp")
os.makedirs(tmp_dir, exist_ok=True)
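The dataset-path helpers renamed in this hunk are used the same way as before; a short sketch of the new spellings, using the `Rs_int` scene as an example:

```python
import os

import omnigibson
from omnigibson.utils.asset_utils import get_og_scene_path

scene_name = "Rs_int"

# Background HDR probes now live under omnigibson.og_dataset_path ...
hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")

# ... while per-scene assets are resolved through get_og_scene_path().
light_map = os.path.join(get_og_scene_path(scene_name), "layout", "floor_lighttype_0.png")
traj_path = os.path.join(get_og_scene_path(scene_name), "misc", "tour_cam_trajectory.txt")
```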
diff --git a/igibson/examples/scenes/scenescan2igibson/README.md b/igibson/examples/scenes/scenescan2igibson/README.md
index b7aa9cefd..b220296b1 100644
--- a/igibson/examples/scenes/scenescan2igibson/README.md
+++ b/igibson/examples/scenes/scenescan2igibson/README.md
@@ -1,7 +1,7 @@
-## Guide on Converting Matterport (or other) scans to iGibson format
+## Guide on Converting Matterport (or other) scans to OmniGibson format
This document contains steps to convert a matterport scan (it might work with other scan formats, as long as the data is represented as a
-triangular mesh) to iGibson format.
+triangular mesh) to OmniGibson format.
A typical matterport scan in "matterpak" format has the following file format:
```
@@ -23,11 +23,11 @@ A typical matterport scan in "matterpak" format has the following file format:
├── colorplan_000.jpg
├── colorplan.pdf
```
-There are a few steps involved in converting this format to iGibson format:
+There are a few steps involved in converting this format to OmniGibson format:
1. Make sure it is actually in z-up convention. Usually, matterport meshes are already
following this convention.
2. Combine all the textures into one file, and modify the `mtl` file and `obj` file accordingly. It can be done with `combine_texture.py`, please follow the steps there.
-3. (required) Add surface normals to `mesh_z_up.obj`, if you want normal to be correctly rendered in iGibson. This can be done with the following commands:
+3. (Required) Add surface normals to `mesh_z_up.obj` so that normals are rendered correctly in OmniGibson. This can be done with the following commands:
```
meshlabserver -i mesh_z_up.obj -o mesh_z_up_with_normal.obj -om vn vc wt
mv mesh_z_up_with_normal.obj mesh_z_up.obj
@@ -39,4 +39,4 @@ python generate_floor_map.py
python generate_traversable_map.py
```
-5. Move `` to iGibson `dataset`, and you should be able to use it in iGibson.
+5. Move `` to OmniGibson `dataset`, and you should be able to use it in OmniGibson.
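Once the converted folder is in place, loading it should look roughly like the static-scene examples elsewhere in this patch (a sketch; the `Simulator` and `StaticIndoorScene` constructor arguments, and the scan folder name, are assumptions):

```python
from omnigibson.scenes.gibson_indoor_scene import StaticIndoorScene
from omnigibson.simulator import Simulator
from omnigibson.utils.asset_utils import get_available_g_scenes

# The converted scan should now show up alongside the other static scenes.
print(get_available_g_scenes())

# Constructor keyword arguments are assumptions; see g_scene_selector.py in this patch.
s = Simulator(mode="headless")
scene = StaticIndoorScene("my_converted_scan")  # hypothetical folder name
s.import_scene(scene)
```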
diff --git a/igibson/examples/scenes/stadium_example.py b/igibson/examples/scenes/stadium_example.py
index 743da7206..a450f2dca 100644
--- a/igibson/examples/scenes/stadium_example.py
+++ b/igibson/examples/scenes/stadium_example.py
@@ -1,15 +1,15 @@
import logging
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.profiler import Profiler
-from igibson.scenes.stadium_scene import StadiumScene
-from igibson.simulator import Simulator
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.profiler import Profiler
+from omnigibson.scenes.stadium_scene import StadiumScene
+from omnigibson.simulator import Simulator
def main(random_selection=False, headless=False, short_exec=False):
"""
Loads the Stadium scene
- This scene is default in pybullet but is not really useful in iGibson
+ This scene is default in pybullet but is not really useful in OmniGibson
"""
logging.info("*" * 80 + "\nDescription:" + main.__doc__ + "*" * 80)
diff --git a/igibson/examples/scenes/traversability_map_example.py b/igibson/examples/scenes/traversability_map_example.py
index 082963131..ee13fa451 100644
--- a/igibson/examples/scenes/traversability_map_example.py
+++ b/igibson/examples/scenes/traversability_map_example.py
@@ -6,7 +6,7 @@
import numpy as np
from PIL import Image
-from igibson.utils.asset_utils import get_scene_path
+from omnigibson.utils.asset_utils import get_scene_path
def main(random_selection=False, headless=False, short_exec=False):
diff --git a/igibson/examples/simulator/sim_save_load_example.py b/igibson/examples/simulator/sim_save_load_example.py
index 521521d69..bbea68cc5 100644
--- a/igibson/examples/simulator/sim_save_load_example.py
+++ b/igibson/examples/simulator/sim_save_load_example.py
@@ -2,13 +2,13 @@
import numpy as np
-from igibson import ig_dataset_path
-from igibson.robots.turtlebot import Turtlebot
-from igibson.scenes.interactive_traversable_scene import InteractiveTraversableScene
-from igibson.simulator import Simulator
+from omnigibson import og_dataset_path
+from omnigibson.robots.turtlebot import Turtlebot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveTraversableScene
+from omnigibson.simulator import Simulator
SCENE_ID = "Rs_int"
-USD_TEMPLATE_FILE = f"{ig_dataset_path}/scenes/{SCENE_ID}/urdf/{SCENE_ID}_best_template.usd"
+USD_TEMPLATE_FILE = f"{og_dataset_path}/scenes/{SCENE_ID}/urdf/{SCENE_ID}_best_template.usd"
TEST_OUT_PATH = "" # Define output directory here.
#### SAVE SIMULATION ENV #####
diff --git a/igibson/examples/vr/data_save_replay/vr_sr.py b/igibson/examples/vr/data_save_replay/vr_sr.py
index 6b1ad6ca3..2fb09af34 100644
--- a/igibson/examples/vr/data_save_replay/vr_sr.py
+++ b/igibson/examples/vr/data_save_replay/vr_sr.py
@@ -19,14 +19,14 @@
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.log_utils import IGLogReader, IGLogWriter
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.log_utils import IGLogReader, IGLogWriter
# Number of frames to save
FRAMES_TO_SAVE = 600
@@ -44,12 +44,12 @@ def run_action_sr(mode):
is_save = mode == "save"
# HDR files for PBR rendering
- hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
- hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+ hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+ hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
- background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+ background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
# VR rendering settings
vr_rendering_settings = MeshRendererSettings(
@@ -118,7 +118,7 @@ def run_action_sr(mode):
obj = ArticulatedObject(
os.path.join(
- igibson.ig_dataset_path,
+ omnigibson.og_dataset_path,
"objects",
"basket",
"e3bae8da192ab3d4a17ae19fa77775ff",
diff --git a/igibson/examples/vr/in_development/vr_body_tracker_test.py b/igibson/examples/vr/in_development/vr_body_tracker_test.py
index d8770cb5c..b722cee5d 100644
--- a/igibson/examples/vr/in_development/vr_body_tracker_test.py
+++ b/igibson/examples/vr/in_development/vr_body_tracker_test.py
@@ -1,10 +1,10 @@
"""
Demo for testing VR body based on torso tracker
"""
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.empty_scene import EmptyScene
-from igibson.simulator import Simulator
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.empty_scene import EmptyScene
+from omnigibson.simulator import Simulator
def main():
diff --git a/igibson/examples/vr/in_development/vr_button_mapping.py b/igibson/examples/vr/in_development/vr_button_mapping.py
index dcf0f74d5..dd5c6a812 100644
--- a/igibson/examples/vr/in_development/vr_button_mapping.py
+++ b/igibson/examples/vr/in_development/vr_button_mapping.py
@@ -7,9 +7,9 @@
Please use this if creating a custom action-button mapping for a VR controller
that is neither an HTC Vive controller nor an Oculus controller.
"""
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.scenes.empty_scene import EmptyScene
-from igibson.simulator import Simulator
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.scenes.empty_scene import EmptyScene
+from omnigibson.simulator import Simulator
def main():
diff --git a/igibson/examples/vr/in_development/vr_cleaning_demo.py b/igibson/examples/vr/in_development/vr_cleaning_demo.py
index e9153283f..7d0c532e9 100644
--- a/igibson/examples/vr/in_development/vr_cleaning_demo.py
+++ b/igibson/examples/vr/in_development/vr_cleaning_demo.py
@@ -3,14 +3,14 @@
"""
import os
-import igibson
-from igibson import object_states
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson import object_states
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# Set to true to use viewer manipulation instead of VR
# Set to false by default so this benchmark task can be performed in VR
@@ -21,12 +21,12 @@
USE_GRIPPER = False
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
# VR rendering settings
vr_rendering_settings = MeshRendererSettings(
diff --git a/igibson/examples/vr/in_development/vr_hand_dex_benchmark.py b/igibson/examples/vr/in_development/vr_hand_dex_benchmark.py
index 7d3bfee65..32eee4826 100644
--- a/igibson/examples/vr/in_development/vr_hand_dex_benchmark.py
+++ b/igibson/examples/vr/in_development/vr_hand_dex_benchmark.py
@@ -2,20 +2,20 @@
and interactive objects, and provides a good way to qualitatively measure the dexterity of a VR hand.
You can use the left and right controllers to start/stop/reset the timer,
as well as show/hide its display. The "overlay toggle" action and its
-corresponding button index mapping can be found in the vr_config.yaml file in the igibson folder.
+corresponding button index mapping can be found in the vr_config.yaml file in the omnigibson folder.
"""
import os
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# Objects in the benchmark - corresponds to Rs kitchen environment, for range of items and
# transferability to the real world
@@ -39,12 +39,12 @@
USE_GRIPPER = False
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
def main():
@@ -170,63 +170,63 @@ def main():
table_objects_to_load = {
"tray": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "tray", "tray_000", "tray_000.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "tray", "tray_000", "tray_000.urdf"),
"pos": (1.100000, 0.200000, 0.650000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 0.15,
"mass": 1.7,
},
"plate_1": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "plate", "plate_000", "plate_000.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "plate", "plate_000", "plate_000.urdf"),
"pos": (0.700000, -0.300000, 0.650000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 0.01,
"mass": 1.5,
},
"plate_2": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "plate", "plate_000", "plate_000.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "plate", "plate_000", "plate_000.urdf"),
"pos": (1.100000, -0.300000, 0.650000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 0.01,
"mass": 1.5,
},
"plate_3": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "plate", "plate_000", "plate_000.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "plate", "plate_000", "plate_000.urdf"),
"pos": (0.700000, -1.200000, 0.000000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 0.01,
"mass": 1.5,
},
"plate_4": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "plate", "plate_000", "plate_000.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "plate", "plate_000", "plate_000.urdf"),
"pos": (1.100000, -1.200000, 0.000000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 0.01,
"mass": 1.5,
},
"chip_1": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "chip", "chip_000", "chip_000.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "chip", "chip_000", "chip_000.urdf"),
"pos": (0.700000, -0.800000, 0.750000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 0.01,
"mass": 0.22,
},
"chip_2": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "chip", "chip_000", "chip_000.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "chip", "chip_000", "chip_000.urdf"),
"pos": (1.100000, -0.800000, 0.750000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 0.01,
"mass": 0.22,
},
"cherry_1": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "cherry", "02_0", "02_0.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "cherry", "02_0", "02_0.urdf"),
"pos": (0.700000, -0.600000, 0.680000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 1,
"mass": 0.02,
},
"cherry_2": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "cherry", "02_0", "02_0.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "cherry", "02_0", "02_0.urdf"),
"pos": (1.100000, -0.600000, 0.680000),
"orn": (0.000000, 0.00000, 0.707107, 0.707107),
"scale": 1,
@@ -234,7 +234,7 @@ def main():
},
"shelf": {
"urdf": os.path.join(
- igibson.ig_dataset_path,
+ omnigibson.og_dataset_path,
"objects",
"shelf",
"de3b28f255111570bc6a557844fbbce9",
@@ -246,21 +246,21 @@ def main():
"mass": 11,
},
"wine_bottle_1": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "wine_bottle", "23_1", "23_1.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "wine_bottle", "23_1", "23_1.urdf"),
"pos": (1.700000, -3.500000, 1.90000),
"orn": (0.000000, 0.00000, -0.707107, 0.707107),
"scale": 1,
"mass": 1.2,
},
"wine_bottle_2": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "wine_bottle", "23_1", "23_1.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "wine_bottle", "23_1", "23_1.urdf"),
"pos": (1.700000, -3.2500000, 1.90000),
"orn": (0.000000, 0.00000, -0.707107, 0.707107),
"scale": 1,
"mass": 1.2,
},
"wine_bottle_3": {
- "urdf": os.path.join(igibson.ig_dataset_path, "objects", "wine_bottle", "23_1", "23_1.urdf"),
+ "urdf": os.path.join(omnigibson.og_dataset_path, "objects", "wine_bottle", "23_1", "23_1.urdf"),
"pos": (1.700000, -3.750000, 1.90000),
"orn": (0.000000, 0.00000, -0.707107, 0.707107),
"scale": 1,
diff --git a/igibson/examples/vr/in_development/vr_hand_speed_benchmark.py b/igibson/examples/vr/in_development/vr_hand_speed_benchmark.py
index 3cc3e6740..bb64e7c76 100644
--- a/igibson/examples/vr/in_development/vr_hand_speed_benchmark.py
+++ b/igibson/examples/vr/in_development/vr_hand_speed_benchmark.py
@@ -4,22 +4,22 @@
You can use the left and right controllers to start/stop/reset the timer,
as well as show/hide its display. The "overlay toggle" action and its
-corresponding button index mapping can be found in the vr_config.yaml file in the igibson folder.
+corresponding button index mapping can be found in the vr_config.yaml file in the omnigibson folder.
"""
import os
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.vr_utils import VrTimer
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.vr_utils import VrTimer
# Set to true to use viewer manipulation instead of VR
# Set to false by default so this benchmark task can be performed in VR
@@ -29,12 +29,12 @@
# Set to true to use gripper instead of VR hands
USE_GRIPPER = False
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
def main():
@@ -101,7 +101,7 @@ def main():
obj = ArticulatedObject(
os.path.join(
- igibson.ig_dataset_path,
+ omnigibson.og_dataset_path,
"objects",
"basket",
"e3bae8da192ab3d4a17ae19fa77775ff",
diff --git a/igibson/examples/vr/in_development/vr_playground.py b/igibson/examples/vr/in_development/vr_playground.py
index 09e15a6fe..56ac4a523 100644
--- a/igibson/examples/vr/in_development/vr_playground.py
+++ b/igibson/examples/vr/in_development/vr_playground.py
@@ -3,11 +3,11 @@
Important - VR functionality and where to find it:
-1) Most VR functions can be found in the igibson/simulator.py
-2) The BehaviorRobot and its associated parts can be found in igibson/robots/behavior_robot.py
-3) VR utility functions are found in igibson/utils/vr_utils.py
-4) The VR renderer can be found in igibson/render/mesh_renderer.py
-5) The underlying VR C++ code can be found in vr_mesh_render.h and .cpp in igibson/render/cpp
+1) Most VR functions can be found in the omnigibson/simulator.py
+2) The BehaviorRobot and its associated parts can be found in omnigibson/robots/behavior_robot.py
+3) VR utility functions are found in omnigibson/utils/vr_utils.py
+4) The VR renderer can be found in omnigibson/render/mesh_renderer.py
+5) The underlying VR C++ code can be found in vr_mesh_render.h and .cpp in omnigibson/render/cpp
"""
import os
@@ -15,13 +15,13 @@
import numpy as np
-import igibson
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# Whether to use VR mode or not
VR_MODE = True
@@ -33,12 +33,12 @@
USE_GRIPPER = False
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
def main():
diff --git a/igibson/examples/vr/in_development/vr_sample_hud.py b/igibson/examples/vr/in_development/vr_sample_hud.py
index dd9f14639..94723ce72 100644
--- a/igibson/examples/vr/in_development/vr_sample_hud.py
+++ b/igibson/examples/vr/in_development/vr_sample_hud.py
@@ -8,13 +8,13 @@
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# Objects in the benchmark - corresponds to Rs kitchen environment, for range of items and
# transferability to the real world
@@ -44,12 +44,12 @@
USE_GRIPPER = False
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
def main():
@@ -167,7 +167,7 @@ def main():
p.changeDynamics(body_id, -1, mass=masses[i])
title = s.add_vr_overlay_text(
- text_data="Welcome to iGibson VR!", font_size=85, font_style="Bold", color=[0, 0, 0.5], pos=[150, 900]
+ text_data="Welcome to OmniGibson VR!", font_size=85, font_style="Bold", color=[0, 0, 0.5], pos=[150, 900]
)
sample_condition_1 = s.add_vr_overlay_text(
text_data="for box in cupboard:\n1) find box\n2) pick up box",
diff --git a/igibson/examples/vr/in_development/vr_tracker_test.py b/igibson/examples/vr/in_development/vr_tracker_test.py
index 8b318b1ee..0ac3ea86a 100644
--- a/igibson/examples/vr/in_development/vr_tracker_test.py
+++ b/igibson/examples/vr/in_development/vr_tracker_test.py
@@ -3,9 +3,9 @@
The serial number for a tracker can be found in
SteamVR settings -> controllers -> manage vive trackers
"""
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.scenes.empty_scene import EmptyScene
-from igibson.simulator import Simulator
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.scenes.empty_scene import EmptyScene
+from omnigibson.simulator import Simulator
# Note: replace this with another tracker serial number to test it
TEST_TRACKER_SERIAL_NUM = "LHR-DF82C682"
diff --git a/igibson/examples/vr/muvr/muvr_demo.py b/igibson/examples/vr/muvr/muvr_demo.py
index 046ccf04a..60db9b7ae 100644
--- a/igibson/examples/vr/muvr/muvr_demo.py
+++ b/igibson/examples/vr/muvr/muvr_demo.py
@@ -10,21 +10,21 @@
import numpy as np
-import igibson
-from igibson import assets_path
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.muvr_utils import IGVRClient, IGVRServer
+import omnigibson
+from omnigibson import assets_path
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.muvr_utils import IGVRClient, IGVRServer
sample_urdf_folder = os.path.join(assets_path, "models", "sample_urdfs")
# Only load in first few objects in Rs to decrease load times
LOAD_PARTIAL = True
-# Whether to print iGibson + networking FPS each frame
+# Whether to print OmniGibson + networking FPS each frame
PRINT_FPS = True
# Whether to wait for client before simulating
WAIT_FOR_CLIENT = False
@@ -35,7 +35,7 @@
def run_muvr(mode="server", host="localhost", port="8885"):
"""
- Sets up the iGibson environment that will be used by both server and client
+ Sets up the OmniGibson environment that will be used by both server and client
"""
print("INFO: Running MUVR {} at {}:{}".format(mode, host, port))
# This function only runs if mode is one of server or client, so setting this bool is safe
@@ -44,12 +44,12 @@ def run_muvr(mode="server", host="localhost", port="8885"):
vr_settings = RUN_SETTINGS[mode]
# HDR files for PBR rendering
- hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
- hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+ hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+ hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
- background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+ background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
# VR rendering settings
vr_rendering_settings = MeshRendererSettings(
@@ -111,7 +111,7 @@ def run_muvr(mode="server", host="localhost", port="8885"):
while True:
frame_start = time.time()
if is_server:
- # Update iGibson with latest vr data from client
+ # Update OmniGibson with latest vr data from client
vr_server.ingest_vr_data()
if not WAIT_FOR_CLIENT or vr_server.client_connected():
diff --git a/igibson/examples/vr/robot_embodiment/vr_demo_robot_explore.py b/igibson/examples/vr/robot_embodiment/vr_demo_robot_explore.py
index 6b735eae8..e082f7540 100644
--- a/igibson/examples/vr/robot_embodiment/vr_demo_robot_explore.py
+++ b/igibson/examples/vr/robot_embodiment/vr_demo_robot_explore.py
@@ -5,12 +5,12 @@
import numpy as np
-import igibson
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# IMPORTANT: Change this value if you have a more powerful machine
VR_FPS = 20
@@ -22,12 +22,12 @@
VR_MODE = True
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
# VR rendering settings
vr_rendering_settings = MeshRendererSettings(
diff --git a/igibson/examples/vr/robot_embodiment/vr_demo_robot_grasping.py b/igibson/examples/vr/robot_embodiment/vr_demo_robot_grasping.py
index 4a9205642..e7c5d8085 100644
--- a/igibson/examples/vr/robot_embodiment/vr_demo_robot_grasping.py
+++ b/igibson/examples/vr/robot_embodiment/vr_demo_robot_grasping.py
@@ -6,13 +6,13 @@
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# IMPORTANT: Change this value if you have a more powerful machine
VR_FPS = 20
@@ -22,12 +22,12 @@
VR_MODE = True
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
# VR rendering settings
vr_rendering_settings = MeshRendererSettings(
diff --git a/igibson/examples/vr/test/muvr_lag_test.py b/igibson/examples/vr/test/muvr_lag_test.py
index 8e65d3076..c1e5bb9c0 100644
--- a/igibson/examples/vr/test/muvr_lag_test.py
+++ b/igibson/examples/vr/test/muvr_lag_test.py
@@ -6,7 +6,7 @@
import argparse
import time
-from igibson.utils.muvr_utils import IGVRTestClient, IGVRTestServer
+from omnigibson.utils.muvr_utils import IGVRTestClient, IGVRTestServer
# Client and server settings
SERVER_FPS = 30.0
@@ -18,7 +18,7 @@
def run_lag_test(mode="server", host="localhost", port="7500"):
"""
- Sets up the iGibson environment that will be used by both server and client
+ Sets up the OmniGibson environment that will be used by both server and client
"""
print("INFO: Running MUVR {} at {}:{}".format(mode, host, port))
# This function only runs if mode is one of server or client, so setting this bool is safe
diff --git a/igibson/examples/vr/test/vr_condition_switch.py b/igibson/examples/vr/test/vr_condition_switch.py
index 08ede08ed..056140c22 100644
--- a/igibson/examples/vr/test/vr_condition_switch.py
+++ b/igibson/examples/vr/test/vr_condition_switch.py
@@ -5,21 +5,21 @@
-import igibson
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrConditionSwitcher, VrSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrConditionSwitcher, VrSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
def main():
diff --git a/igibson/examples/vr/test/vr_hand_geom_vis.py b/igibson/examples/vr/test/vr_hand_geom_vis.py
index 6b0ee1cef..6aa964c4a 100644
--- a/igibson/examples/vr/test/vr_hand_geom_vis.py
+++ b/igibson/examples/vr/test/vr_hand_geom_vis.py
@@ -7,7 +7,7 @@
-from igibson import assets_path
+from omnigibson import assets_path
p.connect(p.GUI)
diff --git a/igibson/examples/vr/test/vr_overlay_color_test.py b/igibson/examples/vr/test/vr_overlay_color_test.py
index 17d1575bf..0c17ce3aa 100644
--- a/igibson/examples/vr/test/vr_overlay_color_test.py
+++ b/igibson/examples/vr/test/vr_overlay_color_test.py
@@ -3,7 +3,7 @@
You can use the left and right controllers to start/stop/reset the timer,
as well as show/hide its display. The "overlay toggle" action and its
-corresponding button index mapping can be found in the vr_config.yaml file in the igibson folder.
+corresponding button index mapping can be found in the vr_config.yaml file in the omnigibson folder.
"""
import os
@@ -11,13 +11,13 @@
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# Objects in the benchmark - corresponds to Rs kitchen environment, for range of items and
# transferability to the real world
@@ -41,12 +41,12 @@
USE_GRIPPER = False
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
SYMBOL_LIST = [l for l in ".,:;!?()+-=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"]
diff --git a/igibson/examples/vr/test/vr_scroll_wrap_text_test.py b/igibson/examples/vr/test/vr_scroll_wrap_text_test.py
index bc03df9c3..dbb6185a1 100644
--- a/igibson/examples/vr/test/vr_scroll_wrap_text_test.py
+++ b/igibson/examples/vr/test/vr_scroll_wrap_text_test.py
@@ -3,7 +3,7 @@
You can use the left and right controllers to start/stop/reset the timer,
as well as show/hide its display. The "overlay toggle" action and its
-corresponding button index mapping can be found in the vr_config.yaml file in the igibson folder.
+corresponding button index mapping can be found in the vr_config.yaml file in the omnigibson folder.
"""
import os
@@ -11,13 +11,13 @@
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.objects.ycb_object import YCBObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.objects.ycb_object import YCBObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# Objects in the benchmark - corresponds to Rs kitchen environment, for range of items and
# transferability to the real world
@@ -41,12 +41,12 @@
USE_GRIPPER = False
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
SYMBOL_LIST = [l for l in ".,:;!?()+-=abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789"]
diff --git a/igibson/examples/vr/vr_gaze_test.py b/igibson/examples/vr/vr_gaze_test.py
index 5cb9b5878..7c8ca29bf 100644
--- a/igibson/examples/vr/vr_gaze_test.py
+++ b/igibson/examples/vr/vr_gaze_test.py
@@ -8,21 +8,21 @@
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# HDR files for PBR rendering
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
def main(random_selection=False, headless=False, short_exec=False):
@@ -72,7 +72,7 @@ def main(random_selection=False, headless=False, short_exec=False):
obj = ArticulatedObject(
os.path.join(
- igibson.ig_dataset_path,
+ omnigibson.og_dataset_path,
"objects",
"basket",
"e3bae8da192ab3d4a17ae19fa77775ff",
diff --git a/igibson/examples/vr/vr_simple_demo.py b/igibson/examples/vr/vr_simple_demo.py
index e76a9716b..0f86f0a0b 100644
--- a/igibson/examples/vr/vr_simple_demo.py
+++ b/igibson/examples/vr/vr_simple_demo.py
@@ -7,23 +7,23 @@
import pybullet_data
-import igibson
-from igibson.objects.usd_object import ArticulatedObject
-from igibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
-from igibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
-from igibson.robots.behavior_robot import BehaviorRobot
-from igibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
-from igibson.simulator import Simulator
+import omnigibson
+from omnigibson.objects.usd_object import ArticulatedObject
+from omnigibson.render.mesh_renderer.mesh_renderer_cpu import MeshRendererSettings
+from omnigibson.render.mesh_renderer.mesh_renderer_vr import VrSettings
+from omnigibson.robots.behavior_robot import BehaviorRobot
+from omnigibson.scenes.interactive_traversable_scene import InteractiveIndoorScene
+from omnigibson.simulator import Simulator
# HDR files for PBR rendering
-from igibson.simulator_vr import SimulatorVR
+from omnigibson.simulator_vr import SimulatorVR
-hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
-hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
-background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
def main(random_selection=False, headless=False, short_exec=False):
@@ -73,7 +73,7 @@ def main(random_selection=False, headless=False, short_exec=False):
obj = ArticulatedObject(
os.path.join(
- igibson.ig_dataset_path,
+ omnigibson.og_dataset_path,
"objects",
"basket",
"e3bae8da192ab3d4a17ae19fa77775ff",
diff --git a/igibson/examples/web_ui/sampling_ui.py b/igibson/examples/web_ui/sampling_ui.py
index 568ae80c4..0bd7aecf2 100644
--- a/igibson/examples/web_ui/sampling_ui.py
+++ b/igibson/examples/web_ui/sampling_ui.py
@@ -16,12 +16,12 @@
from flask_apscheduler import APScheduler
from flask_cors import CORS
-import igibson
-from igibson.envs.igibson_env import iGibsonEnv
-from igibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
-from igibson.scenes.gibson_indoor_scene import StaticIndoorScene
-from igibson.simulator import Simulator
-from igibson.utils.config_utils import parse_config, restoreState
+import omnigibson
+from omnigibson.envs.omnigibson_env import OmniGibsonEnv
+from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
+from omnigibson.scenes.gibson_indoor_scene import StaticIndoorScene
+from omnigibson.simulator import Simulator
+from omnigibson.utils.config_utils import parse_config, restoreState
interactive = True
NUM_REQUIRED_SUCCESSFUL_SCENES = 3
@@ -200,13 +200,13 @@ def _worker(self, conn, env_constructor):
class ToyEnv(object):
def __init__(self):
- config = parse_config(os.path.join(igibson.example_config_path, "turtlebot_demo.yaml"))
- hdr_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_02.hdr")
- hdr_texture2 = os.path.join(igibson.ig_dataset_path, "scenes", "background", "probe_03.hdr")
+ config = parse_config(os.path.join(omnigibson.example_config_path, "turtlebot_demo.yaml"))
+ hdr_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_02.hdr")
+ hdr_texture2 = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "probe_03.hdr")
light_modulation_map_filename = os.path.join(
- igibson.ig_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
+ omnigibson.og_dataset_path, "scenes", "Rs_int", "layout", "floor_lighttype_0.png"
)
- background_texture = os.path.join(igibson.ig_dataset_path, "scenes", "background", "urban_street_01.jpg")
+ background_texture = os.path.join(omnigibson.og_dataset_path, "scenes", "background", "urban_street_01.jpg")
settings = MeshRendererSettings(enable_shadow=False, enable_pbr=False)
@@ -225,7 +225,7 @@ def close(self):
class ToyEnvInt(object):
def __init__(self, scene="Rs_int"):
- config_file = os.path.join(igibson.example_config_path, "behavior_vr.yaml")
+ config_file = os.path.join(omnigibson.example_config_path, "behavior_vr.yaml")
env_config = parse_config(config_file)
env_config["scene_id"] = scene
env_config["task"] = "trivial"
@@ -233,7 +233,7 @@ def __init__(self, scene="Rs_int"):
env_config["online_sampling"] = True
env_config["load_clutter"] = False
settings = MeshRendererSettings(texture_scale=0.01)
- self.env = iGibsonEnv(config_file=env_config, mode="headless", rendering_settings=settings)
+ self.env = OmniGibsonEnv(config_file=env_config, mode="headless", rendering_settings=settings)
self.state_id = p.saveState()
self.num_body_ids = p.getNumBodies()
self.num_particle_systems = len(self.env.simulator.particle_systems)
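
The ToyEnvInt class in sampling_ui.py now builds its headless environment through the renamed entry point. A minimal sketch of the post-patch construction, using only the module paths, names, and keyword arguments that appear in the hunks above:

```python
import os

import omnigibson
from omnigibson.envs.omnigibson_env import OmniGibsonEnv
from omnigibson.render.mesh_renderer.mesh_renderer_settings import MeshRendererSettings
from omnigibson.utils.config_utils import parse_config

# Load the example BEHAVIOR-VR config shipped with the package and override it for sampling.
config_file = os.path.join(omnigibson.example_config_path, "behavior_vr.yaml")
env_config = parse_config(config_file)
env_config["scene_id"] = "Rs_int"
env_config["task"] = "trivial"
env_config["online_sampling"] = True
env_config["load_clutter"] = False

# A low texture scale keeps the headless sampling environment lightweight, as in the hunk above.
settings = MeshRendererSettings(texture_scale=0.01)
env = OmniGibsonEnv(config_file=env_config, mode="headless", rendering_settings=settings)
```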
diff --git a/igibson/examples/web_ui/templates/demo.html b/igibson/examples/web_ui/templates/demo.html
index 3e89adce1..4462b0055 100644
--- a/igibson/examples/web_ui/templates/demo.html
+++ b/igibson/examples/web_ui/templates/demo.html
@@ -1,6 +1,6 @@
- iGibson Web Interface
+ OmniGibson Web Interface