Skip to content

Commit 239641d

Browse files
committed
feat: update all dependencies and project configs
Changes: 1. Update GitHub Actions base image to 22.04 since GitHub is retiring 20.04. 2. Use pip-tools to compile all Python requirements. 3. Update Python dependency versions. 4. Regenerate all pb2 files for proto3. 5. Add extras_require for proto4.
1 parent 2bcb53d commit 239641d

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

45 files changed

+886
-6842
lines changed

.github/workflows/build-docker.yml

+3-3
Original file line numberDiff line numberDiff line change
@@ -3,17 +3,17 @@ name: build-docker
33
on:
44
push:
55
branches:
6-
- 'master'
6+
- "master"
77
tags:
8-
- '*'
8+
- "*"
99

1010
env:
1111
REGISTRY: ghcr.io
1212
IMAGE_NAME: ${{ github.repository }}
1313

1414
jobs:
1515
build-and-push-image:
16-
runs-on: ubuntu-20.04
16+
runs-on: ubuntu-22.04
1717
permissions:
1818
contents: read
1919
packages: write

.github/workflows/coverage.yml

+7-8
Original file line numberDiff line numberDiff line change
@@ -2,24 +2,24 @@ name: coverage
22
on:
33
pull_request:
44
branches:
5-
- 'master'
5+
- "master"
66
push:
77
branches:
8-
- 'master'
8+
- "master"
99
jobs:
1010
code-coverage:
11-
runs-on: ubuntu-20.04
11+
runs-on: ubuntu-22.04
1212
env:
1313
ENVIRONMENT: TEST_RUNNER
14-
OS: ubuntu-20.04
15-
PYTHON: '3.9'
14+
OS: ubuntu-22.04
15+
PYTHON: "3.9"
1616
COVERAGE_TOTAL: 49 # Coverage threshold percentage
1717
steps:
1818
- name: Checkout (admin token)
1919
if: ${{github.event_name != 'pull_request'}} # We don't want to use the admin token for PR flows
2020
uses: actions/checkout@master
2121
with:
22-
token: '${{ secrets.GIT_ADMIN_WORKFLOW_TOKEN }}'
22+
token: "${{ secrets.GIT_ADMIN_WORKFLOW_TOKEN }}"
2323
fetch-depth: "2" # Original commit + code cov badge commit
2424
- name: Checkout (normal flow)
2525
if: ${{github.event_name == 'pull_request'}}
@@ -34,9 +34,8 @@ jobs:
3434
id: coverage-installer
3535
run: |
3636
python -m pip install --upgrade pip
37-
pip install cython==0.29.21 numpy==1.23.2
3837
sudo apt-get install jq
39-
pip install -r requirements.txt
38+
pip install -r requirements_proto3.txt
4039
pip install -r requirements-dev.txt
4140
pip install coverage-badge
4241
- name: Run tests and calculate coverage

.github/workflows/doc-gen.yml

+7-8
Original file line numberDiff line numberDiff line change
@@ -2,17 +2,17 @@ name: doc-gen
22
on:
33
push:
44
branches:
5-
- 'master'
5+
- "master"
66
pull_request:
77
branches:
8-
- 'master'
8+
- "master"
99

1010
jobs:
1111
run:
12-
runs-on: ubuntu-20.04
12+
runs-on: ubuntu-22.04
1313
env:
14-
OS: ubuntu-20.04
15-
PYTHON: '3.9'
14+
OS: ubuntu-22.04
15+
PYTHON: "3.9"
1616
steps:
1717
- uses: actions/checkout@master
1818
with:
@@ -26,8 +26,7 @@ jobs:
2626
- name: Setup requirements and run sphinx
2727
run: |
2828
python -m pip install --upgrade pip
29-
pip install cython==0.29.21 numpy==1.23.2
30-
pip install -r requirements.txt
29+
pip install -r requirements_proto3.txt
3130
pip install -r requirements-dev.txt
3231
pip install -r docs/requirements-doc.txt
3332
cd docs
@@ -39,4 +38,4 @@ jobs:
3938
with:
4039
branch: gh-pages
4140
folder: ./docs/build/html
42-
commit-message: 'docs: update build documentation'
41+
commit-message: "docs: update build documentation"

.github/workflows/pre-merge.yml

+2-3
Original file line numberDiff line numberDiff line change
@@ -9,7 +9,7 @@ on:
99

1010
jobs:
1111
pre-merge-tests:
12-
runs-on: ubuntu-20.04
12+
runs-on: ubuntu-22.04
1313
env:
1414
ENVIRONMENT: TEST_RUNNER
1515
steps:
@@ -28,8 +28,7 @@ jobs:
2828
- name: Install dependencies
2929
run: |
3030
python -m pip install --upgrade pip
31-
pip install cython==0.29.21 numpy==1.23.2
32-
pip install -r requirements.txt
31+
pip install -r requirements_proto3.txt
3332
pip install -r requirements-dev.txt
3433
- name: Lint all files with pre-commit
3534
run: |

.gitignore

+2
Original file line numberDiff line numberDiff line change
@@ -111,3 +111,5 @@ Pipfile
111111
Pipfile.lock
112112

113113
\.vscode/
114+
*.pyi
115+
*.ply

.pre-commit-config.yaml

+16-9
Original file line numberDiff line numberDiff line change
@@ -61,14 +61,21 @@ repos:
6161
hooks:
6262
- id: yapf
6363
# pre-commit requires that we split args at whitespace boundaries.
64-
args: [
65-
--style, .style.yapf,
66-
-i,
67-
-e, '*pb2.py',
68-
-e, '*pb2_grpc.py',
69-
-e, '*eggs*.py',
70-
-e, '*env*',
71-
-e, 'build/*'
72-
]
64+
args:
65+
[
66+
--style,
67+
.style.yapf,
68+
-i,
69+
-e,
70+
"*pb2.py",
71+
-e,
72+
"*pb2_grpc.py",
73+
-e,
74+
"*eggs*.py",
75+
-e,
76+
"*env*",
77+
-e,
78+
"build/*",
79+
]
7380
exclude: '\w*pb2.py'
7481
additional_dependencies: [toml]

dgp/__init__.py

+21-22
Original file line numberDiff line numberDiff line change
@@ -1,52 +1,51 @@
11
# Copyright 2021-2022 Toyota Research Institute. All rights reserved.
22
import os
3-
from collections import OrderedDict
43

5-
__version__ = '1.0'
4+
__version__ = "1.6.0"
65

7-
DGP_PATH = os.getenv('DGP_PATH', default=os.getenv('HOME', os.getcwd()))
8-
DGP_DATA_DIR = os.path.join(DGP_PATH, '.dgp')
9-
DGP_CACHE_DIR = os.path.join(DGP_DATA_DIR, 'cache')
10-
DGP_DATASETS_CACHE_DIR = os.path.join(DGP_DATA_DIR, 'datasets')
6+
DGP_PATH = os.getenv("DGP_PATH", default=os.getenv("HOME", os.getcwd()))
7+
DGP_DATA_DIR = os.path.join(DGP_PATH, ".dgp")
8+
DGP_CACHE_DIR = os.path.join(DGP_DATA_DIR, "cache")
9+
DGP_DATASETS_CACHE_DIR = os.path.join(DGP_DATA_DIR, "datasets")
1110

1211
TRI_DGP_FOLDER_PREFIX = "dgp/"
1312
TRI_RAW_FOLDER_PREFIX = "raw/"
1413
TRI_DGP_JSON_PREFIX = "dataset_v"
1514

1615
# DGP Directory structure constants
17-
RGB_FOLDER = 'rgb'
18-
POINT_CLOUD_FOLDER = 'point_cloud'
16+
RGB_FOLDER = "rgb"
17+
POINT_CLOUD_FOLDER = "point_cloud"
1918
RADAR_POINT_CLOUD_FOLDER = "radar_point_cloud"
20-
BOUNDING_BOX_2D_FOLDER = 'bounding_box_2d'
21-
BOUNDING_BOX_3D_FOLDER = 'bounding_box_3d'
22-
SEMANTIC_SEGMENTATION_2D_FOLDER = 'semantic_segmentation_2d'
23-
SEMANTIC_SEGMENTATION_3D_FOLDER = 'semantic_segmentation_3d'
24-
INSTANCE_SEGMENTATION_2D_FOLDER = 'instance_segmentation_2d'
25-
INSTANCE_SEGMENTATION_3D_FOLDER = 'instance_segmentation_3d'
26-
DEPTH_FOLDER = 'depth'
19+
BOUNDING_BOX_2D_FOLDER = "bounding_box_2d"
20+
BOUNDING_BOX_3D_FOLDER = "bounding_box_3d"
21+
SEMANTIC_SEGMENTATION_2D_FOLDER = "semantic_segmentation_2d"
22+
SEMANTIC_SEGMENTATION_3D_FOLDER = "semantic_segmentation_3d"
23+
INSTANCE_SEGMENTATION_2D_FOLDER = "instance_segmentation_2d"
24+
INSTANCE_SEGMENTATION_3D_FOLDER = "instance_segmentation_3d"
25+
DEPTH_FOLDER = "depth"
2726
EXTRA_DATA_FOLDER = "extra_data"
2827
FEATURE_ONTOLOGY_FOLDER = "feature_ontology"
2928
AGENT_FOLDER = "agent"
3029
CLASSIFICATION_FOLDER = "classification"
3130

3231
# Scene Directory structure constants
33-
CALIBRATION_FOLDER = 'calibration'
34-
ONTOLOGY_FOLDER = 'ontology'
35-
SCENE_JSON_FILENAME = 'scene.json'
32+
CALIBRATION_FOLDER = "calibration"
33+
ONTOLOGY_FOLDER = "ontology"
34+
SCENE_JSON_FILENAME = "scene.json"
3635

3736
# DGP file naming conventions
3837
TRI_DGP_SCENE_DATASET_JSON_NAME = "scene_dataset_v{version}.json"
3938
TRI_DGP_SCENE_JSON_NAME = "scene_{scene_hash}.json"
40-
ANNOTATION_FILE_NAME = '{image_content_hash}_{annotation_content_hash}.json'
39+
ANNOTATION_FILE_NAME = "{image_content_hash}_{annotation_content_hash}.json"
4140

4241
# DGP file naming conventions
4342
TRI_DGP_SCENE_DATASET_JSON_NAME = "scene_dataset_v{version}.json"
4443
TRI_DGP_AGENT_TRACKS_JSON_NAME = "agent_tracks_{track_hash}.json"
4544
TRI_DGP_SCENE_JSON_NAME = "scene_{scene_hash}.json"
46-
ANNOTATION_FILE_NAME = '{image_content_hash}_{annotation_content_hash}.json'
45+
ANNOTATION_FILE_NAME = "{image_content_hash}_{annotation_content_hash}.json"
4746
TRI_DGP_AGENTS_JSON_NAME = "agents_{agent_hash}.json"
4847
TRI_DGP_AGENTS_SLICES_JSON_NAME = "agents_slices_{slice_hash}.json"
4948

5049
# Autolabel constants
51-
AUTOLABEL_FOLDER = 'autolabels'
52-
AUTOLABEL_SCENE_JSON_NAME = 'scene.json'
50+
AUTOLABEL_FOLDER = "autolabels"
51+
AUTOLABEL_SCENE_JSON_NAME = "scene.json"
+3
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
-r ../../../requirements.in
2+
wicker[spark]
3+
retry
+1
Original file line numberDiff line numberDiff line change
@@ -1,2 +1,3 @@
1+
-r ../../../requirements.txt
12
wicker[spark]
23
retry

dgp/contribs/dgp2wicker/setup.py

+12-13
Original file line numberDiff line numberDiff line change
@@ -19,35 +19,34 @@ def run(self):
1919
develop.run(self)
2020

2121

22-
__version__ = importlib.import_module('dgp2wicker').__version__
22+
__version__ = importlib.import_module("dgp2wicker").__version__
2323

24-
with open('requirements.txt', 'r', encoding='utf-8') as f:
24+
with open("requirements.txt", "r", encoding="utf-8") as f:
2525
requirements = f.read().splitlines()
2626

27-
with open('README.md', 'r', encoding='utf-8') as f:
27+
with open("README.md", "r", encoding="utf-8") as f:
2828
readme = f.read()
2929

30-
packages = find_packages(exclude=['tests'])
30+
packages = find_packages(exclude=["tests"])
3131
setup(
3232
name="dgp2wicker",
3333
version=__version__,
3434
description="Tools to convert TRI's DGP to L5's Wicker format.",
3535
long_description=readme,
36-
long_description_content_type='text/markdown',
36+
long_description_content_type="text/markdown",
3737
author="Chris Ochoa, Kuan Lee",
38-
38+
3939
url="https://github.com/TRI-ML/dgp/tree/master/dgp/contribs/dgp2wicker",
4040
packages=packages,
41-
entry_points={'console_scripts': [
42-
'dgp2wicker=dgp2wicker.cli:cli',
41+
entry_points={"console_scripts": [
42+
"dgp2wicker=dgp2wicker.cli:cli",
4343
]},
4444
include_package_data=True,
45-
setup_requires=['cython==0.29.21', 'grpcio==1.41.0', 'grpcio-tools==1.41.0'],
4645
install_requires=requirements,
4746
zip_safe=False,
48-
python_requires='>=3.7',
47+
python_requires=">=3.8",
4948
cmdclass={
50-
'install': CustomInstallCommand,
51-
'develop': CustomDevelopCommand,
52-
}
49+
"install": CustomInstallCommand,
50+
"develop": CustomDevelopCommand,
51+
},
5352
)

0 commit comments

Comments
 (0)