Merge pull request #114 from IDEA-Research/dev
release: 0.9.2
imhuwq authored Dec 13, 2023
2 parents 8a7a014 + 6112526 commit 4b475a6
Showing 4 changed files with 35 additions and 31 deletions.
deepdataspace/model/dataset.py (23 additions, 0 deletions)

@@ -439,3 +439,26 @@ def eval_description(self):
         except:
             logger.warning(f"Failed to eval description_func[{self.description_func}] for dataset[{self.id}]")
         return self.description or self.path
+
+    @staticmethod
+    def cascade_delete(dataset: "DataSet"):
+        """
+        Cascade delete the dataset, along with all its images, labels, categories and objects.
+        """
+        if dataset is None:
+            return
+
+        dataset_id = dataset.id
+        print(f"dataset [{dataset_id}] is found, deleting...")
+
+        print(f"dataset [{dataset_id}] is found, deleting categories...")
+        Category.delete_many({"dataset_id": dataset_id})
+
+        print(f"dataset [{dataset_id}] is found, deleting labels...")
+        Label.delete_many({"dataset_id": dataset_id})
+
+        print(f"dataset [{dataset_id}] is found, deleting images...")
+        Image(dataset_id).get_collection().drop()
+
+        DataSet.delete_many({"id": dataset_id})
+        print(f"dataset [{dataset_id}] is deleted.")
deepdataspace/scripts/dataset_cmds.py (1 addition, 18 deletions)

@@ -5,7 +5,6 @@
 """

 import os
-import shutil

 import pkg_resources

@@ -17,10 +16,7 @@
 @ddsop.command("delete_one", help="Delete a dataset.")
 @click.argument("dataset_dir")
 def delete_one(dataset_dir):
-    from deepdataspace.model import Image
-    from deepdataspace.model import Label
     from deepdataspace.model import DataSet
-    from deepdataspace.model import Category
     from deepdataspace.utils.string import get_str_md5

     dataset_dir = os.path.abspath(dataset_dir)
@@ -30,20 +26,7 @@ def delete_one(dataset_dir):
         print(f"dataset [{dataset_dir}] is not imported before, skip...")
         return

-    dataset_id = dataset.id
-    print(f"dataset [{dataset_id}] is found, deleting...")
-
-    print(f"dataset [{dataset_id}] is found, deleting categories...")
-    Category.delete_many({"dataset_id": dataset_id})
-
-    print(f"dataset [{dataset_id}] is found, deleting labels...")
-    Label.delete_many({"dataset_id": dataset_id})
-
-    print(f"dataset [{dataset_id}] is found, deleting images...")
-    Image(dataset_id).get_collection().drop()
-
-    DataSet.delete_many({"id": dataset_id})
-    print(f"dataset [{dataset_id}] is deleted.")
+    DataSet.cascade_delete(dataset)


 @ddsop.command("import_all", help="Trigger a background task of importing all datasets in a data dir.")
deepdataspace/services/dds.py (10 additions, 12 deletions)

@@ -170,6 +170,7 @@ def _init_shared_files_and_dirs(self):
         if self.quickstart is True:
             if self.data_dir is None:
                 self.data_dir = os.path.join(config.RUNTIME_DIR, "datasets")
+                os.makedirs(self.data_dir, exist_ok=True)
                 self.init_samples()
         config.DATA_DIR = self.data_dir

@@ -315,20 +316,17 @@ def _init_shared_libs(self):
             url = f"{self.dl_prefix}/lib/libcurl.so.4.8.0"
             download_by_requests(url, config.SHARED_CURL_LIB)

-        if config.LD_LIBRARY_PATH is None:
-            config.LD_LIBRARY_PATH = config.SHARED_LIB_DIR
-        elif config.SHARED_LIB_DIR is not None and config.LD_LIBRARY_PATH != config.SHARED_LIB_DIR:
-            config.LD_LIBRARY_PATH = f"{config.SHARED_LIB_DIR}:{config.LD_LIBRARY_PATH}"
+        paths = {config.SHARED_LIB_DIR}
+        if config.LD_LIBRARY_PATH:
+            for path in config.LD_LIBRARY_PATH.split(":"):
+                paths.add(path)

-        ld_path = os.environ.get("LD_LIBRARY_PATH", None)
-        if ld_path is not None:
-            config.LD_LIBRARY_PATH = f"{ld_path}:{config.LD_LIBRARY_PATH}"
-
-        current_lib_paths = find_shared_dirs_on_ubuntu()
-        current_lib_paths = ":".join(current_lib_paths)
-        if current_lib_paths:
-            config.LD_LIBRARY_PATH = f"{current_lib_paths}:{config.LD_LIBRARY_PATH}"
+        lib_paths = find_shared_dirs_on_ubuntu()
+        for path in lib_paths:
+            paths.add(path)

+        ld_library_path = ":".join(paths)
+        config.LD_LIBRARY_PATH = ld_library_path
         os.environ["LD_LIBRARY_PATH"] = config.LD_LIBRARY_PATH

     def _init_sentry(self):
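To illustrate the new LD_LIBRARY_PATH handling above, here is a small self-contained sketch, assuming only the standard library: the bundled lib dir, any pre-existing LD_LIBRARY_PATH entries, and the discovered system library dirs are collected into one deduplicated set, then joined back into a colon-separated string. The function name and sample paths are illustrative, not part of the project.

import os

def build_ld_library_path(shared_lib_dir, existing, system_dirs):
    """Merge library dirs into one deduplicated LD_LIBRARY_PATH string."""
    paths = {shared_lib_dir}              # always include the bundled libs
    if existing:                          # keep whatever was already configured
        paths.update(existing.split(":"))
    paths.update(system_dirs)             # plus dirs discovered on the system
    return ":".join(paths)                # note: a set does not preserve order

# Illustrative usage; in dds.py the inputs come from config and find_shared_dirs_on_ubuntu().
merged = build_ld_library_path(
    "/opt/deepdataspace/lib",                         # stand-in for config.SHARED_LIB_DIR
    os.environ.get("LD_LIBRARY_PATH"),
    ["/usr/lib/x86_64-linux-gnu", "/usr/local/lib"],  # stand-in for discovered dirs
)
os.environ["LD_LIBRARY_PATH"] = merged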
setup.py (1 addition, 1 deletion)

@@ -2,7 +2,7 @@

 from setuptools import setup

-version = "0.9.1"
+version = "0.9.2"

 description = "A tool for CV dataset labeling, visualizing and analysing"
 with open("README.md", "r", encoding="utf8") as fp:
