Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 3 additions & 1 deletion .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -43,4 +43,6 @@ datavault-webapp/pids
# ignore intellij run files
.run/
TEMPLATES/*
dv5/local-db/docker/backup.D.SPEED.sql
dv5/local-db/docker/backup.D.SPEED.sql
# .sdkmanrc can set the Java version for the IntelliJ IDE, and for terminals too if 'sdk config set sdkman_auto_env true' is enabled
.sdkmanrc
Original file line number Diff line number Diff line change
Expand Up @@ -55,9 +55,9 @@
JacksonConfig.class, PropertiesConfig.class, EncryptionConfig.class, ActuatorConfig.class,
ScheduleConfig.class, InitialiseConfig.class,
SecurityActuatorConfig.class, SecurityConfig.class, ControllerConfig.class,
ServiceConfig.class, DatabaseConfig.class,
DatabaseConfig.class,
LdapConfig.class, EmailConfig.class, EmailLocalConfig.class, RabbitConfig.class,
StorageClassNameResolverConfig.class, WebConfig.class
StorageClassNameResolverConfig.class, WebConfig.class, ServiceConfig.class
})
@Slf4j
//@EnableJSONDoc
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -10,14 +10,14 @@
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.collections4.CollectionUtils;
import org.datavaultplatform.broker.queue.Sender;
import org.datavaultplatform.broker.services.AdminDepositService;
import org.datavaultplatform.broker.services.*;
import org.datavaultplatform.common.PropNames;
import org.datavaultplatform.common.event.Event;

import org.datavaultplatform.common.model.*;
import org.datavaultplatform.common.response.*;

import org.datavaultplatform.common.task.Task;
import org.jsondoc.core.annotation.Api;
import org.jsondoc.core.annotation.ApiHeader;
import org.jsondoc.core.annotation.ApiHeaders;
Expand All @@ -33,8 +33,6 @@
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.*;

import com.fasterxml.jackson.databind.ObjectMapper;

import jakarta.servlet.http.HttpServletRequest;

/**
Expand All @@ -60,6 +58,7 @@ public class AdminController {
private final ExternalMetadataService externalMetadataService;
private final AuditsService auditsService;
private final RolesAndPermissionsService permissionsService;
private final AdminDepositService adminDepositService;
private final Sender sender;
private final String optionsDir;
private final String tempDir;
Expand All @@ -73,7 +72,8 @@ public AdminController(VaultsService vaultsService, UsersService usersService,
DepositsService depositsService, RetrievesService retrievesService,
EventService eventService, ArchiveStoreService archiveStoreService, JobsService jobsService,
ExternalMetadataService externalMetadataService, AuditsService auditsService,
RolesAndPermissionsService permissionsService, Sender sender,
RolesAndPermissionsService permissionsService, AdminDepositService adminDepositService,
Sender sender,
@Value("${optionsDir:#{null}}") String optionsDir,
@Value("${tempDir:#{null}}") String tempDir,
@Value("${s3.bucketName:#{null}}") String bucketName,
Expand All @@ -90,6 +90,7 @@ public AdminController(VaultsService vaultsService, UsersService usersService,
this.externalMetadataService = externalMetadataService;
this.auditsService = auditsService;
this.permissionsService = permissionsService;
this.adminDepositService = adminDepositService;
this.sender = sender;
this.optionsDir = optionsDir;
this.tempDir = tempDir;
Expand Down Expand Up @@ -356,61 +357,10 @@ public ResponseEntity<Object> deleteDeposit(@RequestHeader(HEADER_USER_ID) Strin
if (user == null) {
throw new Exception("User '" + userID + "' does not exist");
}

List<Job> jobs = deposit.getJobs();
for (Job job : jobs) {
if (job.isError() == false && job.getState() != job.getStates().size() - 1) {
// There's an in-progress job for this deposit
throw new IllegalArgumentException("Job in-progress for this Deposit");
}
}

List<ArchiveStore> archiveStores = archiveStoreService.getArchiveStores();
if (archiveStores.isEmpty()) {
throw new Exception("No configured archive storage");
}
LOGGER.info("Delete deposit archiveStores : {}", archiveStores);
archiveStores = this.addArchiveSpecificOptions(archiveStores);

// Create a job to track this delete
Job job = new Job("org.datavaultplatform.worker.tasks.Delete");
jobsService.addJob(deposit, job);

// Ask the worker to process the data delete
try {
HashMap<String, String> deleteProperties = new HashMap<>();
deleteProperties.put(PropNames.DEPOSIT_ID, deposit.getID());
deleteProperties.put(PropNames.BAG_ID, deposit.getBagId());
deleteProperties.put(PropNames.ARCHIVE_SIZE, Long.toString(deposit.getArchiveSize()));
deleteProperties.put(PropNames.USER_ID, user.getID());
deleteProperties.put(PropNames.NUM_OF_CHUNKS, Integer.toString(deposit.getNumOfChunks()));
for (Archive archive : deposit.getArchives()) {
deleteProperties.put(archive.getArchiveStore().getID(), archive.getArchiveId());
}

// Add a single entry for the user file storage
Map<String, String> userFileStoreClasses = new HashMap<>();
Map<String, Map<String, String>> userFileStoreProperties = new HashMap<>();
//userFileStoreClasses.put(storageID, userStore.getStorageClass());
//userFileStoreProperties.put(storageID, userStore.getProperties());


Task deleteTask = new Task(
job, deleteProperties, archiveStores,
userFileStoreProperties, userFileStoreClasses,
null, null,
null,
null, null,
null, null, null);
ObjectMapper mapper = new ObjectMapper();
String jsonDelete = mapper.writeValueAsString(deleteTask);
sender.send(jsonDelete);
} catch (Exception e) {
LOGGER.error("Exception while deleting a deposit", e);
}
adminDepositService.deleteDeposit(deposit, user);
return new ResponseEntity<>(HttpStatus.OK);

}

private List<ArchiveStore> addArchiveSpecificOptions(List<ArchiveStore> archiveStores) {
if (archiveStores != null && ! archiveStores.isEmpty()) {
for (ArchiveStore archiveStore : archiveStores) {
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -35,6 +35,7 @@
import org.datavaultplatform.common.event.audit.ChunkAuditStarted;
import org.datavaultplatform.common.event.delete.DeleteComplete;
import org.datavaultplatform.common.event.delete.DeleteStart;
import org.datavaultplatform.common.event.delete.DeletedChunk;
import org.datavaultplatform.common.event.deposit.ChunksDigestEvent;
import org.datavaultplatform.common.event.deposit.Complete;
import org.datavaultplatform.common.event.deposit.CompleteCopyUpload;
Expand All @@ -59,6 +60,7 @@
import org.datavaultplatform.common.model.Retrieve;
import org.datavaultplatform.common.model.User;
import org.datavaultplatform.common.model.Vault;
import org.datavaultplatform.common.model.Archive;
import org.springframework.amqp.core.Message;
import org.springframework.amqp.core.MessageListener;
import org.springframework.amqp.rabbit.annotation.RabbitListener;
Expand Down Expand Up @@ -364,6 +366,8 @@ void processEvent(String messageBody, Event event, Deposit deposit, Job job)
process28UploadedToUserStore(uploadedToUserStore);
} else if (event instanceof UserStoreSpaceAvailableChecked userStoreSpaceAvailableChecked ){
process29UserStoreSpaceAvailableChecked(userStoreSpaceAvailableChecked);
} else if (event instanceof DeletedChunk deletedChunk ){
process30DeletedChunk(deletedChunk);
} else {
throw new Exception(
String.format("Failed to process unknown Event class[%s]message[%s]", event.getClass(),
Expand Down Expand Up @@ -881,6 +885,18 @@ protected void process29UserStoreSpaceAvailableChecked(UserStoreSpaceAvailableCh
ignore(event);
}

protected void process30DeletedChunk(DeletedChunk deletedChunk) {
    // Within the deposit-processing callback, look up the Archive entity for the
    // event's archiveId and attach it to the event; do nothing if either the id
    // or the matching Archive is absent.
    processDeposit(deletedChunk.getDeposit(), unusedDeposit -> {
        String chunkArchiveId = deletedChunk.getArchiveId();
        if (chunkArchiveId == null) {
            return;
        }
        Archive matched = archivesService.getArchiveByArchiveId(chunkArchiveId);
        if (matched != null) {
            deletedChunk.setArchive(matched);
        }
    });
}

String getUserSubject(String type) {
String userSubjectKey = USER_DEPOSIT_PREFIX + type;
log.info("User Subject key: {}", userSubjectKey);
Expand Down
Original file line number Diff line number Diff line change
@@ -1,11 +1,8 @@
package org.datavaultplatform.broker.scheduled;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.datavaultplatform.broker.queue.Sender;
import org.datavaultplatform.broker.services.AdminDepositService;
import org.datavaultplatform.broker.services.*;
import org.datavaultplatform.common.PropNames;
import org.datavaultplatform.common.model.*;
import org.datavaultplatform.common.task.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.scheduling.annotation.Scheduled;
Expand All @@ -28,27 +25,17 @@ public class CheckForDelete implements ScheduledTask {
private static final Logger log = LoggerFactory.getLogger(CheckForDelete.class);

private final VaultsService vaultsService;
private final VaultsReviewService vaultsReviewService;

private final DepositsReviewService depositsReviewService;
private final ArchiveStoreService archiveStoreService;
private final RolesAndPermissionsService rolesAndPermissionsService;
private final UsersService usersService;
private final JobsService jobsService;
private final Sender sender;
private final AdminDepositService adminDepositService;

@Autowired
public CheckForDelete(VaultsService vaultsService, VaultsReviewService vaultsReviewService,
DepositsReviewService depositsReviewService, ArchiveStoreService archiveStoreService,
RolesAndPermissionsService rolesAndPermissionsService, UsersService usersService,
JobsService jobsService, Sender sender) {
public CheckForDelete(VaultsService vaultsService,
DepositsReviewService depositsReviewService,
AdminDepositService adminDepositService) {
this.vaultsService = vaultsService;
this.vaultsReviewService = vaultsReviewService;
this.depositsReviewService = depositsReviewService;
this.archiveStoreService = archiveStoreService;
this.rolesAndPermissionsService = rolesAndPermissionsService;
this.usersService = usersService;
this.jobsService = jobsService;
this.sender = sender;
this.adminDepositService = adminDepositService;
}

@Override
Expand Down Expand Up @@ -114,57 +101,6 @@ public void execute() throws Exception {

// todo : move this method to a service class
private void deleteDeposit(Deposit deposit) throws Exception {
log.info("Delete deposit with name " + deposit.getName());

List<Job> jobs = deposit.getJobs();
for (Job job : jobs) {
if (job.isError() == false && job.getState() != job.getStates().size() - 1) {
// There's an in-progress job for this deposit
throw new IllegalArgumentException("Job in-progress for this Deposit");
}
}

List<ArchiveStore> archiveStores = archiveStoreService.getArchiveStores();
if (archiveStores.isEmpty()) {
throw new Exception("No configured archive storage");
}

log.info("Delete deposit archiveStores : {}", archiveStores);
archiveStores = archiveStoreService.addArchiveSpecificOptions(archiveStores);

// Create a job to track this delete
Job job = new Job("org.datavaultplatform.worker.tasks.Delete");
jobsService.addJob(deposit, job);

// Ask the worker to process the data delete

HashMap<String, String> deleteProperties = new HashMap<>();
deleteProperties.put(PropNames.DEPOSIT_ID, deposit.getID());
deleteProperties.put(PropNames.BAG_ID, deposit.getBagId());
deleteProperties.put(PropNames.ARCHIVE_SIZE, Long.toString(deposit.getArchiveSize()));
// We have no record of who requested the delete, is that acceptable?
deleteProperties.put(PropNames.USER_ID, null);
deleteProperties.put(PropNames.NUM_OF_CHUNKS, Integer.toString(deposit.getNumOfChunks()));
for (Archive archive : deposit.getArchives()) {
deleteProperties.put(archive.getArchiveStore().getID(), archive.getArchiveId());
}

// Add a single entry for the user file storage
Map<String, String> userFileStoreClasses = new HashMap<>();
Map<String, Map<String, String>> userFileStoreProperties = new HashMap<>();
//userFileStoreClasses.put(storageID, userStore.getStorageClass());
//userFileStoreProperties.put(storageID, userStore.getProperties());

Task deleteTask = new Task(
job, deleteProperties, archiveStores,
userFileStoreProperties, userFileStoreClasses,
null, null,
null,
null, null,
null, null, null);
ObjectMapper mapper = new ObjectMapper();
String jsonDelete = mapper.writeValueAsString(deleteTask);
sender.send(jsonDelete);

adminDepositService.deleteDeposit(deposit, null);
}
}
Original file line number Diff line number Diff line change
@@ -0,0 +1,101 @@
package org.datavaultplatform.broker.services;

import com.fasterxml.jackson.databind.ObjectMapper;
import org.datavaultplatform.broker.queue.Sender;
import org.datavaultplatform.common.PropNames;
import org.datavaultplatform.common.model.*;
import org.datavaultplatform.common.task.Task;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;

import org.springframework.boot.autoconfigure.condition.ConditionalOnBean;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;

import java.util.HashMap;
import java.util.List;
import java.util.Map;

@Service
@Transactional
@ConditionalOnBean(Sender.class)
public class AdminDepositService {

    private static final Logger LOG = LoggerFactory.getLogger(AdminDepositService.class);

    // ObjectMapper is thread-safe once configured; share one instance instead of
    // allocating a new one on every delete request.
    private static final ObjectMapper MAPPER = new ObjectMapper();

    private final ArchiveStoreService archiveStoreService;
    private final JobsService jobsService;
    private final Sender sender;
    // Propagated to the worker so it knows whether to emit per-chunk DeletedChunk events.
    private final boolean workersSendDeletedChunkEvents;

    public AdminDepositService(ArchiveStoreService archiveStoreService,
                               JobsService jobsService, Sender sender,
                               @Value("${workers.send.deleted.chunk.events:false}") boolean workersSendDeletedChunkEvents) {
        this.archiveStoreService = archiveStoreService;
        this.jobsService = jobsService;
        this.sender = sender;
        this.workersSendDeletedChunkEvents = workersSendDeletedChunkEvents;
    }

    /**
     * Queues an asynchronous delete of the given deposit.
     * <p>
     * Creates a tracking {@link Job}, builds a Delete {@link Task} describing the
     * deposit's archives, serializes it to JSON and hands it to the worker queue
     * via the configured {@link Sender}.
     *
     * @param deposit the deposit to delete; must have no in-progress jobs
     * @param user    the requesting user, or {@code null} for scheduled deletes
     * @throws IllegalArgumentException if a job is still in progress for the deposit
     * @throws Exception                if no archive stores are configured
     */
    public void deleteDeposit(Deposit deposit, User user) throws Exception {
        final String userId = user == null ? null : user.getID();
        LOG.info("Delete deposit with name [{}] userId[{}]", deposit.getName(), userId);

        // Refuse to delete while any non-errored job on this deposit has not
        // yet reached its final state.
        List<Job> jobs = deposit.getJobs();
        for (Job job : jobs) {
            if (!job.isError() && job.getState() != job.getStates().size() - 1) {
                // There's an in-progress job for this deposit
                throw new IllegalArgumentException("Job in-progress for this Deposit");
            }
        }

        List<ArchiveStore> archiveStores = archiveStoreService.getArchiveStores();
        if (archiveStores.isEmpty()) {
            throw new Exception("No configured archive storage");
        }

        LOG.info("Delete deposit archiveStores : {}", archiveStores);
        archiveStores = archiveStoreService.addArchiveSpecificOptions(archiveStores);

        // Create a job to track this delete
        Job job = new Job("org.datavaultplatform.worker.tasks.Delete");
        jobsService.addJob(deposit, job);

        // Ask the worker to process the data delete. A failure here is logged but
        // not rethrown: once the tracking job exists, the send is best-effort
        // (this mirrors the behaviour of the code this service replaced).
        try {
            HashMap<String, String> deleteProperties = new HashMap<>();
            deleteProperties.put(PropNames.DEPOSIT_ID, deposit.getID());
            deleteProperties.put(PropNames.BAG_ID, deposit.getBagId());
            deleteProperties.put(PropNames.ARCHIVE_SIZE, Long.toString(deposit.getArchiveSize()));
            // NOTE : for scheduled deletes the userId will be null
            deleteProperties.put(PropNames.USER_ID, userId);
            deleteProperties.put(PropNames.NUM_OF_CHUNKS, Integer.toString(deposit.getNumOfChunks()));
            // One entry per archive: archive-store id -> archive id on that store.
            for (Archive archive : deposit.getArchives()) {
                deleteProperties.put(archive.getArchiveStore().getID(), archive.getArchiveId());
            }
            deleteProperties.put(PropNames.WORKERS_SEND_DELETED_CHUNK_EVENTS,
                Boolean.toString(workersSendDeletedChunkEvents));

            // User file storage is not needed for a delete; pass empty maps.
            Map<String, String> userFileStoreClasses = new HashMap<>();
            Map<String, Map<String, String>> userFileStoreProperties = new HashMap<>();

            Task deleteTask = new Task(
                job, deleteProperties, archiveStores,
                userFileStoreProperties, userFileStoreClasses,
                null, null,
                null,
                null, null,
                null, null, null);
            String jsonDelete = MAPPER.writeValueAsString(deleteTask);
            sender.send(jsonDelete);
        } catch (Exception e) {
            LOG.error("Exception while deleting a deposit", e);
        }
    }
}
Loading