diff --git a/.github/workflows/docker-image.yml b/.github/workflows/docker-image.yml index d07d37c5..891908ac 100644 --- a/.github/workflows/docker-image.yml +++ b/.github/workflows/docker-image.yml @@ -1,20 +1,24 @@ -name: Docker Image CI +name: Docker Image Publish CI ( Dockerhub and ECR ) on: - push: - branches: [ "master" ] - pull_request: - branches: [ "master" ] + release: + types: [created] jobs: - build: - runs-on: ubuntu-latest - + outputs: + id: git_tag_output steps: - name: Checkout uses: actions/checkout@v4 + - name: Determine tag type + id: tag_type + run: echo "::set-output name=is_release_tag::$(echo ${GITHUB_REF#refs/tags/} | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || echo 'false')" + - name: Set version + id: version + run: | + echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV - name: Set up QEMU uses: docker/setup-qemu-action@v3 - name: Set up Docker Buildx @@ -25,11 +29,42 @@ jobs: DOCKER_PASSWORD: ${{secrets.DOCKER_PASSWORD}} run: docker login -u $DOCKER_USER -p $DOCKER_PASSWORD - - name: Build and push - uses: docker/build-push-action@v5 + - name: Build and push dockerhub + uses: docker/build-push-action@v5 with: context: . platforms: linux/amd64,linux/arm64 push: true - tags: onedatashare/transfer_service:latest + tags: onedatashare/transfer_service:${{ env.VERSION }} + + push-to-ecr: + needs: build + runs-on: ubuntu-latest + steps: + - name: Determine tag type + id: tag_type + run: echo "::set-output name=is_release_tag::$(echo ${GITHUB_REF#refs/tags/} | grep -E '^v[0-9]+\.[0-9]+\.[0-9]+$' || echo 'false')" + - name: Set version + id: version + run: | + echo "VERSION=${GITHUB_REF#refs/tags/}" >> $GITHUB_ENV + - name: Setup AWS ECR Details + uses: aws-actions/configure-aws-credentials@v3 + with: + aws-access-key-id: ${{ secrets.AWS_ACCESS_KEY_ID }} + aws-secret-access-key: ${{ secrets.AWS_SECRET_ACCESS_KEY }} + aws-region: us-east-1 + - name: Login to Amazon ECR + id: login-pf-aws-ecr + uses: aws-actions/amazon-ecr-login@v2 + + - name: Build and push the tagged docker image to Amazon ECR + env: + ECR_REGISTRY: ${{ steps.login-pf-aws-ecr.outputs.registry }} + ECR_REPOSITORY: onedatashare/transfer_service + IMAGE_TAG: ${{ env.VERSION }} + run: | + docker pull onedatashare/transfer_service:$IMAGE_TAG + docker tag onedatashare/transfer_service:$IMAGE_TAG $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG + docker push $ECR_REGISTRY/$ECR_REPOSITORY:$IMAGE_TAG \ No newline at end of file diff --git a/Dockerfile b/Dockerfile index 54411a54..fcc57b88 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,54 +1,27 @@ -FROM maven:3.9.5-amazoncorretto-21 AS build +FROM maven:3.9.7-amazoncorretto-21 AS build COPY src /home/app/src COPY pom.xml /home/app RUN mvn -f /home/app/pom.xml clean package -DskipTests # Final Image -FROM amazoncorretto:21-alpine-jdk +FROM amazoncorretto:21-alpine3.18-jdk +RUN apk update RUN apk --no-cache add python3-dev py3-pip build-base gcc linux-headers -RUN pip3 install pmeter-ods==1.0.8 + +RUN pip install pmeter-ods COPY --from=build /home/app/target/ods-transfer-service-0.0.1-SNAPSHOT.jar /usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar ENV PIP_ROOT_USER_ACTION=ignore -ENV NODE_NAME="${NODE_NAME}" -ENV USER_NAME="${USER_NAME}" -ENV APP_NAME="${USER_NAME}"-"${NODE_NAME}" - -ENV CONNECTOR_QUEUE="${APP_NAME}" -ENV ODS_GDRIVE_CLIENT_ID="${ODS_GDRIVE_CLIENT_ID}" -ENV ODS_GDRIVE_CLIENT_SECRET="${ODS_GDRIVE_CLIENT_SECRET}" -ENV ODS_GDRIVE_PROJECT_ID="onedatashare-dev" -ENV EUREKA_URI="${EUREKA_URI}" -ENV EUREKA_PASS="${EUREKA_PASS}" -ENV EUREKA_USER="${EUREKA_USER}" 
-ENV FOLDER_WITH_CERTS="${FOLDER_WITH_CERTS}" -COPY ${FOLDER_WITH_CERTS} /certs/ -ENV COCKROACH_URI="${COCKROACH_URI}" -ENV COCKROACH_USER="${COCKROACH_USER}" -ENV COCKROACH_PASS="${COCKROACH_PASS}" -ENV RMQ_ADDRESS="amqps://b-0e720b16-3ea7-4227-ad65-6cce3704121c.mq.us-east-2.amazonaws.com:5671" - -#use ODS user for your private queue. -#create creds through aws console -ENV AMPQ_USER="${AMPQ_USER}" -ENV AMPQ_PWD="${AMPQ_PWD}" #change to monitor the active NIC ENV PMETER_CLI_OPTIONS="-NS" ENV PMETER_NIC_INTERFACE="${PMETER_NIC_INTERFACE:-eth0}" -ENV INFLUX_ORG="${INFLUX_ORG}" -ENV INFLUX_BUCKET="${USER_NAME}" -ENV INFLUX_TOKEN="${INFLUX_TOKEN}" -ENV INFLUX_URI="https://influxdb.onedatashare.org" ENV ENABLE_PMETER="true" ENV PMETER_CRON_EXP="*/15 * * * * *" - -ENV OPTIMIZER_URL="${OPTIMIZER_URL}" -ENV OPTIMIZER_ENABLE="${OPTIMIZER_ENABLE}" -ENV SPRING_PROFILE="${SPRING_PROFILE:-hsql}" +ENV SPRING_PROFILES_ACTIVE=aws,virtual,cockroach ENV PATH "/home/ods/.local/bin:${PATH}" @@ -56,4 +29,4 @@ RUN mkdir -p $HOME/.pmeter/ RUN touch $HOME/.pmeter/transfer_service_pmeter_measure.txt EXPOSE 8092 -ENTRYPOINT ["java", "-Dspring.profiles.active=hsql","-jar", "/usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar"] \ No newline at end of file +ENTRYPOINT ["java","-jar", "/usr/local/lib/ods-transfer-service-0.0.1-SNAPSHOT.jar"] \ No newline at end of file diff --git a/pom.xml b/pom.xml index a5dd7ab6..1413894c 100644 --- a/pom.xml +++ b/pom.xml @@ -16,7 +16,7 @@ 21 - 2023.0.0-RC1 + 2023.0.2 @@ -36,12 +36,43 @@ false + + private-repository + Hazelcast Private Repository + https://repository.hazelcast.com/release/ + + true + + + false + + com.fasterxml.jackson.datatype jackson-datatype-jsr310 + + com.hazelcast + hazelcast-enterprise + 5.5.1 + + + + org.springframework.cloud + spring-cloud-starter-vault-config + + + org.springframework.vault + spring-vault-core + 3.1.2 + + + org.springframework.cloud + spring-cloud-starter-bootstrap + 4.1.3 + com.influxdb influxdb-client-java @@ -52,10 +83,6 @@ commons-pool2 2.11.1 - - org.springframework.boot - spring-boot-starter-amqp - com.box box-java-sdk @@ -130,20 +157,25 @@ org.springframework.boot spring-boot-starter-actuator + + org.jsoup + jsoup + 1.17.2 + org.springframework.boot spring-boot-starter-web - - - - - - - - - - - + + + + + + + + + + + org.springframework.cloud spring-cloud-starter-netflix-eureka-client @@ -157,6 +189,16 @@ com.amazonaws aws-java-sdk-s3 + + software.amazon.awssdk + auth + 2.25.67 + + + software.amazon.awssdk + sts + 2.25.67 + jakarta.servlet jakarta.servlet-api diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java new file mode 100644 index 00000000..60f7a005 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/Enum/MessageType.java @@ -0,0 +1,5 @@ +package org.onedatashare.transferservice.odstransferservice.Enum; + +public enum MessageType { + CARBON_AVG_REQUEST, TRANSFER_JOB_REQUEST, APPLICATION_PARAM_CHANGE, STOP_JOB_REQUEST, CARBON_IP_REQUEST +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/OdsTransferService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/OdsTransferService.java index 0a890310..c6b8bcfb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/OdsTransferService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/OdsTransferService.java @@ 
-1,6 +1,5 @@ package org.onedatashare.transferservice.odstransferservice; -import org.springframework.batch.core.configuration.annotation.EnableBatchProcessing; import org.springframework.boot.SpringApplication; import org.springframework.boot.autoconfigure.SpringBootApplication; import org.springframework.cloud.client.discovery.EnableDiscoveryClient; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java index 826e235c..1d0d6ecb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/BatchConfig.java @@ -7,38 +7,40 @@ import org.springframework.context.annotation.Configuration; import org.springframework.core.task.SimpleAsyncTaskExecutor; import org.springframework.jdbc.datasource.DataSourceTransactionManager; +import org.springframework.retry.backoff.BackOffPolicy; +import org.springframework.retry.backoff.ExponentialBackOffPolicy; import org.springframework.transaction.PlatformTransactionManager; import javax.sql.DataSource; -import java.util.HashSet; -import java.util.Set; +import java.util.concurrent.TimeUnit; @Configuration public class BatchConfig { -// @Bean -// public JobLauncher jobLauncher(JobRepository jobRepository) { -// TaskExecutorJobLauncher taskExecutorJobLauncher = new TaskExecutorJobLauncher(); -// taskExecutorJobLauncher.setJobRepository(jobRepository); -// return taskExecutorJobLauncher; -// } - - @Bean - public Set jobIds() { - return new HashSet<>(); - } - @Bean public PlatformTransactionManager transactionManager(DataSource dataSource) { return new DataSourceTransactionManager(dataSource); } @Bean - public JobLauncher asyncJobLauncher(JobRepository jobRepository) { + public JobLauncher jobLauncher(JobRepository jobRepository) { TaskExecutorJobLauncher jobLauncher = new TaskExecutorJobLauncher(); jobLauncher.setJobRepository(jobRepository); - jobLauncher.setTaskExecutor(new SimpleAsyncTaskExecutor()); + SimpleAsyncTaskExecutor taskExecutor = new SimpleAsyncTaskExecutor(); + taskExecutor.setConcurrencyLimit(4); + jobLauncher.setTaskExecutor(taskExecutor); return jobLauncher; } + + + @Bean + public BackOffPolicy backOffPolicy() { + ExponentialBackOffPolicy backOffPolicy = new ExponentialBackOffPolicy(); + backOffPolicy.setInitialInterval(TimeUnit.SECONDS.toMillis(5)); + backOffPolicy.setMultiplier(2.0); + backOffPolicy.setMaxInterval(TimeUnit.DAYS.toMillis(1)); + return backOffPolicy; + } + } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java new file mode 100644 index 00000000..e3cc56a1 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/HazelcastClientConfig.java @@ -0,0 +1,105 @@ +package org.onedatashare.transferservice.odstransferservice.config; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.client.HazelcastClient; +import com.hazelcast.client.config.ClientConfig; +import com.hazelcast.collection.IQueue; +import com.hazelcast.config.IndexType; +import com.hazelcast.config.SSLConfig; +import com.hazelcast.core.HazelcastInstance; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import 
org.onedatashare.transferservice.odstransferservice.service.VaultSSLService; +import org.springframework.beans.factory.annotation.Qualifier; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.context.annotation.Bean; +import org.springframework.context.annotation.Configuration; +import org.springframework.context.annotation.Profile; +import org.springframework.core.env.Environment; +import org.springframework.vault.core.VaultTemplate; + +import java.util.Properties; +import java.util.Set; +import java.util.UUID; + +@Configuration +public class HazelcastClientConfig { + + private final Environment env; + private final ObjectMapper objectMapper; + private final VaultSSLService vaultSslService; + + public HazelcastClientConfig(Environment environment, ObjectMapper objectMapper, VaultTemplate vaultTemplate, VaultSSLService vaultSSLService) { + this.env = environment; + this.objectMapper = objectMapper; + this.vaultSslService = vaultSSLService; + } + + @Value("${spring.application.name}") + private String appName; + + @Bean + @Qualifier("clientConfig") + @Profile("local") + public ClientConfig devClientConfig(SSLConfig sslConfig) { + ClientConfig clientConfig = new ClientConfig(); + clientConfig.setClusterName("dev-scheduler-cluster"); + clientConfig.getNetworkConfig().setSSLConfig(sslConfig); + clientConfig.setInstanceName(this.appName); + return clientConfig; + } + + @Bean + @Qualifier("clientConfig") + @Profile({"prod", "eks", "ec2",}) + public ClientConfig prodClientConfig(SSLConfig sslConfig) { + ClientConfig clientConfig = new ClientConfig(); + clientConfig.setClusterName("prod-scheduler-cluster"); + clientConfig.getNetworkConfig().setSSLConfig(sslConfig); + clientConfig.getNetworkConfig().addAddress(env.getProperty("hz.ipaddr", "localhost")); + return clientConfig; + } + + @Bean + public SSLConfig sslConfig() { + Properties properties = new Properties(); + properties.setProperty("protocol", "TLSv1.2"); + properties.setProperty("mutualAuthentication", "OPTIONAL"); + properties.setProperty("trustStore", this.vaultSslService.getStorePath().toAbsolutePath().toString()); + properties.setProperty("trustStorePassword", env.getProperty("hz.keystore.password", "changeit")); + properties.setProperty("trustStoreType", "PKCS12"); + properties.setProperty("keyMaterialDuration", this.vaultSslService.getStoreDuration().toString()); + properties.setProperty("validateIdentity", "false"); + + SSLConfig sslConfig = new SSLConfig(); + sslConfig.setEnabled(true); + sslConfig.setProperties(properties); + return sslConfig; + } + + @Bean + public HazelcastInstance hazelcastInstance(ClientConfig clientConfig) { + clientConfig.addLabel(this.env.getProperty("spring.application.name")); + return HazelcastClient.newHazelcastClient(clientConfig); + } + + @Bean + public IMap fileTransferNodeRegistrationMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { + return hazelcastInstance.getMap("file-transfer-node-map"); + } + + @Bean + public IMap fileTransferScheduleMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { + return hazelcastInstance.getMap("file-transfer-schedule-map"); + } + + @Bean + public IMap carbonIntensityMap(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { + return hazelcastInstance.getMap("carbon-intensity-map"); + } + + @Bean + public IQueue messageQueue(@Qualifier("hazelcastInstance") HazelcastInstance hazelcastInstance) { + return hazelcastInstance.getQueue(appName); + } +} diff --git 
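[Editor's sketch, not part of the diff] The HazelcastClientConfig above exposes the scheduler's distributed maps and this node's queue as injectable beans. As a rough illustration of how a component might publish JSON into one of those maps — the publisher class, the String key type, and the generic parameters are assumptions (the bean declarations appear raw here, and the diff also introduces a CarbonIntensityMapKey that may be the real key) — consider:

package org.onedatashare.transferservice.odstransferservice.service;

import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.hazelcast.core.HazelcastJsonValue;
import com.hazelcast.map.IMap;
import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasurement;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.stereotype.Service;

// Hypothetical helper: writes a CarbonMeasurement into the "carbon-intensity-map"
// exposed by HazelcastClientConfig. Only the bean name and CarbonMeasurement come
// from this diff; everything else is illustrative.
@Service
public class CarbonIntensityPublisher {

    private final IMap<String, HazelcastJsonValue> carbonIntensityMap;
    private final ObjectMapper objectMapper;

    public CarbonIntensityPublisher(@Qualifier("carbonIntensityMap") IMap<String, HazelcastJsonValue> carbonIntensityMap,
                                    ObjectMapper objectMapper) {
        this.carbonIntensityMap = carbonIntensityMap;
        this.objectMapper = objectMapper;
    }

    public void publish(String key, CarbonMeasurement measurement) throws JsonProcessingException {
        // Store the entry as queryable JSON rather than a serialized Java object.
        carbonIntensityMap.put(key, new HazelcastJsonValue(objectMapper.writeValueAsString(measurement)));
    }
}
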
a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java deleted file mode 100644 index e952bdcd..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/MetricsConfig.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.config; - -import com.fasterxml.jackson.databind.ObjectMapper; -import com.fasterxml.jackson.databind.SerializationFeature; -import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; -import lombok.Data; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.retry.backoff.FixedBackOffPolicy; -import org.springframework.retry.support.RetryTemplate; - -@Configuration -@Data -public class MetricsConfig { - @Bean - public ObjectMapper pmeterMapper() { - ObjectMapper objectMapper = new ObjectMapper(); - objectMapper.registerModule(new JavaTimeModule()); - objectMapper.configure(SerializationFeature.WRITE_DATE_KEYS_AS_TIMESTAMPS, false); - return objectMapper; - } - - @Bean - public RetryTemplate retryTemplateForReaderAndWriter() { - RetryTemplate retryTemplate = new RetryTemplate(); - FixedBackOffPolicy fixedBackOffPolicy = new FixedBackOffPolicy(); - fixedBackOffPolicy.setBackOffPeriod(2000l); - retryTemplate.setBackOffPolicy(fixedBackOffPolicy); - return retryTemplate; - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/OptimizerConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/OptimizerConfig.java deleted file mode 100644 index b164d4f6..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/OptimizerConfig.java +++ /dev/null @@ -1,31 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.config; - -import org.springframework.beans.factory.annotation.Value; -import org.springframework.boot.web.client.RestTemplateBuilder; -import org.springframework.context.annotation.Bean; -import org.springframework.context.annotation.Configuration; -import org.springframework.web.client.RestTemplate; -import org.springframework.web.util.DefaultUriBuilderFactory; - -import java.util.concurrent.Executor; -import java.util.concurrent.ExecutorService; -import java.util.concurrent.Executors; - -@Configuration -public class OptimizerConfig { - - @Value("${optimizer.url}") - private String optimizerUrl; - - @Bean - public RestTemplate optimizerTemplate() { - return new RestTemplateBuilder() - .uriTemplateHandler(new DefaultUriBuilderFactory(optimizerUrl)) - .build(); - } - - @Bean(name ="optimizerTaskExecutor") - public Executor optimizerTaskExecutor(){ - return Executors.newVirtualThreadPerTaskExecutor(); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java deleted file mode 100644 index be81ad00..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/config/RabbitMQConfig.java +++ /dev/null @@ -1,48 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.config; - -import com.google.gson.*; -import org.springframework.amqp.core.*; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.context.annotation.Bean; -import 
org.springframework.context.annotation.Configuration; - -import java.util.Date; -import java.util.Locale; - -@Configuration -public class RabbitMQConfig { - - @Value("${ods.rabbitmq.queue}") - String queueName; - - @Value("${ods.rabbitmq.exchange}") - String exchange; - - @Value("${ods.rabbitmq.routingkey}") - String routingKey; - - @Bean - public Gson gson() { - GsonBuilder builder = new GsonBuilder() - .registerTypeAdapter(Date.class, (JsonDeserializer) (json, typeOfT, context) -> new Date(json.getAsJsonPrimitive().getAsLong())); - return builder.create(); - } - - @Bean - Queue userQueue(){ - //String name, boolean durable, boolean exclusive, boolean autoDelete - return new Queue(this.queueName, true, false, false); - } - - @Bean - public DirectExchange exchange(){ - return new DirectExchange(exchange); - } - - @Bean - public Binding binding(DirectExchange exchange, Queue userQueue){ - return BindingBuilder.bind(userQueue) - .to(exchange) - .with(routingKey); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/DataInfluxConstants.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/DataInfluxConstants.java index bee32170..03d38624 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/DataInfluxConstants.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/DataInfluxConstants.java @@ -2,6 +2,7 @@ public class DataInfluxConstants { public static final String NETWORK_INTERFACE = "interface"; + public static final String IS_RUNNING = "isRunning"; public static final String ODS_USER = "ods_user"; public static final String TRANSFER_NODE_NAME = "transfer_node_name"; public static final String ACTIVE_CORE_COUNT = "active_core_count"; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/ODSConstants.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/ODSConstants.java index b91d561f..a9088f41 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/ODSConstants.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/constant/ODSConstants.java @@ -6,6 +6,7 @@ public class ODSConstants { public static final String TIME = "time"; public static final String SOURCE_HOST = "sourceURI"; public static final String SOURCE_PORT = "sourcePort"; + public static final String CARBON_SCORE_SOURCE = "sourceCarbonScore"; public static final String SOURCE_BASE_PATH = "sourceBasePath"; public static final String DEST_BASE_PATH = "destBasePath"; public static final String FILE_COUNT = "fileCount"; @@ -15,6 +16,7 @@ public class ODSConstants { public static final String DEST_CREDENTIAL_TYPE = "destCredentialType"; public static final String DEST_HOST = "destURI"; public static final String DEST_PORT = "destPort"; + public static final String CARBON_SCORE_DEST = "destCarbonScore"; public static final String CHUNK_SIZE = "chunkSize"; public static final String JOB_UUID = "jobUuid"; public static final String OWNER_ID = "ownerId"; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java deleted file mode 100644 index 46118159..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/consumer/RabbitMQConsumer.java +++ /dev/null @@ -1,86 +0,0 @@ -package 
org.onedatashare.transferservice.odstransferservice.consumer; - - -import com.fasterxml.jackson.annotation.JsonInclude; -import com.fasterxml.jackson.databind.DeserializationFeature; -import com.fasterxml.jackson.databind.ObjectMapper; -import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; -import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; -import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.TransferApplicationParams; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolManager; -import org.onedatashare.transferservice.odstransferservice.service.JobControl; -import org.onedatashare.transferservice.odstransferservice.service.JobParamService; -import org.onedatashare.transferservice.odstransferservice.service.VfsExpander; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.amqp.core.Message; -import org.springframework.amqp.core.Queue; -import org.springframework.amqp.rabbit.annotation.RabbitListener; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.stereotype.Service; - -import java.util.ArrayList; -import java.util.List; - -@Service -public class RabbitMQConsumer { - - private final ObjectMapper objectMapper; - private final ThreadPoolManager threadPoolManager; - Logger logger = LoggerFactory.getLogger(RabbitMQConsumer.class); - - JobControl jc; - - JobLauncher jobLauncher; - - JobParamService jobParamService; - - Queue userQueue; - - VfsExpander vfsExpander; - - public RabbitMQConsumer(VfsExpander vfsExpander, Queue userQueue, JobParamService jobParamService, JobLauncher asyncJobLauncher, JobControl jc, ThreadPoolManager threadPoolManager) { - this.vfsExpander = vfsExpander; - this.userQueue = userQueue; - this.jobParamService = jobParamService; - this.jobLauncher = asyncJobLauncher; - this.jc = jc; - this.objectMapper = new ObjectMapper(); - this.objectMapper.configure(DeserializationFeature.FAIL_ON_UNKNOWN_PROPERTIES, true); - this.objectMapper.setDefaultPropertyInclusion(JsonInclude.Include.ALWAYS); - this.threadPoolManager = threadPoolManager; - } - - @RabbitListener(queues = "#{userQueue}") - public void consumeDefaultMessage(final Message message) { - String jsonStr = new String(message.getBody()); - - logger.info("Message recv: {}", jsonStr); - try { - TransferJobRequest request = objectMapper.readValue(jsonStr, TransferJobRequest.class); - logger.info("Job Recieved: {}", request.toString()); - - if (request.getSource().getType().equals(EndpointType.vfs)) { - List fileExpandedList = vfsExpander.expandDirectory(request.getSource().getInfoList(), request.getSource().getFileSourcePath()); - request.getSource().setInfoList(new ArrayList<>(fileExpandedList)); - } - JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); - jc.setRequest(request); - jobLauncher.run(jc.concurrentJobDefinition(), parameters); - - return; - } catch (Exception e) { - logger.error("Failed to parse jsonStr: {} to TransferJobRequest.java", jsonStr); - } - try { - TransferApplicationParams params = objectMapper.readValue(jsonStr, TransferApplicationParams.class); - logger.info("Parsed TransferApplicationParams: {}", params); - this.threadPoolManager.applyOptimizer(params.getConcurrency(), params.getParallelism()); 
- } catch (Exception e) { - logger.error("Did not apply transfer params due to parsing message failure"); - } - } -} \ No newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java index 3db70aff..15554efd 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/JobMonitor.java @@ -1,11 +1,10 @@ package org.onedatashare.transferservice.odstransferservice.controller; -import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; import org.onedatashare.transferservice.odstransferservice.model.BatchJobData; +import org.onedatashare.transferservice.odstransferservice.service.JobControl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.explore.JobExplorer; import org.springframework.http.ResponseEntity; import org.springframework.web.bind.annotation.GetMapping; @@ -13,7 +12,8 @@ import org.springframework.web.bind.annotation.RequestParam; import org.springframework.web.bind.annotation.RestController; -import java.util.*; +import java.util.List; +import java.util.Optional; @RequestMapping("/api/v1/job") @@ -21,13 +21,13 @@ public class JobMonitor { private final JobExplorer jobExplorer; - private Set jobIds; + private final JobControl jobControl; Logger logger = LoggerFactory.getLogger(JobMonitor.class); - public JobMonitor(JobExplorer jobExplorer, Set jobIds) { + public JobMonitor(JobExplorer jobExplorer, JobControl jobControl) { + this.jobControl = jobControl; this.jobExplorer = jobExplorer; - this.jobIds = jobIds; } @GetMapping("/execution") @@ -44,30 +44,10 @@ public ResponseEntity getJobExecution(@RequestParam("jobId") Optio } } - @GetMapping("/ids") - public ResponseEntity> getJobIdsRun() { - logger.info("Listing Job Ids"); - return ResponseEntity.ok(new ArrayList<>(this.jobIds)); + @GetMapping("/latest") + public ResponseEntity getLatestJobExecution() { + JobExecution jobExecution = this.jobControl.getLatestJobExecution(); + if(jobExecution == null) {return ResponseEntity.ok(null);} + return ResponseEntity.ok(BatchJobData.convertFromJobExecution(jobExecution)); } - -// @GetMapping("/uuid") -// public ResponseEntity> getJobExec(@RequestParam Optional> jobIds){ -// List jobUuids = new ArrayList<>(); -// if(jobIds.isPresent()){ -// for(Long jobId: jobIds.get()){ -// JobExecution jobExecution = this.jobExplorer.getJobExecution(jobId); -// JobParameters jobParameters = jobExecution.getJobParameters(); -// String jobUuid = jobParameters.getString(ODSConstants.JOB_UUID); -// jobUuids.add(UUID.fromString(jobUuid)); -// } -// }else{ -// for(Long jobId : this.jobIds){ -// JobExecution jobExecution = this.jobExplorer.getJobExecution(jobId); -// JobParameters jobParameters = jobExecution.getJobParameters(); -// String jobUuid = jobParameters.getString(ODSConstants.JOB_UUID); -// jobUuids.add(UUID.fromString(jobUuid)); -// } -// } -// return ResponseEntity.ok(jobUuids); -// } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java index fcb2bce3..be31f3d2 100644 --- 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/controller/TransferController.java @@ -1,20 +1,8 @@ package org.onedatashare.transferservice.odstransferservice.controller; -import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; -import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.service.JobControl; -import org.onedatashare.transferservice.odstransferservice.service.JobParamService; -import org.onedatashare.transferservice.odstransferservice.service.VfsExpander; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.Job; import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobParameters; -import org.springframework.batch.core.JobParametersBuilder; -import org.springframework.batch.core.launch.JobLauncher; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.http.HttpStatus; import org.springframework.http.ResponseEntity; import org.springframework.scheduling.annotation.Async; import org.springframework.web.bind.annotation.RequestBody; @@ -22,10 +10,6 @@ import org.springframework.web.bind.annotation.RequestMethod; import org.springframework.web.bind.annotation.RestController; -import java.util.ArrayList; -import java.util.List; -import java.util.Set; - /** * Transfer controller with to initiate transfer request @@ -34,33 +18,17 @@ @RequestMapping("/api/v1/transfer") public class TransferController { - Logger logger = LoggerFactory.getLogger(TransferController.class); - - @Autowired - JobControl jc; - - @Autowired - JobLauncher asyncJobLauncher; + JobControl jobControl; - @Autowired - JobParamService jobParamService; - - @Autowired - VfsExpander vfsExpander; + public TransferController(JobControl jobControl) { + this.jobControl = jobControl; + } @RequestMapping(value = "/start", method = RequestMethod.POST) @Async - public ResponseEntity start(@RequestBody TransferJobRequest request) throws Exception { - logger.info("Controller Entry point"); - if (request.getSource().getType().equals(EndpointType.vfs)) { - List fileExpandedList = vfsExpander.expandDirectory(request.getSource().getInfoList(), request.getSource().getFileSourcePath()); - request.getSource().setInfoList(new ArrayList<>(fileExpandedList)); - } - JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request); - jc.setRequest(request); - Job job = jc.concurrentJobDefinition(); - JobExecution jobExecution = asyncJobLauncher.run(job, parameters); - return ResponseEntity.status(HttpStatus.OK).body("Your batch job has been submitted with \n ID: " + jobExecution.getJobId()); + public ResponseEntity start(@RequestBody TransferJobRequest request) throws Exception { + JobExecution jobExecution = this.jobControl.runJob(request); + return ResponseEntity.ok(jobExecution.getJobId()); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java new file mode 100644 index 00000000..29dd65f6 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/MessageHandler.java @@ -0,0 +1,9 @@ +package 
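[Editor's sketch, not part of the diff] The slimmed-down TransferController now delegates to JobControl.runJob(...), whose body is not included in this diff. Based on the launch logic removed from the controller above, a plausible shape is the following method sketch; the jobParamService and jobLauncher field names on JobControl are assumptions.

// Sketch only: plausibly what runJob consolidates, mirroring the removed controller code.
public JobExecution runJob(TransferJobRequest request) throws Exception {
    JobParameters parameters = jobParamService.translate(new JobParametersBuilder(), request);
    this.setRequest(request);                      // keep the request for step construction
    Job job = this.concurrentJobDefinition();      // build the Spring Batch job definition
    return this.jobLauncher.run(job, parameters);  // launch via the configured JobLauncher
}
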
org.onedatashare.transferservice.odstransferservice.message; + +import com.hazelcast.core.HazelcastJsonValue; + +import java.io.IOException; + +public interface MessageHandler { + void messageHandler(HazelcastJsonValue jsonMsg) throws IOException; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/StopJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/StopJobRequestHandler.java new file mode 100644 index 00000000..880fba4b --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/StopJobRequestHandler.java @@ -0,0 +1,46 @@ +package org.onedatashare.transferservice.odstransferservice.message; + +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastJsonValue; +import org.onedatashare.transferservice.odstransferservice.model.StopJobRequest; +import org.onedatashare.transferservice.odstransferservice.service.JobControl; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.explore.JobExplorer; +import org.springframework.batch.core.launch.JobExecutionNotRunningException; +import org.springframework.batch.core.launch.JobOperator; +import org.springframework.batch.core.launch.NoSuchJobExecutionException; +import org.springframework.stereotype.Service; + +import java.io.IOException; +import java.util.Set; + +@Service +public class StopJobRequestHandler implements MessageHandler { + + private final ObjectMapper objectMapper; + private final JobOperator jobOperator; + private final JobExplorer jobExplorer; + private final Logger logger; + + public StopJobRequestHandler(JobExplorer jobExplorer, JobOperator jobOperator, ObjectMapper objectMapper) { + this.jobOperator = jobOperator; + this.objectMapper = objectMapper; + this.jobExplorer = jobExplorer; + this.logger = LoggerFactory.getLogger(StopJobRequestHandler.class); + } + + @Override + public void messageHandler(HazelcastJsonValue jsonMsg) throws IOException { + StopJobRequest stopJobRequest = this.objectMapper.readValue(jsonMsg.getValue(), StopJobRequest.class); + Set jobExecutionSet = this.jobExplorer.findRunningJobExecutions(stopJobRequest.getJobUuid().toString()); + for (JobExecution jobExecution : jobExecutionSet) { + try { + jobOperator.stop(jobExecution.getId()); + } catch (NoSuchJobExecutionException | JobExecutionNotRunningException e) { + logger.error("Was unable to stop job: {} with error message: {}", jobExecution, e.getMessage()); + } + } + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java new file mode 100644 index 00000000..0efbf52b --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferApplicationParamHandler.java @@ -0,0 +1,31 @@ +package org.onedatashare.transferservice.odstransferservice.message; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastJsonValue; +import org.onedatashare.transferservice.odstransferservice.model.TransferApplicationParams; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + 
+@Service +public class TransferApplicationParamHandler implements MessageHandler { + + private final ObjectMapper mesageObjectMapper; + private final ThreadPoolContract threadPool; + Logger logger = LoggerFactory.getLogger(TransferApplicationParamHandler.class); + + public TransferApplicationParamHandler(ObjectMapper messageObjectMapper, ThreadPoolContract threadPool) { + this.mesageObjectMapper = messageObjectMapper; + this.threadPool = threadPool; + } + + @Override + public void messageHandler(HazelcastJsonValue jsonMsg) throws JsonProcessingException { + String jsonStr = jsonMsg.getValue(); + TransferApplicationParams params = mesageObjectMapper.readValue(jsonStr, TransferApplicationParams.class); + logger.info("Parsed TransferApplicationParams: {}", params); + this.threadPool.applyOptimizer(params.getConcurrency(), params.getParallelism()); + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java new file mode 100644 index 00000000..dbbdcba0 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/message/TransferJobRequestHandler.java @@ -0,0 +1,50 @@ +package org.onedatashare.transferservice.odstransferservice.message; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastJsonValue; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.service.JobControl; +import org.onedatashare.transferservice.odstransferservice.service.expanders.ExpanderFactory; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +import java.util.List; + +@Service +public class TransferJobRequestHandler implements MessageHandler { + + private final ObjectMapper objectMapper; + private final JobControl jobControl; + private final ExpanderFactory expanderFactory; + + Logger logger = LoggerFactory.getLogger(TransferJobRequestHandler.class); + + public TransferJobRequestHandler(ObjectMapper messageObjectMapper, JobControl jobControl, ExpanderFactory expanderFactory) { + this.objectMapper = messageObjectMapper; + this.jobControl = jobControl; + this.expanderFactory = expanderFactory; + } + + @Override + public void messageHandler(HazelcastJsonValue jsonMessage) throws JsonProcessingException { + String jsonStr = jsonMessage.getValue(); + TransferJobRequest request = null; + try { + request = objectMapper.readValue(jsonStr, TransferJobRequest.class); + } catch (JsonProcessingException e) { + logger.error("Failed to parse Transfer Job Request: {}", jsonStr); + return; + } + logger.info("Job Received: {}", request.toString()); + List fileInfo = expanderFactory.getExpander(request.getSource()); + request.getSource().setInfoList(fileInfo); + try { + this.jobControl.runJob(request); + } catch (Exception e) { + logger.error(e.getMessage()); + } + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/AWSSinglePutRequestMetaData.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/AWSSinglePutRequestMetaData.java deleted file mode 100644 index bc361574..00000000 --- 
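[Editor's sketch, not part of the diff] The handlers above all implement MessageHandler, and HazelcastClientConfig exposes an IQueue of HazelcastJsonValue per node, but the component that drains that queue and routes messages is not in the portion of the diff shown here. A hypothetical dispatcher could look like the following; the top-level "type" field, the class itself, and the routing table are assumptions.

package org.onedatashare.transferservice.odstransferservice.message;

import com.fasterxml.jackson.databind.ObjectMapper;
import com.hazelcast.collection.IQueue;
import com.hazelcast.core.HazelcastJsonValue;
import org.onedatashare.transferservice.odstransferservice.Enum.MessageType;
import org.springframework.stereotype.Service;

import java.util.Map;

// Hypothetical queue dispatcher: blocks on the Hazelcast queue and routes each JSON
// message to the handler registered for its MessageType.
@Service
public class QueueDispatcher implements Runnable {

    private final IQueue<HazelcastJsonValue> messageQueue;
    private final ObjectMapper objectMapper;
    private final Map<MessageType, MessageHandler> handlers;

    public QueueDispatcher(IQueue<HazelcastJsonValue> messageQueue,
                           ObjectMapper objectMapper,
                           TransferJobRequestHandler transferHandler,
                           TransferApplicationParamHandler paramHandler,
                           StopJobRequestHandler stopHandler) {
        this.messageQueue = messageQueue;
        this.objectMapper = objectMapper;
        this.handlers = Map.of(
                MessageType.TRANSFER_JOB_REQUEST, transferHandler,
                MessageType.APPLICATION_PARAM_CHANGE, paramHandler,
                MessageType.STOP_JOB_REQUEST, stopHandler);
    }

    @Override
    public void run() {
        while (!Thread.currentThread().isInterrupted()) {
            try {
                HazelcastJsonValue message = messageQueue.take(); // blocks until a message arrives
                MessageType type = MessageType.valueOf(
                        objectMapper.readTree(message.getValue()).get("type").asText());
                MessageHandler handler = handlers.get(type);
                if (handler != null) {
                    handler.messageHandler(message);
                }
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            } catch (Exception e) {
                // A real implementation would log and drop malformed or unroutable messages here.
            }
        }
    }
}
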
a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/AWSSinglePutRequestMetaData.java +++ /dev/null @@ -1,54 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model; - -import lombok.Getter; -import lombok.Setter; -import lombok.SneakyThrows; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.math.BigInteger; -import java.nio.ByteBuffer; -import java.security.MessageDigest; -import java.util.*; -import java.util.concurrent.ConcurrentLinkedQueue; -import java.util.stream.Collectors; - -@Getter -@Setter -public class AWSSinglePutRequestMetaData { - private Queue dataChunkPriorityQueue; - Logger logger = LoggerFactory.getLogger(AWSSinglePutRequestMetaData.class); - - public AWSSinglePutRequestMetaData(){ - this.dataChunkPriorityQueue = new ConcurrentLinkedQueue(); - } - public void addChunk(DataChunk chunk){ - this.dataChunkPriorityQueue.add(chunk); - } - public void addAllChunks(List chunks){ - this.dataChunkPriorityQueue.addAll(chunks); - } - - @SneakyThrows - public InputStream condenseListToOneStream(long size){ - MessageDigest md = MessageDigest.getInstance("SHA-256"); - byte[] data = new byte[Long.valueOf(size).intValue()]; - ByteBuffer buffer = ByteBuffer.wrap(data); - List list = this.dataChunkPriorityQueue.stream().sorted(new DataChunkComparator()).collect(Collectors.toList()); - for(DataChunk currentChunk : list){ - logger.info("Processing chunk {}", currentChunk); - buffer.put(currentChunk.getData()); - md.update(currentChunk.getData()); - } - String output = String.format("%032X", new BigInteger(1, md.digest())); - logger.info(String.valueOf(output)); - this.dataChunkPriorityQueue.clear(); - return new ByteArrayInputStream(buffer.array()); - } - - public void clear(){ - this.dataChunkPriorityQueue.clear(); - } -} \ No newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java deleted file mode 100644 index ad7f3764..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/BoxSmallFileUpload.java +++ /dev/null @@ -1,37 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model; - -import lombok.Getter; -import lombok.Setter; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.List; -import java.util.PriorityQueue; - -/** - * This class is used to Buffer all of the data from a small file. 
So any file less than 20MB according to the box api - */ -@Getter -@Setter -public class BoxSmallFileUpload { - private PriorityQueue dataChunkPriorityQueue; - - public BoxSmallFileUpload(){ - this.dataChunkPriorityQueue = new PriorityQueue(new DataChunkComparator()); - } - - public void addAllChunks(List chunks){ - this.dataChunkPriorityQueue.addAll(chunks); - } - - public InputStream condenseListToOneStream(long size){ - byte[] data = new byte[Long.valueOf(size).intValue()];//we know this file will always be <= 20MB - ByteBuffer buffer = ByteBuffer.wrap(data); - for(DataChunk chunk : this.dataChunkPriorityQueue){ - buffer.put(chunk.getData()); - } - this.dataChunkPriorityQueue.clear(); - return new ByteArrayInputStream(buffer.array()); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIntensityMapKey.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIntensityMapKey.java new file mode 100644 index 00000000..40038a2e --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIntensityMapKey.java @@ -0,0 +1,18 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.AllArgsConstructor; +import lombok.Data; +import lombok.NoArgsConstructor; + +import java.time.LocalDateTime; +import java.util.UUID; + +@Data +@AllArgsConstructor +@NoArgsConstructor +public class CarbonIntensityMapKey { + String ownerId; + String transferNodeName; + UUID jobUuid; + LocalDateTime timeMeasuredAt; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIpEntry.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIpEntry.java new file mode 100644 index 00000000..5595f6d0 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonIpEntry.java @@ -0,0 +1,11 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Data; + +@Data +public class CarbonIpEntry { + String ip; + int carbonIntensity; + double lat; + double lon; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasurement.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasurement.java new file mode 100644 index 00000000..6853c123 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/CarbonMeasurement.java @@ -0,0 +1,17 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Data; + +import java.time.LocalDateTime; +import java.util.List; +import java.util.UUID; + +@Data +public class CarbonMeasurement { + + List traceRouteCarbon; + String ownerId; + String transferNodeName; + UUID jobUuid; + LocalDateTime timeMeasuredAt; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/EntityInfo.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/EntityInfo.java index c01faa88..433514cb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/EntityInfo.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/EntityInfo.java @@ -1,5 +1,7 @@ package org.onedatashare.transferservice.odstransferservice.model; +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; import com.fasterxml.jackson.annotation.JsonIgnoreProperties; import lombok.AllArgsConstructor; import 
lombok.Data; @@ -14,4 +16,14 @@ public class EntityInfo { private String path; private long size; private int chunkSize; + + @Override + public String toString(){ + ObjectMapper objectMapper = new ObjectMapper(); + try { + return objectMapper.writeValueAsString(this); + } catch (JsonProcessingException e) { + throw new RuntimeException(e); + } + } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileBuffer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileBuffer.java deleted file mode 100644 index d68d459d..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileBuffer.java +++ /dev/null @@ -1,34 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model; - -import java.io.ByteArrayInputStream; -import java.io.InputStream; -import java.nio.ByteBuffer; -import java.util.List; -import java.util.PriorityQueue; - -public class FileBuffer { - private PriorityQueue dataChunkPriorityQueue; - - public FileBuffer(){ - this.dataChunkPriorityQueue = new PriorityQueue(new DataChunkComparator()); - } - public void addChunk(DataChunk chunk){ - this.dataChunkPriorityQueue.add(chunk); - } - public void addAllChunks(List chunks){ - this.dataChunkPriorityQueue.addAll(chunks); - } - - public InputStream condenseListToOneStream(long size){ - byte[] data = new byte[Long.valueOf(size).intValue()]; - ByteBuffer buffer = ByteBuffer.wrap(data); - for(DataChunk chunk : this.dataChunkPriorityQueue){ - buffer.put(chunk.getData()); - } - return new ByteArrayInputStream(buffer.array()); - } - - public void clear(){ - this.dataChunkPriorityQueue.clear(); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileTransferNodeMetaData.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileTransferNodeMetaData.java new file mode 100644 index 00000000..013573c2 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/FileTransferNodeMetaData.java @@ -0,0 +1,23 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.AllArgsConstructor; +import lombok.Builder; +import lombok.Data; + +import java.io.Serializable; +import java.util.UUID; + +@Data +@AllArgsConstructor +@Builder +public class FileTransferNodeMetaData implements Serializable { + + //ods metrics + String odsOwner; + String nodeName; + UUID nodeUuid; + Boolean runningJob; + Boolean online; + long jobId; + UUID jobUuid; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java new file mode 100644 index 00000000..fe16efdb --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/SmallFileUpload.java @@ -0,0 +1,42 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Getter; +import lombok.Setter; + +import java.io.ByteArrayInputStream; +import java.io.InputStream; +import java.util.List; +import java.util.PriorityQueue; + +@Getter +@Setter +public class SmallFileUpload { + + private PriorityQueue dataChunkPriorityQueue; + + public SmallFileUpload(){ + this.dataChunkPriorityQueue = new PriorityQueue(new DataChunkComparator()); + } + + public void addAllChunks(List chunks){ + this.dataChunkPriorityQueue.addAll(chunks); + } + + public InputStream condenseListToOneStream(){ + int totalLength = 
this.dataChunkPriorityQueue.stream().mapToInt(byteArray -> byteArray.getData().length).sum(); + byte[] combinedBytes = new byte[totalLength]; + + int currentIndex = 0; + for (DataChunk chunk : dataChunkPriorityQueue) { + byte[] byteArray = chunk.getData(); + System.arraycopy(byteArray, 0, combinedBytes, currentIndex, byteArray.length); + currentIndex += byteArray.length; + } + + return new ByteArrayInputStream(combinedBytes); + } + + public void clearBuffer(){ + this.dataChunkPriorityQueue.clear(); + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/StopJobRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/StopJobRequest.java new file mode 100644 index 00000000..b6f18c16 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/StopJobRequest.java @@ -0,0 +1,12 @@ +package org.onedatashare.transferservice.odstransferservice.model; + +import lombok.Data; + +import java.util.UUID; + +@Data +public class StopJobRequest { + UUID jobUuid; + Integer jobId; + String ownerId; +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/TransferApplicationParams.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferApplicationParams.java similarity index 94% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/TransferApplicationParams.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferApplicationParams.java index bec5c9b3..2d88ccac 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/TransferApplicationParams.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferApplicationParams.java @@ -1,4 +1,4 @@ -package org.onedatashare.transferservice.odstransferservice.model.optimizer; +package org.onedatashare.transferservice.odstransferservice.model; import lombok.Data; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferJobRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferJobRequest.java index 0064299e..2c2b6ffb 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferJobRequest.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/TransferJobRequest.java @@ -6,8 +6,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; - -import java.util.ArrayList; +import java.util.List; import java.util.UUID; @Data @@ -15,10 +14,13 @@ @NoArgsConstructor public class TransferJobRequest { - @NonNull private String ownerId; + @NonNull + private String ownerId; private int connectionBufferSize; - @NonNull private Source source; - @NonNull private Destination destination; + @NonNull + private Source source; + @NonNull + private Destination destination; private TransferOptions options; @JsonInclude(JsonInclude.Include.NON_NULL) private UUID jobUuid; @@ -28,7 +30,8 @@ public class TransferJobRequest { @AllArgsConstructor @NoArgsConstructor public static class Destination { - @NonNull private EndpointType type; + @NonNull + private EndpointType type; String credId; private AccountEndpointCredential vfsDestCredential; private OAuthEndpointCredential oauthDestCredential; @@ -39,11 +42,13 
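[Editor's sketch, not part of the diff] For context, a short usage sketch of the new SmallFileUpload; the chunks list, destinationClient, and fileName are placeholders rather than code from this diff.

// 'chunks' would be the List<DataChunk> read for one small file by the step's reader.
SmallFileUpload smallFileUpload = new SmallFileUpload();
smallFileUpload.addAllChunks(chunks);
InputStream wholeFile = smallFileUpload.condenseListToOneStream(); // one stream over all buffered chunks
destinationClient.upload(fileName, wholeFile);                     // placeholder for a whole-file upload API
smallFileUpload.clearBuffer();                                     // release the buffered chunks once consumed

One caveat worth noting: iterating a java.util.PriorityQueue with a for-each loop is not guaranteed to follow comparator order, so condenseListToOneStream as written relies on chunks arriving in order; draining the queue with poll() would guarantee ordered assembly.
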
@@ public static class Destination { @AllArgsConstructor @NoArgsConstructor public static class Source { - @NonNull private EndpointType type; + @NonNull + private EndpointType type; String credId; private AccountEndpointCredential vfsSourceCredential; private OAuthEndpointCredential oauthSourceCredential; private String fileSourcePath; - @NonNull private ArrayList infoList; + @NonNull + private List infoList; } } \ No newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/credential/AccountEndpointCredential.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/credential/AccountEndpointCredential.java index c816c33c..1777e80b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/credential/AccountEndpointCredential.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/credential/AccountEndpointCredential.java @@ -8,7 +8,7 @@ @Data @JsonIgnoreProperties(ignoreUnknown = true) -public class AccountEndpointCredential extends EndpointCredential{ +public class AccountEndpointCredential extends EndpointCredential { private String uri; //the hostname and port to reach the server private String username; //this should be the username for the client @ToString.Exclude @@ -18,12 +18,11 @@ public class AccountEndpointCredential extends EndpointCredential{ public static String[] uriFormat(AccountEndpointCredential credential, EndpointType type) { String noTypeUri = ""; - if(type.equals(EndpointType.sftp)){ + if (type.equals(EndpointType.sftp)) { noTypeUri = credential.getUri().replaceFirst("sftp://", ""); - }else if(type.equals(EndpointType.ftp)){ + } else if (type.equals(EndpointType.ftp)) { noTypeUri = credential.getUri().replaceFirst("ftp://", ""); - } - else{ + } else { noTypeUri = credential.getUri().replaceFirst("http://", ""); } return noTypeUri.split(":"); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/CarbonScore.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/CarbonScore.java new file mode 100644 index 00000000..0ad44096 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/CarbonScore.java @@ -0,0 +1,14 @@ +package org.onedatashare.transferservice.odstransferservice.model.metrics; + +import com.fasterxml.jackson.annotation.JsonIgnoreProperties; +import lombok.Data; + +@Data +@JsonIgnoreProperties(ignoreUnknown = true) +public class CarbonScore { + public int avgCarbon; + + public CarbonScore(){ + this.avgCarbon = 0; + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java index 5eada8ed..3cb4af20 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/metrics/DataInflux.java @@ -19,144 +19,147 @@ public class DataInflux { @JsonProperty(value = NETWORK_INTERFACE) @Column(name = NETWORK_INTERFACE) - private String networkInterface; + private String networkInterface = ""; @JsonProperty(value = ODS_USER) @Column(name = ODS_USER, tag = true) - private String odsUser; + private String odsUser = ""; @JsonProperty(value = TRANSFER_NODE_NAME) @Column(name = TRANSFER_NODE_NAME, tag = true) - private String transferNodeName; + private String 
transferNodeName = ""; @JsonProperty(value = ACTIVE_CORE_COUNT) @Column(name = ACTIVE_CORE_COUNT) - private Integer coreCount; + private Integer coreCount = Runtime.getRuntime().availableProcessors(); @JsonProperty(value = CPU_FREQUENCY_MAX) @Column(name = CPU_FREQUENCY_MAX) - private Double cpu_frequency_max; + private Double cpu_frequency_max = 0.0; @JsonProperty(value = CPU_FREQUENCY_CURRENT) @Column(name = CPU_FREQUENCY_CURRENT) - private Double cpu_frequency_current; + private Double cpu_frequency_current = 0.0; @JsonProperty(value = CPU_FREQUENCY_MIN) @Column(name = CPU_FREQUENCY_MIN) - private Double cpu_frequency_min; + private Double cpu_frequency_min = 0.0; @JsonProperty(value = CPU_ARCHITECTURE) @Column(name = CPU_ARCHITECTURE) - private String cpuArchitecture; + private String cpuArchitecture = ""; @JsonProperty(value = PACKET_LOSS_RATE) @Column(name = PACKET_LOSS_RATE) - private Double packetLossRate; + private Double packetLossRate = 0.0; //NIC values @JsonProperty(value = BYTES_SENT) @Column(name = BYTES_SENT) - private Long bytesSent; + private Long bytesSent = 0L; @JsonProperty(value = BYTES_RECEIVED) @Column(name = BYTES_RECEIVED) - private Long bytesReceived; + private Long bytesReceived = 0L; @JsonProperty(value = PACKETS_SENT) @Column(name = PACKETS_SENT) - private Long packetSent; + private Long packetSent = 0L; @JsonProperty(value = PACKETS_RECEIVED) @Column(name = PACKETS_RECEIVED) - private Long packetReceived; + private Long packetReceived = 0L; @JsonProperty(value = DROP_IN) @Column(name = DROP_IN) - private Long dropin; + private Long dropin = 0L; @JsonProperty(value = DROP_OUT) @Column(name = DROP_OUT) - private Long dropout; + private Long dropout = 0L; @JsonProperty(value = NIC_MTU) @Column(name = NIC_MTU) - private Integer nicMtu; + private Integer nicMtu = 0; @JsonProperty(value = NIC_SPEED) @Column(name = NIC_SPEED) - private Integer nicSpeed; + private Integer nicSpeed = 0; @JsonProperty(value = LATENCY) @Column(name = LATENCY) - private Double latency; + private Double latency = 0.0; @JsonProperty(value = RTT) @Column(name = RTT) - private Double rtt; + private Double rtt = 0.0; @Column(name = SOURCE_RTT) - private Double sourceRtt; + private Double sourceRtt = 0.0; @Column(name = SOURCE_LATENCY) - private Double sourceLatency; + private Double sourceLatency = 0.0; @Column(name = DESTINATION_RTT) - private Double destinationRtt; + private Double destinationRtt = 0.0; @Column(name = DEST_LATENCY) - private Double destLatency; + private Double destLatency = 0.0; @JsonProperty(value = ERROR_IN) @Column(name = ERROR_IN) - private Long errin; + private Long errin = 0L; @JsonProperty(value = ERROR_OUT) @Column(name = ERROR_OUT) - private Long errout; + private Long errout = 0L; //Job Values @Column(name = JOB_ID, tag = true) - private Long jobId; + private Long jobId = 0L; @Column(name = READ_THROUGHPUT) - private Double readThroughput; + private Double readThroughput = 0.0; @Column(name = WRITE_THROUGHPUT) - private Double writeThroughput; + private Double writeThroughput = 0.0; @Column(name = BYTES_UPLOADED) - private Long bytesWritten; + private Long bytesWritten = 0L; @Column(name = BYTES_DOWNLOADED) - private Long bytesRead; + private Long bytesRead = 0L; @Column(name = CONCURRENCY) - private Integer concurrency; + private Integer concurrency = 0; @Column(name = PARALLELISM) - private Integer parallelism; + private Integer parallelism = 0; @Column(name = PIPELINING) - private Integer pipelining; + private Integer pipelining = 0; @Column(name = MEMORY) - 
private Long memory; + private Long memory = Runtime.getRuntime().totalMemory() - Runtime.getRuntime().freeMemory(); @Column(name = MAX_MEMORY) - private Long maxMemory; + private Long maxMemory = Runtime.getRuntime().maxMemory(); @Column(name = FREE_MEMORY) - private Long freeMemory; + private Long freeMemory = Runtime.getRuntime().freeMemory(); @Column(name = ALLOCATED_MEMORY) - private Long allocatedMemory; + private Long allocatedMemory = Runtime.getRuntime().totalMemory(); @Column(name = JOB_SIZE) - private Long jobSize; + private Long jobSize = 0L; @Column(name = AVERAGE_FILE_SIZE) - private Long avgFileSize; + private Long avgFileSize = 0L; @Column(name = SOURCE_TYPE, tag = true) - private String sourceType; + private String sourceType = ""; @Column(name = SOURCE_CRED_ID, tag = true) - private String sourceCredId; + private String sourceCredId = ""; @Column(name = DESTINATION_TYPE, tag = true) - private String destType; + private String destType = ""; @Column(name = DESTINATION_CRED_IT, tag = true) - private String destCredId; + private String destCredId = ""; @Column(name = CHUNK_SIZE) - private Long chunksize; + private Long chunksize = 0L; @Column(name = JOB_UUID, tag = true) private UUID jobUuid; + + @Column(name = IS_RUNNING) + private Boolean isRunning = false; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerCreateRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerCreateRequest.java deleted file mode 100644 index 37488f10..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerCreateRequest.java +++ /dev/null @@ -1,35 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model.optimizer; - -import lombok.AllArgsConstructor; -import lombok.Data; - -import java.util.UUID; - -@Data -public class OptimizerCreateRequest { - String nodeId; - int maxConcurrency; - int maxParallelism; - int maxPipelining; - int maxChunkSize; - String optimizerType; - long fileCount; - Long jobId; - String dbType; - String jobUuid; - String userId; - - public OptimizerCreateRequest(String userId,String nodeId, int maxConcurrency, int maxParallelism, int maxPipelining, String optimizerType, long fileCount, long jobId, String dbType, String jobUuid) { - this.userId = userId; - this.maxConcurrency = maxConcurrency; - this.maxChunkSize = Integer.MAX_VALUE; - this.maxParallelism = maxParallelism; - this.maxPipelining = maxPipelining; - this.nodeId = nodeId; - this.optimizerType = optimizerType; - this.fileCount = fileCount; - this.jobId = jobId; - this.dbType = dbType; - this.jobUuid = jobUuid; - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerDeleteRequest.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerDeleteRequest.java deleted file mode 100644 index 5f47d53e..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/model/optimizer/OptimizerDeleteRequest.java +++ /dev/null @@ -1,12 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.model.optimizer; - -import lombok.Data; - -@Data -public class OptimizerDeleteRequest { - private String nodeId; - - public OptimizerDeleteRequest(String nodeId) { - this.nodeId = nodeId; - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolContract.java 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolContract.java new file mode 100644 index 00000000..04886902 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolContract.java @@ -0,0 +1,13 @@ +package org.onedatashare.transferservice.odstransferservice.pools; + +import org.springframework.core.task.TaskExecutor; + +public interface ThreadPoolContract { + public TaskExecutor createExecutor(int threadCount, String prefix); + public void applyOptimizer(int concurrency, int parallelism); + public void clearPools(); + public int concurrencyCount(); + public int parallelismCount(); + public TaskExecutor stepPool(int threadCount); + public TaskExecutor parallelPool(int threadCount, String filePath); +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java deleted file mode 100644 index 74c32d00..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManager.java +++ /dev/null @@ -1,179 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.pools; - -import lombok.Getter; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.core.task.SimpleAsyncTaskExecutor; -import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; -import org.springframework.stereotype.Service; - -import java.util.HashMap; - -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.PARALLEL_POOL_PREFIX; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.STEP_POOL_PREFIX; - -@Service -public class ThreadPoolManager { - - @Getter - HashMap executorHashmap; - HashMap platformThreadMap; - - Logger logger = LoggerFactory.getLogger(ThreadPoolManager.class); - - public ThreadPoolManager() { - this.executorHashmap = new HashMap<>(); - this.platformThreadMap = new HashMap<>(); - } - - public ThreadPoolTaskExecutor createPlatformThreads(int corePoolSize, String prefix) { - ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); - executor.setPrestartAllCoreThreads(true); -// executor.setQueueCapacity(1); - executor.setAllowCoreThreadTimeOut(false); - executor.setCorePoolSize(corePoolSize); -// executor.setMaxPoolSize(corePoolSize); - executor.setThreadNamePrefix(prefix); - executor.initialize(); - if (this.executorHashmap == null) { - this.executorHashmap = new HashMap<>(); - } - logger.info("Created ThreadPoolTaskExecutor: Prefix:{} with size:{}", prefix, corePoolSize); - this.platformThreadMap.put(prefix, executor); - return executor; - } - - public SimpleAsyncTaskExecutor createVirtualThreadExecutor(int corePoolSize, String prefix) { - SimpleAsyncTaskExecutor executor = new SimpleAsyncTaskExecutor(); - executor.setThreadNamePrefix(prefix); - executor.setVirtualThreads(true); - executor.setConcurrencyLimit(corePoolSize); - if (this.executorHashmap == null) { - this.executorHashmap = new HashMap<>(); - } - logger.info("Created a SimpleAsyncTaskExecutor: Prefix:{} with size:{}", prefix, corePoolSize); - this.executorHashmap.put(prefix, executor); - return executor; - } - - /** - * @param concurrency - * @param parallel - */ - public void applyOptimizer(int concurrency, int parallel) { - for (String key : this.executorHashmap.keySet()) { - SimpleAsyncTaskExecutor pool = this.executorHashmap.get(key); - if 
(key.contains(STEP_POOL_PREFIX)) { - logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getConcurrencyLimit(), concurrency); - if (concurrency > 0 && concurrency != pool.getConcurrencyLimit()) { - pool.setConcurrencyLimit(concurrency); - logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), concurrency); - } - } - if (key.contains(PARALLEL_POOL_PREFIX)) { - logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getConcurrencyLimit(), parallel); - if (parallel > 0 && parallel != pool.getConcurrencyLimit()) { - pool.setConcurrencyLimit(parallel); - logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), parallel); - } - } - } - - for (String key : this.platformThreadMap.keySet()) { - ThreadPoolTaskExecutor pool = this.platformThreadMap.get(key); - if (key.contains(STEP_POOL_PREFIX)) { - logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getCorePoolSize(), concurrency); - if (concurrency > 0 && concurrency != pool.getCorePoolSize()) { - pool.setCorePoolSize(concurrency); - logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), concurrency); - } - } - if (key.contains(PARALLEL_POOL_PREFIX)) { - logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getCorePoolSize(), parallel); - if (parallel > 0 && parallel != pool.getCorePoolSize()) { - pool.setCorePoolSize(parallel); - logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), parallel); - - } - } - } - } - - public void clearJobPool() { - for (String key : this.platformThreadMap.keySet()) { - ThreadPoolTaskExecutor pool = this.platformThreadMap.get(key); - pool.shutdown(); - logger.info("Shutting ThreadPoolTaskExecutor down {}", pool.getThreadNamePrefix()); - } - for (String key : this.executorHashmap.keySet()) { - SimpleAsyncTaskExecutor pool = this.executorHashmap.get(key); - pool.close(); - logger.info("Shutting SimpleAsyncTaskExec down {}", pool.getThreadNamePrefix()); - } - this.executorHashmap.clear(); - this.platformThreadMap.clear(); - logger.info("Cleared all thread pools"); - } - - // public SimpleAsyncTaskExecutor sequentialThreadPool() { -// return this.createVirtualThreadExecutor(1, SEQUENTIAL_POOL_PREFIX); -// } -// - public SimpleAsyncTaskExecutor stepTaskExecutorVirtual(int threadCount) { - SimpleAsyncTaskExecutor te = this.executorHashmap.get(STEP_POOL_PREFIX); - if (te == null) { - return this.createVirtualThreadExecutor(threadCount, STEP_POOL_PREFIX); - } - return te; - } - - public ThreadPoolTaskExecutor stepTaskExecutorPlatform(int threadCount) { - ThreadPoolTaskExecutor te = this.platformThreadMap.get(STEP_POOL_PREFIX); - if (te == null) { - return this.createPlatformThreads(threadCount, STEP_POOL_PREFIX); - } - return te; - } - - public SimpleAsyncTaskExecutor parallelThreadPoolVirtual(int threadCount, String fileName) { - SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX); - if (te == null) { - te = this.createVirtualThreadExecutor(threadCount, PARALLEL_POOL_PREFIX); - } - return te; - } - - public ThreadPoolTaskExecutor parallelThreadPoolPlatform(int threadCount, String fileName) { - return this.createPlatformThreads(threadCount, new StringBuilder().append(fileName).append("-").append(PARALLEL_POOL_PREFIX).toString()); - } - - public Integer concurrencyCount() { - SimpleAsyncTaskExecutor threadPoolManager = this.executorHashmap.get(STEP_POOL_PREFIX); - if (threadPoolManager == null) { - return 0; - } - return 
threadPoolManager.getConcurrencyLimit(); - } - - public Integer parallelismCount() { - int parallelism = 0; - for (String key : this.executorHashmap.keySet()) { - if (key.contains(PARALLEL_POOL_PREFIX)) { - parallelism = this.executorHashmap.get(key).getConcurrencyLimit(); - if (parallelism > 0) { - return parallelism; - } - } - } - for (String key : this.platformThreadMap.keySet()) { - if (key.contains(PARALLEL_POOL_PREFIX)) { - parallelism = this.platformThreadMap.get(key).getCorePoolSize(); - if (parallelism > 0) { - return parallelism; - } - } - } - return parallelism; - } - -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java new file mode 100644 index 00000000..f5fbe593 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerPlatform.java @@ -0,0 +1,113 @@ +package org.onedatashare.transferservice.odstransferservice.pools; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Profile; +import org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor; +import org.springframework.stereotype.Service; + +import java.util.HashMap; + +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.PARALLEL_POOL_PREFIX; +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.STEP_POOL_PREFIX; + +@Service("threadPool") +@Profile("platform") +public class ThreadPoolManagerPlatform implements ThreadPoolContract { + HashMap platformThreadMap; + Logger logger = LoggerFactory.getLogger(ThreadPoolManagerPlatform.class); + + public ThreadPoolManagerPlatform() { + this.platformThreadMap = new HashMap<>(); + } + + @Override + public ThreadPoolTaskExecutor createExecutor(int threadCount, String prefix) { + ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor(); + executor.setAllowCoreThreadTimeOut(true); + if(threadCount > 0) { + executor.setCorePoolSize(threadCount); + } + executor.setPrestartAllCoreThreads(true); + executor.setThreadNamePrefix(prefix); + executor.initialize(); + if (this.platformThreadMap == null) { + this.platformThreadMap = new HashMap<>(); + } + logger.info("Created ThreadPoolTaskExecutor: Prefix:{} with size:{}", prefix, threadCount); + this.platformThreadMap.put(prefix, executor); + return executor; + } + + @Override + public void applyOptimizer(int concurrency, int parallelism) { + for (String key : this.platformThreadMap.keySet()) { + ThreadPoolTaskExecutor pool = this.platformThreadMap.get(key); + if (key.contains(STEP_POOL_PREFIX)) { + if (concurrency > 0 && concurrency != pool.getPoolSize()) { + pool.setCorePoolSize(concurrency); + logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), concurrency); + } + } + if (key.contains(PARALLEL_POOL_PREFIX)) { + logger.info("Changing {} pool size from {} to {}", pool.getThreadNamePrefix(), pool.getPoolSize(), parallelism); + if (parallelism > 0 && parallelism != pool.getPoolSize()) { + pool.setCorePoolSize(parallelism); + logger.info("Set {} pool size to {}", pool.getThreadNamePrefix(), parallelism); + } + } + } + } + + @Override + public void clearPools() { + for (String key : this.platformThreadMap.keySet()) { + ThreadPoolTaskExecutor pe = this.platformThreadMap.get(key); + pe.shutdown(); + } + this.platformThreadMap.clear(); + } + + @Override + public int concurrencyCount() { 
+ ThreadPoolTaskExecutor pe = this.platformThreadMap.get(STEP_POOL_PREFIX); + if (pe == null) { + return 0; + } + return pe.getCorePoolSize(); + } + + @Override + public int parallelismCount() { + for (String key : this.platformThreadMap.keySet()) { + if (key.contains(PARALLEL_POOL_PREFIX)) { + ThreadPoolTaskExecutor threadPoolManager = this.platformThreadMap.get(key); + if(threadPoolManager != null){ + int parallelismCount = threadPoolManager.getCorePoolSize(); + if(parallelismCount != 0){ + return parallelismCount; + } + } + } + } + return 0; + } + + @Override + public ThreadPoolTaskExecutor stepPool(int threadCount) { + ThreadPoolTaskExecutor te = this.platformThreadMap.get(STEP_POOL_PREFIX); + if (te == null) { + return this.createExecutor(threadCount, STEP_POOL_PREFIX); + } + return te; + } + + @Override + public ThreadPoolTaskExecutor parallelPool(int threadCount, String filePath) { + ThreadPoolTaskExecutor te = this.platformThreadMap.get(PARALLEL_POOL_PREFIX + filePath); + if (te == null) { + te = this.createExecutor(threadCount, PARALLEL_POOL_PREFIX + filePath); + } + return te; + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java new file mode 100644 index 00000000..af125590 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/pools/ThreadPoolManagerVirtual.java @@ -0,0 +1,116 @@ +package org.onedatashare.transferservice.odstransferservice.pools; + +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.context.annotation.Profile; +import org.springframework.core.task.SimpleAsyncTaskExecutor; +import org.springframework.stereotype.Service; + +import java.util.HashMap; + +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.PARALLEL_POOL_PREFIX; +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.STEP_POOL_PREFIX; + +@Service("threadPool") +@Profile("virtual") +public class ThreadPoolManagerVirtual implements ThreadPoolContract { + + HashMap executorHashmap; + + Logger logger = LoggerFactory.getLogger(ThreadPoolManagerVirtual.class); + + public ThreadPoolManagerVirtual() { + this.executorHashmap = new HashMap<>(); + } + + + @Override + public SimpleAsyncTaskExecutor createExecutor(int threadCount, String prefix) { + SimpleAsyncTaskExecutor executor = new SimpleAsyncTaskExecutor(); + executor.setThreadNamePrefix(prefix); + executor.setVirtualThreads(true); + if(threadCount > 0){ + executor.setConcurrencyLimit(threadCount); + } + if (this.executorHashmap == null) { + this.executorHashmap = new HashMap<>(); + } + logger.info("Created a SimpleAsyncTaskExecutor: Prefix:{} with size:{}", prefix, threadCount); + this.executorHashmap.put(prefix, executor); + return executor; + } + + /** + * @param concurrency + * @param parallel + */ + public void applyOptimizer(int concurrency, int parallel) { + SimpleAsyncTaskExecutor stepPool = this.executorHashmap.get(STEP_POOL_PREFIX); + if (stepPool != null) { + if (concurrency > 0 && concurrency != stepPool.getConcurrencyLimit()) { + stepPool.setConcurrencyLimit(concurrency); + logger.info("Set {} pool size to {}", stepPool.getThreadNamePrefix(), concurrency); + } + } + for (String key : this.executorHashmap.keySet()) { + if (key.contains(PARALLEL_POOL_PREFIX)) { + SimpleAsyncTaskExecutor parallelPool = this.executorHashmap.get(key); + if 
(parallelPool != null) { + if (parallel > 0 && parallel != parallelPool.getConcurrencyLimit()) { + parallelPool.setConcurrencyLimit(parallel); + logger.info("Set {} pool size to {}", parallelPool.getThreadNamePrefix(), parallel); + } + } + } + } + } + + @Override + public void clearPools() { + for (String key : this.executorHashmap.keySet()) { + SimpleAsyncTaskExecutor pool = this.executorHashmap.get(key); + pool.close(); + logger.info("Shutting SimpleAsyncTaskExec down {}", pool.getThreadNamePrefix()); + } + this.executorHashmap.clear(); + logger.info("Cleared all thread pools"); + } + + @Override + public SimpleAsyncTaskExecutor stepPool(int threadCount) { + SimpleAsyncTaskExecutor te = this.executorHashmap.get(STEP_POOL_PREFIX); + if (te == null) { + return this.createExecutor(threadCount, STEP_POOL_PREFIX); + } + return te; + } + + @Override + public SimpleAsyncTaskExecutor parallelPool(int threadCount, String filePath) { + SimpleAsyncTaskExecutor te = this.executorHashmap.get(PARALLEL_POOL_PREFIX + filePath); + if (te == null) { + te = this.createExecutor(threadCount, PARALLEL_POOL_PREFIX + filePath); + } + return te; + } + + public int concurrencyCount() { + SimpleAsyncTaskExecutor threadPoolManager = this.executorHashmap.get(STEP_POOL_PREFIX); + if (threadPoolManager == null) { + return 0; + } + return threadPoolManager.getConcurrencyLimit(); + } + + public int parallelismCount() { + for (String key : this.executorHashmap.keySet()) { + if (key.contains(PARALLEL_POOL_PREFIX)) { + SimpleAsyncTaskExecutor executor = this.executorHashmap.get(key); + if (executor != null) { + return executor.getConcurrencyLimit(); + } + } + } + return 0; + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java new file mode 100644 index 00000000..9d90f5ca --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasure.java @@ -0,0 +1,129 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import com.hazelcast.query.Predicate; +import com.hazelcast.query.PredicateBuilder; +import com.hazelcast.query.Predicates; +import jakarta.annotation.PostConstruct; +import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; +import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry; +import org.onedatashare.transferservice.odstransferservice.model.CarbonMeasurement; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.beans.factory.annotation.Value; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Service; + +import java.io.IOException; +import java.time.LocalDateTime; +import java.util.ArrayList; +import java.util.Collection; +import java.util.List; +import java.util.UUID; +import java.util.stream.Collectors; + +@Service +public class CarbonJobMeasure { + + private final IMap carbonIntensityMap; + private final IMap fileTransferScheduleMap; + private final PredicateBuilder.EntryObject entryObj; + private final 
PmeterParser pmeterParser; + private final ObjectMapper objectMapper; + private final Logger logger = LoggerFactory.getLogger(CarbonJobMeasure.class); + private boolean odsConnector; + + @Value("${spring.application.name}") + private String appName; + + @Value("${ods.user}") + private String odsUser; + + public CarbonJobMeasure(IMap carbonIntensityMap, IMap fileTransferScheduleMap, PmeterParser pmeterParser, ObjectMapper objectMapper) { + this.carbonIntensityMap = carbonIntensityMap; + this.fileTransferScheduleMap = fileTransferScheduleMap; + this.entryObj = Predicates.newPredicateBuilder().getEntryObject(); + this.pmeterParser = pmeterParser; + this.objectMapper = objectMapper; + this.odsConnector = false; + } + + @PostConstruct + public void init() { + //set ODS Connector + if(this.odsUser.equals("OneDataShare") || this.appName.equals("ODSTransferService")) { + this.odsConnector = true; + } + + } + + public List getPotentialJobsFromMap() { + Predicate potentialJobs; + if (this.odsConnector) { + logger.info("{} Querying Hazelcast for jobs", this.appName); + potentialJobs = this.entryObj.get("transferNodeName").equal(""); + } else { + logger.info("ODS Connector: {} Querying Hazelcast for jobs", this.appName); + potentialJobs = this.entryObj.get("transferNodeName").equal(appName).or(this.entryObj.get("source.credId").equal(appName)).or(this.entryObj.get("destination.credId").equal(appName)).or(this.entryObj.get("ownerId").equal(this.odsUser)); + } + + Collection jsonJobs = this.fileTransferScheduleMap.values(potentialJobs); + return jsonJobs.stream().map(hazelcastJsonValue -> { + try { + return this.objectMapper.readValue(hazelcastJsonValue.getValue(), TransferJobRequest.class); + } catch (JsonProcessingException e) { + logger.error("Json Processing Exception: {}\n With message: {}", e, e.getMessage()); + } + return null; + }).collect(Collectors.toList()); + } + + @Scheduled(cron = "0 * * * * *") + public void measureCarbonOfPotentialJobs() { + List potentialJobs = getPotentialJobsFromMap(); + logger.info("Potential jobs from ODS to run: {}", potentialJobs); + potentialJobs.forEach(transferJobRequest -> { + try { + String sourceIp = ""; + if (transferJobRequest.getSource().getVfsSourceCredential() != null) { + sourceIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getSource().getVfsSourceCredential(), transferJobRequest.getSource().getType()); + } else { + sourceIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getSource().getOauthSourceCredential(), transferJobRequest.getSource().getType()); + } + String destIp = ""; + if (transferJobRequest.getDestination().getVfsDestCredential() != null) { + destIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getDestination().getVfsDestCredential(), transferJobRequest.getDestination().getType()); + } else { + destIp = ODSUtility.uriFromEndpointCredential(transferJobRequest.getDestination().getOauthDestCredential(), transferJobRequest.getDestination().getType()); + } + List totalEntries = new ArrayList<>(); + if (!transferJobRequest.getSource().getType().equals(EndpointType.vfs)) { + totalEntries.addAll(this.pmeterParser.carbonPerIp(sourceIp)); + } + if (!transferJobRequest.getDestination().getType().equals(EndpointType.vfs)) { + totalEntries.addAll(this.pmeterParser.carbonPerIp(destIp)); + } + CarbonMeasurement carbonMeasurement = new CarbonMeasurement(); + carbonMeasurement.setTimeMeasuredAt(LocalDateTime.now()); + carbonMeasurement.setJobUuid(transferJobRequest.getJobUuid()); +
carbonMeasurement.setOwnerId(transferJobRequest.getOwnerId()); + carbonMeasurement.setTransferNodeName(transferJobRequest.getTransferNodeName()); + carbonMeasurement.setTraceRouteCarbon(totalEntries); + HazelcastJsonValue jsonValue = new HazelcastJsonValue(this.objectMapper.writeValueAsString(carbonMeasurement)); + UUID randomUUID = UUID.randomUUID(); + this.carbonIntensityMap.put(randomUUID, jsonValue); + logger.info("Created Carbon entry with Key={} and Value={}", randomUUID, jsonValue.getValue()); + } catch (JsonProcessingException e) { + logger.error("Failed to parse job: {} \n Error received: \t {}", transferJobRequest.toString(), e.getMessage()); + } catch (IOException e) { + logger.error("Failed to measure ip: {} \n Error received: \t {}", transferJobRequest.toString(), e); + } + }); + } + +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java new file mode 100644 index 00000000..9040d059 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationService.java @@ -0,0 +1,63 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.HazelcastInstance; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import jakarta.annotation.PostConstruct; +import lombok.SneakyThrows; +import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; +import org.onedatashare.transferservice.odstransferservice.model.FileTransferNodeMetaData; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobExecution; +import org.springframework.core.env.Environment; +import org.springframework.stereotype.Service; + +import java.util.UUID; + +@Service +public class FileTransferNodeRegistrationService { + + private final IMap fileTransferNodeRegistrationMap; + private final UUID nodeUuid; + private final String appName; + private final String odsOwner; + private final ObjectMapper objectMapper; + private final Logger logger = LoggerFactory.getLogger(FileTransferNodeRegistrationService.class); + + public FileTransferNodeRegistrationService(HazelcastInstance hazelcastInstance, IMap fileTransferNodeRegistrationMap, Environment environment, ObjectMapper objectMapper) { + this.fileTransferNodeRegistrationMap = fileTransferNodeRegistrationMap; + this.nodeUuid = hazelcastInstance.getLocalEndpoint().getUuid(); + this.appName = environment.getProperty("spring.application.name"); + this.odsOwner = environment.getProperty("ods.user"); + this.objectMapper = objectMapper; + } + + @SneakyThrows + @PostConstruct + public void init() { + this.updateRegistrationInHazelcast(null); + } + + public void updateRegistrationInHazelcast(JobExecution jobExecution) throws JsonProcessingException { + var metaDataBuilder = FileTransferNodeMetaData.builder(); + if (jobExecution == null) { + metaDataBuilder.jobId(-1L); + metaDataBuilder.runningJob(false); + metaDataBuilder.jobUuid(new UUID(0, 0)); + } else { + metaDataBuilder.jobId(jobExecution.getJobId()); + metaDataBuilder.runningJob(jobExecution.isRunning()); + metaDataBuilder.jobUuid(UUID.fromString(jobExecution.getJobParameters().getString(ODSConstants.JOB_UUID))); + } + 
metaDataBuilder.online(true); + metaDataBuilder.nodeName(this.appName); + metaDataBuilder.odsOwner(this.odsOwner); + metaDataBuilder.nodeUuid(this.nodeUuid); + String jsonValue = this.objectMapper.writeValueAsString(metaDataBuilder.build()); + logger.info("Registering node: {}", jsonValue); + this.fileTransferNodeRegistrationMap.put(this.appName, new HazelcastJsonValue(jsonValue)); + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java new file mode 100644 index 00000000..a858aca9 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/HazelcastConsumer.java @@ -0,0 +1,82 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.JsonNode; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.node.ObjectNode; +import com.hazelcast.collection.IQueue; +import com.hazelcast.core.HazelcastJsonValue; +import org.onedatashare.transferservice.odstransferservice.Enum.MessageType; +import org.onedatashare.transferservice.odstransferservice.message.StopJobRequestHandler; +import org.onedatashare.transferservice.odstransferservice.message.TransferApplicationParamHandler; +import org.onedatashare.transferservice.odstransferservice.message.TransferJobRequestHandler; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.core.task.TaskExecutor; +import org.springframework.scheduling.annotation.Scheduled; +import org.springframework.stereotype.Service; + +import java.io.IOException; + +@Service +public class HazelcastConsumer { + + private final IQueue messageQueue; + private final ObjectMapper objectMapper; + private final TransferJobRequestHandler transferJobRequestHandler; + private final TransferApplicationParamHandler transferParamApplicationHandler; + private final Logger logger; + private final StopJobRequestHandler stopJobRequestHandler; + private final TaskExecutor executor; + + public HazelcastConsumer(ThreadPoolContract threadPoolContract, StopJobRequestHandler stopJobRequestHandler, IQueue messageQueue, ObjectMapper objectMapper, TransferJobRequestHandler transferJobRequestHandler, TransferApplicationParamHandler transferApplicationParamHandler) { + this.messageQueue = messageQueue; + this.transferJobRequestHandler = transferJobRequestHandler; + this.objectMapper = objectMapper; + this.transferParamApplicationHandler = transferApplicationParamHandler; + this.logger = LoggerFactory.getLogger(HazelcastConsumer.class); + this.stopJobRequestHandler = stopJobRequestHandler; + this.executor = threadPoolContract.createExecutor(10, "HazelcastConsumer"); + } + + + @Scheduled(cron = "0/5 * * * * *") + public void runConsumer() throws JsonProcessingException { + HazelcastJsonValue jsonMsg = this.messageQueue.poll(); + if (jsonMsg == null) return; + JsonNode jsonNode = this.objectMapper.readTree(jsonMsg.getValue()); + logger.info("Got Msg: {}", jsonNode.toPrettyString()); + String type = ((ObjectNode) jsonNode).get("type").asText(); + ((ObjectNode) jsonNode).remove("type"); + HazelcastJsonValue properJsonMsg = new HazelcastJsonValue(jsonNode.toString()); + this.executor.execute(() -> { + switch (MessageType.valueOf(type)) { + case 
MessageType.TRANSFER_JOB_REQUEST: + try { + this.transferJobRequestHandler.messageHandler(properJsonMsg); + } catch (JsonProcessingException e) { + logger.error("Failed to parse json in TransferJobRequest Message Handler: {} \n Error: {}", properJsonMsg, e.getMessage()); + } + break; + + case MessageType.APPLICATION_PARAM_CHANGE: + try { + this.transferParamApplicationHandler.messageHandler(properJsonMsg); + } catch (JsonProcessingException e) { + logger.error("Failed to parse json in TransferParam Message Handler: {} \n Error: {}", properJsonMsg, e.getMessage()); + } + break; + + case MessageType.STOP_JOB_REQUEST: + try { + this.stopJobRequestHandler.messageHandler(properJsonMsg); + } catch (IOException e) { + logger.error("Failed to parse json in Stop Job Message Handler: {} \n Error: {}", properJsonMsg, e.getMessage()); + } + break; + } + }); + } + +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxCache.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxCache.java index c314dc35..705a2948 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxCache.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxCache.java @@ -2,7 +2,7 @@ import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; import org.onedatashare.transferservice.odstransferservice.model.JobMetric; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolManager; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.StepExecution; @@ -26,7 +26,7 @@ @Service public class InfluxCache { - private final ThreadPoolManager threadPoolManager; + private final ThreadPoolContract threadPool; public ConcurrentHashMap threadCache; //stores a JobMetric that represents everything that thread has processed for the step.
Thus each JobMetric is an aggregate of what has happened Logger logger = LoggerFactory.getLogger(InfluxCache.class); @@ -36,8 +36,8 @@ public enum ThroughputType { WRITER } - public InfluxCache(ThreadPoolManager threadPoolManager) { - this.threadPoolManager = threadPoolManager; + public InfluxCache(ThreadPoolContract threadPool) { + this.threadPool = threadPool; this.threadCache = new ConcurrentHashMap<>(); } @@ -47,8 +47,8 @@ public void addMetric(long threadId, StepExecution stepExecution, long totalByte prevMetric = new JobMetric(); prevMetric.setThreadId(threadId); prevMetric.setStepExecution(stepExecution); - prevMetric.setConcurrency(this.threadPoolManager.concurrencyCount()); - prevMetric.setParallelism(this.threadPoolManager.parallelismCount()); + prevMetric.setConcurrency(this.threadPool.concurrencyCount()); + prevMetric.setParallelism(this.threadPool.parallelismCount()); prevMetric.setPipelining(stepExecution.getJobParameters().getLong(PIPELINING).intValue()); prevMetric.setChunkSize(chunkSize); this.threadCache.put(threadId, prevMetric); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java similarity index 68% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java index 2f39e73e..9012c52c 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/DatabaseService/InfluxIOService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/InfluxIOService.java @@ -1,4 +1,4 @@ -package org.onedatashare.transferservice.odstransferservice.service.DatabaseService; +package org.onedatashare.transferservice.odstransferservice.service; import com.influxdb.client.InfluxDBClient; import com.influxdb.client.WriteApi; @@ -6,26 +6,26 @@ import com.influxdb.client.domain.WritePrecision; import com.influxdb.exceptions.InfluxException; import com.influxdb.exceptions.UnprocessableEntityException; +import jakarta.annotation.PostConstruct; import org.onedatashare.transferservice.odstransferservice.model.metrics.DataInflux; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; import org.springframework.beans.factory.annotation.Value; -import org.springframework.stereotype.Component; import org.springframework.stereotype.Service; -@Component +@Service public class InfluxIOService { private final InfluxDBClient influxClient; Logger logger = LoggerFactory.getLogger(InfluxIOService.class); @Value("${ods.influx.bucket}") - private String bucketName; + private String defaultInfluxBucket; @Value("${ods.influx.org}") String org; + Bucket bucket; private WriteApi writeApi; public InfluxIOService(InfluxDBClient influxClient) { @@ -33,29 +33,27 @@ public InfluxIOService(InfluxDBClient influxClient) { this.writeApi = this.influxClient.makeWriteApi(); } - public void reconfigureBucketForNewJob(String ownerId) { - logger.info("********* Reconfiguring the Bucket ***********"); - Bucket bucket; - if (ownerId == null) { - bucket = influxClient.getBucketsApi().findBucketByName(this.bucketName); - } else { - bucket = influxClient.getBucketsApi().findBucketByName(ownerId); - } + @PostConstruct + public void postConstruct() { + 
this.reconfigureBucketForNewJob(this.defaultInfluxBucket); + } + public void reconfigureBucketForNewJob(String bucketName) { + logger.info("********* Reconfiguring the Bucket to Owner {}***********", bucketName); + bucket = influxClient.getBucketsApi().findBucketByName(bucketName); if (bucket == null) { - logger.info("Creating the Influx bucket name={}, org={}", ownerId, org); + logger.info("Creating the Influx bucket name={}, org={}", bucketName, org); try { - bucket = this.influxClient.getBucketsApi().createBucket(ownerId, org); + bucket = this.influxClient.getBucketsApi().createBucket(bucketName, org); } catch (UnprocessableEntityException ignored) { } } - this.writeApi = this.influxClient.makeWriteApi(); } public void insertDataPoint(DataInflux point) { try { - writeApi.writeMeasurement(WritePrecision.MS, point); + writeApi.writeMeasurement(this.bucket.getName(), this.org, WritePrecision.MS, point); } catch (InfluxException exception) { logger.error("Exception occurred while pushing measurement to influx: " + exception.getMessage()); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java new file mode 100644 index 00000000..c3f21cde --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobCompletionListener.java @@ -0,0 +1,65 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobExecutionListener; +import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.scheduling.annotation.Async; +import org.springframework.stereotype.Service; + +import java.time.Duration; +import java.util.Set; + + +@Service +public class JobCompletionListener implements JobExecutionListener { + private final ThreadPoolContract threadPool; + private Set jobIds; + Logger logger = LoggerFactory.getLogger(JobCompletionListener.class); + + ConnectionBag connectionBag; + + MetricsCollector metricsCollector; + + @Autowired + FileTransferNodeRegistrationService fileTransferNodeRegistrationService; + + public JobCompletionListener(MetricsCollector metricsCollector, ConnectionBag connectionBag, ThreadPoolContract threadPool, Set jobIds) { + this.metricsCollector = metricsCollector; + this.connectionBag = connectionBag; + this.threadPool = threadPool; + this.jobIds = jobIds; + } + + + @Override + @Async + public void beforeJob(JobExecution jobExecution) { + logger.info("*****Job Execution start Time***** : {} with jobId={}", jobExecution.getStartTime(), jobExecution.getJobId()); + this.jobIds.add(jobExecution.getJobId()); + try { + this.fileTransferNodeRegistrationService.updateRegistrationInHazelcast(jobExecution); + } catch (JsonProcessingException e) { + logger.error("Failed to update status of FTN inside of Hazelcast for job start. 
Exception \n {}", e.getMessage()); + } + } + + @Override + @Async + public void afterJob(JobExecution jobExecution) { + logger.info("*****Job Execution End Time**** : {}", jobExecution.getEndTime()); + logger.info("Total Job Time in seconds: {}", Duration.between(jobExecution.getStartTime(), jobExecution.getEndTime()).toSeconds()); + connectionBag.closePools(); + this.threadPool.clearPools(); + System.gc(); + try { + this.fileTransferNodeRegistrationService.updateRegistrationInHazelcast(jobExecution); + } catch (JsonProcessingException e) { + logger.error("Failed to update status of FTN inside of Hazelcast for job end. Exception \n {}", e.getMessage()); + } + } +} + diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java index b4e19ce4..cf62267e 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobControl.java @@ -3,55 +3,31 @@ import lombok.Getter; import lombok.NoArgsConstructor; import lombok.Setter; -import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; -import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolManager; -import org.onedatashare.transferservice.odstransferservice.service.DatabaseService.InfluxIOService; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; -import org.onedatashare.transferservice.odstransferservice.service.listner.JobCompletionListener; -import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3LargeFileWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3Reader; -import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3SmallFileWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxReader; -import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterLargeFile; -import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterSmallFile; -import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxChunkedWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxReader; -import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPReader; -import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveReader; -import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveResumableWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveSimpleWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.http.HttpReader; -import org.onedatashare.transferservice.odstransferservice.service.step.scp.SCPReader; -import org.onedatashare.transferservice.odstransferservice.service.step.scp.SCPWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.sftp.SFTPReader; -import 
org.onedatashare.transferservice.odstransferservice.service.step.sftp.SFTPWriter; -import org.onedatashare.transferservice.odstransferservice.service.step.vfs.VfsReader; -import org.onedatashare.transferservice.odstransferservice.service.step.vfs.VfsWriter; +import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolContract; +import org.onedatashare.transferservice.odstransferservice.service.step.ReaderWriterFactory; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.batch.core.Job; +import org.springframework.batch.core.*; import org.springframework.batch.core.job.builder.FlowBuilder; import org.springframework.batch.core.job.builder.JobBuilder; import org.springframework.batch.core.job.flow.Flow; +import org.springframework.batch.core.launch.JobLauncher; +import org.springframework.batch.core.repository.JobExecutionAlreadyRunningException; +import org.springframework.batch.core.repository.JobInstanceAlreadyCompleteException; import org.springframework.batch.core.repository.JobRepository; +import org.springframework.batch.core.repository.JobRestartException; import org.springframework.batch.core.step.builder.SimpleStepBuilder; import org.springframework.batch.core.step.builder.StepBuilder; -import org.springframework.batch.item.ItemReader; -import org.springframework.batch.item.ItemWriter; import org.springframework.beans.factory.annotation.Autowired; +import org.springframework.retry.backoff.BackOffPolicy; import org.springframework.stereotype.Service; import org.springframework.transaction.PlatformTransactionManager; import java.util.List; import java.util.stream.Collectors; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.FIVE_MB; -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.TWENTY_MB; - @Service @NoArgsConstructor @@ -59,12 +35,10 @@ @Setter public class JobControl { - public TransferJobRequest request; - Logger logger = LoggerFactory.getLogger(JobControl.class); @Autowired - VfsExpander vfsExpander; + ReaderWriterFactory readerWriterFactory; @Autowired JobRepository jobRepository; @@ -76,26 +50,28 @@ public class JobControl { JobCompletionListener jobCompletionListener; @Autowired - MetricsCollector metricsCollector; + PlatformTransactionManager platformTransactionManager; + + @Autowired + InfluxIOService influxIOService; @Autowired - InfluxCache influxCache; + ThreadPoolContract threadPool; @Autowired - PlatformTransactionManager platformTransactionManager; + BackOffPolicy backOffPolicy; @Autowired - InfluxIOService influxIOService; + JobLauncher jobLauncher; @Autowired - ThreadPoolManager threadPoolManager; - - private List createConcurrentFlow(List infoList, String basePath) { - if (this.request.getSource().getType().equals(EndpointType.vfs)) { - infoList = vfsExpander.expandDirectory(infoList, basePath); - logger.info("File list: {}", infoList); - } - return infoList.stream().map(file -> { + JobParamService jobParamService; + + JobExecution latestJobExecution; + + private List createConcurrentFlow(TransferJobRequest request) { + String basePath = request.getSource().getFileSourcePath(); + return request.getSource().getInfoList().stream().map(file -> { String idForStep = ""; if (!file.getId().isEmpty()) { idForStep = file.getId(); @@ -103,118 +79,28 @@ private List createConcurrentFlow(List infoList, String basePa idForStep = file.getPath(); } SimpleStepBuilder stepBuilder = new StepBuilder(idForStep, this.jobRepository) - 
.chunk(this.request.getOptions().getPipeSize(), this.platformTransactionManager); + .chunk(request.getOptions().getPipeSize(), this.platformTransactionManager); stepBuilder - .reader(getRightReader(request.getSource().getType(), file)) - .writer(getRightWriter(request.getDestination().getType(), file)); - if (this.request.getOptions().getParallelThreadCount() > 0) { - stepBuilder.taskExecutor(threadPoolManager.parallelThreadPoolVirtual(request.getOptions().getParallelThreadCount() * request.getOptions().getConcurrencyThreadCount(), file.getPath())); + .reader(readerWriterFactory.getRightReader(request.getSource(), file, request.getOptions())) + .writer(readerWriterFactory.getRightWriter(request.getDestination(), file)); + if (request.getOptions().getParallelThreadCount() > 0) { + stepBuilder.taskExecutor(threadPool.parallelPool(request.getOptions().getParallelThreadCount(), file.getPath())); } - stepBuilder.throttleLimit(64); + stepBuilder.faultTolerant() + .backOffPolicy(this.backOffPolicy); return new FlowBuilder(basePath + idForStep) .start(stepBuilder.build()).build(); }).collect(Collectors.toList()); } - protected ItemReader getRightReader(EndpointType type, EntityInfo fileInfo) { - switch (type) { - case http: - HttpReader hr = new HttpReader(fileInfo, request.getSource().getVfsSourceCredential()); - hr.setPool(connectionBag.getHttpReaderPool()); - return hr; - case vfs: - VfsReader vfsReader = new VfsReader(request.getSource().getVfsSourceCredential(), fileInfo); - return vfsReader; - case sftp: - SFTPReader sftpReader = new SFTPReader(request.getSource().getVfsSourceCredential(), fileInfo, request.getOptions().getPipeSize()); - sftpReader.setPool(connectionBag.getSftpReaderPool()); - return sftpReader; - case ftp: - FTPReader ftpReader = new FTPReader(request.getSource().getVfsSourceCredential(), fileInfo); - ftpReader.setPool(connectionBag.getFtpReaderPool()); - return ftpReader; - case s3: - AmazonS3Reader amazonS3Reader = new AmazonS3Reader(request.getSource().getVfsSourceCredential(), fileInfo); - amazonS3Reader.setPool(connectionBag.getS3ReaderPool()); - return amazonS3Reader; - case box: - BoxReader boxReader = new BoxReader(request.getSource().getOauthSourceCredential(), fileInfo); - boxReader.setMaxRetry(this.request.getOptions().getRetry()); - return boxReader; - case dropbox: - DropBoxReader dropBoxReader = new DropBoxReader(request.getSource().getOauthSourceCredential(), fileInfo); - return dropBoxReader; - case scp: - SCPReader reader = new SCPReader(fileInfo); - reader.setPool(connectionBag.getSftpReaderPool()); - return reader; - case gdrive: - GDriveReader dDriveReader = new GDriveReader(request.getSource().getOauthSourceCredential(), fileInfo); - return dDriveReader; - } - return null; - } - - protected ItemWriter getRightWriter(EndpointType type, EntityInfo fileInfo) { - switch (type) { - case vfs: - VfsWriter vfsWriter = new VfsWriter(request.getDestination().getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - return vfsWriter; - case sftp: - SFTPWriter sftpWriter = new SFTPWriter(request.getDestination().getVfsDestCredential(), this.metricsCollector, this.influxCache); - sftpWriter.setPool(connectionBag.getSftpWriterPool()); - return sftpWriter; - case ftp: - FTPWriter ftpWriter = new FTPWriter(request.getDestination().getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - ftpWriter.setPool(connectionBag.getFtpWriterPool()); - return ftpWriter; - case s3: - if (fileInfo.getSize() < TWENTY_MB) { - 
AmazonS3SmallFileWriter amazonS3SmallFileWriter = new AmazonS3SmallFileWriter(request.getDestination().getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - amazonS3SmallFileWriter.setPool(connectionBag.getS3WriterPool()); - return amazonS3SmallFileWriter; - } else { - AmazonS3LargeFileWriter amazonS3LargeFileWriter = new AmazonS3LargeFileWriter(request.getDestination().getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - amazonS3LargeFileWriter.setPool(connectionBag.getS3WriterPool()); - return amazonS3LargeFileWriter; - } - case box: - if (fileInfo.getSize() < TWENTY_MB) { - BoxWriterSmallFile boxWriterSmallFile = new BoxWriterSmallFile(request.getDestination().getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - return boxWriterSmallFile; - } else { - BoxWriterLargeFile boxWriterLargeFile = new BoxWriterLargeFile(request.getDestination().getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); - return boxWriterLargeFile; - } - case dropbox: - DropBoxChunkedWriter dropBoxChunkedWriter = new DropBoxChunkedWriter(request.getDestination().getOauthDestCredential(), this.metricsCollector, this.influxCache); - return dropBoxChunkedWriter; - case scp: - SCPWriter scpWriter = new SCPWriter(fileInfo, this.metricsCollector, this.influxCache); - scpWriter.setPool(connectionBag.getSftpWriterPool()); - return scpWriter; - case gdrive: - if (fileInfo.getSize() < FIVE_MB) { - GDriveSimpleWriter writer = new GDriveSimpleWriter(request.getDestination().getOauthDestCredential(), fileInfo); - return writer; - } else { - GDriveResumableWriter writer = new GDriveResumableWriter(request.getDestination().getOauthDestCredential(), fileInfo); - writer.setPool(connectionBag.getGoogleDriveWriterPool()); - return writer; - } - } - return null; - } - - public Job concurrentJobDefinition() { - JobBuilder jobBuilder = new JobBuilder(this.request.getJobUuid().toString(), this.jobRepository); - connectionBag.preparePools(this.request); - List flows = createConcurrentFlow(request.getSource().getInfoList(), request.getSource().getFileSourcePath()); - this.influxIOService.reconfigureBucketForNewJob(this.request.getOwnerId()); + public Job concurrentJobDefinition(TransferJobRequest request) { + JobBuilder jobBuilder = new JobBuilder(request.getJobUuid().toString(), this.jobRepository); + connectionBag.preparePools(request); + List flows = createConcurrentFlow(request); + this.influxIOService.reconfigureBucketForNewJob(request.getOwnerId()); Flow[] fl = new Flow[flows.size()]; Flow f = new FlowBuilder("splitFlow") -// .split(this.threadPoolManager.stepTaskExecutorVirtual(this.request.getOptions().getConcurrencyThreadCount())) - .split(this.threadPoolManager.stepTaskExecutorVirtual(this.request.getOptions().getConcurrencyThreadCount())) + .split(this.threadPool.stepPool(request.getOptions().getConcurrencyThreadCount())) .add(flows.toArray(fl)) .build(); return jobBuilder @@ -224,4 +110,11 @@ public Job concurrentJobDefinition() { .build(); } + public JobExecution runJob(TransferJobRequest transferJobRequest) throws JobInstanceAlreadyCompleteException, JobExecutionAlreadyRunningException, JobParametersInvalidException, JobRestartException { + Job job = this.concurrentJobDefinition(transferJobRequest); + JobParameters jobParameters = this.jobParamService.translate(new JobParametersBuilder(), transferJobRequest); + this.latestJobExecution = this.jobLauncher.run(job, jobParameters); + return this.latestJobExecution; + } + } \ No 
newline at end of file diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java index 2f52c965..3cfaf054 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/JobParamService.java @@ -5,11 +5,13 @@ import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; -import org.onedatashare.transferservice.odstransferservice.utility.S3Utility; +import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; +import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.JobParametersBuilder; +import org.springframework.beans.factory.annotation.Value; import org.springframework.stereotype.Service; import java.net.URI; @@ -22,6 +24,14 @@ public class JobParamService { Logger logger = LoggerFactory.getLogger(JobParamService.class); + @Value("${spring.application.name}") + private String appName; + + PmeterParser pmeterParser; + + public JobParamService(PmeterParser pmeterParser) { + this.pmeterParser = pmeterParser; + } /** * Here we are adding basically the whole request except for sensitive credentials to the Job Params table. @@ -52,7 +62,7 @@ public JobParameters translate(JobParametersBuilder builder, TransferJobRequest builder.addLong(PIPELINING, (long) request.getOptions().getPipeSize()); builder.addString(COMPRESS, String.valueOf(request.getOptions().getCompress())); builder.addLong(RETRY, (long) request.getOptions().getRetry()); - builder.addString(APP_NAME, System.getenv("APP_NAME")); + builder.addString(APP_NAME, this.appName); builder.addString(OPTIMIZER, request.getOptions().getOptimizer()); builder.addLong(FILE_COUNT, (long) request.getSource().getInfoList().size()); long totalSize = 0L; @@ -69,47 +79,30 @@ public JobParameters translate(JobParametersBuilder builder, TransferJobRequest //adding the source host and source port to use for RTT & Latency measurements. 
if (request.getSource().getVfsSourceCredential() != null) { - builder.addString(SOURCE_HOST, this.uriFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType)); + String sourceIp = ODSUtility.uriFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType); + builder.addString(SOURCE_HOST, sourceIp); builder.addLong(SOURCE_PORT, (long) this.portFromEndpointCredential(request.getSource().getVfsSourceCredential(), sourceType)); + CarbonScore score = this.pmeterParser.carbonAverageTraceRoute(sourceIp); + logger.info("Source Carbon Score: {}", score.avgCarbon); + builder.addLong(CARBON_SCORE_SOURCE, (long) score.avgCarbon); } else if (request.getSource().getOauthSourceCredential() != null) { - builder.addString(SOURCE_HOST, this.uriFromEndpointCredential(request.getSource().getOauthSourceCredential(), sourceType)); + builder.addString(SOURCE_HOST, ODSUtility.uriFromEndpointCredential(request.getSource().getOauthSourceCredential(), sourceType)); builder.addLong(SOURCE_PORT, (long) this.portFromEndpointCredential(request.getSource().getOauthSourceCredential(), sourceType)); } if (request.getDestination().getVfsDestCredential() != null) { - builder.addString(DEST_HOST, this.uriFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType)); + String destIp = ODSUtility.uriFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType); + builder.addString(DEST_HOST, destIp); builder.addLong(DEST_PORT, (long) this.portFromEndpointCredential(request.getDestination().getVfsDestCredential(), destType)); + CarbonScore score = this.pmeterParser.carbonAverageTraceRoute(destIp); + logger.info("Destination Carbon Score: {}", score.avgCarbon); + builder.addLong(CARBON_SCORE_DEST, (long) score.avgCarbon); } else if (request.getDestination().getOauthDestCredential() != null) { - builder.addString(DEST_HOST, this.uriFromEndpointCredential(request.getDestination().getOauthDestCredential(), destType)); + builder.addString(DEST_HOST, ODSUtility.uriFromEndpointCredential(request.getDestination().getOauthDestCredential(), destType)); builder.addLong(DEST_PORT, (long) this.portFromEndpointCredential(request.getDestination().getOauthDestCredential(), destType)); } - return builder.toJobParameters(); } - public String uriFromEndpointCredential(EndpointCredential credential, EndpointType type) { - AccountEndpointCredential ac; - switch (type) { - case ftp: - case sftp: - case scp: - case http: - ac = (AccountEndpointCredential) credential; - URI uri = URI.create(ac.getUri()); - return uri.getHost(); - case s3: - ac = (AccountEndpointCredential) credential; - URI s3Uri = URI.create(S3Utility.constructS3URI(ac.getUri(), "")); - return s3Uri.getHost(); - case box: - return "box.com"; - case dropbox: - return "dropbox.com"; - case gdrive: - return "drive.google.com"; - default: - return ""; - } - } public int portFromEndpointCredential(EndpointCredential credential, EndpointType type) { switch (type) { diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java similarity index 94% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java index 2f68c69c..e330f9b9 100644 --- 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/cron/MetricsCollector.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/MetricsCollector.java @@ -1,4 +1,4 @@ -package org.onedatashare.transferservice.odstransferservice.service.cron; +package org.onedatashare.transferservice.odstransferservice.service; import lombok.Getter; import lombok.Setter; @@ -6,10 +6,6 @@ import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; import org.onedatashare.transferservice.odstransferservice.model.JobMetric; import org.onedatashare.transferservice.odstransferservice.model.metrics.DataInflux; -import org.onedatashare.transferservice.odstransferservice.service.DatabaseService.InfluxIOService; -import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.LatencyRtt; -import org.onedatashare.transferservice.odstransferservice.service.PmeterParser; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.JobParameters; @@ -85,7 +81,7 @@ public void collectAndSave() { long maxMem = Runtime.getRuntime().maxMemory(); JobMetric currentAggregateMetric = influxCache.aggregateMetric(); //this metrics throughput is the throughput of the whole map in influxCache. DataInflux lastPmeterData; - if (this.metrics.size() < 1) { + if (this.metrics.isEmpty()) { this.metrics.add(new DataInflux()); lastPmeterData = metrics.get(metrics.size() - 1); } else { @@ -134,6 +130,9 @@ public void collectAndSave() { lastPmeterData.setJobSize(jobParameters.getLong(ODSConstants.JOB_SIZE)); lastPmeterData.setAvgFileSize(jobParameters.getLong(ODSConstants.FILE_SIZE_AVG)); lastPmeterData.setOdsUser(jobParameters.getString(ODSConstants.OWNER_ID)); + lastPmeterData.setIsRunning(currentAggregateMetric.getStepExecution().getJobExecution().isRunning()); + }else{ + lastPmeterData.setIsRunning(false); } log.info(lastPmeterData.toString()); this.influxCache.clearCache(); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/OptimizerService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/OptimizerService.java deleted file mode 100644 index a5fa5457..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/OptimizerService.java +++ /dev/null @@ -1,58 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service; - -import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerCreateRequest; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerDeleteRequest; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.http.HttpEntity; -import org.springframework.http.HttpHeaders; -import org.springframework.http.MediaType; -import org.springframework.scheduling.annotation.Async; -import org.springframework.stereotype.Service; -import org.springframework.web.client.RestClientException; -import org.springframework.web.client.RestTemplate; - -import java.util.concurrent.CompletableFuture; - -@Service -public class OptimizerService { - - @Autowired - RestTemplate optimizerTemplate; - - @Value("${spring.application.name}") - String appName; - - HttpHeaders headers; - - Logger logger = LoggerFactory.getLogger(OptimizerService.class); - - 
public OptimizerService() { - headers = new HttpHeaders(); - headers.setContentType(MediaType.APPLICATION_JSON); - } - - @Async("optimizerTaskExecutor") - public void createOptimizerBlocking(OptimizerCreateRequest optimizerCreateRequest) throws RestClientException { - optimizerCreateRequest.setNodeId(this.appName); - logger.info("Sending OptimizerCreateRequest {}", optimizerCreateRequest); - HttpEntity createRequestHttpEntity = new HttpEntity<>(optimizerCreateRequest, this.headers); - logger.info(createRequestHttpEntity.getBody().toString()); - this.optimizerTemplate.postForLocation("/optimizer/create", createRequestHttpEntity, Void.class); - CompletableFuture.completedFuture(null); - } - - @Async("optimizerTaskExecutor") - public void deleteOptimizerBlocking(OptimizerDeleteRequest optimizerDeleteRequest) { - optimizerDeleteRequest.setNodeId(this.appName); - try { - this.optimizerTemplate.postForObject("/optimizer/delete", new HttpEntity<>(optimizerDeleteRequest, this.headers), Void.class); - } catch (RestClientException e) { - logger.error("Failed to Delete optimizer. {}", optimizerDeleteRequest); - } - logger.info("Deleted {}", optimizerDeleteRequest); - CompletableFuture.completedFuture(null); - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java index 07c534d4..564f7c3d 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParser.java @@ -1,24 +1,33 @@ package org.onedatashare.transferservice.odstransferservice.service; +import com.fasterxml.jackson.core.type.TypeReference; import com.fasterxml.jackson.databind.ObjectMapper; +import com.fasterxml.jackson.databind.SerializationFeature; +import com.fasterxml.jackson.datatype.jsr310.JavaTimeModule; import jakarta.annotation.PostConstruct; +import lombok.SneakyThrows; import org.apache.commons.exec.CommandLine; import org.apache.commons.exec.DefaultExecutor; import org.apache.commons.exec.ExecuteWatchdog; import org.apache.commons.exec.PumpStreamHandler; +import org.onedatashare.transferservice.odstransferservice.model.CarbonIpEntry; +import org.onedatashare.transferservice.odstransferservice.model.metrics.CarbonScore; import org.onedatashare.transferservice.odstransferservice.model.metrics.DataInflux; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.beans.factory.annotation.Value; +import org.springframework.core.env.Environment; import org.springframework.stereotype.Service; import java.io.ByteArrayOutputStream; import java.io.IOException; +import java.net.InetAddress; +import java.net.NetworkInterface; +import java.net.Socket; import java.nio.file.Files; import java.nio.file.Path; import java.nio.file.Paths; -import java.util.ArrayList; -import java.util.List; +import java.util.*; @Service public class PmeterParser { @@ -26,17 +35,22 @@ public class PmeterParser { private final String MEASURE = "measure"; private final ByteArrayOutputStream outputStream; private final PumpStreamHandler streamHandler; - private final DefaultExecutor executor; + private final DefaultExecutor pmeterExecutor; private final ExecuteWatchdog watchDog; + @Value("${pmeter.nic}") + public String pmeterNic; + Logger logger = LoggerFactory.getLogger(PmeterParser.class); + @Value("${pmeter.carbon.path}") + String pmeterCarbonPath; - 
@Value("${pmeter.report.path}") - String pmeterReportPath; + @Value("${pmeter.carbon.map}") + String pmeterCarbonMapPath; - @Value("${pmeter.interface}") - String pmeterNic; + @Value("${pmeter.report.path}") + String pmeterMetricsPath; @Value("${ods.user}") String odsUser; @@ -46,34 +60,43 @@ public class PmeterParser { @Value("${pmeter.options}") String pmeterOptions; + + @Value("${pmeter.carbon.toggle}") + private boolean toggle; + ObjectMapper pmeterMapper; private CommandLine cmdLine; - @PostConstruct - public void init() { - this.cmdLine = CommandLine.parse( - String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", - pmeterNic, odsUser, - measureCount, pmeterOptions, pmeterReportPath)); - } - public PmeterParser(ObjectMapper pmeterMapper) { + public PmeterParser(Environment environment) { this.outputStream = new ByteArrayOutputStream(); this.streamHandler = new PumpStreamHandler(outputStream); - this.executor = new DefaultExecutor(); - + this.pmeterExecutor = new DefaultExecutor(); this.watchDog = new ExecuteWatchdog(ExecuteWatchdog.INFINITE_TIMEOUT); - executor.setWatchdog(watchDog); - executor.setStreamHandler(streamHandler); + pmeterExecutor.setWatchdog(watchDog); + pmeterExecutor.setStreamHandler(streamHandler); + + this.pmeterMapper = new ObjectMapper(); + this.pmeterMapper.registerModule(new JavaTimeModule()); + this.pmeterMapper.configure(SerializationFeature.WRITE_DATE_KEYS_AS_TIMESTAMPS, false); - this.pmeterMapper = pmeterMapper; + + } + + @PostConstruct + public void init() throws IOException { + if (this.pmeterNic == null || this.pmeterNic.isEmpty()) { + this.pmeterNic = this.discoverActiveNetworkInterface(); + } + logger.info("Interface used for monitoring: {}", this.pmeterNic); + this.cmdLine = CommandLine.parse(String.format("pmeter " + MEASURE + " %s --user %s --measure %s %s --file_name %s", this.pmeterNic, odsUser, measureCount, pmeterOptions, pmeterMetricsPath)); } public void runPmeter() { try { - executor.execute(cmdLine); + pmeterExecutor.execute(cmdLine); } catch (IOException e) { logger.error("Failed in executing pmeter script:\n " + cmdLine); e.printStackTrace(); @@ -81,7 +104,7 @@ public void runPmeter() { } public List parsePmeterOutput() throws IOException { - Path path = Paths.get(pmeterReportPath); + Path path = Paths.get(pmeterMetricsPath); List allLines = Files.readAllLines(path); List ret = new ArrayList<>(); for (String line : allLines) { @@ -92,4 +115,67 @@ public List parsePmeterOutput() throws IOException { path.toFile().createNewFile(); return ret; } + + public CarbonScore carbonAverageTraceRoute(String ip) { + //pmeter carbon 129.114.108.45 + if (this.toggle == false || ip == null || ip.isEmpty()) return new CarbonScore(); + + CommandLine carbonCmd = CommandLine.parse(String.format("pmeter carbon %s", ip)); + try { + DefaultExecutor carbonExecutor = new DefaultExecutor(); + carbonExecutor.execute(carbonCmd); + Path filePath = Paths.get(this.pmeterCarbonPath); + List lines = Files.readAllLines(filePath); + CarbonScore score = new CarbonScore(); + for (String line : lines) { + score = this.pmeterMapper.readValue(line, CarbonScore.class); + break; + } + filePath.toFile().delete(); + filePath.toFile().createNewFile(); + return score; + } catch (IOException e) { + logger.error("Error Carbon Average Trace Route:\n {}", e.getMessage()); + return new CarbonScore(); + } + } + + public List carbonPerIp(String ip) throws IOException { + if (ip == null || ip.isEmpty()) return new ArrayList<>(); + CommandLine carbonCmd = 
CommandLine.parse(String.format("pmeter carbon %s --save_per_ip=True", ip)); + DefaultExecutor carbonExecutor = new DefaultExecutor(); + carbonExecutor.execute(carbonCmd); + Path filePath = Paths.get(this.pmeterCarbonMapPath); + List<String> lines = Files.readAllLines(filePath); + String lastLine = lines.getLast(); + Map<String, Object> measurement = this.pmeterMapper.readValue(lastLine, new TypeReference<Map<String, Object>>() { + }); + List<CarbonIpEntry> retList = new ArrayList<>(); + for (Map.Entry<String, Object> entry : measurement.entrySet()) { + if (entry.getKey().equals("time")) continue; + LinkedHashMap<String, Object> value = (LinkedHashMap<String, Object>) entry.getValue(); + CarbonIpEntry carbonIpEntry = new CarbonIpEntry(); + carbonIpEntry.setIp(entry.getKey()); + carbonIpEntry.setCarbonIntensity((int) value.get("carbon_intensity")); + carbonIpEntry.setLon((double) value.get("lon")); + carbonIpEntry.setLat((double) value.get("lat")); + retList.add(carbonIpEntry); + } + filePath.toFile().delete(); + filePath.toFile().createNewFile(); + return retList; + } + + @SneakyThrows + public String discoverActiveNetworkInterface() { + // open an outbound socket and resolve which local interface carries the default route + try (Socket socket = new Socket("google.com", 80)) { + InetAddress localAddress = socket.getLocalAddress(); + logger.info("Local Address: {}", localAddress.getHostAddress()); + + // Get the network interface for the local address + NetworkInterface networkInterface = NetworkInterface.getByInetAddress(localAddress); + return networkInterface.getName(); + } + } }
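For context, a minimal sketch of how the two carbon helpers above might be invoked (not part of the patch; it assumes PmeterParser is injected as a Spring bean, that the host 129.114.108.45 mentioned in the comment above is reachable, and that CarbonIpEntry exposes Lombok-style getters):

    CarbonScore avg = pmeterParser.carbonAverageTraceRoute("129.114.108.45");
    logger.info("Average carbon intensity along the route: {}", avg.avgCarbon);
    // carbonPerIp throws IOException; error handling is omitted in this sketch
    for (CarbonIpEntry hop : pmeterParser.carbonPerIp("129.114.108.45")) {
        logger.info("hop {} -> intensity {} at ({}, {})", hop.getIp(), hop.getCarbonIntensity(), hop.getLat(), hop.getLon());
    }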
diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java new file mode 100644 index 00000000..8e7f0783 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VaultSSLService.java @@ -0,0 +1,121 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import jakarta.annotation.PostConstruct; +import lombok.Getter; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.core.env.Environment; +import org.springframework.stereotype.Service; +import org.springframework.vault.core.VaultPkiOperations; +import org.springframework.vault.core.VaultTemplate; +import org.springframework.vault.support.VaultIssuerCertificateRequestResponse; + +import java.io.IOException; +import java.io.InputStream; +import java.io.OutputStream; +import java.nio.file.Files; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.nio.file.StandardOpenOption; +import java.security.KeyManagementException; +import java.security.KeyStore; +import java.security.KeyStoreException; +import java.security.NoSuchAlgorithmException; +import java.security.cert.CertificateException; +import java.security.cert.CertificateExpiredException; +import java.security.cert.CertificateNotYetValidException; +import java.security.cert.X509Certificate; +import java.time.Duration; +import java.util.Enumeration; +import java.util.concurrent.Executors; +import java.util.concurrent.ScheduledExecutorService; + +@Service +public class VaultSSLService { + + private final VaultPkiOperations vaultPkiOperations; + @Getter + private final Path storePath; + private final Logger logger = LoggerFactory.getLogger(VaultSSLService.class); + private final String keyStorePassword; + private final ScheduledExecutorService scheduler; + + @Getter + public Duration storeDuration; + + public VaultSSLService(Environment environment, VaultTemplate vaultTemplate) { + this.vaultPkiOperations = vaultTemplate.opsForPki(); + this.storePath = Paths.get(System.getProperty("user.home"), "onedatashare", "ftn", "store", "jobscheduler.truststore.p12"); + this.keyStorePassword = environment.getProperty("hz.keystore.password", "changeit"); + this.storeDuration = Duration.ofDays(1); + this.scheduler = Executors.newScheduledThreadPool(0, Thread.ofVirtual().factory()); + + } + + @PostConstruct + public void init() { + scheduler.scheduleAtFixedRate(() -> { + try { + logger.info("Running Certificate CRON"); + refreshCerts(); + } catch (Exception e) { + logger.error(e.getMessage()); + } + }, 0, Duration.ofMinutes(1).toSeconds(), java.util.concurrent.TimeUnit.SECONDS); + } + + public void refreshCerts() throws KeyStoreException, IOException, NoSuchAlgorithmException, KeyManagementException { + logger.info("Refreshing Certificates"); + KeyStore trustStore = this.readInTrustStore(); + boolean hasValidCerts = this.checkIfCertsAreStillValid(trustStore); + logger.info("Certs are valid: {}", hasValidCerts); + if (trustStore == null || !hasValidCerts) { + VaultIssuerCertificateRequestResponse resp = this.vaultPkiOperations.getIssuerCertificate("7022f845-246c-3836-836f-83936e50b888"); + trustStore = resp.getData().createTrustStore(true); + this.persistStore(trustStore); + } + } + + + private KeyStore readInTrustStore() throws KeyStoreException { + if (Files.exists(storePath)) { + KeyStore keyStore = KeyStore.getInstance("PKCS12"); + try (InputStream inputStream = Files.newInputStream(storePath, StandardOpenOption.READ)) { + keyStore.load(inputStream, this.keyStorePassword.toCharArray()); + return keyStore; + } catch (IOException | CertificateException | NoSuchAlgorithmException e) { + return null; + } + } + return null; + } + + private boolean checkIfCertsAreStillValid(KeyStore keyStore) throws KeyStoreException { + if (keyStore == null) return false; + Enumeration<String> aliases = keyStore.aliases(); + while (aliases.hasMoreElements()) { + String alias = aliases.nextElement(); + X509Certificate certificate = (X509Certificate) keyStore.getCertificate(alias); + try { + certificate.checkValidity(); + } catch (CertificateExpiredException | CertificateNotYetValidException e) { + return false; + } + } + return true; + } + + private void persistStore(KeyStore store) throws IOException, KeyStoreException, NoSuchAlgorithmException { + if (!Files.exists(storePath)) { + Files.createDirectories(storePath.getParent()); + Files.createFile(storePath); + } + try (OutputStream outputStream = Files.newOutputStream(storePath, StandardOpenOption.WRITE, StandardOpenOption.CREATE)) { + logger.debug("Persisting the KeyStore to {}", storePath); + try { + store.store(outputStream, this.keyStorePassword.toCharArray()); + } catch (CertificateException e) { + logger.error("Failed to store certificates into the truststore at {}", storePath, e); + } + } + } +}
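The service above keeps a PKCS12 truststore refreshed from Vault's PKI backend. As a rough illustration only (this is not wired up by the patch), a TLS consumer such as the Hazelcast configuration hinted at by the hz.keystore.password property might load the persisted store like this, using the @Getter-generated getStorePath():

    KeyStore trustStore = KeyStore.getInstance("PKCS12");
    try (InputStream in = Files.newInputStream(vaultSSLService.getStorePath())) {
        trustStore.load(in, keyStorePassword.toCharArray());
    }
    TrustManagerFactory tmf = TrustManagerFactory.getInstance(TrustManagerFactory.getDefaultAlgorithm());
    tmf.init(trustStore);
    SSLContext sslContext = SSLContext.getInstance("TLS");
    sslContext.init(null, tmf.getTrustManagers(), null);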
diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java new file mode 100644 index 00000000..489a1827 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/ExpanderFactory.java @@ -0,0 +1,27 @@ +package org.onedatashare.transferservice.odstransferservice.service.expanders; + +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.springframework.stereotype.Service; + +import java.util.List; + +@Service +public class ExpanderFactory { + + public List<EntityInfo> getExpander(TransferJobRequest.Source source) { + switch (source.getType()) { + case vfs -> { + VfsExpander vfsExpander = new VfsExpander(); + vfsExpander.createClient(source.getVfsSourceCredential()); + return vfsExpander.expandedFileSystem(source.getInfoList(), source.getFileSourcePath()); + } + case http -> { + HttpExpander httpExpander = new HttpExpander(); + httpExpander.createClient(source.getVfsSourceCredential()); + return httpExpander.expandedFileSystem(source.getInfoList(), source.getFileSourcePath()); + } + } + return source.getInfoList(); + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/FileExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/FileExpander.java new file mode 100644 index 00000000..b5e45968 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/FileExpander.java @@ -0,0 +1,15 @@ +package org.onedatashare.transferservice.odstransferservice.service.expanders; + +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; + +import java.util.List; + +public interface FileExpander { + + public void createClient(EndpointCredential credential); + + public List<EntityInfo> expandedFileSystem(List<EntityInfo> userSelectedResources, String basePath); + + +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java new file mode 100644 index 00000000..183c74f8 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/HttpExpander.java @@ -0,0 +1,122 @@ +package org.onedatashare.transferservice.odstransferservice.service.expanders; + +import lombok.SneakyThrows; +import org.jsoup.Jsoup; +import org.jsoup.nodes.Document; +import org.jsoup.nodes.Element; +import org.jsoup.select.Elements; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; +import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.stereotype.Service; + +import java.io.IOException; +import java.net.URL; +import java.net.URLConnection; +import java.nio.file.Path; +import java.nio.file.Paths; +import java.util.ArrayList; +import java.util.List; +import java.util.Stack; + +public class HttpExpander implements FileExpander { + + private AccountEndpointCredential credential; + Logger logger = LoggerFactory.getLogger(HttpExpander.class); + + @Override + public void createClient(EndpointCredential credential) { + this.credential = (AccountEndpointCredential) credential; + logger.info(this.credential.toString()); + + } + + @SneakyThrows + @Override + public List<EntityInfo> expandedFileSystem(List<EntityInfo> userSelectedResources, String basePath) { + List<EntityInfo> filesToSend = new ArrayList<>(); + Stack<Element> directoriesToTraverse = new Stack<>(); + if (basePath.isEmpty()) basePath = "/"; + if (userSelectedResources.isEmpty()) { //no selection was made, so expand the entire server + logger.info("No user-selected resources; expanding the entire server from {}", basePath); + Document doc = Jsoup.connect(this.credential.getUri() + basePath).get();
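+            // Expansion scrapes the server's HTML listing: every anchor in the body is inspected, + // entries whose text ends with "/" are pushed onto directoriesToTraverse for a later pass, + // and plain entries are converted into EntityInfo records via fromElement().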
+            Elements links = doc.select("body a"); + for (Element elem : links) { + if (elem.text().endsWith("/")) { //directory to expand + directoriesToTraverse.push(elem); + } else { //we have a file + filesToSend.add(fromElement(elem, 0)); + } + } + } else { //move only files/folders the user selected + for (EntityInfo selectedFiles : userSelectedResources) { + //we have a folder to transfer + if (selectedFiles.getPath().endsWith("/")) { + Document doc = Jsoup.connect(this.credential.getUri() + basePath + selectedFiles.getPath()) + .ignoreContentType(true) + .get(); + Elements links = doc.select("body a"); + for (Element elem : links) { + if (elem.text().endsWith("/")) { //directory to expand + directoriesToTraverse.push(elem); + } else { //we have a file + filesToSend.add(fromElement(elem, selectedFiles.getChunkSize())); + } + } + } else { + filesToSend.add(this.fileToInfo(this.credential.getUri() + Paths.get(basePath, selectedFiles.getPath()).toString(), selectedFiles.getChunkSize())); + } + } + } + //each queued directory entry holds a name that is appended to the base path before it is fetched + while (!directoriesToTraverse.isEmpty()) { + Element directory = directoriesToTraverse.pop(); + if (directory.text().contains("..") || directory.text().contains(".")) { + continue; + } + Document doc = Jsoup.connect(directory.baseUri() + basePath + directory.text()).get(); + Elements links = doc.select("body a"); + for (Element elem : links) { + if (elem.text().endsWith("/")) { //directory to expand + directoriesToTraverse.push(elem); + } else { //we have a file + filesToSend.add(fromElement(elem, 0)); + } + } + } + return filesToSend; + } + + public EntityInfo fromElement(Element elem, Integer chunkSize) throws IOException { + EntityInfo fileInfo = new EntityInfo(); + if (chunkSize == 0) { + chunkSize = 16000000; + } + URL url = new URL(elem.absUrl("href")); + long fileSize = url.openConnection().getContentLengthLong(); + Path path = Path.of(url.getPath()); + fileInfo.setId(elem.text()); + fileInfo.setSize(fileSize); + fileInfo.setPath(path.toAbsolutePath().toString()); + fileInfo.setChunkSize(chunkSize); + return fileInfo; + } + + public EntityInfo fileToInfo(String strUrl, Integer chunkSize) throws IOException { + if (chunkSize == 0) { + chunkSize = 16000000; + } + EntityInfo fileInfo = new EntityInfo(); + URL url = new URL(strUrl); + URLConnection conn = url.openConnection(); + long fileSize = conn.getContentLengthLong(); + String fileName = Paths.get(conn.getURL().getFile()).getFileName().toString(); + fileInfo.setId(fileName); + fileInfo.setSize(fileSize); + fileInfo.setPath(url.getPath()); + fileInfo.setChunkSize(chunkSize); + return fileInfo; + } +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VfsExpander.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/VfsExpander.java similarity index 69% rename from src/main/java/org/onedatashare/transferservice/odstransferservice/service/VfsExpander.java rename to src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/VfsExpander.java index ebe9eb6c..e0274542 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/VfsExpander.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/expanders/VfsExpander.java @@ -1,6 +1,8 @@ -package org.onedatashare.transferservice.odstransferservice.service; +package org.onedatashare.transferservice.odstransferservice.service.expanders; import
org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; +import org.onedatashare.transferservice.odstransferservice.service.FilePartitioner; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.stereotype.Service; @@ -11,21 +13,22 @@ import java.util.List; import java.util.Stack; -@Service -public class VfsExpander { +public class VfsExpander implements FileExpander { Logger logger; public VfsExpander() { - this.logger = LoggerFactory.getLogger(FilePartitioner.class); + this.logger = LoggerFactory.getLogger(VfsExpander.class); } - public List expandDirectory(List userResources, String basePath) { + public void createClient(EndpointCredential credential) {} + + public List expandedFileSystem(List userSelectedResources, String basePath) { List endList = new ArrayList<>(); Stack traversalStack = new Stack<>(); //only directories on the stack. - logger.info("Expanding files VFS: {}", userResources); - if (userResources.size() == 0) return endList; //this case should never happen. - for (EntityInfo fileInfo : userResources) { + logger.info("Expanding files VFS: {}", userSelectedResources); + if (userSelectedResources.isEmpty()) return endList; //this case should never happen. + for (EntityInfo fileInfo : userSelectedResources) { Path path = Path.of(fileInfo.getPath()); if (path.toFile().isDirectory()) { traversalStack.push(path.toFile()); @@ -39,9 +42,9 @@ public List expandDirectory(List userResources, String b File[] files = resource.listFiles(); if (files == null) continue; for (File file : files) { - if(file.isDirectory()){ + if (file.isDirectory()) { traversalStack.push(file); - }else{ + } else { endList.add(fileToEntity(file, 0)); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java deleted file mode 100644 index a974a836..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/listner/JobCompletionListener.java +++ /dev/null @@ -1,98 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service.listner; - -import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerCreateRequest; -import org.onedatashare.transferservice.odstransferservice.model.optimizer.OptimizerDeleteRequest; -import org.onedatashare.transferservice.odstransferservice.pools.ThreadPoolManager; -import org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; -import org.onedatashare.transferservice.odstransferservice.service.OptimizerService; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.core.JobExecution; -import org.springframework.batch.core.JobExecutionListener; -import org.springframework.beans.factory.annotation.Autowired; -import org.springframework.beans.factory.annotation.Value; -import org.springframework.core.env.Environment; -import org.springframework.scheduling.annotation.Async; -import org.springframework.stereotype.Service; - -import java.time.Duration; -import java.util.Set; - -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.JOB_UUID; 
-import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.OWNER_ID; - - -@Service -public class JobCompletionListener implements JobExecutionListener { - private final ThreadPoolManager threadPoolManager; - private Set jobIds; - Logger logger = LoggerFactory.getLogger(JobCompletionListener.class); - - ConnectionBag connectionBag; - - MetricsCollector metricsCollector; - - OptimizerService optimizerService; - - - @Value("${spring.application.name}") - private String appName; - - @Value("${transfer.service.parallelism}") - int maxParallel; - - @Value("${transfer.service.concurrency}") - int maxConc; - - @Value("${transfer.service.pipelining}") - int maxPipe; - boolean optimizerEnable; - - @Autowired - Environment environment; - - public JobCompletionListener(OptimizerService optimizerService, MetricsCollector metricsCollector, ConnectionBag connectionBag, ThreadPoolManager threadPoolManager, Set jobIds) { - this.optimizerService = optimizerService; - this.metricsCollector = metricsCollector; - this.connectionBag = connectionBag; - this.optimizerEnable = false; - this.threadPoolManager = threadPoolManager; - this.jobIds = jobIds; - } - - - @Override - @Async - public void beforeJob(JobExecution jobExecution) { - logger.info("*****Job Execution start Time***** : {} with jobId={}", jobExecution.getStartTime(), jobExecution.getJobId()); - long fileCount = jobExecution.getJobParameters().getLong(ODSConstants.FILE_COUNT); - this.jobIds.add(jobExecution.getJobId()); - String optimizerType = jobExecution.getJobParameters().getString(ODSConstants.OPTIMIZER); - String jobUuid = jobExecution.getJobParameters().getString(JOB_UUID); - String userId = jobExecution.getJobParameters().getString(OWNER_ID); - if (optimizerType != null) { - if (!optimizerType.equals("None") && !optimizerType.isEmpty()) { - OptimizerCreateRequest createRequest = new OptimizerCreateRequest(userId, appName, maxConc, maxParallel, maxPipe, optimizerType, fileCount, jobExecution.getJobId(), this.environment.getActiveProfiles()[0], jobUuid); - optimizerService.createOptimizerBlocking(createRequest); - this.optimizerEnable = true; - } - } - } - - @Override - @Async - public void afterJob(JobExecution jobExecution) { - logger.info("*****Job Execution End Time**** : {}", jobExecution.getEndTime()); - logger.info("Total Job Time in seconds: {}", Duration.between(jobExecution.getStartTime(), jobExecution.getEndTime()).toSeconds()); - connectionBag.closePools(); - if (this.optimizerEnable) { - this.optimizerService.deleteOptimizerBlocking(new OptimizerDeleteRequest(appName)); - this.optimizerEnable = false; - } - this.threadPoolManager.clearJobPool(); - System.gc(); - } -} - diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3LargeFileWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3LargeFileWriter.java index 80403ba1..6a6178bc 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3LargeFileWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3LargeFileWriter.java @@ -9,7 +9,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.S3ConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import 
org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3Reader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3Reader.java index 21bc5018..36f6e87a 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3Reader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3Reader.java @@ -14,7 +14,6 @@ import org.onedatashare.transferservice.odstransferservice.pools.S3ConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.FilePartitioner; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; -import org.onedatashare.transferservice.odstransferservice.utility.S3Utility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.springframework.batch.core.StepExecution; @@ -48,7 +47,7 @@ public AmazonS3Reader(AccountEndpointCredential sourceCredential, EntityInfo fil @BeforeStep public void beforeStep(StepExecution stepExecution) { this.fileName = this.fileInfo.getId();//For an S3 Reader job this should be the object key - this.amazonS3URI = new AmazonS3URI(S3Utility.constructS3URI(this.sourceCredential.getUri(), this.fileName)); + this.amazonS3URI = new AmazonS3URI(ODSUtility.constructS3URI(this.sourceCredential.getUri(), this.fileName)); this.getSkeleton = new GetObjectRequest(this.amazonS3URI.getBucket(), this.amazonS3URI.getKey()); logger.info("Starting S3 job for file {} with uri {}", this.fileName, this.amazonS3URI); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java index 350c0053..ecc25ad0 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/AmazonS3/AmazonS3SmallFileWriter.java @@ -5,13 +5,13 @@ import com.amazonaws.services.s3.model.PutObjectRequest; import com.amazonaws.services.s3.model.PutObjectResult; import lombok.Setter; -import org.onedatashare.transferservice.odstransferservice.model.AWSSinglePutRequestMetaData; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.SmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.S3ConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import 
org.slf4j.LoggerFactory; @@ -34,7 +34,7 @@ public class AmazonS3SmallFileWriter extends ODSBaseWriter implements ItemWriter private final AccountEndpointCredential destCredential; Logger logger = LoggerFactory.getLogger(AmazonS3SmallFileWriter.class); private String destBasepath; - private AWSSinglePutRequestMetaData putObjectRequest; + private SmallFileUpload smallFileUpload; private AmazonS3 client; @Setter private S3ConnectionPool pool; @@ -46,7 +46,7 @@ public AmazonS3SmallFileWriter(AccountEndpointCredential destCredential, EntityI this.fileName = fileInfo.getId(); this.fileInfo = fileInfo; this.destCredential = destCredential; - this.putObjectRequest = new AWSSinglePutRequestMetaData(); + this.smallFileUpload = new SmallFileUpload(); String[] temp = this.destCredential.getUri().split(":::"); this.bucketName = temp[1]; } @@ -62,11 +62,11 @@ public void beforeStep(StepExecution stepExecution) throws InterruptedException @AfterStep public ExitStatus afterStep(StepExecution stepExecution) { - PutObjectRequest putObjectRequest = new PutObjectRequest(this.bucketName, Paths.get(this.destBasepath, fileName).toString(), this.putObjectRequest.condenseListToOneStream(this.fileInfo.getSize()), makeMetaDataForSinglePutRequest(this.fileInfo.getSize())); + PutObjectRequest putObjectRequest = new PutObjectRequest(this.bucketName, Paths.get(this.destBasepath, fileName).toString(), this.smallFileUpload.condenseListToOneStream(), makeMetaDataForSinglePutRequest(this.fileInfo.getSize())); PutObjectResult result = client.putObject(putObjectRequest); logger.info("Pushed the final chunk of the small file"); logger.info(result.toString()); - this.putObjectRequest.clear(); + this.smallFileUpload.getDataChunkPriorityQueue().clear(); this.pool.returnObject(this.client); return stepExecution.getExitStatus(); } @@ -81,6 +81,6 @@ public ObjectMetadata makeMetaDataForSinglePutRequest(long size) { public void write(Chunk chunk) throws Exception { List items = chunk.getItems(); this.fileName = items.get(0).getFileName(); - this.putObjectRequest.addAllChunks(items); + this.smallFileUpload.addAllChunks(items); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/DynamicStep.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/DynamicStep.java deleted file mode 100644 index 38fb5330..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/DynamicStep.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service.step; - -import org.springframework.batch.core.JobInterruptedException; -import org.springframework.batch.core.Step; -import org.springframework.batch.core.StepExecution; - -public class DynamicStep implements Step { - - @Override - public String getName() { - return null; - } - - @Override - public boolean isAllowStartIfComplete() { - return false; - } - - @Override - public int getStartLimit() { - return 0; - } - - @Override - public void execute(StepExecution stepExecution) throws JobInterruptedException { - - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java index d189f614..293771ce 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java +++ 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ODSBaseWriter.java @@ -2,9 +2,7 @@ import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.annotation.AfterRead; import org.springframework.batch.core.annotation.AfterWrite; @@ -36,7 +34,7 @@ public ODSBaseWriter(MetricsCollector metricsCollector, InfluxCache influxCache) @BeforeWrite public void beforeWrite() { LocalDateTime startWriteTime = LocalDateTime.now(); - this.writeStartTimes.put(Thread.currentThread().getId(), startWriteTime); + this.writeStartTimes.put(Thread.currentThread().threadId(), startWriteTime); } @AfterWrite @@ -44,26 +42,28 @@ public void afterWrite(Chunk<DataChunk> chunk) { List<DataChunk> items = chunk.getItems(); LocalDateTime writeEndTime = LocalDateTime.now(); long totalBytes = items.stream().mapToLong(DataChunk::getSize).sum(); - LocalDateTime writeStartTime = this.writeStartTimes.get(Thread.currentThread().getId()); + long threadId = Thread.currentThread().threadId(); + LocalDateTime writeStartTime = this.writeStartTimes.remove(threadId); //influxCache is an in-memory cache feeding the optimizer; ideally this would be removed and all data querying would go through the monitoring interface instead - influxCache.addMetric(Thread.currentThread().getId(), stepExecution, totalBytes, writeStartTime, writeEndTime, InfluxCache.ThroughputType.WRITER, items.get(0).getSize()); + influxCache.addMetric(threadId, stepExecution, totalBytes, writeStartTime, writeEndTime, InfluxCache.ThroughputType.WRITER, items.get(0).getSize()); } @BeforeRead public void beforeRead() { LocalDateTime startReadTime = LocalDateTime.now(); - this.readStartTimes.put(Thread.currentThread().getId(), startReadTime); + this.readStartTimes.put(Thread.currentThread().threadId(), startReadTime); } @AfterRead public void afterRead(DataChunk item) { LocalDateTime endTime = LocalDateTime.now(); + long threadId = Thread.currentThread().threadId(); if (item == null) { return; } - LocalDateTime readStartTime = this.readStartTimes.get(Thread.currentThread().getId()); + LocalDateTime readStartTime = this.readStartTimes.remove(threadId); if (readStartTime == null) return; - influxCache.addMetric(Thread.currentThread().getId(), stepExecution, item.getSize(), readStartTime, endTime, InfluxCache.ThroughputType.READER, item.getSize()); + influxCache.addMetric(threadId, stepExecution, item.getSize(), readStartTime, endTime, InfluxCache.ThroughputType.READER, item.getSize()); } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/Processor.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/Processor.java deleted file mode 100644 index ff8dd22d..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/Processor.java +++ /dev/null @@ -1,19 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.service.step; - -import org.onedatashare.transferservice.odstransferservice.model.DataChunk; -import org.slf4j.Logger; -import org.slf4j.LoggerFactory; -import org.springframework.batch.item.ItemProcessor; -import
org.springframework.stereotype.Component; - -@Component -public class Processor implements ItemProcessor { - - Logger logger = LoggerFactory.getLogger(Processor.class); - - @Override - public DataChunk process(DataChunk dc) throws Exception { -// System.out.println("Processor :"+dc.getData()); - return dc; - } -} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java new file mode 100644 index 00000000..55fe8e70 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ReaderWriterFactory.java @@ -0,0 +1,147 @@ +package org.onedatashare.transferservice.odstransferservice.service.step; + + +import org.onedatashare.transferservice.odstransferservice.model.DataChunk; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.model.TransferOptions; +import org.onedatashare.transferservice.odstransferservice.service.ConnectionBag; +import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3LargeFileWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3Reader; +import org.onedatashare.transferservice.odstransferservice.service.step.AmazonS3.AmazonS3SmallFileWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxReader; +import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterLargeFile; +import org.onedatashare.transferservice.odstransferservice.service.step.box.BoxWriterSmallFile; +import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxChunkedWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxReader; +import org.onedatashare.transferservice.odstransferservice.service.step.dropbox.DropBoxWriterSmallFile; +import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPReader; +import org.onedatashare.transferservice.odstransferservice.service.step.ftp.FTPWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveReader; +import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveResumableWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.googleDrive.GDriveSimpleWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.http.HttpReader; +import org.onedatashare.transferservice.odstransferservice.service.step.scp.SCPReader; +import org.onedatashare.transferservice.odstransferservice.service.step.scp.SCPWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.sftp.SFTPReader; +import org.onedatashare.transferservice.odstransferservice.service.step.sftp.SFTPWriter; +import org.onedatashare.transferservice.odstransferservice.service.step.vfs.VfsReader; +import org.onedatashare.transferservice.odstransferservice.service.step.vfs.VfsWriter; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemWriter; +import org.springframework.stereotype.Service; + +import static 
org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.FIVE_MB; +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.TWENTY_MB; + +@Service +public class ReaderWriterFactory { + + private final ConnectionBag connectionBag; + private final InfluxCache influxCache; + private final MetricsCollector metricsCollector; + + public ReaderWriterFactory(ConnectionBag connectionBag, InfluxCache influxCache, MetricsCollector metricsCollector) { + this.connectionBag = connectionBag; + this.influxCache = influxCache; + this.metricsCollector = metricsCollector; + } + + public ItemReader getRightReader(TransferJobRequest.Source source, EntityInfo fileInfo, TransferOptions transferOptions) { + switch (source.getType()) { + case http: + HttpReader hr = new HttpReader(fileInfo, source.getVfsSourceCredential()); + hr.setPool(connectionBag.getHttpReaderPool()); + return hr; + case vfs: + VfsReader vfsReader = new VfsReader(source.getVfsSourceCredential(), fileInfo); + return vfsReader; + case sftp: + SFTPReader sftpReader = new SFTPReader(source.getVfsSourceCredential(), fileInfo, transferOptions.getPipeSize()); + sftpReader.setPool(connectionBag.getSftpReaderPool()); + return sftpReader; + case ftp: + FTPReader ftpReader = new FTPReader(source.getVfsSourceCredential(), fileInfo); + ftpReader.setPool(connectionBag.getFtpReaderPool()); + return ftpReader; + case s3: + AmazonS3Reader amazonS3Reader = new AmazonS3Reader(source.getVfsSourceCredential(), fileInfo); + amazonS3Reader.setPool(connectionBag.getS3ReaderPool()); + return amazonS3Reader; + case box: + BoxReader boxReader = new BoxReader(source.getOauthSourceCredential(), fileInfo); + boxReader.setMaxRetry(transferOptions.getRetry()); + return boxReader; + case dropbox: + DropBoxReader dropBoxReader = new DropBoxReader(source.getOauthSourceCredential(), fileInfo); + return dropBoxReader; + case scp: + SCPReader reader = new SCPReader(fileInfo); + reader.setPool(connectionBag.getSftpReaderPool()); + return reader; + case gdrive: + GDriveReader dDriveReader = new GDriveReader(source.getOauthSourceCredential(), fileInfo); + return dDriveReader; + } + return null; + } + + public ItemWriter getRightWriter(TransferJobRequest.Destination destination, EntityInfo fileInfo) { + switch (destination.getType()) { + case vfs: + VfsWriter vfsWriter = new VfsWriter(destination.getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + return vfsWriter; + case sftp: + SFTPWriter sftpWriter = new SFTPWriter(destination.getVfsDestCredential(), this.metricsCollector, this.influxCache); + sftpWriter.setPool(connectionBag.getSftpWriterPool()); + return sftpWriter; + case ftp: + FTPWriter ftpWriter = new FTPWriter(destination.getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + ftpWriter.setPool(connectionBag.getFtpWriterPool()); + return ftpWriter; + case s3: + if (fileInfo.getSize() < TWENTY_MB) { + AmazonS3SmallFileWriter amazonS3SmallFileWriter = new AmazonS3SmallFileWriter(destination.getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + amazonS3SmallFileWriter.setPool(connectionBag.getS3WriterPool()); + return amazonS3SmallFileWriter; + } else { + AmazonS3LargeFileWriter amazonS3LargeFileWriter = new AmazonS3LargeFileWriter(destination.getVfsDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + amazonS3LargeFileWriter.setPool(connectionBag.getS3WriterPool()); + return amazonS3LargeFileWriter; + } + case box: + if (fileInfo.getSize() < 
TWENTY_MB) { + BoxWriterSmallFile boxWriterSmallFile = new BoxWriterSmallFile(destination.getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + return boxWriterSmallFile; + } else { + BoxWriterLargeFile boxWriterLargeFile = new BoxWriterLargeFile(destination.getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + return boxWriterLargeFile; + } + case dropbox: + final long DROPBOX_SINGLE_UPLOAD_LIMIT = 150L * 1024L * 1024L; + if (fileInfo.getSize() < DROPBOX_SINGLE_UPLOAD_LIMIT){ + return new DropBoxWriterSmallFile(destination.getOauthDestCredential(), fileInfo, this.metricsCollector, this.influxCache); + }else { + return new DropBoxChunkedWriter(destination.getOauthDestCredential(), this.metricsCollector, this.influxCache); + } + case scp: + SCPWriter scpWriter = new SCPWriter(fileInfo, this.metricsCollector, this.influxCache); + scpWriter.setPool(connectionBag.getSftpWriterPool()); + return scpWriter; + case gdrive: + if (fileInfo.getSize() < FIVE_MB) { + GDriveSimpleWriter writer = new GDriveSimpleWriter(destination.getOauthDestCredential(), fileInfo); + return writer; + } else { + GDriveResumableWriter writer = new GDriveResumableWriter(destination.getOauthDestCredential(), fileInfo); + writer.setPool(connectionBag.getGoogleDriveWriterPool()); + return writer; + } + } + return null; + } + + +} diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java index ae58c6dc..fe31032c 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxReader.java @@ -10,8 +10,6 @@ import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -import org.springframework.batch.core.StepExecution; -import org.springframework.batch.core.annotation.BeforeStep; import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; import org.springframework.util.ClassUtils; @@ -19,13 +17,13 @@ public class BoxReader extends AbstractItemCountingItemStreamItemReader { - private OAuthEndpointCredential credential; FilePartitioner filePartitioner; - private BoxAPIConnection boxAPIConnection; - private BoxFile currentFile; EntityInfo fileInfo; int retry; Logger logger = LoggerFactory.getLogger(BoxReader.class); + private final OAuthEndpointCredential credential; + private BoxAPIConnection boxAPIConnection; + private BoxFile currentFile; public BoxReader(OAuthEndpointCredential credential, EntityInfo fileInfo) { this.credential = credential; @@ -45,7 +43,11 @@ protected DataChunk doRead() { FilePart filePart = filePartitioner.nextPart(); if (filePart == null) return null; ByteArrayOutputStream byteArray = new ByteArrayOutputStream(); - this.currentFile.downloadRange(byteArray, filePart.getStart(), filePart.getEnd()); + if (this.fileInfo.getSize() == this.fileInfo.getChunkSize()) { + this.currentFile.download(byteArray); + } else { + this.currentFile.downloadRange(byteArray, filePart.getStart(), filePart.getEnd()); + } DataChunk chunk = ODSUtility.makeChunk(filePart.getSize(), byteArray.toByteArray(), filePart.getStart(), Math.toIntExact(filePart.getPartIdx()), currentFile.getInfo().getName()); logger.info(chunk.toString()); return chunk; diff --git 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterLargeFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterLargeFile.java index a7dfc847..25781c92 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterLargeFile.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterLargeFile.java @@ -8,7 +8,7 @@ import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -34,14 +34,14 @@ */ public class BoxWriterLargeFile extends ODSBaseWriter implements ItemWriter { - private BoxAPIConnection boxAPIConnection; EntityInfo fileInfo; - private HashMap fileMap; - private HashMap digestMap; - private List parts; String destinationBasePath; BoxFolder boxFolder; Logger logger = LoggerFactory.getLogger(BoxWriterLargeFile.class); + private final BoxAPIConnection boxAPIConnection; + private final HashMap fileMap; + private final HashMap digestMap; + private final List parts; public BoxWriterLargeFile(OAuthEndpointCredential oAuthDestCredential, EntityInfo fileInfo, MetricsCollector metricsCollector, InfluxCache influxCache) { super(metricsCollector, influxCache); @@ -93,10 +93,7 @@ private void prepareForUpload(String fileName) throws NoSuchAlgorithmException { * @return */ private boolean ready(String fileName) { - if (!this.fileMap.containsKey(fileName) || !this.digestMap.containsKey(fileName)) { - return false; - } - return true; + return this.fileMap.containsKey(fileName) && this.digestMap.containsKey(fileName); } /** @@ -116,7 +113,7 @@ public void write(Chunk chunk) throws Exception { BoxFileUploadSessionPart part = session.uploadPart(dataChunk.getData(), dataChunk.getStartPosition(), Long.valueOf(dataChunk.getSize()).intValue(), this.fileInfo.getSize()); this.parts.add(part); digest.update(dataChunk.getData()); - logger.info("Current chunk in BoxLargeFile Writer " + dataChunk.toString()); + logger.info("Current chunk in BoxLargeFile Writer " + dataChunk); } this.digestMap.put(fileName, digest); diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java index 6f6f6134..5e69a6d4 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/box/BoxWriterSmallFile.java @@ -1,14 +1,15 @@ package org.onedatashare.transferservice.odstransferservice.service.step.box; + +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.*; + +import java.util.List; -import com.box.sdk.BoxAPIConnection; -import com.box.sdk.BoxFileUploadSession; -import com.box.sdk.BoxFolder; -import org.onedatashare.transferservice.odstransferservice.model.BoxSmallFileUpload; import 
org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.SmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; @@ -19,26 +20,24 @@ import org.springframework.batch.item.Chunk; import org.springframework.batch.item.ItemWriter; -import java.util.HashMap; -import java.util.List; - -import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.DEST_BASE_PATH; +import com.box.sdk.BoxAPIConnection; +import com.box.sdk.BoxFolder; public class BoxWriterSmallFile extends ODSBaseWriter implements ItemWriter { - private BoxAPIConnection boxAPIConnection; EntityInfo fileInfo; String destinationBasePath; BoxFolder boxFolder; - BoxSmallFileUpload smallFileUpload; - private String fileName; + SmallFileUpload smallFileUpload; Logger logger = LoggerFactory.getLogger(BoxWriterSmallFile.class); + private final BoxAPIConnection boxAPIConnection; + private String fileName; public BoxWriterSmallFile(OAuthEndpointCredential credential, EntityInfo fileInfo, MetricsCollector metricsCollector, InfluxCache influxCache) { super(metricsCollector, influxCache); this.boxAPIConnection = new BoxAPIConnection(credential.getToken()); this.fileInfo = fileInfo; - smallFileUpload = new BoxSmallFileUpload(); + smallFileUpload = new SmallFileUpload(); } @BeforeStep @@ -55,7 +54,7 @@ public void beforeStep(StepExecution stepExecution) { */ @AfterStep public ExitStatus afterStep(StepExecution stepExecution) { - boxFolder.uploadFile(this.smallFileUpload.condenseListToOneStream(this.fileInfo.getSize()), fileName); + boxFolder.uploadFile(this.smallFileUpload.condenseListToOneStream(), fileName); return stepExecution.getExitStatus(); } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxChunkedWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxChunkedWriter.java index 92cc3654..f182a65b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxChunkedWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxChunkedWriter.java @@ -9,7 +9,7 @@ import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.springframework.batch.core.ExitStatus; diff --git 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java new file mode 100644 index 00000000..18b1e051 --- /dev/null +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/dropbox/DropBoxWriterSmallFile.java @@ -0,0 +1,71 @@ +package org.onedatashare.transferservice.odstransferservice.service.step.dropbox; + +import com.dropbox.core.DbxRequestConfig; +import com.dropbox.core.v2.DbxClientV2; +import com.dropbox.core.v2.files.WriteMode; +import org.onedatashare.transferservice.odstransferservice.model.DataChunk; +import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; +import org.onedatashare.transferservice.odstransferservice.model.SmallFileUpload; +import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; +import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; +import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; +import org.springframework.batch.core.ExitStatus; +import org.springframework.batch.core.StepExecution; +import org.springframework.batch.core.annotation.AfterStep; +import org.springframework.batch.core.annotation.BeforeStep; +import org.springframework.batch.item.Chunk; +import org.springframework.batch.item.ItemWriter; + +import java.io.InputStream; +import java.util.List; + +import static org.onedatashare.transferservice.odstransferservice.constant.ODSConstants.DEST_BASE_PATH; + +public class DropBoxWriterSmallFile extends ODSBaseWriter implements ItemWriter { + + EntityInfo fileInfo; + String destinationBasePath; + SmallFileUpload smallFileUpload; + DbxClientV2 dropboxClient; + Logger logger = LoggerFactory.getLogger(DropBoxWriterSmallFile.class); + private String fileName; + + public DropBoxWriterSmallFile(OAuthEndpointCredential credential, EntityInfo fileInfo, MetricsCollector metricsCollector, InfluxCache influxCache) { + super(metricsCollector, influxCache); + this.dropboxClient = new DbxClientV2(ODSUtility.dbxRequestConfig, credential.getToken()); + this.fileInfo = fileInfo; + smallFileUpload = new SmallFileUpload(); + } + + @BeforeStep + public void beforeStep(StepExecution stepExecution) { + this.destinationBasePath = stepExecution.getJobParameters().getString(DEST_BASE_PATH); + this.stepExecution = stepExecution; + } + + @AfterStep + public ExitStatus afterStep(StepExecution stepExecution) throws Exception { + try (InputStream inputStream = this.smallFileUpload.condenseListToOneStream()) { + dropboxClient.files().uploadBuilder(destinationBasePath + "/" + fileName) + .withMode(WriteMode.ADD) + .uploadAndFinish(inputStream); + } catch (Exception e) { + logger.error("Error uploading file to Dropbox: ", e); + return ExitStatus.FAILED; + } + return stepExecution.getExitStatus(); + } + + + @Override + public void write(Chunk chunk) throws Exception { + List items = chunk.getItems(); + this.fileName = items.get(0).getFileName(); + this.smallFileUpload.addAllChunks(items); + logger.info("Small file Dropbox writer wrote {} DataChunks", items.size()); + } +} \ No newline at end of file diff --git 
a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ftp/FTPWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ftp/FTPWriter.java index 184fe2c7..8c5ceafa 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ftp/FTPWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/ftp/FTPWriter.java @@ -8,7 +8,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.FtpConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/googleDrive/GDriveSimpleWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/googleDrive/GDriveSimpleWriter.java index d9096efc..2b15ad85 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/googleDrive/GDriveSimpleWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/googleDrive/GDriveSimpleWriter.java @@ -6,7 +6,7 @@ import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; -import org.onedatashare.transferservice.odstransferservice.model.FileBuffer; +import org.onedatashare.transferservice.odstransferservice.model.SmallFileUpload; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; import org.slf4j.Logger; @@ -32,7 +32,7 @@ public class GDriveSimpleWriter implements ItemWriter { private final OAuthEndpointCredential credential; Drive client; private String basePath; - FileBuffer fileBuffer; + SmallFileUpload smallFileUpload; private String fileName; private String mimeType; private File fileMetaData; @@ -41,7 +41,7 @@ public class GDriveSimpleWriter implements ItemWriter { public GDriveSimpleWriter(OAuthEndpointCredential credential, EntityInfo fileInfo) { this.credential = credential; this.fileInfo = fileInfo; - this.fileBuffer = new FileBuffer(); + this.smallFileUpload = new SmallFileUpload(); } @BeforeStep @@ -60,7 +60,7 @@ public void beforeWrite(List items) { @Override public void write(Chunk items) { - fileBuffer.addAllChunks(items.getItems()); + this.smallFileUpload.addAllChunks(items.getItems()); } @@ -68,7 +68,7 @@ public void write(Chunk items) { public void afterStep() throws Exception { try { logger.debug("Transferring file to the server"); - InputStream inputStream = this.fileBuffer.condenseListToOneStream(this.fileInfo.getSize()); + InputStream inputStream = this.smallFileUpload.condenseListToOneStream(); InputStreamContent inputStreamContent = new InputStreamContent(this.mimeType, inputStream); this.fileMetaData = new File() .setName(this.fileName) @@ -86,8 +86,7 @@ public void afterStep() throws Exception { throw e; } this.client = null; 
- this.fileBuffer.clear(); - this.fileBuffer = null; - + this.smallFileUpload.clearBuffer(); + this.smallFileUpload = null; } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java index 4fbcaae2..6ab66b85 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/http/HttpReader.java @@ -10,12 +10,15 @@ import org.onedatashare.transferservice.odstransferservice.pools.HttpConnectionPool; import org.onedatashare.transferservice.odstransferservice.service.FilePartitioner; import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.JobParameters; import org.springframework.batch.core.StepExecution; import org.springframework.batch.core.annotation.AfterStep; import org.springframework.batch.core.annotation.BeforeStep; -import org.springframework.batch.item.*; +import org.springframework.batch.item.ItemReader; +import org.springframework.batch.item.ItemStreamException; import java.io.IOException; import java.net.URI; @@ -38,6 +41,7 @@ public class HttpReader implements SetPool, ItemReader { AccountEndpointCredential sourceCred; Boolean compressable; private String uri; + Logger logger; public HttpReader(EntityInfo fileInfo, AccountEndpointCredential credential) { @@ -46,6 +50,7 @@ public HttpReader(EntityInfo fileInfo, AccountEndpointCredential credential) { this.filePartitioner = new FilePartitioner(fileInfo.getChunkSize()); this.sourceCred = credential; this.range = true; + this.logger = LoggerFactory.getLogger(HttpReader.class); } @BeforeStep @@ -92,7 +97,7 @@ public HttpRequest rangeMode(String uri, FilePart filePart, boolean valid) { } @Override - public DataChunk read() throws Exception, UnexpectedInputException, ParseException, NonTransientResourceException { + public DataChunk read() throws IOException, InterruptedException { FilePart filePart = this.filePartitioner.nextPart(); if (filePart == null) return null; HttpRequest request; @@ -102,9 +107,7 @@ public DataChunk read() throws Exception, UnexpectedInputException, ParseExcepti request = rangeMode(uri, filePart, this.range); } HttpResponse response = client.send(request, HttpResponse.BodyHandlers.ofByteArray()); - System.out.println(Thread.currentThread().toString() + "File Chunk: " + filePart.toString()); - DataChunk chunk = ODSUtility.makeChunk(response.body().length, response.body(), filePart.getStart(), Long.valueOf(filePart.getPartIdx()).intValue(), this.fileName); - return chunk; + return ODSUtility.makeChunk(response.body().length, response.body(), filePart.getStart(), Long.valueOf(filePart.getPartIdx()).intValue(), this.fileName); } public void open() throws ItemStreamException { @@ -115,20 +118,6 @@ public void open() throws ItemStreamException { } String filePath = Paths.get(fileInfo.getPath()).toString(); uri = sourceCred.getUri() + filePath; -// HttpRequest request = HttpRequest.newBuilder() -// .GET() -// .uri(URI.create(uri)) //make http a string constant as well -// .setHeader(ODSConstants.ACCEPT_ENCODING, ODSConstants.GZIP) -// .setHeader(ODSConstants.RANGE, String.format(ODSConstants.byteRange, 0, 1)) //make Range into a string constant as well as bytes -// 
.build(); -// HttpResponse response = null; -// try { -// response = client.send(request, HttpResponse.BodyHandlers.ofByteArray()); -// } catch (IOException | InterruptedException e) { -// throw new RuntimeException(e); -// } -// range = response.statusCode() == 206; -// compressable = response.headers().allValues(ODSConstants.CONTENT_ENCODING).size() != 0; } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/scp/SCPWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/scp/SCPWriter.java index 100a9a80..cc672f32 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/scp/SCPWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/scp/SCPWriter.java @@ -10,7 +10,7 @@ import org.onedatashare.transferservice.odstransferservice.model.SetPool; import org.onedatashare.transferservice.odstransferservice.pools.JschSessionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/sftp/SFTPWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/sftp/SFTPWriter.java index 4525882c..5e842279 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/sftp/SFTPWriter.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/sftp/SFTPWriter.java @@ -8,7 +8,7 @@ import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.pools.JschSessionPool; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java index 89e7b0db..25f235dd 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsReader.java @@ -13,7 +13,6 @@ import org.springframework.batch.core.annotation.BeforeStep; import org.springframework.batch.item.support.AbstractItemCountingItemStreamItemReader; import org.springframework.util.ClassUtils; -import sun.misc.Unsafe; import java.io.IOException; import java.nio.ByteBuffer; diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java index bf9e76bd..78f2a8af 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java +++ 
b/src/main/java/org/onedatashare/transferservice/odstransferservice/service/step/vfs/VfsWriter.java @@ -4,7 +4,7 @@ import org.onedatashare.transferservice.odstransferservice.model.EntityInfo; import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; import org.onedatashare.transferservice.odstransferservice.service.InfluxCache; -import org.onedatashare.transferservice.odstransferservice.service.cron.MetricsCollector; +import org.onedatashare.transferservice.odstransferservice.service.MetricsCollector; import org.onedatashare.transferservice.odstransferservice.service.step.ODSBaseWriter; import org.springframework.batch.core.ExitStatus; import org.springframework.batch.core.StepExecution; @@ -70,8 +70,7 @@ public void write(Chunk chunks) throws Exception { for (int i = 0; i < items.size(); i++) { DataChunk chunk = items.get(i); int bytesWritten = this.fileChannel.write(ByteBuffer.wrap(chunk.getData()), chunk.getStartPosition()); - if (chunk.getSize() != bytesWritten) - chunk = null; + chunk = null; } } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java index 48295873..8f31d52b 100644 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java +++ b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/ODSUtility.java @@ -10,11 +10,13 @@ import com.google.api.services.drive.model.FileList; import org.onedatashare.transferservice.odstransferservice.Enum.EndpointType; import org.onedatashare.transferservice.odstransferservice.model.DataChunk; +import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; +import org.onedatashare.transferservice.odstransferservice.model.credential.EndpointCredential; import org.onedatashare.transferservice.odstransferservice.model.credential.OAuthEndpointCredential; -import org.springframework.beans.factory.annotation.Value; import java.io.ByteArrayInputStream; import java.io.IOException; +import java.net.URI; import java.security.GeneralSecurityException; import java.util.Arrays; import java.util.HashSet; @@ -23,8 +25,10 @@ public class ODSUtility { private static String odsClientID = "OneDataShare-DIDCLab"; - private static String gDriveClientId= System.getenv("ODS_GDRIVE_CLIENT_ID"); + // @Value("${gdrive.client.id}") + private static String gDriveClientId = System.getenv("ODS_GDRIVE_CLIENT_ID"); + // @Value("${gdrive.client.secret}") private static String gDriveClientSecret = System.getenv("ODS_GDRIVE_CLIENT_SECRET"); // @Value("${gdrive.appname}") @@ -32,15 +36,6 @@ public class ODSUtility { public static DbxRequestConfig dbxRequestConfig = DbxRequestConfig.newBuilder(odsClientID).build(); - public static DataChunk makeChunk(int size, byte[] data, int startPosition, int chunkIdx, String fileName) { - DataChunk dataChunk = new DataChunk(); - dataChunk.setStartPosition(startPosition); - dataChunk.setChunkIdx(chunkIdx); - dataChunk.setFileName(fileName); - dataChunk.setData(data); - dataChunk.setSize(size); - return dataChunk; - } public static DataChunk makeChunk(long size, byte[] data, long startPosition, int chunkIdx, String fileName) { DataChunk dataChunk = new DataChunk(); dataChunk.setStartPosition(startPosition); @@ -52,8 +47,6 @@ public static DataChunk makeChunk(long size, byte[] data, long startPosition, in } public static Drive 
authenticateDriveClient(OAuthEndpointCredential oauthCred) throws GeneralSecurityException, IOException { - System.out.println(gDriveClientId); - System.out.println(gDriveClientSecret); GoogleCredential credential1 = new GoogleCredential.Builder().setJsonFactory(GsonFactory.getDefaultInstance()) .setClientSecrets(gDriveClientId, gDriveClientSecret) .setTransport(GoogleNetHttpTransport.newTrustedTransport()).build(); @@ -70,15 +63,15 @@ public static File gdriveMakeDir(String basePath, Drive client) throws IOExcepti .setFields("nextPageToken, files(id,name)") .setSpaces("drive"); FileList files = request.execute(); - for(File file : files.getFiles()){ - if(file.getId().equals(basePath)){ + for (File file : files.getFiles()) { + if (file.getId().equals(basePath)) { return file; } } File fileMetadata = new File(); - File ret= new File(); + File ret = new File(); String[] path = basePath.split("/"); - for(String mini: path){ + for (String mini : path) { fileMetadata.setName(mini); fileMetadata.setMimeType("application/vnd.google-apps.folder"); ret = client.files().create(fileMetadata) @@ -97,7 +90,7 @@ public static UploadPartRequest makePartRequest(DataChunk dataChunk, String buck uploadPartRequest.setUploadId(uploadId); uploadPartRequest.setKey(key); // uploadPartRequest.setFileOffset(dataChunk.getStartPosition()); - uploadPartRequest.setPartNumber(dataChunk.getChunkIdx()+1); //by default we start from chunks 0-N but AWS SDK must have 1-10000 so we just add 1 + uploadPartRequest.setPartNumber(dataChunk.getChunkIdx() + 1); //by default we start from chunks 0-N but AWS SDK must have 1-10000 so we just add 1 uploadPartRequest.setPartSize(dataChunk.getSize()); return uploadPartRequest; } @@ -105,4 +98,38 @@ public static UploadPartRequest makePartRequest(DataChunk dataChunk, String buck public static final EndpointType[] SEEKABLE_PROTOCOLS = new EndpointType[]{EndpointType.s3, EndpointType.vfs, EndpointType.http, EndpointType.box}; public static final HashSet fullyOptimizableProtocols = new HashSet(Arrays.asList(SEEKABLE_PROTOCOLS)); + + public static String uriFromEndpointCredential(EndpointCredential credential, EndpointType type) { + AccountEndpointCredential ac; + switch (type) { + case ftp: + case sftp: + case scp: + case http: + ac = (AccountEndpointCredential) credential; + URI uri = URI.create(ac.getUri()); + return uri.getHost(); + case s3: + ac = (AccountEndpointCredential) credential; + URI s3Uri = URI.create(constructS3URI(ac.getUri(), "")); + return s3Uri.getHost(); + case box: + return "box.com"; + case dropbox: + return "dropbox.com"; + case gdrive: + return "drive.google.com"; + default: + return ""; + } + } + + public static String constructS3URI(String uri, String fileKey) { + StringBuilder builder = new StringBuilder(); + String[] temp = uri.split(":::"); + String bucketName = temp[1]; + String region = temp[0]; + builder.append("https://").append(bucketName).append(".").append("s3.").append(region).append(".").append("amazonaws.com/").append(fileKey); + return builder.toString(); + } } diff --git a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/S3Utility.java b/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/S3Utility.java deleted file mode 100644 index c4f429da..00000000 --- a/src/main/java/org/onedatashare/transferservice/odstransferservice/utility/S3Utility.java +++ /dev/null @@ -1,28 +0,0 @@ -package org.onedatashare.transferservice.odstransferservice.utility; - -import com.amazonaws.auth.AWSCredentials; -import 
com.amazonaws.auth.AWSStaticCredentialsProvider; -import com.amazonaws.auth.BasicAWSCredentials; -import com.amazonaws.services.s3.AmazonS3; -import com.amazonaws.services.s3.AmazonS3ClientBuilder; -import org.onedatashare.transferservice.odstransferservice.model.credential.AccountEndpointCredential; - -public class S3Utility { - - public static AmazonS3 constructClient(AccountEndpointCredential credential, String region){ - AWSCredentials credentials = new BasicAWSCredentials(credential.getUsername(), credential.getSecret()); - return AmazonS3ClientBuilder.standard() - .withCredentials(new AWSStaticCredentialsProvider(credentials)) - .withRegion(region) - .build(); - } - - public static String constructS3URI(String uri, String fileKey){ - StringBuilder builder = new StringBuilder(); - String[] temp = uri.split(":::"); - String bucketName = temp[1]; - String region = temp[0]; - builder.append("https://").append(bucketName).append(".").append("s3.").append(region).append(".").append("amazonaws.com/").append(fileKey); - return builder.toString(); - } -} diff --git a/src/main/resources/application-cockroach.properties b/src/main/resources/application-cockroach.properties index d791511b..8cd814ae 100644 --- a/src/main/resources/application-cockroach.properties +++ b/src/main/resources/application-cockroach.properties @@ -4,7 +4,12 @@ spring.datasource.driver-class-name=org.postgresql.Driver spring.datasource.url=jdbc:${COCKROACH_URI:postgresql://localhost:26257/job_details?sslmode=disable} spring.datasource.username=${COCKROACH_USER:root} spring.datasource.password=${COCKROACH_PASS:root} -spring.datasource.hikari.maximum-pool-size=${HIKARI_POOL_SIZE:10000} +#Hikari sizing controls +spring.datasource.hikari.minimum-idle=5 +spring.datasource.hikari.maximum-pool-size=${HIKARI_POOL_SIZE:1024} +spring.datasource.hikari.idle-timeout=100000 +spring.datasource.hikari.max-lifetime=1800000 +spring.datasource.hikari.connection-timeout=30000 spring.jpa.show-sql=true spring.jpa.properties.hibernate.format_sql=true diff --git a/src/main/resources/application-hsql.properties b/src/main/resources/application-hsql.properties index c48263a9..ced5e134 100644 --- a/src/main/resources/application-hsql.properties +++ b/src/main/resources/application-hsql.properties @@ -1,11 +1,3 @@ -#spring.datasource.driver-class-name=org.hsqldb.jdbc.JDBCDriver -##spring.datasource.url=jdbc:hsqldb:mem:testdb;DB_CLOSE_DELAY=-1; -#spring.datasource.url= -#spring.datasource.username=SA -#spring.datasource.password= -#spring.jpa.hibernate.ddl-auto=create -#spring.batch.jdbc.initialize-schema=always - spring.datasource.url=jdbc:hsqldb:mem:testdb;sql.enforce_strict_size=true;hsqldb.tx=mvcc;DB_CLOSE_DELAY=-1 spring.datasource.username=sa spring.datasource.password= diff --git a/src/main/resources/application-platform.properties b/src/main/resources/application-platform.properties new file mode 100644 index 00000000..d0122620 --- /dev/null +++ b/src/main/resources/application-platform.properties @@ -0,0 +1 @@ +spring.threads.virtual.enabled=false \ No newline at end of file diff --git a/src/main/resources/application-virtual.properties b/src/main/resources/application-virtual.properties new file mode 100644 index 00000000..0daaddc4 --- /dev/null +++ b/src/main/resources/application-virtual.properties @@ -0,0 +1 @@ +spring.threads.virtual.enabled=true \ No newline at end of file diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties index a8651907..bdee6f0c 100644 --- 
a/src/main/resources/application.properties +++ b/src/main/resources/application.properties @@ -1,56 +1,40 @@ -spring.application.name=${APP_NAME:ODS-Transfer-Node} -ods.user=${USER_NAME} - +spring.application.name=${APP_NAME:ODSTransferService} +ods.user=${USER_NAME:OneDataShare} server.port=8092 - -spring.threads.virtual.enabled=true +server.shutdown=graceful spring.main.allow-bean-definition-overriding=true -#Eureka config -eureka.client.enabled=true -eureka.client.serviceUrl.defaultZone=http://${EUREKA_USER:admin}:${EUREKA_PASS:admin}@${EUREKA_URI:localhost:8090}/eureka -eureka.client.healthcheck.enabled=true -#eureka.client.registry-fetch-interval-seconds=5 -#eureka.instance.leaseRenewalIntervalInSeconds=10 -#eureka.instance.metadata-map.startup=${random.int} +gdrive.client.id=${ODS_GDRIVE_CLIENT_ID} +gdrive.client.secret=${ODS_GDRIVE_CLIENT_SECRET} #SBA management.endpoints.web.exposure.include=* +eureka.client.enabled=false + #Ignore non-null values spring.jackson.default-property-inclusion=NON_NULL - -spring.datasource.hikari.maximum-pool-size=${HIKARI_POOL_SIZE:10000} spring.batch.job.enabled=false -#RabitMQ -ods.rabbitmq.exchange=ods.exchange - -#for vfs nodes this should be the APP_NAME which is always lowercase. -ods.rabbitmq.queue=${CONNECTOR_QUEUE:transferQueue} -ods.rabbitmq.routingkey=${CONNECTOR_QUEUE:ods.routing} -spring.rabbitmq.addresses=${AMPQ_ADDRESS} -spring.rabbitmq.port=${AMPQ_PORT:5672} -spring.rabbitmq.username=${AMPQ_USER:guest} -spring.rabbitmq.password=${AMPQ_PWD:guest} -spring.rabbitmq.listener.direct.prefetch=1 - -#optimizer -optimizer.url=${OPTIMIZER_URL:http://localhost:8088} - #pmeter pmeter.report.path=${PMETER_REPORT_PATH:${HOME}/.pmeter/transfer_service_pmeter_measure.txt} pmeter.cron.run=${ENABLE_PMETER:false} -pmeter.interface=${PMETER_NIC_INTERFACE:awdl0} pmeter.measure=${PMETER_MEASURE:1} pmeter.options=${PMETER_CLI_OPTIONS:"-KNS"} pmeter.cron.expression=${PMETER_CRON_EXP:*/5 * * * * *} +pmeter.carbon.path=${PMETER_CARBON_PATH:${HOME}/.pmeter/carbon_pmeter.txt} +pmeter.carbon.map=${PMETER_CARBON_MAP:${HOME}/.pmeter/carbon_ip_map.json} +pmeter.carbon.toggle=${REPORT_CARBON:false} +pmeter.nic=${PMETER_NIC_INTERFACE:#{null}} #influx ods.influx.uri=${INFLUX_URI:https://influxdb.onedatashare.org} -ods.influx.bucket=${INFLUX_BUCKET:ods_test} +ods.influx.bucket=${INFLUX_BUCKET:ODSTransferNodes} ods.influx.token=${INFLUX_TOKEN} ods.influx.org=${INFLUX_ORG:OneDataShare} transfer.service.concurrency=${MAX_CONCURRENCY:32} transfer.service.parallelism=${MAX_PARALLELISM:32} -transfer.service.pipelining=${MAX_PIPELINING:32} \ No newline at end of file +transfer.service.pipelining=${MAX_PIPELINING:32} + +hz.keystore.password=${HZ_KEYSTORE_PASSWORD:changeit} +hz.ipaddr=${HZ_IP_ADDR:localhost} diff --git a/src/main/resources/bootstrap-ec2.yml b/src/main/resources/bootstrap-ec2.yml new file mode 100644 index 00000000..eff39d96 --- /dev/null +++ b/src/main/resources/bootstrap-ec2.yml @@ -0,0 +1,14 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + kv: + enabled: true + profile-separator: '/' + default-context: prod/TransferService + backend: secrets + authentication: aws_iam + aws-iam: + role: ec2-role \ No newline at end of file diff --git a/src/main/resources/bootstrap-eks.yml b/src/main/resources/bootstrap-eks.yml new file mode 100644 index 00000000..11b8041e --- /dev/null +++ b/src/main/resources/bootstrap-eks.yml @@ -0,0 +1,14 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + kv: + 
enabled: true + profile-separator: '/' + default-context: prod/TransferService + backend: secrets + authentication: aws_iam + aws-iam: + role: eks-role \ No newline at end of file diff --git a/src/main/resources/bootstrap-local.yml b/src/main/resources/bootstrap-local.yml new file mode 100644 index 00000000..3377c466 --- /dev/null +++ b/src/main/resources/bootstrap-local.yml @@ -0,0 +1,13 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + authentication: TOKEN + token: ${VAULT_ODS_TOKEN} + kv: + enabled: true + profile-separator: '/' + default-context: prod/TransferService + backend: secrets \ No newline at end of file diff --git a/src/main/resources/bootstrap-prod.yml b/src/main/resources/bootstrap-prod.yml new file mode 100644 index 00000000..58e96d73 --- /dev/null +++ b/src/main/resources/bootstrap-prod.yml @@ -0,0 +1,15 @@ +spring: + application: + name: TransferService + cloud: + vault: + uri: ${VAULT_URI} + kv: + enabled: true + profile-separator: '/' + default-context: prod/TransferService + backend: secrets + authentication: approle + app-role: + role-id: ${VAULT_ROLE_ID} + secret-id: ${VAULT_SECRET_ID} \ No newline at end of file diff --git a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java new file mode 100644 index 00000000..379e7a17 --- /dev/null +++ b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/CarbonJobMeasureTest.java @@ -0,0 +1,66 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.Hazelcast; +import com.hazelcast.core.HazelcastInstance; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import org.junit.Assert; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.mockito.Mock; +import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest; +import org.onedatashare.transferservice.odstransferservice.model.TransferOptions; +import org.springframework.test.util.ReflectionTestUtils; + +import java.util.UUID; + +public class CarbonJobMeasureTest { + + CarbonJobMeasure testObj; + static IMap carbonIntensityMap; + static IMap fileTransferScheduleMap; + static ObjectMapper objectMapper; + @Mock + PmeterParser pmeterParser; + + @BeforeAll + public static void beforeAllTests() { + HazelcastInstance hazelcastInstance = Hazelcast.newHazelcastInstance(); + carbonIntensityMap = hazelcastInstance.getMap("carbon-intensity-map"); + fileTransferScheduleMap = hazelcastInstance.getMap("file-transfer-schedule-map"); + objectMapper = new ObjectMapper(); + } + + @BeforeEach + public void beforeEachTest() { + testObj = new CarbonJobMeasure(carbonIntensityMap, fileTransferScheduleMap, pmeterParser, objectMapper); + ReflectionTestUtils.setField(testObj, "appName", "odsNode"); + ReflectionTestUtils.setField(testObj, "odsUser", "odsNode"); + } + + @Test + public void testEmptyMapsDefault() { + testObj.measureCarbonOfPotentialJobs(); + Assert.assertEquals(0, carbonIntensityMap.size()); + } + + @Test + public void testOneJobInMapForThisNode() throws JsonProcessingException { + TransferJobRequest jobRequest = new TransferJobRequest(); + jobRequest.setJobUuid(UUID.randomUUID()); + 
jobRequest.setOptions(new TransferOptions()); + jobRequest.setSource(new TransferJobRequest.Source()); + jobRequest.setDestination(new TransferJobRequest.Destination()); + jobRequest.setOwnerId("jgoldverg@gmail.com"); + jobRequest.setTransferNodeName("odsNode"); + String jsonJob = objectMapper.writeValueAsString(jobRequest); + fileTransferScheduleMap.put(jobRequest.getJobUuid(), new HazelcastJsonValue(jsonJob)); + testObj.measureCarbonOfPotentialJobs(); + Assert.assertEquals(1, carbonIntensityMap.size()); + } + + +} diff --git a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationServiceTest.java b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationServiceTest.java new file mode 100644 index 00000000..03a5def9 --- /dev/null +++ b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/FileTransferNodeRegistrationServiceTest.java @@ -0,0 +1,101 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import com.fasterxml.jackson.core.JsonProcessingException; +import com.fasterxml.jackson.databind.ObjectMapper; +import com.hazelcast.core.Hazelcast; +import com.hazelcast.core.HazelcastInstance; +import com.hazelcast.core.HazelcastJsonValue; +import com.hazelcast.map.IMap; +import org.junit.Assert; +import org.junit.jupiter.api.BeforeEach; +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mock; +import org.mockito.junit.jupiter.MockitoExtension; +import org.onedatashare.transferservice.odstransferservice.constant.ODSConstants; +import org.onedatashare.transferservice.odstransferservice.model.FileTransferNodeMetaData; +import org.springframework.batch.core.JobExecution; +import org.springframework.batch.core.JobParameters; +import org.springframework.core.env.Environment; + +import java.util.UUID; + +import static org.mockito.Mockito.when; + +@ExtendWith(MockitoExtension.class) +public class FileTransferNodeRegistrationServiceTest { + + @Mock + private Environment environment; + + @Mock + private JobExecution jobExecution; + + @Mock + private JobParameters jobParameters; + + private HazelcastInstance hazelcastInstance; + String appName = "testAppName@random.org"; + + private IMap fileTransferNodeMap; + + FileTransferNodeRegistrationService testObj; + ObjectMapper objectMapper = new ObjectMapper(); + + private UUID testJobUuid = UUID.randomUUID(); + + public FileTransferNodeRegistrationServiceTest() { + this.hazelcastInstance = Hazelcast.newHazelcastInstance(); + this.fileTransferNodeMap = this.hazelcastInstance.getMap("testNodeRegistrationMap"); + } + + @BeforeEach + public void setUp() { + when(environment.getProperty("spring.application.name")).thenReturn(this.appName); + when(environment.getProperty("ods.user")).thenReturn("testUser"); + testObj = new FileTransferNodeRegistrationService(hazelcastInstance, fileTransferNodeMap, environment, this.objectMapper); + } + + @Test + public void testInitialNodeRegistrationInMap() throws JsonProcessingException { + testObj.updateRegistrationInHazelcast(null); + Assert.assertEquals(this.fileTransferNodeMap.containsKey(this.appName), true); + HazelcastJsonValue jsonValue = this.fileTransferNodeMap.get(this.appName); + FileTransferNodeMetaData testData = this.objectMapper.readValue(jsonValue.getValue(), FileTransferNodeMetaData.class); + Assert.assertNotNull(testData); + Assert.assertEquals(this.hazelcastInstance.getLocalEndpoint().getUuid(), 
testData.getNodeUuid()); + Assert.assertEquals("testAppName", testData.getNodeName()); + Assert.assertEquals("testUser", testData.getOdsOwner()); + Assert.assertEquals(-1L, testData.getJobId()); + Assert.assertEquals(new UUID(0, 0), testData.getJobUuid()); + Assert.assertEquals(false, testData.getRunningJob()); + Assert.assertEquals(true, testData.getOnline()); + } + + @Test + public void testRegisterWithJobExecution() throws JsonProcessingException { + testObj.updateRegistrationInHazelcast(this.jobExecution); + when(jobExecution.getJobId()).thenReturn(1L); + when(jobExecution.getJobParameters()).thenReturn(this.jobParameters); + when(this.jobParameters.getString(ODSConstants.JOB_UUID)).thenReturn(this.testJobUuid.toString()); + testObj.updateRegistrationInHazelcast(this.jobExecution); + HazelcastJsonValue jsonValue = this.fileTransferNodeMap.get(this.appName); + FileTransferNodeMetaData testData = this.objectMapper.readValue(jsonValue.getValue(), FileTransferNodeMetaData.class); + Assert.assertNotNull(testData); + Assert.assertEquals(true, testData.getRunningJob()); + Assert.assertEquals(this.testJobUuid, testData.getJobUuid()); + } + + @Test + public void testDeRegisterNodeFromMap() throws JsonProcessingException { + testObj.updateRegistrationInHazelcast(null); + testObj.updateRegistrationInHazelcast(this.jobExecution); + HazelcastJsonValue jsonValue = this.fileTransferNodeMap.get(this.appName); + FileTransferNodeMetaData testData = objectMapper.readValue(jsonValue.getValue(), FileTransferNodeMetaData.class); + Assert.assertNotNull(testData); + Assert.assertEquals(false, testData.getRunningJob()); + Assert.assertEquals(false, testData.getOnline()); + + } + +} diff --git a/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java new file mode 100644 index 00000000..46ffa9f7 --- /dev/null +++ b/src/test/java/org/onedatashare/transferservice/odstransferservice/service/PmeterParserTest.java @@ -0,0 +1,37 @@ +package org.onedatashare.transferservice.odstransferservice.service; + +import org.junit.Assert; +import org.junit.jupiter.api.BeforeAll; +import org.junit.jupiter.api.Test; +import org.mockito.Mockito; +import org.springframework.boot.test.mock.mockito.MockBean; +import org.springframework.core.env.Environment; + +import java.io.IOException; + + +public class PmeterParserTest { + + PmeterParser testObj; + + @MockBean + Environment environment; + + @Test + public void testPmeterNicDefaultEmpty() throws IOException { + this.environment = Mockito.mock(Environment.class); + Mockito.when(environment.getProperty("pmeter.nic", "")).thenReturn(""); + testObj = new PmeterParser(this.environment); + testObj.init(); + Assert.assertEquals("en0",this.testObj.pmeterNic); + } + + @Test + public void testPmeterNicGivenValue() throws IOException { + this.environment = Mockito.mock(Environment.class); + Mockito.when(environment.getProperty("pmeter.nic", "")).thenReturn("en0"); + testObj = new PmeterParser(this.environment); + testObj.init(); + Assert.assertEquals("en0",this.testObj.pmeterNic); + } +}
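
Note on the writer selection earlier in the patch: each destination picks a single-shot writer for small files and a chunked or resumable writer for large ones. A condensed, illustrative sketch of that pattern follows; the 150 MiB Dropbox single-upload limit is taken from the patch, while the 5 MB value assumed for FIVE_MB and the class and method names are illustrative only.

    // Illustrative only: condensed view of the size-threshold writer selection.
    public final class UploadPathSelector {

        private static final long DROPBOX_SINGLE_UPLOAD_LIMIT = 150L * 1024L * 1024L; // from the patch
        private static final long FIVE_MB = 5L * 1024L * 1024L;                       // assumed value of FIVE_MB

        public enum Path { SINGLE_SHOT, CHUNKED }

        // Dropbox: single uploads stay under 150 MiB; larger files go through a chunked upload session.
        static Path forDropbox(long fileSize) {
            return fileSize < DROPBOX_SINGLE_UPLOAD_LIMIT ? Path.SINGLE_SHOT : Path.CHUNKED;
        }

        // Google Drive: small files use the simple writer; larger files use the resumable writer.
        static Path forGoogleDrive(long fileSize) {
            return fileSize < FIVE_MB ? Path.SINGLE_SHOT : Path.CHUNKED;
        }
    }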
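
Note on the relocated S3 helper: the patch deletes S3Utility and keeps constructS3URI in ODSUtility, preserving the "<region>:::<bucket>" split on ":::" and the virtual-hosted-style URL it builds. A minimal usage sketch, with purely illustrative credential and key values:

    import org.onedatashare.transferservice.odstransferservice.utility.ODSUtility;

    public class S3UriExample {
        public static void main(String[] args) {
            // The credential URI follows the "<region>:::<bucket>" layout that constructS3URI splits on ":::".
            String credentialUri = "us-east-2:::example-bucket"; // illustrative values, not from the patch
            String fileKey = "folder/example-object.txt";        // illustrative key

            // Builds https://example-bucket.s3.us-east-2.amazonaws.com/folder/example-object.txt
            String url = ODSUtility.constructS3URI(credentialUri, fileKey);
            System.out.println(url);
        }
    }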
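
Note on the Hazelcast maps exercised by CarbonJobMeasureTest: pending jobs are serialized to JSON and stored as HazelcastJsonValue entries in "file-transfer-schedule-map", and carbon measurements accumulate in "carbon-intensity-map". A minimal sketch of the producer side, assuming a TransferJobRequest keyed by its job UUID as in the test; the node and owner values are illustrative:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import com.hazelcast.core.Hazelcast;
    import com.hazelcast.core.HazelcastInstance;
    import com.hazelcast.core.HazelcastJsonValue;
    import com.hazelcast.map.IMap;
    import org.onedatashare.transferservice.odstransferservice.model.TransferJobRequest;

    import java.util.UUID;

    public class ScheduleMapExample {
        public static void main(String[] args) throws Exception {
            HazelcastInstance hz = Hazelcast.newHazelcastInstance();
            // Same map name as in CarbonJobMeasureTest.
            IMap<UUID, HazelcastJsonValue> scheduleMap = hz.getMap("file-transfer-schedule-map");

            TransferJobRequest job = new TransferJobRequest();
            job.setJobUuid(UUID.randomUUID());
            job.setTransferNodeName("odsNode");   // illustrative node name
            job.setOwnerId("user@example.com");   // illustrative owner

            // Storing the job as JSON lets any transfer node deserialize and inspect it.
            String json = new ObjectMapper().writeValueAsString(job);
            scheduleMap.put(job.getJobUuid(), new HazelcastJsonValue(json));
        }
    }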