diff --git a/src/main/java/edu/ie3/datamodel/io/DbGridMetadata.java b/src/main/java/edu/ie3/datamodel/io/DbGridMetadata.java new file mode 100644 index 000000000..b613f0894 --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/DbGridMetadata.java @@ -0,0 +1,28 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io; + +import static edu.ie3.datamodel.io.SqlUtils.quote; + +import java.util.UUID; +import java.util.stream.Stream; + +/** Class for identification of entities and results from grids in SQL databases. */ +public record DbGridMetadata(String gridName, UUID uuid) { + + public static final String GRID_TABLE_COLUMN = "grids"; + public static final String GRID_NAME_COLUMN = "grid_name"; + public static final String GRID_UUID_COLUMN = "grid_uuid"; + + public String toString() { + return GRID_NAME_COLUMN + "=" + gridName + ", " + GRID_UUID_COLUMN + "=" + uuid.toString(); + } + + /** @return Stream with grid uuid */ + public Stream getStreamForQuery() { + return Stream.of(quote(uuid.toString(), "'")); + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/SqlUtils.java b/src/main/java/edu/ie3/datamodel/io/SqlUtils.java new file mode 100644 index 000000000..501c8c6aa --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/SqlUtils.java @@ -0,0 +1,45 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io; + +import java.util.Objects; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SqlUtils { + + protected static final Logger log = LoggerFactory.getLogger(SqlUtils.class); + private static final String END_QUERY_CREATE_TABLE = + ")\n \t WITHOUT OIDS\n \t TABLESPACE pg_default;"; + + private SqlUtils() { + throw new IllegalStateException("Utility classes cannot be instantiated"); + } + + private static String beginQueryCreateTable(String schemaName, String tableName) { + return "CREATE TABLE " + schemaName + "." + tableName + "\n(\n"; + } + + /** @return query to create a SQL table for a grid */ + public static String queryCreateGridTable(String schemaName) { + return beginQueryCreateTable(schemaName, DbGridMetadata.GRID_TABLE_COLUMN) + + "\tuuid uuid PRIMARY KEY,\n\tname TEXT NOT NULL\n" + + END_QUERY_CREATE_TABLE; + } + + /** + * To avoid data type conflicts while insertion into a SQL table all columns should be quoted. + * + * @return input with quoteSymbol + */ + public static String quote(String input, String quoteSymbol) { + if (Objects.equals(input, "") || Objects.equals(input, "null")) { + return "NULL"; + } else { + return input.matches("^\".*\"$") ? 
input : quoteSymbol + input + quoteSymbol; + } + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/connectors/SqlConnector.java b/src/main/java/edu/ie3/datamodel/io/connectors/SqlConnector.java index 8b13ec0d0..b1e463180 100644 --- a/src/main/java/edu/ie3/datamodel/io/connectors/SqlConnector.java +++ b/src/main/java/edu/ie3/datamodel/io/connectors/SqlConnector.java @@ -61,15 +61,15 @@ public ResultSet executeQuery(Statement stmt, String query) throws SQLException /** * Executes an update query * - * @param updateQuery the query to execute + * @param query the query to execute * @return The number of updates or a negative number if the execution failed */ - public int executeUpdate(String updateQuery) { - try (Statement stmt = getConnection().createStatement()) { - return stmt.executeUpdate(updateQuery); + public int executeUpdate(String query) throws SQLException { + try (Statement statement = getConnection().createStatement()) { + return statement.executeUpdate(query); } catch (SQLException e) { - log.error(String.format("Error at execution of query \"%1.127s\": ", updateQuery), e); - return -1; + throw new SQLException( + String.format("Error at execution of query, SQLReason: '%s'", e.getMessage()), e); } } diff --git a/src/main/java/edu/ie3/datamodel/io/naming/DatabaseNamingStrategy.java b/src/main/java/edu/ie3/datamodel/io/naming/DatabaseNamingStrategy.java index 2be6831fd..b3b35172b 100644 --- a/src/main/java/edu/ie3/datamodel/io/naming/DatabaseNamingStrategy.java +++ b/src/main/java/edu/ie3/datamodel/io/naming/DatabaseNamingStrategy.java @@ -5,14 +5,24 @@ */ package edu.ie3.datamodel.io.naming; +import static edu.ie3.datamodel.io.naming.EntityPersistenceNamingStrategy.logger; + import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme; import edu.ie3.datamodel.models.Entity; +import edu.ie3.datamodel.models.timeseries.TimeSeries; +import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; +import 
edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries; +import edu.ie3.datamodel.models.timeseries.repetitive.LoadProfileInput; +import edu.ie3.datamodel.models.value.Value; import java.util.Optional; /** A naming strategy for database entities */ public class DatabaseNamingStrategy { private static final String TIME_SERIES_PREFIX = "time_series_"; + + private static final String LOAD_PROFILE_PREFIX = "load_profile_"; + private final EntityPersistenceNamingStrategy entityPersistenceNamingStrategy; public DatabaseNamingStrategy(EntityPersistenceNamingStrategy entityPersistenceNamingStrategy) { @@ -42,7 +52,48 @@ public String getTimeSeriesEntityName(ColumnScheme columnScheme) { return TIME_SERIES_PREFIX + columnScheme.getScheme(); } + /** + * Provides the name of a load profile given by the load profile key + * + * @param lpKey Load profile key + * @return the table name + */ + private String getLoadProfileEntityName(String lpKey) { + return LOAD_PROFILE_PREFIX + lpKey; + } + + /** + * Provides the name of a unique entity class. + * + * @param cls Class extends UniqueEntity + * @return the table name + */ public Optional getEntityName(Class cls) { return entityPersistenceNamingStrategy.getEntityName(cls); } + + /** + * Provides the name of a time series. Used to determine the table name in SQL database. 
@param timeSeries the time series to be named
ProcessorProviderException { + Set setOfEntities = new HashSet<>(entities); + Set> setOfMaps = new HashSet<>(); + for (T entity : setOfEntities) { + LinkedHashMap entryResult = handleEntity(entity).getOrThrow(); + + /* Prepare the actual result and add them to the set of all results */ + setOfMaps.add(new LinkedHashMap<>(entryResult)); + } + return setOfMaps; + } + /** * Get the correct entity processor * diff --git a/src/main/java/edu/ie3/datamodel/io/sink/SqlSink.java b/src/main/java/edu/ie3/datamodel/io/sink/SqlSink.java new file mode 100644 index 000000000..907322b1f --- /dev/null +++ b/src/main/java/edu/ie3/datamodel/io/sink/SqlSink.java @@ -0,0 +1,428 @@ +/* + * © 2023. TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation +*/ +package edu.ie3.datamodel.io.sink; + +import static edu.ie3.datamodel.io.SqlUtils.*; +import static java.util.stream.Collectors.groupingBy; + +import edu.ie3.datamodel.exceptions.*; +import edu.ie3.datamodel.io.DbGridMetadata; +import edu.ie3.datamodel.io.connectors.SqlConnector; +import edu.ie3.datamodel.io.extractor.Extractor; +import edu.ie3.datamodel.io.extractor.NestedEntity; +import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy; +import edu.ie3.datamodel.io.processor.EntityProcessor; +import edu.ie3.datamodel.io.processor.ProcessorProvider; +import edu.ie3.datamodel.io.processor.timeseries.TimeSeriesProcessorKey; +import edu.ie3.datamodel.models.Entity; +import edu.ie3.datamodel.models.input.*; +import edu.ie3.datamodel.models.input.connector.*; +import edu.ie3.datamodel.models.input.container.JointGridContainer; +import edu.ie3.datamodel.models.input.graphics.GraphicInput; +import edu.ie3.datamodel.models.input.system.*; +import edu.ie3.datamodel.models.input.thermal.ThermalBusInput; +import edu.ie3.datamodel.models.input.thermal.ThermalUnitInput; +import edu.ie3.datamodel.models.result.ResultEntity; +import 
edu.ie3.datamodel.models.timeseries.TimeSeries; +import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry; +import edu.ie3.datamodel.models.value.Value; +import edu.ie3.util.StringUtils; +import java.sql.SQLException; +import java.util.*; +import java.util.stream.Collectors; +import java.util.stream.Stream; +import org.slf4j.Logger; +import org.slf4j.LoggerFactory; + +public class SqlSink { + + protected static final Logger log = LoggerFactory.getLogger(SqlSink.class); + + private final SqlConnector connector; + private final DatabaseNamingStrategy databaseNamingStrategy; + private final ProcessorProvider processorProvider; + private final String schemaName; + + private static final String TIME_SERIES = "time_series"; + + public SqlSink( + String schemaName, DatabaseNamingStrategy databaseNamingStrategy, SqlConnector connector) + throws EntityProcessorException { + this(schemaName, new ProcessorProvider(), databaseNamingStrategy, connector); + } + + public SqlSink( + String schemaName, + ProcessorProvider processorProvider, + DatabaseNamingStrategy databaseNamingStrategy, + SqlConnector connector) { + this.connector = connector; + this.databaseNamingStrategy = databaseNamingStrategy; + this.processorProvider = processorProvider; + this.schemaName = schemaName; + } + + public void shutdown() { + connector.shutdown(); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + /** + * Entry point of a data sink to persist multiple entities in a collection. + * + * @param entities a collection of entities that should be persisted + * @param identifier identifier of the grid + * @param bounded to be all unique entities. 
Handling of specific entities is normally then + * executed by a specific {@link EntityProcessor} + */ + public void persistAll(Collection entities, DbGridMetadata identifier) { + // Extract nested entities and add them to the set of entities + Set entitiesToAdd = new HashSet<>(entities); // entities to persist + entities.forEach( + entity -> { + if (entity instanceof NestedEntity nestedEntity) { + try { + entitiesToAdd.addAll( + (List) Extractor.extractElements(nestedEntity).stream().toList()); + } catch (ExtractorException e) { + log.error( + String.format( + "An error occurred during extraction of nested entity'%s': ", + entity.getClass()), + e); + } + } + }); + + // Persist the entities in hierarchic order to avoid failure because of foreign keys + for (Class cls : hierarchicInsert()) { + persistMixedList( + entitiesToAdd.stream().filter(ent -> cls.isAssignableFrom(ent.getClass())).toList(), + identifier); + entitiesToAdd.removeIf(ent -> cls.isAssignableFrom(ent.getClass())); + } + persistMixedList(new ArrayList<>(entitiesToAdd), identifier); // persist left entities + } + + /** + * Persist an entity. By default this method takes care of the extraction process of nested + * entities (if any) + * + * @param entity the entity that should be persisted + * @param identifier identifier of the grid + * @throws SQLException + */ + public void persist(C entity, DbGridMetadata identifier) throws SQLException { + if (entity instanceof InputEntity inputEntity) { + persistIncludeNested(inputEntity, identifier); + } else if (entity instanceof ResultEntity resultEntity) { + insert(resultEntity, identifier); + } else if (entity instanceof TimeSeries timeSeries) { + persistTimeSeries(timeSeries, identifier); + } else { + log.error( + "I don't know how to handle an entity of class {}", entity.getClass().getSimpleName()); + } + } + + /** + * Persist an entity. In contrast to {@link SqlSink#persist} this function does not extract nested + * entities. 
+ * + * @param entity the entity that should be persisted + * @param identifier identifier of the grid + */ + public void persistIgnoreNested(C entity, DbGridMetadata identifier) + throws SQLException { + insert(entity, identifier); + } + + /** + * Persist an entity and all nested entities. + * + * @param entity the entity that should be persisted + * @param identifier identifier of the grid + */ + public void persistIncludeNested(C entity, DbGridMetadata identifier) { + Set entitiesToAdd = new HashSet<>(); + entitiesToAdd.add(entity); + persistAll(entitiesToAdd, identifier); + } + + /** + * Persist a list of entities with different types. To minimize the number of queries, the + * entities will be grouped by their class. + */ + private void persistMixedList(List entities, DbGridMetadata identifier) { + Map, List> entitiesPerClass = + entities.stream().collect(groupingBy(entity -> (Class) entity.getClass())); + entitiesPerClass.forEach( + (cls, ent) -> { + try { + persistList(ent, cls, identifier); + } catch (SQLException e) { + throw new RuntimeException( + String.format( + "An error occurred during extraction of entity '%s', SQLReason: '%s'", + cls.getSimpleName(), e.getMessage()), + e); + } + }); + } + + /** + * Persist a list of entities with same types. To minimize the number of queries, the entities + * will be grouped by their class. 
+ */ + private , V extends Value> void persistList( + List entities, Class cls, DbGridMetadata identifier) throws SQLException { + // Check if there are only elements of the same class + Class firstClass = entities.get(0).getClass(); + boolean allSameClass = entities.stream().allMatch(e -> e.getClass() == firstClass); + + if (allSameClass) { + if (InputEntity.class.isAssignableFrom(cls)) { + insertListIgnoreNested(entities, cls, identifier, true); + } else if (ResultEntity.class.isAssignableFrom(cls)) { + insertListIgnoreNested(entities, cls, identifier, false); + } else if (TimeSeries.class.isAssignableFrom(cls)) { + entities.forEach(ts -> persistTimeSeries((TimeSeries) ts, identifier)); + } else { + log.error("I don't know how to handle an entity of class {}", cls.getSimpleName()); + } + } else { + log.error("The list isn't homogenous regarding the classes of the elements."); + } + } + + /** + * Writes a list of entities into a sql table. It's necessary that all entities have the same + * class. + */ + private void insertListIgnoreNested( + List entities, Class cls, DbGridMetadata identifier, boolean ignoreConflict) + throws SQLException { + try { + String[] headerElements = processorProvider.getHeaderElements(cls); + String query = + basicInsertQueryValuesGrid( + schemaName, databaseNamingStrategy.getEntityName(cls).orElseThrow(), headerElements); + query = + query + + createInsertQueryBodyIgnoreConflict( + entities, headerElements, identifier, ignoreConflict); + connector.executeUpdate(query); + } catch (ProcessorProviderException e) { + log.error("Exception occurred during processor request: ", e); + } + } + + /** Persist one time series. 
*/ + protected , V extends Value> void persistTimeSeries( + TimeSeries timeSeries, DbGridMetadata identifier) { + try { + TimeSeriesProcessorKey key = new TimeSeriesProcessorKey(timeSeries); + String[] headerElements = processorProvider.getHeaderElements(key); + persistTimeSeries(timeSeries, headerElements, identifier); + } catch (ProcessorProviderException e) { + log.error( + "Exception occurred during receiving of header elements. Cannot write this element.", e); + } + } + + private , V extends Value> void persistTimeSeries( + TimeSeries timeSeries, String[] headerElements, DbGridMetadata identifier) + throws ProcessorProviderException { + try { + String query = + basicInsertQueryValuesITS( + schemaName, + databaseNamingStrategy.getEntityName(timeSeries).orElseThrow(), + headerElements); + Set> entityFieldData = + processorProvider.handleTimeSeries(timeSeries); + query = + query + + entityFieldData.stream() + .map( + data -> + queryTimeSeriesValueLine( + sqlEntityFieldData(data), + headerElements, + identifier, + timeSeries.getUuid().toString())) + .collect(Collectors.joining(",\n", "", ";")); + executeQueryToPersist(query); + } catch (ProcessorProviderException e) { + throw new ProcessorProviderException("Exception occurred during processor request: ", e); + } + } + + private void executeQueryToPersist(String query) { + try { + connector.executeUpdate(query); + } catch (SQLException e) { + throw new RuntimeException( + String.format( + "An error occurred during extraction of the time series, SQLReason: '%s'", + e.getMessage()), + e); + } + } + + /** Persists a whole {@link JointGridContainer}. */ + public void persistJointGrid(JointGridContainer jointGridContainer, UUID gridUUID) { + DbGridMetadata identifier = new DbGridMetadata(jointGridContainer.getGridName(), gridUUID); + List toAdd = new LinkedList<>(jointGridContainer.allEntitiesAsList()); + persistAll(toAdd, identifier); + } + + /** Executes a query to insert a single entity to a SQL database. 
*/ + private void insert(C entity, DbGridMetadata identifier) throws SQLException { + try { + String[] headerElements = processorProvider.getHeaderElements(entity.getClass()); + String query = + basicInsertQueryValuesGrid( + schemaName, + databaseNamingStrategy.getEntityName(entity.getClass()).orElseThrow(), + headerElements) + + queryValueLine(entity, headerElements, identifier) + + ";"; + connector.executeUpdate(query); + } catch (ProcessorProviderException e) { + log.error( + "Exception occurred during receiving of header elements. Cannot write this element.", e); + } + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + /** + * Provides the value lists for an insertion query. Conflicts because of the primary key 'uuid' + * will be ignored. Conflicts can occur if an entity (e.g. node) already exist. WARNING: It's + * assumed that all entities are from the same class C. + */ + private String createInsertQueryBodyIgnoreConflict( + List entities, String[] headerElements, DbGridMetadata identifier, boolean ignoreConflict) + throws ProcessorProviderException { + Set> entityFieldData = processorProvider.handleEntities(entities); + String suffix; + if (ignoreConflict) { + suffix = "\nON CONFLICT (uuid) DO NOTHING;"; + } else { + suffix = ";\n"; + } + String queryBody = ""; + queryBody = + queryBody + + entityFieldData.stream() + .map(data -> queryValueLine(sqlEntityFieldData(data), headerElements, identifier)) + .collect(Collectors.joining(",\n", "", suffix)); + return queryBody; + } + + /** + * Creates a line with the values of one entity for an insertion query using the entityFieldData. 
+ */ + private String queryValueLine( + LinkedHashMap entityFieldData, + String[] headerElements, + DbGridMetadata identifier) { + return writeOneLine( + Stream.concat( + Arrays.stream(headerElements).map(entityFieldData::get), + identifier.getStreamForQuery())); + } + + /** Creates a line with the values of one entity for an insertion query. */ + private String queryValueLine( + C entity, String[] headerElements, DbGridMetadata identifier) + throws ProcessorProviderException { + return queryValueLine( + processorProvider.handleEntity(entity).map(this::sqlEntityFieldData).getOrThrow(), + headerElements, + identifier); + } + + private String queryTimeSeriesValueLine( + Map entityFieldData, + String[] headerElements, + DbGridMetadata identifier, + String tsUuid) { + return writeOneLine( + Stream.concat( + Stream.concat( + Arrays.stream(headerElements).map(entityFieldData::get), + identifier.getStreamForQuery()), + Stream.of(quote(tsUuid, "'")))); + } + + private LinkedHashMap sqlEntityFieldData( + LinkedHashMap entityFieldData) { + LinkedHashMap quotedEntityFieldData = new LinkedHashMap<>(entityFieldData); + quotedEntityFieldData.replaceAll((key, ent) -> quote(ent, "'")); + + return quotedEntityFieldData; + } + + /** "INSERT INTO" line with schemaName.tableName */ + private static String basicInsertQuery(String schemaName, String tableName) { + return "INSERT INTO\n\t" + schemaName + "." + tableName; + } + + /** Provides the insert, column names, grid identifier and the VALUES statement for a query. */ + private String basicInsertQueryValuesGrid( + String schemaName, String tableName, String[] headerElements) { + String[] addParams = {DbGridMetadata.GRID_UUID_COLUMN}; + return basicInsertQuery(schemaName, tableName) + + " " + + writeOneLine(StringUtils.camelCaseToSnakeCase(headerElements), addParams) + + "\nVALUES\n"; + } + + /** + * Provides the insert, column names, grid identifier, time_series uuid and the VALUES statement + * for a query. 
+ */ + private String basicInsertQueryValuesITS( + String schemaName, String tableName, String[] headerElements) { + String[] addParams = {DbGridMetadata.GRID_UUID_COLUMN, TIME_SERIES}; + return basicInsertQuery(schemaName, tableName) + + " " + + writeOneLine(StringUtils.camelCaseToSnakeCase(headerElements), addParams) + + "\nVALUES\n"; + } + + /** Converts a stream of strings into an one line string with brackets. */ + private String writeOneLine(Stream entries) { + return "(" + entries.collect(Collectors.joining(",")) + ")"; + } + + /** + * Converts an array of strings and an array of strings (for additional parameters) into an one + * line string with brackets. + */ + private String writeOneLine(String[] entries, String[] addParams) { + return writeOneLine(Stream.concat(Arrays.stream(entries), Arrays.stream(addParams))); + } + + // -=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=- + + /** @return insertion order for unique entities */ + private static List> hierarchicInsert() { + List> sortedInsert = new ArrayList<>(); + sortedInsert.add(AssetTypeInput.class); // 1. Types + sortedInsert.add(OperatorInput.class); // 2. Operators + sortedInsert.add(NodeInput.class); // 3. Nodes + sortedInsert.add(ThermalBusInput.class); // 4. ThermalBus + sortedInsert.add(ThermalUnitInput.class); // 5. ThermalUnit + sortedInsert.add(ConnectorInput.class); // 6a. ConnectorInput + sortedInsert.add(SystemParticipantInput.class); // 6b. SystemParticipantInput + sortedInsert.add(GraphicInput.class); // 7. 
GraphicInput + return sortedInsert; + } +} diff --git a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlDataSource.java b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlDataSource.java index 08aaef7ec..7fafca42a 100644 --- a/src/main/java/edu/ie3/datamodel/io/source/sql/SqlDataSource.java +++ b/src/main/java/edu/ie3/datamodel/io/source/sql/SqlDataSource.java @@ -179,7 +179,6 @@ protected Stream> buildStreamByTableName(String tableName) { protected Stream> executeQuery(String query, AddParams addParams) { try (PreparedStatement ps = connector.getConnection().prepareStatement(query)) { addParams.addParams(ps); - ResultSet resultSet = ps.executeQuery(); return connector.extractFieldMaps(resultSet).stream(); } catch (SQLException e) { diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy index 2914504c4..c7d18de70 100644 --- a/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/CsvFileSinkTest.groovy @@ -346,7 +346,7 @@ class CsvFileSinkTest extends Specification implements TimeSeriesTestData { testBaseFolderPath.resolve("line_input.csv").toFile().exists() testBaseFolderPath.resolve("line_type_input.csv").toFile().exists() testBaseFolderPath.resolve("load_input.csv").toFile().exists() - testBaseFolderPath.resolve( "node_input.csv").toFile().exists() + testBaseFolderPath.resolve("node_input.csv").toFile().exists() testBaseFolderPath.resolve("operator_input.csv").toFile().exists() testBaseFolderPath.resolve("pv_input.csv").toFile().exists() testBaseFolderPath.resolve("storage_input.csv").toFile().exists() diff --git a/src/test/groovy/edu/ie3/datamodel/io/sink/SqlSinkTest.groovy b/src/test/groovy/edu/ie3/datamodel/io/sink/SqlSinkTest.groovy new file mode 100644 index 000000000..9e45cf059 --- /dev/null +++ b/src/test/groovy/edu/ie3/datamodel/io/sink/SqlSinkTest.groovy @@ -0,0 +1,340 @@ +/* + * © 2023. 
TU Dortmund University, + * Institute of Energy Systems, Energy Efficiency and Energy Economics, + * Research group Distribution grid planning and operation + */ +package edu.ie3.datamodel.io.sink + +import static edu.ie3.util.quantities.PowerSystemUnits.DEGREE_GEOM +import static edu.ie3.util.quantities.PowerSystemUnits.KILOVOLTAMPERE +import static tech.units.indriya.unit.Units.PERCENT + +import edu.ie3.datamodel.io.DbGridMetadata +import edu.ie3.datamodel.io.connectors.SqlConnector +import edu.ie3.datamodel.io.naming.DatabaseNamingStrategy +import edu.ie3.datamodel.io.processor.ProcessorProvider +import edu.ie3.datamodel.io.processor.input.InputEntityProcessor +import edu.ie3.datamodel.io.processor.result.ResultEntityProcessor +import edu.ie3.datamodel.io.processor.timeseries.TimeSeriesProcessor +import edu.ie3.datamodel.io.processor.timeseries.TimeSeriesProcessorKey +import edu.ie3.datamodel.io.source.sql.SqlDataSource +import edu.ie3.datamodel.models.OperationTime +import edu.ie3.datamodel.models.StandardUnits +import edu.ie3.datamodel.models.input.EmInput +import edu.ie3.datamodel.models.input.NodeInput +import edu.ie3.datamodel.models.input.OperatorInput +import edu.ie3.datamodel.models.input.connector.LineInput +import edu.ie3.datamodel.models.input.connector.Transformer2WInput +import edu.ie3.datamodel.models.input.connector.type.LineTypeInput +import edu.ie3.datamodel.models.input.connector.type.Transformer2WTypeInput +import edu.ie3.datamodel.models.input.graphics.LineGraphicInput +import edu.ie3.datamodel.models.input.graphics.NodeGraphicInput +import edu.ie3.datamodel.models.input.system.EvcsInput +import edu.ie3.datamodel.models.input.system.LoadInput +import edu.ie3.datamodel.models.input.system.PvInput +import edu.ie3.datamodel.models.input.system.characteristic.CosPhiFixed +import edu.ie3.datamodel.models.input.thermal.CylindricalStorageInput +import edu.ie3.datamodel.models.input.thermal.ThermalBusInput +import 
edu.ie3.datamodel.models.input.thermal.ThermalHouseInput +import edu.ie3.datamodel.models.result.system.EmResult +import edu.ie3.datamodel.models.result.system.EvResult +import edu.ie3.datamodel.models.result.system.EvcsResult +import edu.ie3.datamodel.models.result.system.FlexOptionsResult +import edu.ie3.datamodel.models.result.system.PvResult +import edu.ie3.datamodel.models.result.system.WecResult +import edu.ie3.datamodel.models.timeseries.TimeSeries +import edu.ie3.datamodel.models.timeseries.TimeSeriesEntry +import edu.ie3.datamodel.models.timeseries.individual.IndividualTimeSeries +import edu.ie3.datamodel.models.timeseries.individual.TimeBasedValue +import edu.ie3.datamodel.models.value.EnergyPriceValue +import edu.ie3.datamodel.models.value.Value +import edu.ie3.test.common.GridTestData +import edu.ie3.test.common.SampleJointGrid +import edu.ie3.test.common.SystemParticipantTestData +import edu.ie3.test.common.ThermalUnitInputTestData +import edu.ie3.test.common.TimeSeriesTestData +import edu.ie3.test.helper.TestContainerHelper +import edu.ie3.util.TimeUtil +import org.testcontainers.containers.Container +import org.testcontainers.containers.PostgreSQLContainer +import org.testcontainers.spock.Testcontainers +import org.testcontainers.utility.MountableFile +import spock.lang.Shared +import spock.lang.Specification +import tech.units.indriya.quantity.Quantities + +import java.sql.SQLException +import javax.measure.Quantity +import javax.measure.quantity.Power + +@Testcontainers +class SqlSinkTest extends Specification implements TestContainerHelper, TimeSeriesTestData { + + @Shared + PostgreSQLContainer postgreSQLContainer = new PostgreSQLContainer("postgres:14.2") + + @Shared + SqlConnector connector + + @Shared + SqlDataSource sqlSource + + @Shared + DatabaseNamingStrategy namingStrategy + + @Shared + DbGridMetadata identifier + + static String schemaName = "public" + + def setupSpec() { + // Copy sql import scripts into docker + MountableFile 
sqlImportFile = getMountableFile("_sql/") + postgreSQLContainer.copyFileToContainer(sqlImportFile, "/home/") + postgreSQLContainer.execInContainer("psql", "-Utest", "-f/home/" + "setup.sql") + + connector = new SqlConnector(postgreSQLContainer.jdbcUrl, postgreSQLContainer.username, postgreSQLContainer.password) + + namingStrategy = new DatabaseNamingStrategy() + + identifier = new DbGridMetadata("vn_simona", UUID.fromString("8e6bd444-4580-11ee-be56-0242ac120002")) + + sqlSource = new SqlDataSource(connector, schemaName, namingStrategy) + } + + def setup() { + // Execute import script + Iterable importFiles = Arrays.asList( + "grids.sql", + "types.sql", + "result_entities.sql", + "input_entities.sql", + "time_series.sql", + "load_profile.sql" + ) + for (String file: importFiles) { + Container.ExecResult res = postgreSQLContainer.execInContainer("psql", "-Utest", "-f/home/" + file) + assert res.stderr.empty + } + } + + def cleanup() { + postgreSQLContainer.execInContainer("psql", "-Utest", "-f/home/" + "cleanup.sql") + } + + def "SQL sink can persist provided elements correctly"() { + given: + TimeSeriesProcessor timeSeriesProcessor = new TimeSeriesProcessor<>(IndividualTimeSeries, TimeBasedValue, EnergyPriceValue) + TimeSeriesProcessorKey timeSeriesProcessorKey = new TimeSeriesProcessorKey(IndividualTimeSeries, TimeBasedValue, EnergyPriceValue) + HashMap timeSeriesProcessorMap = new HashMap<>() + timeSeriesProcessorMap.put(timeSeriesProcessorKey, timeSeriesProcessor) + + SqlSink sink = new SqlSink(schemaName, + new ProcessorProvider(), + namingStrategy, + connector) + UUID inputModel = UUID.fromString("22bea5fc-2cb2-4c61-beb9-b476e0107f52") + Quantity p = Quantities.getQuantity(10, StandardUnits.ACTIVE_POWER_IN) + Quantity q = Quantities.getQuantity(10, StandardUnits.REACTIVE_POWER_IN) + PvResult pvResult = new PvResult(TimeUtil.withDefaults.toZonedDateTime("2020-01-30T17:26:44Z"), inputModel, p, q) + WecResult wecResult = new 
WecResult(TimeUtil.withDefaults.toZonedDateTime("2020-01-30T17:26:44Z"), inputModel, p, q) + EvcsResult evcsResult = new EvcsResult(TimeUtil.withDefaults.toZonedDateTime("2020-01-30T17:26:44Z"), inputModel, p, q) + EmResult emResult = new EmResult(TimeUtil.withDefaults.toZonedDateTime("2020-01-30T17:26:44Z"), inputModel, p, q) + + Quantity pRef = Quantities.getQuantity(5.1, StandardUnits.ACTIVE_POWER_RESULT) + Quantity pMin = Quantities.getQuantity(-6, StandardUnits.ACTIVE_POWER_RESULT) + Quantity pMax = Quantities.getQuantity(6, StandardUnits.ACTIVE_POWER_RESULT) + FlexOptionsResult flexOptionsResult = new FlexOptionsResult(TimeUtil.withDefaults.toZonedDateTime("2020-01-30T17:26:44Z"), inputModel, pRef, pMin, pMax) + + when: + sink.persistAll([ + pvResult, + wecResult, + evcsResult, + emResult, + flexOptionsResult, + GridTestData.transformerCtoG, + GridTestData.lineGraphicCtoD, + GridTestData.nodeGraphicC, + ThermalUnitInputTestData.cylindricStorageInput, + ThermalUnitInputTestData.thermalHouseInput, + SystemParticipantTestData.evcsInput, + SystemParticipantTestData.loadInput, + SystemParticipantTestData.emInput + ], identifier) + + then: + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "pv_res", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "wec_res", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "evcs_res", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "em_res", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "flex_options_res", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "transformer_2_w_type_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "node_input", ps -> {}).count() == 4 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." 
+ "transformer_2_w_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "operator_input", ps -> {}).count() == 2 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "cylindrical_storage_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "line_graphic_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "line_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "node_graphic_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "thermal_bus_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "thermal_house_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "load_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "em_input", ps -> {}).count() == 2 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "ev_res", ps -> {}).count() == 0 + + cleanup: + sink.shutdown() + } + + + def "A SqlSink can persist a time series."() { + given: + TimeSeriesProcessor timeSeriesProcessor = new TimeSeriesProcessor<>(IndividualTimeSeries, TimeBasedValue, EnergyPriceValue) + TimeSeriesProcessorKey timeSeriesProcessorKey = new TimeSeriesProcessorKey(IndividualTimeSeries, TimeBasedValue, EnergyPriceValue) + HashMap timeSeriesProcessorMap = new HashMap<>() + timeSeriesProcessorMap.put(timeSeriesProcessorKey, timeSeriesProcessor) + IndividualTimeSeries individualTimeSeries = individualEnergyPriceTimeSeries + SqlSink sink = new SqlSink(schemaName, namingStrategy, connector) + + when: + sink.persist(individualTimeSeries, identifier) + + then: + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." 
+ "time_series_c", ps -> {}).count() == 3 + + cleanup: + sink.shutdown() + } + + def "A valid SqlSink persists a bunch of time series correctly"() { + given: + SqlSink sink = new SqlSink(schemaName, namingStrategy, connector) + SqlDataSource source = new SqlDataSource(connector, schemaName, namingStrategy) + + when: + sink.persistAll(allTimeSeries, identifier) + + then: + source.executeQuery("SELECT * FROM " + schemaName + "." + "time_series_c", ps -> {}).count() == 3 + source.executeQuery("SELECT * FROM " + schemaName + "." + "time_series_p", ps -> {}).count() == 3 + source.executeQuery("SELECT * FROM " + schemaName + "." + "time_series_pq", ps -> {}).count() == 3 + source.executeQuery("SELECT * FROM " + schemaName + "." + "time_series_ph", ps -> {}).count() == 3 + source.executeQuery("SELECT * FROM " + schemaName + "." + "time_series_pqh", ps -> {}).count() == 3 + source.executeQuery("SELECT * FROM " + schemaName + "." + "time_series_weather", ps -> {}).count() == 3 + + cleanup: + sink.shutdown() + } + + def "A valid SqlSink throws an exception if an entity has null for a not null attribute."() { + given: + def sink = new SqlSink(schemaName, namingStrategy, connector) + def nestedInput = new PvInput( + UUID.fromString("d56f15b7-8293-4b98-b5bd-58f6273ce229"), + "test_pvInput", + OperatorInput.NO_OPERATOR_ASSIGNED, + OperationTime.notLimited(), + Mock(NodeInput), + new CosPhiFixed("cosPhiFixed:{(0.0,0.95)}"), + null, + 0.2, + Quantities.getQuantity(-8.926613807678223, DEGREE_GEOM), + Quantities.getQuantity(95d, PERCENT), + Quantities.getQuantity(41.01871871948242, DEGREE_GEOM), + 0.8999999761581421, + 1, + false, + Quantities.getQuantity(25d, KILOVOLTAMPERE), + 0.95 + ) + + when: + sink.persistIgnoreNested(nestedInput, identifier) + + then: + def exception = thrown(SQLException) + exception.message.contains("ERROR: null value in column \"node\" of relation \"pv_input\" violates not-null constraint") + + cleanup: + sink.shutdown() + } + + + def "A valid SqlSink 
refuses to persist an entity, if no processor can be found for a specific input"() { + given: + def sink = new SqlSink( + schemaName, + new ProcessorProvider( + ProcessorProvider.allEntityProcessors(), + new HashMap, Value>, TimeSeriesEntry, Value>>()), + namingStrategy, + connector) + + when: + sink.persist(individualEnergyPriceTimeSeries, identifier) + + then: + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "time_series_c", ps -> {}).count() == 0 + + cleanup: + sink.shutdown() + } + + def "A valid SqlSink throws an exception if a nested entity hasn't all of its nested entity."() { + given: + def sink = new SqlSink( + schemaName, + namingStrategy, + connector) + def load = SystemParticipantTestData.loadInput + + when: + sink.persistIgnoreNested(load, identifier) + + then: + def exception = thrown(SQLException) + exception.message.contains("ERROR: insert or update on table \"load_input\" violates foreign key constraint \"load_input_node_fkey\"\n" + + " Detail: Key (node)=(4ca90220-74c2-4369-9afa-a18bf068840d) is not present in table \"node_input\".") + } + + def "A valid SqlSink throws an exception if a grid does not exist."() { + given: + def sink = new SqlSink(schemaName, namingStrategy, connector) + + when: + def failIdentifier = new DbGridMetadata("fail_grid", UUID.fromString("8e6bd444-4580-11ee-be56-0242ac120003")) + + sink.persist(individualEnergyPriceTimeSeries, failIdentifier) + + then: + def exception = thrown(RuntimeException) + exception.message.contains("Detail: Key (grid_uuid)=(8e6bd444-4580-11ee-be56-0242ac120003) is not present in table \"grids\".") + + cleanup: + sink.shutdown() + } + + def "A valid SqlSink should persist a valid joint grid container correctly"() { + given: + def sink = new SqlSink(schemaName, namingStrategy, connector) + + when: + sink.persistJointGrid(SampleJointGrid.grid(), UUID.fromString("297dfac8-83cc-11ee-b962-0242ac120002")) + + then: + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." 
+ "line_input", ps -> {}).count() == 6 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "line_type_input", ps -> {}).count() == 2 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "load_input", ps -> {}).count() == 2 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "node_input", ps -> {}).count() == 7 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "operator_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "pv_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "storage_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "storage_type_input", ps -> {}).count() == 1 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "transformer_2_w_type_input", ps -> {}).count() == 2 + sqlSource.executeQuery("SELECT * FROM " + schemaName + "." + "transformer_2_w_input", ps -> {}).count() == 2 + + cleanup: + sink.shutdown() + } +} diff --git a/src/test/groovy/edu/ie3/test/common/SampleJointGrid.groovy b/src/test/groovy/edu/ie3/test/common/SampleJointGrid.groovy index ea7ea00ec..7ae270243 100644 --- a/src/test/groovy/edu/ie3/test/common/SampleJointGrid.groovy +++ b/src/test/groovy/edu/ie3/test/common/SampleJointGrid.groovy @@ -384,8 +384,8 @@ class SampleJointGrid extends SystemParticipantTestData { public static final Transformer2WTypeInput transformerType_MV_HV_110KV = new Transformer2WTypeInput( - UUID.fromString("08559390-d7c0-4427-a2dc-97ba312ae0ac"), - "MS-NS_1", + UUID.fromString("deecabd2-5ddb-11ee-8c99-0242ac120002"), + "HS-MS_1", Quantities.getQuantity(10.078, OHM), Quantities.getQuantity(23.312, OHM), Quantities.getQuantity(800d, KILOVOLTAMPERE), diff --git a/src/test/groovy/edu/ie3/test/common/TimeSeriesTestData.groovy b/src/test/groovy/edu/ie3/test/common/TimeSeriesTestData.groovy index 9a27b149c..18e5030d3 100644 --- 
a/src/test/groovy/edu/ie3/test/common/TimeSeriesTestData.groovy +++ b/src/test/groovy/edu/ie3/test/common/TimeSeriesTestData.groovy @@ -9,6 +9,8 @@ import static edu.ie3.util.quantities.PowerSystemUnits.* import static tech.units.indriya.unit.Units.CELSIUS import static tech.units.indriya.unit.Units.METRE_PER_SECOND +import edu.ie3.datamodel.io.naming.timeseries.ColumnScheme +import edu.ie3.datamodel.io.naming.timeseries.IndividualTimeSeriesMetaInformation import edu.ie3.datamodel.models.StandardUnits import edu.ie3.datamodel.models.profile.BdewStandardLoadProfile import edu.ie3.datamodel.models.timeseries.IntValue @@ -49,6 +51,11 @@ trait TimeSeriesTestData { ] as Set ) + IndividualTimeSeriesMetaInformation individualEnergyPriceTimeSeriesMeta = new IndividualTimeSeriesMetaInformation( + UUID.fromString("a4bbcb77-b9d0-4b88-92be-b9a14a3e332b"), + ColumnScheme.ENERGY_PRICE + ) + Set> individualEnergyPriceTimeSeriesProcessed = [ [ "time" : "2020-04-02T10:00:00Z", diff --git a/src/test/resources/edu/ie3/datamodel/io/sink/_sql/cleanup.sql b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/cleanup.sql new file mode 100644 index 000000000..3dd1acf4a --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/cleanup.sql @@ -0,0 +1,7 @@ +DO $$ DECLARE +r RECORD; +BEGIN +FOR r IN (SELECT tablename FROM pg_tables WHERE schemaname = current_schema()) LOOP + EXECUTE 'DROP TABLE IF EXISTS ' || quote_ident(r.tablename) || ' CASCADE'; +END LOOP; +END $$; \ No newline at end of file diff --git a/src/test/resources/edu/ie3/datamodel/io/sink/_sql/grids.sql b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/grids.sql new file mode 100644 index 000000000..4ba8069a0 --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/grids.sql @@ -0,0 +1,13 @@ +CREATE TABLE public.grids +( + uuid UUID PRIMARY KEY, + name TEXT NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +INSERT INTO + public.grids (uuid, name) +VALUES + ('8e6bd444-4580-11ee-be56-0242ac120002', 
'vn_simona'), + ('297dfac8-83cc-11ee-b962-0242ac120002', 'sampleGrid'); \ No newline at end of file diff --git a/src/test/resources/edu/ie3/datamodel/io/sink/_sql/input_entities.sql b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/input_entities.sql new file mode 100644 index 000000000..9b63f3fba --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/input_entities.sql @@ -0,0 +1,227 @@ +CREATE TABLE public.node_input +( + uuid UUID PRIMARY KEY, + geo_position TEXT NOT NULL, + id TEXT NOT NULL, + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + slack BOOL NOT NULL, + subnet int NOT NULL, + v_rated DOUBLE PRECISION NOT NULL, + v_target DOUBLE PRECISION NOT NULL, + volt_lvl TEXT NOT NULL, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.transformer_2_w_input +( + uuid UUID PRIMARY KEY, + auto_tap BOOL NOT NULL, + id TEXT NOT NULL, + node_a UUID NOT NULL REFERENCES node_input(uuid), + node_b UUID NOT NULL REFERENCES node_input(uuid), + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + parallel_devices int NOT NULL, + tap_pos int NOT NULL, + type UUID NOT NULL REFERENCES transformer_2_w_type_input(uuid), + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.evcs_input +( + uuid UUID PRIMARY KEY, + charging_points int NOT NULL, + controlling_em UUID, + cos_phi_rated TEXT NOT NULL, + id TEXT NOT NULL, + location_type TEXT NOT NULL, + node UUID NOT NULL, + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + q_characteristics TEXT NOT NULL, + type TEXT NOT NULL, + v_2g_support BOOL NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.line_graphic_input +( + uuid UUID PRIMARY KEY, + graphic_layer TEXT NOT NULL, + line UUID NOT 
NULL, + path TEXT, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.node_graphic_input +( + uuid UUID PRIMARY KEY, + graphic_layer TEXT NOT NULL, + node UUID NOT NULL, + path TEXT, + point TEXT NOT NULL, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.cylindrical_storage_input +( + uuid uuid PRIMARY KEY, + c DOUBLE PRECISION NOT NULL, + id TEXT NOT NULL, + inlet_temp double precision NOT NULL, + operates_from timestamp with time zone, + operates_until timestamp with time zone, + operator uuid, + return_temp double precision NOT NULL, + storage_volume_lvl double precision NOT NULL, + thermal_bus uuid NOT NULL, + grid_uuid uuid NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.thermal_house_input +( + uuid UUID PRIMARY KEY, + eth_capa DOUBLE PRECISION NOT NULL, + eth_losses DOUBLE PRECISION NOT NULL, + id TEXT NOT NULL, + lower_temperature_limit DOUBLE PRECISION NOT NULL, + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + target_temperature DOUBLE PRECISION NOT NULL, + thermal_bus UUID NOT NULL, + upper_temperature_limit DOUBLE PRECISION NOT NULL, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.operator_input +( + uuid UUID PRIMARY KEY, + id TEXT NOT NULL, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.line_input +( + uuid UUID PRIMARY KEY, + geo_position TEXT NOT NULL, + id TEXT NOT NULL, + length DOUBLE PRECISION NOT NULL, + node_a UUID NOT NULL, + node_b UUID NOT NULL, + olm_characteristic TEXT NOT NULL, + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + parallel_devices int NOT NULL, + type UUID NOT NULL, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS 
+ TABLESPACE pg_default; + +CREATE TABLE public.thermal_bus_input +( + uuid UUID PRIMARY KEY, + id TEXT NOT NULL, + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.load_input +( + uuid UUID PRIMARY KEY, + controlling_em UUID NOT NULL, + cos_phi_rated TEXT NOT NULL, + dsm BOOL NOT NULL, + e_cons_annual DOUBLE PRECISION NOT NULL, + id TEXT NOT NULL, + load_profile TEXT NOT NULL, + node UUID NOT NULL REFERENCES node_input(uuid), + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + q_characteristics TEXT NOT NULL, + s_rated DOUBLE PRECISION NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.em_input +( + uuid UUID PRIMARY KEY, + control_strategy TEXT NOT NULL, + controlling_em UUID, + id TEXT NOT NULL, + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.pv_input +( + uuid UUID PRIMARY KEY, + albedo DOUBLE PRECISION NOT NULL, + azimuth DOUBLE PRECISION NOT NULL, + controlling_em UUID, + cos_phi_rated TEXT NOT NULL, + elevation_angle DOUBLE PRECISION NOT NULL, + eta_conv DOUBLE PRECISION NOT NULL, + id TEXT NOT NULL, + k_g DOUBLE PRECISION NOT NULL, + k_t DOUBLE PRECISION NOT NULL, + market_reaction BOOL NOT NULL, + node UUID NOT NULL, + operates_from TIMESTAMP WITH TIME ZONE, + operates_until TIMESTAMP WITH TIME ZONE, + operator UUID, + q_characteristics TEXT NOT NULL, + s_rated DOUBLE PRECISION NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.storage_input +( + uuid UUID PRIMARY KEY, + controlling_em UUID NOT NULL, + id TEXT NOT NULL, + node UUID NOT NULL, + operates_from TIMESTAMP WITH TIME ZONE, + operates_until 
TIMESTAMP WITH TIME ZONE, + operator UUID, + q_characteristics TEXT NOT NULL, + type UUID NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; \ No newline at end of file diff --git a/src/test/resources/edu/ie3/datamodel/io/sink/_sql/load_profile.sql b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/load_profile.sql new file mode 100644 index 000000000..474be75de --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/load_profile.sql @@ -0,0 +1,11 @@ +CREATE TABLE public.load_profile_g2 +( + time_series UUID NOT NULL, + day_of_week TEXT NOT NULL, + quarter_hour_of_day TEXT NOT NULL, + p DOUBLE PRECISION, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + diff --git a/src/test/resources/edu/ie3/datamodel/io/sink/_sql/result_entities.sql b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/result_entities.sql new file mode 100644 index 000000000..a924203fc --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/result_entities.sql @@ -0,0 +1,67 @@ +CREATE TABLE public.pv_res +( + input_model UUID NOT NULL, + p DOUBLE PRECISION NOT NULL, + q DOUBLE PRECISION NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.wec_res +( + input_model UUID NOT NULL, + p DOUBLE PRECISION NOT NULL, + q DOUBLE PRECISION NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.ev_res +( + input_model UUID NOT NULL, + p DOUBLE PRECISION NOT NULL, + q DOUBLE PRECISION NOT NULL, + soc DOUBLE PRECISION NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.evcs_res +( + input_model UUID NOT NULL, + p DOUBLE PRECISION NOT NULL, + q DOUBLE PRECISION NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + grid_uuid UUID NOT NULL +) + 
WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.em_res +( + input_model UUID NOT NULL, + p DOUBLE PRECISION NOT NULL, + q DOUBLE PRECISION NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.flex_options_res +( + input_model UUID NOT NULL, + p_max DOUBLE PRECISION NOT NULL, + p_min DOUBLE PRECISION NOT NULL, + p_ref DOUBLE PRECISION NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default; diff --git a/src/test/resources/edu/ie3/datamodel/io/sink/_sql/setup.sql b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/setup.sql new file mode 100644 index 000000000..aacd7775c --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/setup.sql @@ -0,0 +1 @@ +CREATE SCHEMA public; \ No newline at end of file diff --git a/src/test/resources/edu/ie3/datamodel/io/sink/_sql/time_series.sql b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/time_series.sql new file mode 100644 index 000000000..cdceb0e52 --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/time_series.sql @@ -0,0 +1,109 @@ +CREATE TABLE public.time_series_c +( + time_series UUID NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + price DOUBLE PRECISION, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_c_series_id ON time_series_c USING hash (time_series); + +-- Order of columns is important when using btree: https://www.postgresql.org/docs/14/indexes-multicolumn.html +-- Column time_series needs to placed as the first argument since we at most use an equality constraint on +-- time_series and a range query on time. 
+CREATE UNIQUE INDEX time_series_c_series_time ON time_series_c USING btree (time_series, time); + +CREATE TABLE public.time_series_p +( + time_series UUID NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + p DOUBLE PRECISION, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_p_series_id ON time_series_p USING hash (time_series); + +CREATE UNIQUE INDEX time_series_p_series_time ON time_series_p USING btree (time_series, time); + +CREATE TABLE public.time_series_pq +( + time_series UUID NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + p DOUBLE PRECISION, + q DOUBLE PRECISION, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_pq_series_id ON time_series_pq USING hash (time_series); + +CREATE UNIQUE INDEX time_series_pq_series_time ON time_series_pq USING btree (time_series, time); + +CREATE TABLE public.time_series_h +( + time_series UUID NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + heat_demand DOUBLE PRECISION, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_h_series_id ON time_series_h USING hash (time_series); + +CREATE UNIQUE INDEX time_series_h_series_time ON time_series_h USING btree (time_series, time); + +CREATE TABLE public.time_series_ph +( + time_series UUID NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + p DOUBLE PRECISION, + heat_demand DOUBLE PRECISION, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_ph_series_id ON time_series_ph USING hash (time_series); + +CREATE UNIQUE INDEX time_series_ph_series_time ON time_series_ph USING btree (time_series, time); + +CREATE TABLE public.time_series_pqh +( + time_series UUID NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + p DOUBLE PRECISION, + q DOUBLE PRECISION, + heat_demand DOUBLE PRECISION, + grid_uuid 
UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_pqh_series_id ON time_series_pqh USING hash (time_series); + +CREATE UNIQUE INDEX time_series_pqh_series_time ON time_series_pqh USING btree (time_series, time); + +CREATE TABLE public.time_series_weather +( + time_series UUID NOT NULL, + coordinate TEXT NOT NULL, + time TIMESTAMP WITH TIME ZONE NOT NULL, + diffuse_irradiance DOUBLE PRECISION, + direct_irradiance DOUBLE PRECISION, + direction DOUBLE PRECISION, + temperature DOUBLE PRECISION, + velocity DOUBLE PRECISION, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE INDEX time_series_weather_series_id ON time_series_weather USING hash (time_series); + +CREATE UNIQUE INDEX time_series_weather_series_time ON time_series_weather USING btree (time_series, time); \ No newline at end of file diff --git a/src/test/resources/edu/ie3/datamodel/io/sink/_sql/types.sql b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/types.sql new file mode 100644 index 000000000..03efd5342 --- /dev/null +++ b/src/test/resources/edu/ie3/datamodel/io/sink/_sql/types.sql @@ -0,0 +1,54 @@ +CREATE TABLE public.transformer_2_w_type_input +( + uuid UUID PRIMARY KEY, + b_m DOUBLE PRECISION NOT NULL, + d_phi DOUBLE PRECISION NOT NULL, + d_v DOUBLE PRECISION NOT NULL, + g_m DOUBLE PRECISION NOT NULL, + id TEXT NOT NULL, + r_sc DOUBLE PRECISION NOT NULL, + s_rated DOUBLE PRECISION NOT NULL, + tap_max INT NOT NULL, + tap_min INT NOT NULL, + tap_neutr INT NOT NULL, + tap_side BOOL NOT NULL, + v_rated_a DOUBLE PRECISION NOT NULL, + v_rated_b DOUBLE PRECISION NOT NULL, + x_sc DOUBLE PRECISION NOT NULL, + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.line_type_input +( + uuid UUID PRIMARY KEY, + b DOUBLE PRECISION NOT NULL, + g DOUBLE PRECISION NOT NULL, + i_max DOUBLE PRECISION NOT NULL, + id TEXT NOT NULL, + r DOUBLE PRECISION 
NOT NULL, + v_rated DOUBLE PRECISION NOT NULL, + x DOUBLE PRECISION NOT NULL, + + grid_uuid UUID NOT NULL REFERENCES grids(uuid) +) + WITHOUT OIDS + TABLESPACE pg_default; + +CREATE TABLE public.storage_type_input +( + uuid UUID PRIMARY KEY, + active_power_gradient DOUBLE PRECISION NOT NULL, + capex DOUBLE PRECISION NOT NULL, + cos_phi_rated TEXT NOT NULL, + e_storage DOUBLE PRECISION NOT NULL, + eta DOUBLE PRECISION NOT NULL, + id TEXT NOT NULL, + opex DOUBLE PRECISION NOT NULL, + p_max DOUBLE PRECISION NOT NULL, + s_rated DOUBLE PRECISION NOT NULL, + grid_uuid UUID NOT NULL +) + WITHOUT OIDS + TABLESPACE pg_default;