Support multi-scope configuration settings #10300

Draft: wants to merge 20 commits into base: main
Changes from 15 commits
@@ -182,6 +182,9 @@ private void setScope(ConfigurationResponse cfgResponse) {
return;
}
cfgResponse.setObjectName("configuration");
if (StringUtils.isNotBlank(cfgResponse.getScope())) {
return;
}
if (getZoneId() != null) {
cfgResponse.setScope("zone");
}
@@ -16,6 +16,8 @@
// under the License.
package com.cloud.capacity;

import java.util.List;

import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;

@@ -67,7 +69,7 @@ public interface CapacityManager {
"0.85",
"Percentage (as a value between 0 and 1) of storage utilization above which allocators will disable using the pool for low storage available.",
true,
ConfigKey.Scope.Zone);
List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));
static final ConfigKey<Double> StorageOverprovisioningFactor =
new ConfigKey<>(
"Storage",
@@ -85,7 +87,7 @@ public interface CapacityManager {
"0.85",
"Percentage (as a value between 0 and 1) of allocated storage utilization above which allocators will disable using the pool for low allocated storage available.",
true,
ConfigKey.Scope.Zone);
List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));
static final ConfigKey<Boolean> StorageOperationsExcludeCluster =
new ConfigKey<>(
Boolean.class,
@@ -125,7 +127,7 @@ public interface CapacityManager {
"Percentage (as a value between 0 and 1) of allocated storage utilization above which allocators will disable using the pool for volume resize. " +
"This is applicable only when volume.resize.allowed.beyond.allocation is set to true.",
true,
ConfigKey.Scope.Zone);
List.of(ConfigKey.Scope.StoragePool, ConfigKey.Scope.Zone));

ConfigKey<Integer> CapacityCalculateWorkers = new ConfigKey<>(ConfigKey.CATEGORY_ADVANCED, Integer.class,
"capacity.calculate.workers", "1",
@@ -22,18 +22,27 @@
import java.util.Map;
import java.util.stream.Collectors;

import javax.inject.Inject;

import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.config.ConfigKey.Scope;
import org.apache.cloudstack.framework.config.ScopedConfigStorage;
import org.apache.commons.collections.CollectionUtils;

import com.cloud.dc.dao.ClusterDao;
import com.cloud.org.Cluster;
import com.cloud.utils.Pair;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.SearchBuilder;
import com.cloud.utils.db.SearchCriteria;
import com.cloud.utils.db.TransactionLegacy;

public class ClusterDetailsDaoImpl extends GenericDaoBase<ClusterDetailsVO, Long> implements ClusterDetailsDao, ScopedConfigStorage {

@Inject
ClusterDao clusterDao;

protected final SearchBuilder<ClusterDetailsVO> ClusterSearch;
protected final SearchBuilder<ClusterDetailsVO> DetailSearch;

@@ -180,4 +189,13 @@ private String getCpuMemoryOvercommitRatio(String name) {

return name;
}

@Override
public Pair<Scope, Long> getParentScope(long id) {
Cluster cluster = clusterDao.findById(id);
if (cluster == null) {
return null;
}
return new Pair<>(getScope().getParent(), cluster.getDataCenterId());
}
}
@@ -30,6 +30,8 @@
import org.apache.cloudstack.storage.datastore.db.StoragePoolDetailsDao;
import org.apache.cloudstack.storage.datastore.db.StoragePoolVO;

import com.cloud.utils.Pair;

public class StoragePoolDetailsDaoImpl extends ResourceDetailsDaoBase<StoragePoolDetailVO> implements StoragePoolDetailsDao, ScopedConfigStorage {

@Inject
@@ -57,4 +59,17 @@ public void addDetail(long resourceId, String key, String value, boolean display
}
super.addDetail(new StoragePoolDetailVO(resourceId, key, value, display));
}

@Override
public Pair<Scope, Long> getParentScope(long id) {
StoragePoolVO pool = _storagePoolDao.findById(id);
if (pool != null) {
if (pool.getClusterId() != null) {
return new Pair<>(getScope().getParent(), pool.getClusterId());
} else {
return new Pair<>(ConfigKey.Scope.Zone, pool.getDataCenterId());
}
}
return null;
}
}
@@ -54,7 +54,7 @@ public ConfigurationGroupsAggregator() {

public void updateConfigurationGroups() {
LOG.debug("Updating configuration groups");
List<ConfigurationVO> configs = configDao.listAllIncludingRemoved();
List<ConfigurationVO> configs = configDao.searchPartialConfigurations();
if (CollectionUtils.isEmpty(configs)) {
return;
}
@@ -87,6 +87,36 @@ public boolean columnExists(Connection conn, String tableName, String columnName
return columnExists;
}

public String getColumnType(Connection conn, String tableName, String columnName) {
try (PreparedStatement pstmt = conn.prepareStatement(String.format("DESCRIBE %s %s", tableName, columnName));){
ResultSet rs = pstmt.executeQuery();
if (rs.next()) {
return rs.getString("Type");
}
} catch (SQLException e) {
logger.debug("Type for column {} can not be retrieved in {} ignoring exception: {}", columnName, tableName, e.getMessage());
}
return null;
}

public void addColumn(Connection conn, String tableName, String columnName, String columnDefinition) {
try (PreparedStatement pstmt = conn.prepareStatement(String.format("ALTER TABLE %s ADD COLUMN %s %s", tableName, columnName, columnDefinition));){
pstmt.executeUpdate();
logger.debug("Column {} is added successfully from the table {}", columnName, tableName);
} catch (SQLException e) {
logger.warn("Unable to add column {} to table {} due to exception", columnName, tableName, e);
}
}

public void changeColumn(Connection conn, String tableName, String oldColumnName, String newColumnName, String columnDefinition) {
try (PreparedStatement pstmt = conn.prepareStatement(String.format("ALTER TABLE %s CHANGE COLUMN %s %s %s", tableName, oldColumnName, newColumnName, columnDefinition));){
pstmt.executeUpdate();
logger.debug("Column {} is changed successfully to {} from the table {}", oldColumnName, newColumnName, tableName);
} catch (SQLException e) {
logger.warn("Unable to add column {} to {} from the table {} due to exception", oldColumnName, newColumnName, tableName, e);
}
}

public String generateIndexName(String tableName, String... columnName) {
return String.format("i_%s__%s", tableName, StringUtils.join(columnName, "__"));
}
@@ -58,4 +58,21 @@ public static void dropTableColumnsIfExist(Connection conn, String tableName, Li
}
}

public static String getTableColumnType(Connection conn, String tableName, String columnName) {
return dao.getColumnType(conn, tableName, columnName);
}

public static void addTableColumnIfNotExist(Connection conn, String tableName, String columnName, String columnDefinition) {
if (!dao.columnExists(conn, tableName, columnName)) {
dao.addColumn(conn, tableName, columnName, columnDefinition);
}
}

public static void changeTableColumnIfNotExist(Connection conn, String tableName, String oldColumnName, String newColumnName, String columnDefinition) {
if (dao.columnExists(conn, tableName, oldColumnName)) {
System.out.println("column exists------------------------" + oldColumnName);
Contributor: this can be removed?

dao.changeColumn(conn, tableName, oldColumnName, newColumnName, columnDefinition);
}
}

}
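
For reference, when the migration in Upgrade42010to42100 below drives these wrappers against the configuration table, the resulting DDL is roughly the following sketch (the DROP COLUMN statement is assumed from the pre-existing dropTableColumnsIfExist helper, whose implementation is not part of this diff):

-- from addTableColumnIfNotExist(conn, "configuration", "new_scope", "BIGINT DEFAULT 0")
ALTER TABLE configuration ADD COLUMN new_scope BIGINT DEFAULT 0;
-- assumed shape of dropTableColumnsIfExist(conn, "configuration", List.of("scope"))
ALTER TABLE configuration DROP COLUMN scope;
-- from changeTableColumnIfNotExist(conn, "configuration", "new_scope", "scope", "BIGINT NOT NULL DEFAULT 0 COMMENT ...")
ALTER TABLE configuration CHANGE COLUMN new_scope scope BIGINT NOT NULL DEFAULT 0 COMMENT 'Bitmask for scope(s) of this parameter';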
@@ -17,10 +17,16 @@
package com.cloud.upgrade.dao;

import com.cloud.upgrade.SystemVmTemplateRegistration;
import com.cloud.utils.db.TransactionLegacy;
import com.cloud.utils.exception.CloudRuntimeException;

import java.io.InputStream;
import java.sql.Connection;
import java.sql.PreparedStatement;
import java.sql.SQLException;
import java.util.List;

import org.apache.cloudstack.framework.config.ConfigKey;

public class Upgrade42010to42100 extends DbUpgradeAbstractImpl implements DbUpgrade, DbUpgradeSystemVmTemplate {
private SystemVmTemplateRegistration systemVmTemplateRegistration;
@@ -53,6 +59,7 @@ public InputStream[] getPrepareScripts() {

@Override
public void performDataMigration(Connection conn) {
migrateConfigurationScopeToBitmask(conn);
}

@Override
@@ -80,4 +87,35 @@ public void updateSystemVmTemplates(Connection conn) {
throw new CloudRuntimeException("Failed to find / register SystemVM template(s)");
}
}

protected void migrateConfigurationScopeToBitmask(Connection conn) {
String scopeDataType = DbUpgradeUtils.getTableColumnType(conn, "configuration", "scope");
logger.info("------------------------{}", scopeDataType);
Contributor: this can be removed or rephrased if needed

if (!"varchar(255)".equals(scopeDataType)) {
return;
}
DbUpgradeUtils.addTableColumnIfNotExist(conn, "configuration", "new_scope", "BIGINT DEFAULT 0");
migrateExistingConfigurationScopeValues(conn);
DbUpgradeUtils.dropTableColumnsIfExist(conn, "configuration", List.of("scope"));
DbUpgradeUtils.changeTableColumnIfNotExist(conn, "configuration", "new_scope", "scope", "BIGINT NOT NULL DEFAULT 0 COMMENT 'Bitmask for scope(s) of this parameter'");
}

protected void migrateExistingConfigurationScopeValues(Connection conn) {
StringBuilder sql = new StringBuilder("UPDATE configuration\n" +
"SET new_scope = " +
" CASE ");
for (ConfigKey.Scope scope : ConfigKey.Scope.values()) {
sql.append(" WHEN scope = '").append(scope.name()).append("' THEN ").append(scope.getBitValue()).append(" ");
}
sql.append(" ELSE 0 " +
" END " +
"WHERE scope IS NOT NULL;");
TransactionLegacy txn = TransactionLegacy.currentTxn();
try (PreparedStatement pstmt = txn.prepareAutoCloseStatement(sql.toString())) {
pstmt.executeUpdate();
} catch (SQLException e) {
logger.error("Failed to migrate existing configuration scope values to bitmask", e);
throw new CloudRuntimeException(String.format("Failed to migrate existing configuration scope values to bitmask due to: %s", e.getMessage()));
}
}
}
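
For clarity, the UPDATE built by migrateExistingConfigurationScopeValues maps each legacy single-valued scope string to its bitmask equivalent. A minimal sketch of the generated statement follows, with illustrative bit values (the actual values come from ConfigKey.Scope.getBitValue()):

UPDATE configuration
SET new_scope =
    CASE
        WHEN scope = 'Global' THEN 1
        WHEN scope = 'Zone' THEN 2
        WHEN scope = 'Cluster' THEN 4
        WHEN scope = 'StoragePool' THEN 8
        -- one WHEN branch per ConfigKey.Scope value
        ELSE 0
    END
WHERE scope IS NOT NULL;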
@@ -34,6 +34,7 @@
import com.cloud.domain.dao.DomainDao;
import com.cloud.domain.dao.DomainDetailsDao;
import com.cloud.user.dao.AccountDao;
import com.cloud.utils.Pair;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.db.GenericDaoBase;
import com.cloud.utils.db.QueryBuilder;
@@ -162,4 +163,13 @@ public String getActualValue(AccountDetailVO accountDetailVO) {
}
return accountDetailVO.getValue();
}

@Override
public Pair<Scope, Long> getParentScope(long id) {
Account account = _accountDao.findById(id);
if (account == null) {
return null;
}
return new Pair<>(getScope().getParent(), account.getDomainId());
}
}
@@ -20,13 +20,17 @@
import java.util.List;
import java.util.Map;

import javax.inject.Inject;

import org.apache.cloudstack.api.ApiConstants;
import org.apache.cloudstack.framework.config.ConfigKey;
import org.apache.cloudstack.framework.config.ConfigKey.Scope;
import org.apache.cloudstack.framework.config.ScopedConfigStorage;
import org.apache.cloudstack.resourcedetail.ResourceDetailsDaoBase;
import org.springframework.stereotype.Component;

import com.cloud.storage.ImageStore;
import com.cloud.utils.Pair;
import com.cloud.utils.crypt.DBEncryptionUtil;
import com.cloud.utils.db.QueryBuilder;
import com.cloud.utils.db.SearchBuilder;
@@ -36,6 +40,8 @@

@Component
public class ImageStoreDetailsDaoImpl extends ResourceDetailsDaoBase<ImageStoreDetailVO> implements ImageStoreDetailsDao, ScopedConfigStorage {
@Inject
ImageStoreDao imageStoreDao;

protected final SearchBuilder<ImageStoreDetailVO> storeSearch;

@@ -115,4 +121,13 @@ public void addDetail(long resourceId, String key, String value, boolean display
super.addDetail(new ImageStoreDetailVO(resourceId, key, value, display));
}

@Override
public Pair<Scope, Long> getParentScope(long id) {
ImageStore store = imageStoreDao.findById(id);
if (store == null) {
return null;
}
return new Pair<>(getScope().getParent(), store.getDataCenterId());
}

}
@@ -33,4 +33,3 @@ WHERE rp.rule = 'quotaStatement'
AND NOT EXISTS(SELECT 1 FROM cloud.role_permissions rp_ WHERE rp.role_id = rp_.role_id AND rp_.rule = 'quotaCreditsList');

CALL `cloud`.`IDEMPOTENT_ADD_COLUMN`('cloud.host', 'last_mgmt_server_id', 'bigint unsigned DEFAULT NULL COMMENT "last management server this host is connected to" AFTER `mgmt_server_id`');
