PHOENIX-7502 :- Decouple principal from HAGroupInfo (#2053)
Co-authored-by: lokesh-khurana <[email protected]>
lokiore and lokesh-khurana authored Jan 22, 2025
1 parent 35b12bc · commit d0a6358
Showing 21 changed files with 833 additions and 160 deletions.
File: FailoverPhoenixConnection.java
@@ -72,14 +72,11 @@ public class FailoverPhoenixConnection implements PhoenixMonitoredConnection {
     public static final String FAILOVER_TIMEOUT_MS_ATTR = "phoenix.ha.failover.timeout.ms";
     public static final long FAILOVER_TIMEOUT_MS_DEFAULT = 10_000;
     private static final Logger LOG = LoggerFactory.getLogger(FailoverPhoenixConnection.class);

     /**
-     * Connection properties.
-     */
-    private final Properties properties;
-    /**
-     * High availability group.
+     * Context for FailoverPhoenixConnection
      */
-    private final HighAvailabilityGroup haGroup;
+    private final FailoverPhoenixContext context;
     /**
      * Failover policy, per connection.
      */
@@ -103,13 +100,13 @@ public class FailoverPhoenixConnection implements PhoenixMonitoredConnection {
      */
     private Map<String, Map<MetricType, Long>> previousReadMetrics = new HashMap<>();

-    public FailoverPhoenixConnection(HighAvailabilityGroup haGroup, Properties properties)
+    public FailoverPhoenixConnection(FailoverPhoenixContext context)
             throws SQLException {
-        this.properties = properties;
-        this.haGroup = haGroup;
-        this.policy = FailoverPolicy.get(properties);
+        this.context = context;
+        this.policy = FailoverPolicy.get(context.getProperties());
         this.isClosed = false;
-        this.connection = haGroup.connectActive(properties);
+        this.connection = context.getHAGroup().connectActive(context.getProperties(),
+                context.getHAURLInfo());
     }

     /**
@@ -171,9 +168,9 @@ private static Map<String, Map<MetricType, Long>> mergeMetricMaps(
     void failover(long timeoutMs) throws SQLException {
         checkConnection();

-        if (haGroup.isActive(connection)) {
+        if (context.getHAGroup().isActive(connection)) {
             LOG.info("Connection {} is against ACTIVE cluster in HA group {}; skip failing over.",
-                    connection.getURL(), haGroup.getGroupInfo().getName());
+                    connection.getURL(), context.getHAGroup().getGroupInfo().getName());
             return;
         }

@@ -183,7 +180,8 @@ void failover(long timeoutMs) throws SQLException {
         while (newConn == null &&
                 EnvironmentEdgeManager.currentTimeMillis() < startTime + timeoutMs) {
             try {
-                newConn = haGroup.connectActive(properties);
+                newConn = context.getHAGroup().connectActive(context.getProperties(),
+                        context.getHAURLInfo());
             } catch (SQLException e) {
                 cause = e;
                 LOG.info("Got exception when trying to connect to active cluster.", e);
@@ -197,7 +195,7 @@ void failover(long timeoutMs) throws SQLException {
         }
         if (newConn == null) {
             throw new FailoverSQLException("Can not failover connection",
-                    haGroup.getGroupInfo().toString(), cause);
+                    context.getHAGroup().getGroupInfo().toString(), cause);
         }

         final PhoenixConnection oldConn = connection;
@@ -217,7 +215,7 @@ void failover(long timeoutMs) throws SQLException {
                 oldConn.close(new SQLExceptionInfo
                         .Builder(SQLExceptionCode.HA_CLOSED_AFTER_FAILOVER)
                         .setMessage("Phoenix connection got closed due to failover")
-                        .setHaGroupInfo(haGroup.getGroupInfo().toString())
+                        .setHaGroupInfo(context.getHAGroup().getGroupInfo().toString())
                         .build()
                         .buildException());
             } catch (SQLException e) {
@@ -226,7 +224,8 @@ void failover(long timeoutMs) throws SQLException {
                 }
             }
         }
-        LOG.info("Connection {} failed over to {}", haGroup.getGroupInfo(), connection.getURL());
+        LOG.info("Connection {} failed over to {}", context.getHAGroup().getGroupInfo(),
+                connection.getURL());
     }

     /**
@@ -241,15 +240,15 @@ void failover(long timeoutMs) throws SQLException {
     private void checkConnection() throws SQLException {
         if (isClosed) {
             throw new SQLExceptionInfo.Builder(SQLExceptionCode.CONNECTION_CLOSED)
-                    .setHaGroupInfo(haGroup.getGroupInfo().toString())
+                    .setHaGroupInfo(context.getHAGroup().getGroupInfo().toString())
                     .build()
                     .buildException();
         }
         if (connection == null) {
             throw new SQLExceptionInfo
                     .Builder(SQLExceptionCode.CANNOT_ESTABLISH_CONNECTION)
                     .setMessage("Connection has not been established to ACTIVE HBase cluster")
-                    .setHaGroupInfo(haGroup.getGroupInfo().toString())
+                    .setHaGroupInfo(context.getHAGroup().getGroupInfo().toString())
                     .build()
                     .buildException();
         }
@@ -327,8 +326,9 @@ public void clearMetrics() {
     @VisibleForTesting
     <T> T wrapActionDuringFailover(SupplierWithSQLException<T> s) throws SQLException {
         checkConnection();
-        final long timeoutMs = Long.parseLong(properties.getProperty(FAILOVER_TIMEOUT_MS_ATTR,
-                String.valueOf(FAILOVER_TIMEOUT_MS_DEFAULT)));
+        final long timeoutMs = Long.parseLong(context.getProperties().
+                getProperty(FAILOVER_TIMEOUT_MS_ATTR,
+                        String.valueOf(FAILOVER_TIMEOUT_MS_DEFAULT)));
         int failoverCount = 0;
         while (true) {
             try {
@@ -642,4 +642,12 @@ interface SupplierWithSQLException<T> {
     interface RunWithSQLException {
         void run() throws SQLException;
     }
+
+    /**
+     * @return the context of a given FailoverPhoenixConnection
+     */
+    @VisibleForTesting
+    public FailoverPhoenixContext getContext() {
+        return context;
+    }
 }
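
The hunks above move every property lookup behind context.getProperties(); the failover timeout itself is still an ordinary connection property, keyed by FAILOVER_TIMEOUT_MS_ATTR with FAILOVER_TIMEOUT_MS_DEFAULT as the fallback. A minimal, self-contained sketch of how a caller might override that timeout; the class name and the 5000 ms value are illustrative only, not part of this commit:

import java.util.Properties;

// Illustrative sketch (not part of commit d0a6358): overriding the failover timeout
// that wrapActionDuringFailover() reads from the properties carried by the context.
public class FailoverTimeoutSketch {
    // Same key as FailoverPhoenixConnection.FAILOVER_TIMEOUT_MS_ATTR in the diff above.
    private static final String FAILOVER_TIMEOUT_MS_ATTR = "phoenix.ha.failover.timeout.ms";

    public static void main(String[] args) {
        Properties props = new Properties();
        // Lower the failover window from the 10_000 ms default to 5 seconds.
        props.setProperty(FAILOVER_TIMEOUT_MS_ATTR, "5000");

        long timeoutMs = Long.parseLong(
                props.getProperty(FAILOVER_TIMEOUT_MS_ATTR, String.valueOf(10_000L)));
        System.out.println("failover timeout = " + timeoutMs + " ms");
    }
}
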
File: FailoverPhoenixContext.java (new file)
@@ -0,0 +1,49 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.jdbc;

import java.util.Properties;

/**
 * FailoverPhoenixContext holds the connection properties, the HighAvailabilityGroup, and the
 * HAURLInfo for a failover Phoenix connection.
*/
public class FailoverPhoenixContext {

private final Properties properties;
private final HighAvailabilityGroup haGroup;
private final HAURLInfo haurlInfo;

FailoverPhoenixContext(Properties properties, HighAvailabilityGroup haGroup,
HAURLInfo haurlInfo) {
this.properties = properties;
this.haGroup = haGroup;
this.haurlInfo = haurlInfo;
}

public Properties getProperties() {
return properties;
}

public HighAvailabilityGroup getHAGroup() {
return haGroup;
}

public HAURLInfo getHAURLInfo() {
return haurlInfo;
}
}
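
FailoverPhoenixContext is a plain immutable holder with a package-private constructor and three getters. The same-package sketch below is illustrative only: the group name, principal, and property value are invented, HAURLInfo is the class added later in this commit, and null stands in for a HighAvailabilityGroup that a real caller would have resolved elsewhere.

package org.apache.phoenix.jdbc;

import java.util.Properties;

// Illustrative sketch (not part of commit d0a6358); lives in the same package
// because the FailoverPhoenixContext and HAURLInfo constructors are package-private.
public class FailoverPhoenixContextSketch {
    public static void main(String[] args) {
        Properties props = new Properties();
        props.setProperty("phoenix.ha.failover.timeout.ms", "5000");

        // Invented HA group name and principal.
        HAURLInfo urlInfo = new HAURLInfo("group1", "svc_reporting");

        // null stands in for a resolved HighAvailabilityGroup, which is not constructed here.
        FailoverPhoenixContext context = new FailoverPhoenixContext(props, null, urlInfo);

        System.out.println(context.getHAURLInfo());   // group1[svc_reporting]
        System.out.println(context.getProperties().getProperty("phoenix.ha.failover.timeout.ms"));
    }
}
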
File: HAURLInfo.java (new file)
@@ -0,0 +1,104 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.jdbc;


import org.apache.commons.lang3.builder.EqualsBuilder;
import org.apache.commons.lang3.builder.HashCodeBuilder;
import org.apache.phoenix.thirdparty.com.google.common.annotations.VisibleForTesting;
import org.apache.phoenix.thirdparty.com.google.common.base.Preconditions;

/**
 * An HAURLInfo contains the information for an HA URL with respect to an HA group name.
 * <p>
 * It is constructed based on client input, including the JDBC connection string and properties.
 * Objects of this class are used to get the appropriate principal and additional JDBC parameters.
* <p>
* This class is immutable.
*/

@VisibleForTesting
public class HAURLInfo {
private final String name;
private final String principal;
private final String additionalJDBCParams;

HAURLInfo(String name, String principal, String additionalJDBCParams) {
Preconditions.checkNotNull(name);
this.name = name;
this.principal = principal;
this.additionalJDBCParams = additionalJDBCParams;
}

HAURLInfo(String name, String principal) {
this(name, principal, null);
}

HAURLInfo(String name) {
this(name, null, null);
}

public String getName() {
return name;
}

public String getPrincipal() {
return principal;
}

public String getAdditionalJDBCParams() {
return additionalJDBCParams;
}

@Override
public String toString() {
if (principal != null) {
return String.format("%s[%s]", name, principal);
}
return name;
}

@Override
public boolean equals(Object other) {
if (other == null) {
return false;
}
if (other == this) {
return true;
}
if (other.getClass() != getClass()) {
return false;
}
HAURLInfo otherInfo = (HAURLInfo) other;
return new EqualsBuilder()
.append(name, otherInfo.name)
.append(principal, otherInfo.principal)
.isEquals();
}

@Override
public int hashCode() {
if (principal != null) {
return new HashCodeBuilder(7, 47)
.append(name)
.append(principal).hashCode();
}
return new HashCodeBuilder(7, 47).append(name).hashCode();
}

}
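
Because equals() and hashCode() above consider only the group name and the principal, two URLs that target the same HA group under different principals map to distinct HAURLInfo instances, while the additional JDBC parameters never affect identity. An illustrative same-package sketch of that behavior; the names, principals, and parameter string are invented:

package org.apache.phoenix.jdbc;

// Illustrative sketch (not part of commit d0a6358); same package because the
// HAURLInfo constructors are package-private.
public class HAURLInfoSketch {
    public static void main(String[] args) {
        HAURLInfo reporting = new HAURLInfo("group1", "svc_reporting");
        HAURLInfo etl = new HAURLInfo("group1", "svc_etl");
        HAURLInfo reportingWithParams =
                new HAURLInfo("group1", "svc_reporting", "phoenix.query.timeoutMs=30000");

        System.out.println(reporting);                              // group1[svc_reporting]
        System.out.println(reporting.equals(etl));                  // false: different principal
        System.out.println(reporting.equals(reportingWithParams));  // true: params not part of identity
        System.out.println(reporting.hashCode() == reportingWithParams.hashCode());  // true
    }
}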