PHOENIX-7172: Support HBase 2.6 (apache#1793)
richardantal authored Jun 18, 2024
1 parent 259b560 commit ce17ec1
Showing 44 changed files with 1,137 additions and 187 deletions.
5 changes: 5 additions & 0 deletions phoenix-assembly/pom.xml
@@ -276,6 +276,11 @@
<artifactId>phoenix-hbase-compat-2.5.4</artifactId>
<version>${project.version}</version>
</dependency>
<dependency>
<groupId>org.apache.phoenix</groupId>
<artifactId>phoenix-hbase-compat-2.6.0</artifactId>
<version>${project.version}</version>
</dependency>
</dependencies>
</profile>
</profiles>
3 changes: 3 additions & 0 deletions phoenix-core-client/pom.xml
@@ -66,6 +66,9 @@
|| ("${hbase.compat.version}".equals("2.5.4")
&amp;&amp; hbaseMinor == 5
&amp;&amp; hbasePatch &gt;=4)
|| ("${hbase.compat.version}".equals("2.6.0")
&amp;&amp; hbaseMinor == 6
&amp;&amp; hbasePatch &gt;=0)
)
</condition>
</evaluateBeanshell>
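The enforcer condition above only checks that the selected hbase.compat.version is consistent with the HBase version actually resolved on the classpath; the new branch admits compat 2.6.0 for any 2.6.x release. A rough plain-Java rendering of the two branches visible in this hunk (illustrative only; the class and method names below are invented, and the real check runs as the BeanShell expression inside the Maven enforcer rule):

// Illustrative rendering of the branches visible in this hunk; the real check
// is the BeanShell expression evaluated by the Maven enforcer rule above.
public final class HBaseCompatGate {
    static boolean compatVersionMatches(String compatVersion, int hbaseMinor, int hbasePatch) {
        return ("2.5.4".equals(compatVersion) && hbaseMinor == 5 && hbasePatch >= 4)
            || ("2.6.0".equals(compatVersion) && hbaseMinor == 6 && hbasePatch >= 0);
    }
}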
(another changed file; file name not shown)
@@ -44,7 +44,6 @@
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Call;
import org.apache.hadoop.hbase.client.coprocessor.Batch.Callback;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;
import org.apache.hadoop.hbase.ipc.CoprocessorRpcChannel;
import org.apache.omid.transaction.TTable;
import org.apache.omid.transaction.Transaction;
@@ -193,25 +192,12 @@ public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
throw new UnsupportedOperationException();
}

@Override
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp, byte[] value, Put put) throws IOException {
throw new UnsupportedOperationException();
}

@Override
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
byte[] value, Delete delete) throws IOException {
throw new UnsupportedOperationException();
}

@Override
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp, byte[] value, Delete delete)
throws IOException {
throw new UnsupportedOperationException();
}

@Override
public Result append(Append append) throws IOException {
throw new UnsupportedOperationException();
@@ -270,13 +256,6 @@ public <R extends Message> void batchCoprocessorService(
throw new UnsupportedOperationException();
}

@Override
public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp, byte[] value, RowMutations mutation)
throws IOException {
throw new UnsupportedOperationException();
}

@Override
public int getOperationTimeout() {
throw new UnsupportedOperationException();
@@ -339,11 +318,6 @@ public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier, Compa
throw new UnsupportedOperationException();
}

@Override
public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
throw new UnsupportedOperationException();
}

@Override
public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier, CompareOperator op, byte[] value,
RowMutations mutation) throws IOException {
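The CompareOp-based checkAndPut/checkAndDelete/checkAndMutate overrides and the CheckAndMutateBuilder variant removed here reappear further down in CompatOmidTransactionTable, presumably because those signatures differ between the HBase release lines Phoenix now supports, so the overrides have to live in per-version compat code. A self-contained sketch of the shim pattern this commit applies throughout (all type names below are hypothetical stand-ins, not the real HBase or Phoenix classes):

// The override whose signature exists only on the older interface lives in the
// version-specific compat base class, so the shared subclass compiles unchanged
// against whichever compat module is on the classpath.
interface LegacyTable {                            // stand-in for the older Table interface;
    void put(byte[] row, byte[] value);            // a newer interface would omit checkAndPut
    boolean checkAndPut(byte[] row, byte[] expected, byte[] value);
}

// Lives in the per-HBase-version compat module (this variant targets the older API).
abstract class CompatShimTable implements LegacyTable {
    @Override
    public boolean checkAndPut(byte[] row, byte[] expected, byte[] value) {
        throw new UnsupportedOperationException();
    }
}

// Lives in the shared Phoenix module and never mentions the legacy signature.
class ShimmedTransactionTable extends CompatShimTable {
    @Override
    public void put(byte[] row, byte[] value) {
        // shared implementation would go here
    }
}

With this split, only the compat modules vary per HBase release line; the shared class stays source-compatible with all of them.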
IndexHalfStoreFileReader.java
@@ -39,6 +39,7 @@
import org.apache.hadoop.hbase.io.hfile.ReaderContext;
import org.apache.hadoop.hbase.io.hfile.ReaderContext.ReaderType;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.compat.hbase.CompatIndexHalfStoreFileReader;
import org.apache.phoenix.index.IndexMaintainer;

/**
@@ -56,7 +57,7 @@
* This file is not splitable. Calls to #midkey() return null.
*/

public class IndexHalfStoreFileReader extends StoreFileReader {
public class IndexHalfStoreFileReader extends CompatIndexHalfStoreFileReader {
private final boolean top;
// This is the key we split around. Its the first possible entry on a row:
// i.e. empty column and a timestamp of LATEST_TIMESTAMP.
@@ -92,11 +93,11 @@ public IndexHalfStoreFileReader(final FileSystem fs, final Path p, final CacheCo
final byte[][] viewConstants, final RegionInfo regionInfo,
byte[] regionStartKeyInHFile, byte[] splitKey, boolean primaryReplicaStoreFile,
AtomicInteger refCount, RegionInfo currentRegion) throws IOException {
super(new ReaderContext(p, in, size, new HFileSystem(fs), primaryReplicaStoreFile,
super(fs, cacheConf, conf, new ReaderContext(p, in, size, new HFileSystem(fs),
primaryReplicaStoreFile,
ReaderType.STREAM),
new HFileInfo(new ReaderContext(p, in, size, new HFileSystem(fs),
primaryReplicaStoreFile, ReaderType.STREAM), conf),
cacheConf, refCount, conf);
primaryReplicaStoreFile, ReaderType.STREAM), conf), p);
getHFileReader().getHFileInfo().initMetaAndIndex(getHFileReader());
this.splitkey = splitKey == null ? r.getSplitKey() : splitKey;
// Is it top or bottom half?
LocalIndexStoreFileScanner.java
@@ -31,12 +31,13 @@
import org.apache.hadoop.hbase.KeyValueUtil;
import org.apache.hadoop.hbase.io.ImmutableBytesWritable;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.phoenix.compat.hbase.CompatLocalIndexStoreFileScanner;
import org.apache.phoenix.index.IndexMaintainer;
import org.apache.phoenix.util.PhoenixKeyValueUtil;

import static org.apache.hadoop.hbase.KeyValue.ROW_LENGTH_SIZE;

public class LocalIndexStoreFileScanner extends StoreFileScanner{
public class LocalIndexStoreFileScanner extends CompatLocalIndexStoreFileScanner {

private IndexHalfStoreFileReader reader;
private boolean changeBottomKeys;
@@ -45,13 +46,13 @@ public class LocalIndexStoreFileScanner extends StoreFileScanner{
public LocalIndexStoreFileScanner(IndexHalfStoreFileReader reader, boolean cacheBlocks, boolean pread,
boolean isCompaction, long readPt, long scannerOrder,
boolean canOptimizeForNonNullColumn) {
super(reader, reader.getScanner(cacheBlocks, pread, isCompaction), !isCompaction, reader
.getHFileReader().hasMVCCInfo(), readPt, scannerOrder, canOptimizeForNonNullColumn);
super(reader, cacheBlocks, pread, isCompaction, readPt, scannerOrder,
canOptimizeForNonNullColumn);
this.reader = reader;
this.changeBottomKeys =
this.reader.getRegionInfo().getStartKey().length == 0
&& this.reader.getSplitRow().length != this.reader.getOffset();
this.comparator = (CellComparatorImpl)getComparator();
this.comparator = (CellComparatorImpl) reader.getComparator();
}

@Override
IndexedHLogReader.java
@@ -18,8 +18,7 @@

package org.apache.hadoop.hbase.regionserver.wal;

import java.io.IOException;

import org.apache.phoenix.compat.hbase.CompatIndexedHLogReader;


/**
@@ -39,11 +38,6 @@
* we need to track which of the regions were on the server when it crashed and only split those
* edits out into their respective regions.
*/
public class IndexedHLogReader extends ProtobufLogReader {
public class IndexedHLogReader extends CompatIndexedHLogReader {

@Override
protected void initAfterCompression() throws IOException {
conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY, IndexedWALEditCodec.class.getName());
super.initAfterCompression();
}
}
(another changed file; file name not shown)
@@ -1,11 +1,10 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
CompatIndexHalfStoreFileReader.java
@@ -0,0 +1,40 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.compat.hbase;

import java.io.IOException;
import java.util.concurrent.atomic.AtomicInteger;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFileInfo;
import org.apache.hadoop.hbase.io.hfile.ReaderContext;
import org.apache.hadoop.hbase.regionserver.StoreFileReader;

public class CompatIndexHalfStoreFileReader extends StoreFileReader {

public CompatIndexHalfStoreFileReader(final FileSystem fs, final CacheConfig cacheConf,
final Configuration conf,
final ReaderContext readerContext,
final HFileInfo hFileInfo, Path p) throws IOException {
super(readerContext, hFileInfo, cacheConf, new AtomicInteger(0), conf);
}


}
CompatIndexedHLogReader.java
@@ -0,0 +1,31 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.compat.hbase;

import java.io.IOException;

import org.apache.hadoop.hbase.regionserver.wal.ProtobufLogReader;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;

public abstract class CompatIndexedHLogReader extends ProtobufLogReader {
@Override
protected void initAfterCompression() throws IOException {
conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY,
"org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec");
super.initAfterCompression();
}
}
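Note that the moved override now names the codec by a string literal, whereas the original IndexedHLogReader used IndexedWALEditCodec.class.getName(). A plausible reason is dependency direction: the compat modules appear to be built without a compile-time dependency on the Phoenix module that owns the codec, so the class is referenced only by name and resolved reflectively at runtime. A minimal sketch of the same wiring under that assumption (the wrapper class and method names here are illustrative):

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.regionserver.wal.WALCellCodec;

public final class CodecWiring {
    // Same effect as the compat override above: select Phoenix's WAL edit codec
    // without a compile-time reference to the codec class itself.
    static void wireIndexedCodec(Configuration conf) {
        conf.set(WALCellCodec.WAL_CELL_CODEC_CLASS_KEY,
                "org.apache.hadoop.hbase.regionserver.wal.IndexedWALEditCodec");
    }
}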
CompatLocalIndexStoreFileScanner.java
@@ -0,0 +1,33 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.phoenix.compat.hbase;

import org.apache.hadoop.hbase.regionserver.StoreFileScanner;


public class CompatLocalIndexStoreFileScanner extends StoreFileScanner {

public CompatLocalIndexStoreFileScanner(CompatIndexHalfStoreFileReader reader,
boolean cacheBlocks, boolean pread,
boolean isCompaction, long readPt, long scannerOrder,
boolean canOptimizeForNonNullColumn) {
super(reader, reader.getScanner(cacheBlocks, pread, isCompaction), !isCompaction, reader
.getHFileReader().hasMVCCInfo(), readPt, scannerOrder, canOptimizeForNonNullColumn);
}


}
CompatOmidTransactionTable.java
@@ -1,11 +1,10 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
@@ -20,9 +19,13 @@
import java.io.IOException;

import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.client.Delete;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.RowMutations;
import org.apache.hadoop.hbase.client.Table;
import org.apache.hadoop.hbase.client.Table.CheckAndMutateBuilder;
import org.apache.hadoop.hbase.filter.CompareFilter.CompareOp;

public abstract class CompatOmidTransactionTable implements Table {

@@ -41,4 +44,29 @@ public HTableDescriptor getTableDescriptor() throws IOException {
public Result mutateRow(RowMutations rm) throws IOException {
throw new UnsupportedOperationException();
}

@Override
public boolean checkAndPut(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp, byte[] value, Put put) throws IOException {
throw new UnsupportedOperationException();
}

@Override
public boolean checkAndDelete(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp, byte[] value, Delete delete)
throws IOException {
throw new UnsupportedOperationException();
}

@Override
public boolean checkAndMutate(byte[] row, byte[] family, byte[] qualifier,
CompareOp compareOp, byte[] value, RowMutations mutation)
throws IOException {
throw new UnsupportedOperationException();
}

@Override
public CheckAndMutateBuilder checkAndMutate(byte[] row, byte[] family) {
throw new UnsupportedOperationException();
}
}
(another changed file; file name not shown)
@@ -1,11 +1,10 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to you under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
(remaining changed files not rendered)
