@@ -44,4 +44,14 @@ private int getAbsolutePos(int pos) {
public int length() {
return this.length;
}

@Override
public int getIntLE(int pos) {
return LittleEndianBytes.toInt(t, getAbsolutePos(pos));
}

@Override
public long getLongLE(int pos) {
return LittleEndianBytes.toLong(t, getAbsolutePos(pos));
}
}
@@ -35,10 +35,13 @@ public abstract class Hash {
public static final int MURMUR_HASH = 1;
/** Constant to denote {@link MurmurHash3}. */
public static final int MURMUR_HASH3 = 2;
/** Constant to denote {@link XXH3}. */
public static final int XXH3_HASH = 3;

/**
* This utility method converts String representation of hash function name to a symbolic
* constant. Currently three function types are supported, "jenkins", "murmur" and "murmur3".
* constant. Currently four function types are supported: "jenkins", "murmur", "murmur3" and
* "xxh3".
* @param name hash function name
* @return one of the predefined constants
*/
@@ -49,6 +52,8 @@ public static int parseHashType(String name) {
return MURMUR_HASH;
} else if ("murmur3".equalsIgnoreCase(name)) {
return MURMUR_HASH3;
} else if ("xxh3".equalsIgnoreCase(name)) {
return XXH3_HASH;
} else {
return INVALID_HASH;
}
@@ -77,6 +82,8 @@ public static Hash getInstance(int type) {
return MurmurHash.getInstance();
case MURMUR_HASH3:
return MurmurHash3.getInstance();
case XXH3_HASH:
return XXH3.getInstance();
default:
return null;
}
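
For reviewers, a quick usage sketch of how the new constant is resolved. The Hash#hash(HashKey, int) entry point and the ByteArrayHashKey (byte[], offset, length) constructor are existing HBase APIs assumed here; only parseHashType and getInstance are part of this diff.

import org.apache.hadoop.hbase.util.ByteArrayHashKey;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.Hash;

public class Xxh3ResolutionSketch {
  public static void main(String[] args) {
    // Resolve the new symbolic constant from its name, then look up the singleton.
    int type = Hash.parseHashType("xxh3");   // Hash.XXH3_HASH
    Hash hash = Hash.getInstance(type);      // XXH3.getInstance()

    // 32-bit hashing path, assuming the pre-existing Hash#hash(HashKey, int) method.
    byte[] row = Bytes.toBytes("row-0001");
    int h32 = hash.hash(new ByteArrayHashKey(row, 0, row.length), 0);
    System.out.printf("type=%d hash=%08x%n", type, h32);
  }
}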
@@ -0,0 +1,45 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;

import org.apache.yetus.audience.InterfaceAudience;
import org.apache.yetus.audience.InterfaceStability;

/**
* Interface for computing 64-bit hash values.
*/
@InterfaceAudience.Private
@InterfaceStability.Stable
public interface Hash64 {
/**
* Computes a 64-bit hash from the given {@code HashKey} using a seed of 0.
* @param hashKey the input key providing byte access
* @return the computed 64-bit hash value
*/
default <T> long hash64(HashKey<T> hashKey) {
return hash64(hashKey, 0L);
}

/**
* Computes a 64-bit hash from the given {@code HashKey} and seed.
* @param hashKey the input key providing byte access
* @param seed the 64-bit seed value
* @return the computed 64-bit hash value
*/
<T> long hash64(HashKey<T> hashKey, long seed);
Contributor Author:
The goal here is to take a single 64-bit hash result and split it into two 32-bit hashes to compute the Bloom hash locations.

<------------------- 64-bit hash output ------------------->
+-----------------------------+-----------------------------+
|   lower 32 bits  ->  hash1  |   upper 32 bits  ->  hash2  |
+-----------------------------+-----------------------------+

Since XXH3 already performs much better than the existing hashes and we no longer need to run the hash function twice, this approach gives us an additional performance win on top of the baseline speedup.
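
A minimal sketch of that split. This is a hypothetical helper; the names and the hash1 + i * hash2 probe combination follow the usual combined-hashing approach and are not the exact Bloom filter code touched by this change.

import org.apache.hadoop.hbase.util.Hash64;
import org.apache.hadoop.hbase.util.HashKey;

public final class BloomProbeSketch {
  static int[] bloomProbes(Hash64 hasher, HashKey<?> key, int hashCount, int bloomBitSize) {
    long h64 = hasher.hash64(key, 0L);  // a single XXH3 invocation
    int hash1 = (int) h64;              // lower 32 bits
    int hash2 = (int) (h64 >>> 32);     // upper 32 bits
    int[] probes = new int[hashCount];
    for (int i = 0; i < hashCount; i++) {
      // Mask keeps the combined value non-negative before taking the modulus.
      probes[i] = ((hash1 + i * hash2) & Integer.MAX_VALUE) % bloomBitSize;
    }
    return probes;
  }
}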

}
@@ -35,4 +35,20 @@ public HashKey(T t) {

/** Returns The number of bytes in this HashKey */
public abstract int length();

/**
* Returns the little-endian 32-bit int value starting at the given position in this
* {@code HashKey}.
* @param pos the starting offset of the 4-byte little-endian int
* @return the 32-bit value decoded in little-endian order
*/
public abstract int getIntLE(int pos);

/**
* Returns the little-endian 64-bit long value starting at the given position in this
* {@code HashKey}.
* @param pos the starting offset of the 8-byte little-endian long
* @return the 64-bit value decoded in little-endian order
*/
public abstract long getLongLE(int pos);
}
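
A short worked example of the little-endian contract these accessors specify, assuming ByteArrayHashKey's existing (byte[], offset, length) constructor; its overrides earlier in this diff delegate to LittleEndianBytes.

import org.apache.hadoop.hbase.util.ByteArrayHashKey;
import org.apache.hadoop.hbase.util.HashKey;

public class HashKeyLittleEndianSketch {
  public static void main(String[] args) {
    // Least-significant byte first: {0x78, 0x56, 0x34, 0x12} decodes to 0x12345678.
    byte[] bytes = { 0x78, 0x56, 0x34, 0x12, 0, 0, 0, 0 };
    HashKey<byte[]> key = new ByteArrayHashKey(bytes, 0, bytes.length);
    System.out.printf("%08x%n", key.getIntLE(0));   // 12345678
    System.out.printf("%016x%n", key.getLongLE(0)); // 0000000012345678
  }
}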
@@ -0,0 +1,263 @@
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.hbase.util;

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.ByteBufferExtendedCell;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.unsafe.HBasePlatformDependent;
import org.apache.yetus.audience.InterfaceAudience;

/**
* Utility methods for reading and writing little-endian integers and longs from byte[] and
* ByteBuffer. Used by hashing components to perform fast, low-level LE conversions with optional
* Unsafe acceleration.
*/
@InterfaceAudience.Private
public final class LittleEndianBytes {
static final boolean UNSAFE_UNALIGNED = HBasePlatformDependent.unaligned();

abstract static class Converter {
abstract int toInt(byte[] bytes, int offset);

abstract int toInt(ByteBuffer buffer, int offset);

abstract int putInt(byte[] bytes, int offset, int val);

abstract long toLong(byte[] bytes, int offset);

abstract long toLong(ByteBuffer buffer, int offset);

abstract int putLong(byte[] bytes, int offset, long val);
}

static class ConverterHolder {
static final String UNSAFE_CONVERTER_NAME =
ConverterHolder.class.getName() + "$UnsafeConverter";
static final Converter BEST_CONVERTER = getBestConverter();

static Converter getBestConverter() {
try {
Class<? extends Converter> theClass =
Class.forName(UNSAFE_CONVERTER_NAME).asSubclass(Converter.class);
return theClass.getConstructor().newInstance();
} catch (Throwable t) {
return PureJavaConverter.INSTANCE;
}
}

static final class PureJavaConverter extends Converter {
static final PureJavaConverter INSTANCE = new PureJavaConverter();

private PureJavaConverter() {
}

@Override
int toInt(byte[] bytes, int offset) {
int n = 0;
for (int i = offset + 3; i >= offset; i--) {
n <<= 8;
n ^= (bytes[i] & 0xFF);
}
return n;
}

@Override
int toInt(ByteBuffer buffer, int offset) {
return Integer.reverseBytes(buffer.getInt(offset));
}

@Override
int putInt(byte[] bytes, int offset, int val) {
for (int i = offset; i < offset + 3; i++) {
bytes[i] = (byte) val;
val >>>= 8;
}
bytes[offset + 3] = (byte) val;
return offset + Bytes.SIZEOF_INT;
}

@Override
long toLong(byte[] bytes, int offset) {
long l = 0;
for (int i = offset + 7; i >= offset; i--) {
l <<= 8;
l ^= (bytes[i] & 0xFFL);
}
return l;
}

@Override
long toLong(ByteBuffer buffer, int offset) {
return Long.reverseBytes(buffer.getLong(offset));
}

@Override
int putLong(byte[] bytes, int offset, long val) {
for (int i = offset; i < offset + 7; i++) {
bytes[i] = (byte) val;
val >>>= 8;
}
bytes[offset + 7] = (byte) val;
return offset + Bytes.SIZEOF_LONG;
}
}

static final class UnsafeConverter extends Converter {
static final UnsafeConverter INSTANCE = new UnsafeConverter();

public UnsafeConverter() {
}

static {
if (!UNSAFE_UNALIGNED) {
// It doesn't matter what we throw; it's swallowed in getBestConverter().
throw new Error();
}
}

@Override
int toInt(byte[] bytes, int offset) {
return UnsafeAccess.toIntLE(bytes, offset);
}

@Override
int toInt(ByteBuffer buffer, int offset) {
return UnsafeAccess.toIntLE(buffer, offset);
}

@Override
int putInt(byte[] bytes, int offset, int val) {
return UnsafeAccess.putIntLE(bytes, offset, val);
}

@Override
long toLong(byte[] bytes, int offset) {
return UnsafeAccess.toLongLE(bytes, offset);
}

@Override
long toLong(ByteBuffer buffer, int offset) {
return UnsafeAccess.toLongLE(buffer, offset);
}

@Override
int putLong(byte[] bytes, int offset, long val) {
return UnsafeAccess.putLongLE(bytes, offset, val);
}
}
}

/*
* Writes an int in little-endian order. Caller must ensure bounds; no checks are performed.
*/
public static void putInt(byte[] bytes, int offset, int val) {
assert offset >= 0 && bytes.length - offset >= Bytes.SIZEOF_INT;
ConverterHolder.BEST_CONVERTER.putInt(bytes, offset, val);
}

/*
* Reads an int in little-endian order. Caller must ensure bounds; no checks are performed.
*/
public static int toInt(byte[] bytes, int offset) {
assert offset >= 0 && bytes.length - offset >= Bytes.SIZEOF_INT;
return ConverterHolder.BEST_CONVERTER.toInt(bytes, offset);
}

/*
* Reads an int in little-endian order from ByteBuffer. Caller must ensure bounds; no checks are
* performed.
*/
public static int toInt(ByteBuffer buffer, int offset) {
assert offset >= 0 && buffer.capacity() - offset >= Bytes.SIZEOF_INT;
return ConverterHolder.BEST_CONVERTER.toInt(buffer, offset);
}

/*
* Writes a long in little-endian order. Caller must ensure bounds; no checks are performed.
*/
public static void putLong(byte[] bytes, int offset, long val) {
assert offset >= 0 && bytes.length - offset >= Bytes.SIZEOF_LONG;
ConverterHolder.BEST_CONVERTER.putLong(bytes, offset, val);
}

/*
* Reads a long in little-endian order. Caller must ensure bounds; no checks are performed.
*/
public static long toLong(byte[] bytes, int offset) {
assert offset >= 0 && bytes.length - offset >= Bytes.SIZEOF_LONG;
return ConverterHolder.BEST_CONVERTER.toLong(bytes, offset);
}

/*
* Reads a long in little-endian order from ByteBuffer. Caller must ensure bounds; no checks are
* performed.
*/
public static long toLong(ByteBuffer buffer, int offset) {
assert offset >= 0 && buffer.capacity() - offset >= Bytes.SIZEOF_LONG;
return ConverterHolder.BEST_CONVERTER.toLong(buffer, offset);
}

/*
* Reads an int in little-endian order from the row portion of the Cell, at the given offset.
*/
public static int getRowAsInt(Cell cell, int offset) {
if (cell instanceof ByteBufferExtendedCell) {
ByteBufferExtendedCell bbCell = (ByteBufferExtendedCell) cell;
return toInt(bbCell.getRowByteBuffer(), bbCell.getRowPosition() + offset);
}
return toInt(cell.getRowArray(), cell.getRowOffset() + offset);
}

/*
* Reads a long in little-endian order from the row portion of the Cell, at the given offset.
*/
public static long getRowAsLong(Cell cell, int offset) {
if (cell instanceof ByteBufferExtendedCell) {
ByteBufferExtendedCell bbCell = (ByteBufferExtendedCell) cell;
return toLong(bbCell.getRowByteBuffer(), bbCell.getRowPosition() + offset);
}
return toLong(cell.getRowArray(), cell.getRowOffset() + offset);
}

/*
* Reads an int in little-endian order from the qualifier portion of the Cell, at the given
* offset.
*/
public static int getQualifierAsInt(Cell cell, int offset) {
if (cell instanceof ByteBufferExtendedCell) {
ByteBufferExtendedCell bbCell = (ByteBufferExtendedCell) cell;
return toInt(bbCell.getQualifierByteBuffer(), bbCell.getQualifierPosition() + offset);
}
return toInt(cell.getQualifierArray(), cell.getQualifierOffset() + offset);
}

/*
* Reads a long in little-endian order from the qualifier portion of the Cell, at the given
* offset.
*/
public static long getQualifierAsLong(Cell cell, int offset) {
if (cell instanceof ByteBufferExtendedCell) {
ByteBufferExtendedCell bbCell = (ByteBufferExtendedCell) cell;
return toLong(bbCell.getQualifierByteBuffer(), bbCell.getQualifierPosition() + offset);
}
return toLong(cell.getQualifierArray(), cell.getQualifierOffset() + offset);
}

private LittleEndianBytes() {
}
}
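
A small round-trip sketch of the new utility's public entry points (illustrative only; all methods used below are defined in the file above).

import java.nio.ByteBuffer;
import org.apache.hadoop.hbase.util.Bytes;
import org.apache.hadoop.hbase.util.LittleEndianBytes;

public class LittleEndianBytesSketch {
  public static void main(String[] args) {
    // Write and read back in little-endian order; results are the same whichever
    // converter (Unsafe or pure-Java) was selected at class load.
    byte[] buf = new byte[Bytes.SIZEOF_LONG];
    LittleEndianBytes.putLong(buf, 0, 0x1122334455667788L);

    long back = LittleEndianBytes.toLong(buf, 0);               // 0x1122334455667788
    int low = LittleEndianBytes.toInt(buf, 0);                  // 0x55667788 (lowest 4 bytes)
    long viaBuffer = LittleEndianBytes.toLong(ByteBuffer.wrap(buf), 0);

    System.out.printf("%x %x %x%n", back, low, viaBuffer);
  }
}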
@@ -37,4 +37,15 @@ public byte get(int offset) {
public int length() {
return this.t.getRowLength();
}

@Override
public int getIntLE(int offset) {
return LittleEndianBytes.getRowAsInt(t, offset);
}

@Override
public long getLongLE(int offset) {
return LittleEndianBytes.getRowAsLong(t, offset);
}

}