实现一致性 hash 算法

发布于: 2020 年 07 月 08 日
实现一致性hash算法

要求

用熟悉的语言实现一致性Hash算法。编写测试用例,测试100万KV数据,10个服务器节点的情况下,计算这些KV数据在服务器上分布数量的标准差,以评估算法的存储负载不均衡性。

分析

Hash作用是定位资源在服务器上的位置,公式:hash(res) % 服务器数量=位置,这样不用遍历整个服务器集群就可以定位资源了。

问题是当服务器数量发生变化时(服务器故障时和新增服务器时),资源在服务器上的位置就会发生变化,而且无法避免。

一致性Hash算法是对2^32取模形成Hash环,增加容错性和可扩展性,将服务器数量变化带来的影响降至可控范围内。

为了进一步解决数据倾斜问题,在物理节点上增加虚拟节点。

关键点

一致性Hash定义、环、物理节点、虚拟节点。

代码

/*
* This Java source file was generated by the Gradle 'init' task.
*/
package hash;
import com.google.common.hash.HashFunction;

import java.nio.charset.Charset;
import java.nio.charset.StandardCharsets;
import java.util.Collection;
import java.util.Map;
import java.util.NavigableMap;
import java.util.SortedMap;
import java.util.TreeMap;
/**
 * A consistent-hash ring mapping keys to physical nodes of type {@code T}.
 *
 * <p>Each physical node is placed on the ring {@code numberOfReplicas} times
 * as "virtual nodes" ({@code "<node>#<i>"}) to smooth out data skew. A key is
 * served by the first virtual node clockwise from the key's hash position.
 *
 * <p>Not thread-safe: external synchronization is required if nodes are added
 * or removed concurrently with lookups.
 */
public class ConsistentHash<T> {

    /** Hash used for both virtual-node names and lookup keys. */
    private final HashFunction hashFunction;

    /** Virtual nodes per physical node; more replicas = more even spread. */
    private final int numberOfReplicas;

    /** The ring: virtual-node hash -> physical node, sorted by hash. */
    private final NavigableMap<Long, T> circle = new TreeMap<>();

    /**
     * Builds a ring and places every node in {@code nodes} on it.
     *
     * @param hashFunction     hash function for node names and keys
     * @param numberOfReplicas number of virtual nodes per physical node
     * @param nodes            initial physical nodes
     */
    public ConsistentHash(HashFunction hashFunction
            , int numberOfReplicas, Collection<T> nodes) {
        this.hashFunction = hashFunction;
        this.numberOfReplicas = numberOfReplicas;
        for (T node : nodes) {
            add(node);
        }
    }

    /** Adds a physical node by placing all of its virtual nodes on the ring. */
    public void add(T node) {
        for (int i = 0; i < numberOfReplicas; i++) {
            circle.put(virtualNodeHash(node, i), node);
        }
    }

    /** Removes a physical node and all of its virtual nodes from the ring. */
    public void remove(T node) {
        for (int i = 0; i < numberOfReplicas; i++) {
            circle.remove(virtualNodeHash(node, i));
        }
    }

    /**
     * Returns the node responsible for {@code key}: the first virtual node at
     * or clockwise after the key's hash, wrapping to the ring's start.
     *
     * @return the owning node, or {@code null} if the ring is empty
     */
    public T get(Object key) {
        if (circle.isEmpty()) {
            return null;
        }
        // Fix: hash with an explicit charset. The original used
        // Charset.defaultCharset(), which is platform-dependent, so two JVMs
        // with different default charsets could compute different rings for
        // identical nodes and keys — fatal for a distributed placement scheme.
        final long hash = hashFunction.hashString(key.toString()
                , StandardCharsets.UTF_8).asLong();
        // ceilingEntry = first virtual node clockwise; null means we passed
        // the largest hash, so wrap around to the smallest one.
        final Map.Entry<Long, T> entry = circle.ceilingEntry(hash);
        return entry != null ? entry.getValue() : circle.firstEntry().getValue();
    }

    /** Hash position of the i-th virtual node of {@code node}, e.g. "Node-A#3". */
    private long virtualNodeHash(T node, int i) {
        final String nodeName = String.format("%s#%d", node, i);
        return hashFunction.hashString(nodeName
                , StandardCharsets.UTF_8).asLong();
    }
}

测试类

/*
* This Java source file was generated by the Gradle 'init' task.
*/
package hash;
import org.junit.Test;
import static org.junit.Assert.*;
import java.util.Collection;
import java.util.HashMap;
import java.util.Map;
import com.google.common.hash.HashFunction;
import com.google.common.hash.Hashing;
import java.util.ArrayList;
/**
 * Load-balance test for {@link ConsistentHash}: distributes 1,000,000 keys
 * over 10 nodes (150 virtual nodes each) and reports the standard deviation
 * of the per-node key counts for several Guava hash functions.
 */
public class ConsistentHashTest {

    @Test public void test_murmur3_128(){
        System.out.println(String.format("hash算法:%s", "murmur3_128"));
        testWithHf(Hashing.murmur3_128());
    }
    @Test public void test_sipHash24(){
        System.out.println(String.format("hash算法:%s", "sipHash24"));
        testWithHf(Hashing.sipHash24());
    }
    @Test public void test_md5(){
        System.out.println(String.format("hash算法:%s", "md5"));
        testWithHf(Hashing.md5());
    }
    @Test public void test_sha1(){
        System.out.println(String.format("hash算法:%s", "sha1"));
        testWithHf(Hashing.sha1());
    }
    @Test public void test_sha256(){
        System.out.println(String.format("hash算法:%s", "sha256"));
        testWithHf(Hashing.sha256());
    }
    @Test public void test_sha384(){
        System.out.println(String.format("hash算法:%s", "sha384"));
        testWithHf(Hashing.sha384());
    }
    @Test public void test_sha512(){
        System.out.println(String.format("hash算法:%s", "sha512"));
        testWithHf(Hashing.sha512());
    }
    @Test public void test_farmHashFingerprint64(){
        System.out.println(String.format("hash算法:%s", "farmHashFingerprint64"));
        testWithHf(Hashing.farmHashFingerprint64());
    }

    /**
     * Builds a 10-node ring with 150 virtual nodes per node, places one
     * million keys, prints the per-node counts, and prints the standard
     * deviation of those counts (lower = more even distribution).
     */
    private void testWithHf(HashFunction hashFunction){
        // Ten physical nodes.
        ArrayList<String> nodes = new ArrayList<String>();
        nodes.add("Node-A");
        nodes.add("Node-B");
        nodes.add("Node-C");
        nodes.add("Node-D");
        nodes.add("Node-E");
        nodes.add("Node-F");
        nodes.add("Node-G");
        nodes.add("Node-H");
        nodes.add("Node-I");
        nodes.add("Node-J");
        // Fix: the original used the raw type `new ConsistentHash(...)`,
        // which compiles with an unchecked warning; use the diamond instead.
        ConsistentHash<String> consistentHashing =
                new ConsistentHash<>(hashFunction, 150, nodes);
        // Distribute one million keys and count hits per node.
        Map<String, Integer> stats = new HashMap<>();
        for (String node: nodes){
            stats.put(node, 0);
        }
        for (int i = 0; i < 1000000; i++) {
            String key = "KEY:" + i;
            String node = consistentHashing.get(key);
            // merge() replaces the original get()+put() pair in one call.
            stats.merge(node, 1, Integer::sum);
        }
        // Print per-node counts.
        for(Map.Entry<String, Integer> entry : stats.entrySet()){
            System.out.println(entry.getKey()+":"+entry.getValue());
        }
        // Print the standard deviation of the counts.
        Collection<Integer> collection = stats.values();
        Integer[] arr = collection.toArray(new Integer[0]);
        System.out.println(String.format("标准差是:%f", std(arr)));
    }

    /**
     * Population standard deviation of {@code arr}.
     *
     * @return the standard deviation, or {@code 0.0} for an empty array
     *     (the original divided by zero and returned {@code NaN})
     */
    private double std(Integer[] arr){
        int len = arr.length;
        if (len == 0) {
            return 0.0;
        }
        double sum = 0;
        for (int i = 0; i < len; i++) {
            sum += arr[i];
        }
        double avg = sum / len;
        double std = 0;
        for (int i = 0; i < len; i++) {
            std += (arr[i] - avg) * (arr[i] - avg);
        }
        return Math.sqrt(std / len);
    }
}

结果

hash算法:sha1
Node-B:92823
Node-A:108256
Node-H:97373
Node-G:101293
Node-J:102935
Node-I:101806
Node-D:92264
Node-C:107575
Node-F:94500
Node-E:101175
标准差是:5375.654844
hash算法:md5
Node-B:89324
Node-A:102482
Node-H:110750
Node-G:93059
Node-J:112777
Node-I:95467
Node-D:101781
Node-C:94636
Node-F:101562
Node-E:98162
标准差是:7109.427853
hash算法:sipHash24
Node-B:91167
Node-A:88697
Node-H:101894
Node-G:92392
Node-J:100053
Node-I:99201
Node-D:112520
Node-C:116950
Node-F:95419
Node-E:101707
标准差是:8578.124026
hash算法:sha256
Node-B:110619
Node-A:102707
Node-H:97519
Node-G:100612
Node-J:102623
Node-I:85726
Node-D:99586
Node-C:92435
Node-F:104434
Node-E:103739
标准差是:6544.380933
hash算法:sha384
Node-B:89953
Node-A:107664
Node-H:98905
Node-G:110669
Node-J:104997
Node-I:96231
Node-D:103288
Node-C:101887
Node-F:89127
Node-E:97279
标准差是:6737.052352
hash算法:sha512
Node-B:110213
Node-A:100911
Node-H:93621
Node-G:88849
Node-J:99329
Node-I:93547
Node-D:99120
Node-C:108141
Node-F:103556
Node-E:102713
标准差是:6319.168996
hash算法:murmur3_128
Node-B:93231
Node-A:92172
Node-H:94394
Node-G:103487
Node-J:112682
Node-I:102916
Node-D:105730
Node-C:99059
Node-F:94142
Node-E:102187
标准差是:6267.671370
hash算法:farmHashFingerprint64
Node-B:107604
Node-A:86907
Node-H:111923
Node-G:86216
Node-J:94556
Node-I:94852
Node-D:97597
Node-C:101734
Node-F:119901
Node-E:98710
标准差是:10119.477042

发布于: 2020 年 07 月 08 日 阅读数: 14
用户头像

LEAF

关注

还未添加个人签名 2018.10.08 加入

还未添加个人简介

评论

发布
暂无评论
实现一致性hash算法