// When the data volume is large, ConcurrentHashMap is noticeably more efficient than Hashtable.
package com.ydlclass.JUnit;
import com.ydlclass.collection.CountDownLatchTest;
import org.junit.Test;
import java.util.Hashtable;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentMap;
import java.util.concurrent.CountDownLatch;
/**
 * Compares insert throughput of the fully-synchronized {@link Hashtable}
 * against {@link ConcurrentHashMap} under heavy write contention:
 * 50 threads each perform 1,000,000 puts with disjoint keys, and the
 * elapsed wall-clock time is printed for each map implementation.
 */
public class PerformanceTest {

    /** Number of concurrent writer threads. */
    private static final int THREADS = 50;

    /** Number of insertions performed by each writer thread. */
    private static final int INSERTS_PER_THREAD = 1_000_000;

    /**
     * Benchmarks {@link Hashtable}, whose every method is synchronized on a
     * single monitor, so all 50 writers serialize on one lock.
     *
     * @throws InterruptedException if the awaiting thread is interrupted
     */
    @Test
    public void testHashTable() throws InterruptedException {
        runInsertBenchmark("Hashtable", new Hashtable<>(50000));
    }

    /**
     * Benchmarks {@link ConcurrentHashMap} under the exact same workload as
     * {@link #testHashTable()}; its per-bin locking allows writers to proceed
     * mostly in parallel.
     *
     * @throws InterruptedException if the awaiting thread is interrupted
     */
    @Test
    public void ConcurrentTest() throws InterruptedException {
        runInsertBenchmark("ConcurrentHashMap", new ConcurrentHashMap<>(50000));
    }

    /**
     * Starts {@code THREADS} writer threads that each insert
     * {@code INSERTS_PER_THREAD} distinct keys into {@code map}, waits for
     * all of them to finish, and prints the elapsed time in milliseconds.
     *
     * @param label name of the map implementation, used in console output
     * @param map   the map under test (must be thread-safe)
     * @throws InterruptedException if interrupted while waiting for the writers
     */
    private void runInsertBenchmark(String label, Map<Integer, Integer> map) throws InterruptedException {
        final CountDownLatch done = new CountDownLatch(THREADS);
        System.out.println("----" + label + "开始工作----");
        long start = System.currentTimeMillis();
        for (int i = 0; i < THREADS; i++) {
            final int threadId = i;
            new Thread(() -> {
                // Offset by threadId * INSERTS_PER_THREAD so every thread writes a
                // disjoint key range. (The original "k + j" collided heavily across
                // threads, turning intended inserts into overwrites and skewing the
                // benchmark.) Max key = 49_999_999, well within int range.
                for (int j = 0; j < INSERTS_PER_THREAD; j++) {
                    map.put(threadId * INSERTS_PER_THREAD + j, j);
                }
                done.countDown();
            }).start();
        }
        done.await(); // block until all writer threads have finished
        long end = System.currentTimeMillis();
        System.out.println("----" + label + "执行结束----");
        System.out.println("所用时间为:" + (end - start));
    }
}
/* Sample run output:
----ConcurrentHashMap开始工作----
-----ConcurrentHashMap结束工作----
所用时间为:2902
----Hashtable开始工作----
----Hashtable执行结束----
所用时间为:4311
*/