  HashMap fast-fail and ConcurrentHashMap fail-safe, by example

    Note: migrated from my CSDN blog at https://blog.csdn.net/u013365635

    If a Java HashMap is structurally modified (entries added or removed) while it is being iterated, the iteration fails; ConcurrentHashMap does not have this problem. This is what is meant by HashMap's fast-fail (fail-fast) mechanism and ConcurrentHashMap's fail-safe mechanism.

    Consider the following examples.
    First, HashMap's fast-fail behavior:

    package com;
    
    import java.util.HashMap;
    import java.util.Map;
    
    public class TestHashMapFastFail
    {
        public static void main(String[] args)
        {
            System.out.println("test HashMap fast-fail");
            Map<Integer, String> testHashMap = new HashMap<Integer, String>();
            testHashMap.put(1000, "1000");
            testHashMap.put(2000, "2000");
            testHashMap.put(3000, "3000");
            testHashMap.put(4000, "4000");
            testHashMap.put(5000, "5000");
            System.out.println(testHashMap.size());
            for (Map.Entry<Integer, String> entry : testHashMap.entrySet())
            {
                int key = entry.getKey();
                System.out.println("key=" + key);
                if (key == 3000)
                {
                    testHashMap.remove(key);
                }
            }
            System.out.println(testHashMap.size());
            for (Map.Entry<Integer, String> entry : testHashMap.entrySet())
            {
                System.out.println(entry.getKey() + "-->" + entry.getValue());
            }
        }
    }
    

    Output:

    test HashMap fast-fail
    5
    key=2000
    key=4000
    key=1000
    key=3000
    Exception in thread "main" java.util.ConcurrentModificationException
    	at java.util.HashMap$HashIterator.nextNode(HashMap.java:1437)
    	at java.util.HashMap$EntryIterator.next(HashMap.java:1471)
    	at java.util.HashMap$EntryIterator.next(HashMap.java:1469)
    	at com.TestHashMapFastFail.main(TestHashMapFastFail.java:18)
    

    As you can see, after the map's remove method is called, the very next step of the iteration fails and throws an exception. This is the so-called fast-fail (fail-fast) behavior.
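
    The idea behind fast-fail is a modification counter: the collection increments a counter on every structural change, each iterator remembers the value it saw when it was created, and a mismatch detected during iteration is reported as ConcurrentModificationException. This is essentially what java.util.HashMap does with its modCount field. The class below is only a minimal sketch of that idea, not the JDK source; the name SimpleFailFastList and its int-array storage are purely illustrative.

    package com;
    
    import java.util.ConcurrentModificationException;
    import java.util.Iterator;
    
    public class SimpleFailFastList
    {
        private int[] data = new int[16];
        private int size = 0;
        private int modCount = 0;                          // incremented on every structural change
    
        public void add(int value)
        {
            if (size == data.length)
            {
                int[] bigger = new int[data.length * 2];
                System.arraycopy(data, 0, bigger, 0, size);
                data = bigger;
            }
            data[size++] = value;
            modCount++;
        }
    
        public void removeAt(int index)
        {
            System.arraycopy(data, index + 1, data, index, size - index - 1);
            size--;
            modCount++;                                    // structural change invalidates live iterators
        }
    
        public Iterator<Integer> iterator()
        {
            return new Iterator<Integer>()
            {
                private int cursor = 0;
                private int expectedModCount = modCount;   // snapshot taken when the iterator is created
    
                public boolean hasNext()
                {
                    return cursor < size;
                }
    
                public Integer next()
                {
                    if (modCount != expectedModCount)      // the fast-fail check
                    {
                        throw new ConcurrentModificationException();
                    }
                    return data[cursor++];
                }
    
                public void remove()
                {
                    removeAt(--cursor);                    // remove the element just returned
                    expectedModCount = modCount;           // re-sync, so later next() calls do not throw
                }
            };
        }
    }

    Note how the iterator's own remove() re-synchronizes expectedModCount after delegating to the collection; this is also the reason why removing through the iterator, as in the last example of this post, does not trigger the exception.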

    Now look at ConcurrentHashMap's fail-safe behavior:

    package com;
    
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    
    public class TestConcurrentHashMapFailSafe
    {
        public static void main(String[] args)
        {
            System.out.println("test ConcurrentHashMap fast-fail");
            Map<Integer, String> testConcurrentHashMap = new ConcurrentHashMap<Integer, String>();
            testConcurrentHashMap.put(100, "100");
            testConcurrentHashMap.put(200, "200");
            testConcurrentHashMap.put(300, "300");
            testConcurrentHashMap.put(400, "400");
            testConcurrentHashMap.put(500, "500");
            System.out.println(testConcurrentHashMap.size());
            for (Map.Entry<Integer, String> entry : testConcurrentHashMap.entrySet())
            {
                int key = entry.getKey();
                System.out.println("key=" + key);
            if (key == 200 || key == 400)
                {
                    testConcurrentHashMap.remove(key);
                }
            }
            System.out.println(testConcurrentHashMap.size());
            for (Map.Entry<Integer, String> entry : testConcurrentHashMap.entrySet())
            {
                System.out.println(entry.getKey() + "-->" + entry.getValue());
            }
        }
    }
    

    Output:

    test ConcurrentHashMap fail-safe
    5
    key=400
    key=100
    key=500
    key=200
    key=300
    3
    100-->100
    500-->500
    300-->300
    

    As you can see, even though remove was called during the iteration, ConcurrentHashMap still behaves normally from the caller's point of view; this is the so-called fail-safe behavior. The reason is that the iterators returned by ConcurrentHashMap are weakly consistent: they tolerate changes to the underlying data structure and do not throw ConcurrentModificationException.
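
    To make the weak consistency a bit more concrete, the following sketch (the class name and the extra key 999 are just illustrative, not part of the examples above) puts a new entry into the map on every pass of the loop while the map is being iterated: no exception is thrown, and whether the ongoing iteration sees the new entry is not guaranteed either way.

    package com;
    
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    
    public class TestWeaklyConsistentIterator
    {
        public static void main(String[] args)
        {
            Map<Integer, String> map = new ConcurrentHashMap<Integer, String>();
            map.put(100, "100");
            map.put(200, "200");
            map.put(300, "300");
            for (Map.Entry<Integer, String> entry : map.entrySet())
            {
                System.out.println(entry.getKey() + "-->" + entry.getValue());
                // structural modification while iterating: no exception is thrown;
                // whether this pass of the iteration ever sees key 999 is not guaranteed
                map.put(999, "999");
            }
            System.out.println("final size=" + map.size());   // prints 4
        }
    }
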
    So this is an extra benefit you get from choosing ConcurrentHashMap, or, put another way, one solution to HashMap's fast-fail problem. Another solution is to remove elements through the iterator's remove method rather than the map's remove method; since the iterator updates its own bookkeeping when it performs the removal, no exception is thrown. An example follows.

    package com;
    
    import java.util.HashMap;
    import java.util.Iterator;
    import java.util.Map;
    
    public class TestHashMapFastFail2
    {
        public static void main(String[] args)
        {
            System.out.println("test solve HashMap fast-fail");
            Map<Integer, String> testHashMap = new HashMap<Integer, String>();
            testHashMap.put(1000, "1000");
            testHashMap.put(2000, "2000");
            testHashMap.put(3000, "3000");
            testHashMap.put(4000, "4000");
            testHashMap.put(5000, "5000");
            System.out.println(testHashMap.size());
            Iterator<Map.Entry<Integer, String>> iterator = testHashMap.entrySet().iterator();
            while (iterator.hasNext())
            {
                int key = iterator.next().getKey();
                System.out.println("key=" + key);
                if (key == 2000 || key == 4000)
                {
                    // removing through the iterator keeps its internal bookkeeping
                    // consistent, so no ConcurrentModificationException is thrown
                    iterator.remove();
                }
            }
            System.out.println(testHashMap.size());
            for (Map.Entry<Integer, String> entry : testHashMap.entrySet())
            {
                System.out.println(entry.getKey() + "-->" + entry.getValue());
            }
        }
    }
    

    Output:

    test solve HashMap fast-fail
    5
    key=2000
    key=4000
    key=1000
    key=3000
    key=5000
    3
    1000-->1000
    3000-->3000
    5000-->5000
    

    The fast-fail behavior of collections is a trap that beginners fall into very easily.
    A few words on why the fast-fail and fail-safe mechanisms were designed this way. One might ask: if fast-fail has so many drawbacks, why design it at all? Take HashMap as an example: HashMap is deliberately not thread-safe and does not support safe concurrent modification by multiple threads, and that is precisely what makes it fast. The fail-safe approach, on the other hand, was designed to let collections and maps be modified safely by multiple threads at the same time. Of course, the examples in this post all perform the modifications from a single thread; they are only meant to introduce the two concepts. For the internal implementation details, read the source code.
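
    As a minimal sketch of that multi-threaded intent (the class name, the key range, and the thread setup below are illustrative assumptions, not taken from the examples above), one thread can keep removing entries while another thread iterates, and no ConcurrentModificationException is thrown:

    package com;
    
    import java.util.Map;
    import java.util.concurrent.ConcurrentHashMap;
    
    public class TestConcurrentHashMapTwoThreads
    {
        public static void main(String[] args) throws InterruptedException
        {
            final Map<Integer, String> map = new ConcurrentHashMap<Integer, String>();
            for (int i = 0; i < 10000; i++)
            {
                map.put(i, String.valueOf(i));
            }
    
            // a second thread removes all even keys while the main thread iterates
            Thread remover = new Thread(new Runnable()
            {
                public void run()
                {
                    for (int i = 0; i < 10000; i += 2)
                    {
                        map.remove(i);
                    }
                }
            });
            remover.start();
    
            int seen = 0;
            for (Map.Entry<Integer, String> entry : map.entrySet())
            {
                seen++;   // no ConcurrentModificationException despite the concurrent removals
            }
            remover.join();
    
            // "seen" lands somewhere between 5000 and 10000 depending on timing;
            // the final size is deterministic once the remover thread has finished
            System.out.println("entries seen while iterating: " + seen);
            System.out.println("final size: " + map.size());   // 5000
        }
    }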
