Data Structures 08 - Hashing (a custom HashMap)
阿新 • Published: 2018-11-20
With the custom HashSet implemented, a HashMap follows almost naturally. The author's approach is to implement the HashMap in the style of a HashSet. The standard Java API does it the other way around: its HashSet is built on top of a HashMap, storing each element as a key mapped to a single shared dummy value, so hiding the value-related operations turns the map into a set with only a small amount of extra code and no duplication.
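For orientation, here is a minimal sketch of that JDK pattern. It is an illustration only, not the actual java.util.HashSet source, and the class name MapBackedSet is invented:

import java.util.HashMap;

// Sketch of a set backed by a map: every element becomes a key,
// and all keys share one dummy value.
public class MapBackedSet<E> {
    private static final Object PRESENT = new Object(); // shared dummy value
    private final HashMap<E, Object> map = new HashMap<>();

    public boolean add(E e)      { return map.put(e, PRESENT) == null; }
    public boolean contains(E e) { return map.containsKey(e); }
    public boolean remove(E e)   { return map.remove(e) == PRESENT; }
    public int size()            { return map.size(); }
}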
Compared with the standard Java API the author's approach is therefore somewhat redundant, but it is very useful for learning and consolidating the basics. The idea is simply to change the element type stored in the HashSet to a fixed Entry class; Entry is a generic class with two type parameters, K and V, representing the key and the value respectively. Without further ado, on to the code.
The basic program for the custom HashMap is as follows:
1. Define the MyMap interface:
package A03.Hash;

public interface MyMap<K, V> {
    // Does the map contain the given key?
    boolean containsKey(K key);

    // Does the map contain the given value?
    boolean containsValue(V value);

    // Return the value mapped to the given key
    V get(K key);

    // Remove the entry for the given key
    void remove(K key);

    // Number of entries in the map
    int size();

    // Is the map empty?
    boolean isEmpty();

    // Add an entry (key, value); return the old value if the key existed
    V put(K key, V value);

    // Set of all keys
    MySet<K> keySet();

    // Set of all values
    MySet<V> values();

    // Set of all key/value entries
    MySet<Entry<K, V>> entrySet();

    // A single key/value pair stored in the map
    class Entry<K, V> {
        K key;
        V value;

        public Entry(K key, V value) {
            this.key = key;
            this.value = value;
        }

        public K getKey() {
            return key;
        }

        public V getValue() {
            return value;
        }

        @Override
        public String toString() {
            return "[" + key + "=" + value + "]";
        }
    }
}
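Before moving on, a short sketch of how the interface is meant to be used once it has an implementation. EntryDemo is a made-up class name; it assumes the MyHashMap from step 2 below, and it assumes MySet supports the for-each loop (the rehash() method in step 2 already relies on that):

package A03.Hash;

// Hypothetical usage fragment, not part of the original article.
public class EntryDemo {
    public static void main(String[] args) {
        MyMap<String, Integer> ages = new MyHashMap<>();
        ages.put("Tom", 20);
        ages.put("Anna", 25);
        // Entry plays the same role as java.util.Map.Entry: one key/value pair
        for (MyMap.Entry<String, Integer> e : ages.entrySet()) {
            System.out.println(e.getKey() + " -> " + e.getValue()); // e.g. Tom -> 20
        }
    }
}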
2. Define the MyHashMap implementation class:
package A03.Hash;

import A01.LSQ.MyDLinkedList;

public class MyHashMap<K, V> implements MyMap<K, V> {
    private static int DEFAULT_INITIAL_CAPACITY = 10;
    private static int MAXIMUM_CAPACITY = 1 << 30;
    private int capacity;
    private static float DEFAULT_MAX_LOAD_FACTOR = 0.75f;
    private float loadFactorThreshold;
    private int size = 0;
    // Hash table: each slot holds a bucket implemented as a doubly linked list
    private MyDLinkedList<Entry<K, V>>[] table;

    public MyHashMap() {
        this(DEFAULT_INITIAL_CAPACITY, DEFAULT_MAX_LOAD_FACTOR);
    }

    public MyHashMap(int initialCapacity) {
        this(initialCapacity, DEFAULT_MAX_LOAD_FACTOR);
    }

    public MyHashMap(int initialCapacity, float loadFactorThreshold) {
        if (initialCapacity > MAXIMUM_CAPACITY)
            this.capacity = MAXIMUM_CAPACITY;
        else
            this.capacity = trimToPowerOf2(initialCapacity);
        this.loadFactorThreshold = loadFactorThreshold;
        table = new MyDLinkedList[capacity];
    }

    // Round the capacity up to a power of 2 so that (capacity - 1)
    // can be used as a bit mask in hash()
    private int trimToPowerOf2(int initialCapacity) {
        int capacity = 1;
        while (capacity < initialCapacity)
            capacity <<= 1;
        return capacity;
    }

    // Spread the high bits of the hash code into the low bits
    private static int supplementalHash(int h) {
        h ^= (h >>> 20) ^ (h >>> 12);
        return h ^ (h >>> 7) ^ (h >>> 4);
    }

    // Map a hash code to a bucket index
    private int hash(int hashCode) {
        return supplementalHash(hashCode) & (capacity - 1);
    }

    // Double the capacity and re-insert all entries
    private void rehash() {
        MySet<Entry<K, V>> set = entrySet();
        capacity <<= 1;
        System.out.println("capacity=" + capacity); // debug output
        table = new MyDLinkedList[capacity];
        size = 0;
        for (Entry<K, V> entry : set) {
            put(entry.getKey(), entry.getValue());
        }
    }

    @Override
    public String toString() {
        StringBuffer sb = new StringBuffer("[");
        for (int i = 0; i < capacity; i++)
            if (table[i] != null && table[i].size() > 0)
                for (Entry<K, V> entry : table[i])
                    sb.append(entry);
        sb.append("]");
        return sb.toString();
    }

    @Override
    public boolean isEmpty() {
        return size == 0;
    }

    @Override
    public boolean containsKey(K key) {
        return get(key) != null;
    }

    @Override
    public boolean containsValue(V value) {
        for (int i = 0; i < capacity; i++)
            if (table[i] != null) {
                MyDLinkedList<Entry<K, V>> bucket = table[i];
                for (Entry<K, V> entry : bucket)
                    if (entry.getValue().equals(value))
                        return true;
            }
        return false;
    }

    @Override
    public V get(K key) {
        int bucketIndex = hash(key.hashCode());
        if (table[bucketIndex] != null) {
            MyDLinkedList<Entry<K, V>> bucket = table[bucketIndex];
            for (Entry<K, V> entry : bucket)
                if (entry.getKey().equals(key))
                    return entry.getValue();
        }
        return null;
    }

    @Override
    public V put(K key, V value) {
        // If the key already exists, replace its value and return the old one
        if (get(key) != null) {
            int bucketIndex = hash(key.hashCode());
            MyDLinkedList<Entry<K, V>> bucket = table[bucketIndex];
            for (Entry<K, V> entry : bucket)
                if (entry.getKey().equals(key)) {
                    V oldValue = entry.getValue();
                    entry.value = value;
                    return oldValue;
                }
        }
        // Grow the table once the load-factor threshold is exceeded
        if (size >= capacity * loadFactorThreshold) {
            if (capacity == MAXIMUM_CAPACITY)
                throw new RuntimeException("Exceeding maximum capacity");
            rehash();
        }
        // Compute the bucket index only after a possible rehash,
        // because rehash() changes the capacity
        int bucketIndex = hash(key.hashCode());
        if (table[bucketIndex] == null)
            table[bucketIndex] = new MyDLinkedList<>();
        table[bucketIndex].add(new Entry<>(key, value));
        size++;
        return value;
    }

    @Override
    public void remove(K key) {
        int bucketIndex = hash(key.hashCode());
        if (table[bucketIndex] != null) {
            MyDLinkedList<Entry<K, V>> bucket = table[bucketIndex];
            for (Entry<K, V> entry : bucket)
                if (entry.getKey().equals(key)) {
                    bucket.remove(entry);
                    size--;
                    break;
                }
        }
    }

    @Override
    public int size() {
        return size;
    }

    @Override
    public MySet<K> keySet() {
        MySet<K> set = new MyHashSet<>();
        for (int i = 0; i < capacity; i++)
            if (table[i] != null) {
                MyDLinkedList<Entry<K, V>> list = table[i];
                for (Entry<K, V> entry : list)
                    set.add(entry.getKey());
            }
        return set;
    }

    @Override
    public MySet<V> values() {
        MySet<V> set = new MyHashSet<>();
        for (int i = 0; i < capacity; i++)
            if (table[i] != null) {
                MyDLinkedList<Entry<K, V>> list = table[i];
                for (Entry<K, V> entry : list)
                    set.add(entry.getValue());
            }
        return set;
    }

    @Override
    public MySet<Entry<K, V>> entrySet() {
        MySet<Entry<K, V>> set = new MyHashSet<>();
        for (int i = 0; i < capacity; i++)
            if (table[i] != null) {
                MyDLinkedList<Entry<K, V>> bucket = table[i];
                for (Entry<K, V> entry : bucket)
                    set.add(entry);
            }
        return set;
    }
}
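To exercise the class, a small hypothetical test program can be added. MyHashMapTest and the sample data are invented for illustration; the expected values in the comments assume the defaults defined above, i.e. an initial capacity of 10 rounded up to 16 and a load-factor threshold of 0.75:

package A03.Hash;

// Hypothetical test class, not part of the original article.
public class MyHashMapTest {
    public static void main(String[] args) {
        MyMap<String, Integer> map = new MyHashMap<>();
        map.put("Smith", 30);
        map.put("Anderson", 31);
        map.put("Lewis", 29);
        map.put("Cook", 29);

        System.out.println(map.get("Lewis"));        // 29
        System.out.println(map.containsKey("Cook")); // true
        System.out.println(map.containsValue(31));   // true
        System.out.println(map.size());              // 4

        System.out.println(map.put("Smith", 65));    // 30 (the replaced value)
        map.remove("Cook");
        System.out.println(map.size());              // 3

        // Enough additional entries push size past 0.75 * capacity and trigger
        // rehash(), which prints the doubled capacity (e.g. capacity=32)
        for (int i = 0; i < 20; i++)
            map.put("key" + i, i);
        System.out.println(map.size());              // 23
    }
}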
Most of this code repeats MyHashSet, and the fields have the same meaning as in MyHashSet; readers who find anything unclear should first work through the author's previous article on the custom HashSet.