// Last edited: Oct 26, 2019 (paste/timestamp artifact preserved as a comment)
1import java.util.ArrayList;
2import java.util.Arrays;
3import java.util.LinkedList;
4
// Collision resolution scheme: BUCKETS (not open addressing).
// The table is an ArrayList of slots; each occupied slot holds an
// ArrayList<Book> bucket containing every Book whose key hashes to that
// index. An empty slot is represented by null.
//
// Hashing algorithm: the key's built-in String.hashCode() is reduced to a
// non-negative bucket index modulo the current table capacity. A custom hash
// function is not required since the key type's own hashCode() is used.
26/**
27 * HashTable implementation that uses:
28 *
29 * @param <K> unique comparable identifier for each <K,V> pair, may not be null
30 * @param <V> associated value with a key, value may be null
31 */
32public class BookHashTable implements HashTableADT<String, Book> {
33
34 /** The initial capacity that is used if none is specified user */
35 static final int DEFAULT_CAPACITY = 101;
36
37 /** The load factor that is used if none is specified by user */
38 static final double DEFAULT_LOAD_FACTOR_THRESHOLD = 0.75;
39
40 /**
41 * REQUIRED default no-arg constructor Uses default capacity and sets load factor threshold for
42 * the newly created hash table.
43 */
44 public BookHashTable() {
45 this(DEFAULT_CAPACITY, DEFAULT_LOAD_FACTOR_THRESHOLD);
46 }
47
48 private BookHashTable BookSet;
49 private ArrayList<ArrayList<Book>> bookTable; // ArrayList of ArrayLists to store buckets
50 private int capacity;
51 private double loadFactorThresh;
52 private int numKeys;
53
54
55 private int getKeyIndex(String key) {
56 int keyHashCode = key.hashCode();
57 if (keyHashCode < 0) {
58 keyHashCode = keyHashCode * -1;
59 }
60 int keyIndex = keyHashCode % capacity;
61 return keyIndex;
62 }
63
64
65 /**
66 * Creates an empty hash table with the specified capacity and load factor.
67 *
68 * @param initialCapacity number of elements table should hold at start.
69 * @param loadFactorThreshold the ratio of items/capacity that causes table to resize and rehash
70 */
71 public BookHashTable(int initialCapacity, double loadFactorThreshold) {
72 // TODO: comment and complete a constructor that accepts initial capacity
73 // and load factor threshold and initializes all fields
74 loadFactorThresh = loadFactorThreshold;
75 numKeys = 0;
76 bookTable = new ArrayList<ArrayList<Book>>(initialCapacity); // ArrayList of buckets
77 capacity = initialCapacity; // sets number of available buckets (capacity of bookTable)
78 for (int i = 0; i < capacity; i += 1) {
79 // fills bookTable with null values so size can be checked
80 bookTable.add(null);
81 }
82 BookSet = this;
83 }
84
85 @Override
86 public void insert(String key, Book value) throws IllegalNullKeyException, DuplicateKeyException {
87 // check for null key
88 if (key == null) {
89 throw new IllegalNullKeyException();
90 }
91
92 // finds index of key
93 int indexOfKey = getKeyIndex(key);
94
95
96 // check to see if key is already in data structure
97 if (bookTable.get(indexOfKey) != null) { // bucket at this index in bookTable is not empty
98 for (int i = 0; i < bookTable.get(indexOfKey).size(); i += 1) {
99 // loops through bucket to check for duplicate
100 if (bookTable.get(indexOfKey).get(i).equals(value)) {
101 // checks bucket at index i for Book, if match throws exception
102 System.out.println("DuplicateKeyException thrown");
103 throw new DuplicateKeyException();
104 }
105 }
106 } else {
107 // bucket at this index in bookTable is empty, meaning no duplicate key
108 }
109
110
111 // increase capacity if load factor is met and reinserts old books into new bookTable
112 double loadFactor = (1.0 * numKeys) / capacity;
113 if (loadFactor >= getLoadFactorThreshold()) {
114 System.out.println("Resizing");
115 ArrayList<ArrayList<Book>> temp = bookTable;
116 int tempCapacity = capacity; // keeps track of number of buckets in old hash table
117
118 BookSet = new BookHashTable(2 * capacity + 1, loadFactorThresh);
119 bookTable = BookSet.getBookTable();
120 capacity = BookSet.getCapacity();
121 for (int i = 0; i < tempCapacity; i += 1) {
122 if (temp.get(i) == null) { // empty bucket is skipped
123 } else {
124 for (int j = 0; j < temp.get(i).size(); j += 1) { // goes through old books in bucket
125 Book tempBook = temp.get(i).get(j);
126 String tempKey = tempBook.getKey();
127 insert(tempKey, tempBook);
128 }
129
130 }
131 }
132 } else {
133 System.out.println("No resizing needed");
134 }
135
136
137 // Add the key,value pair to the data structure
138 if (bookTable.get(indexOfKey) == null) { // no keys in this bucket. Must create bucket
139 bookTable.set(indexOfKey, new ArrayList<Book>()); // creates bucket in empty spot
140 bookTable.get(indexOfKey).add(value); // adds to new bucket
141 } else { // bucket already exists. add to bucket
142 bookTable.get(indexOfKey).add(value); // adds to existing bucket
143 }
144
145 // increase the number of keys
146 numKeys += 1;
147 // System.out.println("increased numKeys: " + numKeys);
148
149
150
151 }
152
153 private ArrayList<ArrayList<Book>> getBookTable() {
154 return bookTable;
155 }
156
157
158
159 @Override
160 public boolean remove(String key) throws IllegalNullKeyException {
161
162 // check for null key
163 if (key == null) {
164 throw new IllegalNullKeyException();
165 }
166
167 // find if book to remove is there
168 try {
169 Book bookToRemove = get(key);
170 } catch (KeyNotFoundException e) {
171 return false;
172 }
173
174 // remove book
175 boolean bookRemoved = false;
176 int keyIndex = getKeyIndex(key);
177 if (bookTable.get(keyIndex).size() == 1) {
178 bookTable.set(keyIndex, null);
179 } else {
180 for (int i = 0; i < bookTable.get(keyIndex).size(); i += 1) {
181 // goes through bucket where key is located
182 Book currBook = bookTable.get(keyIndex).get(i);
183 if (bookRemoved && i != bookTable.get(keyIndex).size() - 1) {
184 bookTable.get(keyIndex).set(i - 1, currBook);
185 } else if (bookRemoved && i != bookTable.get(keyIndex).size() - 1) {
186 bookTable.get(keyIndex).set(i - 1, currBook);
187 bookTable.get(keyIndex).remove(i);
188 }
189 if (currBook.getKey().equals(key)) {
190 bookTable.get(keyIndex).remove(i); // removes current book
191 bookRemoved = true;
192 if (bookTable.get(keyIndex).size() == 0) { // if bucket is now empty
193 bookTable.set(keyIndex, null); // set empty bucket to null
194 }
195
196 }
197 }
198 }
199
200 // reduce numKeys
201 numKeys -= 1;
202
203 return true;
204 }
205
206
207
208 @Override
209 public Book get(String key) throws IllegalNullKeyException, KeyNotFoundException {
210 if (key == null) {
211 throw new IllegalNullKeyException();
212 } else {
213 int keyIndex = getKeyIndex(key);
214 if (bookTable.get(keyIndex) == null) {
215 throw new KeyNotFoundException();
216 } else {
217 for (int i = 0; i < bookTable.get(keyIndex).size(); i += 1) {
218 // goes through bucket where key is located
219 Book currBook = bookTable.get(keyIndex).get(i);
220 if (currBook.getKey().equals(key)) {
221 return currBook;
222 }
223 }
224 }
225 }
226 throw new KeyNotFoundException();
227 }
228
229 @Override
230 public int numKeys() {
231 return numKeys; // numKeys should increase with each insert and decrease with each remove
232 }
233
234 @Override
235 public double getLoadFactorThreshold() {
236 return loadFactorThresh; // load factor should not change
237 }
238
239 @Override
240 public int getCapacity() {
241 return capacity; // size will increase, but never decrease
242 }
243
244 @Override
245 public int getCollisionResolutionScheme() {
246 return 4; // 4 indicates use of ArrayList buckets for collisions
247 }
248
249
250}