/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver.compactions;

import static org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.STRIPE_END_KEY;
import static org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.STRIPE_START_KEY;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.anyCollection;
import static org.mockito.Matchers.any;
import static org.mockito.Matchers.anyBoolean;
import static org.mockito.Matchers.anyLong;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeMap;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.ScannerContext;
import org.apache.hadoop.hbase.regionserver.StoreFileReader;
import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter;
import org.apache.hadoop.hbase.util.Bytes;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

public class TestCompactor {

  public static HStoreFile createDummyStoreFile(long maxSequenceId) throws Exception {
    // "Files" are totally unused, it's Scanner class below that gives compactor fake KVs.
    // But compaction depends on everything under the sun, so stub everything with dummies.
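    // The reader stub below only has to satisfy the compactor's bookkeeping: it reports a
    // non-zero length, no bloom filter, a mocked HFile reader, and a mocked store file scanner,
    // and the file echoes back the supplied max sequence id. The cells that actually flow
    // through a compaction come from the Scanner stub at the bottom of this class.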
    HStoreFile sf = mock(HStoreFile.class);
    StoreFileReader r = mock(StoreFileReader.class);
    when(r.length()).thenReturn(1L);
    when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
    when(r.getHFileReader()).thenReturn(mock(HFile.Reader.class));
    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
      anyBoolean())).thenReturn(mock(StoreFileScanner.class));
    when(sf.getReader()).thenReturn(r);
    when(sf.getMaxSequenceId()).thenReturn(maxSequenceId);
    return sf;
  }

  public static CompactionRequestImpl createDummyRequest() throws Exception {
    return new CompactionRequestImpl(Arrays.asList(createDummyStoreFile(1L)));
  }

  // StoreFile.Writer has private ctor and is unwieldy, so this has to be convoluted.
  public static class StoreFileWritersCapture
      implements Answer<StoreFileWriter>, StripeMultiFileWriter.WriterFactory {
    public static class Writer {
      public ArrayList<KeyValue> kvs = new ArrayList<>();
      public TreeMap<byte[], byte[]> data = new TreeMap<>(Bytes.BYTES_COMPARATOR);
      public boolean hasMetadata;
    }

    private List<Writer> writers = new ArrayList<>();

    @Override
    public StoreFileWriter createWriter() throws IOException {
      final Writer realWriter = new Writer();
      writers.add(realWriter);
      StoreFileWriter writer = mock(StoreFileWriter.class);
      doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) {
          return realWriter.kvs.add((KeyValue) invocation.getArgument(0));
        }
      }).when(writer).append(any());
      doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) {
          Object[] args = invocation.getArguments();
          return realWriter.data.put((byte[]) args[0], (byte[]) args[1]);
        }
      }).when(writer).appendFileInfo(any(), any());
      doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
          realWriter.hasMetadata = true;
          return null;
        }
      }).when(writer).appendMetadata(anyLong(), anyBoolean());
      doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
          realWriter.hasMetadata = true;
          return null;
        }
      }).when(writer).appendMetadata(anyLong(), anyBoolean(), anyCollection());
      doAnswer(new Answer<Path>() {
        @Override
        public Path answer(InvocationOnMock invocation) throws Throwable {
          return new Path("foo");
        }
      }).when(writer).getPath();
      return writer;
    }

    @Override
    public StoreFileWriter answer(InvocationOnMock invocation) throws Throwable {
      return createWriter();
    }

    public void verifyKvs(KeyValue[][] kvss, boolean allFiles, boolean requireMetadata) {
      if (allFiles) {
        assertEquals(kvss.length, writers.size());
      }
      int skippedWriters = 0;
      for (int i = 0; i < kvss.length; ++i) {
        KeyValue[] kvs = kvss[i];
        if (kvs != null) {
          Writer w = writers.get(i - skippedWriters);
          if (requireMetadata) {
            assertNotNull(w.data.get(STRIPE_START_KEY));
            assertNotNull(w.data.get(STRIPE_END_KEY));
          } else {
            assertNull(w.data.get(STRIPE_START_KEY));
            assertNull(w.data.get(STRIPE_END_KEY));
          }
          assertEquals(kvs.length, w.kvs.size());
          for (int j = 0; j < kvs.length; ++j) {
            assertEquals(kvs[j], w.kvs.get(j));
          }
        } else {
          assertFalse(allFiles);
          ++skippedWriters;
        }
      }
    }

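    /*
     * Checks the captured writers against the expected stripe boundaries: N boundary keys imply
     * N - 1 writers, and writer i must have recorded boundaries[i] and boundaries[i + 1] as its
     * STRIPE_START_KEY and STRIPE_END_KEY file info. A hypothetical call (the keys here are
     * illustrative only, not taken from this file) might look like:
     *
     *   StoreFileWritersCapture writers = new StoreFileWritersCapture();
     *   // ... run a stripe compaction that creates its writers through 'writers' ...
     *   writers.verifyBoundaries(new byte[][] { Bytes.toBytes("a"), Bytes.toBytes("m"),
     *     Bytes.toBytes("z") });
     */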
    public void verifyBoundaries(byte[][] boundaries) {
      assertEquals(boundaries.length - 1, writers.size());
      for (int i = 0; i < writers.size(); ++i) {
        assertArrayEquals("i = " + i, boundaries[i], writers.get(i).data.get(STRIPE_START_KEY));
        assertArrayEquals("i = " + i, boundaries[i + 1], writers.get(i).data.get(STRIPE_END_KEY));
      }
    }

    public void verifyKvs(KeyValue[][] kvss, boolean allFiles, List<Long> boundaries) {
      if (allFiles) {
        assertEquals(kvss.length, writers.size());
      }
      int skippedWriters = 0;
      for (int i = 0; i < kvss.length; ++i) {
        KeyValue[] kvs = kvss[i];
        if (kvs != null) {
          Writer w = writers.get(i - skippedWriters);
          assertEquals(kvs.length, w.kvs.size());
          for (int j = 0; j < kvs.length; ++j) {
            assertTrue(kvs[j].getTimestamp() >= boundaries.get(i));
            assertTrue(kvs[j].getTimestamp() < boundaries.get(i + 1));
            assertEquals(kvs[j], w.kvs.get(j));
          }
        } else {
          assertFalse(allFiles);
          ++skippedWriters;
        }
      }
    }

    public List<Writer> getWriters() {
      return writers;
    }
  }

  public static class Scanner implements InternalScanner {
    private final ArrayList<KeyValue> kvs;

    public Scanner(KeyValue... kvs) {
      this.kvs = new ArrayList<>(Arrays.asList(kvs));
    }

    @Override
    public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
      if (kvs.isEmpty()) return false;
      result.add(kvs.remove(0));
      return !kvs.isEmpty();
    }

    @Override
    public void close() throws IOException {
    }
  }
}