/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.regionserver.compactions;

import static org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.STRIPE_END_KEY;
import static org.apache.hadoop.hbase.regionserver.StripeStoreFileManager.STRIPE_START_KEY;
import static org.junit.Assert.assertArrayEquals;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyBoolean;
import static org.mockito.ArgumentMatchers.anyCollection;
import static org.mockito.ArgumentMatchers.anyLong;
import static org.mockito.Mockito.doAnswer;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;

import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.TreeMap;

import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.Cell;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.regionserver.BloomType;
import org.apache.hadoop.hbase.regionserver.HStoreFile;
import org.apache.hadoop.hbase.regionserver.InternalScanner;
import org.apache.hadoop.hbase.regionserver.ScannerContext;
import org.apache.hadoop.hbase.regionserver.StoreFileReader;
import org.apache.hadoop.hbase.regionserver.StoreFileScanner;
import org.apache.hadoop.hbase.regionserver.StoreFileWriter;
import org.apache.hadoop.hbase.regionserver.StripeMultiFileWriter;
import org.apache.hadoop.hbase.util.Bytes;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/** Shared helpers for compactor tests: dummy store files, a writer-capturing factory, and a fake scanner. */
public class TestCompactor {

  public static HStoreFile createDummyStoreFile(long maxSequenceId) throws Exception {
    // "Files" are totally unused; it's the Scanner class below that gives the compactor fake KVs.
    // But compaction depends on everything under the sun, so stub everything with dummies.
    HStoreFile sf = mock(HStoreFile.class);
    StoreFileReader r = mock(StoreFileReader.class);
    when(r.length()).thenReturn(1L);
    when(r.getBloomFilterType()).thenReturn(BloomType.NONE);
    when(r.getHFileReader()).thenReturn(mock(HFile.Reader.class));
    when(r.getStoreFileScanner(anyBoolean(), anyBoolean(), anyBoolean(), anyLong(), anyLong(),
      anyBoolean())).thenReturn(mock(StoreFileScanner.class));
    when(sf.getReader()).thenReturn(r);
    when(sf.getMaxSequenceId()).thenReturn(maxSequenceId);
    return sf;
  }

  public static CompactionRequestImpl createDummyRequest() throws Exception {
    return new CompactionRequestImpl(Arrays.asList(createDummyStoreFile(1L)));
  }

  // StoreFile.Writer has private ctor and is unwieldy, so this has to be convoluted.
  public static class StoreFileWritersCapture
      implements Answer<StoreFileWriter>, StripeMultiFileWriter.WriterFactory {
    public static class Writer {
      public ArrayList<KeyValue> kvs = new ArrayList<>();
      public TreeMap<byte[], byte[]> data = new TreeMap<>(Bytes.BYTES_COMPARATOR);
      public boolean hasMetadata;
    }

    private List<Writer> writers = new ArrayList<>();

    @Override
    public StoreFileWriter createWriter() throws IOException {
      final Writer realWriter = new Writer();
      writers.add(realWriter);
      StoreFileWriter writer = mock(StoreFileWriter.class);
      doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) {
          return realWriter.kvs.add((KeyValue) invocation.getArgument(0));
        }
      }).when(writer).append(any());
      doAnswer(new Answer<Object>() {
        @Override
        public Object answer(InvocationOnMock invocation) {
          Object[] args = invocation.getArguments();
          return realWriter.data.put((byte[]) args[0], (byte[]) args[1]);
        }
      }).when(writer).appendFileInfo(any(), any());
      doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
          realWriter.hasMetadata = true;
          return null;
        }
      }).when(writer).appendMetadata(anyLong(), anyBoolean());
      doAnswer(new Answer<Void>() {
        @Override
        public Void answer(InvocationOnMock invocation) throws Throwable {
          realWriter.hasMetadata = true;
          return null;
        }
      }).when(writer).appendMetadata(anyLong(), anyBoolean(), anyCollection());
      doAnswer(new Answer<Path>() {
        @Override
        public Path answer(InvocationOnMock invocation) throws Throwable {
          return new Path("foo");
        }
      }).when(writer).getPath();
      return writer;
    }

    @Override
    public StoreFileWriter answer(InvocationOnMock invocation) throws Throwable {
      return createWriter();
    }

    public void verifyKvs(KeyValue[][] kvss, boolean allFiles, boolean requireMetadata) {
      if (allFiles) {
        assertEquals(kvss.length, writers.size());
      }
      int skippedWriters = 0;
      for (int i = 0; i < kvss.length; ++i) {
        KeyValue[] kvs = kvss[i];
        if (kvs != null) {
          Writer w = writers.get(i - skippedWriters);
          if (requireMetadata) {
            assertNotNull(w.data.get(STRIPE_START_KEY));
            assertNotNull(w.data.get(STRIPE_END_KEY));
          } else {
            assertNull(w.data.get(STRIPE_START_KEY));
            assertNull(w.data.get(STRIPE_END_KEY));
          }
          assertEquals(kvs.length, w.kvs.size());
          for (int j = 0; j < kvs.length; ++j) {
            assertEquals(kvs[j], w.kvs.get(j));
          }
        } else {
          assertFalse(allFiles);
          ++skippedWriters;
        }
      }
    }
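
    /**
     * Asserts that the captured writers cover consecutive boundary ranges: writer i should have
     * written stripe metadata with STRIPE_START_KEY == boundaries[i] and
     * STRIPE_END_KEY == boundaries[i + 1].
     */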
    public void verifyBoundaries(byte[][] boundaries) {
      assertEquals(boundaries.length - 1, writers.size());
      for (int i = 0; i < writers.size(); ++i) {
        assertArrayEquals("i = " + i, boundaries[i], writers.get(i).data.get(STRIPE_START_KEY));
        assertArrayEquals("i = " + i, boundaries[i + 1], writers.get(i).data.get(STRIPE_END_KEY));
      }
    }

    public void verifyKvs(KeyValue[][] kvss, boolean allFiles, List<Long> boundaries) {
      if (allFiles) {
        assertEquals(kvss.length, writers.size());
      }
      int skippedWriters = 0;
      for (int i = 0; i < kvss.length; ++i) {
        KeyValue[] kvs = kvss[i];
        if (kvs != null) {
          Writer w = writers.get(i - skippedWriters);
          assertEquals(kvs.length, w.kvs.size());
          for (int j = 0; j < kvs.length; ++j) {
            assertTrue(kvs[j].getTimestamp() >= boundaries.get(i));
            assertTrue(kvs[j].getTimestamp() < boundaries.get(i + 1));
            assertEquals(kvs[j], w.kvs.get(j));
          }
        } else {
          assertFalse(allFiles);
          ++skippedWriters;
        }
      }
    }

    public List<Writer> getWriters() {
      return writers;
    }
  }

  public static class Scanner implements InternalScanner {
    private final ArrayList<KeyValue> kvs;

    public Scanner(KeyValue... kvs) {
      this.kvs = new ArrayList<>(Arrays.asList(kvs));
    }

    @Override
    public boolean next(List<Cell> result, ScannerContext scannerContext) throws IOException {
      if (kvs.isEmpty()) return false;
      result.add(kvs.remove(0));
      return !kvs.isEmpty();
    }

    @Override
    public void close() throws IOException {
    }
  }
}
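
// Illustrative usage sketch (not part of the original file). The compactor wiring is assumed to
// happen elsewhere in a concrete test, e.g. a stripe compactor driven by these helpers:
//
//   StoreFileWritersCapture writers = new StoreFileWritersCapture();
//   CompactionRequestImpl request = createDummyRequest();
//   KeyValue kv = new KeyValue(Bytes.toBytes("row"), Bytes.toBytes("f"), Bytes.toBytes("q"),
//     1L, Bytes.toBytes("v"));
//   // The compactor under test reads KVs from new Scanner(kv), creates its output files via
//   // writers.createWriter(), and the captured output is then checked with:
//   writers.verifyKvs(new KeyValue[][] { new KeyValue[] { kv } }, true, false);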