/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import com.codahale.metrics.Histogram;
import com.codahale.metrics.Snapshot;
import com.codahale.metrics.UniformReservoir;
import java.io.BufferedReader;
import java.io.ByteArrayInputStream;
import java.io.IOException;
import java.io.InputStreamReader;
import java.lang.reflect.Constructor;
import java.lang.reflect.InvocationTargetException;
import java.nio.charset.StandardCharsets;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Queue;
import java.util.Random;
import java.util.concurrent.ThreadLocalRandom;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.PerformanceEvaluation.RandomReadTest;
import org.apache.hadoop.hbase.PerformanceEvaluation.TestOptions;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.regionserver.CompactingMemStore;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.testclassification.SmallTests;
import org.apache.hadoop.hbase.util.GsonUtil;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

import org.apache.hbase.thirdparty.com.google.gson.Gson;

@Category({ MiscTests.class, SmallTests.class })
public class TestPerformanceEvaluation {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestPerformanceEvaluation.class);

  private static final HBaseTestingUtil HTU = new HBaseTestingUtil();

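  /**
   * The default in-memory compaction policy in TestOptions should match the CompactingMemStore
   * default and be applied to every column family of the generated table descriptor.
   */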
  @Test
  public void testDefaultInMemoryCompaction() {
    PerformanceEvaluation.TestOptions defaultOpts = new PerformanceEvaluation.TestOptions();
    assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
      defaultOpts.getInMemoryCompaction().toString());
    TableDescriptor tableDescriptor = PerformanceEvaluation.getTableDescriptor(defaultOpts);
    for (ColumnFamilyDescriptor familyDescriptor : tableDescriptor.getColumnFamilies()) {
      assertEquals(CompactingMemStore.COMPACTING_MEMSTORE_TYPE_DEFAULT,
        familyDescriptor.getInMemoryCompaction().toString());
    }
  }

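  /**
   * Round-trip TestOptions through Gson and verify that a toggled autoFlush flag survives.
   */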
  @Test
  public void testSerialization() {
    PerformanceEvaluation.TestOptions options = new PerformanceEvaluation.TestOptions();
    assertFalse(options.isAutoFlush());
    options.setAutoFlush(true);
    Gson gson = GsonUtil.createGson().create();
    String optionsString = gson.toJson(options);
    PerformanceEvaluation.TestOptions optionsDeserialized =
      gson.fromJson(optionsString, PerformanceEvaluation.TestOptions.class);
    assertTrue(optionsDeserialized.isAutoFlush());
  }

  /**
   * Exercise the MapReduce input spec writing. Simple assertions to make sure it is basically
   * working.
   */
  @Test
  public void testWriteInputFile() throws IOException {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    final int clients = 10;
    opts.setNumClientThreads(clients);
    opts.setPerClientRunRows(10);
    Path dir =
      PerformanceEvaluation.writeInputFile(HTU.getConfiguration(), opts, HTU.getDataTestDir());
    FileSystem fs = FileSystem.get(HTU.getConfiguration());
    Path p = new Path(dir, PerformanceEvaluation.JOB_INPUT_FILENAME);
    long len = fs.getFileStatus(p).getLen();
    assertTrue(len > 0);
    byte[] content = new byte[(int) len];
    try (FSDataInputStream dis = fs.open(p)) {
      dis.readFully(content);
      BufferedReader br = new BufferedReader(
        new InputStreamReader(new ByteArrayInputStream(content), StandardCharsets.UTF_8));
      int count = 0;
      while (br.readLine() != null) {
        count++;
      }
      assertEquals(clients, count);
    }
  }

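  /**
   * Check calculateRowsAndSize: the default is 1M rows per client, doubling the size doubles
   * the rows, two clients split the rows between them, and random-length values (averaging half
   * the configured size) double the row count again.
   */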
  @Test
  public void testSizeCalculation() {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    int rows = opts.getPerClientRunRows();
    // Default row count
    final int defaultPerClientRunRows = 1024 * 1024;
    assertEquals(defaultPerClientRunRows, rows);
    // If size is 2G, then twice the row count.
    opts.setSize(2.0f);
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(defaultPerClientRunRows * 2, opts.getPerClientRunRows());
    // If two clients, then they get half the rows each.
    opts.setNumClientThreads(2);
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(defaultPerClientRunRows, opts.getPerClientRunRows());
    // What if valueSize is 'random'? Then half of the valueSize so twice the rows.
    opts.valueRandom = true;
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(defaultPerClientRunRows * 2, opts.getPerClientRunRows());
  }

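  /**
   * For the randomRead command an explicitly set per-client row count should be preserved by
   * calculateRowsAndSize, and generateRandomRow should draw rows from the whole totalRows key
   * space rather than just the per-client range.
   */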
  @Test
  public void testRandomReadCalculation() {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    int rows = opts.getPerClientRunRows();
    // Default row count
    final int defaultPerClientRunRows = 1024 * 1024;
    assertEquals(defaultPerClientRunRows, rows);
    // For randomRead, an explicitly set per-client row count is kept even when size is set.
    opts.setSize(2.0f);
    opts.setPerClientRunRows(1000);
    opts.setCmdName(PerformanceEvaluation.RANDOM_READ);
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(1000, opts.getPerClientRunRows());
    // More clients do not change the explicit per-client row count either.
    opts.setNumClientThreads(2);
    opts = PerformanceEvaluation.calculateRowsAndSize(opts);
    assertEquals(1000, opts.getPerClientRunRows());
    // Reads should range over the whole totalRows key space; assuming we will get a row beyond
    // 1000 before this loop expires.
    boolean foundValue = false;
    Random rand = ThreadLocalRandom.current();
    for (int i = 0; i < 10000000; i++) {
      int randomRow = PerformanceEvaluation.generateRandomRow(rand, opts.totalRows);
      if (randomRow > 1000) {
        foundValue = true;
        break;
      }
    }
    assertTrue("We need to get a value more than 1000", foundValue);
  }

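  /**
   * With valueZipf enabled, the value lengths produced by RandomReadTest should be skewed
   * rather than constant: sample them into a histogram and verify that the standard deviation
   * and median are not degenerate.
   */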
  @Test
  public void testZipfian() throws NoSuchMethodException, SecurityException, InstantiationException,
    IllegalAccessException, IllegalArgumentException, InvocationTargetException {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    opts.setValueZipf(true);
    final int valueSize = 1024;
    opts.setValueSize(valueSize);
    RandomReadTest rrt = new RandomReadTest(null, opts, null);
    Constructor<?> ctor =
      Histogram.class.getDeclaredConstructor(com.codahale.metrics.Reservoir.class);
    ctor.setAccessible(true);
    Histogram histogram = (Histogram) ctor.newInstance(new UniformReservoir(1024 * 500));
    for (int i = 0; i < 100; i++) {
      histogram.update(rrt.getValueLength(null));
    }
    Snapshot snapshot = histogram.getSnapshot();
    double stddev = snapshot.getStdDev();
    assertTrue(stddev != 0 && stddev != 1.0);
    assertTrue(snapshot.getStdDev() != 0);
    double median = snapshot.getMedian();
    assertTrue(median != 0 && median != 1 && median != valueSize);
  }

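  /**
   * The default buffer size is 2MB and can be overridden via setBufferSize.
   */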
  @Test
  public void testSetBufferSizeOption() {
    TestOptions opts = new PerformanceEvaluation.TestOptions();
    long bufferSize = opts.getBufferSize();
    assertEquals(2L * 1024L * 1024L, bufferSize);
    opts.setBufferSize(64L * 1024L);
    bufferSize = opts.getBufferSize();
    assertEquals(64L * 1024L, bufferSize);
  }

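  /**
   * A command name followed by a thread count should parse into cmdName and numClientThreads.
   */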
  @Test
  public void testParseOptsWithThreads() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    int threads = 1;
    opts.offer(cmdName);
    opts.offer(String.valueOf(threads));
    PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(cmdName, options.getCmdName());
    assertEquals(threads, options.getNumClientThreads());
  }

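  /**
   * A non-numeric thread count should surface as an IllegalArgumentException caused by a
   * NumberFormatException.
   */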
  @Test
  public void testParseOptsWrongThreads() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    opts.offer(cmdName);
    opts.offer("qq");
    try {
      PerformanceEvaluation.parseOpts(opts);
      fail("should fail");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
      assertEquals("Command " + cmdName + " does not have threads number", e.getMessage());
      assertTrue(e.getCause() instanceof NumberFormatException);
    }
  }

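  /**
   * A command with no thread count should surface as an IllegalArgumentException caused by a
   * NoSuchElementException.
   */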
  @Test
  public void testParseOptsNoThreads() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    opts.offer(cmdName);
    try {
      PerformanceEvaluation.parseOpts(opts);
      fail("should fail");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
      assertEquals("Command " + cmdName + " does not have threads number", e.getMessage());
      assertTrue(e.getCause() instanceof NoSuchElementException);
    }
  }

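  /**
   * The multiPut option is only valid together with autoFlush=true; otherwise parsing fails.
   */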
  @Test
  public void testParseOptsMultiPuts() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    opts.offer("--multiPut=10");
    opts.offer(cmdName);
    opts.offer("64");
    PerformanceEvaluation.TestOptions options = null;
    try {
      options = PerformanceEvaluation.parseOpts(opts);
      fail("should fail");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }

    // Re-create options
    opts = new LinkedList<>();
    opts.offer("--autoFlush=true");
    opts.offer("--multiPut=10");
    opts.offer(cmdName);
    opts.offer("64");

    options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(cmdName, options.getCmdName());
    assertEquals(10, options.getMultiPut());
  }

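  /**
   * The order of the autoFlush and multiPut options on the command line must not matter.
   */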
  @Test
  public void testParseOptsMultiPutsAndAutoFlushOrder() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    String cmdMultiPut = "--multiPut=10";
    String cmdAutoFlush = "--autoFlush=true";
    opts.offer(cmdAutoFlush);
    opts.offer(cmdMultiPut);
    opts.offer(cmdName);
    opts.offer("64");
    PerformanceEvaluation.TestOptions options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertTrue(options.autoFlush);
    assertEquals(10, options.getMultiPut());

    // Change the order of --autoFlush and --multiPut
    opts = new LinkedList<>();
    opts.offer(cmdMultiPut);
    opts.offer(cmdAutoFlush);
    opts.offer(cmdName);
    opts.offer("64");

    options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertEquals(10, options.getMultiPut());
    assertTrue(options.autoFlush);
  }

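  /**
   * The connCount option cannot be combined with oneCon=true; on its own it is accepted.
   */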
  @Test
  public void testParseOptsConnCount() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    opts.offer("--oneCon=true");
    opts.offer("--connCount=10");
    opts.offer(cmdName);
    opts.offer("64");
    PerformanceEvaluation.TestOptions options = null;
    try {
      options = PerformanceEvaluation.parseOpts(opts);
      fail("should fail");
    } catch (IllegalArgumentException e) {
      System.out.println(e.getMessage());
    }

    opts = new LinkedList<>();
    opts.offer("--connCount=10");
    opts.offer(cmdName);
    opts.offer("64");

    options = PerformanceEvaluation.parseOpts(opts);
    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(cmdName, options.getCmdName());
    assertEquals(10, options.getConnCount());
  }

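  /**
   * The valueRandom and valueZipf options are mutually exclusive; valueRandom alone is accepted.
   */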
  @Test
  public void testParseOptsValueRandom() {
    Queue<String> opts = new LinkedList<>();
    String cmdName = "sequentialWrite";
    opts.offer("--valueRandom");
    opts.offer("--valueZipf");
    opts.offer(cmdName);
    opts.offer("64");
    PerformanceEvaluation.TestOptions options = null;
    try {
      options = PerformanceEvaluation.parseOpts(opts);
      fail("should fail");
    } catch (IllegalStateException e) {
      System.out.println(e.getMessage());
    }

    opts = new LinkedList<>();
    opts.offer("--valueRandom");
    opts.offer(cmdName);
    opts.offer("64");

    options = PerformanceEvaluation.parseOpts(opts);

    assertNotNull(options);
    assertNotNull(options.getCmdName());
    assertEquals(cmdName, options.getCmdName());
    assertTrue(options.valueRandom);
  }

}