/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.coprocessor.example;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertTrue;

import org.apache.hadoop.hbase.HBaseClassTestRule;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.regionserver.HRegion;
import org.apache.hadoop.hbase.regionserver.HStore;
import org.apache.hadoop.hbase.testclassification.CoprocessorTests;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.junit.BeforeClass;
import org.junit.ClassRule;
import org.junit.Test;
import org.junit.experimental.categories.Category;

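/**
 * Make sure the {@link WriteHeavyIncrementObserver} example keeps increment sums correct while
 * the per-increment delta cells are folded together by flushes and major compactions.
 */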
@Category({ CoprocessorTests.class, MediumTests.class })
public class TestWriteHeavyIncrementObserver extends WriteHeavyIncrementObserverTestBase {

  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestWriteHeavyIncrementObserver.class);

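  // Create the test table with WriteHeavyIncrementObserver attached: increments are stored as
  // per-call delta cells and folded back into sums by the observer's flush/compaction hooks.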
  @BeforeClass
  public static void setUp() throws Exception {
    WriteHeavyIncrementObserverTestBase.setUp();
    UTIL.getAdmin()
      .createTable(TableDescriptorBuilder.newBuilder(NAME)
        .setCoprocessor(WriteHeavyIncrementObserver.class.getName())
        .setColumnFamily(ColumnFamilyDescriptorBuilder.of(FAMILY)).build());
    TABLE = UTIL.getConnection().getTable(NAME);
  }

  @Test
  public void test() throws Exception {
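    // Drive the increments and check the aggregated sum through the test base helpers before
    // anything has been flushed.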
    doIncrement(0);
    assertSum();
    // The observer does not intercept scans, so a scan still returns the raw delta cells written
    // for each increment rather than a single aggregated value.
    try (ResultScanner scanner = TABLE.getScanner(new Scan().withStartRow(ROW)
      .withStopRow(ROW, true).addFamily(FAMILY).readAllVersions().setAllowPartialResults(true))) {
      Result r = scanner.next();
      assertTrue(r.rawCells().length > 2);
    }
    UTIL.flush(NAME);
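    // Grab the region and its store directly so we can force compactions and watch the store
    // file count.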
    HRegion region = UTIL.getHBaseCluster().findRegionsForTable(NAME).get(0);
    HStore store = region.getStore(FAMILY);
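    // Major compact until the store is down to a single file; the observer's compaction hook
    // folds the remaining delta cells together. One pass is not guaranteed to leave exactly one
    // file, hence the retry loop.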
    for (;;) {
      region.compact(true);
      if (store.getStorefilesCount() == 1) {
        break;
      }
    }
    assertSum();
    // Should only have two cells after flush and major compaction
    try (ResultScanner scanner = TABLE.getScanner(new Scan().withStartRow(ROW)
      .withStopRow(ROW, true).addFamily(FAMILY).readAllVersions().setAllowPartialResults(true))) {
      Result r = scanner.next();
      assertEquals(2, r.rawCells().length);
    }
  }
}