/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.assertNull;
import static org.junit.Assert.assertTrue;
import static org.junit.Assert.fail;

import java.io.IOException;
import java.util.Collections;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptor;
import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder;
import org.apache.hadoop.hbase.client.RegionInfoBuilder;
import org.apache.hadoop.hbase.client.TableDescriptor;
import org.apache.hadoop.hbase.client.TableDescriptorBuilder;
import org.apache.hadoop.hbase.io.encoding.DataBlockEncoding;
import org.apache.hadoop.hbase.regionserver.Region;
import org.apache.hadoop.hbase.testclassification.MediumTests;
import org.apache.hadoop.hbase.testclassification.MiscTests;
import org.apache.hadoop.hbase.util.Bytes;
import org.junit.After;
import org.junit.Before;
import org.junit.ClassRule;
import org.junit.Rule;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.junit.rules.TestName;

/**
 * Test being able to edit hbase:meta.
 */
@Category({ MiscTests.class, MediumTests.class })
public class TestHBaseMetaEdit {
  @ClassRule
  public static final HBaseClassTestRule CLASS_RULE =
    HBaseClassTestRule.forClass(TestHBaseMetaEdit.class);
  @Rule
  public TestName name = new TestName();
  private final static HBaseTestingUtil UTIL = new HBaseTestingUtil();

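  // Each test runs against its own mini cluster so that hbase:meta edits cannot leak between
  // tests.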
  @Before
  public void before() throws Exception {
    UTIL.startMiniCluster();
  }

  @After
  public void after() throws Exception {
    UTIL.shutdownMiniCluster();
  }

  // Make sure that every Admin API that can return the hbase:meta table descriptor returns the
  // same one.
  private TableDescriptor getMetaDescriptor() throws TableNotFoundException, IOException {
    Admin admin = UTIL.getAdmin();
    TableDescriptor get = admin.getDescriptor(TableName.META_TABLE_NAME);
    TableDescriptor list =
      admin.listTableDescriptors(true).stream().filter(td -> td.isMetaTable()).findAny().get();
    TableDescriptor listByName =
      admin.listTableDescriptors(Collections.singletonList(TableName.META_TABLE_NAME)).get(0);
    TableDescriptor listByNs =
      admin.listTableDescriptorsByNamespace(NamespaceDescriptor.SYSTEM_NAMESPACE_NAME).stream()
        .filter(td -> td.isMetaTable()).findAny().get();
    assertEquals(get, list);
    assertEquals(get, listByName);
    assertEquals(get, listByNs);
    return get;
  }

  /**
   * Set versions, set the HBASE-16213 indexed block encoding, and add a column family. Delete the
   * added column family. Then try to delete a core hbase:meta family (which should fail). Verify
   * the edits are all in place by looking at the TableDescriptor AND by checking what the
   * RegionServer sees after opening the Region.
   */
  @Test
  public void testEditMeta() throws IOException {
    Admin admin = UTIL.getAdmin();
    assertTrue(admin.tableExists(TableName.META_TABLE_NAME));
    TableDescriptor originalDescriptor = getMetaDescriptor();
    ColumnFamilyDescriptor cfd = originalDescriptor.getColumnFamily(HConstants.CATALOG_FAMILY);
    int oldVersions = cfd.getMaxVersions();
    // Add '1' to the current versions count. Set the data block encoding too.
    cfd = ColumnFamilyDescriptorBuilder.newBuilder(cfd).setMaxVersions(oldVersions + 1)
      .setConfiguration(ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING,
        DataBlockEncoding.ROW_INDEX_V1.toString())
      .build();
    admin.modifyColumnFamily(TableName.META_TABLE_NAME, cfd);
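    // Add a brand new column family alongside the core catalog families.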
    byte[] extraColumnFamilyName = Bytes.toBytes("xtra");
    ColumnFamilyDescriptor newCfd =
      ColumnFamilyDescriptorBuilder.newBuilder(extraColumnFamilyName).build();
    admin.addColumnFamily(TableName.META_TABLE_NAME, newCfd);
    TableDescriptor descriptor = getMetaDescriptor();
    // Assert new max versions is == old versions plus 1.
    assertEquals(oldVersions + 1,
      descriptor.getColumnFamily(HConstants.CATALOG_FAMILY).getMaxVersions());
    assertNotNull(descriptor.getColumnFamily(newCfd.getName()));
    String encoding = descriptor.getColumnFamily(HConstants.CATALOG_FAMILY).getConfiguration()
      .get(ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING);
    assertEquals(DataBlockEncoding.ROW_INDEX_V1.toString(), encoding);
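    // The edits should be visible not only in the master's descriptor but also in the schema the
    // RegionServer is actually serving hbase:meta with.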
    Region r = UTIL.getHBaseCluster().getRegionServer(0)
      .getRegion(RegionInfoBuilder.FIRST_META_REGIONINFO.getEncodedName());
    assertEquals(oldVersions + 1,
      r.getStore(HConstants.CATALOG_FAMILY).getColumnFamilyDescriptor().getMaxVersions());
    encoding = r.getStore(HConstants.CATALOG_FAMILY).getColumnFamilyDescriptor()
      .getConfigurationValue(ColumnFamilyDescriptorBuilder.DATA_BLOCK_ENCODING);
    assertEquals(DataBlockEncoding.ROW_INDEX_V1.toString(), encoding);
    assertNotNull(r.getStore(extraColumnFamilyName));
    // Assert we can't drop a critical hbase:meta column family but we can drop any other.
    admin.deleteColumnFamily(TableName.META_TABLE_NAME, newCfd.getName());
    descriptor = getMetaDescriptor();
    assertNull(descriptor.getColumnFamily(newCfd.getName()));
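    // Dropping the core catalog family must be rejected; losing it would corrupt hbase:meta.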
    try {
      admin.deleteColumnFamily(TableName.META_TABLE_NAME, HConstants.CATALOG_FAMILY);
      fail("Delete of a core hbase:meta column family should have failed");
    } catch (HBaseIOException hioe) {
      assertTrue(hioe.getMessage().contains("Delete of hbase:meta"));
    }
  }

  /**
   * Validate that hbase:meta cannot be altered to be read only. Allowing it would break
   * assignment functionality. See HBASE-24977.
   */
  @Test
  public void testAlterMetaWithReadOnly() throws IOException {
    Admin admin = UTIL.getAdmin();
    TableDescriptor origMetaTableDesc = admin.getDescriptor(TableName.META_TABLE_NAME);
    assertFalse(origMetaTableDesc.isReadOnly());
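    // Attempt to flip hbase:meta to read only; modifyTable should be rejected and the descriptor
    // should remain unchanged.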
    TableDescriptor newTD =
      TableDescriptorBuilder.newBuilder(origMetaTableDesc).setReadOnly(true).build();
    try {
      admin.modifyTable(newTD);
      fail("Altering hbase:meta to be read only should have been rejected");
    } catch (Exception e) {
      assertFalse(admin.getDescriptor(TableName.META_TABLE_NAME).isReadOnly());
    }

    // Create a read-only user table to confirm region assignment and hbase:meta operations still
    // work.
    TableName tableName = TableName.valueOf("tempTable");
    TableDescriptor td = TableDescriptorBuilder.newBuilder(tableName).setReadOnly(true)
      .setColumnFamily(ColumnFamilyDescriptorBuilder.newBuilder(Bytes.toBytes("f1")).build())
      .build();
    admin.createTable(td);
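    // Dropping the table exercises hbase:meta writes again; it would fail if the alter above had
    // taken effect.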
    UTIL.deleteTable(tableName);
  }
}