/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements.  See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership.  The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile.bucket;

import java.io.IOException;
import java.util.Comparator;
import java.util.HashMap;
import java.util.Map;
import java.util.NavigableSet;
import java.util.concurrent.ConcurrentHashMap;
import java.util.concurrent.ConcurrentSkipListSet;
import java.util.function.Function;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.ByteBuffAllocator.Recycler;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.BlockPriority;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;
import org.apache.hadoop.hbase.io.hfile.HFileBlock;
import org.apache.hadoop.hbase.util.Pair;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;

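/**
 * Helpers that translate {@link BucketCache} runtime state to and from the protobuf messages in
 * {@code BucketCacheProtos}, so that a cache backed by a persistent IOEngine can be written out
 * on shutdown and rebuilt on restart.
 * <p>
 * A minimal sketch of the round trip, assuming the caller handles the file I/O and supplies its
 * own {@code createRecycler} function (the real persistence flow lives in {@link BucketCache};
 * the getter names follow the protobuf-java conventions for the fields populated below):
 *
 * <pre>
 * // Serialize: snapshot the cache metadata into a single protobuf message.
 * BucketCacheProtos.BucketCacheEntry pb = BucketProtoUtils.toPB(cache);
 * // ... write pb to the persistence file, then later parse it back ...
 * // Deserialize: rebuild the backing map and the ordered key set.
 * Pair&lt;ConcurrentHashMap&lt;BlockCacheKey, BucketEntry&gt;, NavigableSet&lt;BlockCacheKey&gt;&gt; maps =
 *   BucketProtoUtils.fromPB(pb.getDeserializersMap(), pb.getBackingMap(), createRecycler);
 * </pre>
 */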
@InterfaceAudience.Private
final class BucketProtoUtils {
  private BucketProtoUtils() {
  }

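  /**
   * Serializes the persistent state of the given cache (capacity, IOEngine and map classes, the
   * deserializer registry, the per-file cached-size map, the backing map itself, and a checksum
   * of the persistent IOEngine contents) into a single {@code BucketCacheEntry} message.
   */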
  static BucketCacheProtos.BucketCacheEntry toPB(BucketCache cache) {
    return BucketCacheProtos.BucketCacheEntry.newBuilder().setCacheCapacity(cache.getMaxSize())
      .setIoClass(cache.ioEngine.getClass().getName())
      .setMapClass(cache.backingMap.getClass().getName())
      .putAllDeserializers(CacheableDeserializerIdManager.save())
      .putAllCachedFiles(toCachedPB(cache.fullyCachedFiles))
      .setBackingMap(BucketProtoUtils.toPB(cache.backingMap))
      .setChecksum(ByteString
        .copyFrom(((PersistentIOEngine) cache.ioEngine).calculateChecksum(cache.getAlgorithm())))
      .build();
  }

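  /** Converts the in-memory backing map into its repeated-entry protobuf form. */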
  private static BucketCacheProtos.BackingMap toPB(Map<BlockCacheKey, BucketEntry> backingMap) {
    BucketCacheProtos.BackingMap.Builder builder = BucketCacheProtos.BackingMap.newBuilder();
    for (Map.Entry<BlockCacheKey, BucketEntry> entry : backingMap.entrySet()) {
      builder.addEntry(BucketCacheProtos.BackingMapEntry.newBuilder().setKey(toPB(entry.getKey()))
        .setValue(toPB(entry.getValue())).build());
    }
    return builder.build();
  }

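  /** Converts a {@link BlockCacheKey} to its protobuf counterpart. */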
  private static BucketCacheProtos.BlockCacheKey toPB(BlockCacheKey key) {
    return BucketCacheProtos.BlockCacheKey.newBuilder().setHfilename(key.getHfileName())
      .setOffset(key.getOffset()).setPrimaryReplicaBlock(key.isPrimary())
      .setBlockType(toPB(key.getBlockType())).build();
  }

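  /** Maps a {@link BlockType} enum constant onto the corresponding protobuf enum value. */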
  private static BucketCacheProtos.BlockType toPB(BlockType blockType) {
    switch (blockType) {
      case DATA:
        return BucketCacheProtos.BlockType.data;
      case META:
        return BucketCacheProtos.BlockType.meta;
      case TRAILER:
        return BucketCacheProtos.BlockType.trailer;
      case INDEX_V1:
        return BucketCacheProtos.BlockType.index_v1;
      case FILE_INFO:
        return BucketCacheProtos.BlockType.file_info;
      case LEAF_INDEX:
        return BucketCacheProtos.BlockType.leaf_index;
      case ROOT_INDEX:
        return BucketCacheProtos.BlockType.root_index;
      case BLOOM_CHUNK:
        return BucketCacheProtos.BlockType.bloom_chunk;
      case ENCODED_DATA:
        return BucketCacheProtos.BlockType.encoded_data;
      case GENERAL_BLOOM_META:
        return BucketCacheProtos.BlockType.general_bloom_meta;
      case INTERMEDIATE_INDEX:
        return BucketCacheProtos.BlockType.intermediate_index;
      case DELETE_FAMILY_BLOOM_META:
        return BucketCacheProtos.BlockType.delete_family_bloom_meta;
      default:
        throw new Error("Unrecognized BlockType.");
    }
  }

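  /**
   * Serializes the metadata of a single {@link BucketEntry}: its offset and sizes within the
   * IOEngine, access counter, cached time, deserializer index, and priority.
   */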
  private static BucketCacheProtos.BucketEntry toPB(BucketEntry entry) {
    return BucketCacheProtos.BucketEntry.newBuilder().setOffset(entry.offset())
      .setCachedTime(entry.getCachedTime()).setLength(entry.getLength())
      .setDiskSizeWithHeader(entry.getOnDiskSizeWithHeader())
      .setDeserialiserIndex(entry.deserializerIndex).setAccessCounter(entry.getAccessCounter())
      .setPriority(toPB(entry.getPriority())).build();
  }

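  /** Maps a {@link BlockPriority} enum constant onto the corresponding protobuf enum value. */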
  private static BucketCacheProtos.BlockPriority toPB(BlockPriority p) {
    switch (p) {
      case MULTI:
        return BucketCacheProtos.BlockPriority.multi;
      case MEMORY:
        return BucketCacheProtos.BlockPriority.memory;
      case SINGLE:
        return BucketCacheProtos.BlockPriority.single;
      default:
        throw new Error("Unrecognized BlockPriority.");
    }
  }

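  /**
   * Rebuilds the backing map from its persisted protobuf form. Returns both the map itself and a
   * {@link NavigableSet} of its keys ordered by HFile name and offset, which lets callers scan
   * all blocks of one file efficiently.
   * @param deserializers  the id-to-class deserializer mapping persisted alongside the map;
   *                       entries are rewritten to the deserializer ids of this runtime
   * @param backingMap     the persisted map entries
   * @param createRecycler factory for the {@link Recycler} attached to each restored entry
   * @throws IOException if a persisted deserializer index has no matching class, or the class is
   *                     not the expected {@link HFileBlock.BlockDeserializer}
   */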
  static Pair<ConcurrentHashMap<BlockCacheKey, BucketEntry>, NavigableSet<BlockCacheKey>> fromPB(
    Map<Integer, String> deserializers, BucketCacheProtos.BackingMap backingMap,
    Function<BucketEntry, Recycler> createRecycler) throws IOException {
    ConcurrentHashMap<BlockCacheKey, BucketEntry> result = new ConcurrentHashMap<>();
    NavigableSet<BlockCacheKey> resultSet = new ConcurrentSkipListSet<>(Comparator
      .comparing(BlockCacheKey::getHfileName).thenComparingLong(BlockCacheKey::getOffset));
    for (BucketCacheProtos.BackingMapEntry entry : backingMap.getEntryList()) {
      BucketCacheProtos.BlockCacheKey protoKey = entry.getKey();
      BlockCacheKey key = new BlockCacheKey(protoKey.getHfilename(), protoKey.getOffset(),
        protoKey.getPrimaryReplicaBlock(), fromPb(protoKey.getBlockType()));
      BucketCacheProtos.BucketEntry protoValue = entry.getValue();
      // TODO: We use ByteBuffAllocator.HEAP here because we cannot elegantly get hold of the
      // ByteBuffAllocator created by the RpcServer.
      BucketEntry value = new BucketEntry(protoValue.getOffset(), protoValue.getLength(),
        protoValue.getDiskSizeWithHeader(), protoValue.getAccessCounter(),
        protoValue.getCachedTime(),
        protoValue.getPriority() == BucketCacheProtos.BlockPriority.memory, createRecycler,
        ByteBuffAllocator.HEAP);
      // This is the deserializer that we stored
      int oldIndex = protoValue.getDeserialiserIndex();
      String deserializerClass = deserializers.get(oldIndex);
      if (deserializerClass == null) {
        throw new IOException("Found deserializer index without matching entry.");
      }
      // Convert it to the identifier for the deserializer that we have in this runtime
      if (deserializerClass.equals(HFileBlock.BlockDeserializer.class.getName())) {
        int actualIndex = HFileBlock.BLOCK_DESERIALIZER.getDeserializerIdentifier();
        value.deserializerIndex = (byte) actualIndex;
      } else {
        // We could make this more pluggable, but right now HFileBlock is the only implementation
        // of Cacheable outside of tests, so this might never matter.
        throw new IOException("Unknown deserializer class found: " + deserializerClass);
      }
      result.put(key, value);
      resultSet.add(key);
    }
    return new Pair<>(result, resultSet);
  }

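  /** Maps a protobuf block-type enum value back onto the {@link BlockType} enum constant. */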
  private static BlockType fromPb(BucketCacheProtos.BlockType blockType) {
    switch (blockType) {
      case data:
        return BlockType.DATA;
      case meta:
        return BlockType.META;
      case trailer:
        return BlockType.TRAILER;
      case index_v1:
        return BlockType.INDEX_V1;
      case file_info:
        return BlockType.FILE_INFO;
      case leaf_index:
        return BlockType.LEAF_INDEX;
      case root_index:
        return BlockType.ROOT_INDEX;
      case bloom_chunk:
        return BlockType.BLOOM_CHUNK;
      case encoded_data:
        return BlockType.ENCODED_DATA;
      case general_bloom_meta:
        return BlockType.GENERAL_BLOOM_META;
      case intermediate_index:
        return BlockType.INTERMEDIATE_INDEX;
      case delete_family_bloom_meta:
        return BlockType.DELETE_FAMILY_BLOOM_META;
      default:
        throw new Error("Unrecognized BlockType.");
    }
  }

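  /**
   * Converts the fully-cached-files map (HFile name to a pair of region name and cached size)
   * into its protobuf form, keyed by HFile name.
   */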
  static Map<String, BucketCacheProtos.RegionFileSizeMap>
    toCachedPB(Map<String, Pair<String, Long>> prefetchedHfileNames) {
    Map<String, BucketCacheProtos.RegionFileSizeMap> tmpMap = new HashMap<>();
    prefetchedHfileNames.forEach((hfileName, regionPrefetchMap) -> {
      BucketCacheProtos.RegionFileSizeMap tmpRegionFileSize =
        BucketCacheProtos.RegionFileSizeMap.newBuilder().setRegionName(regionPrefetchMap.getFirst())
          .setRegionCachedSize(regionPrefetchMap.getSecond()).build();
      tmpMap.put(hfileName, tmpRegionFileSize);
    });
    return tmpMap;
  }

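  /** Inverse of {@link #toCachedPB(Map)}: restores the fully-cached-files map from protobuf. */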
  static Map<String, Pair<String, Long>>
    fromPB(Map<String, BucketCacheProtos.RegionFileSizeMap> prefetchHFileNames) {
    Map<String, Pair<String, Long>> hfileMap = new HashMap<>();
    prefetchHFileNames.forEach((hfileName, regionPrefetchMap) -> {
      hfileMap.put(hfileName,
        new Pair<>(regionPrefetchMap.getRegionName(), regionPrefetchMap.getRegionCachedSize()));
    });
    return hfileMap;
  }
}