/*
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package org.apache.hadoop.hbase.io.hfile.bucket;

import java.io.IOException;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
import java.util.function.Function;
import org.apache.hadoop.hbase.io.ByteBuffAllocator;
import org.apache.hadoop.hbase.io.ByteBuffAllocator.Recycler;
import org.apache.hadoop.hbase.io.hfile.BlockCacheKey;
import org.apache.hadoop.hbase.io.hfile.BlockPriority;
import org.apache.hadoop.hbase.io.hfile.BlockType;
import org.apache.hadoop.hbase.io.hfile.CacheableDeserializerIdManager;
import org.apache.hadoop.hbase.io.hfile.HFileBlock;
import org.apache.yetus.audience.InterfaceAudience;

import org.apache.hbase.thirdparty.com.google.protobuf.ByteString;

import org.apache.hadoop.hbase.shaded.protobuf.generated.BucketCacheProtos;

@InterfaceAudience.Private
final class BucketProtoUtils {
  private BucketProtoUtils() {
  }
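
  /**
   * Serializes the runtime state of a {@link BucketCache} into a protobuf
   * {@code BucketCacheEntry} so it can be persisted and restored across restarts. The snapshot
   * records the cache capacity, the IOEngine and backing-map class names, the registered
   * deserializers, the full backing map, and a checksum of the persistent IOEngine contents.
   */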
  static BucketCacheProtos.BucketCacheEntry toPB(BucketCache cache) {
    return BucketCacheProtos.BucketCacheEntry.newBuilder().setCacheCapacity(cache.getMaxSize())
      .setIoClass(cache.ioEngine.getClass().getName())
      .setMapClass(cache.backingMap.getClass().getName())
      .putAllDeserializers(CacheableDeserializerIdManager.save())
      .setBackingMap(BucketProtoUtils.toPB(cache.backingMap))
      .setChecksum(ByteString
        .copyFrom(((PersistentIOEngine) cache.ioEngine).calculateChecksum(cache.getAlgorithm())))
      .build();
  }

  private static BucketCacheProtos.BackingMap toPB(Map<BlockCacheKey, BucketEntry> backingMap) {
    BucketCacheProtos.BackingMap.Builder builder = BucketCacheProtos.BackingMap.newBuilder();
    for (Map.Entry<BlockCacheKey, BucketEntry> entry : backingMap.entrySet()) {
      builder.addEntry(BucketCacheProtos.BackingMapEntry.newBuilder().setKey(toPB(entry.getKey()))
        .setValue(toPB(entry.getValue())).build());
    }
    return builder.build();
  }

  private static BucketCacheProtos.BlockCacheKey toPB(BlockCacheKey key) {
    return BucketCacheProtos.BlockCacheKey.newBuilder().setHfilename(key.getHfileName())
      .setOffset(key.getOffset()).setPrimaryReplicaBlock(key.isPrimary())
      .setBlockType(toPB(key.getBlockType())).build();
  }

  private static BucketCacheProtos.BlockType toPB(BlockType blockType) {
    switch (blockType) {
      case DATA:
        return BucketCacheProtos.BlockType.data;
      case META:
        return BucketCacheProtos.BlockType.meta;
      case TRAILER:
        return BucketCacheProtos.BlockType.trailer;
      case INDEX_V1:
        return BucketCacheProtos.BlockType.index_v1;
      case FILE_INFO:
        return BucketCacheProtos.BlockType.file_info;
      case LEAF_INDEX:
        return BucketCacheProtos.BlockType.leaf_index;
      case ROOT_INDEX:
        return BucketCacheProtos.BlockType.root_index;
      case BLOOM_CHUNK:
        return BucketCacheProtos.BlockType.bloom_chunk;
      case ENCODED_DATA:
        return BucketCacheProtos.BlockType.encoded_data;
      case GENERAL_BLOOM_META:
        return BucketCacheProtos.BlockType.general_bloom_meta;
      case INTERMEDIATE_INDEX:
        return BucketCacheProtos.BlockType.intermediate_index;
      case DELETE_FAMILY_BLOOM_META:
        return BucketCacheProtos.BlockType.delete_family_bloom_meta;
      default:
        throw new Error("Unrecognized BlockType.");
    }
  }

  private static BucketCacheProtos.BucketEntry toPB(BucketEntry entry) {
    return BucketCacheProtos.BucketEntry.newBuilder().setOffset(entry.offset())
      .setLength(entry.getLength()).setDeserialiserIndex(entry.deserializerIndex)
      .setAccessCounter(entry.getAccessCounter()).setPriority(toPB(entry.getPriority())).build();
  }

  private static BucketCacheProtos.BlockPriority toPB(BlockPriority p) {
    switch (p) {
      case MULTI:
        return BucketCacheProtos.BlockPriority.multi;
      case MEMORY:
        return BucketCacheProtos.BlockPriority.memory;
      case SINGLE:
        return BucketCacheProtos.BlockPriority.single;
      default:
        throw new Error("Unrecognized BlockPriority.");
    }
  }
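
  /**
   * Rebuilds the in-memory backing map from its persisted protobuf form. Deserializer identifiers
   * are assigned at runtime, so the index stored with each entry is translated to the identifier
   * that the current process registered for {@link HFileBlock.BlockDeserializer}.
   * @throws IOException if an entry references a deserializer that is missing or unknown
   */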
  static ConcurrentHashMap<BlockCacheKey, BucketEntry> fromPB(Map<Integer, String> deserializers,
    BucketCacheProtos.BackingMap backingMap, Function<BucketEntry, Recycler> createRecycler)
    throws IOException {
    ConcurrentHashMap<BlockCacheKey, BucketEntry> result = new ConcurrentHashMap<>();
    for (BucketCacheProtos.BackingMapEntry entry : backingMap.getEntryList()) {
      BucketCacheProtos.BlockCacheKey protoKey = entry.getKey();
      BlockCacheKey key = new BlockCacheKey(protoKey.getHfilename(), protoKey.getOffset(),
        protoKey.getPrimaryReplicaBlock(), fromPb(protoKey.getBlockType()));
      BucketCacheProtos.BucketEntry protoValue = entry.getValue();
      // TODO: We use ByteBuffAllocator.HEAP here because there is no elegant way to get the
      // ByteBuffAllocator that was created by the RpcServer.
      BucketEntry value = new BucketEntry(protoValue.getOffset(), protoValue.getLength(),
        protoValue.getAccessCounter(),
        protoValue.getPriority() == BucketCacheProtos.BlockPriority.memory, createRecycler,
        ByteBuffAllocator.HEAP);
      // This is the deserializer index that was stored with the entry
      int oldIndex = protoValue.getDeserialiserIndex();
      String deserializerClass = deserializers.get(oldIndex);
      if (deserializerClass == null) {
        throw new IOException("Found deserializer index without matching entry.");
      }
      // Convert it to the identifier for the deserializer registered in this runtime
      if (deserializerClass.equals(HFileBlock.BlockDeserializer.class.getName())) {
        int actualIndex = HFileBlock.BLOCK_DESERIALIZER.getDeserializerIdentifier();
        value.deserializerIndex = (byte) actualIndex;
      } else {
        // We could make this more pluggable, but right now HFileBlock is the only implementation
        // of Cacheable outside of tests, so this may never matter.
        throw new IOException("Unknown deserializer class found: " + deserializerClass);
      }
      result.put(key, value);
    }
    return result;
  }

  private static BlockType fromPb(BucketCacheProtos.BlockType blockType) {
    switch (blockType) {
      case data:
        return BlockType.DATA;
      case meta:
        return BlockType.META;
      case trailer:
        return BlockType.TRAILER;
      case index_v1:
        return BlockType.INDEX_V1;
      case file_info:
        return BlockType.FILE_INFO;
      case leaf_index:
        return BlockType.LEAF_INDEX;
      case root_index:
        return BlockType.ROOT_INDEX;
      case bloom_chunk:
        return BlockType.BLOOM_CHUNK;
      case encoded_data:
        return BlockType.ENCODED_DATA;
      case general_bloom_meta:
        return BlockType.GENERAL_BLOOM_META;
      case intermediate_index:
        return BlockType.INTERMEDIATE_INDEX;
      case delete_family_bloom_meta:
        return BlockType.DELETE_FAMILY_BLOOM_META;
      default:
        throw new Error("Unrecognized BlockType.");
    }
  }
}