/*
 * Copyright 2021 TiKV Project Authors.
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 *
 */

package org.tikv.common.codec;

import static com.google.common.base.Preconditions.checkArgument;

import com.google.protobuf.ByteString;
import java.util.ArrayList;
import java.util.Iterator;
import java.util.List;
import org.tikv.common.Snapshot;
import org.tikv.common.codec.Codec.BytesCodec;
import org.tikv.common.codec.Codec.IntegerCodec;
import org.tikv.common.exception.TiClientInternalException;
import org.tikv.common.util.Pair;
import org.tikv.kvproto.Kvrpcpb.KvPair;

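/**
 * Helpers for encoding and decoding schema metadata keys stored under the meta ({@code 'm'})
 * prefix.
 *
 * <p>String-typed entries are laid out as meta prefix + encoded key + {@code 's'} flag, hash
 * fields as meta prefix + encoded key + {@code 'h'} flag + encoded field, and hash metadata as
 * meta prefix + encoded key + {@code 'H'} flag; key and field bytes go through
 * {@link BytesCodec} and the type flag through {@link IntegerCodec}.
 *
 * <p>Illustrative (hypothetical) usage, assuming a {@code snapshot} obtained elsewhere:
 *
 * <pre>{@code
 * // List all fields (e.g. table entries) stored under the hash key for database 1.
 * List<Pair<ByteString, ByteString>> fields =
 *     MetaCodec.hashGetFields(MetaCodec.encodeDatabaseID(1), snapshot);
 * }</pre>
 */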
public class MetaCodec {
  public static final String ENCODED_DB_PREFIX = "DB";
  public static final String KEY_TID = "TID";
  private static final byte[] META_PREFIX = new byte[] {'m'};
  private static final byte HASH_DATA_FLAG = 'h';
  private static final byte HASH_META_FLAG = 'H';
  private static final byte STR_DATA_FLAG = 's';
  public static ByteString KEY_DBs = ByteString.copyFromUtf8("DBs");
  public static String KEY_TABLE = "Table";
  public static ByteString KEY_SCHEMA_VERSION = ByteString.copyFromUtf8("SchemaVersionKey");

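  /** Writes a string-typed meta key into {@code cdo}: meta prefix, encoded key, 's' type flag. */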
  public static void encodeStringDataKey(CodecDataOutput cdo, byte[] key) {
    cdo.write(META_PREFIX);
    BytesCodec.writeBytes(cdo, key);
    IntegerCodec.writeULong(cdo, STR_DATA_FLAG);
  }

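  /** Writes a hash-field meta key into {@code cdo}: meta prefix, encoded key, 'h' flag, encoded field. */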
  public static void encodeHashDataKey(CodecDataOutput cdo, byte[] key, byte[] field) {
    cdo.write(META_PREFIX);
    BytesCodec.writeBytes(cdo, key);
    IntegerCodec.writeULong(cdo, HASH_DATA_FLAG);
    BytesCodec.writeBytes(cdo, field);
  }

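  /** Writes a hash metadata key (meta prefix, encoded key, 'H' flag) into {@code cdo} and returns it. */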
  public static ByteString encodeHashMetaKey(CodecDataOutput cdo, byte[] key) {
    cdo.write(META_PREFIX);
    BytesCodec.writeBytes(cdo, key);
    IntegerCodec.writeULong(cdo, HASH_META_FLAG);
    return cdo.toByteString();
  }

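  /** Writes the prefix shared by all fields of a hash key: meta prefix, encoded key, 'h' flag. */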
  public static void encodeHashDataKeyPrefix(CodecDataOutput cdo, byte[] key) {
    cdo.write(META_PREFIX);
    BytesCodec.writeBytes(cdo, key);
    IntegerCodec.writeULong(cdo, HASH_DATA_FLAG);
  }

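  /**
   * Decodes an encoded hash data key back into its (key, field) pair.
   *
   * @throws IllegalArgumentException if {@code rawKey} does not start with the meta prefix
   * @throws TiClientInternalException if the type flag is not the hash data flag 'h'
   */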
  public static Pair<ByteString, ByteString> decodeHashDataKey(ByteString rawKey) {
    checkArgument(
        KeyUtils.hasPrefix(rawKey, ByteString.copyFrom(META_PREFIX)),
        "invalid encoded hash data key prefix: " + new String(META_PREFIX));
    CodecDataInput cdi = new CodecDataInput(rawKey.toByteArray());
    cdi.skipBytes(META_PREFIX.length);
    byte[] key = BytesCodec.readBytes(cdi);
    long typeFlag = IntegerCodec.readULong(cdi);
    if (typeFlag != HASH_DATA_FLAG) {
      throw new TiClientInternalException("Invalid hash data flag: " + typeFlag);
    }
    byte[] field = BytesCodec.readBytes(cdi);
    return Pair.create(ByteString.copyFrom(key), ByteString.copyFrom(field));
  }

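  /** Builds the {@code TID:<tableId>} meta field name for the given table ID. */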
  public static ByteString autoTableIDKey(long tableId) {
    return ByteString.copyFrom(String.format("%s:%d", KEY_TID, tableId).getBytes());
  }

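  /** Builds the {@code Table:<tableId>} meta field name for the given table ID. */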
  public static ByteString tableKey(long tableId) {
    return ByteString.copyFrom(String.format("%s:%d", KEY_TABLE, tableId).getBytes());
  }

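  /** Builds the {@code DB:<id>} meta key name for the given database ID. */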
  public static ByteString encodeDatabaseID(long id) {
    return ByteString.copyFrom(String.format("%s:%d", ENCODED_DB_PREFIX, id).getBytes());
  }

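  /** Reads the value of {@code field} under hash {@code key} from the given snapshot. */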
  public static ByteString hashGet(ByteString key, ByteString field, Snapshot snapshot) {
    CodecDataOutput cdo = new CodecDataOutput();
    MetaCodec.encodeHashDataKey(cdo, key.toByteArray(), field.toByteArray());
    return snapshot.get(cdo.toByteString());
  }

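  /** Reads the value of the string-typed meta {@code key} from the given snapshot. */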
  public static ByteString bytesGet(ByteString key, Snapshot snapshot) {
    CodecDataOutput cdo = new CodecDataOutput();
    MetaCodec.encodeStringDataKey(cdo, key.toByteArray());
    return snapshot.get(cdo.toByteString());
  }

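  /**
   * Scans all (field, value) pairs stored under hash {@code key} by prefix-scanning the snapshot
   * and decoding each returned key back into its field name; null or key-less pairs are skipped.
   */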
  public static List<Pair<ByteString, ByteString>> hashGetFields(
      ByteString key, Snapshot snapshot) {
    CodecDataOutput cdo = new CodecDataOutput();
    MetaCodec.encodeHashDataKeyPrefix(cdo, key.toByteArray());
    ByteString encodedKey = cdo.toByteString();

    Iterator<KvPair> iterator = snapshot.scanPrefix(encodedKey);
    List<Pair<ByteString, ByteString>> fields = new ArrayList<>();
    while (iterator.hasNext()) {
      KvPair kv = iterator.next();
      if (kv == null || kv.getKey() == null) {
        continue;
      }
      fields.add(Pair.create(MetaCodec.decodeHashDataKey(kv.getKey()).second, kv.getValue()));
    }

    return fields;
  }
}