diff --git a/hadoop-hdds/interface-server/pom.xml b/hadoop-hdds/interface-server/pom.xml
index c38927df5007..b914a3d4ab4c 100644
--- a/hadoop-hdds/interface-server/pom.xml
+++ b/hadoop-hdds/interface-server/pom.xml
@@ -83,6 +83,7 @@
InterSCMProtocol.proto
SCMUpdateProtocol.proto
+ SCMRatisProtocol.proto
target/generated-sources/proto-java-for-ratis
false
@@ -101,6 +102,7 @@
InterSCMProtocol.proto
SCMUpdateProtocol.proto
+ SCMRatisProtocol.proto
target/generated-sources/proto-java-for-protobuf-${protobuf.version}
false
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisRequest.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisRequest.java
index 28fca97e4220..0eeec239c9a1 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisRequest.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisRequest.java
@@ -18,8 +18,6 @@
package org.apache.hadoop.hdds.scm.ha;
import com.google.common.base.Preconditions;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.TextFormat;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hdds.protocol.proto.SCMRatisProtocol.Method;
@@ -29,6 +27,8 @@
import org.apache.hadoop.hdds.scm.ha.io.CodecFactory;
import org.apache.ratis.proto.RaftProtos.StateMachineLogEntryProto;
import org.apache.ratis.protocol.Message;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.TextFormat;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
@@ -111,9 +111,8 @@ public Message encode() throws InvalidProtocolBufferException {
}
methodBuilder.addAllArgs(args);
requestProtoBuilder.setMethod(methodBuilder.build());
- return Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- requestProtoBuilder.build().toByteArray()));
+ final SCMRatisRequestProto requestProto = requestProtoBuilder.build();
+ return Message.valueOf(requestProto.toByteString());
}
/**
@@ -122,7 +121,7 @@ public Message encode() throws InvalidProtocolBufferException {
public static SCMRatisRequest decode(Message message)
throws InvalidProtocolBufferException {
final SCMRatisRequestProto requestProto =
- SCMRatisRequestProto.parseFrom(message.getContent().toByteArray());
+ SCMRatisRequestProto.parseFrom(message.getContent().asReadOnlyByteBuffer());
// proto2 required-equivalent checks
if (!requestProto.hasType()) {
@@ -173,7 +172,7 @@ public static String smProtoToString(StateMachineLogEntryProto proto) {
StringBuilder builder = new StringBuilder();
try {
builder.append(TextFormat.shortDebugString(
- SCMRatisRequestProto.parseFrom(proto.getLogData().toByteArray())));
+ SCMRatisRequestProto.parseFrom(proto.getLogData().asReadOnlyByteBuffer())));
} catch (Throwable ex) {
LOG.error("smProtoToString failed", ex);
builder.append("smProtoToString failed with");
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
index 34e5bea3401d..55f55cab4f5e 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/SCMRatisResponse.java
@@ -17,12 +17,12 @@
package org.apache.hadoop.hdds.scm.ha;
-import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hdds.protocol.proto.SCMRatisProtocol.SCMRatisResponseProto;
import org.apache.hadoop.hdds.scm.ha.io.CodecFactory;
import org.apache.ratis.protocol.Message;
import org.apache.ratis.protocol.RaftClientReply;
import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
@@ -92,7 +92,7 @@ public static SCMRatisResponse decode(RaftClientReply reply)
return new SCMRatisResponse();
}
- final SCMRatisResponseProto responseProto = SCMRatisResponseProto.parseFrom(response.toByteArray());
+ final SCMRatisResponseProto responseProto = SCMRatisResponseProto.parseFrom(response.asReadOnlyByteBuffer());
// proto2 required-equivalent checks
if (!responseProto.hasType()) {
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/BigIntegerCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/BigIntegerCodec.java
index bee5c59f0084..afc7da03fa11 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/BigIntegerCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/BigIntegerCodec.java
@@ -17,9 +17,9 @@
package org.apache.hadoop.hdds.scm.ha.io;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.ProtoUtils;
import java.math.BigInteger;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* Codec for type BigInteger.
@@ -29,7 +29,7 @@ public class BigIntegerCodec implements Codec {
@Override
public ByteString serialize(Object object) {
// BigInteger returns a new byte[].
- return ProtoUtils.unsafeByteString(((BigInteger)object).toByteArray());
+ return UnsafeByteOperations.unsafeWrap(((BigInteger) object).toByteArray());
}
@Override
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/BooleanCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/BooleanCodec.java
index 899d10814a44..2f79e8e9cfe5 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/BooleanCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/BooleanCodec.java
@@ -17,7 +17,7 @@
package org.apache.hadoop.hdds.scm.ha.io;
-import com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
/**
* {@link Codec} for {@code Boolean} objects.
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ByteStringCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ByteStringCodec.java
index d2599a4c1abd..1ffabcf328a6 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ByteStringCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ByteStringCodec.java
@@ -17,23 +17,27 @@
package org.apache.hadoop.hdds.scm.ha.io;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
- * A dummy codec that serializes a ByteString object to ByteString.
+ * {@link Codec} implementation for non-shaded
+ * {@link com.google.protobuf.ByteString} objects.
*/
public class ByteStringCodec implements Codec {
@Override
public ByteString serialize(Object object)
throws InvalidProtocolBufferException {
- return (ByteString) object;
+ return UnsafeByteOperations.unsafeWrap(
+ ((com.google.protobuf.ByteString) object).asReadOnlyByteBuffer());
}
@Override
public Object deserialize(Class<?> type, ByteString value)
throws InvalidProtocolBufferException {
- return value;
+ return com.google.protobuf.UnsafeByteOperations.
+ unsafeWrap(value.asReadOnlyByteBuffer());
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/Codec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/Codec.java
index 49520ab6dd82..1a552283658f 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/Codec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/Codec.java
@@ -17,8 +17,8 @@
package org.apache.hadoop.hdds.scm.ha.io;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
/**
* Codec interface to marshall/unmarshall data to/from {@link ByteString}.
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/CodecFactory.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/CodecFactory.java
index 1cd3ad1bf92f..3141754a53cb 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/CodecFactory.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/CodecFactory.java
@@ -17,9 +17,6 @@
package org.apache.hadoop.hdds.scm.ha.io;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.Message;
import com.google.protobuf.ProtocolMessageEnum;
import java.math.BigInteger;
import java.security.cert.X509Certificate;
@@ -29,6 +26,9 @@
import java.util.Map;
import org.apache.commons.lang3.ClassUtils;
import org.apache.hadoop.hdds.security.symmetric.ManagedSecretKey;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.Message;
/**
* Maps types to the corresponding {@link Codec} implementation.
@@ -38,7 +38,8 @@ public final class CodecFactory {
private static Map<Class<?>, Codec> codecs = new HashMap<>();
static {
- codecs.put(Message.class, new GeneratedMessageCodec());
+ codecs.put(com.google.protobuf.Message.class, new GeneratedMessageCodec());
+ codecs.put(Message.class, new ScmGeneratedMessageCodec());
codecs.put(ProtocolMessageEnum.class, new EnumCodec());
codecs.put(List.class, new ListCodec());
codecs.put(Integer.class, new IntegerCodec());
@@ -47,7 +48,8 @@ public final class CodecFactory {
codecs.put(Boolean.class, new BooleanCodec());
codecs.put(BigInteger.class, new BigIntegerCodec());
codecs.put(X509Certificate.class, new X509CertificateCodec());
- codecs.put(ByteString.class, new ByteStringCodec());
+ codecs.put(com.google.protobuf.ByteString.class, new ByteStringCodec());
+ codecs.put(ByteString.class, new ScmByteStringCodec());
codecs.put(ManagedSecretKey.class, new ManagedSecretKeyCodec());
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/EnumCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/EnumCodec.java
index 32108b2da8b6..24ea6af1424a 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/EnumCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/EnumCodec.java
@@ -18,12 +18,12 @@
package org.apache.hadoop.hdds.scm.ha.io;
import com.google.common.primitives.Ints;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.ProtoUtils;
import com.google.protobuf.ProtocolMessageEnum;
import java.lang.reflect.InvocationTargetException;
import org.apache.hadoop.hdds.scm.ha.ReflectionUtil;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* {@link Codec} for {@link ProtocolMessageEnum} objects.
@@ -34,8 +34,7 @@ public class EnumCodec implements Codec {
public ByteString serialize(Object object)
throws InvalidProtocolBufferException {
// toByteArray returns a new array
- return ProtoUtils.unsafeByteString(Ints.toByteArray(
- ((ProtocolMessageEnum) object).getNumber()));
+ return UnsafeByteOperations.unsafeWrap(Ints.toByteArray(((ProtocolMessageEnum) object).getNumber()));
}
@Override
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/GeneratedMessageCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/GeneratedMessageCodec.java
index b4adf805276c..14a30c874af2 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/GeneratedMessageCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/GeneratedMessageCodec.java
@@ -17,34 +17,36 @@
package org.apache.hadoop.hdds.scm.ha.io;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.Message;
import java.lang.reflect.InvocationTargetException;
import org.apache.hadoop.hdds.scm.ha.ReflectionUtil;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
- * {@link Codec} for {@link Message} objects.
+ * {@link Codec} implementation for non-shaded
+ * {@link com.google.protobuf.Message} objects.
*/
public class GeneratedMessageCodec implements Codec {
@Override
- public ByteString serialize(Object object) {
- return ((Message)object).toByteString();
+ public ByteString serialize(Object object)
+ throws InvalidProtocolBufferException {
+ return UnsafeByteOperations.unsafeWrap(
+ ((Message) object).toByteString().asReadOnlyByteBuffer());
}
@Override
- public Message deserialize(Class<?> type, ByteString value)
+ public Object deserialize(Class<?> type, ByteString value)
throws InvalidProtocolBufferException {
try {
- return (Message) ReflectionUtil.getMethod(type,
- "parseFrom", byte[].class)
+ return ReflectionUtil.getMethod(type, "parseFrom", byte[].class)
.invoke(null, (Object) value.toByteArray());
} catch (NoSuchMethodException | IllegalAccessException
- | InvocationTargetException ex) {
+ | InvocationTargetException ex) {
ex.printStackTrace();
- throw new InvalidProtocolBufferException(
- "Message cannot be decoded: " + ex.getMessage());
+ throw new InvalidProtocolBufferException("Message cannot be decoded: " + ex.getMessage());
}
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/IntegerCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/IntegerCodec.java
index a7d00e535e06..81ccae870525 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/IntegerCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/IntegerCodec.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hdds.scm.ha.io;
import com.google.common.primitives.Ints;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.ProtoUtils;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* Encodes/decodes an integer to a byte string.
@@ -30,7 +30,7 @@ public class IntegerCodec implements Codec {
public ByteString serialize(Object object)
throws InvalidProtocolBufferException {
// toByteArray returns a new array
- return ProtoUtils.unsafeByteString(Ints.toByteArray((Integer) object));
+ return UnsafeByteOperations.unsafeWrap(Ints.toByteArray((Integer) object));
}
@Override
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ListCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ListCodec.java
index 591fb17e321b..798076b6a970 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ListCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ListCodec.java
@@ -17,13 +17,13 @@
package org.apache.hadoop.hdds.scm.ha.io;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
import java.lang.reflect.InvocationTargetException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hdds.protocol.proto.SCMRatisProtocol.ListArgument;
import org.apache.hadoop.hdds.scm.ha.ReflectionUtil;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
/**
* {@link Codec} for {@link List} objects.
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/LongCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/LongCodec.java
index b8c35eae478f..ad5792cb5a18 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/LongCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/LongCodec.java
@@ -18,9 +18,9 @@
package org.apache.hadoop.hdds.scm.ha.io;
import com.google.common.primitives.Longs;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.ProtoUtils;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* {@link Codec} for {@code Long} objects.
@@ -31,7 +31,7 @@ public class LongCodec implements Codec {
public ByteString serialize(Object object)
throws InvalidProtocolBufferException {
// toByteArray returns a new array
- return ProtoUtils.unsafeByteString(Longs.toByteArray((Long) object));
+ return UnsafeByteOperations.unsafeWrap(Longs.toByteArray((Long) object));
}
@Override
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ManagedSecretKeyCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ManagedSecretKeyCodec.java
index ec4a7c261e4d..a82943f730e8 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ManagedSecretKeyCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ManagedSecretKeyCodec.java
@@ -17,10 +17,11 @@
package org.apache.hadoop.hdds.scm.ha.io;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hdds.protocol.proto.SCMSecretKeyProtocolProtos;
import org.apache.hadoop.hdds.security.symmetric.ManagedSecretKey;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* A codec for {@link ManagedSecretKey} objects.
@@ -30,14 +31,20 @@ public class ManagedSecretKeyCodec implements Codec {
public ByteString serialize(Object object)
throws InvalidProtocolBufferException {
ManagedSecretKey secretKey = (ManagedSecretKey) object;
- return secretKey.toProtobuf().toByteString();
+ return UnsafeByteOperations.unsafeWrap(
+ secretKey.toProtobuf().toByteString().asReadOnlyByteBuffer());
}
@Override
public Object deserialize(Class<?> type, ByteString value)
throws InvalidProtocolBufferException {
- SCMSecretKeyProtocolProtos.ManagedSecretKey message =
- SCMSecretKeyProtocolProtos.ManagedSecretKey.parseFrom(value);
- return ManagedSecretKey.fromProtobuf(message);
+ try {
+ SCMSecretKeyProtocolProtos.ManagedSecretKey message =
+ SCMSecretKeyProtocolProtos.ManagedSecretKey.parseFrom(
+ value.asReadOnlyByteBuffer());
+ return ManagedSecretKey.fromProtobuf(message);
+ } catch (com.google.protobuf.InvalidProtocolBufferException e) {
+ throw new InvalidProtocolBufferException("Failed to deserialize value for " + type, e);
+ }
}
}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ScmByteStringCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ScmByteStringCodec.java
new file mode 100644
index 000000000000..2b479ff55595
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ScmByteStringCodec.java
@@ -0,0 +1,39 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.ha.io;
+
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+
+/**
+ * A dummy codec that serializes a ByteString object to ByteString.
+ */
+public class ScmByteStringCodec implements Codec {
+
+ @Override
+ public ByteString serialize(Object object)
+ throws InvalidProtocolBufferException {
+ return (ByteString) object;
+ }
+
+ @Override
+ public Object deserialize(Class<?> type, ByteString value)
+ throws InvalidProtocolBufferException {
+ return value;
+ }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ScmGeneratedMessageCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ScmGeneratedMessageCodec.java
new file mode 100644
index 000000000000..5a8be986fa83
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/ScmGeneratedMessageCodec.java
@@ -0,0 +1,50 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.ha.io;
+
+import java.lang.reflect.InvocationTargetException;
+import org.apache.hadoop.hdds.scm.ha.ReflectionUtil;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.Message;
+
+/**
+ * {@link Codec} for {@link Message} objects.
+ */
+public class ScmGeneratedMessageCodec implements Codec {
+
+ @Override
+ public ByteString serialize(Object object) throws InvalidProtocolBufferException {
+ return ((Message)object).toByteString();
+ }
+
+ @Override
+ public Message deserialize(Class<?> type, ByteString value)
+ throws InvalidProtocolBufferException {
+ try {
+ return (Message) ReflectionUtil.getMethod(type,
+ "parseFrom", byte[].class)
+ .invoke(null, (Object) value.toByteArray());
+ } catch (NoSuchMethodException | IllegalAccessException
+ | InvocationTargetException ex) {
+ ex.printStackTrace();
+ throw new InvalidProtocolBufferException(
+ "Message cannot be decoded: " + ex.getMessage());
+ }
+ }
+}
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/StringCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/StringCodec.java
index 47d8917872dd..c53e4e82f941 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/StringCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/StringCodec.java
@@ -19,8 +19,8 @@
import static java.nio.charset.StandardCharsets.UTF_8;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.ProtoUtils;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* {@link Codec} for {@code String} objects.
@@ -29,7 +29,7 @@ public class StringCodec implements Codec {
@Override
public ByteString serialize(Object object) {
// getBytes returns a new array
- return ProtoUtils.unsafeByteString(((String) object).getBytes(UTF_8));
+ return UnsafeByteOperations.unsafeWrap(((String) object).getBytes(UTF_8));
}
@Override
diff --git a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/X509CertificateCodec.java b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/X509CertificateCodec.java
index e68eaf481b20..408e69e9a29b 100644
--- a/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/X509CertificateCodec.java
+++ b/hadoop-hdds/server-scm/src/main/java/org/apache/hadoop/hdds/scm/ha/io/X509CertificateCodec.java
@@ -19,11 +19,11 @@
import static java.nio.charset.StandardCharsets.UTF_8;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.ProtoUtils;
import java.security.cert.X509Certificate;
import org.apache.hadoop.hdds.security.x509.certificate.utils.CertificateCodec;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
/**
* Codec for type X509Certificate.
@@ -36,7 +36,7 @@ public ByteString serialize(Object object)
String certString =
CertificateCodec.getPEMEncodedString((X509Certificate) object);
// getBytes returns a new array
- return ProtoUtils.unsafeByteString(certString.getBytes(UTF_8));
+ return UnsafeByteOperations.unsafeWrap(certString.getBytes(UTF_8));
} catch (Exception ex) {
throw new InvalidProtocolBufferException(
"X509Certificate cannot be decoded: " + ex.getMessage());
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/SCMRatisProtocolCompatibilityTestUtil.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/SCMRatisProtocolCompatibilityTestUtil.java
new file mode 100644
index 000000000000..aaf7ba6e1f4b
--- /dev/null
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/SCMRatisProtocolCompatibilityTestUtil.java
@@ -0,0 +1,89 @@
+/*
+ * Licensed to the Apache Software Foundation (ASF) under one or more
+ * contributor license agreements. See the NOTICE file distributed with
+ * this work for additional information regarding copyright ownership.
+ * The ASF licenses this file to You under the Apache License, Version 2.0
+ * (the "License"); you may not use this file except in compliance with
+ * the License. You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package org.apache.hadoop.hdds.scm.ha;
+
+import static org.apache.hadoop.hdds.scm.ha.TestSCMRatisProtocolCompatibility.RANDOM;
+import static org.apache.hadoop.hdds.scm.ha.TestSCMRatisProtocolCompatibility.TYPES;
+import static org.junit.jupiter.api.Assertions.assertEquals;
+
+import com.google.protobuf.ByteString;
+import java.nio.ByteBuffer;
+import org.apache.hadoop.hdds.protocol.proto.testing.Proto2SCMRatisProtocolForTesting;
+
+/**
+ * Tests proto2 to proto3 compatibility for SCMRatisProtocol.
+ */
+public final class SCMRatisProtocolCompatibilityTestUtil {
+
+ private SCMRatisProtocolCompatibilityTestUtil() {
+ }
+
+ static ByteString randomValueProto2(Class clazz) {
+ if (clazz == String.class) {
+ final int length = RANDOM.nextInt(3);
+ final StringBuilder builder = new StringBuilder(length);
+ for (int i = 0; i < length; i++) {
+ builder.append(RANDOM.nextInt(10));
+ }
+ final String string = builder.toString();
+ assertEquals(length, string.length());
+ return ByteString.copyFromUtf8(string);
+ } else if (clazz == Integer.class) {
+ final ByteBuffer buffer = ByteBuffer.allocate(4);
+ buffer.putInt(RANDOM.nextInt());
+ return ByteString.copyFrom(buffer.array());
+ } else if (clazz == byte[].class) {
+ final byte[] bytes = new byte[RANDOM.nextInt(3)];
+ RANDOM.nextBytes(bytes);
+ return ByteString.copyFrom(bytes);
+ }
+ throw new IllegalArgumentException("Unrecognized class " + clazz);
+ }
+
+ static Proto2SCMRatisProtocolForTesting.MethodArgument randomProto2MethodArgument() {
+ final Class<?> type = TYPES[RANDOM.nextInt(TYPES.length)];
+ return Proto2SCMRatisProtocolForTesting.MethodArgument.newBuilder()
+ .setType(type.getName())
+ .setValue(randomValueProto2(type))
+ .build();
+ }
+
+ static Proto2SCMRatisProtocolForTesting.SCMRatisResponseProto randomProto2SCMRatisResponseProto() {
+ final Class<?> type = TYPES[RANDOM.nextInt(TYPES.length)];
+ return Proto2SCMRatisProtocolForTesting.SCMRatisResponseProto.newBuilder()
+ .setType(type.getName())
+ .setValue(randomValueProto2(type))
+ .build();
+ }
+
+ static Proto2SCMRatisProtocolForTesting.SCMRatisRequestProto proto2Request(
+ String name, Proto2SCMRatisProtocolForTesting.RequestType type, int numArgs) {
+ // Build request using proto2 (test-only schema)
+ final Proto2SCMRatisProtocolForTesting.Method.Builder b =
+ Proto2SCMRatisProtocolForTesting.Method.newBuilder()
+ .setName(name);
+ for (int i = 0; i < numArgs; i++) {
+ b.addArgs(randomProto2MethodArgument());
+ }
+
+ return Proto2SCMRatisProtocolForTesting.SCMRatisRequestProto.newBuilder()
+ .setType(type)
+ .setMethod(b)
+ .build();
+ }
+}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisProtocolCompatibility.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisProtocolCompatibility.java
index 3d58cecb37c0..043f9f4e5631 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisProtocolCompatibility.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisProtocolCompatibility.java
@@ -17,15 +17,17 @@
package org.apache.hadoop.hdds.scm.ha;
+import static org.apache.hadoop.hdds.scm.ha.SCMRatisProtocolCompatibilityTestUtil.proto2Request;
+import static org.apache.hadoop.hdds.scm.ha.SCMRatisProtocolCompatibilityTestUtil.randomProto2SCMRatisResponseProto;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertTrue;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.TextFormat;
import java.nio.ByteBuffer;
import java.util.Random;
import org.apache.hadoop.hdds.protocol.proto.SCMRatisProtocol;
import org.apache.hadoop.hdds.protocol.proto.testing.Proto2SCMRatisProtocolForTesting;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.junit.jupiter.api.Test;
/**
@@ -35,7 +37,7 @@ public class TestSCMRatisProtocolCompatibility {
static final Random RANDOM = new Random();
static final Class<?>[] TYPES = {String.class, Integer.class, byte[].class};
- static ByteString randomValue(Class clazz) {
+ static ByteString randomValueProto3(Class clazz) {
if (clazz == String.class) {
final int length = RANDOM.nextInt(3);
final StringBuilder builder = new StringBuilder(length);
@@ -57,53 +59,22 @@ static ByteString randomValue(Class clazz) {
throw new IllegalArgumentException("Unrecognized class " + clazz);
}
- static Proto2SCMRatisProtocolForTesting.MethodArgument randomProto2MethodArgument() {
- final Class<?> type = TYPES[RANDOM.nextInt(TYPES.length)];
- return Proto2SCMRatisProtocolForTesting.MethodArgument.newBuilder()
- .setType(type.getName())
- .setValue(randomValue(type))
- .build();
- }
-
static SCMRatisProtocol.MethodArgument randomProto3MethodArgument() {
final Class<?> type = TYPES[RANDOM.nextInt(TYPES.length)];
return SCMRatisProtocol.MethodArgument.newBuilder()
.setType(type.getName())
- .setValue(randomValue(type))
+ .setValue(randomValueProto3(type))
.build();
}
- static Proto2SCMRatisProtocolForTesting.SCMRatisResponseProto randomProto2SCMRatisResponseProto() {
- final Class<?> type = TYPES[RANDOM.nextInt(TYPES.length)];
- return Proto2SCMRatisProtocolForTesting.SCMRatisResponseProto.newBuilder()
- .setType(type.getName())
- .setValue(randomValue(type))
- .build();
- }
-
static SCMRatisProtocol.SCMRatisResponseProto randomProto3SCMRatisResponseProto() {
final Class<?> type = TYPES[RANDOM.nextInt(TYPES.length)];
return SCMRatisProtocol.SCMRatisResponseProto.newBuilder()
.setType(type.getName())
- .setValue(randomValue(type))
+ .setValue(randomValueProto3(type))
.build();
}
- static Proto2SCMRatisProtocolForTesting.SCMRatisRequestProto proto2Request(
- String name, Proto2SCMRatisProtocolForTesting.RequestType type, int numArgs) {
- // Build request using proto2 (test-only schema)
- final Proto2SCMRatisProtocolForTesting.Method.Builder b = Proto2SCMRatisProtocolForTesting.Method.newBuilder()
- .setName(name);
- for (int i = 0; i < numArgs; i++) {
- b.addArgs(randomProto2MethodArgument());
- }
-
- return Proto2SCMRatisProtocolForTesting.SCMRatisRequestProto.newBuilder()
- .setType(type)
- .setMethod(b)
- .build();
- }
-
static SCMRatisProtocol.SCMRatisRequestProto proto3Request(
String name, SCMRatisProtocol.RequestType type, int numArgs) {
final SCMRatisProtocol.Method.Builder b = SCMRatisProtocol.Method.newBuilder()
@@ -155,20 +126,24 @@ static void runTestProto2RequestCanBeParsedByProto3(
}
assertEquals(proto2.toString(), proto3.toString());
- assertEquals(TextFormat.shortDebugString(proto2), TextFormat.shortDebugString(proto3));
+ assertShortDebugString(proto2, proto3);
assertEquals(proto2, Proto2SCMRatisProtocolForTesting.SCMRatisRequestProto.parseFrom(proto3.toByteArray()));
}
+ private static void assertByteStringEquals(com.google.protobuf.ByteString proto2, ByteString proto3) {
+ assertEquals(UnsafeByteOperations.unsafeWrap(proto2.asReadOnlyByteBuffer()), proto3);
+ }
+
static void assertMethodArgument(Proto2SCMRatisProtocolForTesting.MethodArgument proto2,
SCMRatisProtocol.MethodArgument proto3) {
- assertEquals(proto2.getValue(), proto3.getValue());
assertEquals(proto2.getType(), proto3.getType());
+ assertByteStringEquals(proto2.getValue(), proto3.getValue());
}
static void assertSCMRatisResponseProto(Proto2SCMRatisProtocolForTesting.SCMRatisResponseProto proto2,
SCMRatisProtocol.SCMRatisResponseProto proto3) {
- assertEquals(proto2.getValue(), proto3.getValue());
assertEquals(proto2.getType(), proto3.getType());
+ assertByteStringEquals(proto2.getValue(), proto3.getValue());
}
/**
@@ -195,7 +170,7 @@ static void runTestProto2ResponseCanBeParsedByProto3() throws Exception {
assertSCMRatisResponseProto(proto2, proto3);
assertEquals(proto2.toString(), proto3.toString());
- assertEquals(TextFormat.shortDebugString(proto2), TextFormat.shortDebugString(proto3));
+ assertShortDebugString(proto2, proto3);
assertEquals(proto2, Proto2SCMRatisProtocolForTesting.SCMRatisResponseProto.parseFrom(proto3.toByteArray()));
}
@@ -246,11 +221,12 @@ static void runTestProto3RequestCanBeParsedByProto2(
for (int i = 0; i < numArgs; i++) {
assertEquals(proto3.getMethod().getArgs(i).getType(), proto2.getMethod().getArgs(i).getType());
- assertEquals(proto3.getMethod().getArgs(i).getValue(), proto2.getMethod().getArgs(i).getValue());
+ assertByteStringEquals(proto2.getMethod().getArgs(i).getValue(),
+ proto3.getMethod().getArgs(i).getValue());
}
assertEquals(proto2.toString(), proto3.toString());
- assertEquals(TextFormat.shortDebugString(proto2), TextFormat.shortDebugString(proto3));
+ assertShortDebugString(proto2, proto3);
assertEquals(proto3, SCMRatisProtocol.SCMRatisRequestProto.parseFrom(proto2.toByteArray()));
}
@@ -268,10 +244,16 @@ static void runTestProto3ResponseCanBeParsedByProto2() throws Exception {
Proto2SCMRatisProtocolForTesting.SCMRatisResponseProto.parseFrom(proto3.toByteArray());
assertEquals(proto3.getType(), proto2.getType());
- assertEquals(proto3.getValue(), proto2.getValue());
+ assertByteStringEquals(proto2.getValue(), proto3.getValue());
assertEquals(proto2.toString(), proto3.toString());
- assertEquals(TextFormat.shortDebugString(proto2), TextFormat.shortDebugString(proto3));
+ assertShortDebugString(proto2, proto3);
assertEquals(proto3, SCMRatisProtocol.SCMRatisResponseProto.parseFrom(proto2.toByteArray()));
}
+
+ private static void assertShortDebugString(com.google.protobuf.MessageOrBuilder proto2,
+ org.apache.ratis.thirdparty.com.google.protobuf.MessageOrBuilder proto3) {
+ assertEquals(com.google.protobuf.TextFormat.shortDebugString(proto2),
+ org.apache.ratis.thirdparty.com.google.protobuf.TextFormat.shortDebugString(proto3));
+ }
}
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisRequest.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisRequest.java
index baf5d379ebfd..2f4ebf943695 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisRequest.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisRequest.java
@@ -22,8 +22,6 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
import java.util.ArrayList;
import java.util.List;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
@@ -31,6 +29,9 @@
import org.apache.hadoop.hdds.scm.ha.io.ListCodec;
import org.apache.hadoop.hdds.scm.pipeline.PipelineID;
import org.apache.ratis.protocol.Message;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.junit.jupiter.api.Test;
/**
@@ -105,8 +106,7 @@ public void testDecodeMissingRequestTypeShouldFail() throws Exception {
.build();
Message msg = Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- proto.toByteArray()));
+ UnsafeByteOperations.unsafeWrap(proto.toByteString().asReadOnlyByteBuffer()));
InvalidProtocolBufferException ex = assertThrows(
InvalidProtocolBufferException.class,
@@ -124,8 +124,7 @@ public void testDecodeMissingMethodShouldFail() throws Exception {
.build();
Message msg = Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- proto.toByteArray()));
+ UnsafeByteOperations.unsafeWrap(proto.toByteString().asReadOnlyByteBuffer()));
InvalidProtocolBufferException ex = assertThrows(
InvalidProtocolBufferException.class,
@@ -145,8 +144,7 @@ public void testDecodeMissingMethodNameShouldFail() throws Exception {
.build();
Message msg = Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- proto.toByteArray()));
+ UnsafeByteOperations.unsafeWrap(proto.toByteString().asReadOnlyByteBuffer()));
InvalidProtocolBufferException ex = assertThrows(
InvalidProtocolBufferException.class,
@@ -173,8 +171,7 @@ public void testDecodeMissingArgumentTypeShouldFail() throws Exception {
.build();
Message msg = Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- proto.toByteArray()));
+ UnsafeByteOperations.unsafeWrap(proto.toByteString().asReadOnlyByteBuffer()));
InvalidProtocolBufferException ex = assertThrows(
InvalidProtocolBufferException.class,
@@ -201,8 +198,7 @@ public void testDecodeMissingArgumentValueShouldFail() throws Exception {
.build();
Message msg = Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- proto.toByteArray()));
+ UnsafeByteOperations.unsafeWrap(proto.toByteString().asReadOnlyByteBuffer()));
InvalidProtocolBufferException ex = assertThrows(
InvalidProtocolBufferException.class,
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisResponse.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisResponse.java
index b0c67a1460e0..b65aa3ac885c 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisResponse.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/TestSCMRatisResponse.java
@@ -26,8 +26,6 @@
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
import org.apache.hadoop.hdds.protocol.proto.SCMRatisProtocol;
import org.apache.ratis.protocol.ClientId;
import org.apache.ratis.protocol.Message;
@@ -37,6 +35,9 @@
import org.apache.ratis.protocol.RaftPeerId;
import org.apache.ratis.protocol.exceptions.LeaderNotReadyException;
import org.apache.ratis.protocol.exceptions.RaftException;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
@@ -108,8 +109,7 @@ public void testResponseDecodeMissingTypeShouldFail() throws Exception {
RaftClientReply reply = mock(RaftClientReply.class);
when(reply.isSuccess()).thenReturn(true);
when(reply.getMessage()).thenReturn(Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- proto.toByteArray())));
+ UnsafeByteOperations.unsafeWrap(proto.toByteString().asReadOnlyByteBuffer())));
InvalidProtocolBufferException ex = assertThrows(
InvalidProtocolBufferException.class,
@@ -130,8 +130,7 @@ public void testResponseDecodeMissingValueShouldFail() throws Exception {
RaftClientReply reply = mock(RaftClientReply.class);
when(reply.isSuccess()).thenReturn(true);
when(reply.getMessage()).thenReturn(Message.valueOf(
- org.apache.ratis.thirdparty.com.google.protobuf.ByteString.copyFrom(
- proto.toByteArray())));
+ UnsafeByteOperations.unsafeWrap(proto.toByteString().asReadOnlyByteBuffer())));
InvalidProtocolBufferException ex = assertThrows(
InvalidProtocolBufferException.class,
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/io/TestBigIntegerCodec.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/io/TestBigIntegerCodec.java
index 096b1e7f506a..4142a69eb575 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/io/TestBigIntegerCodec.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/io/TestBigIntegerCodec.java
@@ -19,8 +19,8 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
-import com.google.protobuf.ByteString;
import java.math.BigInteger;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
import org.junit.jupiter.api.Test;
/**
diff --git a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/io/TestX509CertificateCodec.java b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/io/TestX509CertificateCodec.java
index 2ae1aac94eee..8b5ad7d8fa11 100644
--- a/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/io/TestX509CertificateCodec.java
+++ b/hadoop-hdds/server-scm/src/test/java/org/apache/hadoop/hdds/scm/ha/io/TestX509CertificateCodec.java
@@ -21,14 +21,14 @@
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertThrows;
-import com.google.protobuf.ByteString;
-import com.google.protobuf.InvalidProtocolBufferException;
-import com.google.protobuf.ProtoUtils;
import java.security.KeyPair;
import java.security.cert.X509Certificate;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.security.SecurityConfig;
import org.apache.hadoop.security.ssl.KeyStoreTestUtil;
+import org.apache.ratis.thirdparty.com.google.protobuf.ByteString;
+import org.apache.ratis.thirdparty.com.google.protobuf.InvalidProtocolBufferException;
+import org.apache.ratis.thirdparty.com.google.protobuf.UnsafeByteOperations;
import org.junit.jupiter.api.BeforeAll;
import org.junit.jupiter.api.Test;
@@ -63,7 +63,7 @@ public void codec() throws Exception {
public void testCodecError() {
X509CertificateCodec x509CertificateCodec = new X509CertificateCodec();
- final ByteString byteString = ProtoUtils.unsafeByteString("dummy".getBytes(UTF_8));
+ final ByteString byteString = UnsafeByteOperations.unsafeWrap("dummy".getBytes(UTF_8));
assertThrows(InvalidProtocolBufferException.class, () ->
x509CertificateCodec.deserialize(X509Certificate.class, byteString));