Analyzing a Kryo Deserialization Issue in Spark SQL

1 Problem Description

When Spark SQL executes a Hive UDF, a NullPointerException (NPE) is thrown and the job terminates abnormally. The stack trace of the NPE is as follows:

Serialization trace:
fields (com.xiaoju.dataservice.api.hive.udf.LoadFromDataServiceMetricSetUDTF)
    at com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:144)
    at com.esotericsoftware.kryo.serializers.FieldSerializer.read(FieldSerializer.java:551)
    at com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:686)
    at org.apache.spark.sql.hive.HiveShim$HiveFunctionWrapper.deserializeObjectByKryo(HiveShim.scala:155)
    at org.apache.spark.sql.hive.HiveShim$HiveFunctionWrapper.deserializePlan(HiveShim.scala:171)
    at org.apache.spark.sql.hive.HiveShim$HiveFunctionWrapper.readExternal(HiveShim.scala:210)
    at java.io.ObjectInputStream.readExternalData(ObjectInputStream.java:1842)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1799)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
    at scala.collection.immutable.List$SerializationProxy.readObject(List.scala:479)
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
    at java.lang.reflect.Method.invoke(Method.java:498)
    at java.io.ObjectStreamClass.invokeReadObject(ObjectStreamClass.java:1058)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1900)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.defaultReadFields(ObjectInputStream.java:2000)
    at java.io.ObjectInputStream.readSerialData(ObjectInputStream.java:1924)
    at java.io.ObjectInputStream.readOrdinaryObject(ObjectInputStream.java:1801)
    at java.io.ObjectInputStream.readObject0(ObjectInputStream.java:1351)
    at java.io.ObjectInputStream.readObject(ObjectInputStream.java:371)
    at org.apache.spark.serializer.JavaDeserializationStream.readObject(JavaSerializer.scala:75)
    at org.apache.spark.serializer.JavaSerializerInstance.deserialize(JavaSerializer.scala:114)
    at org.apache.spark.scheduler.ResultTask.runTask(ResultTask.scala:80)
    at org.apache.spark.scheduler.Task.run(Task.scala:108)
    at org.apache.spark.executor.Executor$TaskRunner.run(Executor.scala:338)
    at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1142)
    at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:617)
    at java.lang.Thread.run(Thread.java:745)
Caused by: java.lang.NullPointerException
    at java.util.ArrayList.ensureExplicitCapacity(ArrayList.java:234)
    at java.util.ArrayList.ensureCapacity(ArrayList.java:218)
    at com.esotericsoftware.kryo.serializers.CollectionSerializer.read(CollectionSerializer.java:114)
    at com.esotericsoftware.kryo.serializers.CollectionSerializer.read(CollectionSerializer.java:40)
    at com.esotericsoftware.kryo.Kryo.readObject(Kryo.java:708)
    at com.esotericsoftware.kryo.serializers.ObjectField.read(ObjectField.java:125)

2 Problem Analysis

2.1 Direct Cause of the NPE

From the stack trace above, the NPE occurs while Kryo is deserializing an ArrayList object.

Kryo is a fast and efficient serialization framework. It does not impose a schema or any special requirements on the data being serialized; all handling is delegated to Serializers. Different data types are handled by different Serializers, and users can also register custom Serializers. For collection types such as ArrayList, Kryo provides CollectionSerializer by default.
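As a baseline, here is a minimal sketch of the normal path (our own example, assuming only a Kryo 3.x dependency, not code from the failing job): an out-of-the-box Kryo picks CollectionSerializer for ArrayList and creates the list through its zero-argument constructor, so a round trip succeeds.

    import java.util.ArrayList;
    import java.util.Arrays;

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;

    public class ArrayListRoundTrip {
        public static void main(String[] args) {
            Kryo kryo = new Kryo();
            // Collections fall back to the built-in CollectionSerializer.
            System.out.println(kryo.getSerializer(ArrayList.class).getClass().getSimpleName());

            Output output = new Output(1024);
            kryo.writeObject(output, new ArrayList<>(Arrays.asList("a", "b", "c")));
            output.close();

            // readObject instantiates the ArrayList (by default via its no-arg constructor)
            // and then calls ensureCapacity before filling in the elements.
            ArrayList<?> restored = kryo.readObject(new Input(output.toBytes()), ArrayList.class);
            System.out.println(restored); // [a, b, c]
        }
    }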

at java.util.ArrayList.ensureExplicitCapacity(ArrayList.java:234)
at java.util.ArrayList.ensureCapacity(ArrayList.java:218)
at com.esotericsoftware.kryo.serializers.CollectionSerializer.read(CollectionSerializer.java:114)

Working from these frames and stepping through the source, we found that CollectionSerializer#read creates the ArrayList during deserialization and then calls ensureCapacity to size it, and that is where the NPE is thrown. The debugger shows that the elementData field of the newly created ArrayList was never initialized:


[Debugger screenshot: the freshly created ArrayList instance with elementData = null]

Every ArrayList constructor, however, initializes ArrayList#elementData. The only way the debugger could show elementData as null is if no constructor was invoked when the object was created, so the investigation shifted to how the ArrayList object is instantiated.

    /**
     * Constructs an empty list with an initial capacity of ten.
     */
    public ArrayList() {
        this.elementData = DEFAULTCAPACITY_EMPTY_ELEMENTDATA;
    }

    // The other constructors likewise initialize elementData.

2.2 How the ArrayList Object Is Created

As noted above, the created ArrayList's elementData field was null even though every ArrayList constructor initializes it. The likely explanation is that the object was created without invoking any constructor. With that hypothesis, we debugged the program again.

    // Kryo#newInstance, the method that creates the ArrayList object

    /** Creates a new instance of a class using {@link Registration#getInstantiator()}. If the registration's instantiator is null,
     * a new one is set using {@link #newInstantiator(Class)}. */
    public <T> T newInstance (Class<T> type) {
        Registration registration = getRegistration(type);
        ObjectInstantiator instantiator = registration.getInstantiator();
        if (instantiator == null) {
            instantiator = newInstantiator(type);
            registration.setInstantiator(instantiator);
        }
        return (T)instantiator.newInstance();
    }
The ArrayList is instantiated by Kryo#newInstance. The ObjectInstantiator used to construct the object is the one attached to the type's Registration, if it was specified at registration time; otherwise Kryo builds an instantiator for the class according to the configured InstantiatorStrategy:

/** Returns a new instantiator for creating new instances of the specified type. By default, an instantiator is returned that
     * uses reflection if the class has a zero argument constructor, an exception is thrown. If a
     * {@link #setInstantiatorStrategy(InstantiatorStrategy) strategy} is set, it will be used instead of throwing an exception. */
    protected ObjectInstantiator newInstantiator (final Class type) {
        // InstantiatorStrategy.
        return strategy.newInstantiatorOf(type);
    }
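As a side note (a hypothetical sketch of ours, assuming Kryo 3.x with Objenesis 2.x, not something Hive actually does), a Registration can also carry its own instantiator, in which case Kryo#newInstance never consults the InstantiatorStrategy; since Hive never sets one, the strategy alone decides how objects such as ArrayList are created.

    import java.util.ArrayList;

    import org.objenesis.instantiator.ObjectInstantiator;

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.Registration;

    public class RegistrationInstantiatorDemo {
        public static void main(String[] args) {
            Kryo kryo = new Kryo();
            Registration reg = kryo.register(ArrayList.class);
            // An instantiator set on the Registration takes precedence over the strategy.
            reg.setInstantiator(new ObjectInstantiator<ArrayList>() {
                public ArrayList newInstance() {
                    return new ArrayList(); // the constructor runs, elementData is initialized
                }
            });
            System.out.println(kryo.newInstance(ArrayList.class).size()); // 0
        }
    }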

The Kryo instance that Spark SQL uses to serialize and deserialize Hive UDFs is the one defined in Hive's own code, and its instantiator strategy is StdInstantiatorStrategy (used to build an instantiator whenever a Registration has none set):


  // Kryo is not thread-safe,
  // Also new Kryo() is expensive, so we want to do it just once.
  public static ThreadLocal<Kryo> runtimeSerializationKryo = new ThreadLocal<Kryo>() {
    @Override
    protected synchronized Kryo initialValue() {
      Kryo kryo = new Kryo();
      kryo.setClassLoader(Thread.currentThread().getContextClassLoader());
      kryo.register(java.sql.Date.class, new SqlDateSerializer());
      kryo.register(java.sql.Timestamp.class, new TimestampSerializer());
      kryo.register(Path.class, new PathSerializer());
      kryo.setInstantiatorStrategy(new StdInstantiatorStrategy());
      ......
      return kryo;
    };
  };

StdInstantiatorStrategy chooses how to instantiate objects based on the JVM version and vendor rather than on the concrete class, and it can create an object without invoking any of its constructors.

// Javadoc of StdInstantiatorStrategy
/**
 * Guess the best instantiator for a given class. The instantiator will instantiate the class
 * without calling any constructor. Currently, the selection doesn't depend on the class. It relies
 * on the
 * <ul>
 * <li>JVM version</li>
 * <li>JVM vendor</li>
 * <li>JVM vendor version</li>
 * </ul>
 * However, instantiators are stateful and so dedicated to their class.
 * 
 * @author Henri Tremblay
 * @see ObjectInstantiator
 */
public class StdInstantiatorStrategy extends BaseInstantiatorStrategy {

We also found that Hive never sets an explicit instantiator on any of the Registration objects it registers, so every object is constructed through the StdInstantiatorStrategy.
The hypothesis therefore holds: the NPE occurs because the ArrayList is created without any constructor being invoked, leaving its elementData field uninitialized.
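The direct cause can be reproduced outside Spark in a few lines. This is a sketch of ours, assuming Objenesis (which Kryo brings in as a dependency) and the Java 8 ArrayList whose line numbers appear in the stack trace above:

    import java.util.ArrayList;

    import org.objenesis.strategy.StdInstantiatorStrategy;

    public class ArrayListWithoutConstructor {
        public static void main(String[] args) {
            // Create an ArrayList without invoking any constructor,
            // exactly what StdInstantiatorStrategy does inside Kryo.
            ArrayList<?> list = (ArrayList<?>) new StdInstantiatorStrategy()
                    .newInstantiatorOf(ArrayList.class)
                    .newInstance();
            // elementData is still null, so this throws the same NullPointerException
            // seen in ArrayList.ensureExplicitCapacity in the stack trace above.
            list.ensureCapacity(10);
        }
    }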

3 Why Some Spark Versions Run the Same Job Successfully

The same user program runs correctly on the company's earlier Spark build but fails with the bug above on the latest build. Why? Our first suspicion was a Kryo version difference: the External Libraries view in the IDE shows that the old Spark uses Kryo 2 while the new one depends on Kryo 3.

Comparing the two Kryo versions, however, showed no difference in how ArrayList is handled. Since the problem arises while deserializing a Hive UDF, we next compared the Hive versions the two Spark builds depend on.

Hive dependency of the company's older Spark (the version upstream Spark officially depends on, i.e. a trimmed-down fork):

    <hive.group>org.spark-project.hive</hive.group>
    <!-- Version used in Maven Hive dependency -->
    <hive.version>1.2.1.spark</hive.version>

Hive dependency of the company's newer Spark (essentially community Hive):

    <hive.group>com.my corporation.hive</hive.group>
    <!-- Version used in Maven Hive dependency -->
    <hive.version>1.2.1-200-spark</hive.version>

Clearly, the old and new company Spark builds depend on different Hive artifacts. It turns out that the Hive upstream Spark depends on, org.spark-project.hive, is an independently maintained fork of Hive that ships its own Kryo classes (a trimmed-down Hive with a home-grown Kryo). The Hive used by the company's new Spark is community Hive, whose Kryo is the stock third-party library, relocated into hive-exec via the maven-shade-plugin.

A side-by-side comparison shows that the Hive the old Spark depends on (the fork bundled by upstream Spark) modifies Kryo's newInstantiator method: instead of consulting an InstantiatorStrategy, it obtains the class's zero-argument constructor and creates the object with it, so the resulting object is fully initialized. When an ArrayList is created this way, elementData is initialized as expected.
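For comparison with the reproduction above, the sketch below (ours, assuming the ReflectASM library that Kryo bundles) creates an ArrayList through ConstructorAccess the same way the forked Kryo does: the zero-argument constructor runs, so elementData is initialized and ensureCapacity succeeds.

    import java.util.ArrayList;

    import com.esotericsoftware.reflectasm.ConstructorAccess;

    public class ArrayListViaConstructorAccess {
        public static void main(String[] args) {
            // ConstructorAccess invokes the zero-argument constructor via generated bytecode.
            ArrayList<?> list = ConstructorAccess.get(ArrayList.class).newInstance();
            list.ensureCapacity(10); // no NPE: elementData was initialized by the constructor
            System.out.println(list.size()); // 0
        }
    }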

The two implementations whose difference matters for this issue:

  • The Kryo used by the Hive that the old company Spark depends on (i.e. the trimmed-down Hive bundled by upstream Spark):

    protected ObjectInstantiator newInstantiator(final Class type) {
        if (!Util.isAndroid) {
            Class enclosingType = type.getEnclosingClass();
            boolean isNonStaticMemberClass = enclosingType != null && type.isMemberClass() && !Modifier.isStatic(type.getModifiers());
            if (!isNonStaticMemberClass) {
                try {
                    // obtain the zero-argument constructor
                    final ConstructorAccess access = ConstructorAccess.get(type);
                    return new ObjectInstantiator() {
                        public Object newInstance() {
                            try {
                                return access.newInstance();
                            } catch (Exception var2) {
                                throw new KryoException("Error constructing instance of class: " + Util.className(type), var2);
                            }
                        }
                    };
                } catch (Exception var7) {
                    ;
                }
            }
        }
    ......
    }


  • The Kryo used by the Hive that the new company Spark depends on (which is community Hive) creates objects through the configured InstantiatorStrategy, as already described in section 2.2, so it is not repeated here.

4 Solution

From the analysis above, the NPE occurs because Spark deserializes the Hive UDF with Hive's Kryo instance, which is configured with StdInstantiatorStrategy: the ArrayList is created without any of its constructors being invoked, so the resulting object is left uninitialized.

The issue could be fixed in any of Spark, Hive, or Kryo. Since it currently only surfaces in the Spark engine, we chose to fix it in Spark. The idea is to try the default constructor-based strategy (DefaultInstantiatorStrategy) first and fall back to StdInstantiatorStrategy only when that fails:

@transient
def deserializeObjectByKryo[T: ClassTag](
    kryo: Kryo,
    in: InputStream,
    clazz: Class[_]): T = {
  val inp = new Input(in)
  // Explicitly set the instantiator strategy: constructors first, Objenesis fallback.
  kryo.setInstantiatorStrategy(new Kryo.DefaultInstantiatorStrategy(new StdInstantiatorStrategy))
  val t: T = kryo.readObject(inp, clazz).asInstanceOf[T]
  inp.close()
  t
}
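To sanity-check the fix outside Spark, the sketch below (ours, assuming a Kryo 3.x dependency) applies the same strategy chain: the ArrayList is now created through its constructor, and only classes without a usable zero-argument constructor fall back to Objenesis.

    import java.util.ArrayList;
    import java.util.Arrays;

    import org.objenesis.strategy.StdInstantiatorStrategy;

    import com.esotericsoftware.kryo.Kryo;
    import com.esotericsoftware.kryo.io.Input;
    import com.esotericsoftware.kryo.io.Output;

    public class FixedStrategyRoundTrip {
        public static void main(String[] args) {
            Kryo kryo = new Kryo();
            // Same strategy chain as the fix: constructors first, Objenesis as fallback.
            kryo.setInstantiatorStrategy(
                    new Kryo.DefaultInstantiatorStrategy(new StdInstantiatorStrategy()));

            Output output = new Output(1024);
            kryo.writeObject(output, new ArrayList<>(Arrays.asList(1, 2, 3)));
            output.close();

            ArrayList<?> restored = kryo.readObject(new Input(output.toBytes()), ArrayList.class);
            System.out.println(restored); // [1, 2, 3] -- no NPE, elementData was initialized
        }
    }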

最后編輯于
?著作權歸作者所有,轉載或內容合作請聯系作者
平臺聲明:文章內容(如有圖片或視頻亦包括在內)由作者上傳并發布,文章內容僅代表作者本人觀點,簡書系信息發布平臺,僅提供信息存儲服務。

推薦閱讀更多精彩內容