I have this field in my dataframe: myField: binary (nullable = false) (originally a Java byte[16]).
How do I select it and show it as a hex in scala from a spark shell? Please include necessary imports.
Try:
import org.apache.spark.sql.functions.{col, hex}
df.select(hex(col("myField")))
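For example, in a spark-shell session (a minimal sketch; the one-row DataFrame and its contents are made up for illustration):

import org.apache.spark.sql.functions.{col, hex}
import spark.implicits._ // already in scope in spark-shell

// hypothetical one-row DataFrame with a 16-byte binary column
val df = Seq(Array.fill[Byte](16)(0x2A)).toDF("myField")

// hex() renders the raw bytes as an uppercase hex string
df.select(hex(col("myField")).alias("myField")).show(false)
// +--------------------------------+
// |myField                         |
// +--------------------------------+
// |2A2A2A2A2A2A2A2A2A2A2A2A2A2A2A2A|
// +--------------------------------+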
So I generalized @user6022341's solution into the following function:
import org.apache.spark.sql.DataFrame
import org.apache.spark.sql.functions.{col, concat, hex, lit}
import org.apache.spark.sql.types.BinaryType

def castBinaryToHexString(inputSrcDF: DataFrame): DataFrame = {
  // names of all columns whose data type is BinaryType
  val binaryCols = inputSrcDF.schema.fields
    .filter(_.dataType == BinaryType)
    .map(_.name)
    .toSet
  // render each binary column as a "0x"-prefixed hex string, leaving
  // every other column (and all original column names) untouched
  val projected = inputSrcDF.columns.map { name =>
    if (binaryCols.contains(name)) concat(lit("0x"), hex(col(name))).alias(name)
    else col(name)
  }
  inputSrcDF.select(projected: _*)
}
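A quick usage sketch (reusing the hypothetical df from above; any DataFrame with binary columns works the same way):

val hexDF = castBinaryToHexString(df)
hexDF.show(false)
// +----------------------------------+
// |myField                           |
// +----------------------------------+
// |0x2A2A2A2A2A2A2A2A2A2A2A2A2A2A2A2A|
// +----------------------------------+

Because the function projects every column through a single select, non-binary columns pass through untouched and the original column order is preserved.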
