You can use the PySpark code below to unpivot (melt) the columns into rows:
from pyspark.sql.functions import expr, explode

# Sample wide-format data: one row per Month, one column per measure.
df = spark.createDataFrame([(1, 10, 20, 30, 40),
                            (2, 20, 30, 50, 60),
                            (3, 30, 50, 60, 70),
                            (4, 30, 40, 50, 80)],
                           ["Month", "Col1", "Col2", "Col3", "Col4"])

# stack(n, label1, value1, ...) unpivots the four value columns into
# (Col_name, Value) pairs -- one output row per (Month, column) combination.
melted_df = df.selectExpr(
    "Month",
    "stack(4, 'Col1', Col1, 'Col2', Col2, 'Col3', Col3, 'Col4', Col4) "
    "as (Col_name, Value)")

# stack() already yields the long format, so no explode/map round-trip is
# needed (the original explode(map(...)) produced key/value columns and only
# resolved "Value" via Spark's case-insensitive column matching). Simply
# rename the column to the desired output name.
final_df = melted_df.withColumnRenamed("Value", "Value_new")
final_df.show()