# Find the count of null/None/NaN values in every DataFrame column.
from pyspark.sql import functions as F

# Spark's isnan() is only defined for float/double columns; applying it to
# string, date, timestamp, or boolean columns raises an AnalysisException.
# So: check NaN only on float-typed columns, plain isNull() everywhere else.
_FLOAT_TYPES = {"float", "double"}


def _missing_condition(name, dtype):
    """Return a Column condition that is true when the value is null
    (and additionally NaN, for float/double columns)."""
    col = F.col(name)
    if dtype in _FLOAT_TYPES:
        return F.isnan(col) | col.isNull()
    return col.isNull()


# df.dtypes yields (column_name, type_string) pairs, letting us pick the
# right condition per column instead of blindly calling isnan on all.
df.select(
    [F.count(F.when(_missing_condition(name, dtype), name)).alias(name)
     for name, dtype in df.dtypes]
).show()