I want to filter a PySpark DataFrame with a SQL-like IN clause, as in
sc = SparkContext()
sqlc = SQLContext(sc)
df = sqlc.sql('SELECT * from my_df WHERE field1 IN a')
A slightly different approach that worked for me is to filter with a custom user-defined function (UDF).
from pyspark.sql.functions import col, udf
from pyspark.sql.types import BooleanType

def filter_func(a):
    """Wrapper that closes over the broadcast variable a and returns a UDF."""
    def filter_func_(value):
        """Return True if value is contained in the broadcast collection."""
        return value in a.value
    return udf(filter_func_, BooleanType())

# Broadcasting lets us ship a potentially large collection to the workers efficiently
a = sc.broadcast((1, 2, 3))
df = my_df.filter(filter_func(a)(col('field1')))
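
For reference, here is a minimal end-to-end sketch of the same approach on a toy DataFrame (the column names and the sqlc SQLContext are assumptions carried over from the snippets above):

rows = [(1, 'a'), (4, 'b'), (2, 'c')]
my_df = sqlc.createDataFrame(rows, ['field1', 'field2'])
a = sc.broadcast((1, 2, 3))
my_df.filter(filter_func(a)(col('field1'))).show()
# Expected output: only the rows whose field1 is in (1, 2, 3)
# +------+------+
# |field1|field2|
# +------+------+
# |     1|     a|
# |     2|     c|
# +------+------+

The broadcast/UDF combination pays off when the set of values is large; for a small, fixed set, the built-in Column.isin method is simpler: my_df.filter(col('field1').isin(1, 2, 3)).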