from pyspark.sql import SQLContext

sc = # existing SparkContext
sql_context = SQLContext(sc)

# Read data from a table
df = sql_context.read \
    .format("com.databricks.spark.redshift") \
    .option("url", "jdbc:redshift://redshifthost:5439/database?user=username&password=pass") \
    .option("dbtable", "my_table") \
    .option("tempdir", "s3n://path/for/temp/data") \
    .load()
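
# Sketch of a variant (not part of the original snippet): spark-redshift also accepts
# a "query" option in place of "dbtable", so Redshift runs the query and unloads only
# its result set to the S3 tempdir. The query text below is purely illustrative.
df_from_query = sql_context.read \
    .format("com.databricks.spark.redshift") \
    .option("url", "jdbc:redshift://redshifthost:5439/database?user=username&password=pass") \
    .option("query", "SELECT x, count(*) FROM my_table GROUP BY x") \
    .option("tempdir", "s3n://path/for/temp/data") \
    .load()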