Spark: read data from MongoDB and store it to HDFS

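The script below reads a collection from MongoDB through the mongo-spark connector, registers it as a temporary view, runs a Spark SQL query against it, and writes the result to HDFS as parquet.
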
from pyspark.sql import SparkSession


if __name__ == "__main__":
    spark = SparkSession.builder \
            .getOrCreate()

    # MongoDB connection string and the collection to read
    mongo_read_uri = "mongodb://user:passwd@ip:port/database_name"
    table = "table_name"

    # Load the collection through the mongo-spark connector data source
    device_statis_df = spark.read \
                .format("com.mongodb.spark.sql") \
                .option("uri", mongo_read_uri) \
                .option("collection", table) \
                .load()

    # Register a temporary view so the data can be queried with Spark SQL
    device_statis_df.createOrReplaceTempView("devicestatistics")
    sql_str = """
        select * from devicestatistics
    """

    sqlDF = spark.sql(sql_str)

    # Write the result to HDFS as parquet, overwriting any previous output
    sqlDF.repartition(10).write.format("parquet").mode("overwrite").save("/path/to/hdfs")
    print("Done ====")

    spark.stop()
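
One thing the listing glosses over: the com.mongodb.spark.sql data source only resolves if the mongo-spark connector jar is on the driver and executor classpath. A minimal sketch of pulling it in when the session is built, as an alternative to passing --packages to spark-submit; the artifact coordinates and version here are assumptions, so match them to your own Spark and Scala versions:

from pyspark.sql import SparkSession

# Sketch: resolve the MongoDB Spark connector at session start-up.
# The _2.11 Scala suffix and version 2.4.1 are assumptions -- use the
# artifact that matches your Spark / Scala build.
spark = SparkSession.builder \
    .appName("mongo_to_hdfs") \
    .config("spark.jars.packages", "org.mongodb.spark:mongo-spark-connector_2.11:2.4.1") \
    .getOrCreate()

Setting spark.jars.packages this way only takes effect when the session is created fresh; if the job is launched through spark-submit, putting the same coordinates behind --packages achieves the same result.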