Local start-up:
# Launch the PySpark shell with the driver bound to loopback.
# Fix: the short form "-c" for setting a config property only exists in
# Spark 3.0+; these notes target Spark 2.4.4, where the option is "--conf".
./pyspark --conf spark.driver.bindAddress=127.0.0.1

# Edit Spark's per-node environment configuration file.
vim ./conf/spark-env.sh

# Address the standalone master binds to and advertises.
# NOTE(review): assumes this host's LAN IP is 192.168.8.150 — adjust per host.
export SPARK_MASTER_HOST=192.168.8.150

# IP address Spark binds its local services (driver, block manager) to.
export SPARK_LOCAL_IP=192.168.8.150

启动:

# Start the standalone master daemon (uses SPARK_MASTER_HOST from spark-env.sh).
./sbin/start-master.sh

Spark 2.4.4 安装

spark执行py文件

# Submit a Python application to Spark for execution.
./spark-submit /home/admin/my_script.py

from pyspark import SparkConf, SparkContext

# Build the Spark configuration: run locally, under the app name "My App".
# Fix: the original used Unicode curly quotes (“local”, “My App”), which is a
# Python SyntaxError — string literals must use ASCII quotes.
conf = SparkConf().setMaster("local").setAppName("My App")
# Create the entry point for Spark functionality from that configuration.
sc = SparkContext(conf=conf)

相关文章:

  • 2021-05-29
  • 2022-12-23
  • 2022-12-23
  • 2022-02-05
  • 2022-12-23
  • 2022-01-01
  • 2021-05-18
猜你喜欢
  • 2021-06-03
  • 2021-08-06
  • 2021-10-12
  • 2022-02-09
  • 2021-11-16
  • 2021-10-13
  • 2021-10-21
相关资源
相似解决方案