// Local Spark session for the job task.
// NOTE(review): the original source had this builder chain collapsed onto one physical
// line, so the first `//` comment commented out every subsequent `.config(...)` call
// and the trailing `.getOrCreate()`. Reformatted so all settings actually apply.
val spark = SparkSession.builder()
  .master("local[5]")
  .appName("jobTask")
  .enableHiveSupport()
  // Hive metastore metadata stored in MySQL
  .config("javax.jdo.option.ConnectionURL", "jdbc:mysql://localhost:3306/spark_metastore?createDatabaseIfNotExist=true")
  .config("javax.jdo.option.ConnectionDriverName", "com.mysql.cj.jdbc.Driver")
  .config("javax.jdo.option.ConnectionUserName", "sparkuser")
  .config("javax.jdo.option.ConnectionPassword", "spark123")
  // Warehouse data files stored on the local filesystem
  .config("spark.sql.warehouse.dir", "E:\\workpath\\sparkproject\\sparkLocalTask\\local_warehouse")
  // Performance tuning for a small local run
  .config("spark.sql.shuffle.partitions", "10")
  .config("spark.default.parallelism", "10")
  // NOTE(review): spark.driver.memory is read at JVM launch; setting it here, after
  // the driver JVM has started, typically has no effect — pass it via spark-submit
  // or JVM options instead. Left in place to preserve the original configuration.
  .config("spark.driver.memory", "6g")
  // Suppress the console progress bar
  .config("spark.ui.showConsoleProgress", "false")
  .getOrCreate()
