From 0d2248aee125b1d04e2dc8a9e2e5b084e54f60e1 Mon Sep 17 00:00:00 2001
From: Ruoyu Chen
Date: Wed, 18 Jan 2017 22:39:24 +0800
Subject: [PATCH 1/7] Start experimenting with CentOS 7 as the base image, and
 remove the dependency on wget
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 .../os-jvm-docker/centos7-openjdk8/Dockerfile | 17 +++++++++++++++++
 1 file changed, 17 insertions(+)
 create mode 100644 services/os-jvm-docker/centos7-openjdk8/Dockerfile

diff --git a/services/os-jvm-docker/centos7-openjdk8/Dockerfile b/services/os-jvm-docker/centos7-openjdk8/Dockerfile
new file mode 100644
index 0000000..25d6329
--- /dev/null
+++ b/services/os-jvm-docker/centos7-openjdk8/Dockerfile
@@ -0,0 +1,17 @@
+FROM centos:7
+
+MAINTAINER twinsen
+
+USER root
+
+ENV JAVA_HOME=/usr/lib/jvm/java-openjdk
+
+ENV PATH=$PATH:$JAVA_HOME/bin:.
+
+# Install OpenJDK
+RUN yum update -y && \
+    yum install -y java-1.8.0-openjdk-devel && \
+    yum clean all && \
+    cp /usr/share/zoneinfo/Asia/Shanghai /etc/localtime
+
+CMD ["/bin/bash"]
\ No newline at end of file

From e52e9a8688c320e934035c776a3ab9808dc349b0 Mon Sep 17 00:00:00 2001
From: Ruoyu Chen
Date: Sun, 22 Jan 2017 11:26:51 +0800
Subject: [PATCH 2/7] Fix incorrect file paths
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docker-compose-build-all.yml | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/docker-compose-build-all.yml b/docker-compose-build-all.yml
index 22eba5e..2a67519 100755
--- a/docker-compose-build-all.yml
+++ b/docker-compose-build-all.yml
@@ -1,17 +1,21 @@
 version: '2'
 services:
   os-jvm:
-    build: ./os-jvm-docker/centos6-openjdk8
+    build: ./services/os-jvm-docker/centos6-openjdk8
     image: twinsen/os-jvm:centos6-openjdk8
 
+  os7-jvm:
+    build: ./services/os-jvm-docker/centos7-openjdk8
+    image: twinsen/os-jvm:centos7-openjdk8
+
   hadoop:
-    build: ./hadoop-docker/2.7.2
+    build: ./services/hadoop-docker/2.7.2
     image: twinsen/hadoop:2.7.2
 
   hive-2.1.1:
-    build: ./hive-docker/2.1.1
+    build: ./services/hive-docker/2.1.1
     image: twinsen/hive:2.1.1
 
   spark-2.1.0:
-    build: ./spark-docker/2.1.0
+    build: ./services/spark-docker/2.1.0
     image: twinsen/spark:2.1.0
\ No newline at end of file

From 37b23ce2144ae3a03ad1580d5abaec953720e4d5 Mon Sep 17 00:00:00 2001
From: Ruoyu Chen
Date: Fri, 3 Mar 2017 16:05:54 +0800
Subject: [PATCH 3/7] Change how the images are obtained: pull them directly
 from Docker Hub, avoiding build failures caused by the GitHub LFS bandwidth
 quota running out
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 4f4a413..06ce9ad 100644
--- a/README.md
+++ b/README.md
@@ -44,21 +44,21 @@
 
 `docker pull centos:6`
 
-- Build the base operating system and OpenJDK environment, with CentOS 6 and OpenJDK 8
+- Pull the base operating system and OpenJDK environment, with CentOS 6 and OpenJDK 8
 
-`docker-compose -f docker-compose-build-all.yml build os-jvm`
+`docker pull twinsen/os-jvm:centos6-openjdk8`
 
-- Build the Hadoop environment, with Hadoop 2.7.2
+- Pull the Hadoop environment, with Hadoop 2.7.2
 
-`docker-compose -f docker-compose-build-all.yml build hadoop`
+`docker pull twinsen/hadoop:2.7.2`
 
-- Build the Hive environment, with Hive 2.1.1
+- Pull the Hive environment, with Hive 2.1.1
 
-`docker-compose -f docker-compose-build-all.yml build hive-2.1.1`
+`docker pull twinsen/hive:2.1.1`
 
-- Build the Spark environment, with Spark 2.1.0
+- Pull the Spark environment, with Spark 2.1.0
 
-`docker-compose -f docker-compose-build-all.yml build spark-2.1.0`
+`docker pull twinsen/spark:2.1.0`
 
 ###3. Starting and stopping the cluster
 After the image build step above is complete, you can use the docker images command at the system command line to see the images currently in the Docker environment, as shown below:

From be619abf6c117ef5030f4c9dae52844e59877824 Mon Sep 17 00:00:00 2001
From: Ruoyu Chen
Date: Tue, 4 Apr 2017 17:47:44 +0800
Subject: [PATCH 4/7] Map the hadoop.tmp.dir directory onto a local volume, so
 that the Docker filesystem does not keep growing
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docker-compose.yml                                       | 4 ++++
 services/hadoop-docker/2.7.2/config/hadoop/core-site.xml | 4 ++++
 services/hadoop-docker/2.7.2/docker-compose.yml          | 4 ++++
 3 files changed, 12 insertions(+)

diff --git a/docker-compose.yml b/docker-compose.yml
index e8335fc..234dca2 100755
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -7,6 +7,7 @@ services:
       - "./volume/hadoop/work/slave1:/works"
       - "./volume/hadoop/logs/slave1:/root/hadoop/logs/"
       - "./volume/spark/logs/slave1:/root/spark/logs/"
+      - "./volume/hadoop/tmp/slave1:/tmp"
       - "./volume/ro_data:/ro_data:ro"
     hostname: hadoop-slave1
     networks:
@@ -22,6 +23,7 @@ services:
       - "./volume/hadoop/work/slave2:/works"
       - "./volume/hadoop/logs/slave2:/root/hadoop/logs/"
       - "./volume/spark/logs/slave2:/root/spark/logs/"
+      - "./volume/hadoop/tmp/slave2:/tmp"
       - "./volume/ro_data:/ro_data:ro"
     hostname: hadoop-slave2
     networks:
@@ -37,6 +39,7 @@ services:
       - "./volume/hadoop/work/slave3:/works"
       - "./volume/hadoop/logs/slave3:/root/hadoop/logs/"
       - "./volume/spark/logs/slave3:/root/spark/logs/"
+      - "./volume/hadoop/tmp/slave3:/tmp"
       - "./volume/ro_data:/ro_data:ro"
     hostname: hadoop-slave3
     networks:
@@ -68,6 +71,7 @@ services:
       - "./volume/hadoop/work/master:/works"
       - "./volume/hadoop/logs/master:/root/hadoop/logs/"
       - "./volume/spark/logs/master:/root/spark/logs/"
+      - "./volume/hadoop/tmp/master:/tmp"
       - "./volume/code:/code"
       - "./volume/ro_data:/ro_data:ro"
     container_name: spark-master
diff --git a/services/hadoop-docker/2.7.2/config/hadoop/core-site.xml b/services/hadoop-docker/2.7.2/config/hadoop/core-site.xml
index a499909..35bed8e 100644
--- a/services/hadoop-docker/2.7.2/config/hadoop/core-site.xml
+++ b/services/hadoop-docker/2.7.2/config/hadoop/core-site.xml
@@ -17,6 +17,10 @@
     </property>
 
+    <property>
+        <name>hadoop.tmp.dir</name>
+        <value>file:/works/hadoop_tmp/hadoop_${user.name}</value>
+    </property>
     <property>
         <name>io,native.lib.available</name>
        <value>true</value>
     </property>
diff --git a/services/hadoop-docker/2.7.2/docker-compose.yml b/services/hadoop-docker/2.7.2/docker-compose.yml
index e33c68a..032fa9e 100755
--- a/services/hadoop-docker/2.7.2/docker-compose.yml
+++ b/services/hadoop-docker/2.7.2/docker-compose.yml
@@ -6,6 +6,7 @@ services:
     volumes:
       - "./volume/hadoop/work/slave1:/works"
       - "./volume/hadoop/logs/slave1:/root/hadoop/logs/"
+      - "./volume/hadoop/tmp/slave1:/tmp"
     hostname: hadoop-slave1
     networks:
       - hadoop
@@ -17,6 +18,7 @@ services:
     volumes:
       - "./volume/hadoop/work/slave2:/works"
       - "./volume/hadoop/logs/slave2:/root/hadoop/logs/"
+      - "./volume/hadoop/tmp/slave2:/tmp"
     hostname: hadoop-slave2
     networks:
       - hadoop
@@ -28,6 +30,7 @@ services:
     volumes:
       - "./volume/hadoop/work/slave3:/works"
       - "./volume/hadoop/logs/slave3:/root/hadoop/logs/"
+      - "./volume/hadoop/tmp/slave3:/tmp"
     hostname: hadoop-slave3
     networks:
       - hadoop
@@ -42,6 +45,7 @@ services:
     volumes:
       - "./volume/hadoop/work/master:/works"
       - "./volume/hadoop/logs/master:/root/hadoop/logs/"
+      - "./volume/hadoop/tmp/master:/tmp"
      - "./volume/code:/code"
     hostname: hadoop-master
     links:

From e9f50d023d7d6262bee3692442c1120402e55341 Mon Sep 17 00:00:00 2001
From: Ruoyu Chen
Date: Thu, 13 Apr 2017 20:22:27 +0800
Subject: [PATCH 5/7] Add the spark.yarn.jar setting to Spark's configuration
 file, set to hdfs://hadoop-master:54310/user/spark/share/lib/*.jar. This
 keeps Spark from repeatedly packaging and uploading the Spark jars on every
 job submission, saving disk space and improving execution efficiency. Update
 README.md: add the initialization steps required before starting the
 cluster, including formatting HDFS and packaging and uploading the Spark
 jars.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md                                          | 31 ++++++++++++-------
 .../2.1.0/config/spark/spark-defaults.conf         |  3 +-
 2 files changed, 22 insertions(+), 12 deletions(-)

diff --git a/README.md b/README.md
index 06ce9ad..f4f0f6c 100644
--- a/README.md
+++ b/README.md
@@ -8,31 +8,31 @@
 
 ## 1. Overview of the software environment
 
-###1. Software versions
+###1.1 Software versions
 
 - Operating system: CentOS 6
 - Java environment: OpenJDK 8
 - Hadoop: 2.7.2
-- Spark: 1.6.2/2.1.0
-- Hive: 1.1.1/2.1.1
+- Spark: 2.1.0
+- Hive: 2.1.1
 - HBase: 1.2.2
 - Zookeeper: 3.4.8
 - Images and containers are managed, and the cluster orchestrated, with docker-compose
 - All binary packages are downloaded over the network. The self-compiled Hadoop and Protobuf binaries are hosted on GitHub; all other binaries come from the official Apache mirrors.
 
-###2. Image dependencies
+###1.2 Image dependencies
 
 ![Image dependency graph](https://github.com/ruoyu-chen/hadoop-docker/raw/master/images/arch.jpeg "Image dependencies")
 
 In the figure above, the grey image (centos:6) is the official Docker Hub base image. All other images (twinsen/hadoop:2.7.2 and so on) are built on top of the image beneath them. These dependencies determine the order in which the images must be built.
 
 ## 2. Usage
 
-###1. Installing docker
+###2.1 Installing docker
 
 Search online for installation instructions for your platform. Once installed, run docker info at the command line to test the setup; output like the screenshot below means the installation succeeded
 
 ![docker installation test](https://github.com/ruoyu-chen/hadoop-docker/raw/master/images/docker_info.png "Docker installation test")
 
-###2. Building the images
+###2.2 Building the images
 
 First, download the project archive ( https://github.com/ruoyu-chen/hadoop-docker/archive/1.1.zip ) and unpack it to any directory.
 Then, from the project root (the directory containing docker-compose-build-all.yml), run the following commands in order to build the images:
@@ -60,14 +60,12 @@
 
 `docker pull twinsen/spark:2.1.0`
 
-###3. Starting and stopping the cluster
+###2.3 Preparing the environment
 
 After the image build step above is complete, you can use the docker images command at the system command line to see the images currently in the Docker environment, as shown below:
 
 ![Listing local Docker images](https://github.com/ruoyu-chen/hadoop-docker/raw/master/images/docker_images.png "Listing local Docker images")
 
 For convenience, a docker-compose.yml file is provided in the project root, preconfigured with a Spark cluster made up of 3 slave nodes and 1 master node.
-The steps for starting and stopping the Spark cluster are outlined below (run every command from the system command line, in the project root)
-
-- Initialization
+Before using the cluster, the following initialization must be completed

 #[Create the containers]
@@ -76,8 +74,19 @@ docker-compose up -d
 docker-compose exec spark-master hdfs namenode -format
 #[Initialize the Hive database. Run only once, before the cluster's first start]
 docker-compose exec spark-master schematool -dbType mysql -initSchema
+#[Package the Spark jars into a single archive, stored under /code and named spark-libs.jar]
+docker-compose exec spark-master jar cv0f /code/spark-libs.jar -C /root/spark/jars/ .
+#[Start HDFS]
+docker-compose exec spark-master start-dfs.sh
+#[Upload /code/spark-libs.jar to the /user/spark/share/lib/ directory on HDFS]
+docker-compose exec spark-master hadoop fs -put /code/spark-libs.jar /user/spark/share/lib/
+#[Stop HDFS]
+docker-compose exec spark-master stop-dfs.sh
 
+###2.4 Starting and stopping the cluster
+
+The steps for starting and stopping the Spark cluster are outlined below (run every command from the system command line, in the project root)
 
 - Start the cluster processes, executing in order:

@@ -102,7 +111,7 @@ docker-compose exec spark-master stop-dfs.sh
 docker-compose down
-###4. Using the cluster during development and testing
+###2.5 Using the cluster during development and testing
 
 The cluster currently uses an allocation of 1 master node and 3 slave nodes. It can be scaled out by adjusting the docker-compose configuration file and the configuration files of the relevant software; automatic scaling is not yet supported.
 
diff --git a/services/spark-docker/2.1.0/config/spark/spark-defaults.conf b/services/spark-docker/2.1.0/config/spark/spark-defaults.conf
index 4215fde..59e1018 100644
--- a/services/spark-docker/2.1.0/config/spark/spark-defaults.conf
+++ b/services/spark-docker/2.1.0/config/spark/spark-defaults.conf
@@ -11,4 +11,5 @@
 
 spark.executor.extraClassPath /root/hive/lib/mysql-connector-java-5.1.40-bin.jar:/root/hive/lib/guava-14.0.1.jar
 
-spark.driver.extraClassPath /root/hive/lib/mysql-connector-java-5.1.40-bin.jar:/root/hive/lib/guava-14.0.1.jar
\ No newline at end of file
+spark.driver.extraClassPath /root/hive/lib/mysql-connector-java-5.1.40-bin.jar:/root/hive/lib/guava-14.0.1.jar
+spark.yarn.jar hdfs://hadoop-master:54310/user/spark/share/lib/*.jar
\ No newline at end of file

From 27a786572413ab1a66e47729552572f2d956f1a7 Mon Sep 17 00:00:00 2001
From: Ruoyu Chen
Date: Thu, 13 Apr 2017 21:28:50 +0800
Subject: [PATCH 6/7] Add the spark.yarn.archive setting to Spark's
 configuration file, set to
 hdfs://hadoop-master:54310/user/spark/share/lib/spark-libs.jar. This keeps
 Spark from repeatedly packaging and uploading the Spark jars on every job
 submission, saving disk space and improving execution efficiency. Update
 README.md: add the initialization steps required before starting the
 cluster, including formatting HDFS and packaging and uploading the Spark
 jars.
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 services/spark-docker/2.1.0/config/spark/spark-defaults.conf | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/services/spark-docker/2.1.0/config/spark/spark-defaults.conf b/services/spark-docker/2.1.0/config/spark/spark-defaults.conf
index 59e1018..5a4fb5c 100644
--- a/services/spark-docker/2.1.0/config/spark/spark-defaults.conf
+++ b/services/spark-docker/2.1.0/config/spark/spark-defaults.conf
@@ -12,4 +12,4 @@
 spark.executor.extraClassPath /root/hive/lib/mysql-connector-java-5.1.40-bin.jar:/root/hive/lib/guava-14.0.1.jar
 
 spark.driver.extraClassPath /root/hive/lib/mysql-connector-java-5.1.40-bin.jar:/root/hive/lib/guava-14.0.1.jar
-spark.yarn.jar hdfs://hadoop-master:54310/user/spark/share/lib/*.jar
\ No newline at end of file
+spark.yarn.archive hdfs://hadoop-master:54310/user/spark/share/lib/spark-libs.jar
\ No newline at end of file

From be5b12de6cced585d519e1e5da8ad43c99679d43 Mon Sep 17 00:00:00 2001
From: Ruoyu Chen
Date: Tue, 23 May 2017 12:58:29 +0800
Subject: [PATCH 7/7] Add the missing command that creates the required HDFS
 directory
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 README.md | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/README.md b/README.md
index f4f0f6c..f51188a 100644
--- a/README.md
+++ b/README.md
@@ -78,6 +78,8 @@ docker-compose exec spark-master schematool -dbType mysql -initSchema
 docker-compose exec spark-master jar cv0f /code/spark-libs.jar -C /root/spark/jars/ .
 #[Start HDFS]
 docker-compose exec spark-master start-dfs.sh
+#[Create the /user/spark/share/lib/ directory in HDFS]
+docker-compose exec spark-master hadoop fs -mkdir -p /user/spark/share/lib/
 #[Upload /code/spark-libs.jar to the /user/spark/share/lib/ directory on HDFS]
 docker-compose exec spark-master hadoop fs -put /code/spark-libs.jar /user/spark/share/lib/
 #[Stop HDFS]
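
Note (not part of the patches above): a quick way to check that the spark.yarn.archive setting introduced in PATCH 6/7 is taking effect is to submit the SparkPi example that ships with Spark and watch the YARN client log. Without the setting, Spark 2.x logs that neither spark.yarn.jars nor spark.yarn.archive is set and re-uploads the libraries under SPARK_HOME on every submission; with it, that upload step disappears. This is only an illustrative sketch: the example class and jar path below assume a stock Spark 2.1.0 installation under /root/spark, matching the paths used elsewhere in this series.

```
# Bring the cluster up, then submit the bundled SparkPi example on YARN.
# With spark.yarn.archive set, the YARN client reuses
# hdfs://hadoop-master:54310/user/spark/share/lib/spark-libs.jar from HDFS
# instead of packaging and uploading the local jars on each submission.
docker-compose up -d
docker-compose exec spark-master spark-submit \
  --master yarn \
  --deploy-mode client \
  --class org.apache.spark.examples.SparkPi \
  /root/spark/examples/jars/spark-examples_2.11-2.1.0.jar 100
```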