Using YarnClient under Spring Boot

1. pom.xml

<properties>
    <!-- hadoop version -->
    <hadoop.version>2.6.0</hadoop.version>
</properties>

<dependencies>
    <!-- hadoop common -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-common</artifactId>
        <version>${hadoop.version}</version>
    </dependency>

    <!-- hadoop client -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>${hadoop.version}</version>
    </dependency>

    <!-- hadoop yarn api -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-yarn-api</artifactId>
        <version>${hadoop.version}</version>
    </dependency>

    <!-- hadoop yarn client -->
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-yarn-client</artifactId>
        <version>${hadoop.version}</version>
    </dependency>
</dependencies>
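
Under Spring Boot, these Hadoop artifacts commonly pull in an old javax.servlet servlet-api and slf4j-log4j12, which clash with the embedded Tomcat and Logback. A hedged sketch of the usual exclusions, to be verified against your own mvn dependency:tree output:

<!-- Illustrative exclusions: confirm the actual offenders with mvn dependency:tree -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>${hadoop.version}</version>
    <exclusions>
        <exclusion>
            <groupId>javax.servlet</groupId>
            <artifactId>servlet-api</artifactId>
        </exclusion>
        <exclusion>
            <groupId>org.slf4j</groupId>
            <artifactId>slf4j-log4j12</artifactId>
        </exclusion>
    </exclusions>
</dependency>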

2. Code

package com.**.dfp.rtsync.task;

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;

import java.io.IOException;
import java.util.EnumSet;
import java.util.List;

/**
 * Description
 *
 * @author Bob
 * @date 2020/10/22
 **/
@Slf4j
public class YarnTest {
    public static void main(String[] args) {
        // Name of the target YARN application, passed on the command line
        String applicationName = args[0];
        Configuration conf = new YarnConfiguration();
        YarnClient yarnClient = YarnClient.createYarnClient();
        yarnClient.init(conf);
        yarnClient.start();
        try {
            // Query all applications in the RUNNING state
            List<ApplicationReport> applicationReportList = yarnClient.getApplications(EnumSet.of(YarnApplicationState.RUNNING));
            for (ApplicationReport applicationReport : applicationReportList) {
                // Application name
                String name = applicationReport.getName();
                // Application type
                String applicationType = applicationReport.getApplicationType();
                ApplicationId applicationId;
                log.info("-----------applicationId:{},name:{},queue:{},user:{},type:{}", applicationReport.getApplicationId(), applicationReport.getName(), applicationReport.getQueue(), applicationReport.getUser(), applicationReport.getApplicationType());
                // Check whether this is the real-time integration job (Apache Spark)
                if (name.equals(applicationName)) {
                    if ("Apache Spark".equalsIgnoreCase(applicationType) || "Spark".equalsIgnoreCase(applicationType)) {
                        // Get the ApplicationId
                        applicationId = applicationReport.getApplicationId();
                        // Kill the application
                        //yarnClient.killApplication(applicationId);
                        log.info("==========applicationId:{} is killed!", applicationId);
                        break;
                    } else {
                        log.warn("The app {} is not a valid Spark job!", applicationName);
                    }
                }
            }
        } catch (YarnException | IOException e) {
            log.error("killYarnApplication error:", e);
            throw new RuntimeException("Failed to stop the YARN application!", e);
        } finally {
            if (yarnClient != null) {
                try {
                    // close() also stops the underlying YarnClient service
                    yarnClient.close();
                } catch (IOException e) {
                    log.error("Failed to close the YarnClient!", e);
                }
            }
        }
    }
}
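
Since the point is using YarnClient under Spring Boot, the main() above would normally become a container-managed bean so that a single YarnClient lives for the whole application context. A minimal sketch, assuming a Spring context is available; the names YarnApplicationService and killByName are illustrative, not from the original code:

package com.example.yarn; // hypothetical package for this sketch

import org.apache.hadoop.yarn.api.records.ApplicationId;
import org.apache.hadoop.yarn.api.records.ApplicationReport;
import org.apache.hadoop.yarn.api.records.YarnApplicationState;
import org.apache.hadoop.yarn.client.api.YarnClient;
import org.apache.hadoop.yarn.conf.YarnConfiguration;
import org.apache.hadoop.yarn.exceptions.YarnException;
import org.springframework.stereotype.Service;

import javax.annotation.PostConstruct;
import javax.annotation.PreDestroy;
import java.io.IOException;
import java.util.EnumSet;

@Service
public class YarnApplicationService {

    private YarnClient yarnClient;

    @PostConstruct
    public void init() {
        // One client for the lifetime of the Spring context
        yarnClient = YarnClient.createYarnClient();
        yarnClient.init(new YarnConfiguration());
        yarnClient.start();
    }

    /** Kills the first RUNNING application whose name matches. */
    public void killByName(String applicationName) throws YarnException, IOException {
        for (ApplicationReport report
                : yarnClient.getApplications(EnumSet.of(YarnApplicationState.RUNNING))) {
            if (applicationName.equals(report.getName())) {
                ApplicationId applicationId = report.getApplicationId();
                yarnClient.killApplication(applicationId);
                return;
            }
        }
    }

    @PreDestroy
    public void shutdown() throws IOException {
        // close() also stops the underlying service
        yarnClient.close();
    }
}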

3. yarn-site.xml

Note:

Option 1: place the yarn-site.xml file under the resources directory (YarnConfiguration loads yarn-site.xml from the classpath by default).

Option 2: point to yarn-site.xml explicitly in code, as follows:

Configuration conf = new YarnConfiguration();
conf.addResource(new File("D:\\spark\\hadoop-2.6.0\\yarn-site.xml").toURI().toURL());
yarnClient.init(conf);
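
If you want to avoid the checked MalformedURLException that toURI().toURL() can throw, Configuration also has an addResource overload that takes a Hadoop Path; an equivalent sketch (requires import org.apache.hadoop.fs.Path):

Configuration conf = new YarnConfiguration();
// The org.apache.hadoop.fs.Path overload sidesteps the File-to-URL conversion
conf.addResource(new Path("D:\\spark\\hadoop-2.6.0\\yarn-site.xml"));
yarnClient.init(conf);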

4. Environment variables

On Windows, you must configure the Hadoop environment variables!

Hadoop environment variable: HADOOP_HOME = D:\spark\hadoop-2.6.0

Add to Path: %HADOOP_HOME%\bin;
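
HADOOP_HOME matters because Hadoop's shell utilities look for %HADOOP_HOME%\bin\winutils.exe on Windows. If changing the system environment is not an option (e.g. in tests), Hadoop also honors the hadoop.home.dir JVM system property; a minimal sketch, which must run before any Hadoop class is loaded:

// Alternative to setting the HADOOP_HOME environment variable;
// must execute before the first Hadoop class initializes
System.setProperty("hadoop.home.dir", "D:\\spark\\hadoop-2.6.0");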
