HDFSAPI操作(篇3:案例二)

发布于:2022-12-03 ⋅ 阅读:(149) ⋅ 点赞:(0)

目录

一、项目说明

二、准备数据

三、Xshell

四、Idea

五、打包

六、上传

七、查看


好友推荐

一、项目说明

说明:

互为推荐关系

–非好友的两个人之间存在相同好友则互为推荐关系

–朋友圈两个非好友的人,存在共同好友人数越多,越值得推荐

–存在一个共同好友,值为1;存在多个值累加

程序需求:

程序要求,给每个人推荐可能认识的人

–互为推荐关系值越高,越值得推荐

–每个用户,推荐值越高的可能认识的人排在前面

二、准备数据

 新建txt 文件

friend.txt

xiaoming laowang renhua linzhiling
laowang xiaoming fengjie
renhua xiaoming ligang fengjie
linzhiling xiaoming ligang fengjie guomeimei
ligang renhua fengjie linzhiling
guomeimei fengjie linzhiling
fengjie renhua laowang linzhiling guomeimei

说明:

数据使用空格分割

每行是一个用户以及其对应的好友

每行的第一列名字是用户的名字,后面的是其对应的好友

三、Xshell

 上传到xshell 里:

进入 opt 目录

cd /opt

进入 testData 目录

cd testData

新建 friends 目录

mkdir friends

 

进入 friends 目录

cd friends

将 friend.txt 上传到 input(先在 HDFS 上创建目标目录)

hdfs dfs -mkdir -p /friend/input

hdfs dfs -put friend.txt /friend/input

四、Idea

新建项目 FriendsTest

新建三个类

pom.xml 里加入代码:

    <dependencies>

        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-common</artifactId>

            <version>2.9.2</version>

        </dependency>

        <dependency>

            <groupId>org.apache.hadoop</groupId>

            <artifactId>hadoop-mapreduce-client-jobclient</artifactId>

            <version>2.9.2</version>

            <scope>provided</scope>

        </dependency>

    </dependencies>



    <build>

        <plugins>

            <plugin>

                <groupId>org.apache.maven.plugins</groupId>

                <artifactId>maven-jar-plugin</artifactId>

                <configuration>

                    <archive>

                        <manifest>

                            <mainClass>org.friends.FriendsRecommend</mainClass>

                        </manifest>

                    </archive>

                </configuration>

            </plugin>

        </plugins>

</build>

FriendsRecommendMapper.java

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;

import java.io.IOException;
@Slf4j
@Slf4j
public class FriendsRecommendMapper extends Mapper<Object, Text, Text, IntWritable> {

    // Markers emitted per pair: 0 = the two people are direct friends,
    // 1 = they share one common friend (recommendation candidate).
    // Reused across writes to avoid per-record allocation (standard Hadoop idiom).
    private static final IntWritable DIRECT = new IntWritable(0);
    private static final IntWritable INDIRECT = new IntWritable(1);
    private final Text pairKey = new Text();

    /**
     * Input line format: "user friend1 friend2 ..." (space separated; first
     * column is the user, the rest are that user's friends).
     *
     * For every (user, friendX) pair emits the direct marker 0; for every
     * (friendX, friendY) pair emits the indirect marker 1, meaning the two
     * share "user" as a common friend. The reducer discards any pair that
     * ever received a 0 and sums the 1s for the rest.
     */
    @Override
    public void map(Object key, Text value, Context context) throws IOException, InterruptedException {

        log.info("key:" + key + ",value:" + value);
        String[] friends = value.toString().split(" ");
        for (int i = 0; i < friends.length; i++) {

            String self = friends[i];

            for (int j = i + 1; j < friends.length; j++) {

                String other = friends[j];
                // Order-independent key so (a,b) and (b,a) aggregate together.
                pairKey.set(sort(self, other));
                if (i == 0) {
                    // First column paired with its own friend list: direct friends.
                    log.info("direct:" + pairKey);
                    context.write(pairKey, DIRECT);
                } else {
                    // Two friends of the same user: possible recommendation.
                    log.info("indirect:" + pairKey);
                    context.write(pairKey, INDIRECT);
                }

            }

        }

    }

    /**
     * Normalizes a name pair into one canonical key.
     *
     * FIX: uses case-sensitive {@link String#compareTo}. The original
     * compareToIgnoreCase returned 0 for names differing only by case
     * (e.g. "Bob" vs "bob"), so ("Bob","bob") and ("bob","Bob") produced
     * two DIFFERENT keys ("Bob bob" and "bob Bob"), splitting the count
     * for the same pair across two reduce groups.
     */
    private String sort(String self, String directFriend) {
        if (self.compareTo(directFriend) < 0) {
            return directFriend + " " + self;
        }
        return self + " " + directFriend;
    }

}

FriendsRecommendReduce.java

import lombok.extern.slf4j.Slf4j;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Reducer;

import java.io.IOException;
@Slf4j
@Slf4j
public class FriendsRecommendReduce extends Reducer<Text, IntWritable, Text, IntWritable> {

    /**
     * key    - a canonical pair "nameA nameB" produced by the mapper.
     * values - markers for that pair: 0 means the two are direct friends,
     *          1 means they share one common friend.
     *
     * Emits the pair with the total number of shared friends, but only when
     * no 0 marker is present - people who are already direct friends are
     * never recommended to each other.
     */
    @Override
    public void reduce(Text key, Iterable<IntWritable> values, Context context) throws IOException, InterruptedException {
        log.info("key:" + key);
        int commonFriends = 0;
        for (IntWritable marker : values) {
            if (marker.get() == 0) {
                // Direct friends: drop this pair entirely.
                log.info("direct friend");
                return;
            }
            commonFriends += marker.get();
        }
        context.write(key, new IntWritable(commonFriends));
    }

}

 FriendsRecommend.java

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

/**
 * Driver for the friend-recommendation job.
 *
 * Usage: hadoop jar FriendsTest-1.0-SNAPSHOT.jar [inputPath] [outputPath]
 * Falls back to /friend/input and /friend/output when args are omitted.
 */
public class FriendsRecommend {
    public static void main(String[] args) throws Exception{
        //获取虚拟机配置信息
        Configuration configuration = new Configuration();
        //创建Job对象
        Job job = Job.getInstance(configuration);
        job.setJarByClass(FriendsRecommend.class);

        //Map端
        job.setMapperClass(FriendsRecommendMapper.class);
        job.setMapOutputKeyClass(Text.class);
        job.setMapOutputValueClass(IntWritable.class);
        //combiner组件
//      job.setCombinerClass(FriendsRecommendReduce.class);
        //Reduce端
        job.setReducerClass(FriendsRecommendReduce.class);
        // FIX: declare the job's final output types. Without these Hadoop
        // assumes the defaults (LongWritable/Text) and the reducer's
        // Text/IntWritable writes fail at runtime with a type-mismatch error.
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);

        // FIX: honor command-line paths when supplied (the tutorial's
        // "hadoop jar" invocation passes them); keep the old hard-coded
        // defaults for backward compatibility.
        Path inputPath = new Path(args.length > 0 ? args[0] : "/friend/input");
        FileInputFormat.addInputPath(job, inputPath);
        //结果的输出路经
        Path outputPath = new Path(args.length > 1 ? args[1] : "/friend/output");
        //若路径存在则将其删除
        // FIX: delete(Path) is deprecated; use the explicit recursive overload.
        if (outputPath.getFileSystem(configuration).exists(outputPath)) {
            outputPath.getFileSystem(configuration).delete(outputPath, true);
        }
        FileOutputFormat.setOutputPath(job, outputPath);

        System.exit(job.waitForCompletion(true) ? 0 : 1);

    }
}

五、打包

 

在下面生成的路径中找到对应的本地文件

直接拖拽到xshell 里

使用 ll 命令进行查看是否成功

开启Hadoop集群

start-dfs.sh

开启yarn集群

start-yarn.sh

六、上传

hadoop jar FriendsTest-1.0-SNAPSHOT.jar  /friend/input /friend/output

七、查看

hdfs dfs -cat /friend/output/part-r-00000

Web端查看

http://192.168.67.110:50070/

 下载到本地


网站公告

今日签到

点亮在社区的每一天
去签到