JAVA HDFS: file upload, download, delete, rename, move/cut, list files, copy file/folder, search, create folder, get folder size

Overview

The methods below walk through common HDFS operations via the Java FileSystem API: upload, download, delete, rename, move/cut, directory listing, file/folder copy, search, directory creation, and directory size.

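The snippets are meant to live in one utility class. Here is a minimal sketch of the imports and class skeleton they assume; the class name HdfsFileManager is an assumption, and the JSONObject import is an assumption as well (any JSON library with a put(String, Object) method and a JSON toString(), such as org.json or fastjson, will do).

import java.io.FileNotFoundException;
import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataInputStream;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
// import org.apache.hadoop.io.IOUtils; // only needed for the commented-out stream-copy variant below

import org.json.JSONObject; // assumption: org.json; fastjson's JSONObject also exposes put(String, Object)

public class HdfsFileManager { // class name is an assumption
    // ... all of the methods shown below go here ...
}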
// 1. File upload
    /**
     * File upload
     *
     * @param IDc       user ID
     * @param localPath full path of the local source file
     * @param cloudPath full destination path on HDFS
     * @return a JSON-format string with "operation" and "log" properties
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    public String Upload(String IDc, String localPath, String cloudPath) throws IOException, InterruptedException, URISyntaxException {

        // 1. Get the FileSystem object
        Configuration conf = new Configuration();
//        conf.set("dfs.replication", "2"); // optionally override the replication factor
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        Path findf = new Path(cloudPath);
        // FileSystem.exists checks whether the HDFS path already exists
        boolean isExists = fs.exists(findf);
//        System.out.println(isExists ? "file exists" : "file does not exist");
        if (isExists) {
            fs.close();
            return "{\"operation\":\"The file/folder you are uploading already exists\",\"log\":{}}";
        }

        // 2. Perform the upload (first argument: local source, second: HDFS destination)
        fs.copyFromLocalFile(new Path(localPath), new Path(cloudPath));

        // 3. Release resources
        fs.close();
        return "{\"operation\":\"Upload\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\","
                + "\"file\":\"" + cloudPath + "\",\"operation\":\"Upload\","
                + "\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }
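
A minimal, hypothetical caller, assuming the HdfsFileManager class name from the sketch above; the user ID and file paths are example values only.

    public static void main(String[] args) throws Exception {
        HdfsFileManager manager = new HdfsFileManager();                       // class name is an assumption
        String result = manager.Upload("user001", "D:/demo.txt", "/demo.txt"); // paths are hypothetical examples
        System.out.println(result);                                            // prints the JSON operation log
    }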

    // 2. File download

    /**
     * File download helper
     *
     * @param cloudPath full HDFS path of the file to download
     * @param localPath full local path to download the file to
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    private void down(String cloudPath, String localPath) throws IOException, InterruptedException, URISyntaxException {

        // 1. Get the FileSystem object
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        // 2. Perform the download
//		fs.copyToLocalFile(new Path("/banhua.txt"), new Path("D:/banhua.txt"));
        // delSrc = false keeps the source on HDFS; useRawLocalFileSystem = true skips writing a local .crc checksum file
        fs.copyToLocalFile(false, new Path(cloudPath), new Path(localPath), true);

        // 3. Release resources
        fs.close();
    }
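
down() is private, so callers still need a public entry point. A sketch of one possible wrapper in the same JSON-return style as the other methods; this wrapper is not part of the original code and its JSON layout is an assumption.

    public String Download(String IDc, String cloudPath, String localPath) throws IOException, InterruptedException, URISyntaxException {
        down(cloudPath, localPath); // delegate to the private helper above
        return "{\"operation\":\"Download\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\",\"file\":\"" + cloudPath + "\","
                + "\"operation\":\"Download\",\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }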


    // 3. File delete

    /**
     * File delete
     *
     * @param IDc       user ID
     * @param cloudPath full HDFS path of the file
     * @return a JSON-format string with "operation" and "log" properties
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    public String Delete(String IDc, String cloudPath) throws IOException, InterruptedException, URISyntaxException {

        // 1. Get the FileSystem object
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        // Check whether the file exists
        if (!fs.exists(new Path(cloudPath))) {
            fs.close();
            return "{\"operation\":\"The file/folder you are deleting does not exist\",\"log\":{}}";
        }
        // 2. Delete (the second argument enables recursive deletion of directories)
        fs.delete(new Path(cloudPath), true);

        // 3. Release resources
        fs.close();
        return "{\"operation\":\"Delete\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\",\"file\":\"" + cloudPath + "\","
                + "\"operation\":\"Delete\",\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }


    // 4. File rename

    /**
     * File rename
     *
     * @param IDc     user ID
     * @param oldName full HDFS path of the source file
     * @param newName full HDFS path of the renamed destination file
     * @return a JSON-format string with "operation" and "log" properties
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    public String Rename(String IDc, String oldName, String newName) throws IOException, InterruptedException, URISyntaxException {

        // 1. Get the FileSystem object
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        // Check whether the file exists
        if (!fs.exists(new Path(oldName))) {
            fs.close();
            return "{\"operation\":\"The file/folder you are renaming does not exist\",\"log\":{}}";
        }
        // 2. Perform the rename (first argument: old path, second: new path)
        fs.rename(new Path(oldName), new Path(newName));

        // 3. Release resources
        fs.close();
        return "{\"operation\":\"Rename\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\",\"file\":\"" + oldName + " To " + newName + "\","
                + "\"operation\":\"Rename\",\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }


    // 4'. File cut/move

    /**
     * File cut (move)
     *
     * @param IDc user ID
     * @param src full HDFS path of the source file
     * @param dsc full HDFS path of the destination file
     * @return a JSON-format string with "operation" and "log" properties
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    public String Move(String IDc, String src, String dsc) throws IOException, InterruptedException, URISyntaxException {

        // 1. Get the FileSystem object
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        // Check whether the file exists
        if (!fs.exists(new Path(src))) {
            fs.close();
            return "{\"operation\":\"The file/folder you are moving does not exist\",\"log\":{}}";
        }

        // 2. A move on HDFS is just a rename (first argument: old path, second: new destination path)
        fs.rename(new Path(src), new Path(dsc));

        // 3. Release resources
        fs.close();

        return "{\"operation\":\"Move\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\",\"file\":\"from " + src + " to " + dsc + "\","
                + "\"operation\":\"Move\",\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }


    // 5. List files

    /**
     * List the files in a given directory
     *
     * @param cloudPath full HDFS path of the directory
     * @return a JSON-format string with the files and folders found
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    public String ListFiles(String cloudPath) throws IOException, InterruptedException, URISyntaxException {

        // 1. Get the FileSystem object
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        // Check whether the path exists
        if (!fs.exists(new Path(cloudPath))) {
            fs.close();
            return "{\"operation\":\"The folder path you are looking for does not exist\"}";
        }

        // 2. Read the directory entries
        JSONObject jsonObjectL = new JSONObject();

        int i = 1;
        int j = 1;
        FileStatus[] listStatus = fs.listStatus(new Path(cloudPath));

        for (FileStatus fileStatus : listStatus) {
            if (fileStatus.isFile()) {
                // file
                jsonObjectL.put("file" + i, fileStatus.getPath().getName());
                i++;
//                System.out.println("f:" + fileStatus.getPath().getName());
            } else {
                // folder
                jsonObjectL.put("Folder" + j, fileStatus.getPath().getName());
                j++;
//                System.out.println("d:" + fileStatus.getPath().getName());
            }
//			System.out.println("--------");
        }

        // 3. Release resources
        fs.close();
        return "{\"operation\":\"ListFiles\",\"result\":" + jsonObjectL + "}";
    }
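
listStatus() only reads one directory level. For comparison, a sketch of a recursive listing with FileSystem.listFiles(), which also exposes each file's length; it needs org.apache.hadoop.fs.RemoteIterator and org.apache.hadoop.fs.LocatedFileStatus and is not part of the original code.

    public void listFilesRecursive(String cloudPath) throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");
        // true = recurse into sub-directories; only files (not directories) are returned
        RemoteIterator<LocatedFileStatus> it = fs.listFiles(new Path(cloudPath), true);
        while (it.hasNext()) {
            LocatedFileStatus status = it.next();
            System.out.println(status.getPath().getName() + " : " + status.getLen() + " bytes");
        }
        fs.close();
    }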



    // 6. Copy file, version 1 (stream copy via IOUtils; kept commented out by the author)
//	public void textDisplayToConsole(String src, String dsc) throws IOException, InterruptedException, URISyntaxException {
//
//		// 1. Get the FileSystem object
//		Configuration conf = new Configuration();
//		FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");
//
//		// 2. Open the input and output streams
//		FSDataInputStream in = fs.open(new Path(src));
//		FSDataOutputStream out = fs.create(new Path(dsc));
//
//		// 3. Copy the stream
//		IOUtils.copyBytes(in, out, conf);
//
//		// 4. Release resources
//		IOUtils.closeStream(in);
//		IOUtils.closeStream(out);
//		fs.close();
//	}


    // 6. Copy file, version 2

    /**
     * Copy a file
     *
     * @param IDc user ID
     * @param src full HDFS path of the source file
     * @param dsc full HDFS path of the destination file
     * @return a JSON-format string with "operation" and "log" properties
     * @throws IllegalArgumentException
     * @throws Exception
     */
    public String CopyFile(String IDc, String src, String dsc) throws IllegalArgumentException, Exception {
        /*
         * 1: open an input stream
         * 2: open an output stream
         * 3: pipe one stream into the other
         * 4: release resources
         */
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        // Check whether the file exists
        if (!fs.exists(new Path(src))) {
            fs.close();
            return "{\"operation\":\"The file you are copying does not exist\",\"log\":{}}";
        }

        // 1: open an input stream
        FSDataInputStream input = fs.open(new Path(src));

        // 2: open an output stream
        FSDataOutputStream output = fs.create(new Path(dsc));

        // 3: pipe one stream into the other
        byte[] b = new byte[1024];
        int hasRead = 0;
        while ((hasRead = input.read(b)) > 0) {
            output.write(b, 0, hasRead);
        }

        // 4: release resources
        input.close();
        output.close();
        fs.close();
        return "{\"operation\":\"CopyFile\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\",\"file\":\"" + dsc + "\","
                + "\"operation\":\"CopyFile\",\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }


    // 7. Copy a folder

    /**
     * Copy a folder
     *
     * @param IDc user ID
     * @param src full HDFS path of the source folder
     * @param dsc full HDFS path of the destination
     * @return a JSON-format string with "operation" and "log" properties
     * @throws FileNotFoundException
     * @throws Exception
     */
    public String CopyDir(String IDc, String src, String dsc) throws FileNotFoundException, Exception {

        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        // Check whether the folder exists
        Path srcPath = new Path(src);
        if (!fs.exists(srcPath)) {
            fs.close();
            return "{\"operation\":\"The folder you are copying does not exist\",\"log\":{}}";
        }

        String[] strs = src.split("/");
        String lastName = strs[strs.length - 1]; // last component of the source path
        if (fs.isDirectory(srcPath)) {
            fs.mkdirs(new Path(dsc));

            // Recurse into each entry of the source directory
            FileStatus[] fileStatus = fs.listStatus(srcPath);
            for (FileStatus fileSta : fileStatus) {
                if (dsc.equals("/")) {
                    CopyDir(IDc, fileSta.getPath().toString(), dsc + lastName);
                } else {
                    CopyDir(IDc, fileSta.getPath().toString(), dsc + "/" + lastName);
                }

            }

        } else {
            fs.mkdirs(new Path(dsc));
//            System.out.println("src" + src + "\n" + dsc);
            CopyFile(IDc, src, dsc + "/" + lastName);
        }
        return "{\"operation\":\"CopyFolder\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\",\"file\":\"" + dsc + "/" + lastName + "\","
                + "\"operation\":\"CopyFolder\",\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }
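
For comparison, Hadoop ships a helper that copies a whole file or directory tree in one call. A sketch using org.apache.hadoop.fs.FileUtil (not part of the original code); deleteSource = false keeps the source in place.

    public void copyDirWithFileUtil(String src, String dsc) throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");
        // Recursively copies src (file or directory) to dsc on the same FileSystem
        FileUtil.copy(fs, new Path(src), fs, new Path(dsc), false, conf);
        fs.close();
    }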


    // 8. Search

    JSONObject jsonObjectS = new JSONObject(); // accumulates matches across the recursive calls

    /**
     * Search for a file under a given directory (recursively)
     *
     * @param cloudPath full path of the directory to search in
     * @param name      file name (or name fragment) to search for
     * @return a JSON-format string with the matches found
     * @throws IOException
     * @throws InterruptedException
     * @throws URISyntaxException
     */
    public String Search(String cloudPath, String name) throws IOException, InterruptedException, URISyntaxException {

        // 1. Get the FileSystem object
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");

        // Check whether the path exists
        if (!fs.exists(new Path(cloudPath))) {
            fs.close();
            return "{\"operation\":\"The folder path you are looking for does not exist\"}";
        }

        // 2. Search this directory level
        FileStatus[] fileStatus1 = fs.listStatus(new Path(cloudPath));
        for (FileStatus fileStatus : fileStatus1) {
            String getname = fileStatus.getPath().getName();
            boolean flag = getname.contains(name);
            if (flag) {
                // substring(25) strips the "hdfs://192.168.1.102:9000" prefix, leaving the absolute HDFS path
                jsonObjectS.put(fileStatus.getPath().toString().substring(25), getname);
                System.out.println(getname); // file name

                System.out.println(fileStatus.getPath().toString().substring(25));
                System.out.println("--------------");
            }
            // Recurse into sub-directories; use the FileStatus we already have instead of asking the NameNode again
            if (fileStatus.isDirectory()) {
//				System.out.println(fileStatus.getPath().toString().substring(25));
                Search(fileStatus.getPath().toString().substring(25), name);
            }
        }

        // 3. Release resources
        fs.close();
        return "{\"operation\":\"Search\",\"result\":" + jsonObjectS + "}";
//        return jsonObjectS.toString();
    }
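
The substring(25) calls above strip the fixed-length "hdfs://192.168.1.102:9000" prefix from the full URI. A small helper that does the same without hard-coding the prefix length (a sketch, not part of the original code):

    private static String toAbsolutePath(FileStatus fileStatus) {
        // Path.toUri().getPath() drops the scheme and authority, e.g.
        // "hdfs://192.168.1.102:9000/dir/file.txt" -> "/dir/file.txt"
        return fileStatus.getPath().toUri().getPath();
    }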





    // 9. Create a folder
    public String CreatDir(String IDc, String cloudPath) {
        Configuration conf = new Configuration();
//		conf.set("fs.defaultFS", "hdfs://192.168.1.102:9000");

        // 1. Get the HDFS client object
//		FileSystem fs = FileSystem.get(conf);
        FileSystem fs = null;
        try {
            fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");
        } catch (IOException e) {
            e.printStackTrace();
        } catch (InterruptedException e) {
            e.printStackTrace();
        } catch (URISyntaxException e) {
            e.printStackTrace();
        }

        // 2. Create the path on HDFS (missing parent directories are created as well)
        try {
            fs.mkdirs(new Path(cloudPath));
        } catch (IOException e) {
            e.printStackTrace();
        }

        // 3. Release resources
        try {
            fs.close();
        } catch (IOException e) {
            e.printStackTrace();
        }

        return "{\"operation\":\"CreatDir\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\",\"file\":\"" + cloudPath + "\","
                + "\"operation\":\"CreatDir\",\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }
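
FileSystem implements java.io.Closeable, so the three separate try/catch blocks above can be collapsed with try-with-resources. A compact variant (a sketch, not part of the original code):

    public String createDirCompact(String IDc, String cloudPath) throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        try (FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop")) {
            fs.mkdirs(new Path(cloudPath)); // creates missing parent directories as well
        } // fs is closed automatically here
        return "{\"operation\":\"CreatDir\",\"log\":{\"IDc\":\"" + IDc + "\",\"ticket\":\"null\",\"file\":\"" + cloudPath + "\","
                + "\"operation\":\"CreatDir\",\"time\":\"" + System.currentTimeMillis() + "\"}}";
    }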


    // 10. Get the size of a folder
    public Long getFileSize(String cloudPath) throws URISyntaxException, IOException, InterruptedException {
        // Get the FileSystem object
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");
        // Return the total size of the folder, in bytes
        return fs.getContentSummary(new Path(cloudPath)).getLength();
    }
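
getContentSummary() returns more than the total length. A sketch printing the other fields of org.apache.hadoop.fs.ContentSummary (not part of the original code):

    public void printDirSummary(String cloudPath) throws IOException, InterruptedException, URISyntaxException {
        Configuration conf = new Configuration();
        FileSystem fs = FileSystem.get(new URI("hdfs://192.168.1.102:9000"), conf, "hadoop");
        ContentSummary summary = fs.getContentSummary(new Path(cloudPath));
        System.out.println("files: " + summary.getFileCount());
        System.out.println("directories: " + summary.getDirectoryCount());
        System.out.println("logical size: " + summary.getLength() + " bytes");
        System.out.println("space consumed (incl. replication): " + summary.getSpaceConsumed() + " bytes");
        fs.close();
    }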

 

 
