Hadoop HDFS file system operations (under the Spring framework)

This post shares the simple HDFS operations used in the Hadoop network-disk project: a Hadoop HDFS utility class and a service-layer implementation class.
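
Because the service class in section 2 is annotated with @Service, it only needs to be reachable by Spring's component scan. A minimal Java-config sketch is shown below; the configuration class name and base package are assumptions for illustration, and the real project may wire this up in XML instead.

import org.springframework.context.annotation.ComponentScan;
import org.springframework.context.annotation.Configuration;

// Hypothetical Spring configuration, shown only to illustrate how the beans get registered.
@Configuration
@ComponentScan("com.haodisk")   // picks up @Service beans such as HdfsFileServiceImpl
public class AppConfig {
}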

1、HdfsUtils.java

package com.haodisk.util;

import java.io.IOException;
import java.net.URI;
import java.net.URISyntaxException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

/**
 * Singleton-style helper that provides the Hadoop FileSystem and Configuration.
 * @author TooCruel
 *
 */
public class HdfsUtils {
	private static Configuration conf = new Configuration();
	private static FileSystem fs = null;
	/**
	 * Returns the shared Hadoop Configuration.
	 * @return the Configuration instance
	 */
	public static synchronized Configuration getConfiguration() {
		return conf;
	}
	
	/**
	 * Creates the FileSystem only once, on the first call.
	 * @throws IOException
	 * @throws URISyntaxException
	 */
	private synchronized static void initFileSystem(String url) throws IOException, URISyntaxException {
		if (fs == null)
			fs = FileSystem.get(new URI(url), conf);
	}

	/**
	 * Returns the shared FileSystem, creating it on first access.
	 */
	public static FileSystem getFileSystem(String url) throws IOException, URISyntaxException {
		if (fs == null) // first access: create the FileSystem
			initFileSystem(url);
		return fs;
	}
	
	
	/**
	 * Formats a byte count as a human-readable string (B / KB / MB / GB).
	 */
	public synchronized static String convertSize(long size) {
		String result;
		if (size < 1024) {
			result = size + " B";
		} else if (size < 1024 * 1024) {
			result = (size / 1024) + " KB";
		} else if (size < 1024 * 1024 * 1024) {
			result = (size / 1024 / 1024) + " MB";
		} else {
			result = (size / 1024 / 1024 / 1024) + " GB";
		}
		return result;
	}
}
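
A minimal usage sketch of the utility class is shown below; the NameNode URL and the /person directory are placeholder assumptions, not values taken from the project.

import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUtilsDemo {
	public static void main(String[] args) throws Exception {
		// The NameNode address is a placeholder; use the cluster's real fs.defaultFS.
		FileSystem fs = HdfsUtils.getFileSystem("hdfs://localhost:9000");
		for (FileStatus status : fs.listStatus(new Path("/person"))) {
			// Print each entry together with a human-readable size
			System.out.println(status.getPath() + "  " + HdfsUtils.convertSize(status.getLen()));
		}
	}
}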

2、HdfsFileServiceImpl.java (service layer)

package com.haodisk.service.impl;
import java.io.*;
//remaining imports omitted...
@Service
@Transactional
@Scope("singleton")
public class HdfsFileServiceImpl implements IHdfsFileService {

	protected Config config;
	protected FileSystem fs;

	/**
	 * Recursively deletes the file or directory at the given HDFS path.
	 */
	public void deleteFile(String path) throws HDFSException {
		Path hdfsPath = new Path(path);
		try {
			getFs().delete(hdfsPath, true);
		} catch (IOException e) {
			e.printStackTrace();
			throw new HDFSException("HDFS error", e);
		}
	}

	/**
	 * Writes the uploaded stream to a new file under /person and returns its HDFS path.
	 */
	public String createPersonFile(FileInputStream fileInputStream) throws Exception {
		String path = "/person/" + UUID.randomUUID();
		Path hdfsPath = new Path(path);
		try {
			// copyBytes closes both streams when it finishes
			FSDataOutputStream out = getFs().create(hdfsPath);
			IOUtils.copyBytes(fileInputStream, out, HdfsUtils.getConfiguration());
		} catch (IOException e) {
			e.printStackTrace();
			throw new HDFSException("HDFS error", e);
		}
		return path;
	}

	/**
	 * Writes the uploaded stream to a new file under /dept and returns its HDFS path.
	 */
	public String createDeptFile(FileInputStream fileInputStream) throws Exception {
		String path = "/dept/" + UUID.randomUUID();
		Path hdfsPath = new Path(path);
		try {
			FSDataOutputStream out = getFs().create(hdfsPath);
			IOUtils.copyBytes(fileInputStream, out, HdfsUtils.getConfiguration());
		} catch (IOException e) {
			e.printStackTrace();
			throw new HDFSException("HDFS error", e);
		}
		return path;
	}

	/**
	 * Writes the uploaded stream to a new file under /group and returns its HDFS path.
	 */
	public String createGroupFile(FileInputStream fileInputStream) throws Exception {
		String path = "/group/" + UUID.randomUUID();
		Path hdfsPath = new Path(path);
		try {
			FSDataOutputStream out = getFs().create(hdfsPath);
			IOUtils.copyBytes(fileInputStream, out, HdfsUtils.getConfiguration());
		} catch (IOException e) {
			e.printStackTrace();
			throw new HDFSException("HDFS error", e);
		}
		return path;
	}

	/**
	 * Opens the file at the given HDFS path and returns it as an InputStream.
	 */
	public InputStream downloadFile(String path) throws Exception {
		FSDataInputStream in = null;
		Path hdfsPath = new Path(path);
		try {
			in = getFs().open(hdfsPath);
		} catch (IOException e) {
			e.printStackTrace();
			throw new HDFSException("HDFS error", e);
		}
		return in;
	}

	public Config getConfig() {
		// The web layer stores the Config object in application scope under the key "config"
		return (Config) ActionContext.getContext().getApplication().get("config");
	}

	public void setConfig(Config config) {
		this.config = config;
	}

	public FileSystem getFs() {
		if (fs != null) return fs;
		try {
			fs = HdfsUtils.getFileSystem(getConfig().getHdfsUrl());
		} catch (IOException | URISyntaxException e) {
			// Fail fast instead of silently returning null
			throw new RuntimeException("Failed to initialize the HDFS FileSystem", e);
		}
		return fs;
	}
	public void setFs(FileSystem fs) {
		this.fs = fs;
	}
}
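
For completeness, a hypothetical caller sketch is shown below: it uploads a local file through the service and reads it back. The demo class, the @Autowired injection, and the local file path are illustration-only assumptions; in the original project the service is called from the web layer.

import java.io.FileInputStream;
import java.io.InputStream;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

// Illustration only: this caller class and the local path are assumptions.
@Component
public class HdfsFileServiceDemo {

	@Autowired
	private IHdfsFileService hdfsFileService;

	public void demo() throws Exception {
		String hdfsPath;
		try (FileInputStream in = new FileInputStream("/tmp/avatar.png")) {
			hdfsPath = hdfsFileService.createPersonFile(in);   // returns e.g. /person/<uuid>
		}
		try (InputStream in = hdfsFileService.downloadFile(hdfsPath)) {
			// consume the stream here, e.g. copy it into the HTTP response
		}
		hdfsFileService.deleteFile(hdfsPath);                   // clean up afterwards
	}
}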

For an introduction to the Hadoop-based enterprise network disk, see http://blog.toocruel.net/haodisk

Demo: http://haodisk.toocruel.net

(End of article)

(When reposting articles from this site, please credit the author and source: Hadoop HDFS file system operations (under the Spring framework))