Building the Eclipse plugin — Hadoop 0.20.2 edition
package com.kingdee.hadoop;

import java.io.BufferedReader;
import java.io.IOException;
import java.io.InputStream;
import java.io.InputStreamReader;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
/**
*
*The utilities to operate file on hadoop hdfs.
*
*@author luolihui 2011-07-18
*
*/
public> privatestatic final String ROOT_PATH = "hdfs:///";
privatestatic final int BUFFER_SIZE = 4096;
/**
* construct.
*/
publicDFSOperator(){}
/**
* Create a file on hdfs.The root path is /.
* for example: DFSOperator.createFile("/lory/test1.txt",true);
* @param paththe file name to open
* @param overwrite if a file with this namealready exists, then if true, the file will be
* @return true if delete is successful elseIOException.
* @throws IOException
*/
publicstatic boolean createFile(String path, boolean overwrite) throws IOException
{
//Stringuri = "hdfs://192.168.1.100:9000";
//FileSystemfs1 = FileSystem.get(URI.create(uri), conf);
Configurationconf = new Configuration();
FileSystemfs = FileSystem.get(conf);
Pathf = new Path(ROOT_PATH + path);
fs.create(f,overwrite);
fs.close();
returntrue;
}
/**
* Delete a file on hdfs.The root path is /.
* for example: DFSOperator.deleteFile("/user/hadoop/output",true);
* @param path the path to delete
* @param recursiveif path is adirectory and set to true, the directory is deleted else throws an exception.In case of a file the recursive can be set to either true or false.
* @return true if delete is successful else IOException.
* @throws IOException
*/
publicstatic boolean deleteFile(String path, boolean recursive) throws IOException
{
//Stringuri = "hdfs://192.168.1.100:9000";
//FileSystemfs1 = FileSystem.get(URI.create(uri), conf);
Configurationconf = new Configuration();
FileSystemfs = FileSystem.get(conf);
Pathf = new Path(ROOT_PATH + path);
fs.delete(f,recursive);
fs.close();
returntrue;
}
/**
* Read a file to string on hadoop hdfs. Fromstream to string.
* for example:System.out.println(DFSOperator.readDFSFileToString("/user/hadoop/input/test3.txt"));
* @param path the path to read
* @return true if read is successful elseIOException.
* @throws IOException
*/
publicstatic String readDFSFileToString(String path) throws IOException
{
Configurationconf = new Configuration();
FileSystemfs = FileSystem.get(conf);
Pathf = new Path(ROOT_PATH + path);
InputStreamin = null;
Stringstr = null;
StringBuildersb = new StringBuilder(BUFFER_SIZE);
if(fs.exists(f))
{
in= fs.open(f);
BufferedReaderbf = new BufferedReader(new InputStreamReader(in));
while((str = bf.readLine()) != null)
{
sb.append(str);
sb.append("\n");
}
in.close();
bf.close();
fs.close();
returnsb.toString();
}
else
{
returnnull;
}
}
/**
* Write string to a hadoop hdfs file.
* for example:DFSOperator.writeStringToDFSFile("/lory/test1.txt", "You are abad man.\nReally!\n");
* @param path the file where the string towrite in.
* @param string the context to write in afile.
* @return true if write is successful elseIOException.
* @throws IOException
*/
publicstatic boolean writeStringToDFSFile(String path, String string) throwsIOException
{
Configurationconf = new Configuration();
FileSystemfs = FileSystem.get(conf);
FSDataOutputStreamos = null;
Pathf = new Path(ROOT_PATH + path);
os= fs.create(f,true);
os.writeBytes(string);
os.close();
fs.close();
returntrue;
}
publicstatic void main(String[] args)
{
try{
DFSOperator.createFile("/lory/test1.txt",true);
DFSOperator.deleteFile("/dfs_operator.txt",true);
DFSOperator.writeStringToDFSFile("/lory/test1.txt","You are a bad man.\nReally?\n");
System.out.println(DFSOperator.readDFSFileToString("/lory/test1.txt"));
}catch (IOException e) {
//TODO Auto-generated catch block
e.printStackTrace();
}
System.out.println("===end===");
}
}
Page:
[1]