package hdfsTest;

import java.io.IOException;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileStatus;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class OperatingFiles {
    // initialization: load the cluster configuration and open the FileSystem once
    static Configuration conf = new Configuration();
    static FileSystem hdfs;
    static {
        String path = "/usr/java/hadoop-1.0.3/conf/";
        conf.addResource(new Path(path + "core-site.xml"));
        conf.addResource(new Path(path + "hdfs-site.xml"));
        conf.addResource(new Path(path + "mapred-site.xml"));
        path = "/usr/java/hbase-0.90.3/conf/";
        conf.addResource(new Path(path + "hbase-site.xml"));
        try {
            hdfs = FileSystem.get(conf);
        } catch (IOException e) {
            e.printStackTrace();
        }
    }

    // create a directory
    public void createDir(String dir) throws IOException {
        Path path = new Path(dir);
        hdfs.mkdirs(path);
        System.out.println("new dir \t" + conf.get("fs.default.name") + dir);
    }

    // copy a local file into HDFS
    public void copyFile(String localSrc, String hdfsDst) throws IOException {
        Path src = new Path(localSrc);
        Path dst = new Path(hdfsDst);
        hdfs.copyFromLocalFile(src, dst);

        // list all the files in the target directory
        FileStatus files[] = hdfs.listStatus(dst);
        System.out.println("Upload to \t" + conf.get("fs.default.name") + hdfsDst);
        for (FileStatus file : files) {
            System.out.println(file.getPath());
        }
    }

    // create a new file and write the given content to it
    public void createFile(String fileName, String fileContent) throws IOException {
        Path dst = new Path(fileName);
        byte[] bytes = fileContent.getBytes();
        FSDataOutputStream output = hdfs.create(dst);
        output.write(bytes);
        output.close(); // close the stream so the data is actually flushed to HDFS
        System.out.println("new file \t" + conf.get("fs.default.name") + fileName);
    }

    // list all files in a directory
    public void listFiles(String dirName) throws IOException {
        Path f = new Path(dirName);
        FileStatus[] status = hdfs.listStatus(f);
        System.out.println(dirName + " has all files:");
        for (int i = 0; i < status.length; i++) {
            System.out.println(status[i].getPath().toString());
        }
    }
}
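
To exercise the class end to end, a main method along the following lines could be added to OperatingFiles. This is a minimal sketch, assuming the configuration files above point at a running cluster; the HDFS paths, local path, and file content are hypothetical example values.

    // Hypothetical usage sketch: paths and content are example values only,
    // assuming the cluster configured above is reachable.
    public static void main(String[] args) throws IOException {
        OperatingFiles ops = new OperatingFiles();
        ops.createDir("/test");                            // create an HDFS directory
        ops.copyFile("/home/user/local.txt", "/test");     // upload a local file into it
        ops.createFile("/test/hello.txt", "hello hdfs");   // write a new HDFS file
        ops.listFiles("/test");                            // print the directory contents
    }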