package zhouls.bigdata.myWholeHadoop.HDFS.hdfs3;

import java.io.FileInputStream;
import java.io.InputStream;
import java.io.OutputStream;
import java.net.URI;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IOUtils;

/**
 * Uploads a local file to an HA-enabled HDFS cluster.
 *
 * <p>The client-side configuration below declares the logical nameservice
 * {@code ns1} backed by two NameNodes ({@code nn1} on hadoop01:9000 and
 * {@code nn2} on hadoop02:9000), so the client can fail over between them
 * without knowing which one is currently active.
 *
 * <p>NOTE(review): the original class declaration was garbled in this copy
 * ("public>"); the name {@code HdfsUtilHA} is a reconstruction — confirm
 * against the original source/file name.
 */
public class HdfsUtilHA {

    /**
     * Connects to the {@code ns1} HA nameservice as user "hadoop" and copies
     * the local file {@code D://eclipse.rar} to {@code /eclipse} on HDFS.
     *
     * @param args unused
     * @throws Exception if the local file is missing, the cluster is
     *         unreachable, or the copy fails
     */
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // HA client config: the default FS is the logical nameservice, not a host.
        conf.set("fs.defaultFS", "hdfs://ns1");
        conf.set("dfs.nameservices", "ns1");
        conf.set("dfs.ha.namenodes.ns1", "nn1,nn2");
        conf.set("dfs.namenode.rpc-address.ns1.nn1", "hadoop01:9000");
        conf.set("dfs.namenode.rpc-address.ns1.nn2", "hadoop02:9000");
        // Proxy provider that transparently fails over between nn1 and nn2.
        conf.set("dfs.client.failover.proxy.provider.ns1",
                "org.apache.hadoop.hdfs.server.namenode.ha.ConfiguredFailoverProxyProvider");

        // Act as HDFS user "hadoop" regardless of the local OS user.
        FileSystem fs = FileSystem.get(new URI("hdfs://ns1"), conf, "hadoop");
        try {
            // try-with-resources closes both streams even if fs.create()
            // or the copy throws (the original leaked `in` in that case).
            try (InputStream in = new FileInputStream("D://eclipse.rar");
                 OutputStream out = fs.create(new Path("/eclipse"))) {
                // closeStream=false: closing is owned by try-with-resources.
                IOUtils.copyBytes(in, out, 4096, false);
            }
        } finally {
            fs.close();
        }
    }
}