8_4 Reading and writing files from Java with the Hadoop HDFS API
- Add the corresponding Maven dependencies
```xml
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-common</artifactId>
    <version>2.9.2</version>
</dependency>
<!-- https://mvnrepository.com/artifact/org.apache.hadoop/hadoop-hdfs -->
<dependency>
    <groupId>org.apache.hadoop</groupId>
    <artifactId>hadoop-hdfs</artifactId>
    <version>2.9.2</version>
</dependency>
```
- HdfsUtils
```java
import java.io.IOException;

import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;

public class HdfsUtils {

    // Create a directory (and any missing parents) on HDFS
    public boolean mkdirs(FileSystem fs, String path) throws IOException {
        return fs.mkdirs(new Path(path));
    }
}
```
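A short, hypothetical usage sketch (not in the original notes) showing how this utility might be driven; it assumes the same `hdfs://hadoop:9000` NameNode address that the tests below use:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;

public class HdfsUtilsExample {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        // Assumed NameNode address, matching fs.defaultFS in the tests below
        conf.set("fs.defaultFS", "hdfs://hadoop:9000");
        try (FileSystem fs = FileSystem.get(conf)) {
            System.out.println(new HdfsUtils().mkdirs(fs, "/a/b"));
        }
    }
}
```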
- Test class: HdfsUtilsTest
```java
import java.io.File;
import java.io.FileInputStream;
import java.io.InputStream;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FSDataOutputStream;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.junit.After;
import org.junit.Assert;
import org.junit.Before;
import org.junit.Test;

public class HdfsUtilsTest {

    static {
        System.setProperty("HADOOP_USER_NAME", "hadoop");
    }

    private final int BUFF_SIZE = 4096;
    private FileSystem fs;

    @Before
    public void setup() throws Exception {
        System.out.println("Before execute");
        Configuration conf = new Configuration();
        conf.set("fs.defaultFS", "hdfs://hadoop:9000");
        fs = FileSystem.get(conf);
    }

    @After
    public void destroy() throws Exception {
        System.out.println("After execute");
        fs.close();
    }

    @Test
    public void testMkdir() throws Exception {
        fs.mkdirs(new Path("/a/b"));
        // The test passes when mkdirs returns true
        boolean result = fs.mkdirs(new Path("/b/c"));
        Assert.assertTrue(result);
    }

    // Upload: copy a local file to the HDFS root and verify it arrived
    @Test
    public void upload() throws Exception {
        String fileName = "pom.xml";
        fs.copyFromLocalFile(new Path(fileName), new Path("/"));
        boolean exists = fs.exists(new Path("/", fileName));
        Assert.assertTrue(exists);
    }

    // Download: copy a file from HDFS back to the local file system
    @Test
    public void download() throws Exception {
        fs.copyToLocalFile(new Path("/pom.xml"), new Path("pom-copy.xml"));
    }

    // Delete a path on HDFS; the second argument enables recursive deletion
    @Test
    public void delete() throws Exception {
        fs.delete(new Path("/a/b"), true);
    }

    // Write: stream a local file into HDFS through an output stream
    @Test
    public void write() throws Exception {
        InputStream in = new FileInputStream(new File("D:\\python"));
        // Output stream to the target HDFS file
        FSDataOutputStream out = fs.create(new Path("/upload"));
        byte[] buf = new byte[BUFF_SIZE];
        int bytesRead = in.read(buf);
        while (bytesRead > 0) {
            // Write only the bytes actually read, not the whole buffer
            out.write(buf, 0, bytesRead);
            bytesRead = in.read(buf);
        }
        in.close();
        out.close();
    }
}
```
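The write() test streams a local file into HDFS; reading it back uses the symmetric fs.open() call. A minimal companion sketch (not in the original notes) that could sit in the same test class, assuming the /upload file written above exists; it additionally needs imports for org.apache.hadoop.fs.FSDataInputStream and org.apache.hadoop.io.IOUtils:

```java
// Hypothetical read-back test; assumes /upload was created by write() above
@Test
public void read() throws Exception {
    FSDataInputStream in = fs.open(new Path("/upload"));
    // copyBytes(in, out, bufferSize, close): the final true closes both streams
    IOUtils.copyBytes(in, System.out, BUFF_SIZE, true);
}
```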
- MapReduce
```java
import java.io.IOException;
import java.util.StringTokenizer;

import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Mapper;
import org.apache.hadoop.mapreduce.Reducer;

public class WordCountMapper extends Mapper<Object, Text, Text, IntWritable> {

    private static final IntWritable ONE = new IntWritable(1);
    private final Text word = new Text();

    // The Object key is the byte offset of the line; the Text value is the line itself
    @Override
    protected void map(Object key, Text value, Context context)
            throws IOException, InterruptedException {
        StringTokenizer itr = new StringTokenizer(value.toString());
        while (itr.hasMoreTokens()) {
            word.set(itr.nextToken());
            context.write(word, ONE);
        }
    }
}

// In a real project this class lives in its own source file
public class WordCountReducer extends Reducer<Text, IntWritable, Text, IntWritable> {

    // All records that share the same key arrive at the same reduce call
    @Override
    protected void reduce(Text key, Iterable<IntWritable> values, Context context)
            throws IOException, InterruptedException {
        int sum = 0;
        for (IntWritable val : values) {
            sum += val.get();
        }
        context.write(key, new IntWritable(sum));
    }
}
```
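Neither class runs on its own; a driver sets up the Job and wires the mapper and reducer together. A minimal driver sketch (not in the original notes), assuming the input and output paths are passed as command-line arguments:

```java
import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.io.IntWritable;
import org.apache.hadoop.io.Text;
import org.apache.hadoop.mapreduce.Job;
import org.apache.hadoop.mapreduce.lib.input.FileInputFormat;
import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat;

public class WordCountDriver {
    public static void main(String[] args) throws Exception {
        Configuration conf = new Configuration();
        Job job = Job.getInstance(conf, "word count");
        job.setJarByClass(WordCountDriver.class);
        job.setMapperClass(WordCountMapper.class);
        job.setReducerClass(WordCountReducer.class);
        job.setOutputKeyClass(Text.class);
        job.setOutputValueClass(IntWritable.class);
        // Input and output paths come from the command line (assumption)
        FileInputFormat.addInputPath(job, new Path(args[0]));
        FileOutputFormat.setOutputPath(job, new Path(args[1]));
        System.exit(job.waitForCompletion(true) ? 0 : 1);
    }
}
```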