1. win10开发环境准备工作
  2. maven依赖

    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-hdfs</artifactId>
        <version>2.9.0</version>
    </dependency>
    <dependency>
        <groupId>org.apache.hadoop</groupId>
        <artifactId>hadoop-client</artifactId>
        <version>2.9.0</version>
    </dependency>
    <dependency>
        <groupId>junit</groupId>
        <artifactId>junit</artifactId>
        <version>4.12</version>
    </dependency>
  3. API调用

    package com.diyishuai.hadoop.hdfs;

    import java.io.IOException;
    import java.net.URI;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;
    import org.junit.After;
    import org.junit.Before;
    import org.junit.Test;
    /**
     * Integration-style smoke tests for the HDFS Java client API: connects to a
     * remote NameNode and exercises file upload and download.
     *
     * <p>NOTE(review): requires a reachable cluster at {@code hdfs://server01:9000};
     * these are not self-contained unit tests.
     */
    public class HdfsClientDemo {

        // Client handle created in init() and released in tearDown().
        FileSystem fs = null;

        /**
         * Builds the FileSystem client before each test. Passing the URI and the
         * user ("root") explicitly avoids depending on a local core-site.xml.
         *
         * @throws Exception if the NameNode is unreachable or the URI is invalid
         */
        @Before
        public void init() throws Exception {
            Configuration conf = new Configuration();
            // conf.set("fs.defaultFS","hdfs://server01:9000");
            fs = FileSystem.get(new URI("hdfs://server01:9000"), conf, "root");
        }

        /**
         * Closes the client after each test. The original code never closed the
         * FileSystem, leaking the handle and its RPC connections between runs.
         *
         * @throws IOException if closing the underlying connection fails
         */
        @After
        public void tearDown() throws IOException {
            if (fs != null) {
                fs.close();
                fs = null;
            }
        }

        /** Uploads a local file to HDFS, storing it as {@code /hihi.txt}. */
        @Test
        public void testUpload() throws IOException {
            fs.copyFromLocalFile(new Path("C:\\Users\\Administrator\\Downloads\\apache-maven-3.5.2-bin.zip"), new Path("/hihi.txt"));
        }

        /** Downloads {@code /hihi.txt} from HDFS back to a local path. */
        @Test
        public void testDownload() throws IOException {
            fs.copyToLocalFile(new Path("/hihi.txt"), new Path("C:\\Users\\Administrator\\Downloads\\hihi-from-down.txt"));
        }
    }

今天编译win的hadoop包消耗了些元气,各种等啊等!
代码放在github上了,https://github.com/BestBurning/myworld/tree/master/hadoop