First, add the HBase dependencies (the full Maven snippet is listed after the code). The overall flow is:

1. Get an HBase connection object.

2. Read the local file.

3. Put the rows into the HBase table (create the table with its column family in HBase beforehand; see the sketch right after this list).

4. Release the resources.
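
For step 3, the target table must already exist. You can create it in the HBase shell with create 'tb_stu2','cf', or programmatically. Below is a minimal sketch using the standard HBase Admin API; the class name CreateTable is just for illustration, and it assumes the same ZooKeeper quorum as the loader below:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

public class CreateTable {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "doit01:2181,doit02:2181,doit03:2181");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Admin admin = conn.getAdmin()) {
            TableName table = TableName.valueOf("tb_stu2");
            // Create tb_stu2 with column family "cf" if it does not exist yet
            if (!admin.tableExists(table)) {
                admin.createTable(TableDescriptorBuilder.newBuilder(table)
                        .setColumnFamily(ColumnFamilyDescriptorBuilder.of("cf"))
                        .build());
            }
        }
    }
}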

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.*;

import java.io.BufferedReader;
import java.io.FileReader;
import java.util.ArrayList;
import java.util.List;

/**
 * @create: 2021-09-11 21:25
 * @author: liu_da_dai_xing_fu
 * @program: LoadData
 * @Description: Load a local text file into an HBase table via BufferedMutator
 **/
public class LoadData {
    public static void main(String[] args) throws Exception {
        // Get the HBase connection object
        Configuration conf = HBaseConfiguration.create();
        // Set the ZooKeeper quorum address
        conf.set("hbase.zookeeper.quorum", "doit01:2181,doit02:2181,doit03:2181");
        // Get the connection object
        Connection conn = ConnectionFactory.createConnection(conf);

        // Batch-write object: buffers mutations client-side before sending
        BufferedMutator mutator = conn.getBufferedMutator(TableName.valueOf("tb_stu2"));
        List<Mutation> list = new ArrayList<>();
        BufferedReader br = new BufferedReader(new FileReader("C:\\Users\\DHG\\Desktop\\user.txt"));
        String line = null;
        while ((line = br.readLine()) != null) {
            // e.g. u001,zss,23,M
            String[] split = line.split(",");
            Put put = new Put(split[0].getBytes()); // row key, e.g. u001
            put.addColumn("cf".getBytes(), "name".getBytes(), split[1].getBytes());
            put.addColumn("cf".getBytes(), "age".getBytes(), split[2].getBytes());
            put.addColumn("cf".getBytes(), "gender".getBytes(), split[3].getBytes());
            list.add(put);
        }
        br.close();

        mutator.mutate(list); // buffers the puts, similar to a put operation
        mutator.flush();      // manually flush to HBase (and on to HDFS)

        mutator.close(); // release resources
        conn.close();
    }
}
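
A note on BufferedMutator: it buffers puts on the client side and sends them when its internal write buffer fills or when flush()/close() is called. For larger files you may want to size that buffer explicitly. The lines below are a hypothetical drop-in replacement for the conn.getBufferedMutator(...) call above; the 4 MB value is only an illustrative assumption, not a recommendation:

        // Hypothetical tuning: size the client-side write buffer explicitly
        BufferedMutatorParams params = new BufferedMutatorParams(TableName.valueOf("tb_stu2"))
                .writeBufferSize(4 * 1024 * 1024); // auto-flush once ~4 MB of puts accumulate
        BufferedMutator mutator = conn.getBufferedMutator(params);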

The Maven dependencies to add are shown below:

<dependencies>
        <!--zookeeper-->
        <dependency>
            <groupId>org.apache.zookeeper</groupId>
            <artifactId>zookeeper</artifactId>
            <version>3.4.6</version>
        </dependency>
        <!--hadoop-->
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-auth</artifactId>
            <version>3.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-client</artifactId>
            <version>3.1.1</version>
        </dependency>
        <dependency>
            <groupId>org.apache.hadoop</groupId>
            <artifactId>hadoop-common</artifactId>
            <version>3.1.1</version>
        </dependency>
        <!-- HBase -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-client</artifactId>
            <version>2.2.5</version>
        </dependency>

        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-server</artifactId>
            <version>2.2.5</version>
        </dependency>
        <!-- Import data into HBase with MapReduce jobs -->
        <dependency>
            <groupId>org.apache.hbase</groupId>
            <artifactId>hbase-mapreduce</artifactId>
            <version>2.2.5</version>
        </dependency>
        <!-- JSON parsing -->
        <dependency>
            <groupId>com.google.code.gson</groupId>
            <artifactId>gson</artifactId>
            <version>2.8.5</version>
        </dependency>
    </dependencies>

    <build>
        <plugins>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-compiler-plugin</artifactId>
                <version>3.5.1</version>
                <configuration>
                    <source>1.8</source>
                    <target>1.8</target>
                </configuration>
            </plugin>
            <plugin>
                <groupId>org.apache.maven.plugins</groupId>
                <artifactId>maven-assembly-plugin</artifactId>
                <version>2.6</version>
                <configuration>
                    <!-- get all project dependencies -->
                    <descriptorRefs>
                        <descriptorRef>jar-with-dependencies</descriptorRef>
                    </descriptorRefs>
                    <!-- MainClass in manifest makes an executable jar -->
                    <archive>
                        <manifest>
                            <!--<mainClass>util.Microseer</mainClass> -->
                        </manifest>
                    </archive>
                </configuration>
                <executions>
                    <execution>
                        <id>make-assembly</id>
                        <!-- bind to the packaging phase -->
                        <phase>package</phase>
                        <goals>
                            <goal>single</goal>
                        </goals>
                    </execution>
                </executions>
            </plugin>
        </plugins>
    </build>

Contents of user.txt:

u001,zss,23,M
u002,lss,22,F
u003,lls,33,M
u004,cls,43,F

Run the program, and the local data is written into HBase.
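
To confirm the rows landed, a quick scan of the table works. This is a minimal verification sketch (the class name VerifyScan is hypothetical) that prints every cell in tb_stu2:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.*;
import org.apache.hadoop.hbase.client.*;
import org.apache.hadoop.hbase.util.Bytes;

public class VerifyScan {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        conf.set("hbase.zookeeper.quorum", "doit01:2181,doit02:2181,doit03:2181");
        try (Connection conn = ConnectionFactory.createConnection(conf);
             Table table = conn.getTable(TableName.valueOf("tb_stu2"));
             ResultScanner scanner = table.getScanner(new Scan())) {
            for (Result r : scanner) {
                for (Cell cell : r.rawCells()) {
                    // Prints row  cf:qualifier = value, e.g. u001  cf:name = zss
                    System.out.println(Bytes.toString(CellUtil.cloneRow(cell)) + "  " +
                            Bytes.toString(CellUtil.cloneFamily(cell)) + ":" +
                            Bytes.toString(CellUtil.cloneQualifier(cell)) + " = " +
                            Bytes.toString(CellUtil.cloneValue(cell)));
                }
            }
        }
    }
}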
