<ruby id="bdb3f"></ruby>

    <p id="bdb3f"><cite id="bdb3f"></cite></p>

      <p id="bdb3f"><cite id="bdb3f"><th id="bdb3f"></th></cite></p><p id="bdb3f"></p>
        <p id="bdb3f"><cite id="bdb3f"></cite></p>

          <pre id="bdb3f"></pre>
          <pre id="bdb3f"><del id="bdb3f"><thead id="bdb3f"></thead></del></pre>

          <ruby id="bdb3f"><mark id="bdb3f"></mark></ruby><ruby id="bdb3f"></ruby>
          <pre id="bdb3f"><pre id="bdb3f"><mark id="bdb3f"></mark></pre></pre><output id="bdb3f"></output><p id="bdb3f"></p><p id="bdb3f"></p>

          <pre id="bdb3f"><del id="bdb3f"><progress id="bdb3f"></progress></del></pre>

                <ruby id="bdb3f"></ruby>

                使用Java對Hbase進行增、刪、改、查,其中改就是增。<br/> **1. 隨便創建一個Maven項目,然后添加下面的依賴** *`pom.xml`* ```xml <properties> <maven.compiler.source>1.8</maven.compiler.source> <maven.compiler.target>1.8</maven.compiler.target> <hadoop.version>2.6.0</hadoop.version> <hive.version>1.1.0</hive.version> <hbase.version>1.2.0</hbase.version> </properties> <repositories> <repository> <id>cloudera</id> <url>https://repository.cloudera.com/artifactory/cloudera-repos/</url> </repository> </repositories> <dependencies> <!--hadoop--> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-common</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-client</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-hdfs</artifactId> <version>${hadoop.version}</version> </dependency> <!--日志--> <dependency> <groupId>commons-logging</groupId> <artifactId>commons-logging</artifactId> <version>1.2</version> </dependency> <!--MapReduce--> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-auth</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-mapreduce-client-core</artifactId> <version>${hadoop.version}</version> </dependency> <dependency> <groupId>org.apache.hadoop</groupId> <artifactId>hadoop-mapreduce-client-jobclient</artifactId> <version>${hadoop.version}</version> </dependency> <!-- https://mvnrepository.com/artifact/org.apache.zookeeper/zookeeper --> <!--zookeeper--> <dependency> <groupId>org.apache.zookeeper</groupId> <artifactId>zookeeper</artifactId> <version>3.4.5</version> <type>pom</type> </dependency> <!--hbase--> <dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-client</artifactId> <version>${hbase.version}</version> </dependency> <dependency> <groupId>org.apache.hbase</groupId> 
<artifactId>hbase-common</artifactId> <version>${hbase.version}</version> </dependency> <dependency> <groupId>org.apache.hbase</groupId> <artifactId>hbase-server</artifactId> <version>${hbase.version}</version> </dependency> <!--log4j--> <dependency> <groupId>log4j</groupId> <artifactId>log4j</artifactId> <version>1.2.17</version> </dependency> <!--測試--> <dependency> <groupId>junit</groupId> <artifactId>junit</artifactId> <version>4.11</version> <!--<scope>test</scope>--> </dependency> </dependencies> ``` *`resource/log4j.properties`* ```properties log4j.rootLogger=INFO, stdout log4j.appender.stdout=org.apache.log4j.ConsoleAppender log4j.appender.stdout.layout=org.apache.log4j.PatternLayout log4j.appender.stdout.layout.ConversionPattern=%d %p [%c] - %m%n log4j.appender.logfile=org.apache.log4j.FileAppender log4j.appender.logfile.File=target/spring.log log4j.appender.logfile.layout=org.apache.log4j.PatternLayout log4j.appender.logfile.layout.ConversionPattern=%d %p [%c] - %m%n ``` **2. Java代碼** *`hbase/HbaseApi.java`* ```java package hbase; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.hbase.*; import org.apache.hadoop.hbase.client.*; import org.apache.hadoop.hbase.util.Bytes; import org.junit.Test; import java.io.IOException; public class HbaseApi { /** * ========== 連接到Hbase ========== */ public Configuration createConnection(){ Configuration conf = HBaseConfiguration.create(); conf.set("hbase.zookeeper.quorum", "hadoop101"); conf.set("hbase.zookeeper.property.clientPort", "2181"); conf.set("hbase.master", "hadoop101:16000"); /* 也可以使用配置文件來代替上面的配置 conf.addResource(new Path("/opt/install/hbase/conf/hbase-site.xml")); conf.addResource(new Path("/opt/install/hadoop/etc/hadoop/core-site.xml")); */ return conf; } /** * ========== 建表 ========== */ @Test public void createTable() throws IOException { // 1. 創建連接 Connection conn = ConnectionFactory.createConnection(this.createConnection()); // 2. 創建admin Admin admin = conn.getAdmin(); // 3. 
添加表相關信息 HTableDescriptor student = new HTableDescriptor(TableName.valueOf("student")); // 表名 student.addFamily(new HColumnDescriptor("info")); // 列族 // student.addFamily(new HColumnDescriptor("score")); // 列族 // 4. 建表 admin.createTable(student); // 5. 關閉連接 conn.close(); } /** * ========== 添加數據 ========== */ @Test public void putDataToTable() throws IOException { // 1. 創建連接 Connection conn = ConnectionFactory.createConnection(this.createConnection()); // 2. 獲取表 Table student = conn.getTable(TableName.valueOf("student")); // 3. 創建數據 Put put = new Put(Bytes.toBytes("r00001")); // row_key put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("zhangsan")); put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("gender"), Bytes.toBytes("male")); put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("age"), Bytes.toBytes(11)); // 4. 添加 student.put(put); // 5. 關閉連接 conn.close(); } /** * ========== 獲取表數據 ========== */ @Test public void getDataFromTable() throws IOException{ // 1. 創建連接 Connection conn = ConnectionFactory.createConnection(this.createConnection()); // 2. 獲取表 Table student = conn.getTable(TableName.valueOf("student")); // 3. 讀取數據 Get get = new Get(Bytes.toBytes("r00001")); // 根據row_key來讀取 Result result = student.get(get); Cell[] cells = result.rawCells(); for(Cell cell : cells) { System.out.println("row_key = " + Bytes.toString(CellUtil.cloneRow(cell))); System.out.println("列族 = " + Bytes.toString(CellUtil.cloneFamily(cell))); System.out.println("列名 = " + Bytes.toString(CellUtil.cloneQualifier(cell))); // 需要注意的是在存儲age時是int數據,當用toString讀取時返回的是 該符號 // 應該使用Bytes.toInt(CellUtil.cloneValue(cell)) 讀取age才正常看到效果 System.out.println("value = " + Bytes.toString(CellUtil.cloneValue(cell))); System.out.println("-------------------"); } // 4. 關閉連接 conn.close(); } /** * ========== 刪除表 ========== * 無論表是否為空都可以刪除 */ @Test public void dropTable() throws IOException{ // 1. 創建連接 Connection conn = ConnectionFactory.createConnection(this.createConnection()); // 2. 
獲取Admin Admin admin = conn.getAdmin(); // 3. 禁用表 admin.disableTable(TableName.valueOf("student")); // 4. 刪除表 admin.deleteTable(TableName.valueOf("student")); // 5. 關閉連接 conn.close(); } } ```
                  <ruby id="bdb3f"></ruby>

                  <p id="bdb3f"><cite id="bdb3f"></cite></p>

                    <p id="bdb3f"><cite id="bdb3f"><th id="bdb3f"></th></cite></p><p id="bdb3f"></p>
                      <p id="bdb3f"><cite id="bdb3f"></cite></p>

                        <pre id="bdb3f"></pre>
                        <pre id="bdb3f"><del id="bdb3f"><thead id="bdb3f"></thead></del></pre>

                        <ruby id="bdb3f"><mark id="bdb3f"></mark></ruby><ruby id="bdb3f"></ruby>
                        <pre id="bdb3f"><pre id="bdb3f"><mark id="bdb3f"></mark></pre></pre><output id="bdb3f"></output><p id="bdb3f"></p><p id="bdb3f"></p>

                        <pre id="bdb3f"><del id="bdb3f"><progress id="bdb3f"></progress></del></pre>

                              <ruby id="bdb3f"></ruby>
