通过JavaAPI访问HBase

news/2024/5/20 4:29:31 标签: 大数据, hadoop, hdfs, hbase, hive

先开始创建表

create 'emp001','member_id','address','info'

放入数据

put 'emp001','Rain','member_id:id','31'
put 'emp001', 'Rain', 'info:birthday', '1990-05-01'
put 'emp001', 'Rain', 'info:industry', 'architect'
put 'emp001', 'Rain', 'info:city', 'ShenZhen'
put 'emp001', 'Rain', 'info:country', 'China'
get 'emp001','Rain','info'
scan 'emp001',{COLUMNS=> 'info:birthday'}

导入依赖

<dependency>
      <groupId>junit</groupId>
      <artifactId>junit</artifactId>
      <version>3.8.1</version>
      <scope>test</scope>
    </dependency>
    <dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-client</artifactId>
    <version>1.3.1</version>
    </dependency>
    <dependency>
    <groupId>org.apache.hbase</groupId>
    <artifactId>hbase-server</artifactId>
    <version>1.3.1</version>
    </dependency>
  </dependencies>
  <build>
  <plugins>
  <plugin>
  <groupId>org.apache.maven.plugins</groupId>
  <artifactId>maven-shade-plugin</artifactId>
  <version>2.4.3</version>
  <executions>
  <execution>
  <phase>package</phase>
  <goals>
  <goal>shade</goal>
  </goals>
  <configuration>
  <transformers>
  <transformer implementation="org.apache.maven.plugins.shade.resource.ManifestResourceTransformer">
  <mainClass>com.www</mainClass>
  </transformer>
  </transformers>
  </configuration>
  </execution>
  </executions>
  </plugin>
  </plugins>
  </build>

创建表

package com.dd12345.aaa;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class CreateTable {
	/**
	 * Creates an HBase table named {@code student} with two column
	 * families, {@code info} and {@code grade}, using the classic
	 * HBase 1.x admin API.
	 *
	 * @param args unused
	 * @throws Exception if the cluster cannot be reached or the table
	 *                   already exists
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// Fixed typo: "locslhost" -> "localhost".
		conf.set("hbase.rootdir", "hdfs://localhost:8020/hbase");
		// HBase clients locate the cluster through ZooKeeper; hbase.rootdir
		// alone is a server-side setting and is not enough for a client.
		conf.set("hbase.zookeeper.quorum", "localhost");
		HBaseAdmin client = new HBaseAdmin(conf);
		try {
			HTableDescriptor htd = new HTableDescriptor(TableName.valueOf("student"));
			htd.addFamily(new HColumnDescriptor("info"));
			htd.addFamily(new HColumnDescriptor("grade"));
			client.createTable(htd);
		} finally {
			// Release the connection even if createTable throws.
			client.close();
		}
	}
}

插入一条数据

package com.dd12345.aaa;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Put;
import org.apache.hadoop.hbase.util.Bytes;

public class insertOne {

		/**
		 * Inserts a single cell into the {@code student} table:
		 * row key {@code stu001}, column {@code info:name}, value {@code Tom}.
		 *
		 * @param args unused
		 * @throws Exception if the cluster cannot be reached
		 */
		public static void main(String[] args) throws Exception {
			Configuration conf = new Configuration();
			// Fixed typo: "locslhost" -> "localhost".
			conf.set("hbase.rootdir", "hdfs://localhost:8020/hbase");
			// Clients connect via ZooKeeper, so the quorum must be configured.
			conf.set("hbase.zookeeper.quorum", "localhost");
			HTable table = new HTable(conf, "student");
			try {
				Put put = new Put(Bytes.toBytes("stu001"));
				put.addColumn(Bytes.toBytes("info"), Bytes.toBytes("name"), Bytes.toBytes("Tom"));
				table.put(put);
			} finally {
				// Close the table even if the put fails.
				table.close();
			}
		}

	}

查找数据

package com.dd12345.aaa;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.Get;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.util.Bytes;

public class findUp {
	/**
	 * Reads back the {@code info:name} cell of row {@code stu001} from the
	 * {@code student} table and prints it to stdout.
	 *
	 * @param args unused
	 * @throws Exception if the cluster cannot be reached
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// Fixed typo: "locslhost" -> "localhost".
		conf.set("hbase.rootdir", "hdfs://localhost:8020/hbase");
		// Clients connect via ZooKeeper, so the quorum must be configured.
		conf.set("hbase.zookeeper.quorum", "localhost");
		HTable table = new HTable(conf, "student");
		try {
			Get get = new Get(Bytes.toBytes("stu001"));
			Result record = table.get(get);
			// getValue returns null when the cell is absent; Bytes.toString(null)
			// then yields null, which prints as "null".
			String name = Bytes.toString(record.getValue(Bytes.toBytes("info"), Bytes.toBytes("name")));
			System.out.println(name);
		} finally {
			table.close();
		}
	}

}

扫描数据

package com.dd12345.aaa;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.client.HTable;
import org.apache.hadoop.hbase.client.Result;
import org.apache.hadoop.hbase.client.ResultScanner;
import org.apache.hadoop.hbase.client.Scan;
import org.apache.hadoop.hbase.util.Bytes;

public class Scann {
	/**
	 * Full scan of the {@code student} table, printing each row's
	 * {@code info:name} and {@code info:age} cells.
	 *
	 * @param args unused
	 * @throws Exception if the cluster cannot be reached
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// Fixed typo: "locslhost" -> "localhost".
		conf.set("hbase.rootdir", "hdfs://localhost:8020/hbase");
		// Clients connect via ZooKeeper, so the quorum must be configured.
		conf.set("hbase.zookeeper.quorum", "localhost");
		HTable table = new HTable(conf, "student");
		try {
			Scan scanner = new Scan();
			ResultScanner rs = table.getScanner(scanner);
			try {
				for (Result r : rs) {
					// Missing cells come back null and print as "null".
					String name = Bytes.toString(r.getValue(Bytes.toBytes("info"), Bytes.toBytes("name")));
					String age = Bytes.toString(r.getValue(Bytes.toBytes("info"), Bytes.toBytes("age")));
					System.out.println(name + "   " + age);
				}
			} finally {
				// ResultScanner holds server-side resources; always close it.
				rs.close();
			}
		} finally {
			table.close();
		}
	}

}

删除表

package com.dd12345.aaa;

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HColumnDescriptor;
import org.apache.hadoop.hbase.HTableDescriptor;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.HBaseAdmin;

public class delete {
	/**
	 * Drops the {@code student} table. HBase requires a table to be
	 * disabled before it can be deleted.
	 *
	 * @param args unused
	 * @throws Exception if the cluster cannot be reached or the table
	 *                   does not exist
	 */
	public static void main(String[] args) throws Exception {
		Configuration conf = new Configuration();
		// Fixed typo: "locslhost" -> "localhost".
		conf.set("hbase.rootdir", "hdfs://localhost:8020/hbase");
		// Clients connect via ZooKeeper, so the quorum must be configured.
		conf.set("hbase.zookeeper.quorum", "localhost");
		HBaseAdmin client = new HBaseAdmin(conf);
		try {
			client.disableTable("student");
			// Removed stray extra semicolon after this call.
			client.deleteTable("student");
		} finally {
			client.close();
		}
	}

}

成功截图
在这里插入图片描述
有问题私聊我!


http://www.niftyadmin.cn/n/1585110.html

相关文章

flume的配置与安装

一.flume的配置与案例1 下载flume包 http://mirrors.tuna.tsinghua.edu.cn/apache/flume/1.8.0/apache-flume-1.8.0-bin.tar.gz1.将压缩包放在ubunta下 cd ~ tar -zxvf apache-flume-1.8.0-bin.tar.gz -C ~ln -s apache-flume-1.8.0-bin/ flumevi ~/.bashrc source ~/.bashrc…

关于SWT/JFace中其他常用的事件

1.addSelectionListener:这个监听器最常用. 这个addSelectionListener是一个方法,addSelectionListener(SelectionListener listener)传入的是一个 SelectionListener类型的参数对象.SelectionAdapter实现了这个SelectionListener接口. SelectionListener接口中有下面的这个两个…

maven 报错 To see the full stack trace of the errors, re-run Maven with the -e switch.Re-run Maven usi

1.如果你的首先在Maven的setting.xml里设置新版HTTPS的阿里云仓库配置就直接进行第二个步骤 如果没配置就进行配置 <mirror> <id>aliyunmaven</id> <mirrorOf>*</mirrorOf> <name>阿里云公共仓库</name> <url&g…

spring aop 配置

Spring的AOP分为注解和配置两种方式实现. 网上注解的例子比较多.看了视频, 写了个简单的以备后用.Common.java 普通的类package trytry.aop; /** * author 李晨 * version 创建时间&#xff1a;Jul 28, 2009 3:01:01 PM */ public class Common { public void …

如何集中查看NTFS权限

【摘要】 NTFS权限&#xff0c;作为文件访问控制的手段&#xff0c;已被管理员广泛使用。在企业的文件服务器上&#xff0c;当文件较多&#xff0c;且NTFS权限配置较复杂时&#xff0c;如何方便快捷的查看用户的有效权限&#xff0c;逐渐成为IT管理员需要考虑的问题&#xff0c…

解决idea application context not configured for this file的问题

spring配置文件中时常会出现这个提示&#xff0c;翻译过来大概意思就是没有配置该文件到项目中 于是进入到file-Project Structure中查看 可以很明显的看到下面有个感叹号&#xff0c;大概意思是下面的文件没有匹配 知道原因就很好解决问题了&#xff0c;只需要加到项目中去…

Maven 中启动 Tomcat,控制台和日志打印信息出现乱码的解决方法

找到intellij idea 的 file—settings—Editor—FileEncodings的GlobalEncoding和ProjectEncoding和Default encoding for properties都配置成UTF-8 步骤2&#xff1a; 找到自己的IDEA目录IDEA\IntelliJ IDEA xxx\bin&#xff0c;编辑器打开文件&#xff0c;然后末行追加上 -D…

第十章:配置SpringBoot支持自动装载Servlet

Web 技术成为当今主流的互联网 Web 应用技术之一&#xff0c;而 Servlet 是 Java Web 技术的核心基础之一。Servlet是Java编写服务器端的程序组件&#xff0c;主要功能在于交互式数据操作&#xff0c;动态生成WEB内容等。传统的Spring项目&#xff0c;配置Servlet比较繁琐&…