0

Ich versuche, mich von Scala-Code aus mit HBase zu verbinden, erhalte aber den unten stehenden Fehler. HBase-Scala-Verbindungsproblem in der Cloudera-Quickstart-VM CDH 5.8.0

17/03/28 11:40:53 INFO client.RpcRetryingCaller: Call exception, tries=30, retries=35, started=450502 ms ago, cancelled=false, msg= 
17/03/28 11:41:13 INFO client.RpcRetryingCaller: Call exception, tries=31, retries=35, started=470659 ms ago, cancelled=false, msg= 
17/03/28 11:41:33 INFO client.RpcRetryingCaller: Call exception, tries=32, retries=35, started=490824 ms ago, cancelled=false, msg= 
17/03/28 11:41:53 INFO client.RpcRetryingCaller: Call exception, tries=33, retries=35, started=510834 ms ago, cancelled=false, msg= 
17/03/28 11:42:13 INFO client.RpcRetryingCaller: Call exception, tries=34, retries=35, started=530956 ms ago, cancelled=false, msg= 
[error] (run-main-0) org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=35, exceptions: 
[error] Tue Mar 28 11:33:22 PDT 2017, RpcRetryingCaller{globalStartTime=1490726002560, pause=100, retries=35}, org.apache.hadoop.hbase.MasterNotRunningException: com.google.protobuf.ServiceException: java.lang.NoClassDefFoundError: org/apache/hadoop/net/SocketInputWrapper 
[error] Tue Mar 28 11:33:23 PDT 2017, RpcRetryingCaller{globalStartTime=1490726002560, pause=100, retries=35}, org.apache.hadoop.hbase.MasterNotRunningException: com.google.protobuf.ServiceException: java.lang.NoClassDefFoundError: org/apache/hadoop/net/SocketInputWrapper 
[error] Tue Mar 28 11:33:23 PDT 2017, RpcRetryingCaller{globalStartTime=1490726002560, pause=100, retries=35}, org.apache.hadoop.hbase.MasterNotRunningException: com.google.protobuf.ServiceException: java.lang.NoClassDefFoundError: org/apache/hadoop/net/SocketInputWrapper 
[error] Tue Mar 28 11:33:24 PDT 2017, RpcRetryingCaller{globalStartTime=1490726002560, pause=100, retries=35}, org.apache.hadoop.hbase.MasterNotRunningException: com.google.protobuf.ServiceException: java.lang.NoClassDefFoundError: org/apache/hadoop/net/SocketInputWrapper 
. 
. 
. 
. 
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:147) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4117) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4110) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:427) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:411) 
    at Hi$.main(hw.scala:12) 
    at Hi.main(hw.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
Caused by: org.apache.hadoop.hbase.MasterNotRunningException: com.google.protobuf.ServiceException: java.lang.NoClassDefFoundError: org/apache/hadoop/net/SocketInputWrapper 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStub(ConnectionManager.java:1560) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.makeStub(ConnectionManager.java:1580) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getKeepAliveMasterService(ConnectionManager.java:1737) 
    at org.apache.hadoop.hbase.client.MasterCallable.prepare(MasterCallable.java:38) 
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:124) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4117) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4110) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:427) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:411) 
    at Hi$.main(hw.scala:12) 
    at Hi.main(hw.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
Caused by: com.google.protobuf.ServiceException: java.lang.NoClassDefFoundError: org/apache/hadoop/net/SocketInputWrapper 
    at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:239) 
    at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331) 
    at org.apache.hadoop.hbase.protobuf.generated.MasterProtos$MasterService$BlockingStub.isMasterRunning(MasterProtos.java:58383) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.isMasterRunning(ConnectionManager.java:1591) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStubNoRetries(ConnectionManager.java:1529) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStub(ConnectionManager.java:1551) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.makeStub(ConnectionManager.java:1580) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getKeepAliveMasterService(ConnectionManager.java:1737) 
    at org.apache.hadoop.hbase.client.MasterCallable.prepare(MasterCallable.java:38) 
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:124) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4117) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4110) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:427) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:411) 
    at Hi$.main(hw.scala:12) 
    at Hi.main(hw.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
Caused by: java.lang.NoClassDefFoundError: org/apache/hadoop/net/SocketInputWrapper 
    at org.apache.hadoop.hbase.ipc.RpcClientImpl.createConnection(RpcClientImpl.java:138) 
    at org.apache.hadoop.hbase.ipc.RpcClientImpl.getConnection(RpcClientImpl.java:1316) 
    at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1224) 
    at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226) 
    at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331) 
    at org.apache.hadoop.hbase.protobuf.generated.MasterProtos$MasterService$BlockingStub.isMasterRunning(MasterProtos.java:58383) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.isMasterRunning(ConnectionManager.java:1591) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStubNoRetries(ConnectionManager.java:1529) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStub(ConnectionManager.java:1551) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.makeStub(ConnectionManager.java:1580) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getKeepAliveMasterService(ConnectionManager.java:1737) 
    at org.apache.hadoop.hbase.client.MasterCallable.prepare(MasterCallable.java:38) 
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:124) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4117) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4110) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:427) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:411) 
    at Hi$.main(hw.scala:12) 
    at Hi.main(hw.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
Caused by: java.lang.ClassNotFoundException: org.apache.hadoop.net.SocketInputWrapper 
    at java.net.URLClassLoader$1.run(URLClassLoader.java:366) 
    at java.net.URLClassLoader$1.run(URLClassLoader.java:355) 
    at java.security.AccessController.doPrivileged(Native Method) 
    at java.net.URLClassLoader.findClass(URLClassLoader.java:354) 
    at java.lang.ClassLoader.loadClass(ClassLoader.java:425) 
    at java.lang.ClassLoader.loadClass(ClassLoader.java:358) 
    at org.apache.hadoop.hbase.ipc.RpcClientImpl.createConnection(RpcClientImpl.java:138) 
    at org.apache.hadoop.hbase.ipc.RpcClientImpl.getConnection(RpcClientImpl.java:1316) 
    at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1224) 
    at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:226) 
    at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:331) 
    at org.apache.hadoop.hbase.protobuf.generated.MasterProtos$MasterService$BlockingStub.isMasterRunning(MasterProtos.java:58383) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.isMasterRunning(ConnectionManager.java:1591) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStubNoRetries(ConnectionManager.java:1529) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$StubMaker.makeStub(ConnectionManager.java:1551) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation$MasterServiceStubMaker.makeStub(ConnectionManager.java:1580) 
    at org.apache.hadoop.hbase.client.ConnectionManager$HConnectionImplementation.getKeepAliveMasterService(ConnectionManager.java:1737) 
    at org.apache.hadoop.hbase.client.MasterCallable.prepare(MasterCallable.java:38) 
    at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:124) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4117) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.executeCallable(HBaseAdmin.java:4110) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:427) 
    at org.apache.hadoop.hbase.client.HBaseAdmin.listTables(HBaseAdmin.java:411) 
    at Hi$.main(hw.scala:12) 
    at Hi.main(hw.scala) 
    at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
    at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) 
    at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 
    at java.lang.reflect.Method.invoke(Method.java:606) 
[trace] Stack trace suppressed: run last compile:run for the full output. 
17/03/28 07:56:55 ERROR zookeeper.ClientCnxn: Event thread exiting due to interruption 
java.lang.InterruptedException 
    at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.reportInterruptAfterWait(AbstractQueuedSynchronizer.java:2017) 
    at java.util.concurrent.locks.AbstractQueuedSynchronizer$ConditionObject.await(AbstractQueuedSynchronizer.java:2052) 
    at java.util.concurrent.LinkedBlockingQueue.take(LinkedBlockingQueue.java:442) 
    at org.apache.zookeeper.ClientCnxn$EventThread.run(ClientCnxn.java:494) 
17/03/28 07:56:55 INFO zookeeper.ClientCnxn: EventThread shut down 
java.lang.RuntimeException: Nonzero exit code: 1 
    at scala.sys.package$.error(package.scala:27) 
[trace] Stack trace suppressed: run last compile:run for the full output. 
[error] (compile:run) Nonzero exit code: 1 
[error] Total time: 544 s, completed Mar 28, 2017 7:56:56 AM 

• Host-Betriebssystem ist Windows 7 mit 8 GB RAM und 64-Bit-Arch. Intel Core i5.
• Ich verwende die Cloudera Quickstart VM CDH 5.8.0 auf meinem Windows-Rechner.
• VM verwendet 6 GB RAM, 2 Prozessoren & 64 GB Festplatte.
• Dienste, die im Cloudera Manager ausgeführt werden:

Hbase 
    HDFS 
    YARN 
    Zookeeper 
    Key-Value Indexer 

• Dienste, die im Cloudera Manager gestoppt sind:

Hive 
    Hue 
    Impala 
    Oozie 
    Solr 
    Spark 
    Sqoop 1 Client 
    Sqoop 2 

• Hbase Version 1.2.0-cdh5.8.0
• Mein Client-Code liegt ausschließlich in der VM.
• Ich habe das Projekt mit sbt erstellt.
• Für die HBase-Anbindung mit Scala habe ich diese URL herangezogen: https://hbase.apache.org/book.html#scala
• CLASSPATH gesetzt. Die im Link erwähnte "/path/to/scala-library.jar" habe ich dabei nicht in den CLASSPATH aufgenommen.

$ export CLASSPATH=$CLASSPATH:/usr/lib/hadoop/lib/native:/usr/lib/hbase/lib/native/Linux-amd64-64 

• Projektstammverzeichnis =/home/cloudera/Desktop/Play-SBT-Projekt
• Mein/home/cloudera/Desktop/Play-SBT-Projekt/build.sbt sieht wie folgt aus. Ich habe die Version der abhängigen Bibliothek gemäß meiner Umgebung geändert. Ich habe einige weitere Abhängigkeiten wie "hbase-client", "hbase-common" & "hbase-server" als Teil der Fehlerbehandlung hinzugefügt, aber immer noch keinen Erfolg.

// build.sbt as posted in the question — this configuration FAILS at runtime.
// NOTE(review): "hadoop-core" 1.2.1 belongs to the Hadoop 1.x (MRv1) line,
// while CDH 5.8.0 ships Hadoop 2.6. The class
// org.apache.hadoop.net.SocketInputWrapper that the stack trace reports as
// missing does not exist in hadoop-core 1.2.1 — this version mismatch is the
// root cause (see the accepted answer below).
name := "play-sbt-project" 
version := "1.0" 
scalaVersion := "2.10.2" 
resolvers += "Apache HBase" at "https://repository.apache.org/content/repositories/releases" 
resolvers += "Thrift" at "http://people.apache.org/~rawson/repo/" 
libraryDependencies ++= Seq(
// Incompatible with the CDH 5.8.0 cluster (Hadoop 2.6) — replaced by
// hadoop-common 2.6.0-cdh5.8.0 in the fixed build below.
"org.apache.hadoop" % "hadoop-core" % "1.2.1", 
// Upstream HBase 1.2.0 artifacts; the fix swaps these for the
// CDH-built "1.2.0-cdh5.8.0" variants so client and cluster match.
"org.apache.hbase" % "hbase" % "1.2.0", 
"org.apache.hbase" % "hbase-client" % "1.2.0", 
"org.apache.hbase" % "hbase-common" % "1.2.0", 
"org.apache.hbase" % "hbase-server" % "1.2.0" 
) 

• Mein Hauptcode für Hbase Konnektivität /home/cloudera/Desktop/play-sbt-project/src/main/scala/pw.scala sieht wie folgt aus

import org.apache.hadoop.hbase.HBaseConfiguration 
import org.apache.hadoop.hbase.client.{ConnectionFactory,HBaseAdmin,HTable,Put,Get} 
import org.apache.hadoop.hbase.util.Bytes 

// Client code as posted in the question: connects to HBase using the
// hbase-site.xml found on the classpath and prints all table descriptors.
object Hi { 
// Entry point: opens an HBase connection, obtains an Admin handle and
// lists the existing tables (the listTables() call at hw.scala:12 is where
// the RetriesExhaustedException in the log above is thrown).
def main(args: Array[String]) = { 
println("Hi!") 
// NOTE(review): the HBaseConfiguration() constructor is deprecated —
// prefer the factory method HBaseConfiguration.create() (see accepted answer).
val conf = new HBaseConfiguration() 
// NOTE(review): neither connection nor admin is ever closed here; a real
// client should close both (e.g. in a finally block) to release resources.
val connection = ConnectionFactory.createConnection(conf); 
val admin = connection.getAdmin(); 
 
// list the tables 
val listtables=admin.listTables() 
listtables.foreach(println) 
} 
} 

• Meine/etc /hbase/conf/hbase-site.xml sieht wie folgt aus:

<?xml version="1.0" encoding="UTF-8"?> 

<!--Autogenerated by Cloudera Manager--> 
<configuration> 
    <property> 
    <name>hbase.rootdir</name> 
    <value>hdfs://quickstart.cloudera:8020/hbase</value> 
    </property> 
    <property> 
    <name>hbase.replication</name> 
    <value>true</value> 
    </property> 
    <property> 
    <name>hbase.client.write.buffer</name> 
    <value>2097152</value> 
    </property> 
    <property> 
    <name>hbase.client.pause</name> 
    <value>100</value> 
    </property> 
    <property> 
    <name>hbase.client.retries.number</name> 
    <value>35</value> 
    </property> 
    <property> 
    <name>hbase.client.scanner.caching</name> 
    <value>100</value> 
    </property> 
    <property> 
    <name>hbase.client.keyvalue.maxsize</name> 
    <value>10485760</value> 
    </property> 
    <property> 
    <name>hbase.ipc.client.allowsInterrupt</name> 
    <value>true</value> 
    </property> 
    <property> 
    <name>hbase.client.primaryCallTimeout.get</name> 
    <value>10</value> 
    </property> 
    <property> 
    <name>hbase.client.primaryCallTimeout.multiget</name> 
    <value>10</value> 
    </property> 
    <property> 
    <name>hbase.coprocessor.region.classes</name> 
    <value>org.apache.hadoop.hbase.security.access.SecureBulkLoadEndpoint</value> 
    </property> 
    <property> 
    <name>hbase.regionserver.thrift.http</name> 
    <value>false</value> 
    </property> 
    <property> 
    <name>hbase.thrift.support.proxyuser</name> 
    <value>false</value> 
    </property> 
    <property> 
    <name>hbase.rpc.timeout</name> 
    <value>60000</value> 
    </property> 
    <property> 
    <name>hbase.snapshot.enabled</name> 
    <value>true</value> 
    </property> 
    <property> 
    <name>hbase.snapshot.master.timeoutMillis</name> 
    <value>60000</value> 
    </property> 
    <property> 
    <name>hbase.snapshot.region.timeout</name> 
    <value>60000</value> 
    </property> 
    <property> 
    <name>hbase.snapshot.master.timeout.millis</name> 
    <value>60000</value> 
    </property> 
    <property> 
    <name>hbase.security.authentication</name> 
    <value>simple</value> 
    </property> 
    <property> 
    <name>hbase.rpc.protection</name> 
    <value>authentication</value> 
    </property> 
    <property> 
    <name>zookeeper.session.timeout</name> 
    <value>60000</value> 
    </property> 
    <property> 
    <name>zookeeper.znode.parent</name> 
    <value>/hbase</value> 
    </property> 
    <property> 
    <name>zookeeper.znode.rootserver</name> 
    <value>root-region-server</value> 
    </property> 
    <property> 
    <name>hbase.zookeeper.quorum</name> 
    <!-- <value>quickstart.cloudera</value> --> 
    <value>127.0.0.1</value> 
    </property> 
    <property> 
    <name>hbase.zookeeper.property.clientPort</name> 
    <value>2181</value> 
    </property> 
    <property> 
    <name>hbase.rest.ssl.enabled</name> 
    <value>false</value> 
    </property> 
</configuration> 

Ich habe viel gegoogelt, um dieses Problem zu lösen, aber ohne Erfolg. Beim Versuch, das Problem zu beheben, habe ich folgende Änderungen vorgenommen:
• Die Version der abhängigen Bibliotheken in der Datei "build.sbt" gemäß meiner Umgebung geändert
• Einige weitere abhängige Bibliotheken hinzugefügt: "hbase-client", "hbase-common" & "hbase-server".
• Den Wert "hbase.zookeeper.quorum" von "quickstart.cloudera" auf "127.0.0.1" in der Datei "hbase-site.xml" umgestellt.

Bitte helfen Sie mir, dieses Problem zu lösen. Vielen Dank.

+0

Ist das wirklich die Version von 'hadoop-core' Sie verwenden? Hadoop-Versionen sind nicht identisch mit HBase-Versionen. CDH5.8 basiert auf Hadoop 2.6. –

+0

@Joe Pallas, Danke für den Kommentar. Ich habe das Problem gelöst. 'hadoop-core' und kompatible JAR-Abhängigkeitsversionen waren eine der Änderungen, die ich zusammen mit einigen Codeänderungen vorgenommen habe. Ich poste meine Lösung. – kumarhimanshu449

Antwort

0

Das Problem wurde behoben. Folgende Änderungen müssen vorgenommen werden:

  1. Ändern Sie "hadoop-core" zu "hadoop-common" in der Datei build.sbt. In den neuesten CDH-Versionen wird "hadoop-core" nur von Code unterstützt, der für MapReduce 1 läuft.
  2. Ändern Sie alle Abhängigkeiten gemäß der Cloudera-5.8.0-Kompatibilität in build.sbt. Die aktualisierte build.sbt sieht wie folgt aus:

    // Fixed build.sbt: every artifact version now matches the CDH 5.8.0 stack,
    // which resolves the NoClassDefFoundError for
    // org.apache.hadoop.net.SocketInputWrapper seen in the question.
    name := "play-sbt-project" 
    version := "1.0" 
    scalaVersion := "2.10.2" 
    resolvers += "Thrift" at "http://people.apache.org/~rawson/repo/" 
    // Cloudera's repository hosts the "-cdh5.8.0" artifacts referenced below.
    resolvers += "Cloudera Repository" at "https://repository.cloudera.com/artifactory/cloudera-repos/" 
    
    // hadoop-common replaces hadoop-core: on recent CDH releases hadoop-core
    // is only meant for MapReduce 1 code (see item 1 above).
    libraryDependencies ++= Seq( 
    "org.apache.hadoop" % "hadoop-common" % "2.6.0-cdh5.8.0", 
    "org.apache.hbase" % "hbase" % "1.2.0-cdh5.8.0", 
    "org.apache.hbase" % "hbase-client" % "1.2.0-cdh5.8.0", 
    "org.apache.hbase" % "hbase-common" % "1.2.0-cdh5.8.0", 
    "org.apache.hbase" % "hbase-server" % "1.2.0-cdh5.8.0" 
    ) 
    
  3. Der Konstruktor HBaseConfiguration() ist veraltet (deprecated); verwenden Sie stattdessen die Methode create(). Außerdem habe ich die Logik im Hauptcode geändert: Ursprünglich wollte ich die Tabellen in HBase auflisten (das bereitete einige Probleme, daher habe ich es vorerst verworfen und werde es später erneut versuchen). Da mein Ziel ist, die Scala-zu-HBase-Konnektivität herzustellen, füge ich jetzt stattdessen eine neue Zeile in eine bereits existierende HBase-Tabelle ein. Der neue Code sieht wie folgt aus:

    package main.scala 
    
    import org.apache.hadoop.conf.Configuration 
    import org.apache.hadoop.hbase.HBaseConfiguration 
    import org.apache.hadoop.hbase.client.{ConnectionFactory,HTable,Put} 
    import org.apache.hadoop.hbase.util.Bytes 
    
    // Working client: inserts a single cell into the pre-existing HBase
    // table "emp1" to verify Scala-to-HBase connectivity.
    object Hi { 
    
    // Entry point: builds the configuration via the non-deprecated factory
    // method HBaseConfiguration.create(), writes one cell, prints "Success".
    def main(args: Array[String]) = { 
    println("Hi!") 
    val conf:Configuration = HBaseConfiguration.create() 
    // NOTE(review): HTable and Put.add are themselves deprecated in HBase 1.x;
    // Connection.getTable / Put.addColumn are the modern replacements — confirm
    // against the HBase 1.2 javadoc before reusing this snippet.
    val table:HTable = new HTable(conf, "emp1") 
    // Row key "row1", column family "personal_data", qualifier "qual1",
    // value "val1".
    val put1:Put = new Put(Bytes.toBytes("row1")) 
    put1.add(Bytes.toBytes("personal_data"),Bytes.toBytes("qual1"),Bytes.toBytes("val1")) 
    table.put(put1) 
    println("Success") 
    } 
    } 
    
Verwandte Themen