
Cannot connect the Pentaho Data Integration client to HBase on Hadoop

I am trying to load data from a local Pentaho Data Integration client into an HBase server (running on Hadoop), but every time I get a SocketTimeoutException. The full error message is below. Can anyone help me solve this problem? Thanks in advance.

org.apache.hadoop.hbase.client.RetriesExhaustedException: Failed after attempts=36, exceptions: 
Wed Apr 13 16:38:25 WEST 2016, null, java.net.SocketTimeoutException: callTimeout=60000, callDuration=75228: row 'pentaho_mappings,,' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=localhost,16020,1460561069506, seqNum=0 
at org.apache.hadoop.hbase.client.RpcRetryingCallerWithReadReplicas.throwEnrichedException(RpcRetryingCallerWithReadReplicas.java:270) 
at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:203) 
at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas.call(ScannerCallableWithReplicas.java:57) 
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200) 
at org.apache.hadoop.hbase.client.ClientScanner.call(ClientScanner.java:294) 
at org.apache.hadoop.hbase.client.ClientScanner.nextScanner(ClientScanner.java:269) 
at org.apache.hadoop.hbase.client.ClientScanner.initializeScannerInConstruction(ClientScanner.java:141) 
at org.apache.hadoop.hbase.client.ClientScanner.<init>(ClientScanner.java:136) 
at org.apache.hadoop.hbase.client.HTable.getScanner(HTable.java:886) 
at org.apache.hadoop.hbase.MetaTableAccessor.fullScan(MetaTableAccessor.java:601) 
at org.apache.hadoop.hbase.MetaTableAccessor.tableExists(MetaTableAccessor.java:365) 
at org.apache.hadoop.hbase.client.HBaseAdmin.tableExists(HBaseAdmin.java:310) 
at org.apache.hadoop.hbase.client.HBaseAdmin.tableExists(HBaseAdmin.java:320) 
at org.pentaho.hbase.shim.common.CommonHBaseConnection.tableExists(CommonHBaseConnection.java:182) 
at org.pentaho.hbase.shim.cdh54.HBaseConnectionImpl.access$801(HBaseConnectionImpl.java:32) 
at org.pentaho.hbase.shim.cdh54.HBaseConnectionImpl$9.call(HBaseConnectionImpl.java:172) 
at org.pentaho.hbase.shim.cdh54.HBaseConnectionImpl$9.call(HBaseConnectionImpl.java:168) 
at org.pentaho.hbase.shim.cdh54.HBaseConnectionImpl.doWithContextClassLoader(HBaseConnectionImpl.java:63) 
at org.pentaho.hbase.shim.cdh54.HBaseConnectionImpl.tableExists(HBaseConnectionImpl.java:168) 
at org.pentaho.hbase.mapping.MappingAdmin.getMappedTables(MappingAdmin.java:557) 
at org.pentaho.di.trans.steps.hbaseoutput.HBaseOutputDialog.setupMappedTableNames(HBaseOutputDialog.java:796) 
at org.pentaho.di.trans.steps.hbaseoutput.HBaseOutputDialog.access$900(HBaseOutputDialog.java:82) 
at org.pentaho.di.trans.steps.hbaseoutput.HBaseOutputDialog$7.widgetSelected(HBaseOutputDialog.java:383) 
at org.eclipse.swt.widgets.TypedListener.handleEvent(Unknown Source) 
at org.eclipse.swt.widgets.EventTable.sendEvent(Unknown Source) 
at org.eclipse.swt.widgets.Widget.sendEvent(Unknown Source) 
at org.eclipse.swt.widgets.Display.runDeferredEvents(Unknown Source) 
at org.eclipse.swt.widgets.Display.readAndDispatch(Unknown Source) 
at org.pentaho.di.trans.steps.hbaseoutput.HBaseOutputDialog.open(HBaseOutputDialog.java:587) 
at org.pentaho.di.ui.spoon.delegates.SpoonStepsDelegate.editStep(SpoonStepsDelegate.java:124) 
at org.pentaho.di.ui.spoon.Spoon.editStep(Spoon.java:8773) 
at org.pentaho.di.ui.spoon.trans.TransGraph.editStep(TransGraph.java:3061) 
at org.pentaho.di.ui.spoon.trans.TransGraph.mouseDoubleClick(TransGraph.java:747) 
at org.eclipse.swt.widgets.TypedListener.handleEvent(Unknown Source) 
at org.eclipse.swt.widgets.EventTable.sendEvent(Unknown Source) 
at org.eclipse.swt.widgets.Widget.sendEvent(Unknown Source) 
at org.eclipse.swt.widgets.Display.runDeferredEvents(Unknown Source) 
at org.eclipse.swt.widgets.Display.readAndDispatch(Unknown Source) 
at org.pentaho.di.ui.spoon.Spoon.readAndDispatch(Spoon.java:1328) 
at org.pentaho.di.ui.spoon.Spoon.waitForDispose(Spoon.java:8000) 
at org.pentaho.di.ui.spoon.Spoon.start(Spoon.java:9251) 
at org.pentaho.di.ui.spoon.Spoon.main(Spoon.java:663) 
at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 
at sun.reflect.NativeMethodAccessorImpl.invoke(Unknown Source) 
at sun.reflect.DelegatingMethodAccessorImpl.invoke(Unknown Source) 
at java.lang.reflect.Method.invoke(Unknown Source) 
at org.pentaho.commons.launcher.Launcher.main(Launcher.java:92) 
Caused by: java.net.SocketTimeoutException: callTimeout=60000, callDuration=75228: row 'pentaho_mappings,,' on table 'hbase:meta' at region=hbase:meta,,1.1588230740, hostname=localhost,16020,1460561069506, seqNum=0 
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:159) 
at org.apache.hadoop.hbase.client.ResultBoundedCompletionService$QueueingFuture.run(ResultBoundedCompletionService.java:64) 
at java.util.concurrent.ThreadPoolExecutor.runWorker(Unknown Source) 
at java.util.concurrent.ThreadPoolExecutor$Worker.run(Unknown Source) 
at java.lang.Thread.run(Unknown Source) 
Caused by: java.net.ConnectException: Connection refused: no further information 
at sun.nio.ch.SocketChannelImpl.checkConnect(Native Method) 
at sun.nio.ch.SocketChannelImpl.finishConnect(Unknown Source) 
at org.apache.hadoop.net.SocketIOWithTimeout.connect(SocketIOWithTimeout.java:206) 
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:530) 
at org.apache.hadoop.net.NetUtils.connect(NetUtils.java:494) 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupConnection(RpcClientImpl.java:404) 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.setupIOstreams(RpcClientImpl.java:710) 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.writeRequest(RpcClientImpl.java:881) 
at org.apache.hadoop.hbase.ipc.RpcClientImpl$Connection.tracedWriteRequest(RpcClientImpl.java:850) 
at org.apache.hadoop.hbase.ipc.RpcClientImpl.call(RpcClientImpl.java:1174) 
at org.apache.hadoop.hbase.ipc.AbstractRpcClient.callBlockingMethod(AbstractRpcClient.java:216) 
at org.apache.hadoop.hbase.ipc.AbstractRpcClient$BlockingRpcChannelImplementation.callBlockingMethod(AbstractRpcClient.java:300) 
at org.apache.hadoop.hbase.protobuf.generated.ClientProtos$ClientService$BlockingStub.scan(ClientProtos.java:31751) 
at org.apache.hadoop.hbase.client.ScannerCallable.openScanner(ScannerCallable.java:337) 
at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:192) 
at org.apache.hadoop.hbase.client.ScannerCallable.call(ScannerCallable.java:62) 
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithoutRetries(RpcRetryingCaller.java:200) 
at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:316) 
at org.apache.hadoop.hbase.client.ScannerCallableWithReplicas$RetryingRPC.call(ScannerCallableWithReplicas.java:290) 
at org.apache.hadoop.hbase.client.RpcRetryingCaller.callWithRetries(RpcRetryingCaller.java:126) 
... 4 more 

Hi everyone, I'm still stuck on this. Can someone please help me? Thanks – Mus007


Is this a core bug, or am I the only one getting this error? – Mus007

Answer

Please check the value of the property 'hbase.client.scanner.timeout.period' and raise it to 10 minutes in hbase-default.xml to get rid of the HBase exceptions.
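For reference, a minimal standalone sketch of setting this timeout on the client side and repeating the call that fails in the stack trace above (tableExists, which scans hbase:meta). It assumes the HBase 1.x client API that the CDH 5.4 shim uses; the class name, ZooKeeper host, and port are placeholders you would replace with your own values.

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.TableName;
import org.apache.hadoop.hbase.client.Admin;
import org.apache.hadoop.hbase.client.Connection;
import org.apache.hadoop.hbase.client.ConnectionFactory;

public class HBaseTimeoutCheck {
    public static void main(String[] args) throws Exception {
        Configuration conf = HBaseConfiguration.create();
        // Placeholder values: point these at your actual ZooKeeper quorum.
        conf.set("hbase.zookeeper.quorum", "your-hbase-host");
        conf.set("hbase.zookeeper.property.clientPort", "2181");
        // Scanner timeout suggested in the answer: 10 minutes, in milliseconds.
        conf.set("hbase.client.scanner.timeout.period", "600000");
        // The RPC timeout can be raised as well if calls still time out.
        conf.set("hbase.rpc.timeout", "600000");

        try (Connection connection = ConnectionFactory.createConnection(conf);
             Admin admin = connection.getAdmin()) {
            // Same check the Pentaho HBase Output dialog performs when it lists mapped tables.
            System.out.println("pentaho_mappings exists: "
                + admin.tableExists(TableName.valueOf("pentaho_mappings")));
        }
    }
}

Note that the innermost cause in the trace above is "Connection refused" on localhost,16020, so raising the timeout alone may not help if the region server is not actually reachable from the client.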
