RPC response exceeds maximum data length & Cannot assign requested address & java.net.UnknownHostException

Scenario: an error occurred while using Flume.
Problem: there are four similar errors in total:
				20/06/20 09:09:37 ERROR hdfs.HDFSEventSink: process failed
				java.lang.IllegalArgumentException: java.net.UnknownHostException: Project
					at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:445)
					at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:132)
					at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:350)
					at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:284)
					at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:167)
					at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3288)
					at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:123)
					at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3337)
					at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3305)
					at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:476)
					at org.apache.hadoop.fs.Path.getFileSystem(Path.java:361)
					at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:255)
					at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:247)
					at org.apache.flume.sink.hdfs.BucketWriter$8$1.run(BucketWriter.java:727)
					at org.apache.flume.auth.SimpleAuthenticator.execute(SimpleAuthenticator.java:50)
					at org.apache.flume.sink.hdfs.BucketWriter$8.call(BucketWriter.java:724)
					at java.util.concurrent.FutureTask.run(FutureTask.java:266)
					at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
					at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
					at java.lang.Thread.run(Thread.java:748)
				Caused by: java.net.UnknownHostException: Project
					... 20 more
				20/06/20 09:09:37 ERROR flume.SinkRunner: Unable to deliver event. Exception follows.
				org.apache.flume.EventDeliveryException: java.lang.IllegalArgumentException: java.net.UnknownHostException: Project
					at org.apache.flume.sink.hdfs.HDFSEventSink.process(HDFSEventSink.java:464)
					at org.apache.flume.sink.DefaultSinkProcessor.process(DefaultSinkProcessor.java:67)
					at org.apache.flume.SinkRunner$PollingRunner.run(SinkRunner.java:145)
					at java.lang.Thread.run(Thread.java:748)
				Caused by: java.lang.IllegalArgumentException: java.net.UnknownHostException: Project
					at org.apache.hadoop.security.SecurityUtil.buildTokenService(SecurityUtil.java:445)
					at org.apache.hadoop.hdfs.NameNodeProxiesClient.createProxyWithClientProtocol(NameNodeProxiesClient.java:132)
					at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:350)
					at org.apache.hadoop.hdfs.DFSClient.<init>(DFSClient.java:284)
					at org.apache.hadoop.hdfs.DistributedFileSystem.initialize(DistributedFileSystem.java:167)
					at org.apache.hadoop.fs.FileSystem.createFileSystem(FileSystem.java:3288)
					at org.apache.hadoop.fs.FileSystem.access$200(FileSystem.java:123)
					at org.apache.hadoop.fs.FileSystem$Cache.getInternal(FileSystem.java:3337)
					at org.apache.hadoop.fs.FileSystem$Cache.get(FileSystem.java:3305)
					at org.apache.hadoop.fs.FileSystem.get(FileSystem.java:476)
					at org.apache.hadoop.fs.Path.getFileSystem(Path.java:361)
					at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:255)
					at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:247)
					at org.apache.flume.sink.hdfs.BucketWriter$8$1.run(BucketWriter.java:727)
					at org.apache.flume.auth.SimpleAuthenticator.execute(SimpleAuthenticator.java:50)
					at org.apache.flume.sink.hdfs.BucketWriter$8.call(BucketWriter.java:724)
					at java.util.concurrent.FutureTask.run(FutureTask.java:266)
					at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
					at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
					... 1 more

Another error:
					20/06/20 09:20:55 WARN hdfs.BucketWriter: failed to rename() file (hdfs://bdpcdh6301:9870/Projecat/BDP/data/ineer/RAW/01/test_flume/20200620/logs-.1592616051200.tmp). Exception follows.
				java.io.IOException: Failed on local exception: org.apache.hadoop.ipc.RpcException: RPC response exceeds maximum data length; Host Details : local host is: "bdpcdh6303/172.18.0.93"; destination host is: "bdpcdh6301":9870;
					at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:808)
					at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1503)
					at org.apache.hadoop.ipc.Client.call(Client.java:1445)
					at org.apache.hadoop.ipc.Client.call(Client.java:1355)
					at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
					at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
					at com.sun.proxy.$Proxy12.getFileInfo(Unknown Source)
					at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.getFileInfo(ClientNamenodeProtocolTranslatorPB.java:875)
					at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
					at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
					at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
					at java.lang.reflect.Method.invoke(Method.java:498)
					at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
					at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
					at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
					at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
					at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
					at com.sun.proxy.$Proxy13.getFileInfo(Unknown Source)
					at org.apache.hadoop.hdfs.DFSClient.getFileInfo(DFSClient.java:1624)
					at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1495)
					at org.apache.hadoop.hdfs.DistributedFileSystem$29.doCall(DistributedFileSystem.java:1492)
					at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
					at org.apache.hadoop.hdfs.DistributedFileSystem.getFileStatus(DistributedFileSystem.java:1507)
					at org.apache.hadoop.fs.FileSystem.exists(FileSystem.java:1668)
					at org.apache.flume.sink.hdfs.BucketWriter$7.call(BucketWriter.java:680)
					at org.apache.flume.sink.hdfs.BucketWriter$7.call(BucketWriter.java:677)
					at org.apache.flume.sink.hdfs.BucketWriter$8$1.run(BucketWriter.java:727)
					at org.apache.flume.auth.SimpleAuthenticator.execute(SimpleAuthenticator.java:50)
					at org.apache.flume.sink.hdfs.BucketWriter$8.call(BucketWriter.java:724)
					at java.util.concurrent.FutureTask.run(FutureTask.java:266)
					at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
					at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
					at java.lang.Thread.run(Thread.java:748)
				Caused by: org.apache.hadoop.ipc.RpcException: RPC response exceeds maximum data length
					at org.apache.hadoop.ipc.Client$IpcStreams.readResponse(Client.java:1818)
					at org.apache.hadoop.ipc.Client$Connection.receiveRpcResponse(Client.java:1166)
					at org.apache.hadoop.ipc.Client$Connection.run(Client.java:1062)
				20/06/20 09:23:55 WARN hdfs.BucketWriter: Renaming file: hdfs://bdpcdh6301:9870/Projecat/BDP/data/ineer/RAW/01/test_flume/20200620/logs-.1592616051200.tmp failed. Will retry again in 180 seconds.
				java.util.concurrent.RejectedExecutionException: Task java.util.concurrent.FutureTask@6ff77997 rejected from java.util.concurrent.ThreadPoolExecutor@61308a19[Terminated, pool size = 0, active threads = 0, queued tasks = 0, completed tasks = 3]
					at java.util.concurrent.ThreadPoolExecutor$AbortPolicy.rejectedExecution(ThreadPoolExecutor.java:2063)
					at java.util.concurrent.ThreadPoolExecutor.reject(ThreadPoolExecutor.java:830)
					at java.util.concurrent.ThreadPoolExecutor.execute(ThreadPoolExecutor.java:1379)
					at java.util.concurrent.AbstractExecutorService.submit(AbstractExecutorService.java:134)
					at org.apache.flume.sink.hdfs.BucketWriter.callWithTimeout(BucketWriter.java:721)
					at org.apache.flume.sink.hdfs.BucketWriter.renameBucket(BucketWriter.java:677)
					at org.apache.flume.sink.hdfs.BucketWriter.access$1600(BucketWriter.java:60)
					at org.apache.flume.sink.hdfs.BucketWriter$ScheduledRenameCallable.call(BucketWriter.java:382)
					at org.apache.flume.sink.hdfs.BucketWriter$ScheduledRenameCallable.call(BucketWriter.java:367)
					at java.util.concurrent.FutureTask.run(FutureTask.java:266)
					at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$201(ScheduledThreadPoolExecutor.java:180)
					at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:293)
					at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
					at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
					at java.lang.Thread.run(Thread.java:748)
			      
				20/06/20 09:29:24 ERROR source.NetcatSource: Unable to bind to socket. Exception follows.
				java.net.BindException: Cannot assign requested address
					at sun.nio.ch.Net.bind0(Native Method)
					at sun.nio.ch.Net.bind(Net.java:433)
					at sun.nio.ch.Net.bind(Net.java:425)
					at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
					at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
					at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:67)
					at org.apache.flume.source.NetcatSource.start(NetcatSource.java:164)
					at org.apache.flume.source.EventDrivenSourceRunner.start(EventDrivenSourceRunner.java:44)
					at org.apache.flume.lifecycle.LifecycleSupervisor$MonitorRunnable.run(LifecycleSupervisor.java:249)
					at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
					at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
					at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
					at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
					at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
					at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
					at java.lang.Thread.run(Thread.java:748)
				20/06/20 09:29:24 INFO source.NetcatSource: Source stopping
				20/06/20 09:29:24 ERROR lifecycle.LifecycleSupervisor: Unable to start EventDrivenSourceRunner: { source:org.apache.flume.source.NetcatSource{name:r1,state:STOP} } - Exception follows.
				org.apache.flume.FlumeException: java.net.BindException: Cannot assign requested address
					at org.apache.flume.source.NetcatSource.start(NetcatSource.java:171)
					at org.apache.flume.source.EventDrivenSourceRunner.start(EventDrivenSourceRunner.java:44)
					at org.apache.flume.lifecycle.LifecycleSupervisor$MonitorRunnable.run(LifecycleSupervisor.java:249)
					at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
					at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
					at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
					at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
					at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
					at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
					at java.lang.Thread.run(Thread.java:748)
				Caused by: java.net.BindException: Cannot assign requested address
					at sun.nio.ch.Net.bind0(Native Method)
					at sun.nio.ch.Net.bind(Net.java:433)
					at sun.nio.ch.Net.bind(Net.java:425)
					at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
					at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
					at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:67)
					at org.apache.flume.source.NetcatSource.start(NetcatSource.java:164)
					... 9 more
				20/06/20 09:29:27 INFO source.NetcatSource: Source starting
				20/06/20 09:29:27 ERROR source.NetcatSource: Unable to bind to socket. Exception follows.
				java.net.BindException: Cannot assign requested address
					at sun.nio.ch.Net.bind0(Native Method)
					at sun.nio.ch.Net.bind(Net.java:433)
					at sun.nio.ch.Net.bind(Net.java:425)
					at sun.nio.ch.ServerSocketChannelImpl.bind(ServerSocketChannelImpl.java:223)
					at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:74)
					at sun.nio.ch.ServerSocketAdaptor.bind(ServerSocketAdaptor.java:67)
					at org.apache.flume.source.NetcatSource.start(NetcatSource.java:164)
					at org.apache.flume.source.EventDrivenSourceRunner.start(EventDrivenSourceRunner.java:44)
					at org.apache.flume.lifecycle.LifecycleSupervisor$MonitorRunnable.run(LifecycleSupervisor.java:249)
					at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:511)
					at java.util.concurrent.FutureTask.runAndReset(FutureTask.java:308)
					at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.access$301(ScheduledThreadPoolExecutor.java:180)
					at java.util.concurrent.ScheduledThreadPoolExecutor$ScheduledFutureTask.run(ScheduledThreadPoolExecutor.java:294)
					at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
					at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
					at java.lang.Thread.run(Thread.java:748)
20/06/20 09:45:13 WARN hdfs.HDFSEventSink: HDFS IO error
				java.io.IOException: Failed on local exception: org.apache.hadoop.ipc.RpcException: RPC response exceeds maximum data length; Host Details : local host is: "bdpcdh6303/172.18.0.93"; destination host is: "47.102.155.106":9870;
					at org.apache.hadoop.net.NetUtils.wrapException(NetUtils.java:808)
					at org.apache.hadoop.ipc.Client.getRpcResponse(Client.java:1503)
					at org.apache.hadoop.ipc.Client.call(Client.java:1445)
					at org.apache.hadoop.ipc.Client.call(Client.java:1355)
					at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:228)
					at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:116)
					at com.sun.proxy.$Proxy12.create(Unknown Source)
					at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.create(ClientNamenodeProtocolTranslatorPB.java:349)
					at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method)
					at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62)
					at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
					at java.lang.reflect.Method.invoke(Method.java:498)
					at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:422)
					at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeMethod(RetryInvocationHandler.java:165)
					at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invoke(RetryInvocationHandler.java:157)
					at org.apache.hadoop.io.retry.RetryInvocationHandler$Call.invokeOnce(RetryInvocationHandler.java:95)
					at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:359)
					at com.sun.proxy.$Proxy13.create(Unknown Source)
					at org.apache.hadoop.hdfs.DFSOutputStream.newStreamForCreate(DFSOutputStream.java:276)
					at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1176)
					at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1155)
					at org.apache.hadoop.hdfs.DFSClient.create(DFSClient.java:1093)
					at org.apache.hadoop.hdfs.DistributedFileSystem$8.doCall(DistributedFileSystem.java:463)
					at org.apache.hadoop.hdfs.DistributedFileSystem$8.doCall(DistributedFileSystem.java:460)
					at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
					at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:474)
					at org.apache.hadoop.hdfs.DistributedFileSystem.create(DistributedFileSystem.java:401)
					at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1103)
					at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:1083)
					at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:972)
					at org.apache.hadoop.fs.FileSystem.create(FileSystem.java:960)
					at org.apache.flume.sink.hdfs.HDFSDataStream.doOpen(HDFSDataStream.java:80)
					at org.apache.flume.sink.hdfs.HDFSDataStream.open(HDFSDataStream.java:107)
					at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:257)
					at org.apache.flume.sink.hdfs.BucketWriter$1.call(BucketWriter.java:247)
					at org.apache.flume.sink.hdfs.BucketWriter$8$1.run(BucketWriter.java:727)
					at org.apache.flume.auth.SimpleAuthenticator.execute(SimpleAuthenticator.java:50)
					at org.apache.flume.sink.hdfs.BucketWriter$8.call(BucketWriter.java:724)
					at java.util.concurrent.FutureTask.run(FutureTask.java:266)
					at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1149)
					at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:624)
					at java.lang.Thread.run(Thread.java:748)
				Caused by: org.apache.hadoop.ipc.RpcException: RPC response exceeds maximum data length
					at org.apache.hadoop.ipc.Client$IpcStreams.readResponse(Client.java:1818)
					at org.apache.hadoop.ipc.Client$Connection.receiveRpcResponse(Client.java:1166)
					at org.apache.hadoop.ipc.Client$Connection.run(Client.java:1062)
				20/06/20 09:45:17 INFO hdfs.BucketWriter: Creating hdfs://47.102.155.106:9870/Projecat/BDP/data/ineer/RAW/01/test_flume/20200620/logs-.1592617512489.tmp


Problem analysis: I searched around online and tried a number of approaches.
Solution: first, confirm that the source's bind IP address is an address of the local machine and that its port is correct; then confirm that the HDFS address in the sink points at a resolvable NameNode host and that the HDFS port is the NameNode's RPC port, not the web UI port (9870) that appears in the logs above.
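For reference, below is a minimal sketch of an agent configuration that applies those checks. It is an assumption-laden example, not the original configuration: the agent/source/sink names (a1, r1, c1, k1), the bind address 0.0.0.0, the netcat port 44444, the NameNode RPC port 8020 and the %Y%m%d date escape are placeholders to adapt to your own cluster. The reasoning behind each setting: an unresolvable authority in hdfs.path (the "Project" in the first trace) produces java.net.UnknownHostException, a bind address that does not belong to the local host produces the BindException, and pointing the sink at the NameNode's HTTP port instead of its RPC port is a common cause of "RPC response exceeds maximum data length".

	# Hypothetical example only: names, hosts, ports and escapes are assumptions.
	a1.sources = r1
	a1.channels = c1
	a1.sinks = k1

	# Netcat source: bind must be an address that exists on this machine
	# (or 0.0.0.0); a non-local address causes "Cannot assign requested address".
	a1.sources.r1.type = netcat
	a1.sources.r1.bind = 0.0.0.0
	a1.sources.r1.port = 44444
	a1.sources.r1.channels = c1

	a1.channels.c1.type = memory

	# HDFS sink: the path must use a resolvable NameNode host and its RPC port
	# (assumed 8020 here); an unresolvable host gives UnknownHostException, and
	# the 9870 web UI port gives "RPC response exceeds maximum data length".
	a1.sinks.k1.type = hdfs
	a1.sinks.k1.hdfs.path = hdfs://bdpcdh6301:8020/Projecat/BDP/data/ineer/RAW/01/test_flume/%Y%m%d
	a1.sinks.k1.hdfs.filePrefix = logs-
	a1.sinks.k1.hdfs.useLocalTimeStamp = true
	a1.sinks.k1.channel = c1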