1 Logging initialized using configuration in jar:file:/usr/local/hive/lib/hive-common-1.2.2.jar!/hive-log4j.properties 2 Exception in thread "main" java.lang.RuntimeException: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.SafeModeException): Cannot create directory /tmp/hive. Name node is in safe mode. 3 Resources are low on NN. Please add or free up more resources then turn off safe mode manually. NOTE: If you turn off safe mode before adding resources, the NN will immediately return to safe mode. Use "hdfs dfsadmin -safemode leave" to turn safe mode off. 4 at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkNameNodeSafeMode(FSNamesystem.java:1335) 5 at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3866) 6 at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:984) 7 at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:634) 8 at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) 9 at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616) 10 at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982) 11 at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2217) 12 at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2213) 13 at java.security.AccessController.doPrivileged(Native Method) 14 at javax.security.auth.Subject.doAs(Subject.java:422) 15 at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1754) 16 at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2213) 17 18 at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:522) 19 at org.apache.hadoop.hive.cli.CliDriver.run(CliDriver.java:677) 20 at org.apache.hadoop.hive.cli.CliDriver.main(CliDriver.java:621) 21 at 
sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 22 at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 23 at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 24 at java.lang.reflect.Method.invoke(Method.java:498) 25 at org.apache.hadoop.util.RunJar.run(RunJar.java:221) 26 at org.apache.hadoop.util.RunJar.main(RunJar.java:136) 27 Caused by: org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.server.namenode.SafeModeException): Cannot create directory /tmp/hive. Name node is in safe mode. 28 Resources are low on NN. Please add or free up more resources then turn off safe mode manually. NOTE: If you turn off safe mode before adding resources, the NN will immediately return to safe mode. Use "hdfs dfsadmin -safemode leave" to turn safe mode off. 29 at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkNameNodeSafeMode(FSNamesystem.java:1335) 30 at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.mkdirs(FSNamesystem.java:3866) 31 at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.mkdirs(NameNodeRpcServer.java:984) 32 at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.mkdirs(ClientNamenodeProtocolServerSideTranslatorPB.java:634) 33 at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java) 34 at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616) 35 at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982) 36 at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2217) 37 at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2213) 38 at java.security.AccessController.doPrivileged(Native Method) 39 at javax.security.auth.Subject.doAs(Subject.java:422) 40 at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1754) 41 at 
org.apache.hadoop.ipc.Server$Handler.run(Server.java:2213) 42 43 at org.apache.hadoop.ipc.Client.call(Client.java:1476) 44 at org.apache.hadoop.ipc.Client.call(Client.java:1413) 45 at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229) 46 at com.sun.proxy.$Proxy16.mkdirs(Unknown Source) 47 at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.mkdirs(ClientNamenodeProtocolTranslatorPB.java:563) 48 at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) 49 at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:62) 50 at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) 51 at java.lang.reflect.Method.invoke(Method.java:498) 52 at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191) 53 at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102) 54 at com.sun.proxy.$Proxy17.mkdirs(Unknown Source) 55 at org.apache.hadoop.hdfs.DFSClient.primitiveMkdir(DFSClient.java:3014) 56 at org.apache.hadoop.hdfs.DFSClient.mkdirs(DFSClient.java:2984) 57 at org.apache.hadoop.hdfs.DistributedFileSystem$21.doCall(DistributedFileSystem.java:1047) 58 at org.apache.hadoop.hdfs.DistributedFileSystem$21.doCall(DistributedFileSystem.java:1043) 59 at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81) 60 at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirsInternal(DistributedFileSystem.java:1061) 61 at org.apache.hadoop.hdfs.DistributedFileSystem.mkdirs(DistributedFileSystem.java:1036) 62 at org.apache.hadoop.hive.ql.exec.Utilities.createDirsWithPermission(Utilities.java:3678) 63 at org.apache.hadoop.hive.ql.session.SessionState.createRootHDFSDir(SessionState.java:597) 64 at org.apache.hadoop.hive.ql.session.SessionState.createSessionDirs(SessionState.java:554) 65 at org.apache.hadoop.hive.ql.session.SessionState.start(SessionState.java:508) 66 ... 8 more
1.问题所在:
磁盘空间不足（NameNode 所在节点资源不足，导致 NameNode 进入并停留在安全模式）
可以使用 df -hl 查看磁盘空间使用情况（注意：df 显示的是磁盘占用，不是内存）
2.解决方式:
(1)使用 rm -rf 删除确认不再需要的文件或文件夹，释放磁盘空间（操作前务必确认内容可以删除，rm -rf 不可恢复）
(2)释放空间后，执行
hdfs dfsadmin -safemode leave
手动退出安全模式（注意：若未先释放资源就强制退出，NameNode 会立即重新进入安全模式，正如报错信息中的 NOTE 所述）
原文地址:https://www.cnblogs.com/shareUndergo/p/9076410.html
时间: 2024-10-10 03:20:39