Hadoop HDFS / HDFS-14031

HDFS FILE CREATE APPEND LEASE


Details

    • Type: Bug
    • Status: Resolved
    • Priority: Major
    • Resolution: Invalid
    • Affects Version/s: 2.7.6
    • Fix Version/s: None
    • Component/s: block placement
    • Labels: None

Description

      org.apache.hadoop.ipc.RemoteException(org.apache.hadoop.hdfs.protocol.AlreadyBeingCreatedException): Failed to APPEND_FILE /mnt/data1/datadir/hive_cluster1/warehouse/singhand_ntcstore.db/singhand_ntc_ip_tmp1/singhand_ntc_ip_log2073 for DFSClient_NONMAPREDUCE_818722237_1 on 172.16.0.166 because DFSClient_NONMAPREDUCE_818722237_1 is already the current lease holder.
          at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.recoverLeaseInternal(FSNamesystem.java:2883)
          at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.appendFileInternal(FSNamesystem.java:2683)
          at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.appendFileInt(FSNamesystem.java:2982)
          at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.appendFile(FSNamesystem.java:2950)
          at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.append(NameNodeRpcServer.java:654)
          at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.append(ClientNamenodeProtocolServerSideTranslatorPB.java:421)
          at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
          at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:616)
          at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:969)
          at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2049)
          at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2045)
          at java.security.AccessController.doPrivileged(Native Method)
          at javax.security.auth.Subject.doAs(Subject.java:422)
          at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1657)
          at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2045)

          at org.apache.hadoop.ipc.Client.call(Client.java:1475)
          at org.apache.hadoop.ipc.Client.call(Client.java:1412)
          at org.apache.hadoop.ipc.ProtobufRpcEngine$Invoker.invoke(ProtobufRpcEngine.java:229)
          at com.sun.proxy.$Proxy8.append(Unknown Source)
          at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolTranslatorPB.append(ClientNamenodeProtocolTranslatorPB.java:328)
          at sun.reflect.GeneratedMethodAccessor10.invoke(Unknown Source)
          at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43)
          at java.lang.reflect.Method.invoke(Method.java:498)
          at org.apache.hadoop.io.retry.RetryInvocationHandler.invokeMethod(RetryInvocationHandler.java:191)
          at org.apache.hadoop.io.retry.RetryInvocationHandler.invoke(RetryInvocationHandler.java:102)
          at com.sun.proxy.$Proxy9.append(Unknown Source)
          at org.apache.hadoop.hdfs.DFSClient.callAppend(DFSClient.java:1808)
          at org.apache.hadoop.hdfs.DFSClient.append(DFSClient.java:1877)
          at org.apache.hadoop.hdfs.DFSClient.append(DFSClient.java:1847)
          at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:340)
          at org.apache.hadoop.hdfs.DistributedFileSystem$4.doCall(DistributedFileSystem.java:336)
          at org.apache.hadoop.fs.FileSystemLinkResolver.resolve(FileSystemLinkResolver.java:81)
          at org.apache.hadoop.hdfs.DistributedFileSystem.append(DistributedFileSystem.java:348)
          at org.apache.hadoop.hdfs.DistributedFileSystem.append(DistributedFileSystem.java:318)
          at org.apache.hadoop.fs.FileSystem.append(FileSystem.java:1164)
          at com.singhand.hdfs.utils.HDFSWriter.appendWriter(HDFSWriter.java:90)
          at com.singhand.ntc.mock.ReadTopic2HDFS.loadData(ReadTopic2HDFS.java:149)
          at com.singhand.ntc.mock.ReadTopic2HDFS.kafka2HDFS(ReadTopic2HDFS.java:74)
          at com.singhand.ntc.mock.ReadTopic2HDFS.main(ReadTopic2HDFS.java:255)
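
For context (this sketch is not part of the original report): "is already the current lease holder" typically means the same DFSClient still has an open output stream for the file, i.e. a previous create()/append() was never closed before append() was called again, so the NameNode refuses the new append for what it sees as the existing holder. Below is a minimal Java sketch of that anti-pattern and one way to avoid it, assuming a default Configuration; the class name and path are hypothetical, not taken from this report.

    import java.io.IOException;
    import java.nio.charset.StandardCharsets;

    import org.apache.hadoop.conf.Configuration;
    import org.apache.hadoop.fs.FSDataOutputStream;
    import org.apache.hadoop.fs.FileSystem;
    import org.apache.hadoop.fs.Path;

    // Illustrative only: class name and path are hypothetical.
    public class AppendLeaseSketch {
        public static void main(String[] args) throws IOException {
            Configuration conf = new Configuration();
            FileSystem fs = FileSystem.get(conf);
            Path file = new Path("/tmp/append-lease-demo.log"); // hypothetical path

            if (!fs.exists(file)) {
                fs.create(file).close(); // create the file and release the lease immediately
            }

            // Anti-pattern (commented out): calling append() again while a previous
            // stream from the same client is still open. The client already holds the
            // lease, so the NameNode rejects the second call with
            // AlreadyBeingCreatedException ("... is already the current lease holder").
            //
            //   FSDataOutputStream first = fs.append(file);   // lease acquired
            //   FSDataOutputStream second = fs.append(file);  // throws: same client holds the lease
            //
            // Safer pattern: keep one stream per file and close it when done,
            // which releases the lease and lets later appends succeed.
            try (FSDataOutputStream out = fs.append(file)) {
                for (int i = 0; i < 10; i++) {
                    out.write(("record " + i + "\n").getBytes(StandardCharsets.UTF_8));
                }
                out.hflush(); // flush to the pipeline so readers can see the data
            } // close() releases this client's lease on the file

            fs.close();
        }
    }

In a Kafka-to-HDFS loop like ReadTopic2HDFS.loadData -> HDFSWriter.appendWriter, the usual things to verify would be that each append stream is closed (or a single stream is reused per file) and that no second thread or process appends to the same log file concurrently.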

People

    Assignee: Unassigned
    Reporter: lazy yc
    Votes: 0
    Watchers: 2

Dates

    Created:
    Updated:
    Resolved: