First off, thank you for the snakebite Python3 port. I really appreciate it!!!
Traceback (most recent call last):
File "<stdin>", line 1, in <module>
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/snakebite/client.py", line 174, in ls
recurse=recurse):
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/snakebite/client.py", line 1223, in _find_items
fileinfo = self._get_file_info(path)
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/snakebite/client.py", line 1351, in _get_file_info
return self.service.getFileInfo(request)
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/snakebite/service.py", line 43, in <lambda>
rpc = lambda request, service=self, method=method.name: service.call(service_stub_class.__dict__[method], request)
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/snakebite/service.py", line 49, in call
return method(self.service, controller, request)
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/google/protobuf/service_reflection.py", line 267, in <lambda>
self._StubMethod(inst, method, rpc_controller, request, callback))
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/google/protobuf/service_reflection.py", line 284, in _StubMethod
method_descriptor.output_type._concrete_class, callback)
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/snakebite/channel.py", line 450, in CallMethod
return self.parse_response(byte_stream, response_class)
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/snakebite/channel.py", line 421, in parse_response
self.handle_error(header)
File "/usr/local/anaconda3/envs/anaconda3/lib/python3.7/site-packages/snakebite/channel.py", line 424, in handle_error
raise RequestError("\n".join([header.exceptionClassName, header.errorMsg]))
snakebite.errors.RequestError: org.apache.hadoop.ipc.StandbyException
Operation category READ is not supported in state standby
at org.apache.hadoop.hdfs.server.namenode.ha.StandbyState.checkOperation(StandbyState.java:87)
at org.apache.hadoop.hdfs.server.namenode.NameNode$NameNodeHAContext.checkOperation(NameNode.java:1978)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.checkOperation(FSNamesystem.java:1368)
at org.apache.hadoop.hdfs.server.namenode.FSNamesystem.getFileInfo(FSNamesystem.java:4096)
at org.apache.hadoop.hdfs.server.namenode.NameNodeRpcServer.getFileInfo(NameNodeRpcServer.java:1130)
at org.apache.hadoop.hdfs.protocolPB.ClientNamenodeProtocolServerSideTranslatorPB.getFileInfo(ClientNamenodeProtocolServerSideTranslatorPB.java:851)
at org.apache.hadoop.hdfs.protocol.proto.ClientNamenodeProtocolProtos$ClientNamenodeProtocol$2.callBlockingMethod(ClientNamenodeProtocolProtos.java)
at org.apache.hadoop.ipc.ProtobufRpcEngine$Server$ProtoBufRpcInvoker.call(ProtobufRpcEngine.java:640)
at org.apache.hadoop.ipc.RPC$Server.call(RPC.java:982)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2351)
at org.apache.hadoop.ipc.Server$Handler$1.run(Server.java:2347)
at java.security.AccessController.doPrivileged(Native Method)
at javax.security.auth.Subject.doAs(Subject.java:422)
at org.apache.hadoop.security.UserGroupInformation.doAs(UserGroupInformation.java:1865)
at org.apache.hadoop.ipc.Server$Handler.run(Server.java:2345)
The cause: Python 3 removed unbound methods, so accessing a method on a class returns a plain function, and `inspect.ismethod()` only matches methods bound to an instance. As a result, when `HAClient._wrap_methods()` runs the following code, `inspect.getmembers(cls, inspect.ismethod)` finds no public methods to wrap, and HA failover handling is never applied:
@classmethod
def _wrap_methods(cls):
    """Wrap every public Client method with HA (Namenode failover) support.

    Replaces each public attribute of ``cls`` with either
    ``_ha_gen_method`` (for generator functions) or ``_ha_return_method``
    (for plain-return functions), so calls transparently retry against
    the active namenode on StandbyException.
    """
    def _is_wrappable(member):
        # On Python 3, methods looked up on the class are plain
        # functions, so inspect.ismethod() alone misses them (it only
        # matches bound methods, e.g. classmethods). Accept both kinds
        # so wrapping works on Python 2 and Python 3.
        return inspect.isfunction(member) or inspect.ismethod(member)

    # Add HA support to all public Client methods, but only do this when
    # we haven't done this before.
    for name, meth in inspect.getmembers(cls, _is_wrappable):
        if not name.startswith("_"):  # Only public methods
            if inspect.isgeneratorfunction(meth):
                setattr(cls, name, cls._ha_gen_method(meth))
            else:
                setattr(cls, name, cls._ha_return_method(meth))
Any objections if I create a pull request to resolve the issue and get HA Namenode failover working again?