org.apache.hadoop.io.IOUtils.closeSocket()方法的使用及代码示例

x33g5p2x  于2022-01-20 转载在 其他  
字(8.2k)|赞(0)|评价(0)|浏览(126)

本文整理了Java中org.apache.hadoop.io.IOUtils.closeSocket()方法的一些代码示例,展示了IOUtils.closeSocket()的具体用法。这些代码示例主要来源于Github/Stackoverflow/Maven等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。IOUtils.closeSocket()方法的具体详情如下:
包路径:org.apache.hadoop.io.IOUtils
类名称:IOUtils
方法名:closeSocket

IOUtils.closeSocket介绍

[英]Closes the socket ignoring IOException
[中]忽略IOException关闭套接字

代码示例

代码示例来源:origin: apache/hbase

/**
 * Best-effort connection teardown: closes both stream wrappers and the
 * underlying socket (all three helpers swallow IOException), then clears
 * the fields so the connection reads as fully closed.
 */
private void closeSocket() {
 IOUtils.closeStream(this.out);
 IOUtils.closeStream(this.in);
 IOUtils.closeSocket(this.socket);
 this.out = null;
 this.in = null;
 this.socket = null;
}

代码示例来源:origin: org.apache.hbase/hbase-client

/**
 * Best-effort teardown of the current connection: closes the streams and
 * the socket via IOUtils (closeSocket ignores IOException), then nulls the
 * fields so subsequent use sees a closed connection.
 */
private void closeSocket() {
 IOUtils.closeStream(out);
 IOUtils.closeStream(in);
 IOUtils.closeSocket(socket);
 out = null;
 in = null;
 socket = null;
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

// Best-effort cleanup: close both streams, then the socket itself.
// IOUtils.closeSocket ignores any IOException raised while closing.
IOUtils.closeStream(out);
IOUtils.closeStream(in);
IOUtils.closeSocket(sock);

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

/**
 * Opens a TCP connection to the given datanode and wraps it in a
 * SASL-negotiated {@link Peer}.
 *
 * On any failure before {@code success} is set, the finally block releases
 * both the half-built peer and the raw socket so neither leaks.
 *
 * @param b          block whose data-encryption key factory is used for SASL
 * @param addr       target datanode address
 * @param blockToken access token presented during negotiation
 * @param datanodeId identity of the remote datanode
 * @return a connected, negotiated peer
 * @throws IOException if connecting or SASL negotiation fails
 */
private Peer newConnectedPeer(ExtendedBlock b, InetSocketAddress addr,
               Token<BlockTokenIdentifier> blockToken,
               DatanodeID datanodeId)
  throws IOException {
 Peer peer = null;
 boolean success = false;
 Socket sock = null;
 final int socketTimeout = datanode.getDnConf().getSocketTimeout();
 try {
  sock = NetUtils.getDefaultSocketFactory(conf).createSocket();
  NetUtils.connect(sock, addr, socketTimeout);
  peer = DFSUtilClient.peerFromSocketAndKey(datanode.getSaslClient(),
    sock, datanode.getDataEncryptionKeyFactoryForBlock(b),
    blockToken, datanodeId, socketTimeout);
  success = true;
  return peer;
 } finally {
  // Only clean up on the failure path; on success the caller owns the peer.
  if (!success) {
   IOUtils.cleanup(null, peer);
   IOUtils.closeSocket(sock);
  }
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

// Best-effort cleanup: close both streams, then the socket itself.
// IOUtils.closeSocket ignores any IOException raised while closing.
IOUtils.closeStream(out);
IOUtils.closeStream(in);
IOUtils.closeSocket(sock);

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

// Best-effort cleanup: close both streams, then the socket itself.
// IOUtils.closeSocket ignores any IOException raised while closing.
IOUtils.closeStream(out);
IOUtils.closeStream(in);
IOUtils.closeSocket(sock);

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs

IOUtils.closeStream(mirrorIn);
  mirrorIn = null;
  IOUtils.closeSocket(mirrorSock);
  mirrorSock = null;
  if (isClient) {
IOUtils.closeStream(mirrorIn);
IOUtils.closeStream(replyOut);
IOUtils.closeSocket(mirrorSock);
IOUtils.closeStream(blockReceiver);
setCurrentBlockReceiver(null);

代码示例来源:origin: org.apache.slider/slider-core

/**
 * Polls a server on localhost:{@code port} until it stops accepting
 * connections or {@code timeout} milliseconds elapse.
 *
 * Each iteration opens a socket and writes the four-letter "stat" probe;
 * a successful write means the server is still up, so we sleep 250 ms and
 * retry. Any IOException (typically connection refused) is taken to mean
 * the server is down.
 *
 * @param port    local port to probe
 * @param timeout maximum time to wait, in milliseconds
 * @return true if the server went down within the timeout, false otherwise
 * @throws InterruptedException if interrupted while sleeping between probes
 */
private static boolean waitForServerDown(int port, long timeout) throws
  InterruptedException {
 long start = System.currentTimeMillis();
 while (true) {
  try {
   Socket sock = null;
   try {
    sock = new Socket("localhost", port);
    OutputStream outstream = sock.getOutputStream();
    // Use an explicit charset: the no-arg getBytes() relies on the
    // platform default, which varies by environment (pre-Java-18).
    outstream.write("stat".getBytes(java.nio.charset.StandardCharsets.UTF_8));
    outstream.flush();
   } finally {
    // closeSocket tolerates null and ignores IOException.
    IOUtils.closeSocket(sock);
   }
  } catch (IOException e) {
   // Connection refused (or any other I/O failure): server is down.
   return true;
  }
  if (System.currentTimeMillis() > start + timeout) {
   break;
  }
  Thread.sleep(250);
 }
 return false;
}

代码示例来源:origin: com.aliyun.hbase/alihbase-client

/**
 * Best-effort teardown of the current connection: closes the streams and
 * the socket via IOUtils (closeSocket ignores IOException), then nulls the
 * fields so subsequent use sees a closed connection.
 */
private void closeSocket() {
 IOUtils.closeStream(out);
 IOUtils.closeStream(in);
 IOUtils.closeSocket(socket);
 out = null;
 in = null;
 socket = null;
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

@Override
 public void close() throws IOException {
  // Close both stream wrappers first, then the socket; the IOUtils
  // helpers swallow close-time IOExceptions, so this never throws here.
  IOUtils.closeStream(in);
  IOUtils.closeStream(out);
  IOUtils.closeSocket(sock);
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs-client

@Override
 public void close() throws IOException {
  // Close both stream wrappers first, then the socket; the IOUtils
  // helpers swallow close-time IOExceptions, so this never throws here.
  IOUtils.closeStream(in);
  IOUtils.closeStream(out);
  IOUtils.closeSocket(sock);
 }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

@Override
 public void close() throws IOException {
  // Close both stream wrappers first, then the socket; the IOUtils
  // helpers swallow close-time IOExceptions, so this never throws here.
  IOUtils.closeStream(in);
  IOUtils.closeStream(out);
  IOUtils.closeSocket(sock);
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-common-test

/**
 * Round-trips a raw IPC request against a fresh test server and asserts
 * the byte-for-byte response, comparing as hex strings for readable
 * failure messages. Socket and server are always torn down in finally.
 */
private void doIpcVersionTest(
  byte[] requestData,
  byte[] expectedResponse) throws Exception {
 // Start a single-handler server and capture its bound address.
 Server server = new TestServer(1, true);
 InetSocketAddress serverAddr = NetUtils.getConnectAddress(server);
 server.start();
 Socket clientSocket = new Socket();
 try {
  NetUtils.connect(clientSocket, serverAddr, 5000);

  // Send the raw request bytes, then drain the full response.
  OutputStream requestStream = clientSocket.getOutputStream();
  InputStream responseStream = clientSocket.getInputStream();
  requestStream.write(requestData, 0, requestData.length);
  requestStream.flush();
  ByteArrayOutputStream responseBuffer = new ByteArrayOutputStream();
  IOUtils.copyBytes(responseStream, responseBuffer, 256);

  assertEquals(
    StringUtils.byteToHexString(expectedResponse),
    StringUtils.byteToHexString(responseBuffer.toByteArray()));
 } finally {
  IOUtils.closeSocket(clientSocket);
  server.stop();
 }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

@Override
 public Peer newConnectedPeer(InetSocketAddress addr,
   Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
   throws IOException {
  // Connect a plain socket with the standard read timeout and wrap it
  // as a Peer; on any failure before the wrap succeeds, close the raw
  // socket so it does not leak.
  Peer peer = null;
  Socket sock = NetUtils.getDefaultSocketFactory(conf).createSocket();
  try {
   sock.connect(addr, HdfsServerConstants.READ_TIMEOUT);
   sock.setSoTimeout(HdfsServerConstants.READ_TIMEOUT);
   peer = TcpPeerServer.peerFromSocket(sock);
  } finally {
   // peer == null means wrapping failed; reclaim the socket.
   if (peer == null) {
    IOUtils.closeSocket(sock);
   }
  }
  return peer;
 }
}).

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

@Override
 public Peer newConnectedPeer(InetSocketAddress addr,
   Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
   throws IOException {
  // Connect a plain socket with the standard read timeout and wrap it
  // as a Peer; on any failure before the wrap succeeds, close the raw
  // socket so it does not leak.
  Peer peer = null;
  Socket sock = NetUtils.getDefaultSocketFactory(conf).createSocket();
  try {
   sock.connect(addr, HdfsServerConstants.READ_TIMEOUT);
   sock.setSoTimeout(HdfsServerConstants.READ_TIMEOUT);
   peer = TcpPeerServer.peerFromSocket(sock);
  } finally {
   // peer == null means wrapping failed; reclaim the socket.
   if (peer == null) {
    IOUtils.closeSocket(sock);
   }
  }
  return peer;
 }
}).

代码示例来源:origin: ch.cern.hadoop/hadoop-hdfs

@Override // RemotePeerFactory
public Peer newConnectedPeer(InetSocketAddress addr,
  Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
  throws IOException {
 Peer peer = null;
 boolean success = false;
 Socket sock = null;
 try {
  // Connect from a random local interface address, then negotiate SASL
  // to obtain the Peer wrapper.
  sock = socketFactory.createSocket();
  NetUtils.connect(sock, addr,
   getRandomLocalInterfaceAddr(),
   dfsClientConf.socketTimeout);
  peer = TcpPeerServer.peerFromSocketAndKey(saslClient, sock, this,
    blockToken, datanodeId);
  peer.setReadTimeout(dfsClientConf.socketTimeout);
  success = true;
  return peer;
 } finally {
  // On the failure path, release both the half-built peer and the raw
  // socket; on success the caller takes ownership.
  if (!success) {
   IOUtils.cleanup(LOG, peer);
   IOUtils.closeSocket(sock);
  }
 }
}

代码示例来源:origin: org.apache.hadoop/hadoop-hdfs-client

@Override // RemotePeerFactory
public Peer newConnectedPeer(InetSocketAddress addr,
  Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
  throws IOException {
 Peer peer = null;
 boolean success = false;
 Socket sock = null;
 final int socketTimeout = dfsClientConf.getSocketTimeout();
 try {
  // Connect from a random local interface address, then negotiate SASL
  // to obtain the Peer wrapper.
  sock = socketFactory.createSocket();
  NetUtils.connect(sock, addr, getRandomLocalInterfaceAddr(),
    socketTimeout);
  peer = DFSUtilClient.peerFromSocketAndKey(saslClient, sock, this,
    blockToken, datanodeId, socketTimeout);
  success = true;
  return peer;
 } finally {
  // On the failure path, release both the half-built peer and the raw
  // socket; on success the caller takes ownership.
  if (!success) {
   IOUtilsClient.cleanupWithLogger(LOG, peer);
   IOUtils.closeSocket(sock);
  }
 }
}

代码示例来源:origin: io.prestosql.hadoop/hadoop-apache

@Override // RemotePeerFactory
public Peer newConnectedPeer(InetSocketAddress addr,
  Token<BlockTokenIdentifier> blockToken, DatanodeID datanodeId)
  throws IOException {
 Peer peer = null;
 boolean success = false;
 Socket sock = null;
 try {
  // Connect from a random local interface address, then negotiate SASL
  // to obtain the Peer wrapper.
  sock = socketFactory.createSocket();
  NetUtils.connect(sock, addr,
   getRandomLocalInterfaceAddr(),
   dfsClientConf.socketTimeout);
  peer = TcpPeerServer.peerFromSocketAndKey(saslClient, sock, this,
    blockToken, datanodeId);
  peer.setReadTimeout(dfsClientConf.socketTimeout);
  success = true;
  return peer;
 } finally {
  // On the failure path, release both the half-built peer and the raw
  // socket; on success the caller takes ownership.
  if (!success) {
   IOUtils.cleanup(LOG, peer);
   IOUtils.closeSocket(sock);
  }
 }
}

代码示例来源:origin: ch.cern.hadoop/hadoop-common

/**
 * Sends raw request bytes to a fresh test server and asserts the exact
 * response bytes, comparing hex renderings for readable failures.
 * Socket and server are always torn down in the finally block.
 */
private void doIpcVersionTest(
  byte[] requestData,
  byte[] expectedResponse) throws IOException {
 Server server = new TestServer(1, true);
 InetSocketAddress addr = NetUtils.getConnectAddress(server);
 server.start();
 Socket socket = new Socket();
 try {
  NetUtils.connect(socket, addr, 5000);
  
  // Write the request then drain the complete response into a buffer.
  OutputStream out = socket.getOutputStream();
  InputStream in = socket.getInputStream();
  out.write(requestData, 0, requestData.length);
  out.flush();
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  IOUtils.copyBytes(in, baos, 256);
  
  byte[] responseData = baos.toByteArray();
  
  assertEquals(
    StringUtils.byteToHexString(expectedResponse),
    StringUtils.byteToHexString(responseData));
 } finally {
  IOUtils.closeSocket(socket);
  server.stop();
 }
}

代码示例来源:origin: com.github.jiayuhan-it/hadoop-common

/**
 * Sends raw request bytes to a fresh test server and asserts the exact
 * response bytes, comparing hex renderings for readable failures.
 * Socket and server are always torn down in the finally block.
 */
private void doIpcVersionTest(
  byte[] requestData,
  byte[] expectedResponse) throws IOException {
 Server server = new TestServer(1, true);
 InetSocketAddress addr = NetUtils.getConnectAddress(server);
 server.start();
 Socket socket = new Socket();
 try {
  NetUtils.connect(socket, addr, 5000);
  
  // Write the request then drain the complete response into a buffer.
  OutputStream out = socket.getOutputStream();
  InputStream in = socket.getInputStream();
  out.write(requestData, 0, requestData.length);
  out.flush();
  ByteArrayOutputStream baos = new ByteArrayOutputStream();
  IOUtils.copyBytes(in, baos, 256);
  
  byte[] responseData = baos.toByteArray();
  
  assertEquals(
    StringUtils.byteToHexString(expectedResponse),
    StringUtils.byteToHexString(responseData));
 } finally {
  IOUtils.closeSocket(socket);
  server.stop();
 }
}

相关文章