本文整理了Java中org.apache.hadoop.io.IOUtils.closeStream()
方法的一些代码示例,展示了IOUtils.closeStream()
的具体用法。这些代码示例主要来源于Github
/Stackoverflow
/Maven
等平台,是从一些精选项目中提取出来的代码,具有较强的参考意义,能在一定程度上帮助到你。IOUtils.closeStream()
方法的具体详情如下:
包路径:org.apache.hadoop.io.IOUtils
类名称:IOUtils
方法名:closeStream
[英]Closes the stream ignoring Throwable. Must only be called in cleaning up from exception handlers.
[中]关闭流,并忽略一切 Throwable。只能在异常处理程序的清理逻辑中调用。
代码示例来源:origin: apache/hbase
@Override
protected void cleanup(Context context) {
    // Task-teardown hook: release both filesystem handles best-effort.
    // closeStream is null-safe and swallows Throwable, so cleanup cannot fail.
    IOUtils.closeStream(inputFs);
    IOUtils.closeStream(outputFs);
}
代码示例来源:origin: apache/hive
/** Quietly closes the current input stream and drops the reference. */
private void resetIn() {
    IOUtils.closeStream(in);  // null-safe, suppresses all errors
    in = null;
}
代码示例来源:origin: apache/hive
@Override
public void closeStream() {
    // Best-effort close of the history stream; any failure is suppressed.
    IOUtils.closeStream(histStream);
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
@Override
public void close() {
    // Close both directions best-effort; closeStream suppresses any Throwable,
    // so close() never propagates an error from either stream.
    IOUtils.closeStream(out);
    IOUtils.closeStream(in);
}
}
代码示例来源:origin: apache/drill
/** Quietly closes the current input stream (if any) and clears the reference. */
private void resetIn() {
    // closeStream tolerates null and assigning null to an already-null field
    // is a no-op, so no explicit guard is needed; behavior is unchanged.
    IOUtils.closeStream(in);
    in = null;
}
代码示例来源:origin: apache/kylin
/**
 * Copies the given stream into a fresh temp file ("kylin_stats_tmp*.seq")
 * and returns that file. The source stream is always closed, even on failure.
 *
 * @param inputStream source data; consumed and closed by this method
 * @return the temp file containing the copied bytes
 * @throws IOException if the temp file cannot be created or the copy/close fails
 */
private File writeTmpSeqFile(InputStream inputStream) throws IOException {
    File tempFile = File.createTempFile("kylin_stats_tmp", ".seq");
    // try-with-resources instead of closeStream(out) in a finally block:
    // a failure while flushing/closing the output is now reported to the
    // caller rather than silently swallowed, which previously could hide
    // a truncated temp file.
    try (FileOutputStream out = new FileOutputStream(tempFile)) {
        org.apache.commons.io.IOUtils.copy(inputStream, out);
    } finally {
        IOUtils.closeStream(inputStream);  // quiet close of the source
    }
    return tempFile;
}
代码示例来源:origin: apache/hbase
/** Tears down the connection: both streams and the socket, all errors suppressed. */
private void closeSocket() {
    IOUtils.closeStream(out);
    IOUtils.closeStream(in);
    IOUtils.closeSocket(socket);
    // Clear the references so the connection reads as closed and is GC-eligible.
    out = null;
    in = null;
    socket = null;
}
代码示例来源:origin: apache/hive
public void close() {
    // Quietly close every loaded column-value buffer.
    for (NonSyncDataOutputBuffer element : loadedColumnsValueBuffer) {
        IOUtils.closeStream(element);
    }
    // Decompression resources exist only when a codec was configured.
    if (codec != null) {
        IOUtils.closeStream(decompressBuffer);
        if (valDecompressor != null) {
            // Make sure we only return valDecompressor once.
            CodecPool.returnDecompressor(valDecompressor);
            valDecompressor = null;  // guards against double-return on repeated close()
        }
    }
}
代码示例来源:origin: apache/hive
/** Close the reader, releasing streams and pooled decompressor resources. */
public void close() {
    IOUtils.closeStream(in);       // quiet close of the underlying input
    currentValue.close();
    if (decompress) {
        IOUtils.closeStream(keyDecompressedData);
        if (keyDecompressor != null) {
            // Make sure we only return keyDecompressor once.
            CodecPool.returnDecompressor(keyDecompressor);
            keyDecompressor = null;  // guards against double-return on repeated close()
        }
    }
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
private void close() throws IOException {
    // Quietly close every input reader; null each slot so a repeated
    // call (or a partial failure elsewhere) is harmless.
    for (int i = 0; i < inReaders.length; i++) {
        IOUtils.closeStream(inReaders[i]);
        inReaders[i] = null;
    }
    // The writer is closed loudly: a failure here must propagate to the
    // caller, since it can mean the output was not fully flushed.
    if (outWriter != null) {
        outWriter.close();
        outWriter = null;
    }
}
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
@Override
public void close() throws IOException {
    if (out != null) {
        // Shadow-reference pattern: on the success path the shadow is nulled
        // after close(), making the finally-block closeStream a no-op; if
        // finish() or close() throws, the shadow still points at the stream
        // and closeStream releases it quietly while the exception propagates.
        OutputStream outShadow = this.out;
        try {
            finish();
            outShadow.close();
            outShadow = null;
        } finally {
            IOUtils.closeStream(outShadow);
        }
    }
}
代码示例来源:origin: apache/hive
@Override
public Object terminate(AggregationBuffer agg) throws HiveException {
    // Reuse the shared output buffer for this serialization.
    result.reset();
    try {
        BloomKFilter.serialize(result, ((BloomFilterBuf)agg).bloomFilter);
    } catch (IOException e) {
        throw new HiveException(e);  // wrap, preserving the cause
    } finally {
        IOUtils.closeStream(result);
    }
    // NOTE(review): toByteArray() is read after close(), which presumably is
    // safe because result is a byte-array-backed stream — confirm its type.
    return new BytesWritable(result.toByteArray());
}
代码示例来源:origin: apache/hive
/**
 * Builds the aggregation buffer: serializes an empty BloomKFilter sized for
 * {@code expectedEntries} and keeps the resulting bytes.
 */
public Aggregation(long expectedEntries) {
    ByteArrayOutputStream buffer = null;
    try {
        buffer = new ByteArrayOutputStream();
        BloomKFilter.serialize(buffer, new BloomKFilter(expectedEntries));
        bfBytes = buffer.toByteArray();
    } catch (Exception err) {
        // Any failure (bad size, serialization error) surfaces as an
        // argument problem, with the original cause attached.
        throw new IllegalArgumentException("Error creating aggregation buffer", err);
    } finally {
        IOUtils.closeStream(buffer);
    }
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
@Override
public synchronized void close() throws IOException {
    super.close();
    // After the underlying writer closes, persist the bloom filter next to the data.
    DataOutputStream out = fs.create(new Path(dir, BLOOM_FILE_NAME), true);
    try {
        bloomFilter.write(out);
        out.flush();
        out.close();
        // Null out so the finally-block closeStream is a no-op on success;
        // if write/flush/close threw, closeStream still releases the stream
        // quietly while the original exception propagates.
        out = null;
    } finally {
        IOUtils.closeStream(out);
    }
}
代码示例来源:origin: apache/hive
/**
 * Serializes a Writable (class name followed by its payload) into an
 * in-memory buffer, then emits the buffer as one WRITABLE-typed record.
 */
public void writeWritable(Writable w) throws IOException {
    DataOutputStream dos = null;
    try {
        ByteArrayOutputStream baos = new ByteArrayOutputStream();
        dos = new DataOutputStream(baos);
        // Record the concrete class name so a reader can reconstruct the instance.
        WritableUtils.writeString(dos, w.getClass().getName());
        w.write(dos);
        out.writeBytes(baos.toByteArray(), Type.WRITABLE.code);
        dos.close();
        // Null out so the finally-block closeStream is a no-op on success;
        // on failure it still quietly closes the half-written buffer.
        dos = null;
    } finally {
        IOUtils.closeStream(dos);
    }
}
代码示例来源:origin: apache/hive
/**
 * Loads sampled keys from a file: an int count, then {@code count} entries of
 * (int length, raw bytes) each, appending every key to {@code sampled}.
 */
public void addSampleFile(Path inputPath, JobConf job) throws IOException {
    FileSystem fs = inputPath.getFileSystem(job);
    FSDataInputStream input = fs.open(inputPath);
    try {
        int remaining = input.readInt();
        while (remaining-- > 0) {
            byte[] key = new byte[input.readInt()];
            input.readFully(key);
            sampled.add(key);
        }
    } finally {
        IOUtils.closeStream(input);  // quiet close, even on a short read
    }
}
代码示例来源:origin: apache/hive
/**
 * Opens the named file and processes its contents, always closing the reader.
 *
 * @param fileName file to load and process
 * @return the result of processReader on the file's contents
 * @throws IOException if the file cannot be opened or read
 */
public int processFile(String fileName) throws IOException {
    BufferedReader reader = null;
    try {
        reader = loadFile(fileName);
        return processReader(reader);
    } finally {
        IOUtils.closeStream(reader);  // null-safe if loadFile threw
    }
}
代码示例来源:origin: org.apache.hadoop/hadoop-common
/**
 * Streams {@code in} into the target path. NOTE(review): the lazyPersist and
 * direct flags are only forwarded to create(); their exact semantics live there.
 */
void writeStreamToFile(InputStream in, PathData target,
    boolean lazyPersist, boolean direct)
    throws IOException {
    FSDataOutputStream out = null;
    try {
        out = create(target, lazyPersist, direct);
        // copyBytes with close=true closes both streams when the copy succeeds.
        IOUtils.copyBytes(in, out, getConf(), true);
    } finally {
        IOUtils.closeStream(out); // just in case copyBytes didn't
    }
}
代码示例来源:origin: apache/hive
/**
 * Writes the "show create database" text for the requested database into the
 * descriptor's result file, always closing the output stream.
 */
private int showCreateDatabase(Hive db, ShowCreateDatabaseDesc showCreateDb) throws HiveException {
    DataOutputStream outStream = getOutputStream(showCreateDb.getResFile());
    try {
        return showCreateDatabase(db, outStream, showCreateDb.getDatabaseName());
    } catch (Exception e) {
        // Normalize any failure to HiveException, preserving the cause.
        throw new HiveException(e);
    } finally {
        IOUtils.closeStream(outStream);
    }
}
代码示例来源:origin: apache/drill
/**
 * Emits the "show create database" output for the named database to the
 * descriptor's result file; the stream is closed on every path.
 */
private int showCreateDatabase(Hive db, ShowCreateDatabaseDesc showCreateDb) throws HiveException {
    final DataOutputStream output = getOutputStream(showCreateDb.getResFile());
    try {
        final String targetDb = showCreateDb.getDatabaseName();
        return showCreateDatabase(db, output, targetDb);
    } catch (Exception e) {
        // Wrap every failure as HiveException with the cause attached.
        throw new HiveException(e);
    } finally {
        IOUtils.closeStream(output);
    }
}
内容来源于网络,如有侵权,请联系作者删除!