Usage of org.apache.hadoop.hbase.io.hfile.HFile.getWriterFactory() with code examples


This article collects Java code examples of the org.apache.hadoop.hbase.io.hfile.HFile.getWriterFactory() method and shows how it is used in practice. The examples come from selected projects on GitHub, Stack Overflow, Maven, and similar platforms, so they should serve as useful references. Details of HFile.getWriterFactory():
Package: org.apache.hadoop.hbase.io.hfile
Class: HFile
Method: getWriterFactory

About HFile.getWriterFactory

Returns the factory to be used to create HFile writers.
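
The returned factory is a builder: you configure a target (a path or an output stream) and an HFileContext, then call create(). As a quick orientation before the collected examples, here is a minimal, self-contained write sketch; the path /tmp/example.hfile and the cell contents are illustrative, not taken from any of the snippets below:

import org.apache.hadoop.conf.Configuration;
import org.apache.hadoop.fs.FileSystem;
import org.apache.hadoop.fs.Path;
import org.apache.hadoop.hbase.HBaseConfiguration;
import org.apache.hadoop.hbase.KeyValue;
import org.apache.hadoop.hbase.io.hfile.CacheConfig;
import org.apache.hadoop.hbase.io.hfile.HFile;
import org.apache.hadoop.hbase.io.hfile.HFileContext;
import org.apache.hadoop.hbase.io.hfile.HFileContextBuilder;
import org.apache.hadoop.hbase.util.Bytes;

public class HFileWriteExample {
 public static void main(String[] args) throws Exception {
  Configuration conf = HBaseConfiguration.create();
  FileSystem fs = FileSystem.get(conf);
  Path path = new Path("/tmp/example.hfile"); // illustrative path
  HFileContext context = new HFileContextBuilder().build();
  // Configure the factory, then call create() to obtain the writer
  HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf))
    .withPath(fs, path)
    .withFileContext(context)
    .create();
  try {
   // Cells must be appended in sorted order
   writer.append(new KeyValue(Bytes.toBytes("row1"), Bytes.toBytes("f"),
     Bytes.toBytes("q"), System.currentTimeMillis(), Bytes.toBytes("v")));
  } finally {
   writer.close();
  }
 }
}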

Code examples

Code example source: apache/hbase

/**
 * Returns the factory to be used to create {@link HFile} writers.
 * Disables block cache access for all writers created through the
 * returned factory.
 */
public static final WriterFactory getWriterFactoryNoCache(Configuration
   conf) {
 return HFile.getWriterFactory(conf, CacheConfig.DISABLED);
}
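
A short usage sketch of this no-cache factory (fs, path, and an HFileContext named context are assumed to be defined as in the surrounding examples):

// Writers created this way never touch the block cache
HFile.Writer writer = HFile.getWriterFactoryNoCache(conf)
  .withPath(fs, path)
  .withFileContext(context)
  .create();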

Code example source: apache/hbase (truncated excerpt)

this.timeRangeTracker = TimeRangeTracker.create(TimeRangeTracker.Type.NON_SYNC);
writer = HFile.getWriterFactory(conf, cacheConf)
  .withPath(fs, path)
  .withComparator(comparator)
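
The excerpt above breaks off mid-chain. A plausible completion, assuming an HFileContext named fileContext as in the other snippets (the original source supplies its own context and further options):

writer = HFile.getWriterFactory(conf, cacheConf)
  .withPath(fs, path)
  .withComparator(comparator)
  .withFileContext(fileContext) // assumed; not shown in the truncated excerpt
  .create();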

Code example source: apache/hbase

/**
 * Create an HFile containing a single cell for the given row index, with dummyData as its value.
 */
private void createHFile(Path path, int rowIdx, byte[] dummyData) throws IOException {
 HFileContext meta = new HFileContextBuilder().build();
 HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf)).withPath(fs, path)
   .withFileContext(meta).create();
 long now = System.currentTimeMillis();
 try {
  KeyValue kv = new KeyValue(Bytes.add(STARTROW, Bytes.toBytes(rowIdx)), COLUMN_FAMILY,
    Bytes.toBytes("colX"), now, dummyData);
  writer.append(kv);
 } finally {
  writer.appendFileInfo(BULKLOAD_TIME_KEY, Bytes.toBytes(System.currentTimeMillis()));
  writer.close();
 }
}
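
To complement the writer-side examples, a minimal read-back sketch using the HBase 2.x reader API (variable names mirror the snippet above):

// Open the finished file and iterate over its cells
HFile.Reader reader = HFile.createReader(fs, path, new CacheConfig(conf), true, conf);
HFileScanner scanner = reader.getScanner(false, false); // no block caching, no pread
if (scanner.seekTo()) { // false would mean the file holds no cells
 do {
  Cell cell = scanner.getCell();
  // process cell ...
 } while (scanner.next());
}
reader.close();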

Code example source: apache/hbase

private static void createHFile(Configuration conf, FileSystem fs, Path path, byte[] family,
  byte[] qualifier) throws IOException {
 HFileContext context = new HFileContextBuilder().build();
 HFile.Writer writer = HFile.getWriterFactory(conf, new CacheConfig(conf)).withPath(fs, path)
   .withFileContext(context).create();
 long now = System.currentTimeMillis();
 try {
  for (int i = 1; i <= 9; i++) {
   KeyValue kv =
     new KeyValue(Bytes.toBytes(i + ""), family, qualifier, now, Bytes.toBytes(i + ""));
   writer.append(kv);
  }
 } finally {
  writer.close();
 }
}

Code example source: apache/hbase (truncated excerpt)

HFile.Writer writer = HFile.getWriterFactory(
  TEST_UTIL.getConfiguration(), cacheConf)
    .withOutputStream(fout)

Code example source: apache/hbase

private void metablocks(final String compress) throws Exception {
 Path mFile = new Path(ROOT_DIR, "meta.hfile");
 FSDataOutputStream fout = createFSOutput(mFile);
 HFileContext meta = new HFileContextBuilder()
           .withCompression(HFileWriterImpl.compressionByName(compress))
           .withBlockSize(minBlockSize).build();
 Writer writer = HFile.getWriterFactory(conf, cacheConf)
   .withOutputStream(fout)
   .withFileContext(meta)
   .create();
 someTestingWithMetaBlock(writer);
 writer.close();
 fout.close();
 FSDataInputStream fin = fs.open(mFile);
 // reuse fin rather than opening a second stream on the same file
 Reader reader = HFile.createReaderFromStream(mFile, fin,
   this.fs.getFileStatus(mFile).getLen(), cacheConf, conf);
 reader.loadFileInfo();
 // No data -- this should return false.
 assertFalse(reader.getScanner(false, false).seekTo());
 someReadingWithMetaBlock(reader);
 fs.delete(mFile, true);
 reader.close();
 fin.close();
}
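
The helper someTestingWithMetaBlock is not shown; at its core it writes and verifies named meta blocks. A hedged sketch of the writer-side call (the block name and payload here are illustrative):

// Meta blocks are named Writable payloads stored alongside the data blocks
writer.appendMetaBlock("CAPITAL_OF_USA", new Text("Washington, D.C."));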

Code example source: apache/hbase

@Test
public void testNullMetaBlocks() throws Exception {
 for (Compression.Algorithm compressAlgo :
   HBaseCommonTestingUtility.COMPRESSION_ALGORITHMS) {
  Path mFile = new Path(ROOT_DIR, "nometa_" + compressAlgo + ".hfile");
  FSDataOutputStream fout = createFSOutput(mFile);
  HFileContext meta = new HFileContextBuilder().withCompression(compressAlgo)
            .withBlockSize(minBlockSize).build();
  Writer writer = HFile.getWriterFactory(conf, cacheConf)
    .withOutputStream(fout)
    .withFileContext(meta)
    .create();
  KeyValue kv = new KeyValue("foo".getBytes(), "f1".getBytes(), null, "value".getBytes());
  writer.append(kv);
  writer.close();
  fout.close();
  Reader reader = HFile.createReader(fs, mFile, cacheConf, true, conf);
  reader.loadFileInfo();
  assertNull(reader.getMetaBlock("non-existent", false));
  reader.close(); // release the reader before the next iteration
 }
}

Code example source: apache/hbase

/**
 * Create a truncated hfile and verify that exception thrown.
 */
@Test
public void testCorruptTruncatedHFile() throws IOException {
 Path f = new Path(ROOT_DIR, testName.getMethodName());
 HFileContext  context = new HFileContextBuilder().build();
 Writer w = HFile.getWriterFactory(conf, cacheConf).withPath(this.fs, f)
   .withFileContext(context).create();
 writeSomeRecords(w, 0, 100, false);
 w.close();
 Path trunc = new Path(f.getParent(), "truncated");
 truncateFile(fs, w.getPath(), trunc);
 try {
  Reader r = HFile.createReader(fs, trunc, cacheConf, true, conf);
 } catch (CorruptHFileException che) {
  // Expected failure
  return;
 }
 fail("Should have thrown exception");
}

Code example source: apache/hbase

/**
 * Test empty HFile.
 * Test all features work reasonably when hfile is empty of entries.
 * @throws IOException
 */
@Test
public void testEmptyHFile() throws IOException {
 Path f = new Path(ROOT_DIR, testName.getMethodName());
 HFileContext context = new HFileContextBuilder().withIncludesTags(false).build();
 Writer w =
   HFile.getWriterFactory(conf, cacheConf).withPath(fs, f).withFileContext(context).create();
 w.close();
 Reader r = HFile.createReader(fs, f, cacheConf, true, conf);
 r.loadFileInfo();
 assertFalse(r.getFirstKey().isPresent());
 assertFalse(r.getLastKey().isPresent());
}

Code example source: apache/hbase (truncated excerpt)

.withCompression(HFileWriterImpl.compressionByName(codec))
          .build();
Writer writer = HFile.getWriterFactory(conf, cacheConf)
  .withOutputStream(fout)
  .withFileContext(meta)

Code example source: apache/hbase (truncated excerpt)

.withDataBlockEncoding(encoding)
  .build();
HFile.Writer writer = HFile.getWriterFactory(configuration, new CacheConfig(configuration))
  .withPath(fs, path)
  .withFileContext(meta)

Code example source: apache/hbase

HFileContext meta = new HFileContextBuilder().withBlockSize(1024).build();
HFile.Writer w =
  HFile.getWriterFactory(conf, cacheConf).withPath(fs, p).withFileContext(meta).create();

Code example source: apache/hbase

private void createHFile(Path path,
  byte[] family, byte[] qualifier,
  byte[] startKey, byte[] endKey, int numRows) throws IOException {
 HFile.Writer writer = null;
 long now = System.currentTimeMillis();
 try {
  HFileContext context = new HFileContextBuilder().build();
  writer = HFile.getWriterFactory(conf, new CacheConfig(conf)).withPath(fs, path)
    .withFileContext(context).create();
  // subtract 2 since numRows doesn't include boundary keys
  for (byte[] key : Bytes.iterateOnSplits(startKey, endKey, true, numRows - 2)) {
   KeyValue kv = new KeyValue(key, family, qualifier, now, key);
   writer.append(kv);
  }
 } finally {
  if (writer != null) {
   writer.close();
  }
 }
}

Code example source: apache/hbase (truncated excerpt)

.withDataBlockEncoding(DataBlockEncoding.NONE).build();
HFile.Writer writer =
  HFile.getWriterFactory(conf, cacheConf).withPath(fs, hfilePath).withFileContext(meta)
    .create();
Random rand = new Random(19231737);

Code example source: apache/hbase

/**
 * Create an HFile with the given number of rows, each holding the specified value.
 */
public static void createHFile(FileSystem fs, Path path, byte[] family,
  byte[] qualifier, byte[] value, int numRows) throws IOException {
 HFileContext context = new HFileContextBuilder().withBlockSize(BLOCKSIZE)
             .withCompression(COMPRESSION)
             .build();
 HFile.Writer writer = HFile
   .getWriterFactory(conf, new CacheConfig(conf))
   .withPath(fs, path)
   .withFileContext(context)
   .create();
 long now = System.currentTimeMillis();
 try {
  // write numRows cells, all sharing the same timestamp
  for (int i = 0; i < numRows; i++) {
   KeyValue kv = new KeyValue(rowkey(i), family, qualifier, now, value);
   writer.append(kv);
  }
  writer.appendFileInfo(BULKLOAD_TIME_KEY, Bytes.toBytes(now));
 } finally {
  writer.close();
 }
}
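
A hedged example of invoking this helper; the path and the literals are illustrative:

// Write 100 rows under family "f", qualifier "q", all with value "v"
createHFile(fs, new Path("/tmp/bulkload/hfile1"), Bytes.toBytes("f"),
  Bytes.toBytes("q"), Bytes.toBytes("v"), 100);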

Code example source: apache/hbase (truncated excerpt)

HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
  .withOutputStream(out)
  .withFileContext(fileContext)

Code example source: apache/hbase (truncated excerpt)

TEST_UTIL.getRandomUUID().toString() + ".hfile");
FSDataOutputStream out = fs.create(path);
HFile.Writer writer = HFile.getWriterFactory(conf, cacheConf)
 .withOutputStream(out)
 .withFileContext(fileContext)

Code example source: apache/hbase (truncated excerpt)

.build();
HFile.Writer writer =
  HFile.getWriterFactory(conf, cacheConf)
    .withPath(fs, hfilePath)
    .withFileContext(meta)

Code example source: harbby/presto-connectors

/**
 * Returns the factory to be used to create {@link HFile} writers.
 * Disables block cache access for all writers created through the
 * returned factory.
 */
public static final WriterFactory getWriterFactoryNoCache(Configuration
   conf) {
 Configuration tempConf = new Configuration(conf);
 tempConf.setFloat(HConstants.HFILE_BLOCK_CACHE_SIZE_KEY, 0.0f);
 return HFile.getWriterFactory(conf, new CacheConfig(tempConf));
}
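
Note how this older version differs from the current apache/hbase snippet shown first: it disables caching by copying the configuration, setting hfile.block.cache.size to 0, and building a CacheConfig from the copy, whereas newer code simply passes CacheConfig.DISABLED.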
