This article collects code examples of the Java method org.apache.hadoop.hbase.HBaseTestingUtility.getRandomUUID() and shows how HBaseTestingUtility.getRandomUUID() is used in practice. The examples are drawn from selected projects on platforms such as GitHub, Stack Overflow, and Maven, and should serve as useful references. Details of HBaseTestingUtility.getRandomUUID() are as follows:
Package path: org.apache.hadoop.hbase.HBaseTestingUtility
Class name: HBaseTestingUtility
Method name: getRandomUUID
Method description: none available
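Since the article provides no description for the method itself, here is a minimal standalone sketch of the typical usage pattern, distilled from the examples below. It assumes the hbase-server test artifact is on the classpath; the class name RandomUUIDDemo is made up for illustration. As the snippets show, getRandomUUID() returns a java.util.UUID, and tests frequently strip the dashes from its string form to build unique table names or file-name suffixes.

import java.util.UUID;

import org.apache.hadoop.hbase.HBaseTestingUtility;

public class RandomUUIDDemo {
  public static void main(String[] args) {
    // The test utility exposes getRandomUUID() as an instance method.
    HBaseTestingUtility util = new HBaseTestingUtility();

    // Returns a java.util.UUID.
    UUID uuid = util.getRandomUUID();

    // Common pattern in the examples below: strip the dashes to form a unique
    // suffix for table names, mob file names, or temporary script names.
    String suffix = uuid.toString().replaceAll("-", "");
    System.out.println("uuid = " + uuid + ", suffix = " + suffix);
  }
}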
Code example source: apache/hbase

SimpleRange(byte[] start, byte[] end) {
  this.start = start;
  this.end = end;
  this.tiebreaker = TEST_UTIL.getRandomUUID();
}
Code example source: apache/hbase

@Override
public UUID getPeerUUID() {
  return utility1.getRandomUUID();
}
Code example source: apache/hbase

private void init(String tableName) throws Exception {
  fs = FileSystem.get(conf);
  Path testDir = FSUtils.getRootDir(conf);
  Path mobTestDir = new Path(testDir, MobConstants.MOB_DIR_NAME);
  basePath = new Path(new Path(mobTestDir, tableName), family);
  mobSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
  delSuffix = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "") + "_del";
  allFiles.clear();
  mobFiles.clear();
  delFiles.clear();
}
Code example source: apache/hbase

@Before
public void setup() throws Exception {
  tn = TableName.valueOf("test-" + util.getRandomUUID());
  args = new HashMap<>();
  // Prepare the arguments required for the test.
  args.put(ImportTsv.COLUMNS_CONF_KEY, "HBASE_ROW_KEY,FAM:A,FAM:B");
  args.put(ImportTsv.SEPARATOR_CONF_KEY, "\u001b");
}
Code example source: apache/hbase

@Before
public void setUp() {
  Random random = new Random();
  uuid = TEST_UTIL.getRandomUUID().toString().replaceAll("-", "");
  date = new Date();
  dateStr = MobUtils.formatDate(date);
  startKey = Bytes.toBytes(random.nextInt());
}
Code example source: apache/hbase

private Configuration getConfForNodeHealthScript() throws IOException {
  Configuration conf = UTIL.getConfiguration();
  File tempDir = new File(UTIL.getDataTestDir().toString());
  if (!tempDir.exists()) {
    if (!tempDir.mkdirs()) {
      throw new IOException("Failed mkdirs " + tempDir);
    }
  }
  String scriptName = "HealthScript" + UTIL.getRandomUUID().toString()
    + (Shell.WINDOWS ? ".cmd" : ".sh");
  healthScriptFile = new File(tempDir.getAbsolutePath(), scriptName);
  conf.set(HConstants.HEALTH_SCRIPT_LOC, healthScriptFile.getAbsolutePath());
  conf.setLong(HConstants.HEALTH_FAILURE_THRESHOLD, 3);
  conf.setLong(HConstants.HEALTH_SCRIPT_TIMEOUT, SCRIPT_TIMEOUT);
  return conf;
}
Code example source: apache/hbase

private void putRows(Table ht, int numRows, String value, String key) throws IOException {
  for (int i = 0; i < numRows; i++) {
    String row = key + "_" + TEST_UTIL.getRandomUUID().toString();
    System.out.println(String.format("Saving row: %s, with value %s", row, value));
    Put put = new Put(Bytes.toBytes(row));
    put.setDurability(Durability.SKIP_WAL);
    put.addColumn(Bytes.toBytes("trans-blob"), null, Bytes.toBytes("value for blob"));
    put.addColumn(Bytes.toBytes("trans-type"), null, Bytes.toBytes("statement"));
    put.addColumn(Bytes.toBytes("trans-date"), null, Bytes.toBytes("20090921010101999"));
    put.addColumn(Bytes.toBytes("trans-tags"), Bytes.toBytes("qual2"), Bytes.toBytes(value));
    put.addColumn(Bytes.toBytes("trans-group"), null, Bytes.toBytes("adhocTransactionGroupId"));
    ht.put(put);
  }
}
Code example source: apache/hbase

/**
 * Creates a new mob file name from the old one.
 * @param oldFileName The old mob file name.
 * @return The new mob file name.
 */
String createMobFileName(String oldFileName) {
  MobFileName mobFileName = MobFileName.create(oldFileName);
  String startKey = mobFileName.getStartKey();
  String date = mobFileName.getDate();
  return MobFileName.create(startKey, date,
    TEST_UTIL.getRandomUUID().toString().replaceAll("-", "")).getFileName();
}
Code example source: apache/hbase

@Test
public void testBulkOutputWithInvalidLabels() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  // 2 data rows, one with a valid label and one with an invalid label
  String data =
    "KEY\u001bVALUE1\u001bVALUE2\u001bprivate\nKEY1\u001bVALUE1\u001bVALUE2\u001binvalid\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1, 2);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Test
public void testBulkOutputWithTsvImporterTextMapper() throws Exception {
  final TableName table = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  String FAMILY = "FAM";
  Path bulkOutputPath = new Path(util.getDataTestDirOnTestFS(table.getNameAsString()), "hfiles");
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.MAPPER_CONF_KEY
      + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b",
    "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + bulkOutputPath.toString(),
    table.getNameAsString()
  };
  String data = "KEY\u001bVALUE4\u001bVALUE8\u001bsecret&private\n";
  doMROnTableTest(util, FAMILY, data, args, 4);
  util.deleteTable(table);
}
Code example source: apache/hbase

@Test
public void testMROnTableWithBulkload() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Test
public void testMRWithOutputFormat() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
    "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  String data = "KEY\u001bVALUE4\u001bVALUE8\u001bsecret&private\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Test
public void testMROnTableWithInvalidOperationAttr() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.MAPPER_CONF_KEY
      + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapperForOprAttr",
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest1=>myvalue\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1, false);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Test
public void testMROnTable() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.MAPPER_CONF_KEY
      + "=org.apache.hadoop.hbase.mapreduce.TsvImporterCustomTestMapperForOprAttr",
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_ATTRIBUTES_KEY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001btest=>myvalue\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1, true);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Test
public void testMROnTable() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_TTL",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001b1000000\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Test
public void testMROnTable() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Test
public void testBulkOutputWithTsvImporterTextMapperWithInvalidLabels() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  Path hfiles = new Path(util.getDataTestDirOnTestFS(tableName.getNameAsString()), "hfiles");
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.MAPPER_CONF_KEY
      + "=org.apache.hadoop.hbase.mapreduce.TsvImporterTextMapper",
    "-D" + ImportTsv.BULK_OUTPUT_CONF_KEY + "=" + hfiles.toString(),
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  // 2 data rows, one with a valid label and one with an invalid label
  String data =
    "KEY\u001bVALUE1\u001bVALUE2\u001bprivate\nKEY1\u001bVALUE1\u001bVALUE2\u001binvalid\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1, 2);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Override
public Object run() throws Exception {
  // force a new RS connection
  conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
  Connection connection = ConnectionFactory.createConnection(conf);
  Table t = connection.getTable(TEST_TABLE.getTableName());
  try {
    Scan scan = new Scan().addFamily(TEST_FAMILY2);
    Result result = t.getScanner(scan).next();
    if (result != null) {
      return result.listCells();
    }
    return null;
  } finally {
    t.close();
    connection.close();
  }
}
}, USER_OTHER);
Code example source: apache/hbase

@Test
public void testMROnTableWithDeletes() throws Exception {
  final TableName tableName = TableName.valueOf(name.getMethodName() + util.getRandomUUID());
  // Prepare the arguments required for the test.
  String[] args = new String[] {
    "-D" + ImportTsv.MAPPER_CONF_KEY + "=org.apache.hadoop.hbase.mapreduce.TsvImporterMapper",
    "-D" + ImportTsv.COLUMNS_CONF_KEY + "=HBASE_ROW_KEY,FAM:A,FAM:B,HBASE_CELL_VISIBILITY",
    "-D" + ImportTsv.SEPARATOR_CONF_KEY + "=\u001b", tableName.getNameAsString() };
  String data = "KEY\u001bVALUE1\u001bVALUE2\u001bsecret&private\n";
  util.createTable(tableName, FAMILY);
  doMROnTableTest(util, FAMILY, data, args, 1);
  issueDeleteAndVerifyData(tableName);
  util.deleteTable(tableName);
}
Code example source: apache/hbase

@Override
public Object run() throws Exception {
  // force a new RS connection
  conf.set("testkey", TEST_UTIL.getRandomUUID().toString());
  Connection connection = ConnectionFactory.createConnection(conf);
  Table t = connection.getTable(TEST_TABLE.getTableName());
  try {
    Scan scan = new Scan().addFamily(TEST_FAMILY1);
    Result result = t.getScanner(scan).next();
    if (result != null) {
      assertTrue("Improper exclusion", result.containsColumn(TEST_FAMILY1, TEST_Q1));
      assertFalse("Improper inclusion", result.containsColumn(TEST_FAMILY2, TEST_Q1));
      return result.listCells();
    }
    return null;
  } finally {
    t.close();
    connection.close();
  }
}
}, USER_OTHER);