This article collects code examples of the Java method org.sonar.duplications.block.Block.getStartUnit() and shows how Block.getStartUnit() is used in practice. The examples are mainly drawn from GitHub, Stack Overflow, Maven and similar platforms, extracted from selected projects, so they should serve as useful references. Details of Block.getStartUnit() are as follows:

Package path: org.sonar.duplications.block.Block
Class name: Block
Method name: getStartUnit
Method description: not available.
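Before the collected examples, here is a minimal self-contained sketch of what getStartUnit() returns: the index of the first token unit covered by a block (its counterpart getEndUnit() returns the last one). It only reuses the builder calls that appear in the test example further down; the resource id and numeric values are purely illustrative.

import org.sonar.duplications.block.Block;
import org.sonar.duplications.block.ByteArray;

public class BlockStartUnitExample {
  public static void main(String[] args) {
    // Build a block covering lines 10..12 and token units 40..55 (illustrative values).
    Block block = Block.builder()
      .setResourceId("src/main/java/Example.java")
      .setBlockHash(new ByteArray(1))
      .setIndexInFile(0)
      .setLines(10, 12)
      .setUnit(40, 55)
      .build();

    // getStartUnit() returns the first token-unit index (40 here),
    // getEndUnit() the last one (55 here).
    System.out.println(block.getStartUnit() + ".." + block.getEndUnit());
  }
}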
Code example source: SonarSource/sonarqube
public void insert(InputFile inputFile, Collection<Block> blocks) {
  if (settings.isCrossProjectDuplicationEnabled()) {
    int id = ((DefaultInputFile) inputFile).scannerId();
    if (publisher.getWriter().hasComponentData(FileStructure.Domain.CPD_TEXT_BLOCKS, id)) {
      throw new UnsupportedOperationException("Trying to save CPD tokens twice for the same file is not supported: " + inputFile.absolutePath());
    }
    final ScannerReport.CpdTextBlock.Builder builder = ScannerReport.CpdTextBlock.newBuilder();
    publisher.getWriter().writeCpdTextBlocks(id, blocks.stream().map(block -> {
      builder.clear();
      builder.setStartLine(block.getStartLine());
      builder.setEndLine(block.getEndLine());
      builder.setStartTokenIndex(block.getStartUnit());
      builder.setEndTokenIndex(block.getEndUnit());
      builder.setHash(block.getBlockHash().toHexString());
      return builder.build();
    }).collect(Collectors.toList()));
  }
  for (Block block : blocks) {
    mem.insert(block);
  }
  if (blocks.isEmpty()) {
    LOG.debug("Not enough content in '{}' to have CPD blocks, it will not be part of the duplication detection", inputFile.relativePath());
  }
  indexedFiles.add(inputFile);
}
Code example source: SonarSource/sonarqube
/**
 * {@inheritDoc}
 * <p>
 * <strong>Note that this implementation allows insertion of two blocks with same index for one resource.</strong>
 * </p>
 */
@Override
public void insert(Block block) {
  sorted = false;
  ensureCapacity();
  resourceIds[size] = block.getResourceId();
  int[] hash = block.getBlockHash().toIntArray();
  if (hash.length != hashInts) {
    throw new IllegalArgumentException("Expected " + hashInts + " ints in hash, but got " + hash.length);
  }
  int offset = size * blockInts;
  for (int i = 0; i < hashInts; i++) {
    blockData[offset++] = hash[i];
  }
  blockData[offset++] = block.getIndexInFile();
  blockData[offset++] = block.getStartLine();
  blockData[offset++] = block.getEndLine();
  blockData[offset++] = block.getStartUnit();
  blockData[offset] = block.getEndUnit();
  size++;
}
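In the packed index above, each block occupies hashInts + 5 ints: the hash followed by indexInFile, startLine, endLine, startUnit and endUnit, so the value of getStartUnit() lands at offset hashInts + 3 inside a block's slice. The standalone sketch below is not SonarQube code; the class, field sizes and helper names are made up to illustrate how a start unit round-trips through such a layout.

// Illustrative only: a tiny packed store mirroring the int[] layout shown above.
public class PackedBlockSketch {
  private static final int HASH_INTS = 4;            // e.g. a 128-bit hash stored as 4 ints
  private static final int BLOCK_INTS = HASH_INTS + 5;

  private final int[] blockData = new int[10 * BLOCK_INTS];
  private int size = 0;

  void insert(int[] hash, int indexInFile, int startLine, int endLine, int startUnit, int endUnit) {
    int offset = size * BLOCK_INTS;
    for (int i = 0; i < HASH_INTS; i++) {
      blockData[offset++] = hash[i];
    }
    blockData[offset++] = indexInFile;
    blockData[offset++] = startLine;
    blockData[offset++] = endLine;
    blockData[offset++] = startUnit;
    blockData[offset] = endUnit;
    size++;
  }

  // startUnit sits at position HASH_INTS + 3 within a block's slice.
  int startUnitAt(int blockIndex) {
    return blockData[blockIndex * BLOCK_INTS + HASH_INTS + 3];
  }

  public static void main(String[] args) {
    PackedBlockSketch store = new PackedBlockSketch();
    store.insert(new int[] {1, 2, 3, 4}, 0, 10, 12, 40, 55);
    System.out.println(store.startUnitAt(0)); // prints 40
  }
}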
Code example source: SonarSource/sonarqube
  origin = part;
  builder.setLengthInUnits(lastBlock.getEndUnit() - firstBlock.getStartUnit() + 1);
} else if (part.getUnitStart() < origin.getUnitStart()) {
  origin = part;
Code example source: SonarSource/sonarqube
@Test
public void testBuilder() {
  ByteArray hash = new ByteArray(1);
  Block block = Block.builder()
    .setResourceId("resource")
    .setBlockHash(hash)
    .setIndexInFile(1)
    .setLines(2, 3)
    .setUnit(4, 5)
    .build();
  assertThat(block.getResourceId(), is("resource"));
  assertThat(block.getBlockHash(), sameInstance(hash));
  assertThat(block.getIndexInFile(), is(1));
  assertThat(block.getStartLine(), is(2));
  assertThat(block.getEndLine(), is(3));
  assertThat(block.getStartUnit(), is(4));
  assertThat(block.getEndUnit(), is(5));
}
Code example source: org.codehaus.sonar/sonar-duplications
/**
 * {@inheritDoc}
 * <p>
 * <strong>Note that this implementation allows insertion of two blocks with same index for one resource.</strong>
 * </p>
 */
@Override
public void insert(Block block) {
  sorted = false;
  ensureCapacity();
  resourceIds[size] = block.getResourceId();
  int[] hash = block.getBlockHash().toIntArray();
  if (hash.length != hashInts) {
    throw new IllegalArgumentException("Expected " + hashInts + " ints in hash, but got " + hash.length);
  }
  int offset = size * blockInts;
  for (int i = 0; i < hashInts; i++) {
    blockData[offset++] = hash[i];
  }
  blockData[offset++] = block.getIndexInFile();
  blockData[offset++] = block.getStartLine();
  blockData[offset++] = block.getEndLine();
  blockData[offset++] = block.getStartUnit();
  blockData[offset] = block.getEndUnit();
  size++;
}
Code example source: org.sonarsource.sonarqube/sonar-scanner-engine
public void insert(InputFile inputFile, Collection<Block> blocks) {
  if (settings.isCrossProjectDuplicationEnabled()) {
    int id = ((DefaultInputFile) inputFile).scannerId();
    if (publisher.getWriter().hasComponentData(FileStructure.Domain.CPD_TEXT_BLOCKS, id)) {
      throw new UnsupportedOperationException("Trying to save CPD tokens twice for the same file is not supported: " + inputFile.absolutePath());
    }
    final ScannerReport.CpdTextBlock.Builder builder = ScannerReport.CpdTextBlock.newBuilder();
    publisher.getWriter().writeCpdTextBlocks(id, blocks.stream().map(block -> {
      builder.clear();
      builder.setStartLine(block.getStartLine());
      builder.setEndLine(block.getEndLine());
      builder.setStartTokenIndex(block.getStartUnit());
      builder.setEndTokenIndex(block.getEndUnit());
      builder.setHash(block.getBlockHash().toHexString());
      return builder.build();
    }).collect(Collectors.toList()));
  }
  for (Block block : blocks) {
    mem.insert(block);
  }
  if (blocks.isEmpty()) {
    LOG.debug("Not enough content in '{}' to have CPD blocks, it will not be part of the duplication detection", inputFile.relativePath());
  }
  indexedFiles.add(inputFile);
}
Code example source: org.codehaus.sonar/sonar-duplications
  origin = part;
  builder.setLengthInUnits(lastBlock.getEndUnit() - firstBlock.getStartUnit() + 1);
} else if (part.getUnitStart() < origin.getUnitStart()) {
  origin = part;
Code example source: org.sonarsource.sonarqube/sonar-batch
  @Override
  public BatchReport.CpdTextBlock apply(Block input) {
    builder.clear();
    builder.setStartLine(input.getStartLine());
    builder.setEndLine(input.getEndLine());
    builder.setStartTokenIndex(input.getStartUnit());
    builder.setEndTokenIndex(input.getEndUnit());
    builder.setHash(input.getBlockHash().toHexString());
    return builder.build();
  }
}));