This article collects code examples of the java.util.Collections.nCopies() method and shows how Collections.nCopies() is used in practice. The examples are extracted from selected open-source projects found on GitHub, Stack Overflow, Maven, and similar platforms, so they are reasonably representative references. Details of Collections.nCopies():

Package: java.util
Class: Collections
Method: nCopies
Description: Returns a list containing the specified number of copies of the specified element. The returned list cannot be modified and is serializable.
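Before the project excerpts, here is a minimal, self-contained sketch (not taken from any of the projects below) of the two ways nCopies typically shows up: as an immutable list of repeated values, and as a cheap way to pre-fill a mutable list with placeholders.

import java.util.ArrayList;
import java.util.Collections;
import java.util.List;

public class NCopiesDemo {
    public static void main(String[] args) {
        // An immutable list containing the same element five times.
        List<String> dashes = Collections.nCopies(5, "-");
        System.out.println(dashes);        // [-, -, -, -, -]
        // dashes.set(0, "x");             // would throw UnsupportedOperationException

        // The idiom used in most excerpts below: pre-fill a mutable list with placeholders.
        List<Integer> slots = new ArrayList<>(Collections.nCopies(3, 0));
        slots.set(1, 42);
        System.out.println(slots);         // [0, 42, 0]
    }
}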
Code example source: graphhopper/graphhopper

/**
 * Set routing request
 * <p>
 *
 * @param points List of stopover points in order: start, 1st stop, 2nd stop, ..., end
 */
public GHRequest(List<GHPoint> points) {
    this(points, Collections.nCopies(points.size(), Double.NaN));
}
Code example source: konsoletyper/teavm

AllFunction(int count) {
    this.result = result;   // 'result' comes from context that is elided in this excerpt
    this.count = count;
    list.addAll(Collections.nCopies(count, null));
}
Code example source: apache/hbase

@Override
public void addFilterLists(List<Filter> filters) {
    if (checkAndGetReversed(filters, isReversed()) != isReversed()) {
        throw new IllegalArgumentException("Filters in the list must have the same reversed flag");
    }
    this.filters.addAll(filters);
    this.subFiltersIncludedCell.addAll(Collections.nCopies(filters.size(), false));
    this.prevFilterRCList.addAll(Collections.nCopies(filters.size(), null));
    this.prevCellList.addAll(Collections.nCopies(filters.size(), null));
}
Code example source: konsoletyper/teavm

private int union(int a, int b) {
    Wrapper p = wrappers.get(set.find(a));
    Wrapper q = wrappers.get(set.find(b));
    int c = set.union(a, b);
    if (c >= wrappers.size()) {
        wrappers.addAll(Collections.nCopies(c - wrappers.size() + 1, null));
    }
    wrappers.set(c, union(p, q));
    return c;
}
Code example source: konsoletyper/teavm

private FieldReference[][] packFields(List<Set<FieldReference>> fields) {
    List<Set<FieldReference>> joinedFields = new ArrayList<>(Collections.nCopies(fields.size(), null));
    for (int i = 0; i < fields.size(); ++i) {
        if (fields.get(i) == null) {
            continue;
        }
        int j = definitionClasses[i];
        Set<FieldReference> fieldSet = joinedFields.get(j);
        if (fieldSet == null) {
            fieldSet = new LinkedHashSet<>();
            joinedFields.set(j, fieldSet);
        }
        fieldSet.addAll(fields.get(i));
    }
    FieldReference[][] packedFields = new FieldReference[fields.size()][];
    for (int i = 0; i < packedFields.length; ++i) {
        if (joinedFields.get(i) != null) {
            packedFields[i] = joinedFields.get(i).toArray(new FieldReference[0]);
        }
    }
    return packedFields;
}
Code example source: prestodb/presto

// Truncated excerpt; parts of the original method are elided and marked with "...".
singleValues.add(range.getLow().getValue());
// ...
switch (range.getLow().getBound()) {
    case ABOVE:
        rangeConjuncts.add(toPredicate(columnName, ">", range.getLow().getValue(), type, accumulator));
        break;
    case EXACTLY:
        rangeConjuncts.add(toPredicate(columnName, ">=", range.getLow().getValue(), type, accumulator));
        break;
    case BELOW:
        // ... (handling elided in the original excerpt)
}
// ...
if (singleValues.size() == 1) {
    disjuncts.add(toPredicate(columnName, "=", getOnlyElement(singleValues), type, accumulator));
}
else if (singleValues.size() > 1) {
    for (Object value : singleValues) {
        bindValue(value, type, accumulator);
    }
    String values = Joiner.on(",").join(nCopies(singleValues.size(), "?"));
    disjuncts.add(quote(columnName) + " IN (" + values + ")");
}
Code example source: prestodb/presto

protected void setup(int numSortChannels, int totalChannels, int numMergeSources, int pagesCount)
{
    types = nCopies(totalChannels, BIGINT);
    sortChannels = new ArrayList<>();
    for (int i = 0; i < numSortChannels; i++) {
        sortChannels.add(i);
    }
    sortTypes = nCopies(numSortChannels, BIGINT);
    sortOrders = nCopies(numSortChannels, ASC_NULLS_FIRST);
    outputChannels = new ArrayList<>();
    for (int i = 0; i < totalChannels; i++) {
        outputChannels.add(i);
    }
    createPages(totalChannels, pagesCount);
    createPageProducers(numMergeSources);
}
Code example source: konsoletyper/teavm

private void prepare(MethodNode method) {
    InsnList instructions = method.instructions;
    minLocal = 0;
    if ((method.access & Opcodes.ACC_STATIC) != 0) {
        minLocal = 1;
    }
    labelIndexes = new HashMap<>();
    lineNumbers = new HashMap<>();
    for (int i = 0; i < instructions.size(); ++i) {
        AbstractInsnNode node = instructions.get(i);
        if (node instanceof LabelNode) {
            labelIndexes.put(((LabelNode) node).getLabel(), i);
        }
        if (node instanceof LineNumberNode) {
            LineNumberNode lineNumberNode = (LineNumberNode) node;
            lineNumbers.put(lineNumberNode.start.getLabel(), lineNumberNode.line);
        }
    }
    for (LocalVariableNode localVar : method.localVariables) {
        int location = labelIndexes.get(localVar.start.getLabel());
        localVariableMap.computeIfAbsent(location, k -> new ArrayList<>()).add(localVar);
    }
    targetInstructions = new ArrayList<>(instructions.size());
    targetInstructions.addAll(Collections.nCopies(instructions.size(), null));
    basicBlocks.addAll(Collections.nCopies(instructions.size(), null));
    stackBefore = new StackFrame[instructions.size()];
    stackAfter = new StackFrame[instructions.size()];
}
Code example source: konsoletyper/teavm

// Truncated excerpt; only the lines relevant to nCopies are shown.
private void dfs(Graph graph, Program program) {
    startLocations = new ArrayList<>(Collections.nCopies(graph.size(), null));
    additionalConnections = new ArrayList<>();
    Deque<Step> stack = new ArrayDeque<>();
    // ... (the DFS loop that pops Step entries from the stack is elided)
    if (visited[step.block]) {
        if (step.location != null) {
            additionalConnections.add(new AdditionalConnection(step.location, startLocations.get(step.block)));
            // ...
Code example source: konsoletyper/teavm

private List<Set<Phi>> getDestinationPhis(Program program) {
    List<Set<Phi>> destinationPhis = new ArrayList<>();
    destinationPhis.addAll(Collections.nCopies(program.variableCount(), null));
    for (int i = 0; i < program.basicBlockCount(); ++i) {
        BasicBlock block = program.basicBlockAt(i);
        for (Phi phi : block.getPhis()) {
            for (Incoming incoming : phi.getIncomings()) {
                Set<Phi> phis = destinationPhis.get(incoming.getValue().getIndex());
                if (phis == null) {
                    phis = new LinkedHashSet<>();
                    destinationPhis.set(incoming.getValue().getIndex(), phis);
                }
                phis.add(phi);
            }
        }
    }
    return destinationPhis;
}
Code example source: google/guava

public void testDirectExecutorServiceInvokeAll() throws Exception {
    final ExecutorService executor = newDirectExecutorService();
    final ThreadLocal<Integer> threadLocalCount =
        new ThreadLocal<Integer>() {
            @Override
            protected Integer initialValue() {
                return 0;
            }
        };
    final Callable<Integer> incrementTask =
        new Callable<Integer>() {
            @Override
            public Integer call() {
                int i = threadLocalCount.get();
                threadLocalCount.set(i + 1);
                return i;
            }
        };
    List<Future<Integer>> futures = executor.invokeAll(Collections.nCopies(10, incrementTask));
    for (int i = 0; i < 10; i++) {
        Future<Integer> future = futures.get(i);
        assertTrue("Task should have been run before being returned", future.isDone());
        assertEquals(i, future.get().intValue());
    }
    assertEquals(10, threadLocalCount.get().intValue());
}
Code example source: konsoletyper/teavm

public void addEdge(int from, int to) {
    if (to < 0 || from < 0) {
        throw new IllegalArgumentException();
    }
    sz = Math.max(sz, Math.max(from, to) + 1);
    builtGraph = null;
    if (addedEdges.size() == from) {
        addedEdges.add(IntHashSet.from(to));
    } else if (addedEdges.size() <= from) {
        addedEdges.addAll(Collections.nCopies(from - addedEdges.size(), null));
        addedEdges.add(IntHashSet.from(to));
    } else {
        IntSet set = addedEdges.get(from);
        if (set == null) {
            addedEdges.set(from, IntHashSet.from(to));
        } else {
            set.add(to);
        }
    }
}
Code example source: apache/incubator-gobblin

public SimpleKafkaSpecConsumer(Config config, Optional<Logger> log) {
    // Consumer
    String kafkaConsumerClientClass = ConfigUtils.getString(config, CONSUMER_CLIENT_FACTORY_CLASS_KEY,
        DEFAULT_CONSUMER_CLIENT_FACTORY_CLASS);
    try {
        Class<?> clientFactoryClass = (Class<?>) Class.forName(kafkaConsumerClientClass);
        final GobblinKafkaConsumerClient.GobblinKafkaConsumerClientFactory factory =
            (GobblinKafkaConsumerClient.GobblinKafkaConsumerClientFactory)
                ConstructorUtils.invokeConstructor(clientFactoryClass);
        _kafkaConsumer = factory.create(config);
    } catch (ClassNotFoundException | NoSuchMethodException | IllegalAccessException | InstantiationException | InvocationTargetException e) {
        if (log.isPresent()) {
            log.get().error("Failed to instantiate Kafka consumer from class " + kafkaConsumerClientClass, e);
        }
        throw new RuntimeException("Failed to instantiate Kafka consumer", e);
    }
    List<KafkaTopic> kafkaTopics = _kafkaConsumer.getFilteredTopics(Collections.EMPTY_LIST,
        Lists.newArrayList(Pattern.compile(config.getString(SimpleKafkaSpecExecutor.SPEC_KAFKA_TOPICS_KEY))));
    _partitions = kafkaTopics.get(0).getPartitions();
    _lowWatermark = Lists.newArrayList(Collections.nCopies(_partitions.size(), 0L));
    _nextWatermark = Lists.newArrayList(Collections.nCopies(_partitions.size(), 0L));
    _highWatermark = Lists.newArrayList(Collections.nCopies(_partitions.size(), 0L));
    InputStream dummyInputStream = new ByteArrayInputStream(new byte[0]);
    _decoder = DecoderFactory.get().binaryDecoder(dummyInputStream, null);
    _reader = new SpecificDatumReader<AvroJobSpec>(AvroJobSpec.SCHEMA$);
    _versionWriter = new FixedSchemaVersionWriter();
}
Code example source: apache/hive

// Truncated excerpt; only the lines relevant to nCopies are shown.
partitionNames = new ArrayList<>(partitions.size());
for (Partition partition : partitions) {
    partitionNames.add(Warehouse.makePartName(tbl.getPartitionKeys(), partition.getValues()));
}
response.setPartitionValues(new ArrayList<>(partitionNames.size()));
LOG.info("Converting responses to Partition values for items: {}", partitionNames.size());
for (String partName : partitionNames) {
    ArrayList<String> vals = new ArrayList<String>(Collections.nCopies(tbl.getPartitionKeys().size(), null));
    PartitionValuesRow row = new PartitionValuesRow();
    Warehouse.makeValsFromName(partName, vals);
    // ...
Code example source: apache/flink

public List<List<String>> getFieldOrigins(SqlNode sqlQuery) {
    if (sqlQuery instanceof SqlExplain) {
        return Collections.emptyList();
    }
    final RelDataType rowType = getValidatedNodeType(sqlQuery);
    final int fieldCount = rowType.getFieldCount();
    if (!sqlQuery.isA(SqlKind.QUERY)) {
        return Collections.nCopies(fieldCount, null);
    }
    final List<List<String>> list = new ArrayList<>();
    for (int i = 0; i < fieldCount; i++) {
        list.add(getFieldOrigin(sqlQuery, i));
    }
    return ImmutableNullableList.copyOf(list);
}
Code example source: commons-collections/commons-collections

/**
 * Decorate the set method to perform the growth behaviour.
 * <p>
 * If the requested index is greater than the current size, the list will
 * grow to the new size. Indices between the old size and the requested
 * size will be filled with <code>null</code>.
 * <p>
 * If the index is less than the current size, the value will be set onto
 * the underlying list directly.
 * If the index is less than zero, the underlying list is called, which
 * will probably throw an IndexOutOfBoundsException.
 *
 * @param index  the index to set
 * @param element  the object to set at the specified index
 * @return the object previously at that index
 * @throws UnsupportedOperationException if the underlying list doesn't implement set
 * @throws ClassCastException if the underlying list rejects the element
 * @throws IllegalArgumentException if the underlying list rejects the element
 */
public Object set(int index, Object element) {
    int size = getList().size();
    if (index >= size) {
        getList().addAll(Collections.nCopies((index - size) + 1, null));
    }
    return getList().set(index, element);
}
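As a usage sketch of the growth behaviour described above (assuming the GrowthList.decorate factory from Commons Collections 3.x; the raw types match that pre-generics API):

import java.util.ArrayList;
import java.util.List;
import org.apache.commons.collections.list.GrowthList;

public class GrowthListDemo {
    public static void main(String[] args) {
        // Decorate an empty ArrayList; set() past the end pads the gap with nulls via nCopies.
        List grown = GrowthList.decorate(new ArrayList());
        grown.set(3, "d");
        System.out.println(grown);         // [null, null, null, d]
    }
}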
Code example source: apache/hbase

public FilterListWithAND(List<Filter> filters) {
    super(filters);
    // For FilterList with AND, when call FL's transformCell(), we should transform cell for all
    // sub-filters (because all sub-filters return INCLUDE*). So here, fill this array with true. we
    // keep this in FilterListWithAND for abstracting the transformCell() in FilterListBase.
    subFiltersIncludedCell = new ArrayList<>(Collections.nCopies(filters.size(), true));
}
Code example source: konsoletyper/teavm

// Truncated excerpt; only the lines relevant to nCopies are shown.
variableToSourceMap.add(-1);
definedVersions.addAll(Collections.nCopies(program.variableCount(), null));
synthesizedPhisByBlock.add(new ArrayList<>());
Code example source: apache/ignite

// Truncated excerpt; surrounding statements are elided and marked with "...".
List<DatasetTrainer<? extends IgniteModel, L>> trainers = Collections.nCopies(ensembleSize, tr);
// ...
    .mapToObj(mdlIdx -> {
        AdaptableDatasetTrainer<Vector, Double, Vector, Double, ? extends IgniteModel, L> tr =
            AdaptableDatasetTrainer.of(trainers.get(mdlIdx));
        if (mappings != null) {
            tr = tr.afterFeatureExtractor(featureValues -> {
                int[] mapping = mappings.get(mdlIdx);
                double[] newFeaturesValues = new double[mapping.length];
                for (int j = 0; j < mapping.length; j++)
                    // ... (loop body and the rest of the lambda are elided)
            }).beforeTrainedModel(VectorUtils.getProjector(mappings.get(mdlIdx)));
        }
        // ...
Code example source: konsoletyper/teavm

public GraphBuilder(int sz) {
    addedEdges.addAll(Collections.nCopies(sz, null));
    this.sz = sz;
}