@@ -24,6 +24,7 @@
 import org.springframework.batch.infrastructure.item.ExecutionContext;
 import org.springframework.core.io.Resource;
 import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;

 /**
  * Implementation of {@link Partitioner} that locates multiple resources and associates
@@ -70,7 +71,7 @@ public void setKeyName(String keyName) {
 	 */
 	@Override
 	public Map<String, ExecutionContext> partition(int gridSize) {
-		Map<String, ExecutionContext> map = new HashMap<>(gridSize);
+		Map<String, ExecutionContext> map = CollectionUtils.newHashMap(gridSize);
 		int i = 0;
 		for (Resource resource : resources) {
 			ExecutionContext context = new ExecutionContext();
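Across all of these files the change follows the same pattern: collections that are pre-sized for a known number of entries are now created through Spring Framework's CollectionUtils factory methods instead of the HashMap/HashSet constructors. A minimal sketch of the difference, assuming Spring Framework 5.3 or later (which provides CollectionUtils.newHashMap) is on the classpath; the class and method names below are illustrative only, not part of the change:

// Sketch only, not part of this diff: contrasts constructor sizing with the
// CollectionUtils factory methods used above.
import java.util.HashMap;
import java.util.Map;

import org.springframework.util.CollectionUtils;

class MapSizingSketch {

	// new HashMap<>(n) treats n as the initial capacity; with the default load
	// factor of 0.75, the table resizes once more than n * 0.75 entries are
	// inserted, so a map "sized" for n entries may still rehash while filling.
	static Map<String, Integer> capacitySized(int expectedEntries) {
		return new HashMap<>(expectedEntries);
	}

	// CollectionUtils.newHashMap(n) computes a capacity large enough for n
	// entries to be added without resizing (newHashSet is used the same way
	// for sets in the hunks below).
	static Map<String, Integer> entrySized(int expectedEntries) {
		return CollectionUtils.newHashMap(expectedEntries);
	}

}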
@@ -21,6 +21,7 @@

 import org.springframework.batch.core.partition.Partitioner;
 import org.springframework.batch.infrastructure.item.ExecutionContext;
+import org.springframework.util.CollectionUtils;

 /**
  * Simplest possible implementation of {@link Partitioner}. Just creates a set of empty
@@ -37,7 +38,7 @@ public class SimplePartitioner implements Partitioner {

 	@Override
 	public Map<String, ExecutionContext> partition(int gridSize) {
-		Map<String, ExecutionContext> map = new HashMap<>(gridSize);
+		Map<String, ExecutionContext> map = CollectionUtils.newHashMap(gridSize);
 		for (int i = 0; i < gridSize; i++) {
 			map.put(PARTITION_KEY + i, new ExecutionContext());
 		}
@@ -33,6 +33,7 @@
 import org.springframework.batch.core.partition.StepExecutionSplitter;
 import org.springframework.batch.core.repository.JobRepository;
 import org.springframework.batch.infrastructure.item.ExecutionContext;
+import org.springframework.util.CollectionUtils;

 /**
  * Generic implementation of {@link StepExecutionSplitter} that delegates to a
@@ -128,7 +129,7 @@ public Set<StepExecution> split(StepExecution stepExecution, int gridSize) throw
 		JobExecution jobExecution = stepExecution.getJobExecution();

 		Map<String, ExecutionContext> contexts = getContexts(stepExecution, gridSize);
-		Set<StepExecution> set = new HashSet<>(contexts.size());
+		Set<StepExecution> set = CollectionUtils.newHashSet(contexts.size());

 		for (Entry<String, ExecutionContext> context : contexts.entrySet()) {

@@ -153,7 +154,7 @@ && shouldStart(allowStartIfComplete, stepExecution, lastStepExecution)) {
 			}
 		}

-		Set<StepExecution> executions = new HashSet<>(set.size());
+		Set<StepExecution> executions = CollectionUtils.newHashSet(set.size());
 		executions.addAll(set);

 		return executions;
@@ -34,6 +34,7 @@
 import org.springframework.core.task.TaskExecutor;
 import org.springframework.core.task.TaskRejectedException;
 import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;

 /**
  * A {@link PartitionHandler} that uses a {@link TaskExecutor} to execute the partitioned
@@ -94,8 +95,8 @@ public Step getStep() {
 	protected Set<StepExecution> doHandle(StepExecution managerStepExecution,
 			Set<StepExecution> partitionStepExecutions) throws Exception {
 		Assert.notNull(step, "A Step must be provided.");
-		final Set<Future<StepExecution>> tasks = new HashSet<>(getGridSize());
-		final Set<StepExecution> result = new HashSet<>();
+		final Set<Future<StepExecution>> tasks = CollectionUtils.newHashSet(partitionStepExecutions.size());
+		final Set<StepExecution> result = CollectionUtils.newHashSet(partitionStepExecutions.size());

 		for (StepExecution stepExecution : partitionStepExecutions) {
 			final FutureTask<StepExecution> task = createTask(step, stepExecution);
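Note that the hunk above is slightly more than a drop-in swap: both sets are now sized from partitionStepExecutions.size(), the actual number of partition executions to run, rather than from getGridSize(), and the result set, which previously relied on the default capacity, is now pre-sized as well.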
@@ -45,6 +45,7 @@
 import org.springframework.jdbc.core.BatchPreparedStatementSetter;
 import org.springframework.jdbc.core.RowMapper;
 import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;

 /**
  * JDBC DAO for {@link ExecutionContext}.
@@ -238,7 +239,7 @@ public void saveExecutionContext(StepExecution stepExecution) {
 	@Override
 	public void saveExecutionContexts(Collection<StepExecution> stepExecutions) {
 		Assert.notNull(stepExecutions, "Attempt to save an null collection of step executions");
-		Map<Long, String> serializedContexts = new HashMap<>(stepExecutions.size());
+		Map<Long, String> serializedContexts = CollectionUtils.newHashMap(stepExecutions.size());
 		for (StepExecution stepExecution : stepExecutions) {
 			Long executionId = stepExecution.getId();
 			ExecutionContext executionContext = stepExecution.getExecutionContext();
@@ -52,6 +52,7 @@
 import org.springframework.core.io.ByteArrayResource;
 import org.springframework.core.io.Resource;
 import org.springframework.util.Assert;
+import org.springframework.util.CollectionUtils;
 import org.springframework.util.StringUtils;

 /**
@@ -691,7 +692,7 @@ public DelimitedLineTokenizer build() {
 		}

 		if (!this.includedFields.isEmpty()) {
-			Set<Integer> deDupedFields = new HashSet<>(this.includedFields.size());
+			Set<Integer> deDupedFields = CollectionUtils.newHashSet(this.includedFields.size());
 			deDupedFields.addAll(this.includedFields);
 			deDupedFields.remove(null);

@@ -35,6 +35,7 @@

 import org.jspecify.annotations.NullUnmarked;
 import org.jspecify.annotations.Nullable;
+import org.springframework.util.CollectionUtils;
 import org.springframework.util.StringUtils;

 /**
@@ -131,7 +132,7 @@ public DefaultFieldSet(@Nullable String[] tokens, String[] names, @Nullable Date
 		}
 		this.tokens = tokens.clone();
 		this.names = Arrays.asList(names);
-		this.nameIndexMap = new HashMap<>(names.length);
+		this.nameIndexMap = CollectionUtils.newHashMap(names.length);
 		for (int i = 0; i < names.length; i++) {
 			this.nameIndexMap.put(names[i], i);
 		}
@@ -37,6 +37,7 @@
 import org.springframework.context.annotation.Import;
 import org.springframework.core.task.VirtualThreadTaskExecutor;
 import org.springframework.jdbc.support.JdbcTransactionManager;
+import org.springframework.util.CollectionUtils;

 /**
  * Configuration class that defines a partitioned step based on a
@@ -76,7 +77,7 @@ public Tasklet tasklet(@Value("#{stepExecutionContext['data']}") String partitio
 	@Bean
 	public Partitioner partitioner() {
 		return gridSize -> {
-			Map<String, ExecutionContext> partitionMap = new HashMap<>(gridSize);
+			Map<String, ExecutionContext> partitionMap = CollectionUtils.newHashMap(gridSize);
 			for (int i = 0; i < gridSize; i++) {
 				ExecutionContext executionContext = new ExecutionContext();
 				executionContext.put("data", "data" + i);