Commit e8295559 authored by Bergamote Orange's avatar Bergamote Orange
Browse files

unit tests & cluster duration default value update

parent 7a24960c
......@@ -22,6 +22,7 @@ import org.springframework.context.annotation.Configuration;
import org.springframework.core.task.SimpleAsyncTaskExecutor;
import org.springframework.core.task.TaskExecutor;
import org.springframework.jdbc.core.JdbcTemplate;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import javax.sql.DataSource;
import java.util.List;
......@@ -62,7 +63,7 @@ public class IdentificationStepBatchConfig {
.<String, List<SinglePlaceClusterPeriod>>chunk(properties.getIdentificationStepChunkSize())
.reader(reader)
.processor(compositeProcessor)
.writer(new SinglePlaceClusterPeriodListWriter(dataSource))
.writer(new SinglePlaceClusterPeriodListWriter(new NamedParameterJdbcTemplate(dataSource)))
.taskExecutor(taskExecutor())
.throttleLimit(20)
.build();
......
package fr.gouv.clea.identification.writer;
import fr.gouv.clea.dto.SinglePlaceClusterPeriod;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemWriter;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcOperations;
......@@ -15,14 +16,11 @@ import java.util.stream.Collectors;
import static fr.gouv.clea.config.BatchConstants.*;
@Slf4j
@RequiredArgsConstructor
public class SinglePlaceClusterPeriodListWriter implements ItemWriter<List<SinglePlaceClusterPeriod>> {
private final NamedParameterJdbcOperations jdbcTemplate;
public SinglePlaceClusterPeriodListWriter(DataSource datasource) {
this.jdbcTemplate = new NamedParameterJdbcTemplate(datasource);
}
@Override
public void write(List<? extends List<SinglePlaceClusterPeriod>> lists) {
final List<SinglePlaceClusterPeriod> flatList = lists.stream().flatMap(List::stream).collect(Collectors.toList());
......
......@@ -74,12 +74,13 @@ public class IndexationPartitioner implements Partitioner {
}
private void dispatchRemainingEntries(final Iterator<Map.Entry<String, List<String>>> prefixLtidsMapIterator, final Map<String, ExecutionContext> result) {
    // Spreads the leftover (prefix -> ltids) entries across the already-created
    // partitions, one entry per partition, in iteration order.
    // Note: the index must be mutable — the diff residue left both a 'final int'
    // and an 'int' declaration; only the mutable one is kept since the loop
    // increments it.
    int partitionIndex = 0;
    while (prefixLtidsMapIterator.hasNext()) {
        final Map.Entry<String, List<String>> currentValue = prefixLtidsMapIterator.next();
        // NOTE(review): assumes the number of remaining entries never exceeds the
        // number of partitions in 'result'; otherwise result.get(...) returns null
        // and the casts below throw NPE — confirm against the caller's logic.
        final ExecutionContext executionContext = result.get("partition-" + partitionIndex);
        ((List<String>) executionContext.get(PREFIXES_PARTITION_KEY)).add(currentValue.getKey());
        ((List<List<String>>) executionContext.get(LTIDS_LIST_PARTITION_KEY)).add(currentValue.getValue());
        partitionIndex++;
    }
}
}
package fr.gouv.clea.indexation.reader;
import lombok.Getter;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.item.ItemReader;
......@@ -10,6 +11,7 @@ import java.util.Map;
@Slf4j
public class StepExecutionContextReader implements ItemReader<Map.Entry<String, List<String>>> {
@Getter
private int index = 0;
private final List<String> prefixes;
......
......@@ -48,11 +48,10 @@ public class IndexationWriter implements ItemWriter<ClusterFile> {
log.info("Creating directories if not exists: {}", jobDirectoryOutputPath);
Files.createDirectories(jobDirectoryOutputPath);
clusterFile.forEach(clusterFile1 -> generateClusterFile(clusterFile1, jobDirectoryOutputPath));
}
private void generateClusterFile(final ClusterFile clusterFile, final Path directoryOutputPath) {
void generateClusterFile(final ClusterFile clusterFile, final Path directoryOutputPath) {
final Path jsonClusterPath = Path.of(directoryOutputPath.toString(), clusterFile.getName()+JSON_FILE_EXTENSION);
log.debug("Generating cluster file : {}", jsonClusterPath);
......@@ -65,5 +64,4 @@ public class IndexationWriter implements ItemWriter<ClusterFile> {
e.printStackTrace();
}
}
}
......@@ -21,7 +21,7 @@ spring:
clea:
batch:
cluster:
duration-unit-in-seconds: 1800
static-prefix-length: ${CLEA_BATCH_PREFIX_LENGTH:2}
files-output-path: ${CLEA_BATCH_OUTPUT_PATH:/tmp/v1}
#TODO: totally arbitrary values, find out actual plausible values
......
......@@ -18,6 +18,7 @@ import java.util.List;
import java.util.Random;
import java.util.UUID;
@Disabled("for local development purpose")
@SpringBootTest
//@ExtendWith(SpringExtension.class)
//@RunWith(SpringRunner.class)
......@@ -43,8 +44,7 @@ public class ExposedVisitGenerator {
// }
@Test
@Disabled("for local development purpose")
public void fillRandomVisits() {
void fillRandomVisits() {
// hour of now : 3826008000
// hour of 21-01-01 : 3818448000
// diff: 7560000
......@@ -57,9 +57,9 @@ public class ExposedVisitGenerator {
final long janv21 = 3818448000l;
JdbcTemplate jdbcTemplate = new JdbcTemplate(ds);
log.info("Starting to fill EXPOSED_VISITS...");
for (int l = 0; l < NB_LOCATIONS; l++) {
UUID lieu = UUID.randomUUID();
int venueType = r.nextInt(18)+1; // 1 to 18
......@@ -108,7 +108,7 @@ public class ExposedVisitGenerator {
}
});
//@formatter:on
}
log.info("Nb records in EXPOSED_VISITS: " + jdbcTemplate.queryForObject("select count(*) from EXPOSED_VISITS", Integer.class));
......
package fr.gouv.clea.identification.writer;
import fr.gouv.clea.dto.SinglePlaceClusterPeriod;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.DisplayNameGeneration;
import org.junit.jupiter.api.DisplayNameGenerator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.ArgumentCaptor;
import org.mockito.Captor;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.junit.jupiter.MockitoExtension;
import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
import org.springframework.jdbc.core.namedparam.SqlParameterSource;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.when;
@Slf4j
@ExtendWith(MockitoExtension.class)
@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class)
class SinglePlaceClusterPeriodListWriterTest {
@Captor
private ArgumentCaptor<SqlParameterSource[]> sqlParameterSourcesCaptor;
@Mock
private NamedParameterJdbcTemplate jdbcTemplate;
@InjectMocks
SinglePlaceClusterPeriodListWriter writer;
@Test
void write_calls_template_with_parameters_from_provided_list() {
final UUID ltid1 = UUID.randomUUID();
final int venueType1 = 0;
final int venueCat1_1 = 1;
final int venueCat2_1 = 2;
final long periodStart1 = 0L;
final int firstTS1 = 3;
final int lastTS1 = 4;
final long clusterStart1 = 1L;
final int clusterDuration1 = 5;
final float riskLv1 = 1;
final UUID ltid2 = UUID.randomUUID();
final int venueType2 = 6;
final int venueCat1_2 = 7;
final int venueCat2_2 = 8;
final long periodStart2 = 0L;
final int firstTS2 = 9;
final int lastTS2 = 10;
final long clusterStart2 = 1L;
final int clusterDuration2 = 11;
final float riskLv2 = 1;
final SinglePlaceClusterPeriod period1 = buildClusterPeriod(ltid1, venueType1, venueCat1_1, venueCat2_1, periodStart1, firstTS1, lastTS1, clusterStart1, clusterDuration1, riskLv1);
final SinglePlaceClusterPeriod period2 = buildClusterPeriod(ltid2, venueType2, venueCat1_2, venueCat2_2, periodStart2, firstTS2, lastTS2, clusterStart2, clusterDuration2, riskLv2);
when(jdbcTemplate.batchUpdate(anyString(), sqlParameterSourcesCaptor.capture())).thenReturn(new int[2]);
// two lists of two singlePlaceClusterPeriods as input, one per ltid
writer.write(List.of(List.of(period1), List.of(period2)));
sqlParameterSourcesCaptor.getAllValues().forEach(sqlParameterSources -> log.info("sqlParameterSource: {}", Arrays.toString(sqlParameterSources)));
}
private SinglePlaceClusterPeriod buildClusterPeriod(final UUID ltid,
final int venueType,
final int venueCat1,
final int venueCat2,
final long periodStart,
final int firstTS,
final int lastTS,
final long clusterStart,
final int clusterDuration,
final float riskLv) {
return SinglePlaceClusterPeriod.builder()
.locationTemporaryPublicId(ltid)
.venueType(venueType)
.venueCategory1(venueCat1)
.venueCategory2(venueCat2)
.periodStart(periodStart)
.firstTimeSlot(firstTS)
.lastTimeSlot(lastTS)
.clusterStart(clusterStart)
.clusterDurationInSeconds(clusterDuration)
.riskLevel(riskLv)
.build();
}
}
package fr.gouv.clea.indexation;
import fr.gouv.clea.service.PrefixesStorageService;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.DisplayNameGeneration;
import org.junit.jupiter.api.DisplayNameGenerator;
import org.junit.jupiter.api.Nested;
......@@ -47,7 +46,7 @@ public class IndexationPartitionerTest {
// verify presence of registered data to ensure nothing is missing
verifyPrefixesCountMatchesInput(prefixLtidsMap, result);
verifyLtidsListCountMatchesInput(prefixLtidsMap, result);
verifyLtidsCountMatchesInput(prefixLtidsMap, result);
verifyResultLtidsMatchInput(prefixLtidsMap, result);
}
}
......@@ -68,7 +67,35 @@ public class IndexationPartitionerTest {
// retrieve all registered prefixes to ensure every prefix is present
verifyPrefixesCountMatchesInput(prefixLtidsMap, result);
verifyLtidsListCountMatchesInput(prefixLtidsMap, result);
verifyLtidsCountMatchesInput(prefixLtidsMap, result);
verifyResultLtidsMatchInput(prefixLtidsMap, result);
}
}
@Nested
class Multiple_items_per_partition {
// Verifies that when there are more input entries (10) than partitions (4),
// the partitioner distributes entries round-robin style: the first
// (10 mod 4) = 2 partitions get one extra entry each.
@Test
void partition_returns_gridSize_number_of_partitions_with_multiple_results_per_partition() {
// 10 prefixes
Map<String, List<String>> prefixLtidsMap = getBigMap();
Mockito.when(prefixesStorageService.getPrefixWithAssociatedLtidsMap()).thenReturn(prefixLtidsMap);
final int gridSize = 4;
final Map<String, ExecutionContext> result = indexationPartitioner.partition(gridSize);
assertThat(result).hasSize(gridSize);
// retrieve all registered prefixes to ensure every prefix is present
verifyPrefixesCountMatchesInput(prefixLtidsMap, result);
verifyLtidsListCountMatchesInput(prefixLtidsMap, result);
verifyResultLtidsMatchInput(prefixLtidsMap, result);
// itemsPerPartition = 10 / 4 = 2 (integer division)
int itemsPerPartition = prefixLtidsMap.size() / gridSize;
// 4*2 items + 1 on the 2 first partitions = 10 total
assertThat((List<String>)(result.get("partition-0").get(PREFIXES_PARTITION_KEY))).hasSize(itemsPerPartition + 1);
assertThat((List<String>)(result.get("partition-1").get(PREFIXES_PARTITION_KEY))).hasSize(itemsPerPartition + 1);
assertThat((List<String>)(result.get("partition-2").get(PREFIXES_PARTITION_KEY))).hasSize(itemsPerPartition);
assertThat((List<String>)(result.get("partition-3").get(PREFIXES_PARTITION_KEY))).hasSize(itemsPerPartition);
}
}
......@@ -76,6 +103,7 @@ public class IndexationPartitionerTest {
List<String> registeredPrefixesList = result.values().stream()
.map(executionContext -> (List<String>) executionContext.get(PREFIXES_PARTITION_KEY))
.filter(Objects::nonNull)
// stream of list of string to stream of string
.flatMap(Collection::stream)
.collect(Collectors.toList());
assertThat(registeredPrefixesList).hasSize(inputMap.size());
......@@ -86,16 +114,19 @@ public class IndexationPartitionerTest {
List<List<String>> registeredPrefixesList = result.values().stream()
.map(executionContext -> (List<List<String>>) executionContext.get(LTIDS_LIST_PARTITION_KEY))
.filter(Objects::nonNull)
// list of list of string to list of string
.flatMap(Collection::stream)
.collect(Collectors.toList());
assertThat(registeredPrefixesList).hasSize(inputMap.size());
}
private void verifyLtidsCountMatchesInput(final Map<String, List<String>> map, final Map<String, ExecutionContext> result) {
private void verifyResultLtidsMatchInput(final Map<String, List<String>> map, final Map<String, ExecutionContext> result) {
List<String> registeredPrefixesList = result.values().stream()
.map(executionContext -> (List<List<String>>) executionContext.get(LTIDS_LIST_PARTITION_KEY))
.filter(Objects::nonNull)
// stream of list of list of string to stream of list of string
.flatMap(Collection::stream)
// stream of list of string to stream of string
.flatMap(Collection::stream)
.collect(Collectors.toList());
final List<String> inputLtids = map.values().stream().flatMap(Collection::stream)
......@@ -127,4 +158,60 @@ public class IndexationPartitionerTest {
"56ca2138-c742-4d18-ac3c-28b30c16272g");
return Map.of(prefix1, ltidsList1, prefix2, ltidsList2, prefix3, ltidsList3, prefix4, ltidsList4);
}
private Map<String, List<String>> getBigMap() {
    // Builds 10 entries — prefixes "01".."10", each mapped to 3 ltids that start
    // with the prefix. Replaces ~50 lines of copy-pasted literals with a loop;
    // the resulting map content is identical to the hand-written version.
    // (HashMap fully qualified to avoid relying on unseen imports; the returned
    // copy is unmodifiable, matching the previous Map.of behaviour.)
    final java.util.HashMap<String, List<String>> bigMap = new java.util.HashMap<>();
    for (int i = 1; i <= 10; i++) {
        final String prefix = String.format("%02d", i);
        bigMap.put(prefix, List.of(
                prefix + "ca2138-c742-4d18-ac3c-28b30c16272f",
                prefix + "ca2138-c742-4d18-ac3c-28b30c16272h",
                prefix + "ca2138-c742-4d18-ac3c-28b30c16272g"));
    }
    return Map.copyOf(bigMap);
}
}
package fr.gouv.clea.indexation.index;
import org.junit.jupiter.api.DisplayNameGeneration;
import org.junit.jupiter.api.DisplayNameGenerator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
@ExtendWith(MockitoExtension.class)
@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class)
public class GenerateClusterIndexTaskletTest {
// TODO(review): empty placeholder — no tests implemented yet for
// GenerateClusterIndexTasklet; add coverage for its execute() behaviour.
}
package fr.gouv.clea.indexation.reader;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.DisplayNameGeneration;
import org.junit.jupiter.api.DisplayNameGenerator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.junit.jupiter.MockitoExtension;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import static org.assertj.core.api.Assertions.assertThat;
@ExtendWith(MockitoExtension.class)
@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class)
class StepExecutionContextReaderTest {

    @Test
    void indexation_reader_returns_map_entries_corresponding_to_its_prefixes_and_ltidsLists_inputs() {
        // Inputs: two prefixes, each paired with the list of ltids sharing that prefix.
        final String firstPrefix = "52";
        final String secondPrefix = "c7";
        final List<String> firstLtids = List.of("52f3fe14-2f58-4e26-b321-b73a51d411a1",
                "52f3fe14-2f58-4e26-b321-b73a51d411a2",
                "52f3fe14-2f58-4e26-b321-b73a51d411a3");
        final List<String> secondLtids = List.of("c7f878e0-25de-4bdc-9f62-a283b5133b3a",
                "c7f878e0-25de-4bdc-9f62-a283b5133b3b",
                "c7f878e0-25de-4bdc-9f62-a283b5133b3c");
        final List<String> allPrefixes = List.of(firstPrefix, secondPrefix);
        final List<List<String>> allLtidsLists = List.of(firstLtids, secondLtids);

        final StepExecutionContextReader reader = new StepExecutionContextReader(allPrefixes, allLtidsLists);

        // A freshly-constructed reader starts at index 0.
        assertThat(reader.getIndex()).isEqualTo(0);

        // simulate spring batch behaviour, looping on read() method until it returns null,
        // and check the internal index advances by exactly one per read
        final Map<Integer, Map.Entry<String, List<String>>> readEntries = new HashMap<>();
        int readCount = 0;
        for (Map.Entry<String, List<String>> entry = reader.read(); entry != null; entry = reader.read()) {
            readEntries.put(readCount, entry);
            readCount++;
            assertThat(reader.getIndex()).isEqualTo(readCount);
        }

        // Entries must come back in input order: entry i pairs prefixes.get(i) with ltidsLists.get(i).
        for (int i = 0; i < readEntries.size(); i++) {
            assertThat(readEntries.get(i).getKey()).isEqualTo(allPrefixes.get(i));
            assertThat(readEntries.get(i).getValue()).isEqualTo(allLtidsLists.get(i));
        }
    }
}
package fr.gouv.clea.indexation.writer;
import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import fr.gouv.clea.config.BatchProperties;
import fr.gouv.clea.indexation.model.output.ClusterFile;
import fr.gouv.clea.indexation.model.output.ClusterFileIndex;
import fr.gouv.clea.indexation.model.output.ClusterFileItem;
import org.assertj.core.api.Assertions;
import org.junit.jupiter.api.DisplayNameGeneration;
import org.junit.jupiter.api.DisplayNameGenerator;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.mockito.InjectMocks;
import org.mockito.Mock;
import org.mockito.Mockito;
import org.mockito.internal.verification.Times;
import org.mockito.junit.jupiter.MockitoExtension;
import java.io.File;
import java.io.IOException;
import java.nio.file.Path;
import java.util.List;
import java.util.UUID;
import static fr.gouv.clea.config.BatchConstants.JSON_FILE_EXTENSION;
import static org.mockito.ArgumentMatchers.eq;
@ExtendWith(MockitoExtension.class)
@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class)
public class IndexationWriterTest {

    @Mock
    private BatchProperties batchProperties;

    @Mock
    private ObjectMapper objectMapper;

    @InjectMocks
    private IndexationWriter writer;

    @Test
    void generateClusterFile_writes_files_through_objectMapper_with_expected_directory_name() throws IOException {
        // Build a single-item cluster file named "name".
        final ClusterFileItem clusterFileItem = ClusterFileItem.builder()
                .temporaryLocationId("ltid")
                .build();
        final ClusterFile clusterFile = new ClusterFile();
        clusterFile.setName("name");
        clusterFile.setItems(List.of(clusterFileItem));

        final String directoryOutputPath = "outputPath";
        writer.generateClusterFile(clusterFile, Path.of(directoryOutputPath));

        // The writer must enable pretty-printing and write the items to
        // <directoryOutputPath>/<name>.json (verify defaults to times(1)).
        final File expectedFilePath = Path.of(directoryOutputPath, clusterFile.getName() + JSON_FILE_EXTENSION).toFile();
        Mockito.verify(objectMapper).enable(SerializationFeature.INDENT_OUTPUT);
        Mockito.verify(objectMapper).writeValue(expectedFilePath, clusterFile.getItems());
    }
}
Supports Markdown
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or to comment