Commit e0985554 authored by Bergamote Orange

merge request review comments

parent dc30923c
......@@ -20,4 +20,6 @@ public class BatchConstants {
public static final String CLUSTER_START_COL = "cluster_start";
public static final String CLUSTER_DURATION_COL = "cluster_duration_in_seconds";
public static final String RISK_LEVEL_COL = "risk_level";
public static final String CLUSTER_INDEX_FILENAME = "clusterIndex.json";
public static final String JSON_FILE_EXTENSION = ".json";
}
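
The two new constants are consumed further down in this merge request when the index tasklet and the cluster-file writer build their output paths (files-output-path defaults to /tmp/v1 in the YAML below). A minimal sketch of that composition; the class and method names here are illustrative, not part of the commit:

import java.nio.file.Path;
import java.nio.file.Paths;

class OutputPaths {
    // e.g. /tmp/v1/clusterIndex.json
    static Path clusterIndexPath(String outputPath) {
        return Paths.get(outputPath, BatchConstants.CLUSTER_INDEX_FILENAME);
    }

    // e.g. /tmp/v1/42/ab.json for job 42 and cluster file "ab"
    static Path clusterFilePath(Path jobDirectory, String clusterName) {
        return jobDirectory.resolve(clusterName + BatchConstants.JSON_FILE_EXTENSION);
    }
}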
package fr.gouv.clea.config;
import lombok.*;
import lombok.Getter;
import lombok.NoArgsConstructor;
import lombok.Setter;
import lombok.ToString;
import org.springframework.boot.context.properties.ConfigurationProperties;
@Getter
......@@ -21,5 +24,7 @@ public class BatchProperties {
private int gridSize;
private int chunkSize;
private int identificationStepChunkSize;
private int indexationStepChunkSize;
}
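
The new fields receive their values through Spring Boot's relaxed binding, which maps the kebab-case YAML keys added at the end of this merge request onto camelCase properties. A trimmed sketch, assuming a binding prefix such as clea.batch (the actual prefix sits outside this diff):

@Getter
@Setter
@ConfigurationProperties(prefix = "clea.batch") // prefix assumed, not visible in the diff
public class BatchProperties {
    private int identificationStepChunkSize; // bound from identification-step-chunk-size: 1000
    private int indexationStepChunkSize;     // bound from indexation-step-chunk-size: 1000
}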
......@@ -17,10 +17,10 @@ public class CleaBatchJobConfig {
private JobBuilderFactory jobBuilderFactory;
@Bean
public Job cleaBatchJob(Step clusterIdentification, Step clustersIndexation, Step prefixesComputing, Step emptyIntermediateDb, Step clusterIndexGeneration) {
public Job cleaBatchJob(Step clusterIdentification, Step clustersIndexation, Step prefixesComputing, Step purgeIntermediateTable, Step clusterIndexGeneration) {
return this.jobBuilderFactory.get("clea-batch-job")
.incrementer(new RunIdIncrementer())
.start(emptyIntermediateDb)
.start(purgeIntermediateTable)
.next(clusterIdentification)
.next(prefixesComputing)
.next(clustersIndexation)
......
package fr.gouv.clea.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import fr.gouv.clea.indexation.index.GenerateClusterIndexTasklet;
import fr.gouv.clea.service.PrefixesStorageService;
import org.springframework.batch.core.Step;
......@@ -22,14 +23,14 @@ public class ClusterIndexGenerationStepBatchConfig {
private BatchProperties batchProperties;
@Bean
public Step clusterIndexGeneration() {
public Step clusterIndexGeneration(final ObjectMapper objectMapper) {
return stepBuilderFactory.get("clusterIndexGeneration")
.tasklet(generateClusterIndex())
.tasklet(generateClusterIndex(objectMapper))
.build();
}
@Bean
public Tasklet generateClusterIndex() {
return new GenerateClusterIndexTasklet(batchProperties, prefixesStorageService);
public Tasklet generateClusterIndex(final ObjectMapper objectMapper) {
return new GenerateClusterIndexTasklet(batchProperties, prefixesStorageService, objectMapper);
}
}
......@@ -59,7 +59,7 @@ public class IdentificationStepBatchConfig {
final SynchronizedItemStreamReader<String> reader = new SynchronizedItemStreamReader<>();
reader.setDelegate(identificationStepReader());
return stepBuilderFactory.get("clusterIdentification")
.<String, List<SinglePlaceClusterPeriod>>chunk(1000)
.<String, List<SinglePlaceClusterPeriod>>chunk(properties.getIdentificationStepChunkSize())
.reader(reader)
.processor(compositeProcessor)
.writer(new SinglePlaceClusterPeriodListWriter(dataSource))
......
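
For readers less familiar with Spring Batch: the value passed to .chunk(...) above controls how many read/process results are buffered before one writer call, which the framework wraps in a single transaction. A simplified mental model, not the framework's actual implementation:

import java.util.ArrayList;
import java.util.List;
import org.springframework.batch.item.ItemProcessor;
import org.springframework.batch.item.ItemReader;
import org.springframework.batch.item.ItemWriter;

class ChunkModel {
    static <I, O> void runChunkStep(ItemReader<I> reader, ItemProcessor<I, O> processor,
                                    ItemWriter<O> writer, int chunkSize) throws Exception {
        List<O> buffer = new ArrayList<>();
        for (I item = reader.read(); item != null; item = reader.read()) {
            O processed = processor.process(item);
            if (processed != null) {       // a null from the processor filters the item out
                buffer.add(processed);
            }
            if (buffer.size() == chunkSize) {
                writer.write(buffer);      // the real framework commits one transaction per chunk
                buffer.clear();
            }
        }
        if (!buffer.isEmpty()) {
            writer.write(buffer);          // trailing partial chunk
        }
    }
}

Externalizing the hardcoded 1000 into identification-step-chunk-size lets operators tune commit frequency without a rebuild.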
package fr.gouv.clea.config;
import com.fasterxml.jackson.databind.ObjectMapper;
import fr.gouv.clea.indexation.IndexationPartitioner;
import fr.gouv.clea.indexation.model.output.ClusterFile;
import fr.gouv.clea.indexation.processor.SinglePlaceClusterBuilder;
......@@ -46,36 +47,35 @@ public class IndexationStepBatchConfig {
private ClusterPeriodModelsMapper mapper;
@Bean
public Step clustersIndexation() {
public Step clustersIndexation(final ObjectMapper objectMapper) {
return this.stepBuilderFactory.get("clustersIndexation")
.partitioner("partitioner", prefixPartitioner())
.partitionHandler(partitionHandler())
.partitionHandler(partitionHandler(objectMapper))
.build();
}
@Bean
public Partitioner prefixPartitioner() {
log.info("callToPartitioner");
return new IndexationPartitioner(prefixesStorageService);
}
@Bean
public TaskExecutorPartitionHandler partitionHandler() {
public TaskExecutorPartitionHandler partitionHandler(final ObjectMapper objectMapper) {
final TaskExecutorPartitionHandler partitionHandler = new TaskExecutorPartitionHandler();
partitionHandler.setGridSize(properties.getGridSize());
partitionHandler.setStep(partitionedClustersIndexation());
partitionHandler.setStep(partitionedClustersIndexation(objectMapper));
partitionHandler.setTaskExecutor(indexationTaskExecutor());
return partitionHandler;
}
@Bean
public Step partitionedClustersIndexation() {
public Step partitionedClustersIndexation(final ObjectMapper objectMapper) {
return stepBuilderFactory.get("partitionedClustersIndexation")
.<Map.Entry<String, List<String>>, ClusterFile>chunk(properties.getChunkSize())
.<Map.Entry<String, List<String>>, ClusterFile>chunk(properties.getIndexationStepChunkSize())
.reader(memoryMapItemReader(null, null))
.processor(singlePlaceClusterBuilder()) // build a Map of ClusterFile at once
.writer(indexationWriter()) // build Files and index
.writer(indexationWriter(objectMapper)) // build Files and index
.build();
}
......@@ -94,8 +94,8 @@ public class IndexationStepBatchConfig {
}
@Bean
public IndexationWriter indexationWriter() {
return new IndexationWriter(properties);
public IndexationWriter indexationWriter(final ObjectMapper objectMapper) {
return new IndexationWriter(properties, objectMapper);
}
@Bean
......
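
The indexationTaskExecutor() bean referenced above is not part of this diff. A plausible shape inside the same config class, sized to the partition grid (an assumption, not the project's actual code):

@Bean
public TaskExecutor indexationTaskExecutor() {
    // org.springframework.scheduling.concurrent.ThreadPoolTaskExecutor
    final ThreadPoolTaskExecutor executor = new ThreadPoolTaskExecutor();
    executor.setCorePoolSize(properties.getGridSize()); // one worker thread per partition
    executor.setMaxPoolSize(properties.getGridSize());
    executor.setThreadNamePrefix("indexation-");
    return executor;
}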
......@@ -15,25 +15,21 @@ import javax.sql.DataSource;
import static fr.gouv.clea.config.BatchConstants.SINGLE_PLACE_CLUSTER_PERIOD_TABLE;
@Configuration
public class EmptyIntermediateDbStepBatchConfig {
public class PurgeIntermediateTableStepBatchConfig {
@Autowired
private StepBuilderFactory stepBuilderFactory;
@Autowired
private DataSource dataSource;
@Bean
public Step emptyIntermediateDb() {
return stepBuilderFactory.get("emptyIntermediateDb")
.tasklet(emptyDb())
public Step purgeIntermediateTable(final JdbcTemplate jdbcTemplate) {
return stepBuilderFactory.get("purgeIntermediateTable")
.tasklet(clearTable(jdbcTemplate))
.build();
}
@Bean
public Tasklet emptyDb() {
public Tasklet clearTable(final JdbcTemplate jdbcTemplate) {
return (contribution, chunkContext) -> {
JdbcOperations jdbcTemplate = new JdbcTemplate(dataSource);
jdbcTemplate.execute("truncate " + SINGLE_PLACE_CLUSTER_PERIOD_TABLE + ";");
return RepeatStatus.FINISHED;
};
......
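
Because clearTable(...) now takes the JdbcTemplate as a parameter instead of building one from the DataSource, the tasklet becomes trivial to unit-test with a mock. A sketch; the test class name and setup are illustrative:

import static fr.gouv.clea.config.BatchConstants.SINGLE_PLACE_CLUSTER_PERIOD_TABLE;
import static org.assertj.core.api.Assertions.assertThat;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;

import org.junit.jupiter.api.Test;
import org.springframework.batch.core.step.tasklet.Tasklet;
import org.springframework.batch.repeat.RepeatStatus;
import org.springframework.jdbc.core.JdbcTemplate;

class PurgeIntermediateTableTaskletTest {

    @Test
    void clearTable_truncates_the_intermediate_table_and_finishes() throws Exception {
        final JdbcTemplate jdbcTemplate = mock(JdbcTemplate.class);
        final Tasklet tasklet = new PurgeIntermediateTableStepBatchConfig().clearTable(jdbcTemplate);

        final RepeatStatus status = tasklet.execute(null, null); // both parameters are unused by the lambda

        assertThat(status).isEqualTo(RepeatStatus.FINISHED);
        verify(jdbcTemplate).execute("truncate " + SINGLE_PLACE_CLUSTER_PERIOD_TABLE + ";");
    }
}

Spring Boot auto-configures a JdbcTemplate bean whenever a single DataSource is present, so the new factory-method parameter resolves without extra configuration.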
......@@ -41,16 +41,16 @@ public class SinglePlaceExposedVisitsBuilder implements ItemProcessor<String, Si
final List<ExposedVisit> list = jdbcTemplate.query("select * from " + EXPOSED_VISITS_TABLE
+ " WHERE ltid= ? ORDER BY " + PERIOD_COLUMN + ", " + TIMESLOT_COLUMN,
rowMapper, UUID.fromString(ltid));
ExposedVisit v = list.stream().findFirst().orElse(null);
if (null != v) {
long ln = counter.incrementAndGet();
if (0 == ln % 1000) {
log.info("Loaded {} visits, current LTId={} ", ln, ltid);
ExposedVisit firstExposedVisit = list.stream().findFirst().orElse(null);
if (null != firstExposedVisit) {
long loadedVisitsCount = counter.incrementAndGet();
if (0 == loadedVisitsCount % 1000) {
log.info("Loaded {} visits, current LTId={} ", loadedVisitsCount, ltid);
}
return SinglePlaceExposedVisits.builder()
.locationTemporaryPublicId(v.getLocationTemporaryPublicId())
.venueType(v.getVenueType()).venueCategory1(v.getVenueCategory1())
.venueCategory2(v.getVenueCategory2()).visits(list).build();
.locationTemporaryPublicId(firstExposedVisit.getLocationTemporaryPublicId())
.venueType(firstExposedVisit.getVenueType()).venueCategory1(firstExposedVisit.getVenueCategory1())
.venueCategory2(firstExposedVisit.getVenueCategory2()).visits(list).build();
}
return null;
}
......
......@@ -29,34 +29,41 @@ public class IndexationPartitioner implements Partitioner {
public Map<String, ExecutionContext> partition(int gridSize) {
log.info("Computing indexation partitions...");
final Map<String, List<String>> map = prefixesStorageService.getPrefixWithAssociatedLtidsMap();
final Map<String, List<String>> prefixLtidsMap = prefixesStorageService.getPrefixWithAssociatedLtidsMap();
final Iterator<Map.Entry<String, List<String>>> mapIterator = map.entrySet().iterator();
final Iterator<Map.Entry<String, List<String>>> prefixLtidsMapIterator = prefixLtidsMap.entrySet().iterator();
final Map<String, ExecutionContext> result = new HashMap<>();
// At least 1 prefix per partition
final int partitionSize = Math.max(map.size()/gridSize, 1) ;
final int partitionSize = (prefixLtidsMap.size() + gridSize - 1) / gridSize; // ceiling division, so the iterator's last entries are not dropped when gridSize does not divide the map size
// map.size() if less prefixes than parameterized gridSize, otherwise gridSize
final int partitionsTotalNumber = Math.min(map.size(), gridSize);
// prefixLtidsMap.size() if fewer prefixes than parameterized gridSize, otherwise gridSize
final int partitionsTotalNumber = Math.min(prefixLtidsMap.size(), gridSize);
// Build partitions execution contexts by splitting map into X equal parts, X being partitionsTotalNumber
for (int partitionsIndex = 0; partitionsIndex < partitionsTotalNumber; partitionsIndex++) {
final ExecutionContext value = new ExecutionContext();
final List<String> prefixes = new ArrayList<>();
final List<List<String>> ltids = new ArrayList<>();
for (int partitionIndex = 0; partitionIndex < partitionSize; partitionIndex++) {
if (mapIterator.hasNext()) {
final var nextItem = mapIterator.next();
prefixes.add(nextItem.getKey());
ltids.add(nextItem.getValue());
} else {
break;
}
value.put("prefixes", prefixes);
value.put("ltids", ltids);
}
final ExecutionContext value = createPartitionExecutionContext(prefixLtidsMapIterator, partitionSize);
result.put("partition-"+partitionsIndex, value);
}
log.debug("{} partitions created", result.size());
return result;
}
private ExecutionContext createPartitionExecutionContext(Iterator<Map.Entry<String, List<String>>> prefixesLtidsMapIterator, int partitionSize) {
final ExecutionContext value = new ExecutionContext();
final List<String> prefixes = new ArrayList<>();
final List<List<String>> ltids = new ArrayList<>();
for (int partitionIndex = 0; partitionIndex < partitionSize; partitionIndex++) {
if (prefixesLtidsMapIterator.hasNext()) {
final var nextItem = prefixesLtidsMapIterator.next();
prefixes.add(nextItem.getKey());
ltids.add(nextItem.getValue());
} else {
break;
}
}
value.put("prefixes", prefixes);
value.put("ltids", ltids);
return value;
}
}
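
The partitioner stores two parallel lists under the keys "prefixes" and "ltids"; each worker step reads them back through step-scoped late binding, which is why the diff calls memoryMapItemReader(null, null) with placeholder arguments. A sketch of how that reader is presumably wired (an assumption, the reader itself is outside this diff):

@Bean
@StepScope
public ItemReader<Map.Entry<String, List<String>>> memoryMapItemReader(
        @Value("#{stepExecutionContext['prefixes']}") final List<String> prefixes,
        @Value("#{stepExecutionContext['ltids']}") final List<List<String>> ltids) {
    // re-zip the two parallel lists into the Map.Entry items the partitioned step declares
    final Map<String, List<String>> partitionMap = new LinkedHashMap<>();
    for (int i = 0; i < prefixes.size(); i++) {
        partitionMap.put(prefixes.get(i), ltids.get(i));
    }
    final Iterator<Map.Entry<String, List<String>>> entries = partitionMap.entrySet().iterator();
    return () -> entries.hasNext() ? entries.next() : null; // null ends this partition's input
}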
......@@ -18,6 +18,8 @@ import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Set;
import static fr.gouv.clea.config.BatchConstants.CLUSTER_INDEX_FILENAME;
@Slf4j
public class GenerateClusterIndexTasklet implements Tasklet {
......@@ -25,9 +27,12 @@ public class GenerateClusterIndexTasklet implements Tasklet {
private final String outputPath;
public GenerateClusterIndexTasklet(final BatchProperties batchProperties, PrefixesStorageService prefixesStorageService) {
private final ObjectMapper objectMapper;
public GenerateClusterIndexTasklet(final BatchProperties batchProperties, PrefixesStorageService prefixesStorageService, ObjectMapper objectMapper) {
this.outputPath = batchProperties.getFilesOutputPath();
this.prefixesStorageService = prefixesStorageService;
this.objectMapper = objectMapper;
}
@Override
......@@ -48,12 +53,11 @@ public class GenerateClusterIndexTasklet implements Tasklet {
.prefixes(prefixes)
.build();
log.info("Generating cluster index : " + outputPath + File.separator + "clusterIndex.json");
log.info("Generating cluster index : " + outputPath + File.separator + CLUSTER_INDEX_FILENAME);
Path jsonPath = Paths.get(outputPath + File.separator + "clusterIndex.json");
Path jsonPath = Paths.get(outputPath + File.separator + CLUSTER_INDEX_FILENAME);
File jsonIndex = jsonPath.toFile();
ObjectMapper mapper = new ObjectMapper();
mapper.enable(SerializationFeature.INDENT_OUTPUT);
mapper.writeValue(jsonIndex, clusterFileIndex);
objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
objectMapper.writeValue(jsonIndex, clusterFileIndex);
}
}
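
One caveat with the injected mapper: enable(SerializationFeature.INDENT_OUTPUT) here (and again in IndexationWriter below) mutates the shared, application-wide ObjectMapper bean rather than a private copy. If indented output is wanted globally, configuring it once at bean-definition time is cleaner; a sketch using Spring Boot's standard customizer hook:

@Bean
public Jackson2ObjectMapperBuilderCustomizer indentJsonOutput() {
    // applied when Spring Boot builds the shared ObjectMapper, so callers need no enable(...) calls
    return builder -> builder.indentOutput(true);
}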
......@@ -52,7 +52,7 @@ public class SinglePlaceClusterBuilder implements ItemProcessor<Map.Entry<String
private Consumer<String> createClusterFile(final ClusterFile clusterFile) {
return ltid -> {
final List<SinglePlaceClusterPeriod> singlePlacePeriodsList = queryForSinglePlaceClusterPeriods(ltid);
final List<SinglePlaceClusterPeriod> singlePlacePeriodsList = getSinglePlaceClusterPeriods(ltid);
// Verify if at least one period is present, and if so, proceed
singlePlacePeriodsList.stream().findFirst().ifPresent(firstPeriod -> {
List<ClusterPeriod> clusterPeriods = buildClusterPeriods(singlePlacePeriodsList);
......@@ -71,7 +71,7 @@ public class SinglePlaceClusterBuilder implements ItemProcessor<Map.Entry<String
.build());
}
private List<SinglePlaceClusterPeriod> queryForSinglePlaceClusterPeriods(final String ltid) {
private List<SinglePlaceClusterPeriod> getSinglePlaceClusterPeriods(final String ltid) {
return jdbcTemplate.query("select * from " + SINGLE_PLACE_CLUSTER_PERIOD_TABLE + " WHERE ltid= ?",
new SinglePlaceClusterPeriodRowMapper(), UUID.fromString(ltid));
}
......
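
SinglePlaceClusterPeriodRowMapper is outside this diff; for context, a plausible shape, assuming the column names from BatchConstants and a Lombok builder on the period class (both assumptions):

public class SinglePlaceClusterPeriodRowMapper implements RowMapper<SinglePlaceClusterPeriod> {
    @Override
    public SinglePlaceClusterPeriod mapRow(final ResultSet rs, final int rowNum) throws SQLException {
        return SinglePlaceClusterPeriod.builder() // builder assumed
                .locationTemporaryPublicId(UUID.fromString(rs.getString("ltid"))) // column name assumed
                .clusterStart(rs.getLong(CLUSTER_START_COL))
                .clusterDurationInSeconds(rs.getInt(CLUSTER_DURATION_COL))
                .riskLevel(rs.getInt(RISK_LEVEL_COL))
                .build();
    }
}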
......@@ -4,11 +4,8 @@ import com.fasterxml.jackson.databind.ObjectMapper;
import com.fasterxml.jackson.databind.SerializationFeature;
import fr.gouv.clea.config.BatchProperties;
import fr.gouv.clea.indexation.model.output.ClusterFile;
import fr.gouv.clea.indexation.model.output.ClusterFileIndex;
import fr.gouv.clea.service.PrefixesStorageService;
import lombok.extern.slf4j.Slf4j;
import org.springframework.batch.core.StepExecution;
import org.springframework.batch.core.annotation.AfterStep;
import org.springframework.batch.core.annotation.BeforeStep;
import org.springframework.batch.core.configuration.annotation.StepScope;
import org.springframework.batch.item.ItemWriter;
......@@ -19,45 +16,50 @@ import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.List;
import java.util.Set;
import static fr.gouv.clea.config.BatchConstants.JSON_FILE_EXTENSION;
@StepScope
@Slf4j
public class IndexationWriter implements ItemWriter<ClusterFile> {
private String outputPath;
private final String rootDirectoryOutputPath;
private final ObjectMapper objectMapper;
private Long jobId;
private Path jobDirectoryOutputPath;
@BeforeStep
public void retrieveInterStepData(final StepExecution stepExecution) {
this.jobId = stepExecution.getJobExecutionId();
this.jobDirectoryOutputPath = Path.of(this.rootDirectoryOutputPath, this.jobId.toString());
}
public IndexationWriter(final BatchProperties config) {
this.outputPath = config.getFilesOutputPath();
public IndexationWriter(final BatchProperties config, ObjectMapper objectMapper) {
this.rootDirectoryOutputPath = config.getFilesOutputPath();
this.objectMapper = objectMapper;
}
@Override
public void write(List<? extends ClusterFile> clusterFile) throws Exception {
log.info("Creating directories if not exists: " + outputPath + File.separator + jobId + File.separator);
Files.createDirectories(Paths.get(outputPath + File.separator + this.jobId + File.separator));
log.info("Creating directories if not exists: {}", jobDirectoryOutputPath);
Files.createDirectories(jobDirectoryOutputPath);
clusterFile.forEach(this::generateClusterFile);
clusterFile.forEach(clusterFile1 -> generateClusterFile(clusterFile1, jobDirectoryOutputPath));
}
private void generateClusterFile(final ClusterFile clusterFile) {
private void generateClusterFile(final ClusterFile clusterFile, final Path directoryOutputPath) {
final String outputClusterFilePath = outputPath + File.separator + this.jobId + File.separator + clusterFile.getName() + ".json";
log.debug("Generating cluster file : {}", outputClusterFilePath);
Path jsonClusterPath = Paths.get(outputClusterFilePath);
final Path jsonClusterPath = Path.of(directoryOutputPath.toString(), clusterFile.getName()+JSON_FILE_EXTENSION);
log.debug("Generating cluster file : {}", jsonClusterPath);
File jsonClusterFile = jsonClusterPath.toFile();
ObjectMapper mapper = new ObjectMapper();
mapper.enable(SerializationFeature.INDENT_OUTPUT);
objectMapper.enable(SerializationFeature.INDENT_OUTPUT);
try {
mapper.writeValue(jsonClusterFile, clusterFile.getItems());
objectMapper.writeValue(jsonClusterFile, clusterFile.getItems());
} catch (IOException e) {
log.error(e.getMessage());
e.printStackTrace();
......
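
The catch block above logs and swallows the IOException, so a failed file write leaves the step COMPLETED with missing output. A hedged alternative (a suggestion, not part of the commit) that lets Spring Batch mark the step FAILED and keeps the job restartable:

private void generateClusterFile(final ClusterFile clusterFile, final Path directoryOutputPath) {
    final Path jsonClusterPath = directoryOutputPath.resolve(clusterFile.getName() + JSON_FILE_EXTENSION);
    log.debug("Generating cluster file : {}", jsonClusterPath);
    try {
        objectMapper.writeValue(jsonClusterPath.toFile(), clusterFile.getItems());
    } catch (IOException e) {
        // rethrow instead of swallowing: the step fails fast and the error keeps its stack trace
        throw new UncheckedIOException("Failed to write cluster file " + jsonClusterPath, e);
    }
}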
......@@ -26,7 +26,7 @@ public class ListItemReader implements ItemReader<List<String>>, ItemStream {
ltidList.add(clusterLtid);
}
//null return means all input data has been read; Spring Batch then ends the step's read loop
return !ltidList.isEmpty() ? ltidList : null;
return ltidList.isEmpty() ? null : ltidList;
}
/**
......
......@@ -6,7 +6,6 @@ import fr.gouv.clea.service.PrefixesStorageService;
import lombok.RequiredArgsConstructor;
import org.springframework.batch.item.ItemWriter;
import java.util.ArrayList;
import java.util.List;
@RequiredArgsConstructor
......@@ -16,17 +15,17 @@ public class PrefixesMemoryWriter implements ItemWriter<List<String>> {
private final int prefixLength;
public PrefixesMemoryWriter(BatchProperties config, PrefixesStorageService prefixesStorageService) {
public PrefixesMemoryWriter(final BatchProperties config, final PrefixesStorageService prefixesStorageService) {
this.prefixLength = config.getStaticPrefixLength();
this.prefixesStorageService = prefixesStorageService;
}
@Override
public void write(List<? extends List<String>> ltids) {
public void write(final List<? extends List<String>> ltids) {
ltids.get(0).forEach(ltid -> {
final String prefix = Prefix.of(ltid, prefixLength);
prefixesStorageService.getPrefixWithAssociatedLtidsMap().computeIfAbsent(prefix, p -> new ArrayList<>());
prefixesStorageService.getPrefixWithAssociatedLtidsMap().get(prefix).add(ltid);
prefixesStorageService.addPrefixIfAbsent(prefix);
prefixesStorageService.addLtidToPrefix(prefix, ltid);
});
}
}
......@@ -3,6 +3,7 @@ package fr.gouv.clea.service;
import lombok.Getter;
import org.springframework.stereotype.Component;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.concurrent.ConcurrentHashMap;
......@@ -12,4 +13,12 @@ public class PrefixesStorageService {
@Getter
private final Map<String, List<String>> prefixWithAssociatedLtidsMap = new ConcurrentHashMap<>();
public void addPrefixIfAbsent(final String prefix) {
prefixWithAssociatedLtidsMap.computeIfAbsent(prefix, p -> new ArrayList<>());
}
public void addLtidToPrefix(final String prefix, final String ltid) {
prefixWithAssociatedLtidsMap.get(prefix).add(ltid);
}
}
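
If the prefixes-computing step ever runs with more than one writer thread, the addPrefixIfAbsent/addLtidToPrefix pair is a check-then-act sequence split across two calls. On a ConcurrentHashMap the two can collapse into one atomic lookup; a sketch (note the ArrayList value itself would still need synchronization under truly concurrent writers):

public void addLtidToPrefix(final String prefix, final String ltid) {
    // one atomic computeIfAbsent, making a separate addPrefixIfAbsent call unnecessary
    prefixWithAssociatedLtidsMap.computeIfAbsent(prefix, p -> new ArrayList<>()).add(ltid);
}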
......@@ -3,9 +3,6 @@ spring:
driverClassName: org.postgresql.Driver
test-on-borrow: true
validation-query: SELECT 1
hikari:
#FIXME: 3h20 for now - testing
connection-timeout: 12000000
jpa:
hibernate.ddl-auto: none
database-platform: org.hibernate.dialect.PostgreSQL95Dialect # even with h2 database
......@@ -28,5 +25,6 @@ clea:
static-prefix-length: ${CLEA_BATCH_PREFIX_LENGTH:2}
files-output-path: ${CLEA_BATCH_OUTPUT_PATH:/tmp/v1}
#TODO: totally arbitrary values, find out actual plausible values
grid-size: ${CLEA_BATCH_CLUSTER_GRID_SIZE:6}
chunk-size: ${CLEA_BATCH_CLUSTER_CHUNK_SIZE:1000}
\ No newline at end of file
grid-size: 6
identification-step-chunk-size: 1000
indexation-step-chunk-size: 1000
\ No newline at end of file
......@@ -2,6 +2,7 @@ package fr.gouv.clea.identification;
import fr.gouv.clea.entity.ExposedVisit;
import lombok.extern.slf4j.Slf4j;
import org.junit.jupiter.api.Disabled;
import org.junit.jupiter.api.Test;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.boot.test.context.SpringBootTest;
......@@ -42,12 +43,13 @@ public class ExposedVisitGenerator {
// }
@Test
@Disabled("for local development purpose")
public void fillRandomVisits() {
// hour of now : 3826008000
// hour of 21-01-01 : 3818448000
// diff: 7560000
final int NBLIEUX = 5000;
final int NB_LOCATIONS = 5000;
final int batchSize = 10;
final Random r = new Random();
......@@ -58,7 +60,7 @@ public class ExposedVisitGenerator {
log.info("Starting to fill EXPOSED_VISITS...");
for (int l = 0; l < NBLIEUX; l++) {
for (int l = 0; l < NB_LOCATIONS; l++) {
UUID lieu = UUID.randomUUID();
int venueType = r.nextInt(18)+1; // 1 to 18
int venueCategory1 = r.nextInt(4) + 1; // 1 to 4
......
......@@ -15,6 +15,8 @@ import org.mockito.junit.jupiter.MockitoExtension;
import java.util.List;
import java.util.UUID;
import static org.assertj.core.api.Assertions.assertThat;
@DisplayNameGeneration(DisplayNameGenerator.ReplaceUnderscores.class)
@ExtendWith(MockitoExtension.class)
class ClusterToPeriodsProcessorTest {
......@@ -35,26 +37,26 @@ class ClusterToPeriodsProcessorTest {
final int lastTimeSlot = 1001;
final int riskLevel = 4;
final long periodStart = 1L;
final ClusterPeriod clusterPeriod = buildPeriod(clusterStart, clusterDurationInSeconds, firstTimeSlot, lastTimeSlot, riskLevel, periodStart);
final SinglePlaceCluster cluster = buildCluster(ltid, venueType, venueCat1, venueCat2, clusterPeriod);
final ClusterPeriod p1 = buildPeriod(clusterStart, clusterDurationInSeconds, firstTimeSlot, lastTimeSlot, riskLevel, periodStart);
final SinglePlaceCluster cluster = buildCluster(ltid, venueType, venueCat1, venueCat2, p1);
final List<SinglePlaceClusterPeriod> singlePlaceClusterPeriods = processor.process(cluster);
Assertions.assertThat(singlePlaceClusterPeriods.size()).isEqualTo(1);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getClusterStart()).isEqualTo(clusterStart);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getClusterDurationInSeconds()).isEqualTo(clusterDurationInSeconds);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getFirstTimeSlot()).isEqualTo(firstTimeSlot);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getLastTimeSlot()).isEqualTo(lastTimeSlot);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getPeriodStart()).isEqualTo(periodStart);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getLocationTemporaryPublicId()).isEqualTo(ltid);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getVenueType()).isEqualTo(venueType);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getVenueCategory1()).isEqualTo(venueCat1);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getVenueCategory2()).isEqualTo(venueCat2);
Assertions.assertThat(singlePlaceClusterPeriods.get(0).getRiskLevel()).isEqualTo(riskLevel);
assertThat(singlePlaceClusterPeriods.size()).isEqualTo(1);
assertThat(singlePlaceClusterPeriods.get(0).getClusterStart()).isEqualTo(clusterStart);
assertThat(singlePlaceClusterPeriods.get(0).getClusterDurationInSeconds()).isEqualTo(clusterDurationInSeconds);
assertThat(singlePlaceClusterPeriods.get(0).getFirstTimeSlot()).isEqualTo(firstTimeSlot);
assertThat(singlePlaceClusterPeriods.get(0).getLastTimeSlot()).isEqualTo(lastTimeSlot);
assertThat(singlePlaceClusterPeriods.get(0).getPeriodStart()).isEqualTo(periodStart);
assertThat(singlePlaceClusterPeriods.get(0).getLocationTemporaryPublicId()).isEqualTo(ltid);
assertThat(singlePlaceClusterPeriods.get(0).getVenueType()).isEqualTo(venueType);
assertThat(singlePlaceClusterPeriods.get(0).getVenueCategory1()).isEqualTo(venueCat1);
assertThat(singlePlaceClusterPeriods.get(0).getVenueCategory2()).isEqualTo(venueCat2);
assertThat(singlePlaceClusterPeriods.get(0).getRiskLevel()).isEqualTo(riskLevel);
}
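
The same assertions read a little tighter by checking the size once and extracting the single element (equivalent AssertJ calls, shown as an optional follow-up):

assertThat(singlePlaceClusterPeriods).hasSize(1);
final SinglePlaceClusterPeriod period = singlePlaceClusterPeriods.get(0);
assertThat(period.getClusterStart()).isEqualTo(clusterStart);
assertThat(period.getRiskLevel()).isEqualTo(riskLevel);
// ...and so on for the remaining fields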
private SinglePlaceCluster buildCluster(UUID ltid, int venueType, int venueCat1, int venueCat2, ClusterPeriod p1) {
private SinglePlaceCluster buildCluster(final UUID ltid, final int venueType, final int venueCat1, final int venueCat2, final ClusterPeriod p1) {
final SinglePlaceCluster cluster = new SinglePlaceCluster();