Commit 63f68df6 authored by Figue Orange

Merge branch 'develop' into master

parents d0661651 b70084f5
Pipeline #308064 passed with stages in 30 minutes and 42 seconds
@@ -44,5 +44,5 @@ integration-tests:
- docker-compose down
artifacts:
paths:
- "$CI_PROJECT_DIR/cucumber-reports.html"
- "$CI_PROJECT_DIR/target/cucumber-reports.html"
expire_in: 2 weeks
/*
* Copyright 2007-present the original author or authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
import java.net.*;
import java.io.*;
import java.nio.channels.*;
import java.util.Properties;
public class MavenWrapperDownloader {
private static final String WRAPPER_VERSION = "0.5.6";
/**
* Default URL to download the maven-wrapper.jar from, if no 'downloadUrl' is provided.
*/
private static final String DEFAULT_DOWNLOAD_URL = "https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/"
+ WRAPPER_VERSION + "/maven-wrapper-" + WRAPPER_VERSION + ".jar";
/**
* Path to the maven-wrapper.properties file, which might contain a downloadUrl property to
* use instead of the default one.
*/
private static final String MAVEN_WRAPPER_PROPERTIES_PATH =
".mvn/wrapper/maven-wrapper.properties";
/**
* Path where the maven-wrapper.jar will be saved to.
*/
private static final String MAVEN_WRAPPER_JAR_PATH =
".mvn/wrapper/maven-wrapper.jar";
/**
* Name of the property which should be used to override the default download url for the wrapper.
*/
private static final String PROPERTY_NAME_WRAPPER_URL = "wrapperUrl";
public static void main(String args[]) {
System.out.println("- Downloader started");
File baseDirectory = new File(args[0]);
System.out.println("- Using base directory: " + baseDirectory.getAbsolutePath());
// If the maven-wrapper.properties exists, read it and check if it contains a custom
// wrapperUrl parameter.
File mavenWrapperPropertyFile = new File(baseDirectory, MAVEN_WRAPPER_PROPERTIES_PATH);
String url = DEFAULT_DOWNLOAD_URL;
if (mavenWrapperPropertyFile.exists()) {
FileInputStream mavenWrapperPropertyFileInputStream = null;
try {
mavenWrapperPropertyFileInputStream = new FileInputStream(mavenWrapperPropertyFile);
Properties mavenWrapperProperties = new Properties();
mavenWrapperProperties.load(mavenWrapperPropertyFileInputStream);
url = mavenWrapperProperties.getProperty(PROPERTY_NAME_WRAPPER_URL, url);
} catch (IOException e) {
System.out.println("- ERROR loading '" + MAVEN_WRAPPER_PROPERTIES_PATH + "'");
} finally {
try {
if (mavenWrapperPropertyFileInputStream != null) {
mavenWrapperPropertyFileInputStream.close();
}
} catch (IOException e) {
// Ignore ...
}
}
}
System.out.println("- Downloading from: " + url);
File outputFile = new File(baseDirectory.getAbsolutePath(), MAVEN_WRAPPER_JAR_PATH);
if (!outputFile.getParentFile().exists()) {
if (!outputFile.getParentFile().mkdirs()) {
System.out.println(
"- ERROR creating output directory '" + outputFile.getParentFile().getAbsolutePath() + "'");
}
}
System.out.println("- Downloading to: " + outputFile.getAbsolutePath());
try {
downloadFileFromURL(url, outputFile);
System.out.println("Done");
System.exit(0);
} catch (Throwable e) {
System.out.println("- Error downloading");
e.printStackTrace();
System.exit(1);
}
}
private static void downloadFileFromURL(String urlString, File destination) throws Exception {
if (System.getenv("MVNW_USERNAME") != null && System.getenv("MVNW_PASSWORD") != null) {
String username = System.getenv("MVNW_USERNAME");
char[] password = System.getenv("MVNW_PASSWORD").toCharArray();
Authenticator.setDefault(new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password);
}
});
}
URL website = new URL(urlString);
ReadableByteChannel rbc;
rbc = Channels.newChannel(website.openStream());
FileOutputStream fos = new FileOutputStream(destination);
fos.getChannel().transferFrom(rbc, 0, Long.MAX_VALUE);
fos.close();
rbc.close();
}
}
distributionUrl=https://repo.maven.apache.org/maven2/org/apache/maven/apache-maven/3.6.3/apache-maven-3.6.3-bin.zip
wrapperUrl=https://repo.maven.apache.org/maven2/io/takari/maven-wrapper/0.5.6/maven-wrapper-0.5.6.jar
@@ -6,18 +6,17 @@ die() { echo "`date "+%Y-%m-%d %T.999"` ERROR 0 --- [ clea-batch] clea-batc
info() { echo "`date "+%Y-%m-%d %T.999"` INFO 0 --- [ clea-batch] clea-batch.sh : $*"; }
WORKDIR=${CLEA_BATCH_CLUSTER_OUTPUT_PATH:-/tmp/v1}
BUCKET=${BUCKET:-}
BUCKET_OUTSCALE=${BUCKET_OUTSCALE:-$BUCKET}
BUCKET_SCALEWAY=${BUCKET_SCALEWAY:-$BUCKET}
[ -n "${BUCKET_OUTSCALE}" ] || die "Environment variable BUCKET_OUTSCALE required"
[ -n "${BUCKET_SCALEWAY}" ] || die "Environment variable BUCKET_SCALEWAY required"
PROFILE_OUTSCALE=${PROFILE_OUTSCALE:-s3outscale}
PROFILE_SCALEWAY=${PROFILE_SCALEWAY:-s3scaleway}
ENDPOINT_OUTSCALE=${ENDPOINT_OUTSCALE:-} # use https://oos.eu-west-2.outscale.com/ https://oos.cloudgouv-eu-west-1.outscale.com
ENDPOINT_SCALEWAY=${ENDPOINT_SCALEWAY:-} # https://s3.fr-par.scw.cloud
[ -n "${ENDPOINT_OUTSCALE}" ] || die "Environment variable ENDPOINT_OUTSCALE required"
[ -n "${ENDPOINT_SCALEWAY}" ] || die "Environment variable ENDPOINT_OUTSCALE required"
BUCKET_FILES_RETENTION_IN_DAYS=${BUCKET_FILES_RETENTION_IN_DAYS:-15}
set -o pipefail # trace ERR through pipes
set -o errtrace # trace ERR through 'time command' and other functions
@@ -25,21 +24,45 @@ set -o nounset ## set -u : exit the script if you try to use an uninitialised
#set -o errexit ## set -e : exit the script if any statement returns a non-true return value
set +e
copy_files_to_bucket() {
[ -n "${BUCKET_OUTSCALE}" ] || die "Environment variable BUCKET_OUTSCALE required"
local PROFILE=$1
local BUCKET=$2
local ENDPOINT=$3
if ! java -jar clea-batch.jar $@ ; then
die "Java batch fails"
fi
info "Copying to $ENDPOINT ...."
AWS_OPTS="--profile=$PROFILE --endpoint-url=$ENDPOINT --no-progress"
# All files except clusterIndex.json
aws $AWS_OPTS s3 sync --acl public-read --exclude=clusterIndex.json $WORKDIR s3://${BUCKET}/v1 || die "AWS s3 fails to copy cluster files to bucket"
# only clusterIndex.json at the root of "v1"
aws $AWS_OPTS s3 cp --acl public-read $(find $WORKDIR -type f -name clusterIndex.json) s3://${BUCKET}/v1/ || die "AWS s3 fails to copy clusterIndex file to bucket"
}
purge_old_bucket_iterations() {
# purge bucket files older than x days
# add --dryrun for testing purpose
info "Copying files...."
local PROFILE=$1
local BUCKET=$2
local ENDPOINT=$3
TODAY_MINUS_RETENTION_DAYS=$(date --date="${BUCKET_FILES_RETENTION_IN_DAYS} days ago" +%Y-%m-%d)
info "Purging bucket files older than ${TODAY_MINUS_RETENTION_DAYS}"
aws --profile=$PROFILE --endpoint-url=$ENDPOINT s3 ls --recursive s3://${BUCKET}/v1 \
| awk -v date=$TODAY_MINUS_RETENTION_DAYS '$1 < date {print $4}' \
| xargs -n1 -t -I {} aws s3 --profile=$PROFILE --endpoint-url=$ENDPOINT rm s3://${BUCKET}/{}
}
if ! java -jar clea-batch.jar $@ ; then
die "Java batch fails"
fi
# Check that the output folder exists; computing NBFILES fails if it doesn't
[ -d $WORKDIR ] || die "Working directory $WORKDIR does not exist"
# check that there are at least "n" cluster files (so we don't push an empty list)
MIN_FILES=1
NBFILES=$(find $WORKDIR -type f | wc -l)
@@ -54,35 +77,16 @@ if [ $NB_INDEX -gt 1 ] ; then
die "Many clusterIndex.json found ($NB_INDEX), possible partial or failed batch already present"
fi
# Copy clusterfiles to s3
# =======================
info "Copying to OUTSCALE ...."
AWS_OPTS="--profile=$PROFILE_OUTSCALE --endpoint-url=$ENDPOINT_OUTSCALE --no-progress"
BUCKET="$BUCKET_OUTSCALE"
# All files except indexCluster.json
aws $AWS_OPTS s3 sync --acl public-read --exclude=clusterIndex.json $WORKDIR s3://${BUCKET}/v1 || die "AWS s3 fails to copy cluster files to bucket"
# only indexCluster.json at the root of "v1"
aws $AWS_OPTS s3 cp --acl public-read $(find $WORKDIR -type f -name clusterIndex.json) s3://${BUCKET}/v1/ || die "AWS s3 fails to copy clusterIndex file to bucket"
# COPY TO SCALEWAY (optional)
# --------------------
if [ -n "$BUCKET_SCALEWAY" ] && [ -n "$PROFILE_SCALEWAY" ] && [ -n "$ENDPOINT_SCALEWAY" ] ; then
info "Copying to SCALEWAY ...."
AWS_OPTS="--profile=$PROFILE_SCALEWAY --endpoint-url=$ENDPOINT_SCALEWAY --no-progress"
BUCKET=$BUCKET_SCALEWAY
copy_files_to_bucket $PROFILE_OUTSCALE $BUCKET_OUTSCALE $ENDPOINT_OUTSCALE
copy_files_to_bucket $PROFILE_SCALEWAY $BUCKET_SCALEWAY $ENDPOINT_SCALEWAY
# All files except indexCluster.json
aws $AWS_OPTS s3 sync --acl public-read --exclude=clusterIndex.json $WORKDIR s3://${BUCKET}/v1 || die "AWS s3 fails to copy cluster files to bucket"
# only indexCluster.json at the root of "v1"
aws $AWS_OPTS s3 cp --acl public-read $(find $WORKDIR -type f -name clusterIndex.json) s3://${BUCKET}/v1/ || die "AWS s3 fails to copy clusterIndex file to bucket"
fi
purge_old_bucket_iterations $PROFILE_OUTSCALE $BUCKET_OUTSCALE $ENDPOINT_OUTSCALE
purge_old_bucket_iterations $PROFILE_SCALEWAY $BUCKET_SCALEWAY $ENDPOINT_SCALEWAY
# purge batch temporary files
info "Purging working files"
rm -rf $WORKDIR
info "End of clea-batch"
#!/usr/bin/env bash
PROFILE=
BUCKET=
ENDPOINT=
BUCKET_FILES_RETENTION_IN_DAYS=15
TODAY_MINUS_RETENTION_DAYS=$(date --date="${BUCKET_FILES_RETENTION_IN_DAYS} days ago" +%Y-%m-%d)
echo "Listing bucket files and extracting iterations older than $TODAY_MINUS_RETENTION_DAYS"
ITERATIONS_TO_REMOVE=$(
aws --profile=$PROFILE --endpoint-url=$ENDPOINT s3 ls --recursive s3://${BUCKET}/v1 \
| awk -v "date=$TODAY_MINUS_RETENTION_DAYS" '$1 < date {print $4}' \
| grep '^v1/[0-9]*/' \
| cut -d'/' -f 2 \
| sort \
| uniq
)
echo "Found $(echo -n "$ITERATIONS_TO_REMOVE" | wc -l) iterations to remove"
for i in $ITERATIONS_TO_REMOVE ; do
echo "Removing iteration $i"
aws s3 --profile=$PROFILE --endpoint-url=$ENDPOINT rm "s3://$BUCKET/v1/$i" --recursive
done
@@ -103,6 +103,11 @@
<artifactId>assertj-core</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.awaitility</groupId>
<artifactId>awaitility</artifactId>
<scope>compile</scope>
</dependency>
<dependency>
<groupId>org.apache.commons</groupId>
<artifactId>commons-lang3</artifactId>
......
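The awaitility artifact added to the pom above is commonly used in this kind of integration test to poll until an asynchronous condition becomes true (for example, waiting for batch output or for statistics to be indexed). Below is a minimal, self-contained sketch of the API, with the asynchronous producer simulated by a background thread; the class and variable names are illustrative and not taken from the project.

import static org.assertj.core.api.Assertions.assertThat;
import static org.awaitility.Awaitility.await;

import java.time.Duration;
import java.util.concurrent.atomic.AtomicInteger;

public class AwaitilityUsageSketch {

    public static void main(String[] args) {
        AtomicInteger indexedReports = new AtomicInteger();

        // Simulate asynchronous work completing after a short delay.
        new Thread(() -> {
            try {
                Thread.sleep(500);
            } catch (InterruptedException ignored) {
            }
            indexedReports.set(3);
        }).start();

        // Poll every 100 ms, for at most 10 seconds, until the assertion passes.
        await()
                .atMost(Duration.ofSeconds(10))
                .pollInterval(Duration.ofMillis(100))
                .untilAsserted(() -> assertThat(indexedReports.get()).isGreaterThan(0));
    }
}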
@@ -5,7 +5,7 @@ import org.springframework.boot.autoconfigure.SpringBootApplication;
import org.springframework.boot.context.properties.ConfigurationPropertiesScan;
@SpringBootApplication
@ConfigurationPropertiesScan("fr.gouv.clea.integrationtests.config")
@ConfigurationPropertiesScan
public class CleaIntegrationTestsApplication {
public static void main(String[] args) {
......
@@ -3,7 +3,7 @@ package fr.gouv.clea.integrationtests;
import io.cucumber.spring.CucumberContextConfiguration;
import org.springframework.boot.test.context.SpringBootTest;
@SpringBootTest(classes = { CleaIntegrationTestsApplication.class })
@SpringBootTest
@CucumberContextConfiguration
public class CucumberSpringConfiguration {
}
@@ -5,6 +5,6 @@ import io.cucumber.junit.CucumberOptions;
import org.junit.runner.RunWith;
@RunWith(Cucumber.class)
@CucumberOptions(features = { "classpath:features" }, plugin = { "pretty", "html:cucumber-reports.html" })
@CucumberOptions(features = { "classpath:features" }, plugin = { "pretty", "html:target/cucumber-reports.html" })
public class CucumberTest {
}
@@ -6,14 +6,13 @@ import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Profile;
@Slf4j
@Configuration
@RequiredArgsConstructor
public class BeansConfiguration {
public class ApplicationConfiguration {
private final MinioProperties minioProperties;
private final ApplicationProperties applicationProperties;
@Bean
public ObjectMapper objectMapper() {
@@ -21,20 +20,13 @@ public class BeansConfiguration {
}
@Bean
@Profile("docker | dev")
public MinioClient dockerMinioClient() {
return MinioClient.builder()
.endpoint(minioProperties.getUrl())
.credentials(minioProperties.getAccess().getName(), minioProperties.getAccess().getSecret())
.build();
}
@Bean
// TODO: Next step, test on actual platform. No guarantees that it works yet.
@Profile("!(docker | dev)")
public MinioClient minioClient() {
return MinioClient.builder()
.endpoint(minioProperties.getUrl())
.endpoint(applicationProperties.getBucket().getUrl())
.credentials(
applicationProperties.getBucket().getAccessKey(),
applicationProperties.getBucket().getSecretKey()
)
.build();
}
}
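With the configuration above reduced to a single MinioClient bean pointing at the bucket described by ApplicationProperties, a test helper can read the published artifacts directly from the bucket. The following is a rough sketch only, assuming the clusterIndex.json layout produced by clea-batch.sh; the class and method names are illustrative and not part of the project.

import io.minio.GetObjectArgs;
import io.minio.MinioClient;

import java.nio.charset.StandardCharsets;

public class ClusterIndexReader {

    private final MinioClient minioClient;
    private final String bucketName;

    public ClusterIndexReader(MinioClient minioClient, String bucketName) {
        this.minioClient = minioClient;
        this.bucketName = bucketName;
    }

    // clea-batch.sh publishes clusterIndex.json at the root of the "v1" prefix.
    public String fetchClusterIndexJson() throws Exception {
        try (var response = minioClient.getObject(
                GetObjectArgs.builder()
                        .bucket(bucketName)
                        .object("v1/clusterIndex.json")
                        .build())) {
            return new String(response.readAllBytes(), StandardCharsets.UTF_8);
        }
    }
}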
package fr.gouv.clea.integrationtests.config;
import lombok.AllArgsConstructor;
import lombok.Value;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.ConstructorBinding;
import org.springframework.validation.annotation.Validated;
import javax.annotation.Nullable;
import javax.validation.Valid;
import javax.validation.constraints.NotBlank;
import javax.validation.constraints.NotNull;
import java.net.URL;
@Value
@Validated
@AllArgsConstructor
@ConstructorBinding
@ConfigurationProperties("clea")
@ConfigurationProperties("integration-tests")
public class ApplicationProperties {
@Valid
CleaWsRestProperties wsRest;
@Valid
CleaBatchProperties batch;
@NotNull
Integer dupScanThreshold;
String serverAuthorityPublicKey;
@NotNull
String qrCodePrefix;
String manualContactTracingAuthorityPublicKey;
@Nullable
String serverAuthorityPublicKey;
WsRest wsRest;
@Nullable
String manualContactTracingAuthorityPublicKey;
Batch batch;
Bucket bucket;
@Value
public static class Batch {
@NotBlank
String command;
}
@Value
public static class WsRest {
@NotNull
URL baseUrl;
}
@Value
public static class Bucket {
@NotNull
URL url;
@NotBlank
String bucketName;
String accessKey;
String secretKey;
}
}
package fr.gouv.clea.integrationtests.config;
import lombok.AllArgsConstructor;
import lombok.Value;
import javax.validation.Valid;
import java.util.List;
@Value
@Valid
@AllArgsConstructor
public class CleaBatchProperties {
List<String> command;
int timeoutInSeconds;
}
package fr.gouv.clea.integrationtests.config;
import lombok.AllArgsConstructor;
import lombok.Value;
import javax.validation.Valid;
import javax.validation.constraints.NotNull;
import java.net.URL;
@Value
@Valid
@AllArgsConstructor
public class CleaWsRestProperties {
@NotNull
URL baseUrl;
}
package fr.gouv.clea.integrationtests.config;
import lombok.Data;
import lombok.Value;
import lombok.extern.slf4j.Slf4j;
import org.springframework.boot.context.properties.ConfigurationProperties;
import org.springframework.boot.context.properties.ConstructorBinding;
@Slf4j
@Data
@ConstructorBinding
@ConfigurationProperties("minio")
public class MinioProperties {
private final MinioAccessProperties access;
private final String url;
private final String bucketName;
private final String defaultBaseFolder;
@Value
static class MinioAccessProperties {
String name;
String secret;
}
}
package fr.gouv.clea.integrationtests.feature;
package fr.gouv.clea.integrationtests.cucumber;
import io.cucumber.java.Before;
import lombok.RequiredArgsConstructor;
@@ -14,7 +14,7 @@ public class CucumberHooks {
private final RestHighLevelClient esClient;
@Before
public void emptyIndices() throws IOException {
public void clearElasticIndices() throws IOException {
esClient.indices().delete(new DeleteIndexRequest("*"), RequestOptions.DEFAULT);
}
}
package fr.gouv.clea.integrationtests.cucumber;
import fr.gouv.clea.integrationtests.repository.model.LocationStat;
import fr.gouv.clea.integrationtests.repository.model.ReportStat;
import io.cucumber.java.DataTableType;
import org.ocpsoft.prettytime.nlp.PrettyTimeParser;
import java.util.Map;
import static java.lang.Integer.parseInt;
import static org.assertj.core.api.Assertions.assertThat;
public class DataDableConverters {
private final PrettyTimeParser prettyTimeParser = new PrettyTimeParser();
@DataTableType
public LocationStat toLocationStat(Map<String, String> entry) {
assertThat(entry)
.containsKeys(
"venue type", "venue category1", "venue category2", "backward visits", "forward visits",
"period start"
);
final var periodStart = prettyTimeParser.parse(entry.get("period start"))
.get(0)
.toInstant();
return LocationStat.builder()
.id(null)
.periodStart(periodStart)
.venueType(parseInt(entry.get("venue type")))
.venueCategory1(parseInt(entry.get("venue category1")))
.venueCategory2(parseInt(entry.get("venue category2")))
.backwardVisits(parseInt(entry.get("backward visits")))
.forwardVisits(parseInt(entry.get("forward visits")))
.build();
}
@DataTableType
public ReportStat toReportStat(Map<String, String> entry) {
assertThat(entry)
.containsKeys("reported", "rejected", "close", "backwards", "forwards");
return ReportStat.builder()
.id(null)
.reported(parseInt(entry.get("reported")))
.rejected(parseInt(entry.get("rejected")))
.close(parseInt(entry.get("close")))
.backwards(parseInt(entry.get("backwards")))
.forwards(parseInt(entry.get("forwards")))
.build();
}
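// Illustration only (an assumption, not part of the original file): with the
// @DataTableType converters above, a step definition can receive the rows of a
// Gherkin table as typed objects directly, e.g. a hypothetical step such as:
//
//   @Then("the location statistics are")
//   public void assertLocationStats(java.util.List<LocationStat> expected) {
//       // compare "expected" with the documents indexed in Elasticsearch
//   }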