From dd685fd849e28de84933d033c97d75d2eb7dbe73 Mon Sep 17 00:00:00 2001
From: TCHERNIATINSKY <philippe.tcherniatinsky@inrae.fr>
Date: Wed, 2 Mar 2022 18:06:25 +0100
Subject: [PATCH 1/3] Add uniqueness constraint on data rows
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

- add a "uniqueness" section to the yaml and the associated model objects
- validate the section and check that the variableComponentKeys it lists actually exist
- add the uniqueness field to the database as part of a unique constraint (together with datatype and datagroup)
- change the upsert so it conflicts on this unique key
- check uniqueness in a dedicated method

When the uniqueness section is empty, uniqueness is computed over the dataValues.

Still missing: migrating the stored uniqueness values when a yaml upgrade changes the uniqueness definition.
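
For reference, the new section is declared per data type as the list of the variable/component
pairs that make up the key; a minimal sketch (names are purely illustrative, borrowed from the
documentation example):

    dataTypes:
      mon_datatype:
        uniqueness:
          - variable: date
            component: value
          - variable: site
            component: chemin

At the database level this key is backed by the unique constraint on
(dataType, datagroup, uniqueness) and by the ON CONFLICT clause of the upsert.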
---
 .gitlab-ci.yml                                |  12 +
 Documentation_fichier_Yaml.md                 |  30 ++
 pom.xml                                       |  44 +-
 .../fr/inra/oresing/model/Configuration.java  |   1 +
 src/main/java/fr/inra/oresing/model/Data.java |   2 +
 .../oresing/model/VariableComponentKey.java   |   3 +
 .../persistence/BinaryFileRepository.java     |  24 +-
 .../oresing/persistence/DataRepository.java   |  21 +-
 ...InApplicationSchemaRepositoryTemplate.java |   2 +-
 .../JsonTableRepositoryTemplate.java          |   7 +
 .../inra/oresing/persistence/Uniqueness.java  |  18 +
 .../rest/ApplicationConfigurationService.java |  46 +-
 .../rest/ConfigurationParsingResult.java      |   8 +
 .../fr/inra/oresing/rest/OreSiService.java    | 222 +++++---
 .../migration/application/V1__init_schema.sql |  27 +-
 .../java/fr/inra/oresing/rest/ACBB_TEST.java  |   4 +
 .../ApplicationConfigurationServiceTest.java  |  63 ++-
 .../java/fr/inra/oresing/rest/Fixtures.java   |   1 +
 .../oresing/rest/HAUTE_FREQUENCE_TEST.java    |   4 +
 .../java/fr/inra/oresing/rest/ORE_TEST.java   |   4 +
 .../inra/oresing/rest/OreSiResourcesTest.java | 234 ++++----
 src/test/resources/data/acbb/acbb.yaml        |  42 +-
 .../duplication/data_with_duplicateds.csv     |   6 +
 .../data/duplication/duplication.yaml         |   5 +
 .../data/hautefrequence/hautefrequence.yaml   |  28 +-
 .../resources/data/monsore/compare/export.csv | 294 +++++-----
 src/test/resources/data/monsore/data-pem.csv  |  36 +-
 .../data/monsore/manche-scarff-p1-pem.csv     |  36 +-
 .../data/monsore/monsore-with-repository.yaml | 297 ++++++-----
 src/test/resources/data/monsore/monsore.yaml  | 291 +++++-----
 src/test/resources/data/olac/olac.yaml        | 113 +++-
 .../resources/data/validation/fake-app.yaml   |   7 +
 ui2/src/locales/en.json                       | 501 +++++++++---------
 ui2/src/locales/fr.json                       |   8 +-
 34 files changed, 1434 insertions(+), 1007 deletions(-)
 create mode 100644 src/main/java/fr/inra/oresing/persistence/Uniqueness.java
 create mode 100644 src/test/java/fr/inra/oresing/rest/ACBB_TEST.java
 create mode 100644 src/test/java/fr/inra/oresing/rest/HAUTE_FREQUENCE_TEST.java
 create mode 100644 src/test/java/fr/inra/oresing/rest/ORE_TEST.java
 create mode 100644 src/test/resources/data/duplication/data_with_duplicateds.csv

diff --git a/.gitlab-ci.yml b/.gitlab-ci.yml
index 7bcd098b4..cb8333e87 100644
--- a/.gitlab-ci.yml
+++ b/.gitlab-ci.yml
@@ -42,6 +42,18 @@ maven_test_rest_1:
   script: mvn --batch-mode clean test -Ptest_rest_1
   tags:
     - docker
+maven_test_rest_1_ACBB:
+  image: maven:3.6.3-openjdk-11
+  stage: test
+  script: mvn --batch-mode clean test -Ptest_rest_1_ACBB
+  tags:
+    - docker
+maven_test_rest_1_Haute_Frequence:
+  image: maven:3.6.3-openjdk-11
+  stage: test
+  script: mvn --batch-mode clean test -Ptest_rest_1_Haute_Frequence
+  tags:
+    - docker
 maven_test_rest_2:
   image: maven:3.6.3-openjdk-11
   stage: test
diff --git a/Documentation_fichier_Yaml.md b/Documentation_fichier_Yaml.md
index 62f46a8a9..410f4fb07 100644
--- a/Documentation_fichier_Yaml.md
+++ b/Documentation_fichier_Yaml.md
@@ -490,6 +490,36 @@ La partie validation peut être utilisée pour vérifier le contenu d'une colonn
 
 <span style="color: orange">*validations* est indenté de 2. </span>
 
+##### Declaring the uniqueness constraint
+This section declares what makes a row of a data file unique (a uniqueness constraint in the database sense).
+
+The constraint is declared in the _uniqueness_ section of the data type as the list of the _variable components_ that make up the key.
+
+A file that contains duplicate rows, either within itself or with rows already stored in the database, is rejected.
+
+The constraint only applies across files of the same data type.
+
+Example of a constraint built on three variable components:
+
+``` yaml
+dataTypes:
+  mon_datatype:
+    uniqueness:
+      - variable: projet
+        component: value
+      - variable: site
+        component: chemin
+      - variable: date
+        component: value
+```
+
 ##### *authorization* porte bien son nom c'est là qu'on définira les autorisations d'accès aux données :
 Authorization permet de définir des groupes de valeurs. Une ligne du fichier est découpée en autant de ligne que de 
 *dataGroups* et contient un *authorizationScope* et un *timeScope*.
diff --git a/pom.xml b/pom.xml
index 3ea5f5ea1..e319ad0f9 100644
--- a/pom.xml
+++ b/pom.xml
@@ -327,11 +327,47 @@
                             <testSourceDirectory>src/test</testSourceDirectory>
                             <testFailureIgnore>false</testFailureIgnore>
                             <includes>
-                                <include>fr/inra/oresing/rest/**Test.java</include>
+                                <include>fr/inra/oresing/rest/ApplicationConfigurationServiceTest.java</include>
+                                <include>fr/inra/oresing/rest/Migration.java</include>
+                                <include>fr/inra/oresing/rest/MultiYaml.java</include>
+                                <include>fr/inra/oresing/rest/RelationalService.java</include>
                             </includes>
-                            <excludes>
-                                <exclude>fr/inra/oresing/rest/AuthorizationResourcesTest.java</exclude>
-                            </excludes>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+        <profile>
+            <id>test_rest_1_Haute_Frequence</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <configuration>
+                            <forkCount>4</forkCount>
+                            <reuseForks>false</reuseForks>
+                            <testSourceDirectory>src/test</testSourceDirectory>
+                            <testFailureIgnore>false</testFailureIgnore>
+                            <groups>fr.inra.oresing.rest.HAUTE_FREQUENCE_TEST</groups>
+                        </configuration>
+                    </plugin>
+                </plugins>
+            </build>
+        </profile>
+        <profile>
+            <id>test_rest_1_ACBB</id>
+            <build>
+                <plugins>
+                    <plugin>
+                        <groupId>org.apache.maven.plugins</groupId>
+                        <artifactId>maven-surefire-plugin</artifactId>
+                        <configuration>
+                            <forkCount>4</forkCount>
+                            <reuseForks>false</reuseForks>
+                            <testSourceDirectory>src/test</testSourceDirectory>
+                            <testFailureIgnore>false</testFailureIgnore>
+                            <groups>fr.inra.oresing.rest.ACBB_TEST</groups>
                         </configuration>
                     </plugin>
                 </plugins>
diff --git a/src/main/java/fr/inra/oresing/model/Configuration.java b/src/main/java/fr/inra/oresing/model/Configuration.java
index 083e7ea74..84c7552e5 100644
--- a/src/main/java/fr/inra/oresing/model/Configuration.java
+++ b/src/main/java/fr/inra/oresing/model/Configuration.java
@@ -132,6 +132,7 @@ public class Configuration {
         FormatDescription format;
         LinkedHashMap<String, ColumnDescription> data = new LinkedHashMap<>();
         LinkedHashMap<String, LineValidationRuleDescription> validations = new LinkedHashMap<>();
+        List<VariableComponentKey> uniqueness = new LinkedList<>();
         TreeMap<Integer, List<MigrationDescription>> migrations = new TreeMap<>();
         AuthorizationDescription authorization;
         LinkedHashMap<String, String> repository = null;
diff --git a/src/main/java/fr/inra/oresing/model/Data.java b/src/main/java/fr/inra/oresing/model/Data.java
index cbbbaecbf..cae749c7a 100644
--- a/src/main/java/fr/inra/oresing/model/Data.java
+++ b/src/main/java/fr/inra/oresing/model/Data.java
@@ -4,6 +4,7 @@ import lombok.Getter;
 import lombok.Setter;
 import lombok.ToString;
 
+import java.util.List;
 import java.util.Map;
 import java.util.UUID;
 
@@ -18,4 +19,5 @@ public class Data extends OreSiEntity {
     private Map<String, Map<String, UUID>> refsLinkedTo;
     private Map<String, Map<String, String>> dataValues;
     private UUID binaryFile;
+    private List<String> uniqueness;
 }
\ No newline at end of file
diff --git a/src/main/java/fr/inra/oresing/model/VariableComponentKey.java b/src/main/java/fr/inra/oresing/model/VariableComponentKey.java
index f366fe13e..45612c252 100644
--- a/src/main/java/fr/inra/oresing/model/VariableComponentKey.java
+++ b/src/main/java/fr/inra/oresing/model/VariableComponentKey.java
@@ -20,4 +20,7 @@ public class VariableComponentKey {
     public String getId() {
         return variable + SEPARATOR + component;
     }
+    public String toSqlExtractPattern(){
+        return String.format("aggreg.datavalues #>> '{%s,%s}'%n",variable.replaceAll("'", "''"), component.replaceAll("'", "''"));
+    }
 }
\ No newline at end of file
diff --git a/src/main/java/fr/inra/oresing/persistence/BinaryFileRepository.java b/src/main/java/fr/inra/oresing/persistence/BinaryFileRepository.java
index e99949819..2d5eaa266 100644
--- a/src/main/java/fr/inra/oresing/persistence/BinaryFileRepository.java
+++ b/src/main/java/fr/inra/oresing/persistence/BinaryFileRepository.java
@@ -32,6 +32,26 @@ public class BinaryFileRepository extends JsonTableInApplicationSchemaRepository
         return find("id = :id", parameters).stream().findFirst();
     }
 
+    public Optional<BinaryFile> findPublishedVersions(BinaryFileDataset binaryFileDataset) {
+        Preconditions.checkArgument(binaryFileDataset != null);
+        String query = String.format("SELECT '%s' as \"@class\", " +
+                        "to_jsonb(t) as json " +
+                        "FROM (select id, application, name, comment, size, params from %s  " +
+                        "WHERE application = :application::uuid\n" +
+                        "and (params->>'published' )::bool\n" +
+                        "and params->'binaryfiledataset'->'requiredauthorizations'= :requiredAuthorization::jsonb) t",
+                getEntityClass().getName(), getTable().getSqlIdentifier()
+        );
+        Optional<BinaryFile> result = getNamedParameterJdbcTemplate().query(
+                query,
+                new MapSqlParameterSource()
+                        .addValue("application", getApplication().getId())
+                        .addValue("requiredAuthorization",   getJsonRowMapper().toJson(binaryFileDataset.getRequiredauthorizations())),
+                getJsonRowMapper()
+        ).stream().findFirst();
+        return result;
+    }
+
     public Optional<BinaryFile> tryFindByIdWithData(UUID id) {
         Preconditions.checkArgument(id != null);
         String query = String.format("SELECT '%s' as \"@class\", to_jsonb(t) as json FROM (select id, application, name, comment, size, convert_from(data, 'UTF8') as \"data\", params from %s  WHERE id = :id) t", getEntityClass().getName(), getTable().getSqlIdentifier());
@@ -95,7 +115,7 @@ public class BinaryFileRepository extends JsonTableInApplicationSchemaRepository
             }
         }
         if (overlap) {
-            where.add("params  #> '{\"binaryfiledataset\", \"datatype\"}' @@('$ == \""+datatype+"\"')");
+            where.add("params  #> '{\"binaryfiledataset\", \"datatype\"}' @@('$ == \"" + datatype + "\"')");
             where.add("params @@ ('$.published==true')");
             String t = "(tsrange(\n" +
                     "\tcoalesce((params #>> '{\"binaryfiledataset\", \"from\"}'), '-infinity')::timestamp,\n" +
@@ -105,7 +125,7 @@ public class BinaryFileRepository extends JsonTableInApplicationSchemaRepository
                     "(tsrange(\n" +
                     "\tcoalesce((params #>> '{\"binaryfiledataset\", \"from\"}'), '-infinity')::timestamp,\n" +
                     "\tcoalesce((params #>> '{\"binaryfiledataset\", \"to\"}'), 'infinity')::timestamp\n" +
-                    "\t) != tsrange(coalesce(:from::timestamp, '-infinity')::timestamp, coalesce(:to::timestamp, 'infinity')::timestamp))\n" ;
+                    "\t) != tsrange(coalesce(:from::timestamp, '-infinity')::timestamp, coalesce(:to::timestamp, 'infinity')::timestamp))\n";
             where.add(t);
             mapSqlParameterSource.addValue("from", binaryFileDataset.getFrom());
             mapSqlParameterSource.addValue("to", binaryFileDataset.getTo());
diff --git a/src/main/java/fr/inra/oresing/persistence/DataRepository.java b/src/main/java/fr/inra/oresing/persistence/DataRepository.java
index 386f1d4a7..d9c11caf5 100644
--- a/src/main/java/fr/inra/oresing/persistence/DataRepository.java
+++ b/src/main/java/fr/inra/oresing/persistence/DataRepository.java
@@ -18,7 +18,7 @@ import java.util.UUID;
 @Scope(scopeName = ConfigurableBeanFactory.SCOPE_PROTOTYPE)
 public class DataRepository extends JsonTableInApplicationSchemaRepositoryTemplate<Data> {
 
-    public DataRepository(Application application) {
+     public DataRepository(Application application) {
         super(application);
     }
 
@@ -27,13 +27,17 @@ public class DataRepository extends JsonTableInApplicationSchemaRepositoryTempla
         return getSchema().data();
     }
 
+
+
     @Override
     protected String getUpsertQuery() {
-        return "INSERT INTO " + getTable().getSqlIdentifier() + "(id, application, dataType, rowId, \"authorization\", refsLinkedTo, dataValues, binaryFile) SELECT id, application, dataType, rowId, \"authorization\", refsLinkedTo, dataValues, binaryFile FROM json_populate_recordset(NULL::" + getTable().getSqlIdentifier() + ", :json::json) "
-                + " ON CONFLICT (id) DO UPDATE SET updateDate=current_timestamp, application=EXCLUDED.application, dataType=EXCLUDED.dataType, rowId=EXCLUDED.rowId, \"authorization\"=EXCLUDED.\"authorization\", refsLinkedTo=EXCLUDED.refsLinkedTo, dataValues=EXCLUDED.dataValues, binaryFile=EXCLUDED.binaryFile"
+        return "INSERT INTO " + getTable().getSqlIdentifier() + "(id, application, dataType, rowId, \"authorization\", uniqueness, refsLinkedTo, dataValues, binaryFile) \n" +
+                "SELECT id, application, dataType, rowId, \"authorization\", uniqueness,  refsLinkedTo, dataValues, binaryFile FROM json_populate_recordset(NULL::" + getTable().getSqlIdentifier() + ", :json::json) "
+                + " ON CONFLICT (dataType, datagroup, uniqueness) DO UPDATE SET id=EXCLUDED.id, updateDate=current_timestamp, application=EXCLUDED.application, dataType=EXCLUDED.dataType, rowId=EXCLUDED.rowId, \"authorization\"=EXCLUDED.\"authorization\", refsLinkedTo=EXCLUDED.refsLinkedTo, dataValues=EXCLUDED.dataValues, binaryFile=EXCLUDED.binaryFile"
                 + " RETURNING id";
     }
 
+
     @Override
     protected Class<Data> getEntityClass() {
         return Data.class;
@@ -49,7 +53,8 @@ public class DataRepository extends JsonTableInApplicationSchemaRepositoryTempla
     public int removeByFileId(UUID fileId) {
         String query = "DELETE FROM " + getTable().getSqlIdentifier() +
                 "\n  WHERE binaryfile = :binaryFile";
-        return getNamedParameterJdbcTemplate().update(query, ImmutableMap.of("binaryFile", fileId));
+        final int deletedCount = getNamedParameterJdbcTemplate().update(query, ImmutableMap.of("binaryFile", fileId));
+        return deletedCount;
     }
 
     public String getSqlToMergeData(DownloadDatasetQuery downloadDatasetQuery) {
@@ -82,4 +87,12 @@ public class DataRepository extends JsonTableInApplicationSchemaRepositoryTempla
         int count = getNamedParameterJdbcTemplate().update(sql, sqlParams);
         return count;
     }
+
+    public List<Uniqueness> findStoredUniquenessForDatatype(Application application, String dataType) {
+        String query = "select 'fr.inra.oresing.persistence.Uniqueness' as \"@class\",uniqueness as json from  " + getTable().getSqlIdentifier()
+                + "  WHERE application = :applicationId::uuid AND dataType = :dataType";
+        MapSqlParameterSource sqlParams = new MapSqlParameterSource("applicationId", getApplication().getId())
+                .addValue("dataType", dataType);
+        return getNamedParameterJdbcTemplate().query(query, sqlParams, new JsonRowMapper<Uniqueness>());
+    }
 }
\ No newline at end of file
diff --git a/src/main/java/fr/inra/oresing/persistence/JsonTableInApplicationSchemaRepositoryTemplate.java b/src/main/java/fr/inra/oresing/persistence/JsonTableInApplicationSchemaRepositoryTemplate.java
index 7724e8249..30daec5ee 100644
--- a/src/main/java/fr/inra/oresing/persistence/JsonTableInApplicationSchemaRepositoryTemplate.java
+++ b/src/main/java/fr/inra/oresing/persistence/JsonTableInApplicationSchemaRepositoryTemplate.java
@@ -21,4 +21,4 @@ abstract class JsonTableInApplicationSchemaRepositoryTemplate<T extends OreSiEnt
     protected Application getApplication() {
         return application;
     }
-}
+}
\ No newline at end of file
diff --git a/src/main/java/fr/inra/oresing/persistence/JsonTableRepositoryTemplate.java b/src/main/java/fr/inra/oresing/persistence/JsonTableRepositoryTemplate.java
index fcd7c5902..35b4e53df 100644
--- a/src/main/java/fr/inra/oresing/persistence/JsonTableRepositoryTemplate.java
+++ b/src/main/java/fr/inra/oresing/persistence/JsonTableRepositoryTemplate.java
@@ -10,6 +10,7 @@ import org.springframework.jdbc.core.namedparam.EmptySqlParameterSource;
 import org.springframework.jdbc.core.namedparam.MapSqlParameterSource;
 import org.springframework.jdbc.core.namedparam.NamedParameterJdbcTemplate;
 import org.springframework.jdbc.core.namedparam.SqlParameterSource;
+import org.springframework.transaction.support.TransactionTemplate;
 
 import java.util.List;
 import java.util.NoSuchElementException;
@@ -18,6 +19,8 @@ import java.util.UUID;
 import java.util.stream.Stream;
 
 abstract class JsonTableRepositoryTemplate<T extends OreSiEntity> implements InitializingBean {
+    @Autowired
+    private TransactionTemplate transactionTemplate;
 
     @Autowired
     private JsonRowMapper<T> jsonRowMapper;
@@ -117,4 +120,8 @@ abstract class JsonTableRepositoryTemplate<T extends OreSiEntity> implements Ini
         List<T> result = namedParameterJdbcTemplate.query(query, sqlParameterSource, getJsonRowMapper());
         return result;
     }
+
+    public void flush() {
+        transactionTemplate.getTransactionManager().getTransaction(transactionTemplate).flush();
+    }
 }
\ No newline at end of file
diff --git a/src/main/java/fr/inra/oresing/persistence/Uniqueness.java b/src/main/java/fr/inra/oresing/persistence/Uniqueness.java
new file mode 100644
index 000000000..54280e0a7
--- /dev/null
+++ b/src/main/java/fr/inra/oresing/persistence/Uniqueness.java
@@ -0,0 +1,18 @@
+package fr.inra.oresing.persistence;
+
+import lombok.Value;
+
+import java.util.HashMap;
+import java.util.List;
+
+/**
+ * Represents the uniqueness key of a data row, i.e. the values of the variable
+ * components declared in the <code>uniqueness</code> section of the data type,
+ * as stored in the <code>uniqueness</code> jsonb column of the data table.
+ *
+ * Two rows of the same data type and data group that share the same key are
+ * considered duplicates.
+ */
+@Value
+public class Uniqueness extends HashMap<String, List<String>> {
+}
\ No newline at end of file
diff --git a/src/main/java/fr/inra/oresing/rest/ApplicationConfigurationService.java b/src/main/java/fr/inra/oresing/rest/ApplicationConfigurationService.java
index 98b3a1a15..3cc6ded58 100644
--- a/src/main/java/fr/inra/oresing/rest/ApplicationConfigurationService.java
+++ b/src/main/java/fr/inra/oresing/rest/ApplicationConfigurationService.java
@@ -31,17 +31,8 @@ import org.springframework.stereotype.Component;
 import org.springframework.web.multipart.MultipartFile;
 
 import java.io.IOException;
-import java.util.Arrays;
-import java.util.Collection;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
+import java.util.*;
+import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
@@ -90,7 +81,8 @@ public class ApplicationConfigurationService {
             return internationalizationMap;
         }
     }
-    ConfigurationParsingResult unzipConfiguration(MultipartFile file){
+
+    ConfigurationParsingResult unzipConfiguration(MultipartFile file) {
         return null;
     }
 
@@ -181,6 +173,7 @@ public class ApplicationConfigurationService {
             verifyDatatypeCheckerReferenceRefersToExistingReference(builder, references, dataType, dataTypeDescription);
             verifyDatatypeCheckerGroovyExpressionExistsAndCanCompile(builder, dataTypeDescription);
             verifyInternationalizedColumnsExistsForPatternInDatatype(configuration, builder, dataType, dataTypeDescription);
+            verifyUniquenessComponentKeysInDatatype(configuration, dataType, dataTypeDescription, builder);
 
             Configuration.AuthorizationDescription authorization = dataTypeDescription.getAuthorization();
             Set<String> variables = dataTypeDescription.getData().keySet();
@@ -578,6 +571,22 @@ public class ApplicationConfigurationService {
         }
     }
 
+    private void verifyUniquenessComponentKeysInDatatype(Configuration configuration, String dataType, Configuration.DataTypeDescription dataTypeDescription, ConfigurationParsingResult.Builder builder) {
+        final List<VariableComponentKey> uniqueness = dataTypeDescription.getUniqueness();
+        final Set<String> availableVariableComponents = dataTypeDescription.getData().entrySet().stream()
+                .map(entry -> entry.getValue().getComponents().keySet().stream()
+                        .map(componentName -> new VariableComponentKey(entry.getKey(), componentName).getId()))
+                .flatMap(Function.identity())
+                .collect(Collectors.<String>toSet());
+        Set<String> variableComponentsKeyInUniqueness = uniqueness.stream()
+                .map(variableComponentKey -> variableComponentKey.getId())
+                .collect(Collectors.toSet());
+        ImmutableSet<String> unknownUsedAsVariableComponentUniqueness = Sets.difference(variableComponentsKeyInUniqueness, availableVariableComponents).immutableCopy();
+        if (!unknownUsedAsVariableComponentUniqueness.isEmpty()) {
+            builder.recordUnknownUsedAsVariableComponentUniqueness(dataType, unknownUsedAsVariableComponentUniqueness, availableVariableComponents);
+        }
+    }
+
     private void verifyInternationalizedColumnsExists(Configuration configuration, ConfigurationParsingResult.Builder builder, Map.Entry<String, Configuration.ReferenceDescription> referenceEntry) {
         String reference = referenceEntry.getKey();
         Configuration.ReferenceDescription referenceDescription = referenceEntry.getValue();
@@ -617,17 +626,17 @@ public class ApplicationConfigurationService {
             ImmutableSet<String> variableComponentCheckers = ImmutableSet.of("Date", "Float", "Integer", "RegularExpression", "Reference");
             String columns = checker.getParams().getColumns();
             Set<String> groovyColumn = Optional.ofNullable(checker)
-                    .map(check->check.getParams())
-                    .filter(params->params.getGroovy() != null)
-                    .map(params-> MoreObjects.firstNonNull(params.getColumns(), ""))
+                    .map(check -> check.getParams())
+                    .filter(params -> params.getGroovy() != null)
+                    .map(params -> MoreObjects.firstNonNull(params.getColumns(), ""))
 
                     // autant mettre une collection dans le YAML directement
-                    .map(values-> values.split(","))
-                    .map(values-> Arrays.stream(values).collect(Collectors.toSet()))
+                    .map(values -> values.split(","))
+                    .map(values -> Arrays.stream(values).collect(Collectors.toSet()))
                     .orElse(Set.of());
 
             if (GroovyLineChecker.NAME.equals(checker.getName())) {
-                String expression =Optional.of(checker)
+                String expression = Optional.of(checker)
                         .map(Configuration.CheckerDescription::getParams)
                         .map(Configuration.CheckerConfigurationDescription::getGroovy)
                         .map(GroovyConfiguration::getExpression)
@@ -734,4 +743,5 @@ public class ApplicationConfigurationService {
         int version;
     }
 
+
 }
\ No newline at end of file
diff --git a/src/main/java/fr/inra/oresing/rest/ConfigurationParsingResult.java b/src/main/java/fr/inra/oresing/rest/ConfigurationParsingResult.java
index 8d5cf59e9..7e22773ee 100644
--- a/src/main/java/fr/inra/oresing/rest/ConfigurationParsingResult.java
+++ b/src/main/java/fr/inra/oresing/rest/ConfigurationParsingResult.java
@@ -271,6 +271,14 @@ public class ConfigurationParsingResult {
             ));
         }
 
+        public Builder recordUnknownUsedAsVariableComponentUniqueness(String dataType, Set<String> unknownUsedAsVariableComponentUniqueness, Set<String> availableVariableComponents) {
+            return recordError("unknownUsedAsVariableComponentUniqueness", ImmutableMap.of(
+                    "dataType", dataType,
+                    "unknownUsedAsVariableComponentUniqueness", unknownUsedAsVariableComponentUniqueness,
+                    "availableVariableComponents", availableVariableComponents
+            ));
+        }
+
         public Builder recordInvalidInternationalizedColumnsForDataType(String dataType, String reference, Set<String> unknownUsedAsKeyInternationalizedColumns, Set<String> knownColumns) {
             return recordError("invalidInternationalizedColumnsForDataType", ImmutableMap.of(
                     "dataType", dataType,
diff --git a/src/main/java/fr/inra/oresing/rest/OreSiService.java b/src/main/java/fr/inra/oresing/rest/OreSiService.java
index 4b74d5f77..3b301bfdf 100644
--- a/src/main/java/fr/inra/oresing/rest/OreSiService.java
+++ b/src/main/java/fr/inra/oresing/rest/OreSiService.java
@@ -3,60 +3,16 @@ package fr.inra.oresing.rest;
 import com.google.common.base.Charsets;
 import com.google.common.base.Preconditions;
 import com.google.common.base.Predicate;
-import com.google.common.collect.BiMap;
-import com.google.common.collect.HashBiMap;
-import com.google.common.collect.HashMultimap;
-import com.google.common.collect.ImmutableList;
-import com.google.common.collect.ImmutableMap;
-import com.google.common.collect.ImmutableMultiset;
-import com.google.common.collect.ImmutableSet;
-import com.google.common.collect.ImmutableSetMultimap;
-import com.google.common.collect.ImmutableSortedSet;
-import com.google.common.collect.Iterators;
-import com.google.common.collect.Maps;
-import com.google.common.collect.MoreCollectors;
-import com.google.common.collect.Ordering;
-import com.google.common.collect.SetMultimap;
-import com.google.common.collect.Sets;
+import com.google.common.collect.*;
 import com.google.common.primitives.Ints;
 import fr.inra.oresing.OreSiTechnicalException;
-import fr.inra.oresing.checker.CheckerFactory;
-import fr.inra.oresing.checker.DateLineChecker;
-import fr.inra.oresing.checker.FloatChecker;
-import fr.inra.oresing.checker.IntegerChecker;
-import fr.inra.oresing.checker.InvalidDatasetContentException;
-import fr.inra.oresing.checker.LineChecker;
-import fr.inra.oresing.checker.ReferenceLineChecker;
-import fr.inra.oresing.checker.ReferenceLineCheckerConfiguration;
+import fr.inra.oresing.checker.*;
 import fr.inra.oresing.groovy.CommonExpression;
 import fr.inra.oresing.groovy.Expression;
 import fr.inra.oresing.groovy.GroovyContextHelper;
 import fr.inra.oresing.groovy.StringGroovyExpression;
-import fr.inra.oresing.model.Application;
-import fr.inra.oresing.model.Authorization;
-import fr.inra.oresing.model.BinaryFile;
-import fr.inra.oresing.model.BinaryFileDataset;
-import fr.inra.oresing.model.Configuration;
-import fr.inra.oresing.model.Data;
-import fr.inra.oresing.model.Datum;
-import fr.inra.oresing.model.LocalDateTimeRange;
-import fr.inra.oresing.model.ReferenceColumn;
-import fr.inra.oresing.model.ReferenceColumnSingleValue;
-import fr.inra.oresing.model.ReferenceColumnValue;
-import fr.inra.oresing.model.ReferenceDatum;
-import fr.inra.oresing.model.ReferenceValue;
-import fr.inra.oresing.model.VariableComponentKey;
-import fr.inra.oresing.persistence.AuthenticationService;
-import fr.inra.oresing.persistence.BinaryFileInfos;
-import fr.inra.oresing.persistence.DataRepository;
-import fr.inra.oresing.persistence.DataRow;
-import fr.inra.oresing.persistence.Ltree;
-import fr.inra.oresing.persistence.OreSiRepository;
-import fr.inra.oresing.persistence.ReferenceValueRepository;
-import fr.inra.oresing.persistence.SqlPolicy;
-import fr.inra.oresing.persistence.SqlSchema;
-import fr.inra.oresing.persistence.SqlSchemaForApplication;
-import fr.inra.oresing.persistence.SqlService;
+import fr.inra.oresing.model.*;
+import fr.inra.oresing.persistence.*;
 import fr.inra.oresing.persistence.roles.OreSiRightOnApplicationRole;
 import fr.inra.oresing.persistence.roles.OreSiUserRole;
 import fr.inra.oresing.rest.validationcheckresults.DateValidationCheckResult;
@@ -87,24 +43,12 @@ import java.io.ByteArrayInputStream;
 import java.io.IOException;
 import java.io.InputStream;
 import java.io.StringWriter;
-import java.time.LocalDate;
-import java.time.LocalDateTime;
-import java.time.ZoneOffset;
+import java.time.Duration;
+import java.time.*;
 import java.time.format.DateTimeFormatter;
 import java.time.temporal.ChronoUnit;
-import java.util.ArrayList;
-import java.util.Collections;
-import java.util.HashMap;
-import java.util.HashSet;
-import java.util.Iterator;
-import java.util.LinkedHashMap;
-import java.util.LinkedHashSet;
-import java.util.LinkedList;
-import java.util.List;
-import java.util.Map;
-import java.util.Optional;
-import java.util.Set;
-import java.util.UUID;
+import java.util.*;
+import java.util.concurrent.atomic.AtomicLong;
 import java.util.function.Consumer;
 import java.util.function.Function;
 import java.util.regex.Matcher;
@@ -446,6 +390,12 @@ public class OreSiService {
                 unPublishVersions(app, filesToStore);
             }
             return storedFile.getId();
+        } else if (params != null && params.getBinaryfiledataset() != null) {
+            BinaryFile publishedVersion = getPublishedVersion(params, app);
+            if (publishedVersion != null && publishedVersion.getParams().published) {
+                filesToStore.add(publishedVersion);
+                unPublishVersions(app, filesToStore);
+            }
         }
         Configuration conf = app.getConfiguration();
         Configuration.DataTypeDescription dataTypeDescription = conf.getDataTypes().get(dataType);
@@ -454,7 +404,6 @@ public class OreSiService {
         publishVersion(dataType, errors, app, storedFile, dataTypeDescription, formatDescription, params == null ? null : params.binaryfiledataset);
         InvalidDatasetContentException.checkErrorsIsEmpty(errors);
         relationalService.onDataUpdate(app.getName());
-        unPublishVersions(app, filesToStore);
         storePublishedVersion(app, filesToStore, storedFile);
         filesToStore.stream()
                 .forEach(repo.getRepository(app.getId()).binaryFile()::store);
@@ -481,6 +430,7 @@ public class OreSiService {
                     f.getParams().published = false;
                     repo.getRepository(app).binaryFile().store(f);
                 });
+        repo.getRepository(app).binaryFile().flush();
     }
 
     private void publishVersion(String dataType,
@@ -502,15 +452,28 @@ public class OreSiService {
 
             ImmutableList<String> columns = readHeaderRow(linesIterator);
             readPostHeader(formatDescription, linesIterator);
-
+            UniquenessBuilder uniquenessBuilder = new UniquenessBuilder(app, dataType);
             Stream<Data> dataStream = Streams.stream(csvParser)
                     .map(buildCsvRecordToLineAsMapFn(columns))
                     .flatMap(lineAsMap -> buildLineAsMapToRecordsFn(formatDescription).apply(lineAsMap).stream())
                     .map(buildMergeLineValuesAndConstantValuesFn(constantValues))
                     .map(buildReplaceMissingValuesByDefaultValuesFn(app, dataType, binaryFileDataset == null ? null : binaryFileDataset.getRequiredauthorizations()))
-                    .flatMap(buildLineValuesToEntityStreamFn(app, dataType, storedFile.getId(), errors, binaryFileDataset));
-
-            repo.getRepository(app).data().storeAll(dataStream);
+                    .flatMap(buildLineValuesToEntityStreamFn(app, dataType, storedFile.getId(), uniquenessBuilder, errors, binaryFileDataset));
+            AtomicLong lines = new AtomicLong();
+            final Instant debut = Instant.now();
+            final DataRepository dataRepository = repo.getRepository(app).data();
+            dataRepository
+                    .storeAll(
+                            dataStream
+                                    .filter(Objects::nonNull)
+                                    .peek(k -> {
+                                        if (lines.incrementAndGet() % 1000 == 0) {
+                                            log.debug(String.format("%d %d", lines.get(), Duration.between(debut, Instant.now()).getSeconds()));
+                                        }
+                                    })
+                    );
+            errors.addAll(uniquenessBuilder.getErrors());
+            InvalidDatasetContentException.checkErrorsIsEmpty(errors);
         }
     }
 
@@ -541,6 +504,11 @@ public class OreSiService {
         return new LinkedList<>();
     }
 
+    @Nullable
+    private BinaryFile getPublishedVersion(FileOrUUID params, Application app) {
+        return repo.getRepository(app).binaryFile().findPublishedVersions(params.getBinaryfiledataset()).orElse(null);
+    }
+
     @Nullable
     private BinaryFile loadOrCreateFile(MultipartFile file, FileOrUUID params, Application app) {
         BinaryFile storedFile = Optional.ofNullable(params)
@@ -572,14 +540,15 @@ public class OreSiService {
      * @param app
      * @param dataType
      * @param fileId
+     * @param uniquenessBuilder
      * @return
      */
-    private Function<RowWithData, Stream<Data>> buildLineValuesToEntityStreamFn(Application app, String dataType, UUID fileId, List<CsvRowValidationCheckResult> errors, BinaryFileDataset binaryFileDataset) {
+    private Function<RowWithData, Stream<Data>> buildLineValuesToEntityStreamFn(Application app, String dataType, UUID fileId, UniquenessBuilder uniquenessBuilder, List<CsvRowValidationCheckResult> errors, BinaryFileDataset binaryFileDataset) {
         ImmutableSet<LineChecker> lineCheckers = checkerFactory.getLineCheckers(app, dataType);
         Configuration conf = app.getConfiguration();
         Configuration.DataTypeDescription dataTypeDescription = conf.getDataTypes().get(dataType);
 
-        return buildRowWithDataStreamFunction(app, dataType, fileId, errors, lineCheckers, dataTypeDescription, binaryFileDataset);
+        return buildRowWithDataStreamFunction(app, dataType, fileId, uniquenessBuilder, errors, lineCheckers, dataTypeDescription, binaryFileDataset);
     }
 
     /**
@@ -588,6 +557,7 @@ public class OreSiService {
      * @param app
      * @param dataType
      * @param fileId
+     * @param uniquenessBuilder
      * @param errors
      * @param lineCheckers
      * @param dataTypeDescription
@@ -597,7 +567,7 @@ public class OreSiService {
     private Function<RowWithData, Stream<Data>> buildRowWithDataStreamFunction(Application app,
                                                                                String dataType,
                                                                                UUID fileId,
-                                                                               List<CsvRowValidationCheckResult> errors,
+                                                                               UniquenessBuilder uniquenessBuilder, List<CsvRowValidationCheckResult> errors,
                                                                                ImmutableSet<LineChecker> lineCheckers,
                                                                                Configuration.DataTypeDescription dataTypeDescription,
                                                                                BinaryFileDataset binaryFileDataset) {
@@ -655,7 +625,10 @@ public class OreSiService {
             checkRequiredAuthorizationInDatasetRange(requiredAuthorizations, errors, binaryFileDataset, rowWithData.getLineNumber());
             // String rowId = Hashing.sha256().hashString(line.toString(), Charsets.UTF_8).toString();
             String rowId = UUID.randomUUID().toString();
-
+            final List<String> uniquenessValues = uniquenessBuilder.test(datum, rowWithData.getLineNumber());
+            if (uniquenessValues == null) {
+                return Stream.of((Data) null);
+            }
             Stream<Data> dataStream = dataTypeDescription.getAuthorization().getDataGroups().entrySet().stream().map(entry -> {
                 String dataGroup = entry.getKey();
                 Configuration.DataGroupDescription dataGroupDescription = entry.getValue();
@@ -676,7 +649,6 @@ public class OreSiService {
                     toStore.computeIfAbsent(variable, k -> new LinkedHashMap<>()).put(component, value);
                     refsLinkedToToStore.computeIfAbsent(variable, k -> new LinkedHashMap<>()).put(component, refsLinkedTo.get(variableComponentKey));
                 }
-
                 Data e = new Data();
                 e.setBinaryFile(fileId);
                 e.setDataType(dataType);
@@ -684,6 +656,7 @@ public class OreSiService {
                 e.setAuthorization(new Authorization(List.of(dataGroup), requiredAuthorizations, timeScope));
                 e.setApplication(app.getId());
                 e.setRefsLinkedTo(refsLinkedToToStore);
+                e.setUniqueness(uniquenessValues);
                 e.setDataValues(toStore);
                 return e;
             });
@@ -1332,4 +1305,103 @@ public class OreSiService {
         int lineNumber;
         List<Map.Entry<String, String>> columns;
     }
-}
+
+    protected class UniquenessBuilder {
+        final List<VariableComponentKey> uniquenessDescription;
+        final Map<UniquenessKeys, List<Integer>> uniquenessInFile = new TreeMap<>();
+        private String dataType;
+
+        public UniquenessBuilder(Application application, String dataType) {
+            this.uniquenessDescription = getUniquenessDescription(application, dataType);
+            this.dataType = dataType;
+        }
+
+        private List<VariableComponentKey> getUniquenessDescription(Application application, String dataType) {
+            final List<VariableComponentKey> uniqueness = application.getConfiguration().getDataTypes().get(dataType).getUniqueness();
+            if (uniqueness.isEmpty()) {
+                return application.getConfiguration().getDataTypes().get(dataType).getData().entrySet().stream()
+                        .flatMap(this::getVariableComponentKeys)
+                        .collect(Collectors.toList());
+            } else {
+                return uniqueness;
+            }
+        }
+
+        private Stream<VariableComponentKey> getVariableComponentKeys(Map.Entry<String, Configuration.ColumnDescription> entry) {
+            return entry.getValue().getComponents().keySet().stream()
+                    .map(componentName -> new VariableComponentKey(entry.getKey(), componentName));
+        }
+
+        public List<String> test(Datum datum, int lineNumber) {
+            UniquenessKeys uniquenessKeys = new UniquenessKeys(datum, uniquenessDescription);
+            uniquenessInFile.computeIfAbsent(uniquenessKeys, k -> new LinkedList<>())
+                    .add(lineNumber);
+            boolean isInError = uniquenessInFile.get(uniquenessKeys).size() > 1;
+            return isInError ? null : uniquenessKeys.getValues();
+        }
+
+        private CsvRowValidationCheckResult getErrorForEntry(Map.Entry<UniquenessKeys, List<Integer>> entry) {
+            return new CsvRowValidationCheckResult(
+                    DefaultValidationCheckResult.error(
+                            "duplicatedLineInDatatype",
+                            ImmutableMap.of(
+                                    "file", this.dataType,
+                                    "duplicatedRows", entry.getValue(),
+                                    "uniquenessKey", getUniquenessKey(entry.getKey())
+                            )
+                    ), entry.getValue().get(0));
+        }
+
+        public List<CsvRowValidationCheckResult> getErrors() {
+            return uniquenessInFile.entrySet().stream()
+                    .filter(entry -> entry.getValue().size() > 1)
+                    .map(this::getErrorForEntry)
+                    .collect(Collectors.toList());
+        }
+
+        public Map<String, String> getUniquenessKey(UniquenessKeys uniquenessKeys) {
+            Map<String, String> uniquenessKeyMap = new HashMap<>();
+            for (int i = 0; i < uniquenessDescription.size(); i++) {
+                uniquenessKeyMap.put(uniquenessDescription.get(i).getId(), uniquenessKeys.getValues().get(i));
+            }
+            return uniquenessKeyMap;
+        }
+    }
+
+    class UniquenessKeys implements Comparable<UniquenessKeys> {
+        List<String> values = new LinkedList<>();
+        List<VariableComponentKey> uniquenessDescription;
+        public UniquenessKeys(Datum datum, List<VariableComponentKey> uniquenessDescription) {
+            this.uniquenessDescription = uniquenessDescription;
+            this.values = uniquenessDescription.stream()
+                    .map(variableComponentKey -> datum.get(variableComponentKey))
+                    .collect(Collectors.toList());
+        }
+
+        public List<String> getValues() {
+            return values;
+        }
+
+        public String getKey() {
+            return values.stream().collect(Collectors.joining());
+        }
+
+        @Override
+        public boolean equals(Object o) {
+            if (this == o) return true;
+            if (o == null || getClass() != o.getClass()) return false;
+            UniquenessKeys that = (UniquenessKeys) o;
+            return Objects.equals(getKey(), that.getKey());
+        }
+
+        @Override
+        public int hashCode() {
+            // keep consistent with equals(), which compares getKey()
+            return Objects.hash(getKey());
+        }
+
+        @Override
+        public int compareTo(UniquenessKeys uniquenessKeys) {
+            return this.getKey().compareTo(uniquenessKeys.getKey());
+        }
+    }
+}
\ No newline at end of file
diff --git a/src/main/resources/migration/application/V1__init_schema.sql b/src/main/resources/migration/application/V1__init_schema.sql
index 47f954f3f..5270b7fed 100644
--- a/src/main/resources/migration/application/V1__init_schema.sql
+++ b/src/main/resources/migration/application/V1__init_schema.sql
@@ -57,9 +57,11 @@ BEGIN
                   into result
                   from unnest("authorizedArray") authorized
                   where ${requiredauthorizationscomparing}
-                  ((("authorized").datagroup= array[]::TEXT[]) or ((authorized).datagroup @> COALESCE(("authorization").datagroup, array[]::TEXT[])))
-                  and ((("authorized").timescope =  '(,)'::tsrange ) or (authorized).timescope @> COALESCE(("authorization").timescope, '[,]'::tsrange))
-        );
+                  ((("authorized").datagroup = array []::TEXT[]) or
+                   ((authorized).datagroup @> COALESCE(("authorization").datagroup, array []::TEXT[])))
+                      and ((("authorized").timescope = '(,)'::tsrange) or
+                           (authorized).timescope @> COALESCE(("authorization").timescope, '[,]'::tsrange))
+               );
     return result;
 END;
 $$ language 'plpgsql';
@@ -76,17 +78,22 @@ create table Data
     creationDate    DateOrNow,
     updateDate      DateOrNow,
     application     EntityRef REFERENCES Application (id),
-    dataType        TEXT CHECK (name_check(application, 'dataType', dataType)),
+    dataType        TEXT
+        constraint name_check CHECK (name_check(application, 'dataType', dataType)),
     rowId           TEXT                                                             NOT NULL,
     datagroup       TEXT GENERATED ALWAYS AS (("authorization").datagroup[1]) STORED NOT NULL,
     "authorization" ${applicationSchema}.authorization                               NOT NULL check (("authorization").datagroup[1] is not null),
-    refsLinkedTo    jsonb check (refs_check_for_datatype('${applicationSchema}', application, refsLinkedTo,
-                                                         datatype)),
+    refsLinkedTo    jsonb
+        constraint refs_check_for_datatype check (refs_check_for_datatype('${applicationSchema}', application,
+                                                                          refsLinkedTo,
+                                                                          datatype)),
+    uniqueness      jsonb,
     dataValues      jsonb,
-    binaryFile      EntityRef REFERENCES BinaryFile (id)
+    binaryFile      EntityRef REFERENCES BinaryFile (id),
+    constraint refs_check_for_datatype_uniqueness unique (dataType, datagroup, uniqueness)
 );
-CREATE INDEX data_refslinkedto_index ON Data USING gin (refsLinkedTo);
-CREATE INDEX data_refvalues_index ON Data USING gin (dataValues);
+CREATE INDEX data_refslinkedto_index ON Data USING gin (refsLinkedTo jsonb_path_ops);
+CREATE INDEX data_refvalues_index ON Data USING gin (dataValues jsonb_path_ops);
 
 ALTER TABLE Data
     ADD CONSTRAINT row_uniqueness UNIQUE (rowId, dataGroup);
@@ -116,4 +123,4 @@ GRANT SELECT, INSERT, UPDATE, DELETE, REFERENCES ON OreSiAuthorization TO public
 --ALTER TABLE BinaryFile ENABLE ROW LEVEL SECURITY;
 --ALTER TABLE ReferenceValue ENABLE ROW LEVEL SECURITY;
 ALTER TABLE Data
-    ENABLE ROW LEVEL SECURITY;
+    ENABLE ROW LEVEL SECURITY;
\ No newline at end of file
diff --git a/src/test/java/fr/inra/oresing/rest/ACBB_TEST.java b/src/test/java/fr/inra/oresing/rest/ACBB_TEST.java
new file mode 100644
index 000000000..36f4af010
--- /dev/null
+++ b/src/test/java/fr/inra/oresing/rest/ACBB_TEST.java
@@ -0,0 +1,4 @@
+package fr.inra.oresing.rest;
+
+class ACBB_TEST {
+}
\ No newline at end of file
diff --git a/src/test/java/fr/inra/oresing/rest/ApplicationConfigurationServiceTest.java b/src/test/java/fr/inra/oresing/rest/ApplicationConfigurationServiceTest.java
index f339d17e5..5794ea2b9 100644
--- a/src/test/java/fr/inra/oresing/rest/ApplicationConfigurationServiceTest.java
+++ b/src/test/java/fr/inra/oresing/rest/ApplicationConfigurationServiceTest.java
@@ -66,7 +66,7 @@ public class ApplicationConfigurationServiceTest {
             byte[] bytes = in.readAllBytes();
             ConfigurationParsingResult configurationParsingResult = service.parseConfigurationBytes(bytes);
             log.debug("résultat de la validation de " + resource + " = " + configurationParsingResult);
-            Assert.assertTrue(resource + " doit être reconnu comme un fichier valide",configurationParsingResult.isValid());
+            Assert.assertTrue(resource + " doit être reconnu comme un fichier valide", configurationParsingResult.isValid());
         } catch (IOException e) {
             throw new OreSiTechnicalException("ne peut pas lire le fichier de test " + resource, e);
         }
@@ -97,20 +97,37 @@ public class ApplicationConfigurationServiceTest {
 
     @Test
     public void testMissingReferenceForChecker() {
-        ConfigurationParsingResult configurationParsingResult = parseYaml("refType: sites","");
+        ConfigurationParsingResult configurationParsingResult = parseYaml("refType: sites", "");
         Assert.assertFalse(configurationParsingResult.isValid());
         List<ValidationCheckResult> validationCheckResults = configurationParsingResult.getValidationCheckResults();
         ValidationCheckResult missingReferenceForChecker = Iterables.find(validationCheckResults, vcr -> "missingReferenceForChecker".equals(vcr.getMessage()));
         ValidationCheckResult authorizationScopeVariableComponentReftypeNull = Iterables.find(validationCheckResults, vcr -> "authorizationScopeVariableComponentReftypeNull".equals(vcr.getMessage()));
 
-        Assert.assertEquals(true, missingReferenceForChecker!=null);
-        Assert.assertEquals(true, authorizationScopeVariableComponentReftypeNull!=null);
+        Assert.assertEquals(true, missingReferenceForChecker != null);
+        Assert.assertEquals(true, authorizationScopeVariableComponentReftypeNull != null);
+    }
+
+    @Test
+    public void testMissingVariableComponentForUniqueness() {
+        ConfigurationParsingResult configurationParsingResult = parseYaml(
+                "- variable: date\n" +
+                        "        component: day",
+                "- variable: date\n" +
+                        "        component: jour"
+        );
+        Assert.assertFalse(configurationParsingResult.isValid());
+        List<ValidationCheckResult> validationCheckResults = configurationParsingResult.getValidationCheckResults();
+        ValidationCheckResult unknownUsedAsVariableComponentUniquenessError = Iterables.find(validationCheckResults, vcr -> "unknownUsedAsVariableComponentUniqueness".equals(vcr.getMessage()));
+
+        final Set<String> unknownUsedAsVariableComponentUniqueness = (Set<String>) unknownUsedAsVariableComponentUniquenessError.getMessageParams().get("unknownUsedAsVariableComponentUniqueness");
+        Assert.assertEquals(true, unknownUsedAsVariableComponentUniqueness != null);
+        Assert.assertEquals(true, unknownUsedAsVariableComponentUniqueness.contains("date_jour"));
     }
 
     @Test
     public void testMissingInternationalizedColumn() {
         ConfigurationParsingResult configurationParsingResult = parseYaml("internationalizedColumns:\n" +
-                "      nom du projet_key:","internationalizedColumns:\n" +
+                "      nom du projet_key:", "internationalizedColumns:\n" +
                 "      nom du projet_unknown:");
         Assert.assertFalse(configurationParsingResult.isValid());
         ValidationCheckResult onlyError = Iterables.getOnlyElement(configurationParsingResult.getValidationCheckResults());
@@ -120,14 +137,14 @@ public class ApplicationConfigurationServiceTest {
 
     @Test
     public void testUnknownReferenceForChecker() {
-        ConfigurationParsingResult configurationParsingResult = parseYaml("refType: sites","refType: sitee");
+        ConfigurationParsingResult configurationParsingResult = parseYaml("refType: sites", "refType: sitee");
         Assert.assertFalse(configurationParsingResult.isValid());
         List<ValidationCheckResult> validationCheckResults = configurationParsingResult.getValidationCheckResults();
         ValidationCheckResult unknownReferenceForChecker = Iterables.find(validationCheckResults, vcr -> "unknownReferenceForChecker".equals(vcr.getMessage()));
         ValidationCheckResult authorizationScopeVariableComponentReftypeUnknown = Iterables.find(validationCheckResults, vcr -> "authorizationScopeVariableComponentReftypeUnknown".equals(vcr.getMessage()));
 
-        Assert.assertEquals(true, unknownReferenceForChecker!=null);
-        Assert.assertEquals(true, authorizationScopeVariableComponentReftypeUnknown!=null);
+        Assert.assertEquals(true, unknownReferenceForChecker != null);
+        Assert.assertEquals(true, authorizationScopeVariableComponentReftypeUnknown != null);
     }
 
     @Test
@@ -215,7 +232,7 @@ public class ApplicationConfigurationServiceTest {
     @Test
     public void testVariableInMultipleDataGroup() {
         ConfigurationParsingResult configurationParsingResult = parseYaml("data:\n" +
-                "            - Couleur des individus","data:\n" +
+                "            - Couleur des individus", "data:\n" +
                 "            - localization\n" +
                 "            - Couleur des individus");
         Assert.assertFalse(configurationParsingResult.isValid());
@@ -227,7 +244,7 @@ public class ApplicationConfigurationServiceTest {
     @Test
     public void testRecordInvalidKeyColumns() {
         ConfigurationParsingResult configurationParsingResult = parseYaml("columns:\n" +
-                "      nom du projet_key:","columns:\n" +
+                "      nom du projet_key:", "columns:\n" +
                 "      nom du Projet_key:");
         Assert.assertFalse(configurationParsingResult.isValid());
         long count = configurationParsingResult.getValidationCheckResults()
@@ -243,7 +260,7 @@ public class ApplicationConfigurationServiceTest {
         ConfigurationParsingResult configurationParsingResult = parseYaml("component: site\n" +
                 "      timeScope:\n" +
                 "        variable: date\n" +
-                "        component: day","component: site\n");
+                "        component: day", "component: site\n");
         Assert.assertFalse(configurationParsingResult.isValid());
         ValidationCheckResult onlyError = Iterables.getOnlyElement(configurationParsingResult.getValidationCheckResults());
         log.debug(onlyError.getMessage());
@@ -254,7 +271,7 @@ public class ApplicationConfigurationServiceTest {
     public void testTimeScopeVariableComponentKeyMissingVariable() {
         ConfigurationParsingResult configurationParsingResult = parseYaml("timeScope:\n" +
                 "        variable: date\n" +
-                "        component: day","timeScope:\n" +
+                "        component: day", "timeScope:\n" +
                 "        component: day");
         Assert.assertFalse(configurationParsingResult.isValid());
         ValidationCheckResult onlyError = Iterables.getOnlyElement(configurationParsingResult.getValidationCheckResults());
@@ -266,7 +283,7 @@ public class ApplicationConfigurationServiceTest {
     public void testTimeScopeVariableComponentKeyUnknownVariable() {
         ConfigurationParsingResult configurationParsingResult = parseYaml("timeScope:\n" +
                 "        variable: date\n" +
-                "        component: day","timeScope:\n" +
+                "        component: day", "timeScope:\n" +
                 "        variable: dates\n" +
                 "        component: day");
         Assert.assertFalse(configurationParsingResult.isValid());
@@ -279,7 +296,7 @@ public class ApplicationConfigurationServiceTest {
     public void testTimeVariableComponentKeyMissingComponent() {
         ConfigurationParsingResult configurationParsingResult = parseYaml("timeScope:\n" +
                 "        variable: date\n" +
-                "        component: day","timeScope:\n" +
+                "        component: day", "timeScope:\n" +
                 "        variable: date\n" +
                 "        component: ~");
         Assert.assertFalse(configurationParsingResult.isValid());
@@ -292,7 +309,7 @@ public class ApplicationConfigurationServiceTest {
     public void testTimeVariableComponentKeyUnknownComponent() {
         ConfigurationParsingResult configurationParsingResult = parseYaml("timeScope:\n" +
                 "        variable: date\n" +
-                "        component: day","timeScope:\n" +
+                "        component: day", "timeScope:\n" +
                 "        variable: date\n" +
                 "        component: days");
         Assert.assertFalse(configurationParsingResult.isValid());
@@ -333,7 +350,7 @@ public class ApplicationConfigurationServiceTest {
     @Test
     public void testTimeScopeVariableComponentPatternUnknown() {
         ConfigurationParsingResult configurationParsingResult = parseYaml("params:\n" +
-                "                pattern: dd/MM/yyyy","params:\n" +
+                "                pattern: dd/MM/yyyy", "params:\n" +
                 "                pattern: dd/MM");
         Assert.assertFalse(configurationParsingResult.isValid());
         ValidationCheckResult onlyError = Iterables.getOnlyElement(configurationParsingResult.getValidationCheckResults());
@@ -343,7 +360,7 @@ public class ApplicationConfigurationServiceTest {
 
     @Test
     public void testUnrecognizedProperty() {
-        ConfigurationParsingResult configurationParsingResult = parseYaml("compositeReferences","compositReference");
+        ConfigurationParsingResult configurationParsingResult = parseYaml("compositeReferences", "compositReference");
         Assert.assertFalse(configurationParsingResult.isValid());
         ValidationCheckResult onlyError = Iterables.getOnlyElement(configurationParsingResult.getValidationCheckResults());
         log.debug(onlyError.getMessage());
@@ -409,8 +426,8 @@ public class ApplicationConfigurationServiceTest {
         ValidationCheckResult authorizationVariableComponentKeyUnknownComponent = Iterables.find(validationCheckResults, vcr -> "authorizationVariableComponentKeyUnknownComponent".equals(vcr.getMessage()));
         ValidationCheckResult csvBoundToUnknownVariableComponent = Iterables.find(validationCheckResults, vcr -> "csvBoundToUnknownVariableComponent".equals(vcr.getMessage()));
 
-        Assert.assertEquals(true, authorizationVariableComponentKeyUnknownComponent!=null);
-        Assert.assertEquals(true, csvBoundToUnknownVariableComponent!=null);
+        Assert.assertEquals(true, authorizationVariableComponentKeyUnknownComponent != null);
+        Assert.assertEquals(true, csvBoundToUnknownVariableComponent != null);
     }
 
     @Test
@@ -429,8 +446,8 @@ public class ApplicationConfigurationServiceTest {
         ValidationCheckResult onlyError = Iterables.getOnlyElement(configurationParsingResult.getValidationCheckResults());
         log.debug(onlyError.getMessage());
         Assert.assertEquals("invalidInternationalizedColumns", onlyError.getMessage());
-        Assert.assertTrue(((Set)onlyError.getMessageParams().get("unknownUsedAsInternationalizedColumns")).contains("nom du site"));
-        Assert.assertTrue(((Set)onlyError.getMessageParams().get("knownColumns")).contains("nom du site_fr"));
+        Assert.assertTrue(((Set) onlyError.getMessageParams().get("unknownUsedAsInternationalizedColumns")).contains("nom du site"));
+        Assert.assertTrue(((Set) onlyError.getMessageParams().get("knownColumns")).contains("nom du site_fr"));
     }
 
     @Test
@@ -451,8 +468,8 @@ public class ApplicationConfigurationServiceTest {
         ValidationCheckResult onlyError = Iterables.getOnlyElement(configurationParsingResult.getValidationCheckResults());
         log.debug(onlyError.getMessage());
         Assert.assertEquals("invalidInternationalizedColumns", onlyError.getMessage());
-        Assert.assertTrue(((Set)onlyError.getMessageParams().get("unknownUsedAsInternationalizedColumns")).contains("nom du site"));
-        Assert.assertTrue(((Set)onlyError.getMessageParams().get("knownColumns")).contains("nom du site_fr"));
+        Assert.assertTrue(((Set) onlyError.getMessageParams().get("unknownUsedAsInternationalizedColumns")).contains("nom du site"));
+        Assert.assertTrue(((Set) onlyError.getMessageParams().get("knownColumns")).contains("nom du site_fr"));
     }
 
     @Test
diff --git a/src/test/java/fr/inra/oresing/rest/Fixtures.java b/src/test/java/fr/inra/oresing/rest/Fixtures.java
index 5afe20261..a406c2844 100644
--- a/src/test/java/fr/inra/oresing/rest/Fixtures.java
+++ b/src/test/java/fr/inra/oresing/rest/Fixtures.java
@@ -420,6 +420,7 @@ public class Fixtures {
     public Map<String, String> getDuplicatedDataFiles() {
         Map<String, String> referentielFiles = new LinkedHashMap<>();
         referentielFiles.put("data_without_duplicateds", "/data/duplication/data.csv");
+        referentielFiles.put("data_with_duplicateds", "/data/duplication/data_with_duplicateds.csv");
         return referentielFiles;
     }
 
diff --git a/src/test/java/fr/inra/oresing/rest/HAUTE_FREQUENCE_TEST.java b/src/test/java/fr/inra/oresing/rest/HAUTE_FREQUENCE_TEST.java
new file mode 100644
index 000000000..3db7519c2
--- /dev/null
+++ b/src/test/java/fr/inra/oresing/rest/HAUTE_FREQUENCE_TEST.java
@@ -0,0 +1,4 @@
+package fr.inra.oresing.rest;
+
+final class HAUTE_FREQUENCE_TEST {
+}
\ No newline at end of file
diff --git a/src/test/java/fr/inra/oresing/rest/ORE_TEST.java b/src/test/java/fr/inra/oresing/rest/ORE_TEST.java
new file mode 100644
index 000000000..f6c44de60
--- /dev/null
+++ b/src/test/java/fr/inra/oresing/rest/ORE_TEST.java
@@ -0,0 +1,4 @@
+package fr.inra.oresing.rest;
+
+public class ORE_TEST {
+}
\ No newline at end of file
diff --git a/src/test/java/fr/inra/oresing/rest/OreSiResourcesTest.java b/src/test/java/fr/inra/oresing/rest/OreSiResourcesTest.java
index 05466322b..51c55bd15 100644
--- a/src/test/java/fr/inra/oresing/rest/OreSiResourcesTest.java
+++ b/src/test/java/fr/inra/oresing/rest/OreSiResourcesTest.java
@@ -13,6 +13,7 @@ import fr.inra.oresing.persistence.JsonRowMapper;
 import lombok.extern.slf4j.Slf4j;
 import org.apache.commons.io.IOUtils;
 import org.apache.commons.lang3.StringUtils;
+import org.hamcrest.CoreMatchers;
 import org.hamcrest.Matchers;
 import org.hamcrest.core.Is;
 import org.hamcrest.core.IsEqual;
@@ -22,6 +23,7 @@ import org.junit.Assert;
 import org.junit.Before;
 import org.junit.Ignore;
 import org.junit.Test;
+import org.junit.experimental.categories.Category;
 import org.junit.runner.RunWith;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.boot.test.autoconfigure.SpringBootDependencyInjectionTestExecutionListener;
@@ -230,12 +232,12 @@ public class OreSiResourcesTest {
                     .andExpect(jsonPath("$.checkedFormatVariableComponents.ReferenceLineChecker", IsNull.notNullValue()))
                     .andExpect(jsonPath("$.checkedFormatVariableComponents.IntegerChecker", IsNull.notNullValue()))
                     .andExpect(jsonPath("$.rows").isArray())
-                    .andExpect(jsonPath("$.rows", Matchers.hasSize(306)))
+                    .andExpect(jsonPath("$.rows", Matchers.hasSize(272)))
                     //.andExpect(jsonPath("$.rows.value").value(list))
-                    .andExpect(jsonPath("$.totalRows", Is.is(306)))
-                    .andExpect(jsonPath("$.rows[*].values.date.value", Matchers.hasSize(306)))
-                    .andExpect(jsonPath("$.rows[*].values['Nombre d\\'individus'].unit", Matchers.hasSize(306)))
-                    .andExpect(jsonPath("$.rows[*].values['Couleur des individus'].unit", Matchers.hasSize(306)))
+                    .andExpect(jsonPath("$.totalRows", Is.is(272)))
+                    .andExpect(jsonPath("$.rows[*].values.date.value", Matchers.hasSize(272)))
+                    .andExpect(jsonPath("$.rows[*].values['Nombre d\\'individus'].unit", Matchers.hasSize(272)))
+                    .andExpect(jsonPath("$.rows[*].values['Couleur des individus'].unit", Matchers.hasSize(272)))
                     .andReturn().getResponse().getContentAsString();
             log.debug(StringUtils.abbreviate(actualJson, 50));
         }
@@ -258,11 +260,11 @@ public class OreSiResourcesTest {
                             .param("downloadDatasetQuery", filter)
                             .accept(MediaType.APPLICATION_JSON))
                     .andExpect(status().isOk())
-                    .andExpect(jsonPath("$.rows", Matchers.hasSize(9)))
-                    .andExpect(jsonPath("$.rows[*].values.date[?(@.value == 'date:1984-01-01T00:00:00:01/01/1984')]", Matchers.hasSize(9)))
-                    .andExpect(jsonPath("$.rows[*].values['Nombre d\\'individus'][?(@.value ==25)]", Matchers.hasSize(9)))
-                    .andExpect(jsonPath("$.rows[*].values['Couleur des individus'][?(@.value =='couleur_des_individus__vert')]", Matchers.hasSize(9)))
-                    .andExpect(jsonPath("$.rows[*].values.site.plateforme").value(Stream.of("a", "p1", "p1", "p1", "p1", "p1", "p1", "p2", "p2").collect(Collectors.toList())))
+                    .andExpect(jsonPath("$.rows", Matchers.hasSize(8)))
+                    .andExpect(jsonPath("$.rows[*].values.date[?(@.value == 'date:1984-01-01T00:00:00:01/01/1984')]", Matchers.hasSize(8)))
+                    .andExpect(jsonPath("$.rows[*].values['Nombre d\\'individus'][?(@.value ==25)]", Matchers.hasSize(8)))
+                    .andExpect(jsonPath("$.rows[*].values['Couleur des individus'][?(@.value =='couleur_des_individus__vert')]", Matchers.hasSize(8)))
+                    .andExpect(jsonPath("$.rows[*].values.site.plateforme").value(Stream.of("a", "p1", "p1", "p1", "p1", "p1", "p2", "p2").collect(Collectors.toList())))
                     .andReturn().getResponse().getContentAsString();
             log.debug(StringUtils.abbreviate(actualJson, 50));
 
@@ -285,12 +287,14 @@ public class OreSiResourcesTest {
             Assert.assertEquals(expectedCsvToList.size(), actualCsvToList.size());
             actualCsvToList.forEach(expectedCsvToList::remove);
             Assert.assertTrue(expectedCsvToList.isEmpty());
-            Assert.assertEquals(306, StringUtils.countMatches(actualCsv, "/1984"));
+            Assert.assertEquals(272, StringUtils.countMatches(actualCsv, "/1984"));
         }
 
         try (InputStream in = getClass().getResourceAsStream(fixtures.getPemDataResourceName())) {
             String csv = IOUtils.toString(Objects.requireNonNull(in), StandardCharsets.UTF_8);
-            String invalidCsv = csv.replace("projet_manche", "projet_manch");
+            String invalidCsv = csv
+                    .replace("projet_manche", "projet_manch")
+                    .replace("projet_atlantique", "projet_atlantiqu");
             MockMultipartFile refFile = new MockMultipartFile("file", "data-pem.csv", "text/plain", invalidCsv.getBytes(StandardCharsets.UTF_8));
             response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/monsore/data/pem")
                             .file(refFile)
@@ -299,10 +303,13 @@ public class OreSiResourcesTest {
                     .andReturn().getResponse().getContentAsString();
             log.debug(StringUtils.abbreviate(response, 50));
             Assert.assertTrue(response.contains("projet_manch"));
+            Assert.assertTrue(response.contains("projet_atlantiqu"));
+            Assert.assertTrue("Il faut mentionner les lignes en erreur", response.contains("41"));
+            Assert.assertTrue("Il faut mentionner les lignes en erreur", response.contains("42"));
+            Assert.assertTrue("Il faut mentionner les lignes en erreur", response.contains("43"));
+            Assert.assertTrue("Il faut mentionner les lignes en erreur", response.contains("10"));
             Assert.assertTrue("Il faut mentionner les lignes en erreur", response.contains("141"));
             Assert.assertTrue("Il faut mentionner les lignes en erreur", response.contains("142"));
-            Assert.assertTrue("Il faut mentionner les lignes en erreur", response.contains("143"));
-            Assert.assertTrue("Il faut mentionner les lignes en erreur", response.contains("310"));
         }
 
 //        // restitution de data json
@@ -537,8 +544,8 @@ public class OreSiResourcesTest {
         response = mockMvc.perform(get("/api/v1/applications/monsore/data/pem")
                         .cookie(authCookie))
                 .andExpect(status().is2xxSuccessful())
-                .andExpect(jsonPath("$.totalRows").value(136))
-                .andExpect(jsonPath("$.rows[*]", Matchers.hasSize(136)))
+                .andExpect(jsonPath("$.totalRows").value(102))
+                .andExpect(jsonPath("$.rows[*]", Matchers.hasSize(102)))
                 .andExpect(jsonPath("$.rows[*].values[? (@.site.chemin == 'oir__p1')][? (@.projet.value == 'projet_manche')]", Matchers.hasSize(0)))
                 .andReturn().getResponse().getContentAsString();
         log.debug(StringUtils.abbreviate(response, 50));
@@ -641,6 +648,7 @@ public class OreSiResourcesTest {
     }
 
     @Test
+    @Category(ACBB_TEST.class)
     public void addApplicationAcbb() throws Exception {
         authenticationService.addUserRightCreateApplication(userId);
 
@@ -659,71 +667,17 @@ public class OreSiResourcesTest {
             JsonPath.parse(response).read("$.id");
         }
 
-        // Ajout de referentiel
-        for (Map.Entry<String, String> e : fixtures.getAcbbReferentielFiles().entrySet()) {
-            try (InputStream refStream = getClass().getResourceAsStream(e.getValue())) {
-                MockMultipartFile refFile = new MockMultipartFile("file", e.getValue(), "text/plain", refStream);
-
-                String response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/acbb/references/{refType}", e.getKey())
-                                .file(refFile)
-                                .cookie(authCookie))
-                        .andExpect(status().isCreated())
-                        .andExpect(jsonPath("$.id", IsNull.notNullValue()))
-                        .andReturn().getResponse().getContentAsString();
-
-                JsonPath.parse(response).read("$.id");
-            }
-        }
-
-        String getReferenceResponse = mockMvc.perform(get("/api/v1/applications/acbb/references/parcelles")
-                        .contentType(MediaType.APPLICATION_JSON)
-                        .cookie(authCookie))
-                .andExpect(status().isOk())
-                .andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
-                .andReturn().getResponse().getContentAsString();
-
-        GetReferenceResult refs = objectMapper.readValue(getReferenceResponse, GetReferenceResult.class);
-        Assert.assertEquals(103, refs.getReferenceValues().size());
-
-        // Ajout de referentiel
-        for (Map.Entry<String, String> e : fixtures.getAcbbReferentielFiles().entrySet()) {
-            try (InputStream refStream = getClass().getResourceAsStream(e.getValue())) {
-                MockMultipartFile refFile = new MockMultipartFile("file", e.getValue(), "text/plain", refStream);
-
-                String response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/acbb/references/{refType}", e.getKey())
-                                .file(refFile)
-                                .cookie(authCookie))
-                        .andExpect(status().isCreated())
-                        .andExpect(jsonPath("$.id", IsNull.notNullValue()))
-                        .andReturn().getResponse().getContentAsString();
-
-                JsonPath.parse(response).read("$.id");
-            }
-        }
-
-        getReferenceResponse = mockMvc.perform(get("/api/v1/applications/acbb/references/parcelles")
-                        .contentType(MediaType.APPLICATION_JSON)
-                        .cookie(authCookie))
-                .andExpect(status().isOk())
-                .andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
-                .andReturn().getResponse().getContentAsString();
-
-        refs = objectMapper.readValue(getReferenceResponse, GetReferenceResult.class);
-        Assert.assertEquals(103, refs.getReferenceValues().size());
-
-        String getReferenceCsvResponse = mockMvc.perform(get("/api/v1/applications/acbb/references/parcelles/csv")
-                        .cookie(authCookie)
-                        .accept(MediaType.TEXT_PLAIN))
-                .andExpect(status().isOk())
-                .andReturn().getResponse().getContentAsString();
-
-        Assert.assertEquals(31, StringUtils.countMatches(getReferenceCsvResponse, "theix"));
+        addReferences();
+        addDataFluxTours();
+        addDataBiomassProduction();
+        addDataSWC();
+    }
 
-        // ajout de data
-        try (InputStream in = getClass().getResourceAsStream(fixtures.getFluxToursDataResourceName())) {
-            MockMultipartFile file = new MockMultipartFile("file", "Flux_tours.csv", "text/plain", in);
+    private void addDataSWC() throws Exception {
+        try (InputStream in = fixtures.openSwcDataResourceName(true)) {
+            MockMultipartFile file = new MockMultipartFile("file", "SWC.csv", "text/plain", in);
 
-            String response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/acbb/data/flux_tours")
+            String response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/acbb/data/SWC")
                             .file(file)
                             .cookie(authCookie))
                     .andExpect(status().is2xxSuccessful())
@@ -732,36 +686,29 @@ public class OreSiResourcesTest {
             log.debug(StringUtils.abbreviate(response, 50));
         }
 
-        // restitution de data json
         {
-//            String expectedJson = Resources.toString(getClass().getResource("/data/acbb/compare/export.json"), Charsets.UTF_8);
-            String actualJson = mockMvc.perform(get("/api/v1/applications/acbb/data/flux_tours")
+            String actualJson = mockMvc.perform(get("/api/v1/applications/acbb/data/SWC")
                             .cookie(authCookie)
                             .accept(MediaType.APPLICATION_JSON))
                     .andExpect(status().isOk())
-//                    .andExpect(content().json(expectedJson))
                     .andReturn().getResponse().getContentAsString();
 
             log.debug(StringUtils.abbreviate(actualJson, 50));
-            Assert.assertEquals(17568, StringUtils.countMatches(actualJson, "/2004"));
-            Assert.assertTrue(actualJson.contains("laqueuille.laqueuille__1"));
+            Assert.assertEquals(2912, StringUtils.countMatches(actualJson, "\"SWC\":"));
         }
 
-        // restitution de data csv
         {
-//            String expectedCsv = Resources.toString(getClass().getResource("/data/acbb/compare/export.csv"), Charsets.UTF_8);
-            String actualCsv = mockMvc.perform(get("/api/v1/applications/acbb/data/flux_tours")
+            String actualCsv = mockMvc.perform(get("/api/v1/applications/acbb/data/SWC")
                             .cookie(authCookie)
                             .accept(MediaType.TEXT_PLAIN))
                     .andExpect(status().isOk())
-//                    .andExpect(content().string(expectedCsv))
                     .andReturn().getResponse().getContentAsString();
             log.debug(StringUtils.abbreviate(actualCsv, 50));
-            Assert.assertEquals(17568, StringUtils.countMatches(actualCsv, "/2004"));
-            Assert.assertTrue(actualCsv.contains("Parcelle"));
-            Assert.assertTrue(actualCsv.contains("laqueuille;1"));
+            Assert.assertEquals(1456, StringUtils.countMatches(actualCsv, "/2010"));
         }
+    }
 
+    private void addDataBiomassProduction() throws Exception {
         // ajout de data
         try (InputStream in = getClass().getResourceAsStream(fixtures.getBiomasseProductionTeneurDataResourceName())) {
             MockMultipartFile file = new MockMultipartFile("file", "biomasse_production_teneur.csv", "text/plain", in);
@@ -795,11 +742,14 @@ public class OreSiResourcesTest {
             log.debug(StringUtils.abbreviate(actualCsv, 50));
             Assert.assertEquals(252, StringUtils.countMatches(actualCsv, "prairie permanente"));
         }
+    }
 
-        try (InputStream in = fixtures.openSwcDataResourceName(true)) {
-            MockMultipartFile file = new MockMultipartFile("file", "SWC.csv", "text/plain", in);
+    private void addDataFluxTours() throws Exception {
+        // add data
+        try (InputStream in = getClass().getResourceAsStream(fixtures.getFluxToursDataResourceName())) {
+            MockMultipartFile file = new MockMultipartFile("file", "Flux_tours.csv", "text/plain", in);
 
-            String response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/acbb/data/SWC")
+            String response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/acbb/data/flux_tours")
                             .file(file)
                             .cookie(authCookie))
                     .andExpect(status().is2xxSuccessful())
@@ -808,29 +758,75 @@ public class OreSiResourcesTest {
             log.debug(StringUtils.abbreviate(response, 50));
         }
 
+        // retrieve the data as json
         {
-            String actualJson = mockMvc.perform(get("/api/v1/applications/acbb/data/SWC")
+//            String expectedJson = Resources.toString(getClass().getResource("/data/acbb/compare/export.json"), Charsets.UTF_8);
+            String actualJson = mockMvc.perform(get("/api/v1/applications/acbb/data/flux_tours")
                             .cookie(authCookie)
                             .accept(MediaType.APPLICATION_JSON))
                     .andExpect(status().isOk())
+                    .andExpect(jsonPath("$.rows[? (@.values.date.datetime =~ /.*\\/2004.*/i)]", Matchers.hasSize(17568)))
+                    .andExpect(jsonPath("$.rows[*].values.parcelle.chemin", Matchers.hasSize(17568)))
+//                    .andExpect(content().json(expectedJson))
                     .andReturn().getResponse().getContentAsString();
 
             log.debug(StringUtils.abbreviate(actualJson, 50));
-            Assert.assertEquals(2912, StringUtils.countMatches(actualJson, "\"SWC\":"));
         }
 
+        // retrieve the data as csv
         {
-            String actualCsv = mockMvc.perform(get("/api/v1/applications/acbb/data/SWC")
+//            String expectedCsv = Resources.toString(getClass().getResource("/data/acbb/compare/export.csv"), Charsets.UTF_8);
+            String actualCsv = mockMvc.perform(get("/api/v1/applications/acbb/data/flux_tours")
                             .cookie(authCookie)
                             .accept(MediaType.TEXT_PLAIN))
                     .andExpect(status().isOk())
+//                    .andExpect(content().string(expectedCsv))
                     .andReturn().getResponse().getContentAsString();
             log.debug(StringUtils.abbreviate(actualCsv, 50));
-            Assert.assertEquals(1456, StringUtils.countMatches(actualCsv, "/2010"));
+            Assert.assertEquals(17568, StringUtils.countMatches(actualCsv, "/2004"));
+            Assert.assertTrue(actualCsv.contains("Parcelle"));
+            Assert.assertTrue(actualCsv.contains("laqueuille;1"));
         }
     }
 
+    private void addReferences() throws Exception {
+        // Add reference data
+        for (Map.Entry<String, String> e : fixtures.getAcbbReferentielFiles().entrySet()) {
+            try (InputStream refStream = getClass().getResourceAsStream(e.getValue())) {
+                MockMultipartFile refFile = new MockMultipartFile("file", e.getValue(), "text/plain", refStream);
+
+                String response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/acbb/references/{refType}", e.getKey())
+                                .file(refFile)
+                                .cookie(authCookie))
+                        .andExpect(status().isCreated())
+                        .andExpect(jsonPath("$.id", IsNull.notNullValue()))
+                        .andReturn().getResponse().getContentAsString();
+
+                JsonPath.parse(response).read("$.id");
+            }
+        }
+
+        String getReferenceResponse = mockMvc.perform(get("/api/v1/applications/acbb/references/parcelles")
+                        .contentType(MediaType.APPLICATION_JSON)
+                        .cookie(authCookie))
+                .andExpect(status().isOk())
+                .andExpect(content().contentTypeCompatibleWith(MediaType.APPLICATION_JSON))
+                .andReturn().getResponse().getContentAsString();
+
+        final GetReferenceResult refs = objectMapper.readValue(getReferenceResponse, GetReferenceResult.class);
+        Assert.assertEquals(103, refs.getReferenceValues().size());
+
+        String getReferenceCsvResponse = mockMvc.perform(get("/api/v1/applications/acbb/references/parcelles/csv")
+                        .cookie(authCookie)
+                        .accept(MediaType.TEXT_PLAIN))
+                .andExpect(status().isOk())
+                .andReturn().getResponse().getContentAsString();
+
+        Assert.assertEquals(31, StringUtils.countMatches(getReferenceCsvResponse, "theix"));
+    }
+
     @Test
+    @Category(HAUTE_FREQUENCE_TEST.class)
     public void addApplicationHauteFrequence() throws Exception {
         authenticationService.addUserRightCreateApplication(userId);
         try (InputStream configurationFile = fixtures.getClass().getResourceAsStream(fixtures.getHauteFrequenceApplicationConfigurationResourceName())) {
@@ -1038,7 +1034,7 @@ on test le dépôt d'un fichier récursif
             Assert.assertEquals("zones_etudes", messageParams.get("references"));
             Assert.assertEquals(3L, messageParams.get("lineNumber"));
             Assert.assertEquals("site3", messageParams.get("missingReferencesKey"));
-            Assert.assertTrue(Set.of("site3","site1.site2","site1","site2").containsAll((Set) messageParams.get("knownReferences")));
+            Assert.assertTrue(Set.of("site3", "site1.site2", "site1", "site2").containsAll((Set) messageParams.get("knownReferences")));
         }
 
         //il doit toujours y avoir le même nombre de ligne
@@ -1064,7 +1060,7 @@ on test le dépôt d'un fichier récursif
                     .andExpect(status().is2xxSuccessful())
                     .andExpect(jsonPath("$.totalRows", IsEqual.equalTo(4
                     )))
-                .andReturn().getResponse().getContentAsString();
+                    .andReturn().getResponse().getContentAsString();
             log.debug(response);
         }
 
@@ -1072,21 +1068,48 @@ on test le dépôt d'un fichier récursif
 
         try (InputStream refStream = fixtures.getClass().getResourceAsStream(dataWithoutDuplicateds)) {
             MockMultipartFile refFile = new MockMultipartFile("file", "data_without_duplicateds.csv", "text/plain", refStream);
+            final String response = mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/duplicated/data/dty")
+                            .file(refFile)
+                            .cookie(authCookie))
+                    .andExpect(status().is2xxSuccessful())
+                    //.andExpect(jsonPath("$[0].validationCheckResult.messageParams.file", IsEqual.equalTo("dty"))).andExpect(jsonPath("$[0].validationCheckResult.messageParams.failingRowContent", StringContains.containsString("1980-02-23")))
+                    .andReturn().getResponse().getContentAsString();
+        }
+        // the number of rows is unchanged
+        try (InputStream refStream = fixtures.getClass().getResourceAsStream(dataWithoutDuplicateds)) {
+            final String response = mockMvc.perform(get("/api/v1/applications/duplicated/data/dty")
+                            .cookie(authCookie))
+                    .andExpect(status().is2xxSuccessful())
+                    .andExpect(jsonPath("$.totalRows", IsEqual.equalTo(4
+                    )))
+                    .andReturn().getResponse().getContentAsString();
+            log.debug(response);
+        }
+        // test uploading a data file that contains duplicated rows
+        final String dataWithDuplicateds = fixtures.getDuplicatedDataFiles().get("data_with_duplicateds");
+        try (InputStream refStream = fixtures.getClass().getResourceAsStream(dataWithDuplicateds)) {
+            MockMultipartFile refFile = new MockMultipartFile("file", "data_with_duplicateds.csv", "text/plain", refStream);
             mockMvc.perform(MockMvcRequestBuilders.multipart("/api/v1/applications/duplicated/data/dty")
                             .file(refFile)
                             .cookie(authCookie))
-                    .andExpect(MockMvcResultMatchers.status().is2xxSuccessful());
+                    .andExpect(status().is4xxClientError())
+                    .andExpect(jsonPath("$[0].validationCheckResult.message", IsEqual.equalTo("duplicatedLineInDatatype")))
+                    .andExpect(jsonPath("$[0].validationCheckResult.messageParams.duplicatedRows", CoreMatchers.hasItem(5)))
+                    .andExpect(jsonPath("$[0].validationCheckResult.messageParams.duplicatedRows", CoreMatchers.hasItem(6)))
+                    .andExpect(jsonPath("$[0].validationCheckResult.messageParams.uniquenessKey.Date_day", IsEqual.equalTo("24/02/1980")))
+                    .andExpect(jsonPath("$[0].validationCheckResult.messageParams.uniquenessKey.localization_zones_etudes", IsEqual.equalTo("site1")));
+
         }
         // le nombre de ligne est inchangé
-        /*try (InputStream refStream = fixtures.getClass().getResourceAsStream(dataWithoutDuplicateds)) {
+        try (InputStream refStream = fixtures.getClass().getResourceAsStream(dataWithoutDuplicateds)) {
             final String response = mockMvc.perform(get("/api/v1/applications/duplicated/data/dty")
                             .cookie(authCookie))
                     .andExpect(status().is2xxSuccessful())
                     .andExpect(jsonPath("$.totalRows", IsEqual.equalTo(4
                     )))
-                .andReturn().getResponse().getContentAsString();
+                    .andReturn().getResponse().getContentAsString();
             log.debug(response);
-        }*/
+        }
     }
 
     @Test
@@ -1136,6 +1159,7 @@ on test le dépôt d'un fichier récursif
                             .file(refFile)
                             .cookie(authCookie))
                     .andExpect(MockMvcResultMatchers.status().is2xxSuccessful());
+
         }
 
         // ajout de data
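The duplicated-rows upload exercised above is expected to fail with a duplicatedLineInDatatype error whose messageParams expose the offending line numbers (duplicatedRows) and the shared key (uniquenessKey). Below is a minimal, self-contained sketch of that kind of check, grouping rows on a key built from (variable, component) pairs; the class, record and method names are hypothetical and not the application's actual code.

// Illustrative sketch only: detect duplicated rows by grouping lines on a
// uniqueness key assembled from "variable_component" entries, and report the
// line numbers that share the same key (what duplicatedRows conveys).
import java.util.ArrayList;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

public class UniquenessCheckSketch {

    // One parsed CSV line: its line number plus "variable_component" -> value.
    record Row(int lineNumber, Map<String, String> values) {}

    // Returns, for each duplicated key, the list of line numbers sharing it.
    static Map<String, List<Integer>> findDuplicates(List<Row> rows, List<String> uniquenessComponents) {
        Map<String, List<Integer>> linesByKey = new LinkedHashMap<>();
        for (Row row : rows) {
            StringBuilder key = new StringBuilder();
            for (String component : uniquenessComponents) {
                key.append(component).append('=').append(row.values().get(component)).append(';');
            }
            linesByKey.computeIfAbsent(key.toString(), k -> new ArrayList<>()).add(row.lineNumber());
        }
        Map<String, List<Integer>> duplicates = new LinkedHashMap<>();
        linesByKey.forEach((key, lines) -> {
            if (lines.size() > 1) {
                duplicates.put(key, lines);
            }
        });
        return duplicates;
    }

    public static void main(String[] args) {
        // Mirrors data_with_duplicateds.csv: lines 5 and 6 share the same key.
        List<Row> rows = List.of(
                new Row(2, Map.of("Date_day", "23/02/1980", "localization_zones_etudes", "site1.site2")),
                new Row(3, Map.of("Date_day", "24/02/1980", "localization_zones_etudes", "site1.site2")),
                new Row(4, Map.of("Date_day", "23/02/1980", "localization_zones_etudes", "site1")),
                new Row(5, Map.of("Date_day", "24/02/1980", "localization_zones_etudes", "site1")),
                new Row(6, Map.of("Date_day", "24/02/1980", "localization_zones_etudes", "site1")));
        System.out.println(findDuplicates(rows, List.of("Date_day", "localization_zones_etudes")));
        // prints {Date_day=24/02/1980;localization_zones_etudes=site1;=[5, 6]}
    }
}

Run against the rows of data_with_duplicateds.csv, the sketch flags lines 5 and 6 for Date_day=24/02/1980 and localization_zones_etudes=site1, which is what the jsonPath assertions above check.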
diff --git a/src/test/resources/data/acbb/acbb.yaml b/src/test/resources/data/acbb/acbb.yaml
index 8c1306409..bd9246d8f 100644
--- a/src/test/resources/data/acbb/acbb.yaml
+++ b/src/test/resources/data/acbb/acbb.yaml
@@ -124,6 +124,13 @@ compositeReferences:
         parentKeyColumn: "site"
 dataTypes:
   flux_tours:
+    uniqueness:
+      - variable: site
+        component: chemin
+      - variable: parcelle
+        component: chemin
+      - variable: date
+        component: datetime
     data:
       site:
         components:
@@ -145,12 +152,23 @@ dataTypes:
                 refType: parcelles
       date:
         components:
+          datetime:
+            defaultValue: >
+              return datumByVariableAndComponent.date.day +" " +datumByVariableAndComponent.date.time
+            checker:
+              name: Date
+              params:
+                pattern: dd/MM/yyyy HH:mm:ss
           day:
             checker:
               name: Date
               params:
                 pattern: dd/MM/yyyy
           time:
+            checker:
+              name: Date
+              params:
+                pattern: HH:mm:ss
       CO2:
         components:
           value:
@@ -377,7 +395,7 @@ dataTypes:
           component: chemin
       timeScope:
         variable: date
-        component: day
+        component: datetime
       dataGroups:
         all:
           label: "Toutes les données"
@@ -395,6 +413,15 @@ dataTypes:
             - u*
             - Fc gf
   biomasse_production_teneur:
+    uniqueness:
+      - variable: parcelle
+        component: chemin
+      - variable: date de mesure
+        component: valeur
+      - variable: date ou num incrémental intervention
+        component: valeur
+      - variable: mav_nature
+        component: valeur
     data:
       parcelle:
         components:
@@ -1447,6 +1474,17 @@ dataTypes:
             - HUT
             - MOR
   SWC:
+    uniqueness:
+      - variable: Nom parcelle
+        component: chemin
+      - variable: Nom traitement
+        component: valeur
+      - variable: Date
+        component: datetime
+      - variable: contexte
+        component: profondeur
+      - variable: contexte
+        component: répétition
     data:
       Nom parcelle:
         components:
@@ -1472,7 +1510,7 @@ dataTypes:
                 pattern: HH:mm:ss
           datetime:
             defaultValue: >
-              return datumByVariableAndComponent.get("Date").get("day") +" " +datumByVariableAndComponent.get("Date").get("time")
+              return datumByVariableAndComponent.Date.day +" " +datumByVariableAndComponent.Date.time
             checker:
               name: Date
               params:
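The acbb.yaml hunks above add uniqueness sections and, for flux_tours, a computed datetime component: its defaultValue concatenates the day and time components with a space, the result is checked against the dd/MM/yyyy HH:mm:ss pattern, and the flux_tours timeScope now points at datetime instead of day. A minimal sketch of that composition, assuming the Date checker accepts java.time-style patterns (the class name below is hypothetical):

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

// Illustrative sketch: compose a datetime value the way the defaultValue
// expression does (day + " " + time) and parse it with the declared pattern.
public class DatetimeComponentSketch {
    public static void main(String[] args) {
        String day = "12/05/2010";   // "day" component, pattern dd/MM/yyyy
        String time = "14:30:00";    // "time" component, pattern HH:mm:ss
        String datetime = day + " " + time;
        DateTimeFormatter formatter = DateTimeFormatter.ofPattern("dd/MM/yyyy HH:mm:ss");
        LocalDateTime parsed = LocalDateTime.parse(datetime, formatter);
        System.out.println(parsed); // 2010-05-12T14:30
    }
}

Declaring the composed datetime component lets both the timeScope and the uniqueness entries reference a single, fully qualified timestamp rather than the day component alone.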
diff --git a/src/test/resources/data/duplication/data_with_duplicateds.csv b/src/test/resources/data/duplication/data_with_duplicateds.csv
new file mode 100644
index 000000000..da042bff4
--- /dev/null
+++ b/src/test/resources/data/duplication/data_with_duplicateds.csv
@@ -0,0 +1,6 @@
+site;date
+site1.site2;23/02/1980
+site1.site2;24/02/1980
+site1;23/02/1980
+site1;24/02/1980
+site1;24/02/1980
\ No newline at end of file
diff --git a/src/test/resources/data/duplication/duplication.yaml b/src/test/resources/data/duplication/duplication.yaml
index cae0e427e..25d05b2ed 100644
--- a/src/test/resources/data/duplication/duplication.yaml
+++ b/src/test/resources/data/duplication/duplication.yaml
@@ -58,6 +58,11 @@ dataTypes:
               name: Reference
               params:
                 refType: zones_etudes
+    uniqueness:
+      - variable: Date
+        component: day
+      - variable: localization
+        component: zones_etudes
     format:
       headerLine: 1
       firstRowLine: 2
diff --git a/src/test/resources/data/hautefrequence/hautefrequence.yaml b/src/test/resources/data/hautefrequence/hautefrequence.yaml
index 2160dab13..953b1a827 100644
--- a/src/test/resources/data/hautefrequence/hautefrequence.yaml
+++ b/src/test/resources/data/hautefrequence/hautefrequence.yaml
@@ -22,7 +22,20 @@ compositeReferences:
       -
         reference: projet
 dataTypes: 
-  hautefrequence: 
+  hautefrequence:
+    uniqueness:
+      - variable: date
+        component: datetime
+      - variable: localization
+        component: projet
+      - variable: localization
+        component: site
+      - variable: localization
+        component: plateforme
+      - variable: localization
+        component: profondeur
+      - variable: outil
+        component: value
     authorization: 
       dataGroups: 
         all: 
@@ -42,11 +55,18 @@ dataTypes:
           component: projet
           variable: localization
       timeScope: 
-        component: day
+        component: datetime
         variable: date
     data: 
       date: 
-        components: 
+        components:
+          datetime:
+            defaultValue: >
+              return datumByVariableAndComponent.date.day +" " +datumByVariableAndComponent.date.time
+            checker:
+              name: Date
+              params:
+                pattern: dd/MM/yyyy HH:mm:ss
           day: 
             checker: 
               name: Date
@@ -193,4 +213,4 @@ references:
   variable: 
     columns: 
       codeVariable: ~
-    keyColumns: [codeVariable]
+    keyColumns: [codeVariable]
\ No newline at end of file
diff --git a/src/test/resources/data/monsore/compare/export.csv b/src/test/resources/data/monsore/compare/export.csv
index bf17a3019..5eba68c29 100644
--- a/src/test/resources/data/monsore/compare/export.csv
+++ b/src/test/resources/data/monsore/compare/export.csv
@@ -1,307 +1,273 @@
 date;site;projet;espece;Nombre d'individus;plateforme;Couleur des individus
-01/01/1984;bassin_versant.nivelle;projet_atlantique;alo;15;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.nivelle;projet_atlantique;trm;25;p1;couleur_des_individus__vert
 01/01/1984;bassin_versant.nivelle;projet_atlantique;sat;39;p1;couleur_des_individus__bleu
+01/01/1984;bassin_versant.nivelle;projet_atlantique;alo;15;p1;couleur_des_individus__bleu
+01/01/1984;bassin_versant.nivelle;projet_atlantique;ang;38;p1;couleur_des_individus__vert
 01/01/1984;bassin_versant.nivelle;projet_atlantique;trf;18;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.nivelle;projet_atlantique;lpf;54;p1;couleur_des_individus__bleu
 01/01/1984;bassin_versant.nivelle;projet_atlantique;lpm;27;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.nivelle;projet_atlantique;ang;38;p1;couleur_des_individus__vert
-01/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;54;a;couleur_des_individus__bleu
-01/01/1984;plateforme.oir.oir__p1;projet_atlantique;sat;39;a;couleur_des_individus__bleu
-01/01/1984;plateforme.oir.oir__p1;projet_atlantique;trf;18;a;couleur_des_individus__rouge
+01/01/1984;bassin_versant.nivelle;projet_atlantique;lpf;54;p1;couleur_des_individus__bleu
+01/01/1984;bassin_versant.nivelle;projet_atlantique;trm;25;p1;couleur_des_individus__vert
 01/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpm;27;a;couleur_des_individus__rouge
-01/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;25;a;couleur_des_individus__vert
+01/01/1984;plateforme.oir.oir__p1;projet_atlantique;trf;18;a;couleur_des_individus__rouge
+01/01/1984;plateforme.oir.oir__p1;projet_atlantique;sat;39;a;couleur_des_individus__bleu
 01/01/1984;plateforme.oir.oir__p1;projet_atlantique;ang;38;a;couleur_des_individus__vert
 01/01/1984;plateforme.oir.oir__p1;projet_atlantique;alo;15;a;couleur_des_individus__bleu
+01/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;25;a;couleur_des_individus__vert
+01/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;54;a;couleur_des_individus__bleu
 01/01/1984;bassin_versant.oir;projet_atlantique;trm;25;p2;couleur_des_individus__vert
+01/01/1984;bassin_versant.oir;projet_atlantique;trf;18;p2;couleur_des_individus__rouge
 01/01/1984;bassin_versant.oir;projet_atlantique;ang;38;p2;couleur_des_individus__vert
-01/01/1984;bassin_versant.oir;projet_atlantique;alo;15;p2;couleur_des_individus__bleu
 01/01/1984;bassin_versant.oir;projet_atlantique;lpf;54;p2;couleur_des_individus__bleu
+01/01/1984;bassin_versant.oir;projet_atlantique;alo;15;p2;couleur_des_individus__bleu
 01/01/1984;bassin_versant.oir;projet_atlantique;sat;39;p2;couleur_des_individus__bleu
 01/01/1984;bassin_versant.oir;projet_atlantique;lpm;27;p2;couleur_des_individus__rouge
-01/01/1984;bassin_versant.oir;projet_atlantique;trf;18;p2;couleur_des_individus__rouge
-01/01/1984;bassin_versant.scarff;projet_atlantique;trf;18;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.scarff;projet_atlantique;alo;15;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.scarff;projet_atlantique;ang;38;p1;couleur_des_individus__vert
+01/01/1984;bassin_versant.scarff;projet_atlantique;sat;39;p1;couleur_des_individus__bleu
 01/01/1984;bassin_versant.scarff;projet_atlantique;lpf;54;p1;couleur_des_individus__bleu
+01/01/1984;bassin_versant.scarff;projet_atlantique;ang;38;p1;couleur_des_individus__vert
 01/01/1984;bassin_versant.scarff;projet_atlantique;trm;25;p1;couleur_des_individus__vert
-01/01/1984;bassin_versant.scarff;projet_atlantique;sat;39;p1;couleur_des_individus__bleu
 01/01/1984;bassin_versant.scarff;projet_atlantique;lpm;27;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.nivelle;projet_manche;ang;38;p1;couleur_des_individus__vert
-01/01/1984;bassin_versant.nivelle;projet_manche;lpf;54;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.nivelle;projet_manche;trm;25;p1;couleur_des_individus__vert
+01/01/1984;bassin_versant.scarff;projet_atlantique;trf;18;p1;couleur_des_individus__rouge
+01/01/1984;bassin_versant.scarff;projet_atlantique;alo;15;p1;couleur_des_individus__bleu
 01/01/1984;bassin_versant.nivelle;projet_manche;trf;18;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.nivelle;projet_manche;sat;39;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.nivelle;projet_manche;lpm;27;p1;couleur_des_individus__rouge
 01/01/1984;bassin_versant.nivelle;projet_manche;alo;15;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.oir;projet_manche;trm;25;p1;couleur_des_individus__vert
+01/01/1984;bassin_versant.nivelle;projet_manche;lpf;54;p1;couleur_des_individus__bleu
+01/01/1984;bassin_versant.nivelle;projet_manche;lpm;27;p1;couleur_des_individus__rouge
+01/01/1984;bassin_versant.nivelle;projet_manche;sat;39;p1;couleur_des_individus__bleu
+01/01/1984;bassin_versant.nivelle;projet_manche;trm;25;p1;couleur_des_individus__vert
+01/01/1984;bassin_versant.nivelle;projet_manche;ang;38;p1;couleur_des_individus__vert
 01/01/1984;bassin_versant.oir;projet_manche;lpm;27;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.oir;projet_manche;alo;15;p1;couleur_des_individus__bleu
 01/01/1984;bassin_versant.oir;projet_manche;lpf;54;p1;couleur_des_individus__bleu
 01/01/1984;bassin_versant.oir;projet_manche;sat;39;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.oir;projet_manche;ang;38;p1;couleur_des_individus__vert
 01/01/1984;bassin_versant.oir;projet_manche;trf;18;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.oir;projet_manche;lpm;27;p2;couleur_des_individus__rouge
+01/01/1984;bassin_versant.oir;projet_manche;alo;15;p1;couleur_des_individus__bleu
+01/01/1984;bassin_versant.oir;projet_manche;ang;38;p1;couleur_des_individus__vert
+01/01/1984;bassin_versant.oir;projet_manche;trm;25;p1;couleur_des_individus__vert
+01/01/1984;bassin_versant.oir;projet_manche;lpf;54;p2;couleur_des_individus__bleu
+01/01/1984;bassin_versant.oir;projet_manche;alo;15;p2;couleur_des_individus__bleu
 01/01/1984;bassin_versant.oir;projet_manche;trf;18;p2;couleur_des_individus__rouge
 01/01/1984;bassin_versant.oir;projet_manche;sat;39;p2;couleur_des_individus__bleu
-01/01/1984;bassin_versant.oir;projet_manche;lpf;54;p2;couleur_des_individus__bleu
+01/01/1984;bassin_versant.oir;projet_manche;lpm;27;p2;couleur_des_individus__rouge
 01/01/1984;bassin_versant.oir;projet_manche;ang;38;p2;couleur_des_individus__vert
-01/01/1984;bassin_versant.oir;projet_manche;alo;15;p2;couleur_des_individus__bleu
 01/01/1984;bassin_versant.oir;projet_manche;trm;25;p2;couleur_des_individus__vert
-01/01/1984;bassin_versant.scarff;projet_manche;sat;39;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.scarff;projet_manche;lpm;27;p1;couleur_des_individus__rouge
 01/01/1984;bassin_versant.scarff;projet_manche;ang;38;p1;couleur_des_individus__vert
-01/01/1984;bassin_versant.scarff;projet_manche;trf;18;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.scarff;projet_manche;lpf;54;p1;couleur_des_individus__bleu
 01/01/1984;bassin_versant.scarff;projet_manche;lpm;27;p1;couleur_des_individus__rouge
-01/01/1984;bassin_versant.scarff;projet_manche;trf;18;p1;couleur_des_individus__rouge
+01/01/1984;bassin_versant.scarff;projet_manche;trm;25;p1;couleur_des_individus__vert
 01/01/1984;bassin_versant.scarff;projet_manche;alo;15;p1;couleur_des_individus__bleu
+01/01/1984;bassin_versant.scarff;projet_manche;trf;18;p1;couleur_des_individus__rouge
 01/01/1984;bassin_versant.scarff;projet_manche;lpf;54;p1;couleur_des_individus__bleu
 01/01/1984;bassin_versant.scarff;projet_manche;sat;39;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.scarff;projet_manche;alo;15;p1;couleur_des_individus__bleu
-01/01/1984;bassin_versant.scarff;projet_manche;trm;25;p1;couleur_des_individus__vert
-01/01/1984;bassin_versant.scarff;projet_manche;trm;25;p1;couleur_des_individus__vert
-01/01/1984;bassin_versant.scarff;projet_manche;ang;38;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.nivelle;projet_atlantique;alo;18;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.nivelle;projet_atlantique;trf;14;p1;couleur_des_individus__bleu
+02/01/1984;bassin_versant.nivelle;projet_atlantique;lpm;32;p1;couleur_des_individus__bleu
+02/01/1984;bassin_versant.nivelle;projet_atlantique;ang;25;p1;couleur_des_individus__rouge
 02/01/1984;bassin_versant.nivelle;projet_atlantique;lpf;;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.nivelle;projet_atlantique;sat;15;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.nivelle;projet_atlantique;trm;25;p1;couleur_des_individus__rouge
-02/01/1984;bassin_versant.nivelle;projet_atlantique;ang;25;p1;couleur_des_individus__rouge
-02/01/1984;bassin_versant.nivelle;projet_atlantique;lpm;32;p1;couleur_des_individus__bleu
-02/01/1984;plateforme.oir.oir__p1;projet_atlantique;ang;25;a;couleur_des_individus__rouge
-02/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;;a;couleur_des_individus__vert
+02/01/1984;bassin_versant.nivelle;projet_atlantique;sat;15;p1;couleur_des_individus__vert
+02/01/1984;bassin_versant.nivelle;projet_atlantique;trf;14;p1;couleur_des_individus__bleu
 02/01/1984;plateforme.oir.oir__p1;projet_atlantique;sat;15;a;couleur_des_individus__vert
+02/01/1984;plateforme.oir.oir__p1;projet_atlantique;alo;18;a;couleur_des_individus__vert
+02/01/1984;plateforme.oir.oir__p1;projet_atlantique;ang;25;a;couleur_des_individus__rouge
 02/01/1984;plateforme.oir.oir__p1;projet_atlantique;trf;14;a;couleur_des_individus__bleu
-02/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;25;a;couleur_des_individus__rouge
 02/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpm;32;a;couleur_des_individus__bleu
-02/01/1984;plateforme.oir.oir__p1;projet_atlantique;alo;18;a;couleur_des_individus__vert
+02/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;;a;couleur_des_individus__vert
+02/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;25;a;couleur_des_individus__rouge
 02/01/1984;bassin_versant.oir;projet_atlantique;trm;25;p2;couleur_des_individus__rouge
-02/01/1984;bassin_versant.oir;projet_atlantique;sat;15;p2;couleur_des_individus__vert
 02/01/1984;bassin_versant.oir;projet_atlantique;trf;14;p2;couleur_des_individus__bleu
 02/01/1984;bassin_versant.oir;projet_atlantique;lpf;;p2;couleur_des_individus__vert
-02/01/1984;bassin_versant.oir;projet_atlantique;alo;18;p2;couleur_des_individus__vert
 02/01/1984;bassin_versant.oir;projet_atlantique;ang;25;p2;couleur_des_individus__rouge
 02/01/1984;bassin_versant.oir;projet_atlantique;lpm;32;p2;couleur_des_individus__bleu
-02/01/1984;bassin_versant.scarff;projet_atlantique;lpm;32;p1;couleur_des_individus__bleu
+02/01/1984;bassin_versant.oir;projet_atlantique;alo;18;p2;couleur_des_individus__vert
+02/01/1984;bassin_versant.oir;projet_atlantique;sat;15;p2;couleur_des_individus__vert
 02/01/1984;bassin_versant.scarff;projet_atlantique;alo;18;p1;couleur_des_individus__vert
+02/01/1984;bassin_versant.scarff;projet_atlantique;trf;14;p1;couleur_des_individus__bleu
+02/01/1984;bassin_versant.scarff;projet_atlantique;ang;25;p1;couleur_des_individus__rouge
 02/01/1984;bassin_versant.scarff;projet_atlantique;lpf;;p1;couleur_des_individus__vert
+02/01/1984;bassin_versant.scarff;projet_atlantique;lpm;32;p1;couleur_des_individus__bleu
 02/01/1984;bassin_versant.scarff;projet_atlantique;sat;15;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.scarff;projet_atlantique;ang;25;p1;couleur_des_individus__rouge
 02/01/1984;bassin_versant.scarff;projet_atlantique;trm;25;p1;couleur_des_individus__rouge
-02/01/1984;bassin_versant.scarff;projet_atlantique;trf;14;p1;couleur_des_individus__bleu
 02/01/1984;bassin_versant.nivelle;projet_manche;ang;25;p1;couleur_des_individus__rouge
-02/01/1984;bassin_versant.nivelle;projet_manche;lpm;32;p1;couleur_des_individus__bleu
-02/01/1984;bassin_versant.nivelle;projet_manche;trf;14;p1;couleur_des_individus__bleu
-02/01/1984;bassin_versant.nivelle;projet_manche;lpf;;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.nivelle;projet_manche;sat;15;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.nivelle;projet_manche;alo;18;p1;couleur_des_individus__vert
+02/01/1984;bassin_versant.nivelle;projet_manche;sat;15;p1;couleur_des_individus__vert
+02/01/1984;bassin_versant.nivelle;projet_manche;trf;14;p1;couleur_des_individus__bleu
+02/01/1984;bassin_versant.nivelle;projet_manche;lpm;32;p1;couleur_des_individus__bleu
 02/01/1984;bassin_versant.nivelle;projet_manche;trm;25;p1;couleur_des_individus__rouge
-02/01/1984;bassin_versant.oir;projet_manche;alo;18;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.oir;projet_manche;sat;15;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.oir;projet_manche;lpm;32;p1;couleur_des_individus__bleu
+02/01/1984;bassin_versant.nivelle;projet_manche;lpf;;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.oir;projet_manche;ang;25;p1;couleur_des_individus__rouge
-02/01/1984;bassin_versant.oir;projet_manche;lpf;50;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.oir;projet_manche;trm;25;p1;couleur_des_individus__rouge
 02/01/1984;bassin_versant.oir;projet_manche;trf;14;p1;couleur_des_individus__bleu
-02/01/1984;bassin_versant.oir;projet_manche;lpf;;p2;couleur_des_individus__vert
-02/01/1984;bassin_versant.oir;projet_manche;lpm;32;p2;couleur_des_individus__bleu
-02/01/1984;bassin_versant.oir;projet_manche;trm;25;p2;couleur_des_individus__rouge
-02/01/1984;bassin_versant.oir;projet_manche;ang;25;p2;couleur_des_individus__rouge
-02/01/1984;bassin_versant.oir;projet_manche;alo;18;p2;couleur_des_individus__vert
+02/01/1984;bassin_versant.oir;projet_manche;lpf;50;p1;couleur_des_individus__vert
+02/01/1984;bassin_versant.oir;projet_manche;lpm;32;p1;couleur_des_individus__bleu
+02/01/1984;bassin_versant.oir;projet_manche;sat;15;p1;couleur_des_individus__vert
+02/01/1984;bassin_versant.oir;projet_manche;alo;18;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.oir;projet_manche;trf;14;p2;couleur_des_individus__bleu
 02/01/1984;bassin_versant.oir;projet_manche;sat;15;p2;couleur_des_individus__vert
-02/01/1984;bassin_versant.scarff;projet_manche;lpf;;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.scarff;projet_manche;sat;15;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.scarff;projet_manche;ang;25;p1;couleur_des_individus__rouge
+02/01/1984;bassin_versant.oir;projet_manche;lpm;32;p2;couleur_des_individus__bleu
+02/01/1984;bassin_versant.oir;projet_manche;alo;18;p2;couleur_des_individus__vert
+02/01/1984;bassin_versant.oir;projet_manche;ang;25;p2;couleur_des_individus__rouge
+02/01/1984;bassin_versant.oir;projet_manche;lpf;;p2;couleur_des_individus__vert
+02/01/1984;bassin_versant.oir;projet_manche;trm;25;p2;couleur_des_individus__rouge
+02/01/1984;bassin_versant.scarff;projet_manche;trf;14;p1;couleur_des_individus__bleu
 02/01/1984;bassin_versant.scarff;projet_manche;sat;15;p1;couleur_des_individus__vert
+02/01/1984;bassin_versant.scarff;projet_manche;lpf;;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.scarff;projet_manche;lpm;32;p1;couleur_des_individus__bleu
-02/01/1984;bassin_versant.scarff;projet_manche;alo;18;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.scarff;projet_manche;ang;25;p1;couleur_des_individus__rouge
 02/01/1984;bassin_versant.scarff;projet_manche;alo;18;p1;couleur_des_individus__vert
 02/01/1984;bassin_versant.scarff;projet_manche;trm;25;p1;couleur_des_individus__rouge
-02/01/1984;bassin_versant.scarff;projet_manche;trf;14;p1;couleur_des_individus__bleu
-02/01/1984;bassin_versant.scarff;projet_manche;trf;14;p1;couleur_des_individus__bleu
-02/01/1984;bassin_versant.scarff;projet_manche;lpm;32;p1;couleur_des_individus__bleu
-02/01/1984;bassin_versant.scarff;projet_manche;lpf;;p1;couleur_des_individus__vert
-02/01/1984;bassin_versant.scarff;projet_manche;trm;25;p1;couleur_des_individus__rouge
-03/01/1984;bassin_versant.nivelle;projet_atlantique;trm;27;p1;couleur_des_individus__bleu
 03/01/1984;bassin_versant.nivelle;projet_atlantique;ang;;p1;couleur_des_individus__bleu
-03/01/1984;bassin_versant.nivelle;projet_atlantique;trf;;p1;couleur_des_individus__vert
+03/01/1984;bassin_versant.nivelle;projet_atlantique;alo;16;p1;couleur_des_individus__rouge
 03/01/1984;bassin_versant.nivelle;projet_atlantique;lpm;41;p1;couleur_des_individus__vert
+03/01/1984;bassin_versant.nivelle;projet_atlantique;trf;;p1;couleur_des_individus__vert
 03/01/1984;bassin_versant.nivelle;projet_atlantique;lpf;45;p1;couleur_des_individus__rouge
-03/01/1984;bassin_versant.nivelle;projet_atlantique;alo;16;p1;couleur_des_individus__rouge
+03/01/1984;bassin_versant.nivelle;projet_atlantique;trm;27;p1;couleur_des_individus__bleu
+03/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;45;a;couleur_des_individus__rouge
+03/01/1984;plateforme.oir.oir__p1;projet_atlantique;ang;;a;couleur_des_individus__bleu
 03/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpm;41;a;couleur_des_individus__vert
+03/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;27;a;couleur_des_individus__bleu
 03/01/1984;plateforme.oir.oir__p1;projet_atlantique;alo;16;a;couleur_des_individus__rouge
 03/01/1984;plateforme.oir.oir__p1;projet_atlantique;trf;;a;couleur_des_individus__vert
-03/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;27;a;couleur_des_individus__bleu
-03/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;45;a;couleur_des_individus__rouge
-03/01/1984;plateforme.oir.oir__p1;projet_atlantique;ang;;a;couleur_des_individus__bleu
+03/01/1984;bassin_versant.oir;projet_atlantique;ang;;p2;couleur_des_individus__bleu
 03/01/1984;bassin_versant.oir;projet_atlantique;lpm;41;p2;couleur_des_individus__vert
-03/01/1984;bassin_versant.oir;projet_atlantique;alo;16;p2;couleur_des_individus__rouge
+03/01/1984;bassin_versant.oir;projet_atlantique;lpf;45;p2;couleur_des_individus__rouge
 03/01/1984;bassin_versant.oir;projet_atlantique;trf;;p2;couleur_des_individus__vert
+03/01/1984;bassin_versant.oir;projet_atlantique;alo;16;p2;couleur_des_individus__rouge
 03/01/1984;bassin_versant.oir;projet_atlantique;trm;27;p2;couleur_des_individus__bleu
-03/01/1984;bassin_versant.oir;projet_atlantique;lpf;45;p2;couleur_des_individus__rouge
-03/01/1984;bassin_versant.oir;projet_atlantique;ang;;p2;couleur_des_individus__bleu
 03/01/1984;bassin_versant.scarff;projet_atlantique;trm;27;p1;couleur_des_individus__bleu
-03/01/1984;bassin_versant.scarff;projet_atlantique;alo;16;p1;couleur_des_individus__rouge
 03/01/1984;bassin_versant.scarff;projet_atlantique;lpf;45;p1;couleur_des_individus__rouge
-03/01/1984;bassin_versant.scarff;projet_atlantique;lpm;41;p1;couleur_des_individus__vert
+03/01/1984;bassin_versant.scarff;projet_atlantique;alo;16;p1;couleur_des_individus__rouge
 03/01/1984;bassin_versant.scarff;projet_atlantique;ang;;p1;couleur_des_individus__bleu
+03/01/1984;bassin_versant.scarff;projet_atlantique;lpm;41;p1;couleur_des_individus__vert
 03/01/1984;bassin_versant.scarff;projet_atlantique;trf;;p1;couleur_des_individus__vert
+03/01/1984;bassin_versant.nivelle;projet_manche;ang;;p1;couleur_des_individus__bleu
+03/01/1984;bassin_versant.nivelle;projet_manche;lpm;41;p1;couleur_des_individus__vert
 03/01/1984;bassin_versant.nivelle;projet_manche;trm;27;p1;couleur_des_individus__bleu
-03/01/1984;bassin_versant.nivelle;projet_manche;trf;;p1;couleur_des_individus__vert
 03/01/1984;bassin_versant.nivelle;projet_manche;lpf;45;p1;couleur_des_individus__rouge
 03/01/1984;bassin_versant.nivelle;projet_manche;alo;16;p1;couleur_des_individus__rouge
-03/01/1984;bassin_versant.nivelle;projet_manche;ang;;p1;couleur_des_individus__bleu
-03/01/1984;bassin_versant.nivelle;projet_manche;lpm;41;p1;couleur_des_individus__vert
-03/01/1984;bassin_versant.oir;projet_manche;ang;20;p1;couleur_des_individus__bleu
+03/01/1984;bassin_versant.nivelle;projet_manche;trf;;p1;couleur_des_individus__vert
+03/01/1984;bassin_versant.oir;projet_manche;lpf;45;p1;couleur_des_individus__rouge
 03/01/1984;bassin_versant.oir;projet_manche;alo;16;p1;couleur_des_individus__rouge
 03/01/1984;bassin_versant.oir;projet_manche;trf;20;p1;couleur_des_individus__vert
-03/01/1984;bassin_versant.oir;projet_manche;lpf;45;p1;couleur_des_individus__rouge
-03/01/1984;bassin_versant.oir;projet_manche;trm;27;p1;couleur_des_individus__bleu
 03/01/1984;bassin_versant.oir;projet_manche;lpm;41;p1;couleur_des_individus__vert
+03/01/1984;bassin_versant.oir;projet_manche;ang;20;p1;couleur_des_individus__bleu
+03/01/1984;bassin_versant.oir;projet_manche;trm;27;p1;couleur_des_individus__bleu
+03/01/1984;bassin_versant.oir;projet_manche;trm;27;p2;couleur_des_individus__bleu
 03/01/1984;bassin_versant.oir;projet_manche;ang;;p2;couleur_des_individus__bleu
-03/01/1984;bassin_versant.oir;projet_manche;alo;16;p2;couleur_des_individus__rouge
 03/01/1984;bassin_versant.oir;projet_manche;trf;;p2;couleur_des_individus__vert
 03/01/1984;bassin_versant.oir;projet_manche;lpf;45;p2;couleur_des_individus__rouge
-03/01/1984;bassin_versant.oir;projet_manche;trm;27;p2;couleur_des_individus__bleu
+03/01/1984;bassin_versant.oir;projet_manche;alo;16;p2;couleur_des_individus__rouge
 03/01/1984;bassin_versant.oir;projet_manche;lpm;41;p2;couleur_des_individus__vert
-03/01/1984;bassin_versant.scarff;projet_manche;lpm;41;p1;couleur_des_individus__vert
-03/01/1984;bassin_versant.scarff;projet_manche;alo;16;p1;couleur_des_individus__rouge
 03/01/1984;bassin_versant.scarff;projet_manche;trf;;p1;couleur_des_individus__vert
+03/01/1984;bassin_versant.scarff;projet_manche;ang;;p1;couleur_des_individus__bleu
 03/01/1984;bassin_versant.scarff;projet_manche;alo;16;p1;couleur_des_individus__rouge
 03/01/1984;bassin_versant.scarff;projet_manche;trm;27;p1;couleur_des_individus__bleu
 03/01/1984;bassin_versant.scarff;projet_manche;lpf;45;p1;couleur_des_individus__rouge
-03/01/1984;bassin_versant.scarff;projet_manche;ang;;p1;couleur_des_individus__bleu
-03/01/1984;bassin_versant.scarff;projet_manche;trf;;p1;couleur_des_individus__vert
-03/01/1984;bassin_versant.scarff;projet_manche;lpf;45;p1;couleur_des_individus__rouge
-03/01/1984;bassin_versant.scarff;projet_manche;trm;27;p1;couleur_des_individus__bleu
 03/01/1984;bassin_versant.scarff;projet_manche;lpm;41;p1;couleur_des_individus__vert
-03/01/1984;bassin_versant.scarff;projet_manche;ang;;p1;couleur_des_individus__bleu
-04/01/1984;bassin_versant.nivelle;projet_atlantique;trm;27;p1;couleur_des_individus__vert
 04/01/1984;bassin_versant.nivelle;projet_atlantique;alo;15;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.nivelle;projet_atlantique;sat;24;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.nivelle;projet_atlantique;trf;;p1;couleur_des_individus__rouge
+04/01/1984;bassin_versant.nivelle;projet_atlantique;trm;27;p1;couleur_des_individus__vert
 04/01/1984;bassin_versant.nivelle;projet_atlantique;lpf;51;p1;couleur_des_individus__bleu
 04/01/1984;bassin_versant.nivelle;projet_atlantique;ang;22;p1;couleur_des_individus__vert
-04/01/1984;bassin_versant.nivelle;projet_atlantique;trf;;p1;couleur_des_individus__rouge
 04/01/1984;bassin_versant.nivelle;projet_atlantique;lpm;43;p1;couleur_des_individus__rouge
-04/01/1984;bassin_versant.nivelle;projet_atlantique;sat;24;p1;couleur_des_individus__bleu
-04/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;51;a;couleur_des_individus__bleu
-04/01/1984;plateforme.oir.oir__p1;projet_atlantique;ang;22;a;couleur_des_individus__vert
-04/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;27;a;couleur_des_individus__vert
-04/01/1984;plateforme.oir.oir__p1;projet_atlantique;alo;15;a;couleur_des_individus__bleu
 04/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpm;43;a;couleur_des_individus__rouge
 04/01/1984;plateforme.oir.oir__p1;projet_atlantique;sat;24;a;couleur_des_individus__bleu
 04/01/1984;plateforme.oir.oir__p1;projet_atlantique;trf;;a;couleur_des_individus__rouge
-04/01/1984;bassin_versant.oir;projet_atlantique;ang;22;p2;couleur_des_individus__vert
-04/01/1984;bassin_versant.oir;projet_atlantique;sat;24;p2;couleur_des_individus__bleu
-04/01/1984;bassin_versant.oir;projet_atlantique;trf;;p2;couleur_des_individus__rouge
+04/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;51;a;couleur_des_individus__bleu
+04/01/1984;plateforme.oir.oir__p1;projet_atlantique;ang;22;a;couleur_des_individus__vert
+04/01/1984;plateforme.oir.oir__p1;projet_atlantique;alo;15;a;couleur_des_individus__bleu
+04/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;27;a;couleur_des_individus__vert
 04/01/1984;bassin_versant.oir;projet_atlantique;trm;27;p2;couleur_des_individus__vert
-04/01/1984;bassin_versant.oir;projet_atlantique;lpf;51;p2;couleur_des_individus__bleu
+04/01/1984;bassin_versant.oir;projet_atlantique;sat;24;p2;couleur_des_individus__bleu
 04/01/1984;bassin_versant.oir;projet_atlantique;alo;15;p2;couleur_des_individus__bleu
+04/01/1984;bassin_versant.oir;projet_atlantique;ang;22;p2;couleur_des_individus__vert
+04/01/1984;bassin_versant.oir;projet_atlantique;lpf;51;p2;couleur_des_individus__bleu
+04/01/1984;bassin_versant.oir;projet_atlantique;trf;;p2;couleur_des_individus__rouge
 04/01/1984;bassin_versant.oir;projet_atlantique;lpm;43;p2;couleur_des_individus__rouge
 04/01/1984;bassin_versant.scarff;projet_atlantique;lpm;43;p1;couleur_des_individus__rouge
 04/01/1984;bassin_versant.scarff;projet_atlantique;trf;;p1;couleur_des_individus__rouge
-04/01/1984;bassin_versant.scarff;projet_atlantique;trm;27;p1;couleur_des_individus__vert
-04/01/1984;bassin_versant.scarff;projet_atlantique;alo;15;p1;couleur_des_individus__bleu
 04/01/1984;bassin_versant.scarff;projet_atlantique;ang;22;p1;couleur_des_individus__vert
-04/01/1984;bassin_versant.scarff;projet_atlantique;lpf;51;p1;couleur_des_individus__bleu
 04/01/1984;bassin_versant.scarff;projet_atlantique;sat;24;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.scarff;projet_atlantique;lpf;51;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.scarff;projet_atlantique;alo;15;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.scarff;projet_atlantique;trm;27;p1;couleur_des_individus__vert
 04/01/1984;bassin_versant.nivelle;projet_manche;sat;24;p1;couleur_des_individus__bleu
-04/01/1984;bassin_versant.nivelle;projet_manche;lpf;51;p1;couleur_des_individus__bleu
 04/01/1984;bassin_versant.nivelle;projet_manche;alo;15;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.nivelle;projet_manche;lpm;43;p1;couleur_des_individus__rouge
 04/01/1984;bassin_versant.nivelle;projet_manche;trm;27;p1;couleur_des_individus__vert
+04/01/1984;bassin_versant.nivelle;projet_manche;lpf;51;p1;couleur_des_individus__bleu
 04/01/1984;bassin_versant.nivelle;projet_manche;trf;;p1;couleur_des_individus__rouge
 04/01/1984;bassin_versant.nivelle;projet_manche;ang;22;p1;couleur_des_individus__vert
-04/01/1984;bassin_versant.nivelle;projet_manche;lpm;43;p1;couleur_des_individus__rouge
-04/01/1984;bassin_versant.oir;projet_manche;lpm;43;p1;couleur_des_individus__rouge
-04/01/1984;bassin_versant.oir;projet_manche;trf;20;p1;couleur_des_individus__rouge
 04/01/1984;bassin_versant.oir;projet_manche;lpf;51;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.oir;projet_manche;trf;20;p1;couleur_des_individus__rouge
 04/01/1984;bassin_versant.oir;projet_manche;alo;15;p1;couleur_des_individus__bleu
-04/01/1984;bassin_versant.oir;projet_manche;trm;27;p1;couleur_des_individus__vert
-04/01/1984;bassin_versant.oir;projet_manche;sat;24;p1;couleur_des_individus__bleu
 04/01/1984;bassin_versant.oir;projet_manche;ang;22;p1;couleur_des_individus__vert
+04/01/1984;bassin_versant.oir;projet_manche;sat;24;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.oir;projet_manche;trm;27;p1;couleur_des_individus__vert
+04/01/1984;bassin_versant.oir;projet_manche;lpm;43;p1;couleur_des_individus__rouge
+04/01/1984;bassin_versant.oir;projet_manche;trm;27;p2;couleur_des_individus__vert
 04/01/1984;bassin_versant.oir;projet_manche;ang;22;p2;couleur_des_individus__vert
-04/01/1984;bassin_versant.oir;projet_manche;lpm;43;p2;couleur_des_individus__rouge
 04/01/1984;bassin_versant.oir;projet_manche;alo;15;p2;couleur_des_individus__bleu
-04/01/1984;bassin_versant.oir;projet_manche;sat;24;p2;couleur_des_individus__bleu
-04/01/1984;bassin_versant.oir;projet_manche;trm;27;p2;couleur_des_individus__vert
 04/01/1984;bassin_versant.oir;projet_manche;lpf;51;p2;couleur_des_individus__bleu
 04/01/1984;bassin_versant.oir;projet_manche;trf;;p2;couleur_des_individus__rouge
-04/01/1984;bassin_versant.scarff;projet_manche;lpm;43;p1;couleur_des_individus__rouge
+04/01/1984;bassin_versant.oir;projet_manche;lpm;43;p2;couleur_des_individus__rouge
+04/01/1984;bassin_versant.oir;projet_manche;sat;24;p2;couleur_des_individus__bleu
 04/01/1984;bassin_versant.scarff;projet_manche;ang;22;p1;couleur_des_individus__vert
-04/01/1984;bassin_versant.scarff;projet_manche;trm;27;p1;couleur_des_individus__vert
 04/01/1984;bassin_versant.scarff;projet_manche;lpf;51;p1;couleur_des_individus__bleu
-04/01/1984;bassin_versant.scarff;projet_manche;trm;27;p1;couleur_des_individus__vert
-04/01/1984;bassin_versant.scarff;projet_manche;ang;22;p1;couleur_des_individus__vert
-04/01/1984;bassin_versant.scarff;projet_manche;trf;;p1;couleur_des_individus__rouge
-04/01/1984;bassin_versant.scarff;projet_manche;alo;15;p1;couleur_des_individus__bleu
 04/01/1984;bassin_versant.scarff;projet_manche;trf;;p1;couleur_des_individus__rouge
 04/01/1984;bassin_versant.scarff;projet_manche;lpm;43;p1;couleur_des_individus__rouge
-04/01/1984;bassin_versant.scarff;projet_manche;sat;24;p1;couleur_des_individus__bleu
-04/01/1984;bassin_versant.scarff;projet_manche;sat;24;p1;couleur_des_individus__bleu
-04/01/1984;bassin_versant.scarff;projet_manche;lpf;51;p1;couleur_des_individus__bleu
 04/01/1984;bassin_versant.scarff;projet_manche;alo;15;p1;couleur_des_individus__bleu
-05/01/1984;bassin_versant.nivelle;projet_atlantique;trf;21;p1;couleur_des_individus__bleu
-05/01/1984;bassin_versant.nivelle;projet_atlantique;ang;27;p1;couleur_des_individus__rouge
+04/01/1984;bassin_versant.scarff;projet_manche;sat;24;p1;couleur_des_individus__bleu
+04/01/1984;bassin_versant.scarff;projet_manche;trm;27;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.nivelle;projet_atlantique;lpm;49;p1;couleur_des_individus__bleu
 05/01/1984;bassin_versant.nivelle;projet_atlantique;trm;25;p1;couleur_des_individus__rouge
+05/01/1984;bassin_versant.nivelle;projet_atlantique;alo;17;p1;couleur_des_individus__vert
 05/01/1984;bassin_versant.nivelle;projet_atlantique;sat;24;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.nivelle;projet_atlantique;lpm;49;p1;couleur_des_individus__bleu
 05/01/1984;bassin_versant.nivelle;projet_atlantique;lpf;59;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.nivelle;projet_atlantique;alo;17;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.nivelle;projet_atlantique;trf;21;p1;couleur_des_individus__bleu
+05/01/1984;bassin_versant.nivelle;projet_atlantique;ang;27;p1;couleur_des_individus__rouge
 05/01/1984;plateforme.oir.oir__p1;projet_atlantique;trf;21;a;couleur_des_individus__bleu
-05/01/1984;plateforme.oir.oir__p1;projet_atlantique;sat;24;a;couleur_des_individus__vert
-05/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpm;49;a;couleur_des_individus__bleu
-05/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;59;a;couleur_des_individus__vert
 05/01/1984;plateforme.oir.oir__p1;projet_atlantique;alo;17;a;couleur_des_individus__vert
 05/01/1984;plateforme.oir.oir__p1;projet_atlantique;ang;27;a;couleur_des_individus__rouge
 05/01/1984;plateforme.oir.oir__p1;projet_atlantique;trm;25;a;couleur_des_individus__rouge
+05/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpm;49;a;couleur_des_individus__bleu
+05/01/1984;plateforme.oir.oir__p1;projet_atlantique;lpf;59;a;couleur_des_individus__vert
+05/01/1984;plateforme.oir.oir__p1;projet_atlantique;sat;24;a;couleur_des_individus__vert
+05/01/1984;bassin_versant.oir;projet_atlantique;trf;21;p2;couleur_des_individus__bleu
 05/01/1984;bassin_versant.oir;projet_atlantique;lpf;59;p2;couleur_des_individus__vert
-05/01/1984;bassin_versant.oir;projet_atlantique;alo;17;p2;couleur_des_individus__vert
+05/01/1984;bassin_versant.oir;projet_atlantique;trm;25;p2;couleur_des_individus__rouge
 05/01/1984;bassin_versant.oir;projet_atlantique;sat;24;p2;couleur_des_individus__vert
-05/01/1984;bassin_versant.oir;projet_atlantique;ang;27;p2;couleur_des_individus__rouge
 05/01/1984;bassin_versant.oir;projet_atlantique;lpm;49;p2;couleur_des_individus__bleu
-05/01/1984;bassin_versant.oir;projet_atlantique;trf;21;p2;couleur_des_individus__bleu
-05/01/1984;bassin_versant.oir;projet_atlantique;trm;25;p2;couleur_des_individus__rouge
-05/01/1984;bassin_versant.scarff;projet_atlantique;sat;24;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.scarff;projet_atlantique;lpm;49;p1;couleur_des_individus__bleu
+05/01/1984;bassin_versant.oir;projet_atlantique;alo;17;p2;couleur_des_individus__vert
+05/01/1984;bassin_versant.oir;projet_atlantique;ang;27;p2;couleur_des_individus__rouge
+05/01/1984;bassin_versant.scarff;projet_atlantique;ang;27;p1;couleur_des_individus__rouge
 05/01/1984;bassin_versant.scarff;projet_atlantique;trm;25;p1;couleur_des_individus__rouge
-05/01/1984;bassin_versant.scarff;projet_atlantique;lpf;59;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.scarff;projet_atlantique;lpm;49;p1;couleur_des_individus__bleu
+05/01/1984;bassin_versant.scarff;projet_atlantique;sat;24;p1;couleur_des_individus__vert
 05/01/1984;bassin_versant.scarff;projet_atlantique;trf;21;p1;couleur_des_individus__bleu
 05/01/1984;bassin_versant.scarff;projet_atlantique;alo;17;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.scarff;projet_atlantique;ang;27;p1;couleur_des_individus__rouge
-05/01/1984;bassin_versant.nivelle;projet_manche;lpf;59;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.scarff;projet_atlantique;lpf;59;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.nivelle;projet_manche;ang;27;p1;couleur_des_individus__rouge
 05/01/1984;bassin_versant.nivelle;projet_manche;alo;17;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.nivelle;projet_manche;trf;21;p1;couleur_des_individus__bleu
+05/01/1984;bassin_versant.nivelle;projet_manche;lpf;59;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.nivelle;projet_manche;sat;24;p1;couleur_des_individus__vert
 05/01/1984;bassin_versant.nivelle;projet_manche;lpm;49;p1;couleur_des_individus__bleu
+05/01/1984;bassin_versant.nivelle;projet_manche;trf;21;p1;couleur_des_individus__bleu
 05/01/1984;bassin_versant.nivelle;projet_manche;trm;25;p1;couleur_des_individus__rouge
-05/01/1984;bassin_versant.nivelle;projet_manche;sat;24;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.nivelle;projet_manche;ang;27;p1;couleur_des_individus__rouge
+05/01/1984;bassin_versant.oir;projet_manche;trf;21;p1;couleur_des_individus__bleu
+05/01/1984;bassin_versant.oir;projet_manche;lpm;49;p1;couleur_des_individus__bleu
+05/01/1984;bassin_versant.oir;projet_manche;trm;25;p1;couleur_des_individus__rouge
+05/01/1984;bassin_versant.oir;projet_manche;ang;27;p1;couleur_des_individus__rouge
 05/01/1984;bassin_versant.oir;projet_manche;lpf;59;p1;couleur_des_individus__vert
 05/01/1984;bassin_versant.oir;projet_manche;alo;17;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.oir;projet_manche;ang;27;p1;couleur_des_individus__rouge
 05/01/1984;bassin_versant.oir;projet_manche;sat;24;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.oir;projet_manche;lpm;49;p1;couleur_des_individus__bleu
-05/01/1984;bassin_versant.oir;projet_manche;trm;25;p1;couleur_des_individus__rouge
-05/01/1984;bassin_versant.oir;projet_manche;trf;21;p1;couleur_des_individus__bleu
 05/01/1984;bassin_versant.oir;projet_manche;alo;17;p2;couleur_des_individus__vert
-05/01/1984;bassin_versant.oir;projet_manche;sat;24;p2;couleur_des_individus__vert
+05/01/1984;bassin_versant.oir;projet_manche;ang;27;p2;couleur_des_individus__rouge
 05/01/1984;bassin_versant.oir;projet_manche;trf;21;p2;couleur_des_individus__bleu
 05/01/1984;bassin_versant.oir;projet_manche;trm;25;p2;couleur_des_individus__rouge
-05/01/1984;bassin_versant.oir;projet_manche;ang;27;p2;couleur_des_individus__rouge
-05/01/1984;bassin_versant.oir;projet_manche;lpf;59;p2;couleur_des_individus__vert
+05/01/1984;bassin_versant.oir;projet_manche;sat;24;p2;couleur_des_individus__vert
 05/01/1984;bassin_versant.oir;projet_manche;lpm;49;p2;couleur_des_individus__bleu
-05/01/1984;bassin_versant.scarff;projet_manche;trm;25;p1;couleur_des_individus__rouge
-05/01/1984;bassin_versant.scarff;projet_manche;lpm;49;p1;couleur_des_individus__bleu
-05/01/1984;bassin_versant.scarff;projet_manche;ang;27;p1;couleur_des_individus__rouge
-05/01/1984;bassin_versant.scarff;projet_manche;trf;21;p1;couleur_des_individus__bleu
-05/01/1984;bassin_versant.scarff;projet_manche;alo;17;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.oir;projet_manche;lpf;59;p2;couleur_des_individus__vert
 05/01/1984;bassin_versant.scarff;projet_manche;lpm;49;p1;couleur_des_individus__bleu
-05/01/1984;bassin_versant.scarff;projet_manche;sat;24;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.scarff;projet_manche;alo;17;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.scarff;projet_manche;ang;27;p1;couleur_des_individus__rouge
 05/01/1984;bassin_versant.scarff;projet_manche;trm;25;p1;couleur_des_individus__rouge
-05/01/1984;bassin_versant.scarff;projet_manche;lpf;59;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.scarff;projet_manche;alo;17;p1;couleur_des_individus__vert
 05/01/1984;bassin_versant.scarff;projet_manche;trf;21;p1;couleur_des_individus__bleu
-05/01/1984;bassin_versant.scarff;projet_manche;sat;24;p1;couleur_des_individus__vert
-05/01/1984;bassin_versant.scarff;projet_manche;lpf;59;p1;couleur_des_individus__vert
\ No newline at end of file
+05/01/1984;bassin_versant.scarff;projet_manche;lpf;59;p1;couleur_des_individus__vert
+05/01/1984;bassin_versant.scarff;projet_manche;ang;27;p1;couleur_des_individus__rouge
+05/01/1984;bassin_versant.scarff;projet_manche;sat;24;p1;couleur_des_individus__vert
\ No newline at end of file
diff --git a/src/test/resources/data/monsore/data-pem.csv b/src/test/resources/data/monsore/data-pem.csv
index a51a24202..3bc929a2a 100644
--- a/src/test/resources/data/monsore/data-pem.csv
+++ b/src/test/resources/data/monsore/data-pem.csv
@@ -273,38 +273,4 @@ projet_manche;scarff;p1;05/01/1984;lpm;couleur_des_individus__bleu;49
 projet_manche;scarff;p1;05/01/1984;alo;couleur_des_individus__vert;17
 projet_manche;scarff;p1;05/01/1984;trm;couleur_des_individus__rouge;25
 projet_manche;scarff;p1;05/01/1984;trf;couleur_des_individus__bleu;21
-projet_manche;scarff;p1;05/01/1984;sat;couleur_des_individus__vert;24
-projet_manche;scarff;p1;01/01/1984;lpf;couleur_des_individus__bleu;54
-projet_manche;scarff;p1;01/01/1984;ang;couleur_des_individus__vert;38
-projet_manche;scarff;p1;01/01/1984;lpm;couleur_des_individus__rouge;27
-projet_manche;scarff;p1;01/01/1984;alo;couleur_des_individus__bleu;15
-projet_manche;scarff;p1;01/01/1984;trm;couleur_des_individus__vert;25
-projet_manche;scarff;p1;01/01/1984;trf;couleur_des_individus__rouge;18
-projet_manche;scarff;p1;01/01/1984;sat;couleur_des_individus__bleu;39
-projet_manche;scarff;p1;02/01/1984;lpf;couleur_des_individus__vert;
-projet_manche;scarff;p1;02/01/1984;ang;couleur_des_individus__rouge;25
-projet_manche;scarff;p1;02/01/1984;lpm;couleur_des_individus__bleu;32
-projet_manche;scarff;p1;02/01/1984;alo;couleur_des_individus__vert;18
-projet_manche;scarff;p1;02/01/1984;trm;couleur_des_individus__rouge;25
-projet_manche;scarff;p1;02/01/1984;trf;couleur_des_individus__bleu;14
-projet_manche;scarff;p1;02/01/1984;sat;couleur_des_individus__vert;15
-projet_manche;scarff;p1;03/01/1984;lpf;couleur_des_individus__rouge;45
-projet_manche;scarff;p1;03/01/1984;ang;couleur_des_individus__bleu;
-projet_manche;scarff;p1;03/01/1984;lpm;couleur_des_individus__vert;41
-projet_manche;scarff;p1;03/01/1984;alo;couleur_des_individus__rouge;16
-projet_manche;scarff;p1;03/01/1984;trm;couleur_des_individus__bleu;27
-projet_manche;scarff;p1;03/01/1984;trf;couleur_des_individus__vert;
-projet_manche;scarff;p1;04/01/1984;lpf;couleur_des_individus__bleu;51
-projet_manche;scarff;p1;04/01/1984;ang;couleur_des_individus__vert;22
-projet_manche;scarff;p1;04/01/1984;lpm;couleur_des_individus__rouge;43
-projet_manche;scarff;p1;04/01/1984;alo;couleur_des_individus__bleu;15
-projet_manche;scarff;p1;04/01/1984;trm;couleur_des_individus__vert;27
-projet_manche;scarff;p1;04/01/1984;trf;couleur_des_individus__rouge;
-projet_manche;scarff;p1;04/01/1984;sat;couleur_des_individus__bleu;24
-projet_manche;scarff;p1;05/01/1984;lpf;couleur_des_individus__vert;59
-projet_manche;scarff;p1;05/01/1984;ang;couleur_des_individus__rouge;27
-projet_manche;scarff;p1;05/01/1984;lpm;couleur_des_individus__bleu;49
-projet_manche;scarff;p1;05/01/1984;alo;couleur_des_individus__vert;17
-projet_manche;scarff;p1;05/01/1984;trm;couleur_des_individus__rouge;25
-projet_manche;scarff;p1;05/01/1984;trf;couleur_des_individus__bleu;21
-projet_manche;scarff;p1;05/01/1984;sat;couleur_des_individus__vert;24
+projet_manche;scarff;p1;05/01/1984;sat;couleur_des_individus__vert;24
\ No newline at end of file
diff --git a/src/test/resources/data/monsore/manche-scarff-p1-pem.csv b/src/test/resources/data/monsore/manche-scarff-p1-pem.csv
index 1dab3c4dc..82419496f 100644
--- a/src/test/resources/data/monsore/manche-scarff-p1-pem.csv
+++ b/src/test/resources/data/monsore/manche-scarff-p1-pem.csv
@@ -35,38 +35,4 @@ projet_manche;scarff;p1;05/01/1984;lpm;couleur_des_individus__bleu;49
 projet_manche;scarff;p1;05/01/1984;alo;couleur_des_individus__vert;17
 projet_manche;scarff;p1;05/01/1984;trm;couleur_des_individus__rouge;25
 projet_manche;scarff;p1;05/01/1984;trf;couleur_des_individus__bleu;21
-projet_manche;scarff;p1;05/01/1984;sat;couleur_des_individus__vert;24
-projet_manche;scarff;p1;01/01/1984;lpf;couleur_des_individus__bleu;54
-projet_manche;scarff;p1;01/01/1984;ang;couleur_des_individus__vert;38
-projet_manche;scarff;p1;01/01/1984;lpm;couleur_des_individus__rouge;27
-projet_manche;scarff;p1;01/01/1984;alo;couleur_des_individus__bleu;15
-projet_manche;scarff;p1;01/01/1984;trm;couleur_des_individus__vert;25
-projet_manche;scarff;p1;01/01/1984;trf;couleur_des_individus__rouge;18
-projet_manche;scarff;p1;01/01/1984;sat;couleur_des_individus__bleu;39
-projet_manche;scarff;p1;02/01/1984;lpf;couleur_des_individus__vert;
-projet_manche;scarff;p1;02/01/1984;ang;couleur_des_individus__rouge;25
-projet_manche;scarff;p1;02/01/1984;lpm;couleur_des_individus__bleu;32
-projet_manche;scarff;p1;02/01/1984;alo;couleur_des_individus__vert;18
-projet_manche;scarff;p1;02/01/1984;trm;couleur_des_individus__rouge;25
-projet_manche;scarff;p1;02/01/1984;trf;couleur_des_individus__bleu;14
-projet_manche;scarff;p1;02/01/1984;sat;couleur_des_individus__vert;15
-projet_manche;scarff;p1;03/01/1984;lpf;couleur_des_individus__rouge;45
-projet_manche;scarff;p1;03/01/1984;ang;couleur_des_individus__bleu;
-projet_manche;scarff;p1;03/01/1984;lpm;couleur_des_individus__vert;41
-projet_manche;scarff;p1;03/01/1984;alo;couleur_des_individus__rouge;16
-projet_manche;scarff;p1;03/01/1984;trm;couleur_des_individus__bleu;27
-projet_manche;scarff;p1;03/01/1984;trf;couleur_des_individus__vert;
-projet_manche;scarff;p1;04/01/1984;lpf;couleur_des_individus__bleu;51
-projet_manche;scarff;p1;04/01/1984;ang;couleur_des_individus__vert;22
-projet_manche;scarff;p1;04/01/1984;lpm;couleur_des_individus__rouge;43
-projet_manche;scarff;p1;04/01/1984;alo;couleur_des_individus__bleu;15
-projet_manche;scarff;p1;04/01/1984;trm;couleur_des_individus__vert;27
-projet_manche;scarff;p1;04/01/1984;trf;couleur_des_individus__rouge;
-projet_manche;scarff;p1;04/01/1984;sat;couleur_des_individus__bleu;24
-projet_manche;scarff;p1;05/01/1984;lpf;couleur_des_individus__vert;59
-projet_manche;scarff;p1;05/01/1984;ang;couleur_des_individus__rouge;27
-projet_manche;scarff;p1;05/01/1984;lpm;couleur_des_individus__bleu;49
-projet_manche;scarff;p1;05/01/1984;alo;couleur_des_individus__vert;17
-projet_manche;scarff;p1;05/01/1984;trm;couleur_des_individus__rouge;25
-projet_manche;scarff;p1;05/01/1984;trf;couleur_des_individus__bleu;21
-projet_manche;scarff;p1;05/01/1984;sat;couleur_des_individus__vert;24
+projet_manche;scarff;p1;05/01/1984;sat;couleur_des_individus__vert;24
\ No newline at end of file
diff --git a/src/test/resources/data/monsore/monsore-with-repository.yaml b/src/test/resources/data/monsore/monsore-with-repository.yaml
index 20e911205..596fd33fa 100644
--- a/src/test/resources/data/monsore/monsore-with-repository.yaml
+++ b/src/test/resources/data/monsore/monsore-with-repository.yaml
@@ -29,13 +29,13 @@ references:
       pattern:
         fr: '{esp_nom}'
         en: '{esp_nom}'
-    keyColumns: [esp_nom]
+    keyColumns:
+      - esp_nom
     columns:
-      esp_nom:
-      esp_definition_fr:
-      esp_definition_en:
-      colonne_homonyme_entre_referentiels:
-
+      esp_nom: null
+      esp_definition_fr: null
+      esp_definition_en: null
+      colonne_homonyme_entre_referentiels: null
   projet:
     internationalizationName:
       fr: Projet
@@ -51,32 +51,34 @@ references:
       pattern:
         fr: '{nom_key}'
         en: '{nom_key}'
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      definition_fr:
-      definition_en:
-      colonne_homonyme_entre_referentiels:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      definition_fr: null
+      definition_en: null
+      colonne_homonyme_entre_referentiels: null
   sites:
     validations:
       typeSitesRef:
-        description: "référence au type de site"
+        description: référence au type de site
         checker:
           name: Reference
           params:
             refType: type_de_sites
             columns: tze_type_nom
       siteParentRef:
-        description: "référence à la colonne parent"
+        description: référence à la colonne parent
         checker:
           name: Reference
           params:
             refType: sites
             columns: zet_chemin_parent
-    keyColumns: [zet_chemin_parent,zet_nom_key]
+    keyColumns:
+      - zet_chemin_parent
+      - zet_nom_key
     internationalizationName:
       fr: Site
       en: Site
@@ -92,16 +94,16 @@ references:
         fr: '{zet_nom_key}'
         en: '{zet_nom_key}'
     columns:
-      tze_type_nom:
-      zet_nom_key:
-      zet_nom_fr:
-      zet_nom_en:
-      zet_description_fr:
-      zet_description_en:
-      zet_chemin_parent:
-
+      tze_type_nom: null
+      zet_nom_key: null
+      zet_nom_fr: null
+      zet_nom_en: null
+      zet_description_fr: null
+      zet_description_en: null
+      zet_chemin_parent: null
   themes:
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     internationalizationName:
       fr: Thème
       en: Thematic
@@ -117,14 +119,14 @@ references:
         fr: '{nom_key}'
         en: '{nom_key}'
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      description_fr:
-      description_en:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      description_fr: null
+      description_en: null
   type de fichiers:
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     internationalizationName:
       fr: Types de fichiers
       en: Files types
@@ -140,14 +142,14 @@ references:
         fr: '{nom_key}'
         en: '{nom_key}'
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      description_fr:
-      description_en:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      description_fr: null
+      description_en: null
   type_de_sites:
-    keyColumns: [tze_nom_key]
+    keyColumns:
+      - tze_nom_key
     internationalizationName:
       fr: Types de sites
       en: Sites types
@@ -163,61 +165,69 @@ references:
         fr: '{tze_nom_key}'
         en: '{tze_nom_key}'
     columns:
-      tze_nom_key:
-      tze_nom_fr:
-      tze_nom_en:
-      tze_definition_fr:
-      tze_definition_en:
-
+      tze_nom_key: null
+      tze_nom_fr: null
+      tze_nom_en: null
+      tze_definition_fr: null
+      tze_definition_en: null
   types_de_donnees_par_themes_de_sites_et_projet:
     internationalizationName:
       fr: Types de données par site et projet
       en: Data types by site and project
     internationalizationDisplay:
       pattern:
-        fr: 'nom du projet: {nom du projet}, nom du site : {nom du site}, nom du thème : {nom du thème}, nom du type de données : {nom du type de données} '
-        en: 'projet name: {nom du projet}, site name : {nom du site}, theme name : {nom du thème}, data type name : {nom du type de données} '
+        fr: >-
+          nom du projet: {nom du projet}, nom du site : {nom du site}, nom du
+          thème : {nom du thème}, nom du type de données : {nom du type de
+          données}
+        en: >-
+          projet name: {nom du projet}, site name : {nom du site}, theme name :
+          {nom du thème}, data type name : {nom du type de données}
     validations:
       projetRef:
-        description: "référence au projet"
+        description: référence au projet
         checker:
           name: Reference
           params:
             refType: projet
             columns: nom du projet
       sitesRef:
-        description: "référence au site"
+        description: référence au site
         checker:
           name: Reference
           params:
             refType: sites
             columns: nom du site
       themesRef:
-        description: "référence au theme"
+        description: référence au theme
         checker:
           name: Reference
           params:
             refType: themes
             columns: nom du thème
-
       checkDatatype:
-        description: "test"
+        description: test
         checker:
           name: GroovyExpression
           params:
             groovy:
               expression: >
-                String datatype = Arrays.stream(datum.get("nom du type de données").split("_")).collect{it.substring(0, 1)}.join();
-                return application.getDataType().contains(datatype);
-    keyColumns: ["nom du projet","nom du site","nom du thème","nom du type de données" ]
+                String datatype = Arrays.stream(datum.get("nom du type de
+                données").split("_")).collect{it.substring(0, 1)}.join(); return
+                application.getDataType().contains(datatype);
+    keyColumns:
+      - nom du projet
+      - nom du site
+      - nom du thème
+      - nom du type de données
     columns:
-      nom du projet:
-      nom du site:
-      nom du thème:
-      nom du type de données:
-
+      nom du projet: null
+      nom du site: null
+      nom du thème: null
+      nom du type de données: null
   unites:
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     internationalizationName:
       fr: Unités
       en: Units
@@ -233,15 +243,16 @@ references:
         fr: '{nom_key} ({code_key})'
         en: '{nom_key} ({code_key})'
     columns:
-      code_key:
-      code_fr:
-      code_en:
-      nom_key:
-      nom_fr:
-      nom_en:
-
+      code_key: null
+      code_fr: null
+      code_en: null
+      nom_key: null
+      nom_fr: null
+      nom_en: null
   valeurs_qualitatives:
-    keyColumns: [nom_key,valeur_key]
+    keyColumns:
+      - nom_key
+      - valeur_key
     internationalizationName:
       fr: Valeurs qualitatives
       en: Qualitative values
@@ -257,15 +268,15 @@ references:
         fr: '{valeur_key}'
         en: '{valeur_key}'
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      valeur_key:
-      valeur_fr:
-      valeur_en:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      valeur_key: null
+      valeur_fr: null
+      valeur_en: null
   variables:
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     internationalizationName:
       fr: Variables
       en: Variables
@@ -281,51 +292,56 @@ references:
         fr: '{nom_key}'
         en: '{nom_key}'
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      definition_fr:
-      definition_en:
-      isQualitative:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      definition_fr: null
+      definition_en: null
+      isQualitative: null
   variables_et_unites_par_types_de_donnees:
     validations:
       variableRef:
-        description: "référence à la variable"
+        description: référence à la variable
         checker:
           name: Reference
           params:
             refType: variables
             columns: nom de la variable
       uniteRef:
-        description: "référence à l'unité'"
+        description: référence à l'unité'
         checker:
           name: Reference
           params:
             refType: unites
             columns: nom de l'unité
       checkDatatype:
-        description: "test"
+        description: test
         checker:
           name: GroovyExpression
           params:
             groovy:
               expression: >
-                String datatype = Arrays.stream(datum.get("nom du type de données").split("_")).collect{it.substring(0, 1)}.join();
-                return application.getDataType().contains(datatype);
-    keyColumns: [nom du type de données,nom de la variable]
+                String datatype = Arrays.stream(datum.get("nom du type de
+                données").split("_")).collect{it.substring(0, 1)}.join(); return
+                application.getDataType().contains(datatype);
+    keyColumns:
+      - nom du type de données
+      - nom de la variable
     internationalizationName:
       fr: Variables et unités par type de données
       en: Variables and units by data type
     internationalizationDisplay:
       pattern:
-        fr: "nom du type de données : {nom du type de données}, nom de la variable : {nom de la variable}, : nom de l'unité {nom de l'unité}"
-        en: "datatype name : {nom du type de données}, variable name : {nom de la variable}, : unit name {nom de l'unité}"
+        fr: >-
+          nom du type de données : {nom du type de données}, nom de la variable
+          : {nom de la variable}, : nom de l'unité {nom de l'unité}
+        en: >-
+          datatype name : {nom du type de données}, variable name : {nom de la
+          variable}, : unit name {nom de l'unité}
     columns:
-      nom du type de données:
-      nom de la variable:
-      nom de l'unité:
-
+      nom du type de données: null
+      nom de la variable: null
+      nom de l'unité: null
 dataTypes:
   pem:
     internationalizationName:
@@ -333,9 +349,9 @@ dataTypes:
       en: Trap in ascent
     internationalizationDisplay:
       especes:
-          pattern:
-            fr: 'espèce :{esp_nom}'
-            en: 'espèce :{esp_nom}'
+        pattern:
+          fr: 'espèce :{esp_nom}'
+          en: 'espèce :{esp_nom}'
     repository:
       toto: test
     data:
@@ -361,14 +377,16 @@ dataTypes:
               name: Reference
               params:
                 refType: sites
-          plateforme:
+          plateforme: null
           chemin:
             params:
               references:
-                -  sites
+                - sites
             defaultValue: >
               return references.get("sites")
-              .find{it.getRefValues().get("zet_chemin_parent").equals(datumByVariableAndComponent.get("site").get("bassin")) && it.getRefValues().get("zet_nom_key").equals(datumByVariableAndComponent.get("site").get("plateforme"))}
+              .find{it.getRefValues().get("zet_chemin_parent").equals(datumByVariableAndComponent.get("site").get("bassin"))
+              &&
+              it.getRefValues().get("zet_nom_key").equals(datumByVariableAndComponent.get("site").get("plateforme"))}
               .getHierarchicalKey();
             checker:
               name: Reference
@@ -381,7 +399,7 @@ dataTypes:
               name: Date
               params:
                 pattern: dd/MM/yyyy
-                required:
+                required: null
       espece:
         components:
           value:
@@ -397,91 +415,91 @@ dataTypes:
               params:
                 refType: valeurs_qualitatives
           unit:
-            defaultValue: "return \"sans_unite\""
+            defaultValue: return "sans_unite"
             checker:
               name: Reference
               params:
                 refType: unites
-                required:
+                required: null
       Nombre d'individus:
         components:
           value:
-            defaultValue: "return 0"
+            defaultValue: return 0
             checker:
               name: Integer
               params:
-                required:
+                required: null
           unit:
-            defaultValue: "return \"sans_unite\""
+            defaultValue: return "sans_unite"
             checker:
               name: Reference
               params:
                 refType: unites
-                required:
+                required: null
     validations:
       unitOfColor:
-        description: "vérifie l'unité de la couleur des individus"
+        description: vérifie l'unité de la couleur des individus
         checker:
           name: GroovyExpression
           params:
             groovy:
               expression: >
-                String datatype = "piegeage_en_montee";
-                String variable = "Couleur des individus";
-                String codeVariable = "couleur_des_individus";
-                String component = "unit";
-                return referencesValues.get("variables_et_unites_par_types_de_donnees")
+                String datatype = "piegeage_en_montee"; String variable =
+                "Couleur des individus"; String codeVariable =
+                "couleur_des_individus"; String component = "unit"; return
+                referencesValues.get("variables_et_unites_par_types_de_donnees")
                 .findAll{it.get("nom du type de données").equals(datatype)}
                 .find{it.get("nom de la variable").equals(codeVariable)}
-                .get("nom de l'unité").equals(datum.get(variable).get(component));
+                .get("nom de
+                l'unité").equals(datum.get(variable).get(component));
               references:
                 - variables_et_unites_par_types_de_donnees
       unitOfIndividus:
-        description: "vérifie l'unité du nombre d'individus"
+        description: vérifie l'unité du nombre d'individus
         checker:
           name: GroovyExpression
           params:
             groovy:
               expression: >
-                String datatype = "piegeage_en_montee";
-                String variable = "Nombre d'individus";
-                String codeVariable = "nombre_d_individus";
-                String component = "unit";
-                return referencesValues.get("variables_et_unites_par_types_de_donnees")
+                String datatype = "piegeage_en_montee"; String variable =
+                "Nombre d'individus"; String codeVariable =
+                "nombre_d_individus"; String component = "unit"; return
+                referencesValues.get("variables_et_unites_par_types_de_donnees")
                 .findAll{it.get("nom du type de données").equals(datatype)}
                 .find{it.get("nom de la variable").equals(codeVariable)}
-                .get("nom de l'unité").equals(datum.get(variable).get(component));
+                .get("nom de
+                l'unité").equals(datum.get(variable).get(component));
               references:
                 - variables_et_unites_par_types_de_donnees
     format:
       headerLine: 4
       firstRowLine: 5
       columns:
-        - header: "projet"
+        - header: projet
           boundTo:
             variable: projet
             component: value
-        - header: "site"
+        - header: site
           boundTo:
             variable: site
             component: bassin
-        - header: "plateforme"
+        - header: plateforme
           boundTo:
             variable: site
             component: plateforme
-        - header: "date"
+        - header: date
           boundTo:
             variable: date
             component: value
-        - header: "espece"
+        - header: espece
           boundTo:
             variable: espece
             component: value
-        - header: "Couleur des individus"
+        - header: Couleur des individus
           boundTo:
             variable: Couleur des individus
             component: value
-        - header: "Nombre d'individus"
+        - header: Nombre d'individus
           boundTo:
             variable: Nombre d'individus
             component: value
@@ -507,7 +525,7 @@ dataTypes:
           internationalizationName:
             fr: Référentiels
             en: Repositories
-          label: "Référentiel"
+          label: Référentiel
           data:
             - projet
             - site
@@ -517,13 +535,22 @@ dataTypes:
           internationalizationName:
             fr: Qualitatif
             en: Qualitative
-          label: "Données qualitatives"
+          label: Données qualitatives
           data:
             - Couleur des individus
         quantitatif:
           internationalizationName:
             fr: Quantitatif
             en: Quantitative
-          label: "Données quantitatives"
+          label: Données quantitatives
           data:
-            - Nombre d'individus
\ No newline at end of file
+            - Nombre d'individus
+    uniqueness:
+      - variable: projet
+        component: value
+      - variable: site
+        component: chemin
+      - variable: date
+        component: value
+      - variable: espece
+        component: value
\ No newline at end of file
diff --git a/src/test/resources/data/monsore/monsore.yaml b/src/test/resources/data/monsore/monsore.yaml
index e82a39925..ce613e5b1 100644
--- a/src/test/resources/data/monsore/monsore.yaml
+++ b/src/test/resources/data/monsore/monsore.yaml
@@ -29,13 +29,13 @@ references:
       pattern:
         fr: '{esp_nom}'
         en: '{esp_nom}'
-    keyColumns: [esp_nom]
+    keyColumns:
+      - esp_nom
     columns:
-      esp_nom:
-      esp_definition_fr:
-      esp_definition_en:
-      colonne_homonyme_entre_referentiels:
-
+      esp_nom: null
+      esp_definition_fr: null
+      esp_definition_en: null
+      colonne_homonyme_entre_referentiels: null
   projet:
     internationalizationName:
       fr: Projet
@@ -51,32 +51,34 @@ references:
       pattern:
         fr: '{nom_key}'
         en: '{nom_key}'
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      definition_fr:
-      definition_en:
-      colonne_homonyme_entre_referentiels:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      definition_fr: null
+      definition_en: null
+      colonne_homonyme_entre_referentiels: null
   sites:
     validations:
       typeSitesRef:
-        description: "référence au type de site"
+        description: référence au type de site
         checker:
           name: Reference
           params:
             refType: type_de_sites
             columns: tze_type_nom
       siteParentRef:
-        description: "référence à la colonne parent"
+        description: référence à la colonne parent
         checker:
           name: Reference
           params:
             refType: sites
             columns: zet_chemin_parent
-    keyColumns: [zet_chemin_parent,zet_nom_key]
+    keyColumns:
+      - zet_chemin_parent
+      - zet_nom_key
     internationalizationName:
       fr: Site
       en: Site
@@ -92,16 +94,16 @@ references:
         fr: '{zet_nom_key}'
         en: '{zet_nom_key}'
     columns:
-      tze_type_nom:
-      zet_nom_key:
-      zet_nom_fr:
-      zet_nom_en:
-      zet_description_fr:
-      zet_description_en:
-      zet_chemin_parent:
-
+      tze_type_nom: null
+      zet_nom_key: null
+      zet_nom_fr: null
+      zet_nom_en: null
+      zet_description_fr: null
+      zet_description_en: null
+      zet_chemin_parent: null
   themes:
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     internationalizationName:
       fr: Thème
       en: Thematic
@@ -117,14 +119,14 @@ references:
         fr: '{nom_key}'
         en: '{nom_key}'
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      description_fr:
-      description_en:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      description_fr: null
+      description_en: null
   type de fichiers:
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     internationalizationName:
       fr: Types de fichiers
       en: Files types
@@ -140,14 +142,14 @@ references:
         fr: '{nom_key}'
         en: '{nom_key}'
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      description_fr:
-      description_en:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      description_fr: null
+      description_en: null
   type_de_sites:
-    keyColumns: [tze_nom_key]
+    keyColumns:
+      - tze_nom_key
     internationalizationName:
       fr: Types de sites
       en: Sites types
@@ -163,61 +165,69 @@ references:
         fr: '{tze_nom_key}'
         en: '{tze_nom_key}'
     columns:
-      tze_nom_key:
-      tze_nom_fr:
-      tze_nom_en:
-      tze_definition_fr:
-      tze_definition_en:
-
+      tze_nom_key: null
+      tze_nom_fr: null
+      tze_nom_en: null
+      tze_definition_fr: null
+      tze_definition_en: null
   types_de_donnees_par_themes_de_sites_et_projet:
     internationalizationName:
       fr: Types de données par site et projet
       en: Data types by site and project
     internationalizationDisplay:
       pattern:
-        fr: 'nom du projet: {nom du projet}, nom du site : {nom du site}, nom du thème : {nom du thème}, nom du type de données : {nom du type de données} '
-        en: 'projet name: {nom du projet}, site name : {nom du site}, theme name : {nom du thème}, data type name : {nom du type de données} '
+        fr: >-
+          nom du projet: {nom du projet}, nom du site : {nom du site}, nom du
+          thème : {nom du thème}, nom du type de données : {nom du type de
+          données}
+        en: >-
+          projet name: {nom du projet}, site name : {nom du site}, theme name :
+          {nom du thème}, data type name : {nom du type de données}
     validations:
       projetRef:
-        description: "référence au projet"
+        description: référence au projet
         checker:
           name: Reference
           params:
             refType: projet
             columns: nom du projet
       sitesRef:
-        description: "référence au site"
+        description: référence au site
         checker:
           name: Reference
           params:
             refType: sites
             columns: nom du site
       themesRef:
-        description: "référence au theme"
+        description: référence au theme
         checker:
           name: Reference
           params:
             refType: themes
             columns: nom du thème
-
       checkDatatype:
-        description: "test"
+        description: test
         checker:
           name: GroovyExpression
           params:
             groovy:
               expression: >
-                String datatype = Arrays.stream(datum.get("nom du type de données").split("_")).collect{it.substring(0, 1)}.join();
-                return application.getDataType().contains(datatype);
-    keyColumns: ["nom du projet","nom du site","nom du thème","nom du type de données" ]
+                String datatype = Arrays.stream(datum.get("nom du type de
+                données").split("_")).collect{it.substring(0, 1)}.join(); return
+                application.getDataType().contains(datatype);
+    keyColumns:
+      - nom du projet
+      - nom du site
+      - nom du thème
+      - nom du type de données
     columns:
-      nom du projet:
-      nom du site:
-      nom du thème:
-      nom du type de données:
-
+      nom du projet: null
+      nom du site: null
+      nom du thème: null
+      nom du type de données: null
   unites:
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     internationalizationName:
       fr: Unités
       en: Units
@@ -233,15 +243,16 @@ references:
         fr: '{nom_key} ({code_key})'
         en: '{nom_key} ({code_key})'
     columns:
-      code_key:
-      code_fr:
-      code_en:
-      nom_key:
-      nom_fr:
-      nom_en:
-
+      code_key: null
+      code_fr: null
+      code_en: null
+      nom_key: null
+      nom_fr: null
+      nom_en: null
   valeurs_qualitatives:
-    keyColumns: [nom_key,valeur_key]
+    keyColumns:
+      - nom_key
+      - valeur_key
     internationalizationName:
       fr: Valeurs qualitatives
       en: Qualitative values
@@ -257,15 +268,15 @@ references:
         fr: '{valeur_key}'
         en: '{valeur_key}'
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      valeur_key:
-      valeur_fr:
-      valeur_en:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      valeur_key: null
+      valeur_fr: null
+      valeur_en: null
   variables:
-    keyColumns: [nom_key]
+    keyColumns:
+      - nom_key
     internationalizationName:
       fr: Variables
       en: Variables
@@ -281,51 +292,56 @@ references:
         fr: '{nom_key}'
         en: '{nom_key}'
     columns:
-      nom_key:
-      nom_fr:
-      nom_en:
-      definition_fr:
-      definition_en:
-      isQualitative:
-
+      nom_key: null
+      nom_fr: null
+      nom_en: null
+      definition_fr: null
+      definition_en: null
+      isQualitative: null
   variables_et_unites_par_types_de_donnees:
     validations:
       variableRef:
-        description: "référence à la variable"
+        description: référence à la variable
         checker:
           name: Reference
           params:
             refType: variables
             columns: nom de la variable
       uniteRef:
-        description: "référence à l'unité'"
+        description: référence à l'unité'
         checker:
           name: Reference
           params:
             refType: unites
             columns: nom de l'unité
       checkDatatype:
-        description: "test"
+        description: test
         checker:
           name: GroovyExpression
           params:
             groovy:
               expression: >
-                String datatype = Arrays.stream(datum.get("nom du type de données").split("_")).collect{it.substring(0, 1)}.join();
-                return application.getDataType().contains(datatype);
-    keyColumns: [nom du type de données,nom de la variable]
+                String datatype = Arrays.stream(datum.get("nom du type de
+                données").split("_")).collect{it.substring(0, 1)}.join(); return
+                application.getDataType().contains(datatype);
+    keyColumns:
+      - nom du type de données
+      - nom de la variable
     internationalizationName:
       fr: Variables et unités par type de données
       en: Variables and units by data type
     internationalizationDisplay:
       pattern:
-        fr: "nom du type de données : {nom du type de données}, nom de la variable : {nom de la variable}, : nom de l'unité {nom de l'unité}"
-        en: "datatype name : {nom du type de données}, variable name : {nom de la variable}, : unit name {nom de l'unité}"
+        fr: >-
+          nom du type de données : {nom du type de données}, nom de la variable
+          : {nom de la variable}, : nom de l'unité {nom de l'unité}
+        en: >-
+          datatype name : {nom du type de données}, variable name : {nom de la
+          variable}, : unit name {nom de l'unité}
     columns:
-      nom du type de données:
-      nom de la variable:
-      nom de l'unité:
-
+      nom du type de données: null
+      nom de la variable: null
+      nom de l'unité: null
 dataTypes:
   pem:
     internationalizationName:
@@ -359,14 +375,16 @@ dataTypes:
               name: Reference
               params:
                 refType: sites
-          plateforme:
+          plateforme: null
           chemin:
             params:
               references:
-                -  sites
+                - sites
             defaultValue: >
               return references.get("sites")
-              .find{it.getRefValues().get("zet_chemin_parent").equals(datumByVariableAndComponent.get("site").get("bassin")) && it.getRefValues().get("zet_nom_key").equals(datumByVariableAndComponent.get("site").get("plateforme"))}
+              .find{it.getRefValues().get("zet_chemin_parent").equals(datumByVariableAndComponent.get("site").get("bassin"))
+              &&
+              it.getRefValues().get("zet_nom_key").equals(datumByVariableAndComponent.get("site").get("plateforme"))}
               .getHierarchicalKey();
             checker:
               name: Reference
@@ -379,7 +397,7 @@ dataTypes:
               name: Date
               params:
                 pattern: dd/MM/yyyy
-                required:
+                required: null
       espece:
         components:
           value:
@@ -395,91 +413,91 @@ dataTypes:
               params:
                 refType: valeurs_qualitatives
           unit:
-            defaultValue: "return \"sans_unite\""
+            defaultValue: return "sans_unite"
             checker:
               name: Reference
               params:
                 refType: unites
-                required:
+                required: null
       Nombre d'individus:
         components:
           value:
-            defaultValue: "return 0"
+            defaultValue: return 0
             checker:
               name: Integer
               params:
-                required:
+                required: null
           unit:
-            defaultValue: "return \"sans_unite\""
+            defaultValue: return "sans_unite"
             checker:
               name: Reference
               params:
                 refType: unites
-                required:
+                required: null
     validations:
       unitOfColor:
-        description: "vérifie l'unité de la couleur des individus"
+        description: vérifie l'unité de la couleur des individus
         checker:
           name: GroovyExpression
           params:
             groovy:
               expression: >
-                String datatype = "piegeage_en_montee";
-                String variable = "Couleur des individus";
-                String codeVariable = "couleur_des_individus";
-                String component = "unit";
-                return referencesValues.get("variables_et_unites_par_types_de_donnees")
+                String datatype = "piegeage_en_montee"; String variable =
+                "Couleur des individus"; String codeVariable =
+                "couleur_des_individus"; String component = "unit"; return
+                referencesValues.get("variables_et_unites_par_types_de_donnees")
                 .findAll{it.get("nom du type de données").equals(datatype)}
                 .find{it.get("nom de la variable").equals(codeVariable)}
-                .get("nom de l'unité").equals(datum.get(variable).get(component));
+                .get("nom de
+                l'unité").equals(datum.get(variable).get(component));
               references:
                 - variables_et_unites_par_types_de_donnees
       unitOfIndividus:
-        description: "vérifie l'unité du nombre d'individus"
+        description: vérifie l'unité du nombre d'individus
         checker:
           name: GroovyExpression
           params:
             groovy:
               expression: >
-                String datatype = "piegeage_en_montee";
-                String variable = "Nombre d'individus";
-                String codeVariable = "nombre_d_individus";
-                String component = "unit";
-                return referencesValues.get("variables_et_unites_par_types_de_donnees")
+                String datatype = "piegeage_en_montee"; String variable =
+                "Nombre d'individus"; String codeVariable =
+                "nombre_d_individus"; String component = "unit"; return
+                referencesValues.get("variables_et_unites_par_types_de_donnees")
                 .findAll{it.get("nom du type de données").equals(datatype)}
                 .find{it.get("nom de la variable").equals(codeVariable)}
-                .get("nom de l'unité").equals(datum.get(variable).get(component));
+                .get("nom de
+                l'unité").equals(datum.get(variable).get(component));
               references:
                 - variables_et_unites_par_types_de_donnees
     format:
       headerLine: 4
       firstRowLine: 5
       columns:
-        - header: "projet"
+        - header: projet
           boundTo:
             variable: projet
             component: value
-        - header: "site"
+        - header: site
           boundTo:
             variable: site
             component: bassin
-        - header: "plateforme"
+        - header: plateforme
           boundTo:
             variable: site
             component: plateforme
-        - header: "date"
+        - header: date
           boundTo:
             variable: date
             component: value
-        - header: "espece"
+        - header: espece
           boundTo:
             variable: espece
             component: value
-        - header: "Couleur des individus"
+        - header: Couleur des individus
           boundTo:
             variable: Couleur des individus
             component: value
-        - header: "Nombre d'individus"
+        - header: Nombre d'individus
           boundTo:
             variable: Nombre d'individus
             component: value
@@ -505,7 +523,7 @@ dataTypes:
           internationalizationName:
             fr: Référentiels
             en: Repositories
-          label: "Référentiel"
+          label: Référentiel
           data:
             - projet
             - site
@@ -515,13 +533,22 @@ dataTypes:
           internationalizationName:
             fr: Qualitatif
             en: Qualitative
-          label: "Données qualitatives"
+          label: Données qualitatives
           data:
             - Couleur des individus
         quantitatif:
           internationalizationName:
             fr: Quantitatif
             en: Quantitative
-          label: "Données quantitatives"
+          label: Données quantitatives
           data:
-            - Nombre d'individus
\ No newline at end of file
+            - Nombre d'individus
+    uniqueness:
+      - variable: projet
+        component: value
+      - variable: site
+        component: chemin
+      - variable: date
+        component: value
+      - variable: espece
+        component: value
\ No newline at end of file
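Side note on the uniqueness section added above: each entry pairs a variable with one of its components, and data rows are expected to be unique over the combination of those values (see the duplicatedLineInDatatype message further down). A minimal sketch of how such a composite key could be derived from a parsed row; the names UniquenessEntry and buildUniquenessKey are hypothetical and not the project's actual API:

    import java.util.List;
    import java.util.Map;
    import java.util.stream.Collectors;

    public class UniquenessKeySketch {

        // One entry of the YAML "uniqueness" list: a variable name and one of its components.
        record UniquenessEntry(String variable, String component) {}

        // Builds the composite key of a row over the declared variable/component pairs.
        static String buildUniquenessKey(List<UniquenessEntry> uniqueness,
                                         Map<String, Map<String, String>> row) {
            return uniqueness.stream()
                    .map(e -> row.getOrDefault(e.variable(), Map.of()).get(e.component()))
                    .map(String::valueOf)
                    .collect(Collectors.joining("|"));
        }

        public static void main(String[] args) {
            // Same pairs as the acbb section above: projet/value, site/chemin, date/value, espece/value.
            List<UniquenessEntry> uniqueness = List.of(
                    new UniquenessEntry("projet", "value"),
                    new UniquenessEntry("site", "chemin"),
                    new UniquenessEntry("date", "value"),
                    new UniquenessEntry("espece", "value"));
            Map<String, Map<String, String>> row = Map.of(
                    "projet", Map.of("value", "p1"),
                    "site", Map.of("chemin", "manche/scarff"),
                    "date", Map.of("value", "01/01/2020"),
                    "espece", Map.of("value", "alo"));
            System.out.println(buildUniquenessKey(uniqueness, row)); // p1|manche/scarff|01/01/2020|alo
        }
    }

The pipe separator is only for illustration; two rows are duplicates when every listed component value matches.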
diff --git a/src/test/resources/data/olac/olac.yaml b/src/test/resources/data/olac/olac.yaml
index 57b21f136..22e1c4458 100644
--- a/src/test/resources/data/olac/olac.yaml
+++ b/src/test/resources/data/olac/olac.yaml
@@ -161,6 +161,15 @@ dataTypes:
       en: Collection condition
     repository:
       toto: test
+    uniqueness:
+      - variable: date
+        component: datetime
+      - variable: projet
+        component: value
+      - variable: site
+        component: nom du site
+      - variable: site
+        component: nom de la plateforme
     data:
       date:
         components:
@@ -335,6 +344,21 @@ dataTypes:
     internationalizationName:
       fr: Physico Chimie
       en: Chemical Physics
+    uniqueness:
+      - variable: date
+        component: day
+      - variable: projet
+        component: nom du projet
+      - variable: site
+        component: nom du site
+      - variable: plateforme
+        component: nom de la plateforme
+      - variable: outil
+        component: prélèvement
+      - variable: outil
+        component: mesure
+      - variable: profondeur
+        component: minimum
     data:
       date:
         components:
@@ -343,7 +367,6 @@ dataTypes:
               name: Date
               params:
                 pattern: dd/MM/yyyy
-          time:
       projet:
         components:
           nom du projet:
@@ -623,9 +646,29 @@ dataTypes:
     internationalizationName:
       fr: Données des sondes
       en: Probe data
+    uniqueness:
+      - variable: dates
+        component: datetime
+      - variable: projets
+        component: nom du projet
+      - variable: sites
+        component: nom du site
+      - variable: plateformes
+        component: nom de la plateforme
+      - variable: outils
+        component: mesure
+      - variable: variables
+        component: profondeur réelle observée
     data:
       dates:
         components:
+          datetime:
+            defaultValue: >
+              return datumByVariableAndComponent.dates.day + " " + datumByVariableAndComponent.dates.time
+            checker:
+              name: Date
+              params:
+                pattern: dd/MM/yyyy HH:mm
           day:
             checker:
               name: Date
@@ -819,6 +862,19 @@ dataTypes:
             variable: variables
             component: turbidite
   phytoplancton_aggregated:
+    uniqueness:
+      - variable: dates
+        component: day
+      - variable: projets
+        component: nom du projet
+      - variable: sites
+        component: nom du site
+      - variable: plateformes
+        component: nom de la plateforme
+      - variable: outils
+        component: prélèvement
+      - variable: outils
+        component: mesure
     data:
       dates:
         components:
@@ -827,7 +883,6 @@ dataTypes:
               name: Date
               params:
                 pattern: dd/MM/yyyy
-          time:
       projets:
         components:
           nom du projet:
@@ -922,6 +977,23 @@ dataTypes:
             variable: variables
             component: biovolume_algal
   phytoplancton__truncated:
+    uniqueness:
+      - variable: dates
+        component: day
+      - variable: projets
+        component: nom du projet
+      - variable: sites
+        component: nom du site
+      - variable: plateformes
+        component: nom de la plateforme
+      - variable: outils
+        component: prélèvement
+      - variable: outils
+        component: mesure
+      - variable: profondeurs
+        component: min
+      - variable: variables
+        component: nom du taxon déterminé
     data:
       dates:
         components:
@@ -930,7 +1002,6 @@ dataTypes:
               name: Date
               params:
                 pattern: dd/MM/yyyy
-          time:
       projets:
         components:
           nom du projet:
@@ -1078,6 +1149,25 @@ dataTypes:
             variable: variables
             component: nombre de champs comptés
   zooplancton__truncated:
+    uniqueness:
+      - variable: dates
+        component: day
+      - variable: projets
+        component: nom du projet
+      - variable: sites
+        component: nom du site
+      - variable: plateformes
+        component: nom de la plateforme
+      - variable: outils
+        component: prélèvement
+      - variable: outils
+        component: mesure
+      - variable: profondeurs
+        component: min
+      - variable: profondeurs
+        component: max
+      - variable: variables
+        component: nom du taxon déterminé
     data:
       dates:
         components:
@@ -1086,7 +1176,6 @@ dataTypes:
               name: Date
               params:
                 pattern: dd/MM/yyyy
-          time:
       projets:
         components:
           nom du projet:
@@ -1219,6 +1308,21 @@ dataTypes:
             variable: variables
             component: valeur
   zooplancton_biovolumes:
+    uniqueness:
+      - variable: dates
+        component: day
+      - variable: projets
+        component: nom du projet
+      - variable: sites
+        component: nom du site
+      - variable: plateformes
+        component: nom de la plateforme
+      - variable: outils
+        component: prélèvement
+      - variable: outils
+        component: mesure
+      - variable: profondeurs
+        component: max
     data:
       dates:
         components:
@@ -1227,7 +1331,6 @@ dataTypes:
               name: Date
               params:
                 pattern: dd/MM/yyyy
-          time:
       projets:
         components:
           nom du projet:
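The computed datetime component added above concatenates dates.day and dates.time and is then validated with the pattern dd/MM/yyyy HH:mm. A self-contained check of that combination in plain Java, with made-up values (the real computation is done by the Groovy defaultValue expression):

    import java.time.LocalDateTime;
    import java.time.format.DateTimeFormatter;

    public class DatetimeComponentSketch {
        public static void main(String[] args) {
            String day = "09/03/2022";          // stand-in for datumByVariableAndComponent.dates.day
            String time = "15:10";              // stand-in for datumByVariableAndComponent.dates.time
            String datetime = day + " " + time; // same concatenation as the defaultValue expression
            DateTimeFormatter formatter = DateTimeFormatter.ofPattern("dd/MM/yyyy HH:mm");
            LocalDateTime parsed = LocalDateTime.parse(datetime, formatter);
            System.out.println(parsed);         // 2022-03-09T15:10
        }
    }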
diff --git a/src/test/resources/data/validation/fake-app.yaml b/src/test/resources/data/validation/fake-app.yaml
index 208988dd0..ec8e50634 100644
--- a/src/test/resources/data/validation/fake-app.yaml
+++ b/src/test/resources/data/validation/fake-app.yaml
@@ -108,6 +108,13 @@ dataTypes:
       timeScope:
         variable: date
         component: day
+    uniqueness:
+      - variable: date
+        component: day
+      - variable: date
+        component: time
+      - variable: localization
+        component: site
     data:
       date:
         components:
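fake-app.yaml now also declares uniqueness, over date/day, date/time and localization/site. A rough sketch of how duplicated rows could be detected once each line has been reduced to its uniqueness key; it only illustrates where parameters such as {duplicatedRows} and {uniquenessKey} in the new error messages could come from, and is not the actual upsert or validation code:

    import java.util.ArrayList;
    import java.util.LinkedHashMap;
    import java.util.List;
    import java.util.Map;

    public class DuplicateRowsSketch {

        // Maps each uniqueness key to the (1-based) line numbers that produced it,
        // then keeps only the keys seen more than once.
        static Map<String, List<Integer>> findDuplicatedRows(List<String> uniquenessKeysByLine) {
            Map<String, List<Integer>> linesByKey = new LinkedHashMap<>();
            for (int i = 0; i < uniquenessKeysByLine.size(); i++) {
                linesByKey.computeIfAbsent(uniquenessKeysByLine.get(i), k -> new ArrayList<>()).add(i + 1);
            }
            linesByKey.values().removeIf(lines -> lines.size() < 2);
            return linesByKey;
        }

        public static void main(String[] args) {
            List<String> keys = List.of(
                    "01/01/2020|10:00|site1",
                    "01/01/2020|10:00|site1",   // duplicate of line 1
                    "01/01/2020|11:00|site1");
            // {uniquenessKey} -> {duplicatedRows}, e.g. {01/01/2020|10:00|site1=[1, 2]}
            System.out.println(findDuplicatedRows(keys));
        }
    }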
diff --git a/ui2/src/locales/en.json b/ui2/src/locales/en.json
index 408436409..38858236c 100644
--- a/ui2/src/locales/en.json
+++ b/ui2/src/locales/en.json
@@ -1,251 +1,252 @@
 {
-    "titles":{
-        "login-page":"Welcome to SI-ORE",
-        "applications-page":"My applications",
-        "references-page":"{applicationName} references",
-        "references-data":"{refName} data",
-        "application-creation":"Application creation",
-        "data-types-page":"{applicationName} data types",
-        "data-types-repository": "Management of data sets {applicationName}",
-        "data-type-authorizations":"{dataType} authorizations",
-        "data-type-new-authorization":"New authorization for {dataType}"
-    },
-    "login":{
-        "signin":"Sign in",
-        "signup":"Sign up",
-        "login":"Login",
-        "login-placeholder":"Ex: michel",
-        "pwd":"Password",
-        "pwd-placeholder":"Ex: xxxx",
-        "pwd-forgotten":"Forgotten password ? ",
-        "register":"Register",
-        "confirm-pwd":"Confirmer le mot de passe",
-        "aria-btn-login": "Validate login form button",
-        "aria-btn-signup": "Create an account button"
-    },
-    "validation":{
-        "obligatoire":"Mandatory",
-        "facultatif":"Optional",
-        "invalid-required":"Please fill the field",
-        "invalid-application-name":"The name must only includes lowercase letters.",
-        "invalid-application-name-length":"The name's length should be between 4 and 20 characters.",
-        "invalid-confirmed":"Fields don't match."
-    },
-    "alert":{
-        "cancel":"Cancel",
-        "server-error":"A server error occured",
-        "server-error-appli-exist": "This application's name exist",
-        "user-unknown":"Unknown user",
-        "application-creation-success":"The app has been created!",
-        "application-validate-success":"The YAML file is valid!",
-        "application-edit-success": "The YAML file is update !",
-        "warning":"Warning !",
-        "reference-deletion-msg":"You're about to delete the reference : {label}. Are you sure ?",
-        "delete":"Delete",
-        "reference-csv-upload-error":"An error occured while uploading the csv file",
-        "reference-updated":"Reference updated",
-        "data-updated":"Data type updated",
-        "registered-user":"User registered",
-        "revoke-authorization":"Authorization revoked",
-        "create-authorization":"Authorization created",
-        "dataTypeFiltreEmpty" : "No data matching your criteria"
-    },
-    "message":{
-        "app-config-error":"Error in yaml file",
-        "close":"Close message",
-        "data-type-config-error":"Error in csv file"
-    },
-    "menu":{
-        "logout":"Log out",
-        "applications":"Applications",
-        "references":"References",
-        "french":"Français",
-        "english":"English",
-        "language":"Language",
-        "aria-sub-menu": "Access path",
-        "aria-nav-bar": "Main menu",
-        "aria-pagination": "Pagination",
-        "aria-curent-page": "Curent page",
-        "aria-next-page": "Next page",
-        "aria-previous-page": "Previous page"
-    },
-    "applications":{
-        "chose-config":"Chose a configuration",
-        "create":"Create application",
-        "comment": "Comment : ",
-        "no-comment": "No comment",
-        "test":"Test",
-        "name":"Application name",
-        "name-placeholder":"Ex : olac",
-        "creation-date":"Creation date",
-        "actions":"Actions",
-        "references":"References",
-        "dataset":"Data types",
-        "trier":"Sort by",
-        "trierA_z":"Name A - z",
-        "trierZ_a":"Name Z - a",
-        "trierRecent":"Recent date",
-        "filter":"Filter by",
-        "change": "Edit app",
-        "version" : "The current version of the application <b class=\"has-text-primary\">{applicationName}</b> is <b class=\"has-text-primary\">{version}.</b>"
-    },
-    "errors":{
-        "emptyFile":"File is empty",
-        "missingReferenceForChecker":"For data type <code>{dataType}</code>, datum <code>{datum}</code>, component <code>{component}</code>, you need to one of those references : <code>{references}</code>",
-        "missingReferenceForCheckerInReference":"For reference <code>{reference}</code> and validation <code>{validationKey}</code>, you need to one of those references : <code>{references}</code>",
-        "unknownReferenceForChecker":"For data type <code>{dataType}</code>, datum <code>{datum}</code>, component <code>{component}</code>, the reference <code>{refType}</code> is not in the accepted references which are: <code>{references}</code>",
-        "unknownReferenceForCheckerInReference":"For reference <code>{reference}</code> and validation <code>{validationKey}</code>, the reference <code>{refType}</code> is not in the accepted references which are: <code>{references}</code>",
-        "unsupportedVersion":"YAML files with version <code>{actualVersion}</code> aren't currently managed, expected version : <code>{expectedVersion}</code>",
-        "undeclaredDataGroupForVariable":"Variable <code>{variable}</code> doesn't belong to any data group, it needs to be in one",
-        "variableInMultipleDataGroup":"Variable <code>{variable}</code> is declared in several data groups, it needs to be only in one",
-        "unknownVariablesInDataGroup":"Data group <code>{dataGroup}</code> has undeclared data : <code>{unknownVariables}</code>. <br>Known data : <code>{variables}</code>",
-        "unknownReferenceInCompositereference": "The composite reference <code> {compositeReference} </code> contains references that are not declared <code> {unknownReferences} </code>. Known references <code> {references} </code>" ,
-        "requiredReferenceInCompositeReferenceForParentKeyColumn": "No reference has been declared for the <i>parentKeyColumn</i> <code> {parentKeyColumn} </code> in the composite reference <code> {compositeReference} </code>.",
-        "requiredParentKeyColumnInCompositeReferenceForReference": "In the composite reference <code> {compositeReference} </code> the reference <code> {reference} </code> refers to <code> {referenceTo} </code> but does not declare a < i> parentKeyColumn </i>. ",
-        "missingParentColumnForReferenceInCompositeReference": "In the composite reference <code> {compositeReference} </code> the <i> parentKeyColumn </i> <code> {parentKeyColumn} </code> does not exist in the reference <code> {reference } </code>. ",
-        "missingParentRecursiveKeyColumnForReferenceInCompositeReference": "In the composite reference <code> {compositeReference} </code> the <i> parentRecursiveKey </i> <code> {parentRecursiveKey} </code> does not exist in the reference <code> {reference } </code>. ",
-        "missingTimeScopeVariableComponentKey":"Mandatory indication of the variable (and its component) used for the time period for which we need to attach the data for rights management of data type : <code>{dataType}</code>",
-        "missingReferenceInCompositereference": "All components of the composite reference <code> {compositeReference} </code> must declare a reference.",
-        "timeScopeVariableComponentKeyMissingVariable":"Mandatory indication of the variable in which we collect the time period for which we need to attach the data for rights management of data type : <code>{dataType}</code>. <br>Accepted values : <code>{variables}</code>",
-        "timeScopeVariableComponentKeyUnknownVariable":"<code>{variable}</code> doesn't be along to any of known variables : <code>{knownVariables}</code>",
-        "timeVariableComponentKeyMissingComponent":"Mandatory indication of the component of : <code>{variable}</code> in which we collect the time period for which we need to attach the data for rights management of data type : <code>{dataType}</code>. <br>Accepted values : <code>{knownComponents}</code>",
-        "timeVariableComponentKeyUnknownComponent":"<code>{component}</code> doesn't belong to any of known variables : <code>{variable}</code>. <br>Known components : <code>{knownComponents}</code>",
-        "timeScopeVariableComponentWrongChecker":"The component <code>{component}</code> of variable <code>{variable}</code> can't be used for carrying time information because it's not declared as : <code>{expectedChecker}</code>",
-        "timeScopeVariableComponentPatternUnknown":"The component <code>{component}</code> of variable <code>{variable}</code> can't be used for carrying time information because the date format : <code>{pattern}</code> isn't managed. <br>Accepted formats : <code>{knownPatterns}</code>",
-        "missingAuthorizationForDatatype":"The authorization section must be present in the description of the <code> {datatype} </code> data type",
-        "missingAuthorizationScopeVariableComponentKey":"You must indicate at least one group of variables (and their components) in which (s) we collect the spatial information to be attached to the data for the rights management dataset <code> {dataType } </code> ",
-        "authorizationScopeVariableComponentKeyMissingVariable":"You must indicate the variable in which to collect the spatial information to which to attach the data for the management of the rights dataset <code> {dataType} </code> for the authorization <code> {authorizationName} </code>. Possible values ​​<code> {variables} </code> ",
-        "authorizationScopeVariableComponentKeyUnknownVariable":"<code> {variable} </code> is not one of the known columns <code> {knownVariables} </code>",
-        "authorizationVariableComponentKeyMissingComponent":"You must indicate the component of the variable <code> {variable} </code> in which we collect the spatial information to which to attach the data for the management of rights dataset <code> {dataType} < / code> for authorization <code> {authorizationName} </code>. Possible values ​​<code> {knownComponents} </code> ",
-        "authorizationVariableComponentKeyUnknownComponent":"<code> {component} </code> is not one of the known components for the variable <code> {variable} </code>. Known components: <code> {knownComponents} </code>",
-        "authorizationScopeVariableComponentWrongChecker":"The <code> {component} </code> component of the <code> {variable} </code> variable cannot be used as carrying time information because it is not declared data like <code> {expectedChecker} </code> ",
-        "authorizationScopeVariableComponentReftypeUnknown":"The <code> {refType} </code> reference of the <code> {component} </code> component of the <code> {variable} </code> variable has not been declared. accepted: <code> {knownPatterns} </code> ",
-        "authorizationScopeVariableComponentReftypeNull":"No reference has been defined for the <code> {component} </code> component of the variable <code> {variable} </code>. Accepted references: <code> {knownPatterns} </ code> ",
-        "unrecognizedProperty ":" Error at line <code> {lineNumber} </code> (column <code> {columnNumber} </code>): <code> {unknownPropertyName} </code>, c ' is not a recognized property. Recognized properties are <code> {knownProperties} </code> ",
-        "unrecognizedProperty":"Error in line <code>{lineNumber}</code> (column : <code>{columnNumber}</code>) : <code>{unknownPropertyName}</code>, is not a known property. <br>Known properties : <code>{knownProperties}</code>",
-        "invalidFormat":"Error in line : <code>{lineNumber}</code> (column : <code>{columnNumber}</code>) : <code>{value}</code> doesn't have the right format. <br>Expected format : <code>{targetTypeName}</code>",
-        "missingRequiredExpression":"For the validation rule <code>{lineValidationRuleKey}</code>, you have to write the expression to evaluate in order to verify that the data are following the rules.",
-        "illegalGroovyExpression":"For the validation rule : {lineValidationRuleKey}</code>, the expression : <code>{expression}</code> is not correct. Expression compilation error at line : <code>{compilationError.lineNumber}</code> (column : <code>{compilationError.columnNumber}</code>) message '<code>{compilationError.message}</code>'",
-        "unknownCheckerName":"For the validation rule : <code>{lineValidationRuleKey}</code>, '<code>{checkerName}</code>' is declared but is not a known checker",
-        "unknownCheckerNameForVariableComponentCheckerInReference":"In the reference validation <code> {reference} </code> and the validation rule <code> {validationRuleDescriptionEntryKey} </code>, '<code> {name} </code>' is declared but not a known check: possible values ​​<code> {variableComponentCheckers} </code>. ",
-        "csvBoundToUnknownVariable":"In the CSV format, header <code>{header}</code> is bound to unknown variable <code>{variable}</code>. Known variables: <code>{variables}</code>",
-        "unknownCheckerNameForVariableComponent":"In the description of the data type {datatype} the component <code> {component} </code> of the variable <code> {variable} </code> the value checker <code> {checkerName} </code> is declared but it is not a known control <br /> Known checkers are <code>{variableComponentCheckers}</code>",
-        "missingColumnReferenceForCheckerInReference":"In the reference description {reference} and the validation <code> {validationRuleDescriptionEntryKey} </code> the value checker <code> {checkerName} </code> declares unknown columns <code> {missingColumns } </code>: possible values ​​<code> {knownColumns} </code> ",
-        "missingParamColumnReferenceForCheckerInReference":"In the description of the <code> {reference} </code>, the validation rule <code> {validationRuleDescriptionEntryKey} </code> does not specify on which columns the rule should be executed by declaring a <code> columns </code> parameter ",
-        "csvBoundToUnknownVariableComponent":"In the CSV format, header <code>{header}</code> is bound to <code>{variable}</code> but it has no <code>{component}</code> component. Known components: <code>{components}</code>",
-        "invalidKeyColumns":"In the description of reference <code>{reference}</code>, colomns <code>{unknownUsedAsKeyElementColumns}</code> are declared as components of the key but there are no such columns. Known columns are <code>{knownColumns}</code>",
-        "missingKeyColumnsForReference": "In the description of reference <code>{reference}</code>, you must declare the components (at least one) of the key",
-        "invalidInternationalizedColumns":"In the repository description <code> {reference} </code>, the columns <code> {unknownUsedAsInternationalizedColumns} </code> are declared as part of internationalizable columns when they do not exist. Known columns: <code> {knownColumns} </code> ",
-        "invalidInternationalizedColumnsForDataType": "In the <code> {reference} </code> repository overload of the <code> {dataType} </code> data type description, the <code> {unknownUsedAsInternationalizedColumns} </code> columns are declared as part of internationalizable columns when they do not exist. Known columns: <code> {knownColumns} </code> ",
-        "unknownReferenceInDatatypeReferenceDisplay": "In the repository display overload of the <code> {dataType} </code> datatype description, the <code> {reference} </code> repository is unknown. Known repositories known : <code> {references} </code> ",
-        "unexpectedHeaderColumn":"Unexpected column header. Expected : <code>{expectedHeaderColumn}</code> <br />Actual : <code>{actualHeaderColumn}</code>",
-        "headerColumnPatternNotMatching":"Column header pattern not matching. Pattern to match : <code>{expectedHeaderColumnPattern}</code><br/>Actual header : <code>{actualHeaderColumn}</code>",
-        "unexpectedTokenCount":"Unexpected token count. Expected : <code>{expectedTokenCount}</code><br/>Actual header : <code>{actualHeader}</code> has <code>{actualTokenCount}</code> tokens",
-        "invalidHeaders":"Invalid headers. Expected columns : <code>{expectedColumns}</code><br/>Actual columns : <code>{actualColumns}</code><br/>Missing columns : <code>{missingColumns}</code><br/>Unknown columns : <code>{unknownColumns}</code>",
-        "duplicatedHeaders":"These headers are duplicated : <code>{duplicatedHeaders}</code>",
-        "patternNotMatched": "For the identified component: <code> {target} </code> the value <code> {value} </code> does not respect the expected format: <code> {pattern} </code>. ",
-        "patternNotMatchedWithColumn": "For column: <code> {target} </code> the value <code> {value} </code> does not respect the expected format: <code> {pattern} </code>." ,
-        "invalidDate": "For the identified component: <code> {target} </code> the date <code> {value} </code> does not respect the expected format: <code> {pattern} </code>. ",
-        "invalidDateWithColumn": "For column: <code> {column} </code> the date <code> {value} </code> does not respect the expected format: <code> {pattern} </code>." ,
-        "invalidInteger": "For the identified component: <code> {target} </code> the value <code> {value} </code> must be an integer.",
-        "invalidIntegerWithColumn": "For column: <code> {target} </code> the value <code> {value} </code> must be an integer.",
-        "invalidFloat": "For the identified component: <code> {target} </code> the value <code> {value} </code> must be a decimal number.",
-        "invalidFloatWithColumn": "For column: <code> {target} </code> the value <code> {value} </code> must be a decimal number.",
-        "invalidReference": "For the identified component: <code> {target} </code> the value <code> {value} </code> does not exist in the reference <code> {refType} </code>. Expected values ​​<code> {referenceValues} </code>. ",
-        "invalidReferenceWithColumn": "For column: <code> {target} </code> the value <code> {value} </code> does not exist in the reference <code> {refType} </code>. Values expected <code> {referenceValues} </code>. ",
-        "checkerExpressionReturnedFalse": "The following constraint is not respected: <code> {expression} </code>",
-        "requiredValue": "For the identified component: <code> {target} </code> the value cannot be zero.",
-        "requiredValueWithColumn": "For column: <code> {target} </code> the value cannot be zero.",
-        "timerangeoutofinterval":"The date <code> {value} </code> is incompatible with the date range of the deposit. It must be between <code> {from} </code> and <code> {to} </code>. ",
-        "badauthorizationscopeforrepository":"Authorization <code> {authorization} </code>) must be <code> {expectedValue} / code> and not <code> {givenValue} </code>",
-        "overlappingpublishedversion":"There is a deposited version in the deposit dates have an overlapping period with the deposited version. <code> {overlapingFiles] </code>",
-        "duplicateLineInReference": "In the repository file {file}, line {lineNumber} has the same id {duplicateKey} as lines {otherLines}",
-        "duplicateLineInDatatype": "In data file {file}, line {lineNumber} has the same identifier {duplicateKey} as lines {otherLines}",
-        "missingParentLineInRecursiveReference": "In repository file {references} the id value {missingReferencesKey} for parent does not exist. Accepted values ${knownReferences}"
-    },
-    "referencesManagement":{
-        "actions":"Actions",
-        "consult":"Consult",
-        "download":"Download",
-        "delete":"Delete",
-        "references":"References",
-        "data":"Data"
-    },
-    "dataTypesManagement": {
-        "data-types": "Data types",
-        "consult-authorization": "Consult authorizations",
-        "réinitialiser": "Reset",
-        "all": "all",
-        "filtre": "filter",
-        "tri": "sort",
-        "ASC": "ASC",
-        "DESC": "DESC",
-        "validate": "Validate",
-        "filtered": "Filters used",
-        "sorted": "The sorts used",
-        "title-modal-numeric": "Choice of value range",
-        "title-modal-date": "Choice of date range",
-        "manage-datasets": "Manage datasets"
-    },
-    "dataTypesRepository":  {
-        "card-title-upload-file": "Drop a version on this dataset",
-        "start-date": "Start date",
-        "end-date": "End date",
-        "comment": "Comment",
-        "choose-file": "Choose a file",
-        "placeholder-datepicker": "Type or select a date...",
-        "placeholder-select": "Select...",
-        "submit": "Submit",
-        "list-file-data": "List of datasets on this repository",
-        "list-file-data-period": "List of versions for the period",
-        "table-file-data-id": "ID",
-        "table-file-data-size": "Size",
-        "table-file-data-create": "Create",
-        "table-file-data-create-by": "Create by :",
-        "table-file-data-publish": "Publish",
-        "table-file-data-publish-by": "Publish by :",
-        "table-file-data-publication": "Publication",
-        "table-file-data-delete": "Delete",
-        "table-file-data-period": "Period"
-    },
-    "dataTypeAuthorizations": {
-        "add-auhtorization": "Add an authorization",
-        "sub-menu-data-type-authorizations": "{dataType} authorizations",
-        "sub-menu-new-authorization": "new authorization",
-        "users": "Users",
-        "name": "Name",
-        "roles": "Roles",
-        "users-placeholder": "Chose users to authorize",
-        "data-groups": "Data groups",
-        "data-groups-placeholder": "Chose data groups to authorize",
-        "authorization-scopes": "Authorization scopes",
-        "period": "Authorization period",
-        "from-date": "From date : ",
-        "to-date": "To date : ",
-        "from-date-to-date": "From date to date : ",
-        "always": "Always",
-        "to": "to",
-        "from": "from",
-        "create": "Create authorization",
-        "user": "User",
-        "data-group": "Data group",
-        "data-type": "Data type",
-        "actions": "Actions",
-        "revoke": "Revoke"
-    },
-    "ponctuation": {
-        "semicolon" : ";",
-        "comma" : ",",
-        "period": ".",
-        "colon" : ":",
-        "hyphen" : "-",
-        "plus" : "+",
-        "less" :"<",
-        "greater" : ">",
-        "arrow-right": "->",
-        "arrow-left": "<-",
-        "slash": "/",
-        "regEx": ".*",
-        "star": "*"
-    }
-}
+  "titles": {
+    "login-page": "Welcome to SI-ORE",
+    "applications-page": "My applications",
+    "references-page": "{applicationName} references",
+    "references-data": "{refName} data",
+    "application-creation": "Application creation",
+    "data-types-page": "{applicationName} data types",
+    "data-types-repository": "Management of data sets {applicationName}",
+    "data-type-authorizations": "{dataType} authorizations",
+    "data-type-new-authorization": "New authorization for {dataType}"
+  },
+  "login": {
+    "signin": "Sign in",
+    "signup": "Sign up",
+    "login": "Login",
+    "login-placeholder": "Ex: michel",
+    "pwd": "Password",
+    "pwd-placeholder": "Ex: xxxx",
+    "pwd-forgotten": "Forgotten password ? ",
+    "register": "Register",
+    "confirm-pwd": "Confirmer le mot de passe",
+    "aria-btn-login": "Validate login form button",
+    "aria-btn-signup": "Create an account button"
+  },
+  "validation": {
+    "obligatoire": "Mandatory",
+    "facultatif": "Optional",
+    "invalid-required": "Please fill the field",
+    "invalid-application-name": "The name must only includes lowercase letters.",
+    "invalid-application-name-length": "The name's length should be between 4 and 20 characters.",
+    "invalid-confirmed": "Fields don't match."
+  },
+  "alert": {
+    "cancel": "Cancel",
+    "server-error": "A server error occured",
+    "server-error-appli-exist": "This application's name exist",
+    "user-unknown": "Unknown user",
+    "application-creation-success": "The app has been created!",
+    "application-validate-success": "The YAML file is valid!",
+    "application-edit-success": "The YAML file is update !",
+    "warning": "Warning !",
+    "reference-deletion-msg": "You're about to delete the reference : {label}. Are you sure ?",
+    "delete": "Delete",
+    "reference-csv-upload-error": "An error occured while uploading the csv file",
+    "reference-updated": "Reference updated",
+    "data-updated": "Data type updated",
+    "registered-user": "User registered",
+    "revoke-authorization": "Authorization revoked",
+    "create-authorization": "Authorization created",
+    "dataTypeFiltreEmpty": "No data matching your criteria"
+  },
+  "message": {
+    "app-config-error": "Error in yaml file",
+    "close": "Close message",
+    "data-type-config-error": "Error in csv file"
+  },
+  "menu": {
+    "logout": "Log out",
+    "applications": "Applications",
+    "references": "References",
+    "french": "Français",
+    "english": "English",
+    "language": "Language",
+    "aria-sub-menu": "Access path",
+    "aria-nav-bar": "Main menu",
+    "aria-pagination": "Pagination",
+    "aria-curent-page": "Curent page",
+    "aria-next-page": "Next page",
+    "aria-previous-page": "Previous page"
+  },
+  "applications": {
+    "chose-config": "Chose a configuration",
+    "create": "Create application",
+    "comment": "Comment : ",
+    "no-comment": "No comment",
+    "test": "Test",
+    "name": "Application name",
+    "name-placeholder": "Ex : olac",
+    "creation-date": "Creation date",
+    "actions": "Actions",
+    "references": "References",
+    "dataset": "Data types",
+    "trier": "Sort by",
+    "trierA_z": "Name A - z",
+    "trierZ_a": "Name Z - a",
+    "trierRecent": "Recent date",
+    "filter": "Filter by",
+    "change": "Edit app",
+    "version": "The current version of the application <b class=\"has-text-primary\">{applicationName}</b> is <b class=\"has-text-primary\">{version}.</b>"
+  },
+  "errors": {
+    "emptyFile": "File is empty",
+    "missingReferenceForChecker": "For data type <code>{dataType}</code>, datum <code>{datum}</code>, component <code>{component}</code>, you need to one of those references : <code>{references}</code>",
+    "missingReferenceForCheckerInReference": "For reference <code>{reference}</code> and validation <code>{validationKey}</code>, you need to one of those references : <code>{references}</code>",
+    "unknownReferenceForChecker": "For data type <code>{dataType}</code>, datum <code>{datum}</code>, component <code>{component}</code>, the reference <code>{refType}</code> is not in the accepted references which are: <code>{references}</code>",
+    "unknownReferenceForCheckerInReference": "For reference <code>{reference}</code> and validation <code>{validationKey}</code>, the reference <code>{refType}</code> is not in the accepted references which are: <code>{references}</code>",
+    "unsupportedVersion": "YAML files with version <code>{actualVersion}</code> aren't currently managed, expected version : <code>{expectedVersion}</code>",
+    "undeclaredDataGroupForVariable": "Variable <code>{variable}</code> doesn't belong to any data group, it needs to be in one",
+    "variableInMultipleDataGroup": "Variable <code>{variable}</code> is declared in several data groups, it needs to be only in one",
+    "unknownVariablesInDataGroup": "Data group <code>{dataGroup}</code> has undeclared data : <code>{unknownVariables}</code>. <br>Known data : <code>{variables}</code>",
+    "unknownReferenceInCompositereference": "The composite reference <code> {compositeReference} </code> contains references that are not declared <code> {unknownReferences} </code>. Known references <code> {references} </code>",
+    "requiredReferenceInCompositeReferenceForParentKeyColumn": "No reference has been declared for the <i>parentKeyColumn</i> <code> {parentKeyColumn} </code> in the composite reference <code> {compositeReference} </code>.",
+    "requiredParentKeyColumnInCompositeReferenceForReference": "In the composite reference <code> {compositeReference} </code> the reference <code> {reference} </code> refers to <code> {referenceTo} </code> but does not declare a < i> parentKeyColumn </i>. ",
+    "missingParentColumnForReferenceInCompositeReference": "In the composite reference <code> {compositeReference} </code> the <i> parentKeyColumn </i> <code> {parentKeyColumn} </code> does not exist in the reference <code> {reference } </code>. ",
+    "missingParentRecursiveKeyColumnForReferenceInCompositeReference": "In the composite reference <code> {compositeReference} </code> the <i> parentRecursiveKey </i> <code> {parentRecursiveKey} </code> does not exist in the reference <code> {reference } </code>. ",
+    "missingTimeScopeVariableComponentKey": "Mandatory indication of the variable (and its component) used for the time period for which we need to attach the data for rights management of data type : <code>{dataType}</code>",
+    "missingReferenceInCompositereference": "All components of the composite reference <code> {compositeReference} </code> must declare a reference.",
+    "timeScopeVariableComponentKeyMissingVariable": "Mandatory indication of the variable in which we collect the time period for which we need to attach the data for rights management of data type : <code>{dataType}</code>. <br>Accepted values : <code>{variables}</code>",
+    "timeScopeVariableComponentKeyUnknownVariable": "<code>{variable}</code> doesn't be along to any of known variables : <code>{knownVariables}</code>",
+    "timeVariableComponentKeyMissingComponent": "Mandatory indication of the component of : <code>{variable}</code> in which we collect the time period for which we need to attach the data for rights management of data type : <code>{dataType}</code>. <br>Accepted values : <code>{knownComponents}</code>",
+    "timeVariableComponentKeyUnknownComponent": "<code>{component}</code> doesn't belong to any of known variables : <code>{variable}</code>. <br>Known components : <code>{knownComponents}</code>",
+    "timeScopeVariableComponentWrongChecker": "The component <code>{component}</code> of variable <code>{variable}</code> can't be used for carrying time information because it's not declared as : <code>{expectedChecker}</code>",
+    "timeScopeVariableComponentPatternUnknown": "The component <code>{component}</code> of variable <code>{variable}</code> can't be used for carrying time information because the date format : <code>{pattern}</code> isn't managed. <br>Accepted formats : <code>{knownPatterns}</code>",
+    "missingAuthorizationForDatatype": "The authorization section must be present in the description of the <code> {datatype} </code> data type",
+    "missingAuthorizationScopeVariableComponentKey": "You must indicate at least one group of variables (and their components) in which (s) we collect the spatial information to be attached to the data for the rights management dataset <code> {dataType } </code> ",
+    "authorizationScopeVariableComponentKeyMissingVariable": "You must indicate the variable in which to collect the spatial information to which to attach the data for the management of the rights dataset <code> {dataType} </code> for the authorization <code> {authorizationName} </code>. Possible values ​​<code> {variables} </code> ",
+    "authorizationScopeVariableComponentKeyUnknownVariable": "<code> {variable} </code> is not one of the known columns <code> {knownVariables} </code>",
+    "authorizationVariableComponentKeyMissingComponent": "You must indicate the component of the variable <code> {variable} </code> in which we collect the spatial information to which to attach the data for the management of rights dataset <code> {dataType} < / code> for authorization <code> {authorizationName} </code>. Possible values ​​<code> {knownComponents} </code> ",
+    "authorizationVariableComponentKeyUnknownComponent": "<code> {component} </code> is not one of the known components for the variable <code> {variable} </code>. Known components: <code> {knownComponents} </code>",
+    "authorizationScopeVariableComponentWrongChecker": "The <code> {component} </code> component of the <code> {variable} </code> variable cannot be used as carrying time information because it is not declared data like <code> {expectedChecker} </code> ",
+    "authorizationScopeVariableComponentReftypeUnknown": "The <code> {refType} </code> reference of the <code> {component} </code> component of the <code> {variable} </code> variable has not been declared. accepted: <code> {knownPatterns} </code> ",
+    "authorizationScopeVariableComponentReftypeNull": "No reference has been defined for the <code> {component} </code> component of the variable <code> {variable} </code>. Accepted references: <code> {knownPatterns} </ code> ",
+    "unrecognizedProperty ": " Error at line <code> {lineNumber} </code> (column <code> {columnNumber} </code>): <code> {unknownPropertyName} </code>, c ' is not a recognized property. Recognized properties are <code> {knownProperties} </code> ",
+    "unrecognizedProperty": "Error in line <code>{lineNumber}</code> (column : <code>{columnNumber}</code>) : <code>{unknownPropertyName}</code>, is not a known property. <br>Known properties : <code>{knownProperties}</code>",
+    "invalidFormat": "Error in line : <code>{lineNumber}</code> (column : <code>{columnNumber}</code>) : <code>{value}</code> doesn't have the right format. <br>Expected format : <code>{targetTypeName}</code>",
+    "missingRequiredExpression": "For the validation rule <code>{lineValidationRuleKey}</code>, you have to write the expression to evaluate in order to verify that the data are following the rules.",
+    "illegalGroovyExpression": "For the validation rule : {lineValidationRuleKey}</code>, the expression : <code>{expression}</code> is not correct. Expression compilation error at line : <code>{compilationError.lineNumber}</code> (column : <code>{compilationError.columnNumber}</code>) message '<code>{compilationError.message}</code>'",
+    "unknownCheckerName": "For the validation rule : <code>{lineValidationRuleKey}</code>, '<code>{checkerName}</code>' is declared but is not a known checker",
+    "unknownCheckerNameForVariableComponentCheckerInReference": "In the reference validation <code> {reference} </code> and the validation rule <code> {validationRuleDescriptionEntryKey} </code>, '<code> {name} </code>' is declared but not a known check: possible values ​​<code> {variableComponentCheckers} </code>. ",
+    "csvBoundToUnknownVariable": "In the CSV format, header <code>{header}</code> is bound to unknown variable <code>{variable}</code>. Known variables: <code>{variables}</code>",
+    "unknownCheckerNameForVariableComponent": "In the description of the data type {datatype} the component <code> {component} </code> of the variable <code> {variable} </code> the value checker <code> {checkerName} </code> is declared but it is not a known control <br /> Known checkers are <code>{variableComponentCheckers}</code>",
+    "missingColumnReferenceForCheckerInReference": "In the reference description {reference} and the validation <code> {validationRuleDescriptionEntryKey} </code> the value checker <code> {checkerName} </code> declares unknown columns <code> {missingColumns } </code>: possible values ​​<code> {knownColumns} </code> ",
+    "missingParamColumnReferenceForCheckerInReference": "In the description of the <code> {reference} </code>, the validation rule <code> {validationRuleDescriptionEntryKey} </code> does not specify on which columns the rule should be executed by declaring a <code> columns </code> parameter ",
+    "csvBoundToUnknownVariableComponent": "In the CSV format, header <code>{header}</code> is bound to <code>{variable}</code> but it has no <code>{component}</code> component. Known components: <code>{components}</code>",
+    "invalidKeyColumns": "In the description of reference <code>{reference}</code>, colomns <code>{unknownUsedAsKeyElementColumns}</code> are declared as components of the key but there are no such columns. Known columns are <code>{knownColumns}</code>",
+    "missingKeyColumnsForReference": "In the description of reference <code>{reference}</code>, you must declare the components (at least one) of the key",
+    "invalidInternationalizedColumns": "In the repository description <code> {reference} </code>, the columns <code> {unknownUsedAsInternationalizedColumns} </code> are declared as part of internationalizable columns when they do not exist. Known columns: <code> {knownColumns} </code> ",
+    "invalidInternationalizedColumnsForDataType": "In the <code> {reference} </code> repository overload of the <code> {dataType} </code> data type description, the <code> {unknownUsedAsInternationalizedColumns} </code> columns are declared as part of internationalizable columns when they do not exist. Known columns: <code> {knownColumns} </code> ",
+    "unknownReferenceInDatatypeReferenceDisplay": "In the repository display overload of the <code> {dataType} </code> datatype description, the <code> {reference} </code> repository is unknown. Known repositories known : <code> {references} </code> ",
+    "unexpectedHeaderColumn": "Unexpected column header. Expected : <code>{expectedHeaderColumn}</code> <br />Actual : <code>{actualHeaderColumn}</code>",
+    "headerColumnPatternNotMatching": "Column header pattern not matching. Pattern to match : <code>{expectedHeaderColumnPattern}</code><br/>Actual header : <code>{actualHeaderColumn}</code>",
+    "unexpectedTokenCount": "Unexpected token count. Expected : <code>{expectedTokenCount}</code><br/>Actual header : <code>{actualHeader}</code> has <code>{actualTokenCount}</code> tokens",
+    "invalidHeaders": "Invalid headers. Expected columns : <code>{expectedColumns}</code><br/>Actual columns : <code>{actualColumns}</code><br/>Missing columns : <code>{missingColumns}</code><br/>Unknown columns : <code>{unknownColumns}</code>",
+    "duplicatedHeaders": "These headers are duplicated : <code>{duplicatedHeaders}</code>",
+    "patternNotMatched": "For the identified component: <code> {target} </code> the value <code> {value} </code> does not respect the expected format: <code> {pattern} </code>. ",
+    "patternNotMatchedWithColumn": "For column: <code> {target} </code> the value <code> {value} </code> does not respect the expected format: <code> {pattern} </code>.",
+    "invalidDate": "For the identified component: <code> {target} </code> the date <code> {value} </code> does not respect the expected format: <code> {pattern} </code>. ",
+    "invalidDateWithColumn": "For column: <code> {column} </code> the date <code> {value} </code> does not respect the expected format: <code> {pattern} </code>.",
+    "invalidInteger": "For the identified component: <code> {target} </code> the value <code> {value} </code> must be an integer.",
+    "invalidIntegerWithColumn": "For column: <code> {target} </code> the value <code> {value} </code> must be an integer.",
+    "invalidFloat": "For the identified component: <code> {target} </code> the value <code> {value} </code> must be a decimal number.",
+    "invalidFloatWithColumn": "For column: <code> {target} </code> the value <code> {value} </code> must be a decimal number.",
+    "invalidReference": "For the identified component: <code> {target} </code> the value <code> {value} </code> does not exist in the reference <code> {refType} </code>. Expected values ​​<code> {referenceValues} </code>. ",
+    "invalidReferenceWithColumn": "For column: <code> {target} </code> the value <code> {value} </code> does not exist in the reference <code> {refType} </code>. Values expected <code> {referenceValues} </code>. ",
+    "checkerExpressionReturnedFalse": "The following constraint is not respected: <code> {expression} </code>",
+    "requiredValue": "For the identified component: <code> {target} </code> the value cannot be zero.",
+    "requiredValueWithColumn": "For column: <code> {target} </code> the value cannot be zero.",
+    "timerangeoutofinterval": "The date <code> {value} </code> is incompatible with the date range of the deposit. It must be between <code> {from} </code> and <code> {to} </code>. ",
+    "badauthorizationscopeforrepository": "Authorization <code> {authorization} </code>) must be <code> {expectedValue} / code> and not <code> {givenValue} </code>",
+    "overlappingpublishedversion": "There is a deposited version in the deposit dates have an overlapping period with the deposited version. <code> {overlapingFiles] </code>",
+    "duplicateLineInReference": "In the repository file {file}, line {lineNumber} has the same id {duplicateKey} as lines {otherLines}",
+    "duplicatedLineInDatatype": "In data file {file},  lines {duplicatedRows}  breaks uniqueness constraint for values {uniquenessKey}",
+    "missingParentLineInRecursiveReference": "In repository file {references} the id value {missingReferencesKey} for parent does not exist. Accepted values ${knownReferences}",
+    "unknownUsedAsVariableComponentUniqueness": "In the description of data type {dataType} in the <I>uniqueness</i> section component variable descriptions {unknownUsedAsVariableComponentUniqueness} do not exist. Accepted values {availableVariableComponents}"
+  },
+  "referencesManagement": {
+    "actions": "Actions",
+    "consult": "Consult",
+    "download": "Download",
+    "delete": "Delete",
+    "references": "References",
+    "data": "Data"
+  },
+  "dataTypesManagement": {
+    "data-types": "Data types",
+    "consult-authorization": "Consult authorizations",
+    "réinitialiser": "Reset",
+    "all": "all",
+    "filtre": "filter",
+    "tri": "sort",
+    "ASC": "ASC",
+    "DESC": "DESC",
+    "validate": "Validate",
+    "filtered": "Filters used",
+    "sorted": "The sorts used",
+    "title-modal-numeric": "Choice of value range",
+    "title-modal-date": "Choice of date range",
+    "manage-datasets": "Manage datasets"
+  },
+  "dataTypesRepository": {
+    "card-title-upload-file": "Drop a version on this dataset",
+    "start-date": "Start date",
+    "end-date": "End date",
+    "comment": "Comment",
+    "choose-file": "Choose a file",
+    "placeholder-datepicker": "Type or select a date...",
+    "placeholder-select": "Select...",
+    "submit": "Submit",
+    "list-file-data": "List of datasets on this repository",
+    "list-file-data-period": "List of versions for the period",
+    "table-file-data-id": "ID",
+    "table-file-data-size": "Size",
+    "table-file-data-create": "Create",
+    "table-file-data-create-by": "Create by :",
+    "table-file-data-publish": "Publish",
+    "table-file-data-publish-by": "Publish by :",
+    "table-file-data-publication": "Publication",
+    "table-file-data-delete": "Delete",
+    "table-file-data-period": "Period"
+  },
+  "dataTypeAuthorizations": {
+    "add-auhtorization": "Add an authorization",
+    "sub-menu-data-type-authorizations": "{dataType} authorizations",
+    "sub-menu-new-authorization": "new authorization",
+    "users": "Users",
+    "name": "Name",
+    "roles": "Roles",
+    "users-placeholder": "Chose users to authorize",
+    "data-groups": "Data groups",
+    "data-groups-placeholder": "Chose data groups to authorize",
+    "authorization-scopes": "Authorization scopes",
+    "period": "Authorization period",
+    "from-date": "From date : ",
+    "to-date": "To date : ",
+    "from-date-to-date": "From date to date : ",
+    "always": "Always",
+    "to": "to",
+    "from": "from",
+    "create": "Create authorization",
+    "user": "User",
+    "data-group": "Data group",
+    "data-type": "Data type",
+    "actions": "Actions",
+    "revoke": "Revoke"
+  },
+  "ponctuation": {
+    "semicolon": ";",
+    "comma": ",",
+    "period": ".",
+    "colon": ":",
+    "hyphen": "-",
+    "plus": "+",
+    "less": "<",
+    "greater": ">",
+    "arrow-right": "->",
+    "arrow-left": "<-",
+    "slash": "/",
+    "regEx": ".*",
+    "star": "*"
+  }
+}
\ No newline at end of file
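The two keys added at the end of the errors block, duplicatedLineInDatatype and unknownUsedAsVariableComponentUniqueness, are parameterized with the offending rows/keys and with the unknown versus available variable/component pairs. A rough sketch of the kind of check that could feed the second message; the real check presumably sits in ApplicationConfigurationService.verifyUniquenessComponentKeysInDatatype (see the second patch below), and every name here is illustrative only:

    import java.util.LinkedHashSet;
    import java.util.List;
    import java.util.Map;
    import java.util.Set;

    public class UniquenessSectionCheckSketch {

        // Returns the uniqueness entries (variable/component pairs) that do not match
        // any component declared in the data section of the data type.
        static Set<String> unknownEntries(Map<String, Set<String>> componentsByVariable,
                                          List<Map.Entry<String, String>> uniqueness) {
            Set<String> unknown = new LinkedHashSet<>();
            for (Map.Entry<String, String> entry : uniqueness) {
                Set<String> components = componentsByVariable.getOrDefault(entry.getKey(), Set.of());
                if (!components.contains(entry.getValue())) {
                    unknown.add(entry.getKey() + "|" + entry.getValue());
                }
            }
            return unknown;
        }

        public static void main(String[] args) {
            // Declared data section: variable -> components (shape borrowed from fake-app.yaml).
            Map<String, Set<String>> componentsByVariable = Map.of(
                    "date", Set.of("day", "time"),
                    "localization", Set.of("site"));
            List<Map.Entry<String, String>> uniqueness = List.of(
                    Map.entry("date", "day"),
                    Map.entry("date", "hour"));  // "hour" is not a declared component
            // Would feed {unknownUsedAsVariableComponentUniqueness}; the declared pairs
            // would feed {availableVariableComponents}.
            System.out.println(unknownEntries(componentsByVariable, uniqueness)); // [date|hour]
        }
    }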
diff --git a/ui2/src/locales/fr.json b/ui2/src/locales/fr.json
index 48ea91b1b..ed1ccaf6a 100644
--- a/ui2/src/locales/fr.json
+++ b/ui2/src/locales/fr.json
@@ -160,8 +160,10 @@
         "badauthorizationscopeforrepository": "L'autorisation <code>{authorization}</code>) doit être <code>{expectedValue}/code> et non <code>{givenValue}</code>",
         "overlappingpublishedversion": "Il existe une version déposée dans les dates de dépôt ont une période chevauchante avec la version déposée. <code>{overlapingFiles]</code>",
         "duplicatedLineInReference": "Dans le fichier du référentiel {file}, la ligne {lineNumber} a le même identifiant {duplicateKey} que les lignes {otherLines}",
-        "duplicatedLineInDatatype": "Dans le fichier de données {file}, la ligne {lineNumber} a le même identifiant {duplicateKey} que les lignes {otherLines}",
-        "missingParentLineInRecursiveReference": "Dans le fichier du référentiel {references} la valeur d'identifiant {missingReferencesKey} pour le  parent n'existe pas. Valeurs acceptées {knownReferences}"
+        "duplicatedLineInDatatype": "Dans le fichier de données {file} les lignes {duplicatedRows} rompent la contrainte d'unicité pour les valeurs {}",
+        "missingParentLineInRecursiveReference": "Dans le fichier du référentiel {references} la valeur d'identifiant {missingReferencesKey} pour le  parent n'existe pas. Valeurs acceptées {knownReferences}",
+        "unknownUsedAsVariableComponentUniqueness": "Dans la description du type de données {dataType} dans la section <I>uniqueness</i> les descriptions de variable composants {unknownUsedAsVariableComponentUniqueness} n'existent pas. Valeurs acceptées {availableVariableComponents}"
+
     },
     "referencesManagement": {
         "actions": "Actions",
@@ -247,4 +249,4 @@
         "slash": "/",
         "regEx": ".*"
     }
-}
+}
\ No newline at end of file
-- 
GitLab


From 48bbedaf73e0617c4cd9641cbfa11248360a4122 Mon Sep 17 00:00:00 2001
From: TCHERNIATINSKY <philippe.tcherniatinsky@inrae.fr>
Date: Wed, 9 Mar 2022 15:10:11 +0100
Subject: [PATCH 2/3] nettoyage du code

---
 .../rest/ApplicationConfigurationService.java | 180 ++++++++----------
 1 file changed, 84 insertions(+), 96 deletions(-)

diff --git a/src/main/java/fr/inra/oresing/rest/ApplicationConfigurationService.java b/src/main/java/fr/inra/oresing/rest/ApplicationConfigurationService.java
index 3cc6ded58..b02b9372e 100644
--- a/src/main/java/fr/inra/oresing/rest/ApplicationConfigurationService.java
+++ b/src/main/java/fr/inra/oresing/rest/ApplicationConfigurationService.java
@@ -18,9 +18,7 @@ import fr.inra.oresing.groovy.GroovyExpression;
 import fr.inra.oresing.model.Configuration;
 import fr.inra.oresing.model.LocalDateTimeRange;
 import fr.inra.oresing.model.VariableComponentKey;
-import fr.inra.oresing.model.internationalization.Internationalization;
-import fr.inra.oresing.model.internationalization.InternationalizationDisplay;
-import fr.inra.oresing.model.internationalization.InternationalizationMap;
+import fr.inra.oresing.model.internationalization.*;
 import lombok.Getter;
 import lombok.Setter;
 import lombok.ToString;
@@ -32,14 +30,13 @@ import org.springframework.web.multipart.MultipartFile;
 
 import java.io.IOException;
 import java.util.*;
-import java.util.function.Function;
 import java.util.stream.Collectors;
 import java.util.stream.Stream;
 
 @Component
 @Slf4j
 public class ApplicationConfigurationService {
-    public static final List INTERNATIONALIZED_FIELDS = List.of("internationalization", "internationalizationName", "internationalizedColumns", "internationalizationDisplay");
+    public static final List<String> INTERNATIONALIZED_FIELDS = List.of("internationalization", "internationalizationName", "internationalizedColumns", "internationalizationDisplay");
 
     Map<String, Map> getInternationalizedSections(Map<String, Object> toParse, List<IllegalArgumentException> exceptions) {
         Map<String, Map> parsedMap = new LinkedHashMap<>();
@@ -120,9 +117,6 @@ public class ApplicationConfigurationService {
             Map<String, Object> mappedObject = (Map<String, Object>) mapper.readValue(bytes, Object.class);
             List<IllegalArgumentException> exceptions = List.of();
             internationalizedSections = getInternationalizedSections(mappedObject, exceptions);
-            if (!exceptions.isEmpty()) {
-                return onMappingExceptions(exceptions);
-            }
             try {
                 configuration = mapper.convertValue(mappedObject, Configuration.class);
                 configuration.setInternationalization(mapper.convertValue(internationalizedSections, InternationalizationMap.class));
@@ -160,10 +154,10 @@ public class ApplicationConfigurationService {
         }
 
         for (Map.Entry<String, Configuration.ReferenceDescription> referenceEntry : configuration.getReferences().entrySet()) {
-            verifyReferenceKeyColumns(configuration, builder, referenceEntry);
+            verifyReferenceKeyColumns(builder, referenceEntry);
             verifyInternationalizedColumnsExists(configuration, builder, referenceEntry);
             verifyInternationalizedColumnsExistsForPattern(configuration, builder, referenceEntry);
-            verifyValidationCheckersAreValids(configuration, builder, referenceEntry, references);
+            verifyValidationCheckersAreValids(builder, referenceEntry, references);
         }
 
         for (Map.Entry<String, Configuration.DataTypeDescription> entry : configuration.getDataTypes().entrySet()) {
@@ -172,8 +166,8 @@ public class ApplicationConfigurationService {
             verifyDatatypeCheckersExists(builder, dataTypeDescription, dataType);
             verifyDatatypeCheckerReferenceRefersToExistingReference(builder, references, dataType, dataTypeDescription);
             verifyDatatypeCheckerGroovyExpressionExistsAndCanCompile(builder, dataTypeDescription);
-            verifyInternationalizedColumnsExistsForPatternInDatatype(configuration, builder, dataType, dataTypeDescription);
-            verifyUniquenessComponentKeysInDatatype(configuration, dataType, dataTypeDescription, builder);
+            verifyInternationalizedColumnsExistsForPatternInDatatype(configuration, builder, dataType);
+            verifyUniquenessComponentKeysInDatatype(dataType, dataTypeDescription, builder);
 
             Configuration.AuthorizationDescription authorization = dataTypeDescription.getAuthorization();
             Set<String> variables = dataTypeDescription.getData().keySet();
@@ -207,7 +201,7 @@ public class ApplicationConfigurationService {
         Configuration.CompositeReferenceDescription compositeReferenceDescription = compositeReferenceEntry.getValue();
         Set<String> expectingReferences = compositeReferenceDescription.getComponents()
                 .stream()
-                .map(crd -> crd.getReference())
+                .map(Configuration.CompositeReferenceComponentDescription::getReference)
                 .filter(ref -> {
                     if (ref == null) {
                         builder.recordMissingReferenceInCompositereference(compositeReferenceName);
@@ -307,9 +301,7 @@ public class ApplicationConfigurationService {
             builder.recordMissingAuthorizationScopeVariableComponentKey(dataType);
         } else {
             Configuration.DataTypeDescription dataTypeDescription = configuration.getDataTypes().get(dataType);
-            authorizationScopesVariableComponentKey.entrySet().stream().forEach(authorizationScopeVariableComponentKeyEntry -> {
-                String authorizationScopeName = authorizationScopeVariableComponentKeyEntry.getKey();
-                VariableComponentKey authorizationScopeVariableComponentKey = authorizationScopeVariableComponentKeyEntry.getValue();
+            authorizationScopesVariableComponentKey.forEach((authorizationScopeName, authorizationScopeVariableComponentKey) -> {
                 if (authorizationScopeVariableComponentKey.getVariable() == null) {
                     builder.recordAuthorizationScopeVariableComponentKeyMissingVariable(dataType, authorizationScopeName, variables);
                 } else {
@@ -330,8 +322,11 @@ public class ApplicationConfigurationService {
                                 if (authorizationScopeVariableComponentChecker == null || !"Reference".equals(authorizationScopeVariableComponentChecker.getName())) {
                                     builder.recordAuthorizationScopeVariableComponentWrongChecker(authorizationScopeVariableComponentKey, "Date");
                                 }
-                                String refType = null;
-                                Configuration.CheckerConfigurationDescription checkerConfigurationDescription = authorizationScopeVariableComponentChecker.getParams();
+                                String refType;
+                                Configuration.CheckerConfigurationDescription checkerConfigurationDescription = null;
+                                if (authorizationScopeVariableComponentChecker != null) {
+                                    checkerConfigurationDescription = authorizationScopeVariableComponentChecker.getParams();
+                                }
                                 if (checkerConfigurationDescription == null) {
                                     builder.recordAuthorizationScopeVariableComponentReftypeNull(authorizationScopeVariableComponentKey, configuration.getReferences().keySet());
                                 } else {
@@ -340,9 +335,9 @@ public class ApplicationConfigurationService {
                                         builder.recordAuthorizationScopeVariableComponentReftypeUnknown(authorizationScopeVariableComponentKey, refType, configuration.getReferences().keySet());
                                     } else {
                                         Set<String> compositesReferences = configuration.getCompositeReferences().values().stream()
-                                                .map(e -> e.getComponents())
+                                                .map(Configuration.CompositeReferenceDescription::getComponents)
                                                 .flatMap(List::stream)
-                                                .map(crd -> crd.getReference())
+                                                .map(Configuration.CompositeReferenceComponentDescription::getReference)
                                                 .collect(Collectors.toSet());
                                         if (!compositesReferences.contains(refType)) {
                                             builder.recordAuthorizationVariableComponentMustReferToCompositereference(dataType, authorizationScopeName, refType, compositesReferences);
@@ -379,7 +374,7 @@ public class ApplicationConfigurationService {
                                 builder.recordTimeScopeVariableComponentWrongChecker(timeScopeVariableComponentKey, "Date");
                             }
                             Optional.ofNullable(timeScopeVariableComponentChecker)
-                                    .map(checkerDescription -> checkerDescription.getParams())
+                                    .map(Configuration.CheckerDescription::getParams)
                                     .map(Configuration.CheckerConfigurationDescription::getPattern)
                                     .ifPresent(pattern -> {
                                         if (!LocalDateTimeRange.getKnownPatterns().contains(pattern)) {
@@ -463,7 +458,7 @@ public class ApplicationConfigurationService {
         }
     }
 
-    private void verifyReferenceKeyColumns(Configuration configuration, ConfigurationParsingResult.Builder builder, Map.Entry<String, Configuration.ReferenceDescription> referenceEntry) {
+    private void verifyReferenceKeyColumns(ConfigurationParsingResult.Builder builder, Map.Entry<String, Configuration.ReferenceDescription> referenceEntry) {
         String reference = referenceEntry.getKey();
         Configuration.ReferenceDescription referenceDescription = referenceEntry.getValue();
         List<String> keyColumns = referenceDescription.getKeyColumns();
@@ -482,55 +477,67 @@ public class ApplicationConfigurationService {
     private void verifyInternationalizedColumnsExistsForPattern(Configuration configuration, ConfigurationParsingResult.Builder builder, Map.Entry<String, Configuration.ReferenceDescription> referenceEntry) {
         String reference = referenceEntry.getKey();
         Configuration.ReferenceDescription referenceDescription = referenceEntry.getValue();
-        Set<String> internationalizedColumnsForDisplay = Optional.ofNullable(configuration.getInternationalization())
-                .map(i -> i.getReferences())
-                .map(r -> r.getOrDefault(reference, null))
-                .map(im -> im.getInternationalizationDisplay())
-                .map(ic -> ic.getPattern())
-                .map(patterns -> patterns.values()
-                        .stream()
-                        .map(pattern -> InternationalizationDisplay.getPatternColumns(pattern))
-                        .flatMap(List::stream)
-                        .collect(Collectors.toSet())
-                )
-                .orElseGet(Set::of);
-        Set<String> internationalizedColumns = Optional.ofNullable(configuration.getInternationalization())
-                .map(i -> i.getReferences())
-                .map(r -> r.getOrDefault(reference, null))
-                .map(im -> im.getInternationalizedColumns())
-                .map(ic -> {
-                    Set<String> columns = new LinkedHashSet<>(ic.keySet());
-                    ic.values().stream()
-                            .forEach(v -> columns.addAll(v.values()));
-                    return columns;
-                })
-                .orElse(new HashSet<>());
+        Set<String> internationalizedColumnsForDisplay = Set.of();
+        InternationalizationMap internationalization = configuration.getInternationalization();
+        if (internationalization != null) {
+            Map<String, InternationalizationReferenceMap> references = internationalization.getReferences();
+            if (references != null) {
+                InternationalizationReferenceMap orDefault = references.getOrDefault(reference, null);
+                if (orDefault != null) {
+                    InternationalizationDisplay internationalizationDisplay = orDefault.getInternationalizationDisplay();
+                    if (internationalizationDisplay != null) {
+                        Map<String, String> patterns = internationalizationDisplay.getPattern();
+                        if (patterns != null) {
+                            internationalizedColumnsForDisplay = patterns.values()
+                                    .stream()
+                                    .map(InternationalizationDisplay::getPatternColumns)
+                                    .flatMap(List::stream)
+                                    .collect(Collectors.toSet());
+                        }
+                    }
+                }
+            }
+        }
+        Set<String> internationalizedColumns = getInternationalizedColumns(configuration, reference);
         Set<String> columns = Optional.ofNullable(referenceDescription)
-                .map(r -> r.getColumns())
+                .map(Configuration.ReferenceDescription::getColumns)
                 .map(c -> new LinkedHashSet(c.keySet()))
                 .orElseGet(LinkedHashSet::new);
         columns.addAll(internationalizedColumns);
 
 
-        ImmutableSet<String> internationalizedColumnsSet = ImmutableSet.copyOf(internationalizedColumns);
         ImmutableSet<String> unknownUsedAsInternationalizedColumnsSetColumns = Sets.difference(internationalizedColumnsForDisplay, columns).immutableCopy();
         if (!unknownUsedAsInternationalizedColumnsSetColumns.isEmpty()) {
             builder.recordInvalidInternationalizedColumns(reference, unknownUsedAsInternationalizedColumnsSetColumns, columns);
         }
     }
 
-    private void verifyInternationalizedColumnsExistsForPatternInDatatype(Configuration configuration, ConfigurationParsingResult.Builder builder, String dataType, Configuration.DataTypeDescription dataTypeDescription) {
+    private Set<String> getInternationalizedColumns(Configuration configuration, String reference) {
+        return Optional.ofNullable(configuration.getInternationalization())
+                .map(InternationalizationMap::getReferences)
+                .map(r -> r.getOrDefault(reference, null))
+                .map(InternationalizationReferenceMap::getInternationalizedColumns)
+                .map(ic -> {
+                    Set<String> columns = new LinkedHashSet<>(ic.keySet());
+                    ic.values()
+                            .forEach(v -> columns.addAll(v.values()));
+                    return columns;
+                })
+                .orElse(new HashSet<>());
+    }
+
+    private void verifyInternationalizedColumnsExistsForPatternInDatatype(Configuration configuration, ConfigurationParsingResult.Builder builder, String dataType) {
         Map<String, InternationalizationDisplay> internationalizationDisplayMap = Optional.ofNullable(configuration.getInternationalization())
-                .map(i -> i.getDataTypes())
+                .map(InternationalizationMap::getDataTypes)
                 .map(r -> r.getOrDefault(dataType, null))
-                .map(im -> im.getInternationalizationDisplay())
+                .map(InternationalizationDataTypeMap::getInternationalizationDisplay)
                 .orElseGet(Map::of);
         for (Map.Entry<String, InternationalizationDisplay> internationalizationDisplayEntry : internationalizationDisplayMap.entrySet()) {
             Set<String> internationalizedColumnsForDisplay = Optional.ofNullable(internationalizationDisplayEntry.getValue())
-                    .map(ic -> ic.getPattern())
+                    .map(InternationalizationDisplay::getPattern)
                     .map(patterns -> patterns.values()
                             .stream()
-                            .map(pattern -> InternationalizationDisplay.getPatternColumns(pattern))
+                            .map(InternationalizationDisplay::getPatternColumns)
                             .flatMap(List::stream)
                             .collect(Collectors.toSet())
                     )
@@ -544,26 +551,15 @@ public class ApplicationConfigurationService {
             }
 
 
-            Set<String> internationalizedColumns = Optional.ofNullable(configuration.getInternationalization())
-                    .map(i -> i.getReferences())
-                    .map(r -> r.getOrDefault(reference, null))
-                    .map(im -> im.getInternationalizedColumns())
-                    .map(ic -> {
-                        Set<String> columns = new LinkedHashSet<>(ic.keySet());
-                        ic.values().stream()
-                                .forEach(v -> columns.addAll(v.values()));
-                        return columns;
-                    })
-                    .orElse(new HashSet<>());
+            Set<String> internationalizedColumns = getInternationalizedColumns(configuration, reference);
             Configuration.ReferenceDescription referenceDescription = configuration.getReferences().getOrDefault(reference, null);
-            Set<String> columns = Optional.ofNullable(referenceDescription)
-                    .map(r -> r.getColumns())
+            LinkedHashSet columns = Optional.ofNullable(referenceDescription)
+                    .map(Configuration.ReferenceDescription::getColumns)
                     .map(c -> new LinkedHashSet(c.keySet()))
                     .orElseGet(LinkedHashSet::new);
             columns.addAll(internationalizedColumns);
 
 
-            ImmutableSet<String> internationalizedColumnsSet = ImmutableSet.copyOf(internationalizedColumns);
             ImmutableSet<String> unknownUsedAsInternationalizedColumnsSetColumns = Sets.difference(internationalizedColumnsForDisplay, columns).immutableCopy();
             if (!unknownUsedAsInternationalizedColumnsSetColumns.isEmpty()) {
                 builder.recordInvalidInternationalizedColumnsForDataType(dataType, reference, unknownUsedAsInternationalizedColumnsSetColumns, columns);
@@ -571,16 +567,17 @@ public class ApplicationConfigurationService {
         }
     }
 
-    private void verifyUniquenessComponentKeysInDatatype(Configuration configuration, String dataType, Configuration.DataTypeDescription dataTypeDescription, ConfigurationParsingResult.Builder builder) {
+    private void verifyUniquenessComponentKeysInDatatype(String dataType, Configuration.DataTypeDescription dataTypeDescription, ConfigurationParsingResult.Builder builder) {
         final List<VariableComponentKey> uniqueness = dataTypeDescription.getUniqueness();
         final Set<String> availableVariableComponents = dataTypeDescription.getData().entrySet().stream()
-                .map(entry -> entry.getValue().getComponents().keySet().stream()
+                .flatMap(entry -> entry.getValue().getComponents().keySet().stream()
                         .map(componentName -> new VariableComponentKey(entry.getKey(), componentName).getId()))
-                .flatMap(Function.identity())
                 .collect(Collectors.<String>toSet());
-        Set<String> variableComponentsKeyInUniqueness = uniqueness.stream()
-                .map(variableComponentKey -> variableComponentKey.getId())
-                .collect(Collectors.toSet());
+        Set<String> variableComponentsKeyInUniqueness = new HashSet<>();
+        for (VariableComponentKey variableComponentKey : uniqueness) {
+            String id = variableComponentKey.getId();
+            variableComponentsKeyInUniqueness.add(id);
+        }
         ImmutableSet<String> unknownUsedAsVariableComponentUniqueness = Sets.difference(variableComponentsKeyInUniqueness, availableVariableComponents).immutableCopy();
         if (!unknownUsedAsVariableComponentUniqueness.isEmpty()) {
             builder.recordUnknownUsedAsVariableComponentUniqueness(dataType, unknownUsedAsVariableComponentUniqueness, availableVariableComponents);
@@ -590,20 +587,10 @@ public class ApplicationConfigurationService {
     private void verifyInternationalizedColumnsExists(Configuration configuration, ConfigurationParsingResult.Builder builder, Map.Entry<String, Configuration.ReferenceDescription> referenceEntry) {
         String reference = referenceEntry.getKey();
         Configuration.ReferenceDescription referenceDescription = referenceEntry.getValue();
-        Set<String> internationalizedColumns = Optional.ofNullable(configuration.getInternationalization())
-                .map(i -> i.getReferences())
-                .map(r -> r.getOrDefault(reference, null))
-                .map(im -> im.getInternationalizedColumns())
-                .map(ic -> {
-                    Set<String> columns = new LinkedHashSet<>(ic.keySet());
-                    ic.values().stream()
-                            .forEach(v -> columns.addAll(v.values()));
-                    return columns;
-                })
-                .orElse(new HashSet<>());
+        Set<String> internationalizedColumns = getInternationalizedColumns(configuration, reference);
         Set<String> columns = Optional.ofNullable(referenceDescription)
-                .map(r -> r.getColumns())
-                .map(c -> c.keySet())
+                .map(Configuration.ReferenceDescription::getColumns)
+                .map(LinkedHashMap::keySet)
                 .orElse(new HashSet<>());
 
         ImmutableSet<String> internationalizedColumnsSet = ImmutableSet.copyOf(internationalizedColumns);
@@ -613,7 +600,7 @@ public class ApplicationConfigurationService {
         }
     }
 
-    private void verifyValidationCheckersAreValids(Configuration configuration, ConfigurationParsingResult.Builder builder, Map.Entry<String, Configuration.ReferenceDescription> referenceEntry, Set<String> references) {
+    private void verifyValidationCheckersAreValids(ConfigurationParsingResult.Builder builder, Map.Entry<String, Configuration.ReferenceDescription> referenceEntry, Set<String> references) {
         String reference = referenceEntry.getKey();
         Configuration.ReferenceDescription referenceDescription = referenceEntry.getValue();
         for (Map.Entry<String, Configuration.LineValidationRuleDescription> validationRuleDescriptionEntry : referenceDescription.getValidations().entrySet()) {
@@ -625,15 +612,16 @@ public class ApplicationConfigurationService {
             }
             ImmutableSet<String> variableComponentCheckers = ImmutableSet.of("Date", "Float", "Integer", "RegularExpression", "Reference");
             String columns = checker.getParams().getColumns();
-            Set<String> groovyColumn = Optional.ofNullable(checker)
-                    .map(check -> check.getParams())
-                    .filter(params -> params.getGroovy() != null)
-                    .map(params -> MoreObjects.firstNonNull(params.getColumns(), ""))
-
-                    // autant mettre une collection dans le YAML directement
-                    .map(values -> values.split(","))
-                    .map(values -> Arrays.stream(values).collect(Collectors.toSet()))
-                    .orElse(Set.of());
+            // autant mettre une collection dans le YAML directement
+            Set<String> groovyColumn = Set.of();
+            Configuration.CheckerConfigurationDescription params = checker.getParams();
+            if (params != null && params.getGroovy() != null) {
+                String values = MoreObjects.firstNonNull(params.getColumns(), "");
+                if (values != null) {
+                    String[] split = values.split(",");
+                    groovyColumn = Arrays.stream(split).collect(Collectors.toSet());
+                }
+            }
 
             if (GroovyLineChecker.NAME.equals(checker.getName())) {
                 String expression = Optional.of(checker)
@@ -693,7 +681,7 @@ public class ApplicationConfigurationService {
 
     private ConfigurationParsingResult onMappingExceptions(List<IllegalArgumentException> exceptions) {
         ConfigurationParsingResult.Builder builder = ConfigurationParsingResult.builder();
-        exceptions.stream()
+        exceptions
                 .forEach(exception -> {
                     if (exception.getCause() instanceof UnrecognizedPropertyException) {
                         UnrecognizedPropertyException e = (UnrecognizedPropertyException) exception.getCause();
-- 
GitLab
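
The cleanup above rewrites verifyUniquenessComponentKeysInDatatype as a loop plus a set difference: the ids listed under uniqueness are compared with the ids of every variable/component pair declared for the data type, and the unknown ones are reported. A standalone sketch of that check, using only the JDK and assuming an id of the form variable_component (the real VariableComponentKey.getId() format may differ):

```java
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;

// Sketch: compare the ids referenced by the uniqueness section with the ids of
// every declared variable/component pair, and return the unknown ones.
public class UniquenessSectionCheckSketch {

    static Set<String> unknownUniquenessIds(Map<String, Set<String>> componentsByVariable,
                                            List<String> uniquenessIds) {
        // ids of all declared variable components, e.g. "date_value"
        Set<String> availableIds = new LinkedHashSet<>();
        componentsByVariable.forEach((variable, components) ->
                components.forEach(component -> availableIds.add(variable + "_" + component)));
        // set difference: referenced by uniqueness but not declared in the data type
        Set<String> unknown = new LinkedHashSet<>(uniquenessIds);
        unknown.removeAll(availableIds);
        return unknown;
    }

    public static void main(String[] args) {
        Map<String, Set<String>> dataType = Map.of(
                "date", Set.of("value"),
                "site", Set.of("chemin"));
        System.out.println(unknownUniquenessIds(dataType, List.of("date_value", "espece_value")));
        // prints [espece_value]: it would be recorded as unknownUsedAsVariableComponentUniqueness
    }
}
```
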


From 3045a7f1cb2680fb24e233822a59171bc6be85e1 Mon Sep 17 00:00:00 2001
From: TCHERNIATINSKY <philippe.tcherniatinsky@inrae.fr>
Date: Wed, 9 Mar 2022 16:31:41 +0100
Subject: [PATCH 3/3] Modification de la documentation

---
 Documentation_fichier_Yaml.md | 20 ++++++++------------
 1 file changed, 8 insertions(+), 12 deletions(-)

diff --git a/Documentation_fichier_Yaml.md b/Documentation_fichier_Yaml.md
index 410f4fb07..7afc3036c 100644
--- a/Documentation_fichier_Yaml.md
+++ b/Documentation_fichier_Yaml.md
@@ -493,9 +493,11 @@ La partie validation peut être utilisée pour vérifier le contenu d'une colonn
 ##### Déclaration des contraintes d'unicité
 Il s'agit de déclarer comment une ligne d'un fichier s'exprime de manière unique (contrainte d'unicité au sens de la base de données)
 
-Il peut y avoir plusieurs contraintes d'unicité. Il suffit de déclarer chaque contrainte avec un nom dans la section _uniqueness_. Pour chacune des contraintes, on liste la liste des _variable components_ qui composent la clef.
+Il ne peut y avoir qu'une seule contrainte d'unicité. Il suffit de déclarer la contrainte dans la section _uniqueness_, en listant les _variable components_ qui composent la clef.
 
-Si un fichier possède des lignes en doublon soit avec lui-même, soit avec des lignes déjà enregistrées en base, il sera rejeté.
+Si un fichier possède des lignes en doublon avec lui-même, il sera rejeté.
+
+Si une ligne possède la même clef qu'une ligne de la base de données, la ligne en base sera mise à jour.
 
 Les contraintes ne s'appliquent que pour les fichiers d'un même type de données.
 
@@ -505,17 +507,11 @@ Exemple de déclaration de deux contraintes portant respectivement sur 3 et 2 va
 dataTypes:
   mon_datatype:
     uniqueness:
-      uk1:
-        - variable: projet
-          component: value
-        - variable: site
-          component: chemin
-        - variable: date
-          component: value
-      uk2:
-        - variable: espece
+      - variable: projet
         component: value
-        - variable: Couleur des individus
+      - variable: site
+        component: chemin
+      - variable: date
         component: value
 
 ``` 
-- 
GitLab
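
The revised documentation settles the uniqueness semantics: one constraint per data type, in-file duplicates cause the whole file to be rejected, and a line whose key already exists in the database updates the stored line. A minimal sketch of that upsert behaviour, assuming an in-memory store keyed by the composed uniqueness key; the names are illustrative and do not reflect the project's persistence layer.

```java
import java.util.HashMap;
import java.util.LinkedHashMap;
import java.util.List;
import java.util.Map;

// Sketch: reject a file that repeats a uniqueness key, otherwise upsert its rows,
// overwriting any stored row that already carries the same key.
public class UniquenessUpsertSketch {

    // stands in for the database table, keyed by the composed uniqueness key
    private final Map<String, Map<String, String>> store = new HashMap<>();

    void importFile(List<Map<String, String>> rows, List<String> uniquenessColumns) {
        Map<String, Map<String, String>> rowsByKey = new LinkedHashMap<>();
        for (Map<String, String> row : rows) {
            StringBuilder key = new StringBuilder();
            for (String column : uniquenessColumns) {
                key.append(row.get(column)).append('|');
            }
            if (rowsByKey.put(key.toString(), row) != null) {
                // duplicate inside the file itself: the whole file is rejected
                throw new IllegalArgumentException("duplicated line for key " + key);
            }
        }
        // no in-file duplicate: keys already stored are simply updated
        store.putAll(rowsByKey);
    }

    public static void main(String[] args) {
        UniquenessUpsertSketch sketch = new UniquenessUpsertSketch();
        Map<String, String> row = Map.of("projet_value", "p1", "date_value", "2022-01-01");
        sketch.importFile(List.of(row), List.of("projet_value", "date_value"));
        sketch.importFile(List.of(row), List.of("projet_value", "date_value")); // same key: update, no error
        System.out.println(sketch.store.size()); // prints 1
    }
}
```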