diff --git a/src/main/java/fr/abes/logskbart/configuration/KafkaConfig.java b/src/main/java/fr/abes/logskbart/configuration/KafkaConfig.java
index a457358..bbd95b3 100644
--- a/src/main/java/fr/abes/logskbart/configuration/KafkaConfig.java
+++ b/src/main/java/fr/abes/logskbart/configuration/KafkaConfig.java
@@ -13,24 +13,25 @@
 import java.sql.Timestamp;
 import java.util.HashMap;
 import java.util.Map;
+import java.util.UUID;
 
 @Configuration
 @EnableKafka
 public class KafkaConfig {
 
-    @Value("${spring.kafka.consumer.bootstrap-servers}")
+    @Value("${abes.kafka.bootstrap-servers}")
     private String bootstrapAddress;
 
-    @Value("${topic.groupid.source}")
-    private String groupId;
-
     @Bean
     public ConsumerFactory consumerLogsFactory() {
         Map props = new HashMap<>();
         props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapAddress);
-        props.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
+        props.put(ConsumerConfig.GROUP_INSTANCE_ID_CONFIG,("SchedulerCoordinator"+ UUID.randomUUID()));
         props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
         props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+        props.put(ConsumerConfig.ENABLE_AUTO_COMMIT_CONFIG, true);
+        props.put(ConsumerConfig.MAX_POLL_RECORDS_CONFIG, 10);
+        props.put(ConsumerConfig.MAX_POLL_INTERVAL_MS_CONFIG, 60000);
         return new DefaultKafkaConsumerFactory<>(props);
     }
 
diff --git a/src/main/java/fr/abes/logskbart/service/EmailService.java b/src/main/java/fr/abes/logskbart/service/EmailService.java
index 80e241f..e4354ba 100644
--- a/src/main/java/fr/abes/logskbart/service/EmailService.java
+++ b/src/main/java/fr/abes/logskbart/service/EmailService.java
@@ -41,6 +41,8 @@ public void sendEmail(String packageName, String message) {
 
         // Envoi du message par mail
         sendMail(requestJson);
+
+        log.info("L'email a été correctement envoyé à " + recipient);
     }
 
     public void sendMailWithAttachment(String packageName, Path mailAttachmentPath) {
@@ -57,7 +59,7 @@ public void sendMailWithAttachment(String packageName, Path mailAttachmentPath)
 
             // Suppression du fichier temporaire
             Files.deleteIfExists(mailAttachmentPath);
-            log.info("L'email a été correctement envoyé à " + recipient);
+            log.info("L'email avec PJ a été correctement envoyé à " + recipient);
 
         } catch (IOException e) {
             throw new RuntimeException(e);
diff --git a/src/main/resources/application-dev.properties b/src/main/resources/application-dev.properties
index fb3edd2..80a0b60 100644
--- a/src/main/resources/application-dev.properties
+++ b/src/main/resources/application-dev.properties
@@ -1,5 +1,5 @@
 # Consumer properties
-spring.kafka.consumer.bootstrap-servers=
+abes.kafka.bootstrap-servers=
 abes.kafka.concurrency.nbThread=
 # Properties defined from .env on server
 #ignore resolution error
diff --git a/src/main/resources/application-prod.properties b/src/main/resources/application-prod.properties
index 34b174a..315f249 100644
--- a/src/main/resources/application-prod.properties
+++ b/src/main/resources/application-prod.properties
@@ -1,5 +1,5 @@
 # Consumer properties
-spring.kafka.consumer.bootstrap-servers=
+abes.kafka.bootstrap-servers=
 abes.kafka.concurrency.nbThread=
 
 # Base Postgres
diff --git a/src/main/resources/application-test.properties b/src/main/resources/application-test.properties
index 4e98d54..0409db2 100644
--- a/src/main/resources/application-test.properties
+++ b/src/main/resources/application-test.properties
@@ -1,5 +1,5 @@
 # Consumer properties
-spring.kafka.consumer.bootstrap-servers=
+abes.kafka.bootstrap-servers=
 abes.kafka.concurrency.nbThread=
 # Properties defined from .env on server
 #ignore resolution error
diff --git a/src/main/resources/application.properties b/src/main/resources/application.properties
index fbf49b5..5eea022 100644
--- a/src/main/resources/application.properties
+++ b/src/main/resources/application.properties
@@ -14,11 +14,6 @@
 server.port=8082
 # Configuration des logs
 log4j2.logdir=logs
-# Common Kafka Properties
-spring.kafka.consumer.key-serializer=org.apache.kafka.common.serialization.StringSerializer
-spring.kafka.consumer.value-serializer=org.apache.kafka.common.serialization.StringSerializer
-spring.kafka.consumer.max-poll-records=10000
-
 # Topic Kafka
 topic.name.source.error=bacon.logs.toload
 # Pour la partie consumer : insertion dans kafka de la ligne kbart avec bestppn