Merge branch 'feature/SUPPORT-9115_fix_directories' into develop
commit e4dc36cb59
6 changed files with 84 additions and 56 deletions

@@ -14,13 +14,35 @@ public class ErvuDirectoriesListener {
     @Autowired
     private ErvuDirectoriesService ervuDirectoriesService;

-    @KafkaListener(id = "${kafka.domain.group.id}", topics = "${kafka.domain.reconciliation}")
+    @KafkaListener(id = "${kafka.domain.group.id}", topics = "${kafka.domain.reconciliation}",
+            autoStartup = "false")
     public void listenKafkaDomain(String kafkaMessage) {
         ervuDirectoriesService.upsertKafkaDomainMessage(kafkaMessage);
     }

-    @KafkaListener(id = "${kafka.role.group.id}", topics = "${kafka.role.reconciliation}")
+    @KafkaListener(id = "${kafka.role.group.id}", topics = "${kafka.role.reconciliation}",
+            autoStartup = "false")
     public void listenKafkaRole(String kafkaMessage) {
         ervuDirectoriesService.upsertKafkaRoleMessage(kafkaMessage);
     }
+    // Not set up yet; promised to be created in future updates
+    // @KafkaListener(id = "${kafka.role.updated.group.id}", topics = "${kafka.role.updated}")
+    // public void listenKafkaRoleUpdated(String kafkaMessage) {
+    //     ervuDirectoriesService.upsertKafkaRoleMessage(kafkaMessage);
+    // }
+
+    // @KafkaListener(id = "${kafka.role.created.group.id}", topics = "${kafka.role.created}")
+    // public void listenKafkaRoleCreated(String kafkaMessage) {
+    //     ervuDirectoriesService.upsertKafkaRoleMessage(kafkaMessage);
+    // }
+
+    @KafkaListener(id = "${kafka.domain.updated.group.id}", topics = "${kafka.domain.updated}")
+    public void listenKafkaDomainUpdated(String kafkaMessage) {
+        ervuDirectoriesService.upsertKafkaDomainMessage(kafkaMessage);
+    }
+
+    @KafkaListener(id = "${kafka.domain.created.group.id}", topics = "${kafka.domain.created}")
+    public void listenKafkaDomainCreated(String kafkaMessage) {
+        ervuDirectoriesService.upsertKafkaDomainMessage(kafkaMessage);
+    }
 }

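The reconciliation listeners above are now declared with autoStartup = "false", so their containers are created but left stopped until the new KafkaConsumerInitializer (further down in this diff) starts them through the KafkaListenerEndpointRegistry. The new listenKafkaDomainUpdated and listenKafkaDomainCreated handlers both delegate to the same upsertKafkaDomainMessage call, so a single listener subscribed to both topics would route the messages to the same code; the sketch below is an illustration only and is not part of the commit, and the kafka.domain.events.group.id property it uses is hypothetical.

// Illustration only: one listener subscribed to both domain topics, equivalent
// in handling to the two separate methods above because both call
// upsertKafkaDomainMessage(). The group id property here is hypothetical.
@KafkaListener(id = "${kafka.domain.events.group.id}",
        topics = {"${kafka.domain.updated}", "${kafka.domain.created}"})
public void listenKafkaDomainEvent(String kafkaMessage) {
    ervuDirectoriesService.upsertKafkaDomainMessage(kafkaMessage);
}
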
@@ -12,9 +12,9 @@ import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
 import org.springframework.kafka.annotation.EnableKafka;
 import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
 import org.springframework.kafka.core.ConsumerFactory;
 import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
 import org.springframework.kafka.support.serializer.JsonDeserializer;

 /**
  * @author Eduard Tihomirov

@@ -40,6 +40,11 @@ public class KafkaConfig {
         return new DefaultKafkaConsumerFactory<>(consumerConfigs());
     }

+    @Bean
+    public KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry() {
+        return new KafkaListenerEndpointRegistry();
+    }
+
     @Bean
     public Map<String, Object> consumerConfigs() {
         Map<String, Object> props = new HashMap<>();

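The kafkaListenerEndpointRegistry bean added here is what the new KafkaConsumerInitializer injects in order to start the idle containers. Worth noting: when @EnableKafka is active, spring-kafka typically maintains its own KafkaListenerEndpointRegistry under an internal bean name and registers the @KafkaListener containers with that instance, so a registry instantiated manually in configuration may not be the one holding those containers. Below is a minimal sketch, not part of the commit, that relies only on the framework-managed registry, assuming @EnableKafka is present and no second registry bean is declared.

// Sketch only: inject the framework-managed registry and start every container
// that was registered with autoStartup = "false".
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.listener.MessageListenerContainer;
import org.springframework.stereotype.Component;

@Component
public class IdleListenerStarter {

    private final KafkaListenerEndpointRegistry registry;

    public IdleListenerStarter(KafkaListenerEndpointRegistry registry) {
        this.registry = registry;
    }

    public void startIdleContainers() {
        registry.getListenerContainers().stream()
                .filter(container -> !container.isRunning())
                .forEach(MessageListenerContainer::start);
    }
}
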
@@ -0,0 +1,34 @@
+package ru.micord.ervu.account_applications.kafka;
+
+import org.springframework.beans.factory.annotation.Value;
+import org.springframework.context.annotation.DependsOn;
+import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
+import org.springframework.kafka.listener.MessageListenerContainer;
+import org.springframework.stereotype.Component;
+import ru.micord.ervu.account_applications.service.ErvuDirectoriesService;
+
+/**
+ * @author Eduard Tihomirov
+ */
+@Component
+@DependsOn("ervuDirectoriesListener")
+public class KafkaConsumerInitializer {
+    @Value("${kafka.domain.group.id}")
+    private String domainGroupId;
+
+    @Value("${kafka.role.group.id}")
+    private String roleGroupId;
+
+    @Value("${load.directories:true}")
+    private Boolean loadDirectories;
+
+    public KafkaConsumerInitializer(KafkaListenerEndpointRegistry kafkaListenerEndpointRegistry, ErvuDirectoriesService ervuDirectoriesService) {
+        MessageListenerContainer listenerContainerDomain = kafkaListenerEndpointRegistry.getListenerContainer(domainGroupId);
+        MessageListenerContainer listenerContainerRole = kafkaListenerEndpointRegistry.getListenerContainer(roleGroupId);
+        listenerContainerDomain.start();
+        listenerContainerRole.start();
+        if (loadDirectories) {
+            new Thread(ervuDirectoriesService::updateDirectories).start();
+        }
+    }
+}

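Two timing details around the constructor above are worth keeping in mind: field injection of the @Value properties happens only after the constructor has finished, so domainGroupId, roleGroupId and loadDirectories have not been populated yet when getListenerContainer(...) is called, and the containers behind @KafkaListener endpoints are generally registered with the registry only after all singletons have been created, which is also after this constructor runs. A minimal sketch of a variant that takes the values as constructor parameters and defers the start until the context is refreshed is shown below; the class and property names follow the file above, everything else (including the assumption of a single registry bean) is an assumption, not the commit's code.

package ru.micord.ervu.account_applications.kafka;

import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.event.ContextRefreshedEvent;
import org.springframework.context.event.EventListener;
import org.springframework.kafka.config.KafkaListenerEndpointRegistry;
import org.springframework.kafka.listener.MessageListenerContainer;
import org.springframework.stereotype.Component;

import ru.micord.ervu.account_applications.service.ErvuDirectoriesService;

/**
 * Sketch only: same responsibilities as KafkaConsumerInitializer, but the
 * property values arrive as constructor parameters (already resolved before
 * use) and the containers are started once the context has been refreshed.
 */
@Component
public class KafkaConsumerStarterSketch {

    private final KafkaListenerEndpointRegistry registry;
    private final ErvuDirectoriesService ervuDirectoriesService;
    private final String domainGroupId;
    private final String roleGroupId;
    private final boolean loadDirectories;

    public KafkaConsumerStarterSketch(KafkaListenerEndpointRegistry registry,
            ErvuDirectoriesService ervuDirectoriesService,
            @Value("${kafka.domain.group.id}") String domainGroupId,
            @Value("${kafka.role.group.id}") String roleGroupId,
            @Value("${load.directories:true}") boolean loadDirectories) {
        this.registry = registry;
        this.ervuDirectoriesService = ervuDirectoriesService;
        this.domainGroupId = domainGroupId;
        this.roleGroupId = roleGroupId;
        this.loadDirectories = loadDirectories;
    }

    @EventListener(ContextRefreshedEvent.class)
    public void startListeners() {
        // The container id is the resolved @KafkaListener id, here the group id values.
        MessageListenerContainer domainContainer = registry.getListenerContainer(domainGroupId);
        MessageListenerContainer roleContainer = registry.getListenerContainer(roleGroupId);
        if (domainContainer != null) {
            domainContainer.start();
        }
        if (roleContainer != null) {
            roleContainer.start();
        }
        if (loadDirectories) {
            new Thread(ervuDirectoriesService::updateDirectories).start();
        }
    }
}
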
@@ -7,6 +7,7 @@ import java.util.ArrayList;
 import java.util.Arrays;
 import java.util.List;
 import java.util.Map;
+import java.util.Optional;
 import java.util.UUID;

 import com.fasterxml.jackson.core.JsonProcessingException;

@@ -34,14 +35,16 @@ import ru.micord.ervu.account_applications.model.RoleResponse;
  * @author Eduard Tihomirov
  */
 @Service
-@DependsOn({"liquibase", "ervuDirectoriesListener"})
+@DependsOn("liquibase")
 public class ErvuDirectoriesService {
     private static final Logger LOGGER = LoggerFactory.getLogger(
             MethodHandles.lookup().lookupClass());
     @Value("${idm.url}")
     private String idmUrl;
-    @Value("${ervu.collection:domain, role}")
-    private String ervuCollection;
+    @Value("${ervu.directories:domain, role}")
+    private String ervuDirectories;
+    @Value("${ervu.admin.role:gomu_supervisor, system_administrator, security_administrator, Responsible_for_internal_control}")
+    private String ervuAdminRole;
     @Autowired
     private RestTemplate restTemplate;
     @Autowired

@@ -56,9 +59,9 @@ public class ErvuDirectoriesService {
     })
     public void updateDirectories() {
         try {
-            String[] ervuCollectionArray = ervuCollection.split(",");
-            Arrays.stream(ervuCollectionArray).forEach(ervuCollection -> {
-                String targetUrl = idmUrl + "/reconcile/"+ ervuCollection + "/to/kafka/v1";
+            String[] ervuDirectoriesArray = ervuDirectories.split(",");
+            Arrays.stream(ervuDirectoriesArray).forEach(ervuCollection -> {
+                String targetUrl = idmUrl + "/reconcile/"+ ervuCollection.trim() + "/to/kafka/v1";
                 HttpHeaders headers = new HttpHeaders();
                 headers.setContentType(MediaType.APPLICATION_JSON);
                 String emptyJson = "{}";

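The reworked loop splits the ervu.directories property and trims each entry before building the reconcile URL, which matters because the default value "domain, role" carries a space after the comma. A small self-contained illustration using only the defaults visible above; the idmUrl value is an assumption that mirrors IDM_URL from the environment file further down.

// Demonstrates the effect of trim(): without it the second URL would contain
// a literal space ("/reconcile/ role/..."). Values are the defaults shown above.
public class ReconcileUrlDemo {
    public static void main(String[] args) {
        String idmUrl = "http://idm";              // assumption: matches IDM_URL
        String ervuDirectories = "domain, role";   // default from the @Value above
        for (String directory : ervuDirectories.split(",")) {
            String targetUrl = idmUrl + "/reconcile/" + directory.trim() + "/to/kafka/v1";
            System.out.println(targetUrl);
            // http://idm/reconcile/domain/to/kafka/v1
            // http://idm/reconcile/role/to/kafka/v1
        }
    }
}
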
@@ -163,6 +166,7 @@ public class ErvuDirectoriesService {
     }

     private void upsertRoleData(List<RoleResponse.Data> dataList) {
+        String[] adminRoles = ervuAdminRole.split(",");
         List<UserApplicationRoleRecord> newRoleRecords = new ArrayList<>();
         List<UserApplicationRoleRecord> roleRecords = new ArrayList<>();
         List<String> ids = ervuDirectoriesDaoService.getRoleIds();

@@ -183,6 +187,12 @@ public class ErvuDirectoriesService {
             roleRecord.setCreated(createdAt);
             roleRecord.setUpdated(updatedAt);
             roleRecord.setFinished(finishAt);
+            Optional<String> adminRoleOptional = Arrays.stream(adminRoles)
+                    .filter(role -> role.trim().equals(data.getName()))
+                    .findAny();
+            if (adminRoleOptional.isPresent()) {
+                roleRecord.setAdminRole(true);
+            }
             if (ids.contains(data.getId())) {
                 roleRecords.add(roleRecord);
             }

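The block added above marks a role record as an admin role when its name matches one of the comma-separated entries in ervu.admin.role, again trimming each entry. Purely as an illustration, the same check can be written with anyMatch instead of filter().findAny().isPresent(); the behaviour is unchanged because setAdminRole(true) is still only called on a match.

// Equivalent form of the admin-role check (illustration only, not the commit's code).
boolean isAdminRole = Arrays.stream(adminRoles)
        .anyMatch(role -> role.trim().equals(data.getName()));
if (isAdminRole) {
    roleRecord.setAdminRole(true);
}
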
@@ -1,47 +0,0 @@
-package ru.micord.ervu.account_applications.service;
-
-import javax.annotation.PostConstruct;
-
-import net.javacrumbs.shedlock.core.SchedulerLock;
-import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.scheduling.annotation.Scheduled;
-import org.springframework.stereotype.Service;
-
-import static org.springframework.scheduling.config.ScheduledTaskRegistrar.CRON_DISABLED;
-
-/**
- * @author Eduard Tihomirov
- */
-@Service
-public class ErvuDirectoriesUpdateShedulerService {
-
-    @Autowired
-    private ErvuDirectoriesService ervuDirectoriesService;
-
-    @Value("${directory.update.cron:0 0 */1 * * *}")
-    private String cronLoad;
-
-    @PostConstruct
-    public void init() {
-        if (!cronLoad.equals(CRON_DISABLED)) {
-            new Thread(this::runWithSleep).start();
-        }
-    }
-
-    private void runWithSleep() {
-        try {
-            Thread.sleep(100000);
-        }
-        catch (InterruptedException e) {
-            throw new RuntimeException(e);
-        }
-        run();
-    }
-
-    @Scheduled(cron = "${directory.update.cron:0 0 */1 * * *}")
-    @SchedulerLock(name = "updateDirectories")
-    public void run() {
-        ervuDirectoriesService.updateDirectories();
-    }
-}

@@ -20,6 +20,8 @@ KAFKA_USER=user1
 KAFKA_PASS=Blfi9d2OFG
 KAFKA_CONSUMER_GROUP_ID=1
 KAFKA_DOMAIN_GROUP_ID=ervu-account-applications-backend-domain
+KAFKA_DOMAIN_UPDATED_GROUP_ID=ervu-account-applications-backend-domain-updated
+KAFKA_DOMAIN_CREATED_GROUP_ID=ervu-account-applications-backend-domain-updated
 KAFKA_ROLE_GROUP_ID=ervu-account-applications-backend-role
 IDM_URL=http://idm

@@ -28,4 +30,6 @@ ERVU_HTTP_TIMEOUT=30
 ERVU_PWD_SIGN_SECRET_KEY=xoL2Y3VRdQ4phXG85o6dRqcgqb4bk6ULdkJJdlRLhZM=
 KAFKA_ROLE_RECONCILIATION=idmv2.role.reconciliation
 KAFKA_DOMAIN_RECONCILIATION=idmv2.domain.reconciliation
+KAFKA_DOMAIN_UPDATED=idmv2.domain.created
+KAFKA_DOMAIN_CREATED=idmv2.domain.updated
 ERVU_ROLE_ADMIN=security_administrator