Merge remote-tracking branch 'origin/origin/hotfix/1.9.11' into origin/hotfix/1.9.11

# Conflicts:
#	backend/pom.xml
#	pom.xml
This commit is contained in:
gulnaz 2025-03-28 16:26:42 +03:00
commit 10a14dd37b
11 changed files with 408 additions and 271 deletions

View file

@ -233,8 +233,12 @@
<artifactId>spring-retry</artifactId>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</dependency>
</dependencies>
<build>

View file

@ -0,0 +1,61 @@
package ru.micord.ervu.account_applications.dao;
import java.util.List;
import java.util.UUID;
import org.jooq.DSLContext;
import org.jooq.Record2;
import org.jooq.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Repository;
import ru.micord.ervu.account_applications.db_beans.public_.tables.records.RecruitmentRecord;
import ru.micord.ervu.account_applications.db_beans.public_.tables.records.UserApplicationRoleRecord;
import static ru.micord.ervu.account_applications.db_beans.public_.tables.Recruitment.RECRUITMENT;
import static ru.micord.ervu.account_applications.db_beans.public_.tables.UserApplicationRole.USER_APPLICATION_ROLE;
/**
 * jOOQ data-access layer for the ERVU directory tables
 * (RECRUITMENT and USER_APPLICATION_ROLE).
 *
 * @author Eduard Tihomirov
 */
@Repository
public class ErvuDirectoriesDao {

  @Autowired
  private DSLContext dsl;

  /** All user-role ids currently stored in USER_APPLICATION_ROLE. */
  public List<String> getRoleIds() {
    return dsl
        .select(USER_APPLICATION_ROLE.USER_ROLE_ID)
        .from(USER_APPLICATION_ROLE)
        .fetch(USER_APPLICATION_ROLE.USER_ROLE_ID);
  }

  /** Pairs of (internal id, IDM id) for every stored recruitment row. */
  public Result<Record2<UUID, String>> getDomainIds() {
    return dsl
        .select(RECRUITMENT.ID, RECRUITMENT.IDM_ID)
        .from(RECRUITMENT)
        .fetch();
  }

  /** Creates a fresh, attached USER_APPLICATION_ROLE record. */
  public UserApplicationRoleRecord getRoleRecord() {
    return dsl.newRecord(USER_APPLICATION_ROLE);
  }

  /** Creates a fresh, attached RECRUITMENT record. */
  public RecruitmentRecord getRecruitmentRecord() {
    return dsl.newRecord(RECRUITMENT);
  }

  /** Batch-inserts new recruitment rows. */
  public void insertRecruitmentRecords(List<RecruitmentRecord> records) {
    dsl.batchInsert(records).execute();
  }

  /** Batch-updates existing recruitment rows. */
  public void updateRecruitmentRecords(List<RecruitmentRecord> records) {
    dsl.batchUpdate(records).execute();
  }

  /** Batch-inserts new role rows. */
  public void insertRoleRecords(List<UserApplicationRoleRecord> records) {
    dsl.batchInsert(records).execute();
  }

  /** Batch-updates existing role rows. */
  public void updateRoleRecords(List<UserApplicationRoleRecord> records) {
    dsl.batchUpdate(records).execute();
  }
}

View file

@ -0,0 +1,26 @@
package ru.micord.ervu.account_applications.kafka;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.stereotype.Component;
import ru.micord.ervu.account_applications.service.ErvuDirectoriesService;
/**
 * Kafka endpoints for the ERVU directory reconciliation topics: each handler
 * hands the raw JSON message to {@link ErvuDirectoriesService} for parsing and
 * upsert.
 *
 * <p>NOTE(review): the class name is misspelled ("Listner"), but other beans
 * appear to reference it by name (e.g. {@code @DependsOn("ervuDirectoriesListner")}),
 * so renaming is deliberately avoided here.
 *
 * @author Eduard Tihomirov
 */
@Component
public class ErvuDirectoriesListner {

  @Autowired
  private ErvuDirectoriesService ervuDirectoriesService;

  /** Consumes domain (recruitment) reconciliation messages. */
  @KafkaListener(id = "${kafka.domain.group.id}", topics = "${kafka.domain.reconciliation}")
  public void listenKafkaDomain(String message) {
    ervuDirectoriesService.upsertKafkaDomainMessage(message);
  }

  /** Consumes role reconciliation messages. */
  @KafkaListener(id = "${kafka.role.group.id}", topics = "${kafka.role.reconciliation}")
  public void listenKafkaRole(String message) {
    ervuDirectoriesService.upsertKafkaRoleMessage(message);
  }
}

View file

@ -0,0 +1,62 @@
package ru.micord.ervu.account_applications.kafka;
import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.ConsumerFactory;
import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
/**
 * Kafka consumer configuration for the ERVU reconciliation listeners.
 *
 * <p>Connection and SASL settings are injected from application properties
 * ({@code kafka.hosts}, {@code kafka.auth_*}, {@code kafka.user},
 * {@code kafka.pass}).
 *
 * @author Eduard Tihomirov
 */
@Configuration
@EnableKafka
public class KafkaConfig {

  /** Comma-separated bootstrap servers (host:port). */
  @Value("${kafka.hosts}")
  private String bootstrapServers;

  /** Security protocol, e.g. SASL_PLAINTEXT. */
  @Value("${kafka.auth_sec_proto}")
  private String securityProtocol;

  /** Fully-qualified JAAS login module class name. */
  @Value("${kafka.auth_sasl_module}")
  private String loginModule;

  @Value("${kafka.user}")
  private String username;

  @Value("${kafka.pass}")
  private String password;

  /** SASL mechanism, e.g. SCRAM-SHA-256. */
  @Value("${kafka.auth_sasl_mech}")
  private String saslMechanism;

  /**
   * Consumer properties shared by all listeners (String key/value
   * deserialization plus SASL authentication). Exposed as a bean so the
   * settings can be reused elsewhere.
   */
  @Bean
  public Map<String, Object> consumerConfigs() {
    Map<String, Object> config = new HashMap<>();
    config.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
    config.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
    config.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
    // JAAS config line must be terminated with ';'.
    config.put(SaslConfigs.SASL_JAAS_CONFIG,
        String.format("%s required username=\"%s\" password=\"%s\";",
            loginModule, username, password));
    config.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
    return config;
  }

  /** String/String consumer factory built from {@link #consumerConfigs()}. */
  @Bean
  public ConsumerFactory<String, String> consumerFactory() {
    return new DefaultKafkaConsumerFactory<>(consumerConfigs());
  }

  /** Container factory backing the {@code @KafkaListener} endpoints. */
  @Bean
  public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
    ConcurrentKafkaListenerContainerFactory<String, String> containerFactory =
        new ConcurrentKafkaListenerContainerFactory<>();
    containerFactory.setConsumerFactory(consumerFactory());
    return containerFactory;
  }
}

View file

@ -7,47 +7,20 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
* @author Eduard Tihomirov
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class RecruitmentApiResponse {
private int totalRows;
private int page;
private int perPage;
private List<Record> records;
public class RecruitmentResponse {
private List<Data> data;
public int getTotalRows() {
return totalRows;
public List<Data> getData() {
return data;
}
public void setTotalRows(int totalRows) {
this.totalRows = totalRows;
}
public int getPage() {
return page;
}
public void setPage(int page) {
this.page = page;
}
public int getPerPage() {
return perPage;
}
public void setPerPage(int perPage) {
this.perPage = perPage;
}
public List<Record> getRecords() {
return records;
}
public void setRecords(
List<Record> records) {
this.records = records;
public void setData(
List<Data> data) {
this.data = data;
}
@JsonIgnoreProperties(ignoreUnknown = true)
public static class Record {
public static class Data {
private String id;
private int version;
private String schema;
@ -78,8 +51,9 @@ public class RecruitmentApiResponse {
private String militaryCode;
private Long createDate;
private Long modified;
private ParentRecord parent;
private RegionInfo regionInfo;
private String parent;
private String regionId;
private String regionCode;
public String getId() {
return id;
@ -273,21 +247,21 @@ public class RecruitmentApiResponse {
this.reportsEnabled = reportsEnabled;
}
public ParentRecord getParent() {
public String getParent() {
return parent;
}
public void setParent(ParentRecord parent) {
public void setParent(String parent) {
this.parent = parent;
}
public RegionInfo getRegionInfo() {
return regionInfo;
public String getRegionId() {
return regionId;
}
public void setRegionInfo(
RegionInfo regionInfo) {
this.regionInfo = regionInfo;
public void setRegionId(
String regionId) {
this.regionId = regionId;
}
public String getSubpoenaSeriesCode() {
@ -338,39 +312,12 @@ public class RecruitmentApiResponse {
this.modified = modified;
}
@JsonIgnoreProperties(ignoreUnknown = true)
public static class ParentRecord {
private String id;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id;
}
public String getRegionCode() {
return regionCode;
}
@JsonIgnoreProperties(ignoreUnknown = true)
public static class RegionInfo {
private String regionId;
private String regionCode;
public String getRegionId() {
return regionId;
}
public void setRegionId(String regionId) {
this.regionId = regionId;
}
public String getRegionCode() {
return regionCode;
}
public void setRegionCode(String regionCode) {
this.regionCode = regionCode;
}
public void setRegionCode(String regionCode) {
this.regionCode = regionCode;
}
}
}

View file

@ -8,46 +8,19 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
* @author Eduard Tihomirov
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class RoleApiResponse {
private int totalRows;
private int page;
private int perPage;
private List<RoleApiResponse.Record> records;
public class RoleResponse {
private List<Data> data;
public int getTotalRows() {
return totalRows;
public List<Data> getData() {
return data;
}
public void setTotalRows(int totalRows) {
this.totalRows = totalRows;
}
public int getPage() {
return page;
}
public void setPage(int page) {
this.page = page;
}
public int getPerPage() {
return perPage;
}
public void setPerPage(int perPage) {
this.perPage = perPage;
}
public List<RoleApiResponse.Record> getRecords() {
return records;
}
public void setRecords(
List<RoleApiResponse.Record> records) {
this.records = records;
public void setData(
List<Data> data) {
this.data = data;
}
@JsonIgnoreProperties(ignoreUnknown = true)
public static class Record {
public static class Data {
private String id;
private String displayName;
private Long createDate;

View file

@ -0,0 +1,57 @@
package ru.micord.ervu.account_applications.service;
import java.util.List;
import java.util.UUID;
import org.jooq.Record2;
import org.jooq.Result;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.cache.annotation.Cacheable;
import org.springframework.stereotype.Service;
import ru.micord.ervu.account_applications.dao.ErvuDirectoriesDao;
import ru.micord.ervu.account_applications.db_beans.public_.tables.records.RecruitmentRecord;
import ru.micord.ervu.account_applications.db_beans.public_.tables.records.UserApplicationRoleRecord;
/**
 * Caching facade over {@link ErvuDirectoriesDao}. Lookup results are cached
 * so the Kafka upsert handlers do not re-query the database on every message;
 * both caches ("domain-ids", "role-ids") are evicted by
 * ErvuDirectoriesService.updateDirectories().
 *
 * @author Eduard Tihomirov
 */
@Service
public class ErvuDirectoriesDaoService {
  @Autowired
  private ErvuDirectoriesDao ervuDirectoriesDao;

  /**
   * Known user-role ids, cached under "role-ids".
   *
   * BUG FIX: the @Cacheable("role-ids") annotation was previously placed on
   * {@link #getRoleRecord()} instead of here. Caching that factory method
   * would return the same mutable record instance on every call, so every
   * role in an upsert batch would alias (and overwrite) a single record.
   * The "role-ids" cache evicted by ErvuDirectoriesService clearly pairs
   * with this id lookup, mirroring getDomainIds()/"domain-ids".
   */
  @Cacheable(value = "role-ids", unless = "#result == null")
  public List<String> getRoleIds() {
    return ervuDirectoriesDao.getRoleIds();
  }

  /**
   * (internal id, IDM id) pairs for stored recruitments, cached under
   * "domain-ids".
   *
   * NOTE(review): the cache is only evicted via updateDirectories(); if
   * upserts arrive between evictions the cached id set may be stale and
   * cause re-inserts — confirm the eviction strategy with the reconcile flow.
   */
  @Cacheable(value = "domain-ids", unless = "#result == null")
  public Result<Record2<UUID, String>> getDomainIds() {
    return ervuDirectoriesDao.getDomainIds();
  }

  /** Creates a fresh role record (never cached — returns a new mutable record). */
  public UserApplicationRoleRecord getRoleRecord() {
    return ervuDirectoriesDao.getRoleRecord();
  }

  /** Creates a fresh recruitment record (never cached). */
  public RecruitmentRecord getRecruitmentRecord() {
    return ervuDirectoriesDao.getRecruitmentRecord();
  }

  /** Batch-inserts new recruitment rows. */
  public void insertRecruitmentRecords(List<RecruitmentRecord> newRecruitmentRecords) {
    ervuDirectoriesDao.insertRecruitmentRecords(newRecruitmentRecords);
  }

  /** Batch-updates existing recruitment rows. */
  public void updateRecruitmentRecords(List<RecruitmentRecord> recruitmentRecords) {
    ervuDirectoriesDao.updateRecruitmentRecords(recruitmentRecords);
  }

  /** Batch-inserts new role rows. */
  public void insertRoleRecords(List<UserApplicationRoleRecord> newRoleRecords) {
    ervuDirectoriesDao.insertRoleRecords(newRoleRecords);
  }

  /** Batch-updates existing role rows. */
  public void updateRoleRecords(List<UserApplicationRoleRecord> roleRecords) {
    ervuDirectoriesDao.updateRoleRecords(roleRecords);
  }
}

View file

@ -1,8 +1,6 @@
package ru.micord.ervu.account_applications.service;
import java.net.URI;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import java.lang.invoke.MethodHandles;
import java.sql.Timestamp;
import java.time.Instant;
import java.util.ArrayList;
@ -10,133 +8,150 @@ import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import org.jooq.DSLContext;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.jooq.Record2;
import org.jooq.Result;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.cache.annotation.CacheEvict;
import org.springframework.cache.annotation.Caching;
import org.springframework.context.annotation.DependsOn;
import org.springframework.http.HttpEntity;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.web.client.RestTemplate;
import org.springframework.web.util.UriComponentsBuilder;
import ru.micord.ervu.account_applications.db_beans.public_.tables.records.RecruitmentRecord;
import ru.micord.ervu.account_applications.db_beans.public_.tables.records.UserApplicationRoleRecord;
import ru.micord.ervu.account_applications.model.RecruitmentApiResponse;
import ru.micord.ervu.account_applications.model.RoleApiResponse;
import ru.micord.ervu.account_applications.model.RecruitmentResponse;
import ru.micord.ervu.account_applications.model.RoleResponse;
import static ru.micord.ervu.account_applications.db_beans.public_.tables.Recruitment.RECRUITMENT;
import static ru.micord.ervu.account_applications.db_beans.public_.tables.UserApplicationRole.USER_APPLICATION_ROLE;
/**
* @author Eduard Tihomirov
*/
@Service
@DependsOn({"liquibase", "ervuDirectoriesListner"})
public class ErvuDirectoriesService {
private static final Logger LOGGER = LoggerFactory.getLogger(
MethodHandles.lookup().lookupClass());
@Value("${ervu.url}")
private String ervuUrl;
@Value("${ervu.page.size:50}")
private Integer perPage;
@Value("${ervu.recruitment.schemas:Organization, Department, Region, Ministry}")
private String schemas;
@Autowired
private DSLContext dsl;
@Value("${ervu.collection:domain, role}")
private String ervuCollection;
@Autowired
private RestTemplate restTemplate;
@Autowired
private ObjectMapper objectMapper;
@Autowired
private ErvuDirectoriesDaoService ervuDirectoriesDaoService;
@Caching(evict = {
@CacheEvict(value = "domain-ids", allEntries = true),
@CacheEvict(value = "role-ids", allEntries = true)
})
public void updateDirectories() {
try {
String[] ervuCollectionArray = ervuCollection.split(",");
Arrays.stream(ervuCollectionArray).forEach(ervuCollection -> {
String targetUrl = ervuUrl + "/service/idm/reconcile/"+ ervuCollection + "/to/kafka/v1";
HttpHeaders headers = new HttpHeaders();
headers.setContentType(MediaType.APPLICATION_JSON);
String emptyJson = "{}";
HttpEntity<String> requestEntity = new HttpEntity<>(emptyJson, headers);
ResponseEntity<String> response = restTemplate.postForEntity(targetUrl, requestEntity, String.class);
if (!response.getStatusCode().is2xxSuccessful()) {
LOGGER.error(
"Error in " + ervuCollection + " request. Status code: " + response.getStatusCode()
+ "; Body: " + response.getBody());
}
});
}
catch (Exception e) {
LOGGER.error(e.getMessage());
//rethrow so the failure propagates and the caches are not treated as refreshed
throw new RuntimeException(e);
}
}
@Transactional
public void updateDirectories() {
fetchAndUpsetRecruitmentData();
fetchAndUpsetRoleData();
public void upsertKafkaDomainMessage(String kafkaMessage) {
RecruitmentResponse[] response = null;
try {
response = objectMapper.readValue(kafkaMessage, RecruitmentResponse[].class);
}
catch (JsonProcessingException e) {
throw new RuntimeException("Error with parsing domain kafka message", e);
}
if (response.length > 0 && response[0].getData() != null && !response[0].getData().isEmpty()) {
upsertRecruitmentData(response[0].getData());
}
}
public void fetchAndUpsetRecruitmentData() {
String[] schemaArray = schemas.split(",");
Arrays.stream(schemaArray).forEach(schema -> {
schema = schema.trim();
int page = 1;
boolean hasMorePages;
Result<Record2<UUID, String>> ids = dsl.select(RECRUITMENT.ID, RECRUITMENT.IDM_ID)
.from(RECRUITMENT)
.fetch();
try {
do {
String url = ervuUrl + "/service/idm/domains";
String encodedSchemaValue = URLEncoder.encode("schema=in=(" + schema + ")",
StandardCharsets.UTF_8
);
URI uri = UriComponentsBuilder.fromHttpUrl(url)
.queryParam("query", encodedSchemaValue)
.queryParam("expand", "parent")
.queryParam("page", page)
.queryParam("perPage", perPage)
.build(true)
.toUri();
RecruitmentApiResponse recruitmentApiResponse = restTemplate.getForObject(uri,
RecruitmentApiResponse.class
);
if (recruitmentApiResponse != null && recruitmentApiResponse.getRecords() != null) {
upsertRecruitmentData(recruitmentApiResponse.getRecords(), ids);
hasMorePages = recruitmentApiResponse.getRecords().size() == perPage;
}
else {
hasMorePages = false;
}
page++;
}
while (hasMorePages);
}
catch (Exception e) {
throw new RuntimeException(e);
}
});
@Transactional
public void upsertKafkaRoleMessage(String kafkaMessage) {
RoleResponse[] response = null;
try {
response = objectMapper.readValue(kafkaMessage, RoleResponse[].class);
}
catch (JsonProcessingException e) {
throw new RuntimeException("Error with parsing role kafka message", e);
}
if (response.length > 0 && response[0].getData() != null && !response[0].getData().isEmpty()) {
upsertRoleData(response[0].getData());
}
}
private void upsertRecruitmentData(List<RecruitmentApiResponse.Record> recordList,
Result<Record2<UUID, String>> ids) {
private void upsertRecruitmentData(List<RecruitmentResponse.Data> dataList) {
List<RecruitmentRecord> newRecruitmentRecords = new ArrayList<>();
List<RecruitmentRecord> recruitmentRecords = new ArrayList<>();
recordList.forEach(record -> {
Timestamp updatedAt = Timestamp.from(Instant.ofEpochSecond(record.getModified()));
Timestamp createdAt = Timestamp.from(Instant.ofEpochSecond(record.getCreateDate()));
RecruitmentRecord recruitmentRecord = dsl.newRecord(RECRUITMENT);
recruitmentRecord.setIdmId(record.getId());
recruitmentRecord.setVersion(record.getVersion());
recruitmentRecord.setSchema(record.getSchema());
recruitmentRecord.setShortname(record.getShortname());
recruitmentRecord.setFullname(record.getFullname());
recruitmentRecord.setDns(record.getDns());
recruitmentRecord.setEmail(record.getEmail());
recruitmentRecord.setPhone(record.getPhone());
recruitmentRecord.setAddress(record.getAddress());
recruitmentRecord.setPostalAddress(record.getPostalAddress());
recruitmentRecord.setNsiDepartmentId(record.getNsiDepartmentId());
recruitmentRecord.setNsiOrganizationId(record.getNsiOrganizationId());
recruitmentRecord.setOktmo(record.getOktmo());
recruitmentRecord.setOrgOgrn(record.getOrgOgrn());
recruitmentRecord.setDepOgrn(record.getDepOgrn());
recruitmentRecord.setEpguId(record.getEpguId());
recruitmentRecord.setKpp(record.getKpp());
recruitmentRecord.setInn(record.getInn());
recruitmentRecord.setOkato(record.getOkato());
recruitmentRecord.setDivisionType(record.getDivisionType());
recruitmentRecord.setTnsDepartmentId(record.getTnsDepartmentId());
recruitmentRecord.setEnabled(record.getEnabled() != null ? record.getEnabled() : true);
recruitmentRecord.setTimezone(record.getTimezone());
recruitmentRecord.setReportsEnabled(record.getReportsEnabled());
recruitmentRecord.setParentId(record.getParent() != null ? record.getParent().getId() : null);
recruitmentRecord.setSubpoenaSeriesCode(record.getSubpoenaSeriesCode());
recruitmentRecord.setAddressId(record.getAddressId());
recruitmentRecord.setPostalAddressId(record.getPostalAddressId());
recruitmentRecord.setRegionId(
record.getRegionInfo() != null ? record.getRegionInfo().getRegionId() : null);
recruitmentRecord.setRegionCode(
record.getRegionInfo() != null ? record.getRegionInfo().getRegionCode() : null);
recruitmentRecord.setMilitaryCode(record.getMilitaryCode());
Result<Record2<UUID, String>> ids = ervuDirectoriesDaoService.getDomainIds();
dataList.forEach(data -> {
Timestamp updatedAt = Timestamp.from(Instant.ofEpochSecond(data.getModified()));
Timestamp createdAt = Timestamp.from(Instant.ofEpochSecond(data.getCreateDate()));
RecruitmentRecord recruitmentRecord = ervuDirectoriesDaoService.getRecruitmentRecord();
recruitmentRecord.setIdmId(data.getId());
recruitmentRecord.setVersion(data.getVersion());
recruitmentRecord.setSchema(data.getSchema());
recruitmentRecord.setShortname(data.getShortname());
recruitmentRecord.setFullname(data.getFullname());
recruitmentRecord.setDns(data.getDns());
recruitmentRecord.setEmail(data.getEmail());
recruitmentRecord.setPhone(data.getPhone());
recruitmentRecord.setAddress(data.getAddress());
recruitmentRecord.setPostalAddress(data.getPostalAddress());
recruitmentRecord.setNsiDepartmentId(data.getNsiDepartmentId());
recruitmentRecord.setNsiOrganizationId(data.getNsiOrganizationId());
recruitmentRecord.setOktmo(data.getOktmo());
recruitmentRecord.setOrgOgrn(data.getOrgOgrn());
recruitmentRecord.setDepOgrn(data.getDepOgrn());
recruitmentRecord.setEpguId(data.getEpguId());
recruitmentRecord.setKpp(data.getKpp());
recruitmentRecord.setInn(data.getInn());
recruitmentRecord.setOkato(data.getOkato());
recruitmentRecord.setDivisionType(data.getDivisionType());
recruitmentRecord.setTnsDepartmentId(data.getTnsDepartmentId());
recruitmentRecord.setEnabled(data.getEnabled() != null ? data.getEnabled() : true);
recruitmentRecord.setTimezone(data.getTimezone());
recruitmentRecord.setReportsEnabled(data.getReportsEnabled());
recruitmentRecord.setParentId(data.getParent());
recruitmentRecord.setSubpoenaSeriesCode(data.getSubpoenaSeriesCode());
recruitmentRecord.setAddressId(data.getAddressId());
recruitmentRecord.setPostalAddressId(data.getPostalAddressId());
recruitmentRecord.setRegionId(data.getRegionId());
recruitmentRecord.setRegionCode(data.getRegionCode());
recruitmentRecord.setMilitaryCode(data.getMilitaryCode());
recruitmentRecord.setCreatedAt(createdAt);
recruitmentRecord.setUpdatedAt(updatedAt);
recruitmentRecord.setTs(new Timestamp(System.currentTimeMillis()));
boolean isExisting = false;
for (Record2<UUID, String> resultRecord : ids) {
if (resultRecord.get(RECRUITMENT.IDM_ID).equals(recruitmentRecord.getIdmId())) {
@ -150,70 +165,35 @@ public class ErvuDirectoriesService {
newRecruitmentRecords.add(recruitmentRecord);
}
});
dsl.batchInsert(newRecruitmentRecords).execute();
dsl.batchUpdate(recruitmentRecords).execute();
ervuDirectoriesDaoService.insertRecruitmentRecords(newRecruitmentRecords);
ervuDirectoriesDaoService.updateRecruitmentRecords(recruitmentRecords);
}
public void fetchAndUpsetRoleData() {
int page = 1;
boolean hasMorePages;
List<String> ids = dsl.select(USER_APPLICATION_ROLE.USER_ROLE_ID)
.from(USER_APPLICATION_ROLE)
.fetch(USER_APPLICATION_ROLE.USER_ROLE_ID);
try {
do {
String url = ervuUrl + "/service/idm/roles";
String encodedErvuRole = URLEncoder.encode("ervuRole=like=true", StandardCharsets.UTF_8);
URI uri = UriComponentsBuilder.fromHttpUrl(url)
.queryParam("query", encodedErvuRole)
.queryParam("page", page)
.queryParam("perPage", perPage)
.build(true)
.toUri();
RoleApiResponse roleApiResponse = restTemplate.getForObject(uri, RoleApiResponse.class);
if (roleApiResponse != null && roleApiResponse.getRecords() != null) {
upsertRoleData(roleApiResponse.getRecords(), ids);
hasMorePages = roleApiResponse.getRecords().size() == perPage;
}
else {
hasMorePages = false;
}
page++;
}
while (hasMorePages);
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
private void upsertRoleData(List<RoleApiResponse.Record> recordList, List<String> ids) {
private void upsertRoleData(List<RoleResponse.Data> dataList) {
List<UserApplicationRoleRecord> newRoleRecords = new ArrayList<>();
List<UserApplicationRoleRecord> roleRecords = new ArrayList<>();
recordList.forEach(record -> {
Timestamp updatedAt = Timestamp.from(Instant.ofEpochSecond(record.getModified()));
Timestamp createdAt = Timestamp.from(Instant.ofEpochSecond(record.getCreateDate()));
List<String> ids = ervuDirectoriesDaoService.getRoleIds();
dataList.forEach(data -> {
Timestamp updatedAt = Timestamp.from(Instant.ofEpochSecond(data.getModified()));
Timestamp createdAt = Timestamp.from(Instant.ofEpochSecond(data.getCreateDate()));
Timestamp finishAt = null;
if (record.getFinish() != null) {
finishAt = Timestamp.from(Instant.ofEpochSecond(record.getFinish()));
if (data.getFinish() != null) {
finishAt = Timestamp.from(Instant.ofEpochSecond(data.getFinish()));
}
UserApplicationRoleRecord roleRecord = dsl.newRecord(USER_APPLICATION_ROLE);
roleRecord.setUserRoleId(record.getId());
roleRecord.setRoleName(record.getDisplayName());
UserApplicationRoleRecord roleRecord = ervuDirectoriesDaoService.getRoleRecord();
roleRecord.setUserRoleId(data.getId());
roleRecord.setRoleName(data.getDisplayName());
roleRecord.setCreated(createdAt);
roleRecord.setUpdated(updatedAt);
roleRecord.setFinished(finishAt);
if (ids.contains(record.getId())) {
if (ids.contains(data.getId())) {
roleRecords.add(roleRecord);
}
else {
newRoleRecords.add(roleRecord);
}
});
dsl.batchInsert(newRoleRecords).execute();
dsl.batchUpdate(roleRecords).execute();
ervuDirectoriesDaoService.insertRoleRecords(newRoleRecords);
ervuDirectoriesDaoService.updateRoleRecords(roleRecords);
}
}

View file

@ -5,10 +5,8 @@ import javax.annotation.PostConstruct;
import net.javacrumbs.shedlock.core.SchedulerLock;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.DependsOn;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import static org.springframework.scheduling.config.ScheduledTaskRegistrar.CRON_DISABLED;
@ -16,7 +14,6 @@ import static org.springframework.scheduling.config.ScheduledTaskRegistrar.CRON_
* @author Eduard Tihomirov
*/
@Service
@DependsOn("liquibase")
public class ErvuDirectoriesUpdateShedulerService {
@Autowired
@ -28,7 +25,7 @@ public class ErvuDirectoriesUpdateShedulerService {
@PostConstruct
public void init() {
if (!cronLoad.equals(CRON_DISABLED)) {
run();
new Thread(this::run).start();
}
}

View file

@ -12,6 +12,18 @@ DB_SEC_HOST=10.10.31.118
DB_SEC_PORT=5432
DB_SEC_NAME=account_applications
KAFKA_HOSTS=10.10.31.11:32609
KAFKA_AUTH_SEC_PROTO=SASL_PLAINTEXT
KAFKA_AUTH_SASL_MECH=SCRAM-SHA-256
KAFKA_AUTH_SASL_MODULE=org.apache.kafka.common.security.scram.ScramLoginModule
KAFKA_USER=user1
KAFKA_PASS=Blfi9d2OFG
KAFKA_CONSUMER_GROUP_ID=1
KAFKA_DOMAIN_GROUP_ID=ervu-account-applications-backend-domain
KAFKA_ROLE_GROUP_ID=ervu-account-applications-backend-role
ERVU_URL=https://ervu-dev.pgs.rtlabs.ru
ERVU_HTTP_TIMEOUT=30
ERVU_PWD_SIGN_SECRET_KEY=xoL2Y3VRdQ4phXG85o6dRqcgqb4bk6ULdkJJdlRLhZM=
ERVU_PWD_SIGN_SECRET_KEY=xoL2Y3VRdQ4phXG85o6dRqcgqb4bk6ULdkJJdlRLhZM=
KAFKA_ROLE_RECONCILIATION=idmv2.role.reconciliation
KAFKA_DOMAIN_RECONCILIATION=idmv2.domain.reconciliation

28
pom.xml
View file

@ -21,6 +21,7 @@
<webbpm-platform.version>3.192.4</webbpm-platform.version>
<wbp.overall-timeout>72000</wbp.overall-timeout>
<jasperreports.version>6.15.0</jasperreports.version>
<spring-kafka.version>2.9.13</spring-kafka.version>
</properties>
<dependencyManagement>
<dependencies>
@ -312,16 +313,33 @@
<artifactId>spring-retry</artifactId>
<version>2.0.11</version>
</dependency>
<dependency>
<groupId>org.springframework.kafka</groupId>
<artifactId>spring-kafka</artifactId>
<version>${spring-kafka.version}</version>
<exclusions>
<exclusion>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
<version>3.9.0</version>
<exclusions>
<exclusion>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</exclusion>
</exclusions>
</dependency>
<dependency>
<groupId>org.springframework.retry</groupId>
<artifactId>spring-retry</artifactId>
<version>2.0.11</version>
</dependency>
<dependency>
<groupId>org.springframework</groupId>
<artifactId>spring-aspects</artifactId>
<version>6.2.5</version>
</dependency>
</dependencies>
</dependencyManagement>
<repositories>