SUPPORT-8474: Fix

Eduard Tihomirov 2024-09-06 11:32:40 +03:00
parent 3f91857a16
commit 66bdc4cb01
7 changed files with 221 additions and 10 deletions

View file

@@ -0,0 +1,31 @@
package ervu.model;

import java.io.Serializable;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * @author Eduard Tihomirov
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class Data implements Serializable {

    private static final long serialVersionUID = 1L;

    private String orgId_ERVU;
    private String prnOid;

    public String getOrgId_ERVU() {
        return orgId_ERVU;
    }

    public void setOrgId_ERVU(String orgId_ERVU) {
        this.orgId_ERVU = orgId_ERVU;
    }

    public String getPrnOid() {
        return prnOid;
    }

    public void setPrnOid(String prnOid) {
        this.prnOid = prnOid;
    }
}

View file

@@ -0,0 +1,41 @@
package ervu.model;

import java.io.Serializable;

import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

/**
 * @author Eduard Tihomirov
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public class ErvuOrgResponse implements Serializable {

    private static final long serialVersionUID = 1L;

    private boolean success;
    private String message;
    private Data data;

    public boolean getSuccess() {
        return success;
    }

    public void setSuccess(boolean success) {
        this.success = success;
    }

    public String getMessage() {
        return message;
    }

    public void setMessage(String message) {
        this.message = message;
    }

    public Data getData() {
        return data;
    }

    public void setData(Data data) {
        this.data = data;
    }
}
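For orientation, here is a minimal sketch of the reply payload these two DTOs are expected to bind; the field values below are invented for illustration, only the shape follows the getters above:

import com.fasterxml.jackson.databind.ObjectMapper;

import ervu.model.ErvuOrgResponse;

public class ErvuOrgResponseDemo {

    public static void main(String[] args) throws Exception {
        // Illustrative payload shape only; the values are made up.
        String json = "{\"success\":true,\"message\":\"OK\","
                + "\"data\":{\"orgId_ERVU\":\"12345\",\"prnOid\":\"oid-67890\"}}";
        ErvuOrgResponse response = new ObjectMapper().readValue(json, ErvuOrgResponse.class);
        System.out.println(response.getData().getOrgId_ERVU()); // prints 12345
    }
}

Any extra fields in the real payload are silently dropped thanks to @JsonIgnoreProperties(ignoreUnknown = true) on both classes.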

View file

@@ -0,0 +1,81 @@
package ervu.service.kafka;

import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.kafka.annotation.EnableKafka;
import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
import org.springframework.kafka.core.*;
import org.springframework.kafka.listener.ConcurrentMessageListenerContainer;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;

import java.util.HashMap;
import java.util.Map;

@Configuration
@EnableKafka
public class ErvuKafkaConfig {

    @Value("${ervu-kafka.bootstrap-servers}")
    private String bootstrapServers;

    // Falls back to ervu.organization.response when ervu-kafka.reply-topic is not set.
    @Value("${ervu-kafka.reply-topic:ervu.organization.response}")
    private String replyTopic;

    @Value("${ervu-kafka.group-id}")
    private String groupId;

    @Bean
    public ProducerFactory<String, String> producerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
        return new DefaultKafkaProducerFactory<>(configProps);
    }

    @Bean
    public KafkaTemplate<String, String> kafkaTemplate() {
        return new KafkaTemplate<>(producerFactory());
    }

    @Bean
    public ConsumerFactory<String, String> consumerFactory() {
        Map<String, Object> configProps = new HashMap<>();
        configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
        configProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
        configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
        return new DefaultKafkaConsumerFactory<>(configProps);
    }

    @Bean
    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
        ConcurrentKafkaListenerContainerFactory<String, String> factory =
                new ConcurrentKafkaListenerContainerFactory<>();
        factory.setConsumerFactory(consumerFactory());
        return factory;
    }

    @Bean
    public ConcurrentMessageListenerContainer<String, String> replyContainer(
            ConcurrentKafkaListenerContainerFactory<String, String> factory) {
        ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(replyTopic);
        container.getContainerProperties().setGroupId(groupId);
        return container;
    }

    @Bean
    public ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate(
            ProducerFactory<String, String> pf,
            ConcurrentMessageListenerContainer<String, String> container) {
        ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate =
                new ReplyingKafkaTemplate<>(pf, container);
        replyingKafkaTemplate.setCorrelationHeaderName("messageID");
        return replyingKafkaTemplate;
    }
}

View file

@@ -0,0 +1,41 @@
package ervu.service.kafka.service;

import java.util.concurrent.ExecutionException;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.requestreply.RequestReplyFuture;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;

/**
 * @author Eduard Tihomirov
 */
@Service
public class ErvuKafkaService {

    // Must resolve to the same topic the reply container in ErvuKafkaConfig consumes,
    // otherwise the reply is never seen by the ReplyingKafkaTemplate.
    @Value("${ervu-kafka.reply-topic:ervu.organization.response}")
    private String requestReplyTopic;

    @Value("${ervu-kafka.request-topic:ervu.organization.request}")
    private String requestTopic;

    @Autowired
    private ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate;

    public String sendMessageAndGetReply(String requestMessage)
            throws ExecutionException, InterruptedException {
        ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
        record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, requestReplyTopic.getBytes()));
        RequestReplyFuture<String, String, String> replyFuture = replyingKafkaTemplate.sendAndReceive(record);
        ConsumerRecord<String, String> consumerRecord = replyFuture.get();
        return consumerRecord.value();
    }
}
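The reply consumed here has to be produced by whichever service listens on the request topic; that responder is not part of this commit. A hypothetical sketch of such a responder is shown below. It assumes the responder answers on the topic carried in the REPLY_TOPIC header and echoes the custom "messageID" correlation header configured in ErvuKafkaConfig; the class name, listener group id and payload are all made up:

import java.nio.charset.StandardCharsets;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;

@Service
public class ErvuOrgResponder {

    @Autowired
    private KafkaTemplate<String, String> kafkaTemplate;

    @KafkaListener(topics = "ervu.organization.request", groupId = "ervu-responder")
    public void onRequest(ConsumerRecord<String, String> request) {
        // Resolve the reply topic and correlation id from the request headers.
        Header replyTopic = request.headers().lastHeader(KafkaHeaders.REPLY_TOPIC);
        Header correlationId = request.headers().lastHeader("messageID");
        if (replyTopic == null || correlationId == null) {
            return; // not a request-reply message
        }
        // Hypothetical payload; a real responder would look the organization up.
        String payload = "{\"success\":true,\"message\":\"ok\","
                + "\"data\":{\"orgId_ERVU\":\"ORG-1\",\"prnOid\":\"OID-1\"}}";
        ProducerRecord<String, String> reply = new ProducerRecord<>(
                new String(replyTopic.value(), StandardCharsets.UTF_8), payload);
        // Echo the correlation header so ReplyingKafkaTemplate can match the reply.
        reply.headers().add("messageID", correlationId.value());
        kafkaTemplate.send(reply);
    }
}

On the requesting side, replyFuture.get() waits up to the template's default reply timeout; with Spring Kafka 2.3+ this can be tuned via setDefaultReplyTimeout(Duration) or the sendAndReceive(record, Duration) overload.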

View file

@@ -19,7 +19,9 @@ import javax.servlet.http.HttpServletResponse;
import com.fasterxml.jackson.databind.ObjectMapper;
import ervu.model.Brhs;
import ervu.model.Employee;
import ervu.model.ErvuOrgResponse;
import ervu.model.OrgInfo;
import ervu.service.kafka.service.ErvuKafkaService;
import org.apache.kafka.clients.consumer.Consumer;
import org.apache.kafka.clients.consumer.ConsumerRecords;
import org.apache.kafka.common.utils.Bytes;
@@ -51,6 +53,9 @@ public class EsiaAuthService {
@Autowired
private JwtTokenService jwtTokenService;
@Autowired
private ErvuKafkaService ervuKafkaService;
public String generateAuthCodeUrl() {
try {
String clientId = esiaConfig.getClientId();
@@ -193,7 +198,8 @@
response.addCookie(cookieRefresh);
EsiaAccessToken esiaAccessToken = ulDataService.readToken(accessToken);
Token token = jwtTokenService.createAccessToken(esiaAccessToken.getSbj_id(), tokenResponse.getExpires_in());
String ervuId = getErvuId(accessToken);
Token token = jwtTokenService.createAccessToken(esiaAccessToken.getSbj_id(), tokenResponse.getExpires_in(), ervuId);
Cookie isAuthToken = new Cookie("auth_token", token.getValue());
isAuthToken.setPath("/");
response.addCookie(isAuthToken);
@@ -277,7 +283,8 @@
cookieRefresh.setPath("/");
response.addCookie(cookieRefresh);
EsiaAccessToken esiaAccessToken = ulDataService.readToken(accessToken);
Token token = jwtTokenService.createAccessToken(esiaAccessToken.getSbj_id(), tokenResponse.getExpires_in());
String ervuId = getErvuId(accessToken);
Token token = jwtTokenService.createAccessToken(esiaAccessToken.getSbj_id(), tokenResponse.getExpires_in(), ervuId);
Cookie isAuthToken = new Cookie("auth_token", token.getValue());
isAuthToken.setPath("/");
response.addCookie(isAuthToken);
@@ -350,11 +357,20 @@
        }
    }
    public String getErvuId(String accessToken, HttpServletResponse response) {
        OrganizationModel organizationModel = ulDataService.getOrganizationModel(accessToken);
        EmployeeModel employeeModel = ulDataService.getEmployeeModel(accessToken);
        EmployeeModel chiefModel = ulDataService.getChiefEmployeeModel(accessToken);
        OrgInfo orgInfo = copyToOrgInfo(organizationModel, employeeModel, chiefModel);
    public String getErvuId(String accessToken) {
        try {
            OrganizationModel organizationModel = ulDataService.getOrganizationModel(accessToken);
            EmployeeModel employeeModel = ulDataService.getEmployeeModel(accessToken);
            EmployeeModel chiefModel = ulDataService.getChiefEmployeeModel(accessToken);
            OrgInfo orgInfo = copyToOrgInfo(organizationModel, employeeModel, chiefModel);
            String kafkaResponse = ervuKafkaService.sendMessageAndGetReply(objectMapper.writeValueAsString(orgInfo));
            ErvuOrgResponse ervuOrgResponse = objectMapper.readValue(kafkaResponse, ErvuOrgResponse.class);
            return ervuOrgResponse.getData().getOrgId_ERVU();
        } catch (Exception e) {
            throw new RuntimeException(e);
        }
    }

View file

@@ -38,11 +38,11 @@ public class JwtTokenService {
        this.SIGNING_KEY = Keys.hmacShaKeyFor(encodedKey);
    }

    public Token createAccessToken(String userAccountId, Long expiresIn) {
    public Token createAccessToken(String userAccountId, Long expiresIn, String ervuId) {
        Date expirationDate = new Date(System.currentTimeMillis() + 1000L * expiresIn);
        String value = Jwts.builder()
                .setSubject(userAccountId)
                .setSubject(userAccountId + ":" + ervuId)
                .setIssuer(tokenIssuerName)
                .setIssuedAt(new Date(System.currentTimeMillis()))
                .setExpiration(expirationDate)
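Because the ERVU id is now packed into the JWT subject as userAccountId:ervuId, whatever validates the token later has to split the subject back apart. A minimal sketch of that read side, assuming jjwt 0.11.x (the builder API used above) and that the account id itself never contains a ':'; the helper class is hypothetical:

import io.jsonwebtoken.Claims;
import io.jsonwebtoken.Jwts;

import javax.crypto.SecretKey;

public final class TokenSubjects {

    private TokenSubjects() {
    }

    // Splits the "userAccountId:ervuId" subject written by createAccessToken.
    // The limit of 2 assumes the account id itself contains no ':'.
    public static String[] splitSubject(SecretKey signingKey, String jwt) {
        Claims claims = Jwts.parserBuilder()
                .setSigningKey(signingKey)
                .build()
                .parseClaimsJws(jwt)
                .getBody();
        return claims.getSubject().split(":", 2);
    }
}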

View file

@@ -50,3 +50,4 @@ xa-data-source add \
/system-property=sign-url:add(value="https://ervu-sign-dev.k8s.micord.ru/sign")
/system-property=esia-uri.logout:add(value="https://esia-portal1.test.gosuslugi.ru/idp/ext/Logout")
/system-property=client-cert-hash:add(value="04508B4B0B58776A954A0E15F574B4E58799D74C61EE020B3330716C203E3BDD")
/system-property=ervu-kafka.bootstrap-servers:add(value="localhost:9092")