Merge branch 'release/1.8' into feature/SUPPORT-8546_cut_unused
commit a93448f461
33 changed files with 316 additions and 168 deletions

pom.xml
@@ -5,7 +5,7 @@
   <parent>
     <groupId>ru.micord.ervu.lkrp</groupId>
     <artifactId>ul</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
+    <version>1.8.0-SNAPSHOT</version>
   </parent>
   <groupId>ru.micord.ervu.lkrp.ul</groupId>
   <artifactId>backend</artifactId>

AppConfig.java
@@ -1,4 +1,5 @@
 import java.time.Duration;
+import java.util.List;
 import javax.sql.DataSource;
 
 import liquibase.integration.spring.SpringLiquibase;
@@ -14,10 +15,13 @@ import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.EnableAspectJAutoProxy;
 import org.springframework.context.annotation.FilterType;
 import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
+import org.springframework.http.converter.HttpMessageConverter;
+import org.springframework.http.converter.ResourceHttpMessageConverter;
 import org.springframework.retry.annotation.EnableRetry;
 import org.springframework.scheduling.annotation.EnableScheduling;
 import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
 import org.springframework.web.servlet.config.annotation.EnableWebMvc;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;
 
 /**
  * Root application context
@@ -47,7 +51,7 @@ import org.springframework.web.servlet.config.annotation.EnableWebMvc;
 @EnableWebMvc
 @EnableScheduling
 @EnableRetry
-public class AppConfig {
+public class AppConfig implements WebMvcConfigurer {
 
     @Bean
     public PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer(){
@@ -78,4 +82,9 @@ public class AppConfig {
         liquibase.setChangeLog("classpath:config/changelog-master.xml");
         return liquibase;
     }
+
+    @Override
+    public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
+        converters.add(new ResourceHttpMessageConverter());
+    }
 }
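
A Spring MVC subtlety worth flagging here: overriding configureMessageConverters replaces the framework's default converter list, so after this change ResourceHttpMessageConverter is the only registered converter unless more are added. If keeping the defaults (JSON, String, byte[]) was intended, extendMessageConverters appends instead. A minimal sketch of that variant, assuming the same AppConfig class:

    // Sketch only: extendMessageConverters keeps the MVC default converters
    // and appends the Resource converter, instead of replacing the whole list.
    @Override
    public void extendMessageConverters(List<HttpMessageConverter<?>> converters) {
        converters.add(new ResourceHttpMessageConverter());
    }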

WebAppInitializer.java
@@ -1,24 +1,20 @@
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.web.servlet.support.AbstractAnnotationConfigDispatcherServletInitializer;
+import org.springframework.web.util.IntrospectorCleanupListener;
+
 import javax.servlet.MultipartConfigElement;
 import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRegistration;
 
-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.web.servlet.support.AbstractAnnotationConfigDispatcherServletInitializer;
-import org.springframework.web.util.IntrospectorCleanupListener;
-
 /**
  * This initializer creates root context and registers dispatcher servlet
  * Spring scans for initializers automatically
 */
 public class WebAppInitializer extends AbstractAnnotationConfigDispatcherServletInitializer {
 
-    @Value("${ervu.fileupload.max_file_size}")
-    private int maxFileSize;
-    @Value("${ervu.fileupload.max_request_size}")
-    private int maxRequestSize;
-    @Value("${ervu.fileupload.file_size_threshold}")
-    private int fileSizeThreshold;
+    private static final Logger logger = LoggerFactory.getLogger(WebAppInitializer.class);
 
     public void onStartup(ServletContext servletContext) throws ServletException {
         super.onStartup(servletContext);
@@ -41,11 +37,37 @@ public class WebAppInitializer extends AbstractAnnotationConfigDispatcherServletInitializer {
 
     @Override
     protected void customizeRegistration(ServletRegistration.Dynamic registration) {
+
+        // read from env or assign default values
+        int maxFileSize = parseOrDefault("ERVU_FILE_UPLOAD_MAX_FILE_SIZE", 5242880);
+        int maxRequestSize = parseOrDefault("ERVU_FILE_UPLOAD_MAX_REQUEST_SIZE", 6291456);
+        int fileSizeThreshold = parseOrDefault("ERVU_FILE_UPLOAD_FILE_SIZE_THRESHOLD", 0);
+
         MultipartConfigElement multipartConfigElement = new MultipartConfigElement(
                 "/tmp",
                 maxFileSize,
                 maxRequestSize,
                 fileSizeThreshold);
         registration.setMultipartConfig(multipartConfigElement);
+
+        logger.info("Max file upload size is set to: " + multipartConfigElement.getMaxFileSize());
+        logger.info("Max file upload request size is set to: " + multipartConfigElement.getMaxRequestSize());
+        logger.info("File size threshold is set to: " + multipartConfigElement.getFileSizeThreshold());
+
    }
+
+    private int parseOrDefault(String envVar, int defaultVal) {
+        String envVarValue = System.getenv(envVar);
+        if (envVarValue == null) {
+            logger.info("Environment variable {} is null, using default value: {}", envVar, defaultVal);
+            return defaultVal;
+        }
+        try {
+            return Integer.parseInt(envVarValue);
+        } catch (NumberFormatException e) {
+            logger.info("Environment variable {} is not an integer, using default value: {}", envVar, defaultVal);
+            return defaultVal;
+        }
+    }
+
 }
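
The registration now reads the multipart limits from environment variables at startup instead of @Value fields: the servlet initializer is instantiated by the container, not by Spring, so @Value injection never applied to it. A standalone sketch of the parse-or-default pattern used above; the demo class and main are illustrative only:

    // Standalone illustration of the env-with-default pattern used in the diff.
    public class EnvDemo {
        static int parseOrDefault(String envVar, int defaultVal) {
            String value = System.getenv(envVar);   // null when the variable is unset
            if (value == null) {
                return defaultVal;
            }
            try {
                return Integer.parseInt(value);
            } catch (NumberFormatException e) {     // e.g. "5MB" instead of a byte count
                return defaultVal;
            }
        }

        public static void main(String[] args) {
            // Prints 5242880 unless ERVU_FILE_UPLOAD_MAX_FILE_SIZE is set to another integer.
            System.out.println(parseOrDefault("ERVU_FILE_UPLOAD_MAX_FILE_SIZE", 5242880));
        }
    }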

AvKafkaConfig.java
@@ -4,12 +4,17 @@ import java.util.HashMap;
 import java.util.Map;
 
 import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.config.SaslConfigs;
+import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.core.ConsumerFactory;
+import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
 import org.springframework.kafka.core.DefaultKafkaProducerFactory;
 import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.kafka.core.ProducerFactory;
@@ -18,7 +23,7 @@
  * @author Alexandr Shalaginov
 */
 @Configuration
-public class KafkaProducerConfig {
+public class AvKafkaConfig {
     @Value("${av.kafka.bootstrap.servers}")
     private String kafkaUrl;
     @Value("${av.kafka.security.protocol}")
@@ -32,12 +37,12 @@
     @Value("${av.kafka.sasl.mechanism}")
     private String saslMechanism;
 
-    @Bean("av-factory")
-    public ProducerFactory<String, String> producerFactory() {
+    @Bean
+    public ProducerFactory<String, String> avProducerFactory() {
         return new DefaultKafkaProducerFactory<>(producerConfigs());
     }
 
-    @Bean("av-configs")
+    @Bean
     public Map<String, Object> producerConfigs() {
         Map<String, Object> props = new HashMap<>();
         props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaUrl);
@@ -52,9 +57,36 @@
         return props;
     }
 
-    @Bean("av-template")
+    @Bean
+    public ConsumerFactory<String, String> avConsumerFactory() {
+        return new DefaultKafkaConsumerFactory<>(consumerConfigs());
+    }
+
+    @Bean
+    public Map<String, Object> consumerConfigs() {
+        Map<String, Object> props = new HashMap<>();
+        props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaUrl);
+        props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+        props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+
+        props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
+        props.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\""
+                + username + "\" password=\"" + password + "\";");
+        props.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
+
+        return props;
+    }
+
+    @Bean("avContainerFactory")
+    public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
+        ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
+        factory.setConsumerFactory(avConsumerFactory());
+        return factory;
+    }
+
+    @Bean("avTemplate")
     public KafkaTemplate<String, String> kafkaTemplate() {
-        return new KafkaTemplate<>(producerFactory());
+        return new KafkaTemplate<>(avProducerFactory());
     }
 }
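
The new avConsumerFactory/avContainerFactory beans let listeners consume from the AV cluster with the same SASL settings as the producer; the @KafkaListener added to EmployeeInfoFileUploadService below selects this container factory by name. A minimal consumer sketch under those assumptions (the listener class itself is hypothetical, the topic property comes from the environment configuration at the end of this diff):

    import org.springframework.kafka.annotation.KafkaListener;
    import org.springframework.stereotype.Component;

    // Sketch: a listener bound to the avContainerFactory bean defined above.
    @Component
    public class AvStatusListener {
        @KafkaListener(topics = "${av.kafka.download.response}", containerFactory = "avContainerFactory")
        public void onMessage(String message) {
            // payload is the JSON status message produced on the AV side
            System.out.println("AV status: " + message);
        }
    }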

EmployeeInfoFileUploadController.java
@@ -1,11 +1,17 @@
 package ervu.controller;
 
+import java.time.ZonedDateTime;
+import java.util.TimeZone;
 import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 
 import ervu.service.fileupload.EmployeeInfoFileUploadService;
 import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.*;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.RestController;
 import org.springframework.web.multipart.MultipartFile;
 
 /**
@@ -21,7 +27,8 @@ public class EmployeeInfoFileUploadController {
 
     @RequestMapping(value = "/employee/document", method = RequestMethod.POST)
     public ResponseEntity<?> saveEmployeeInformationFile(@RequestParam("file") MultipartFile multipartFile,
-            @RequestHeader("X-Employee-Info-File-Form-Type") String formType, HttpServletRequest request) {
+            @RequestHeader("X-Employee-Info-File-Form-Type") String formType,
+            @RequestHeader("Client-Time-Zone") String clientTimeZone, HttpServletRequest request) {
         String accessToken = null;
         String authToken = null;
         Cookie[] cookies = request.getCookies();
@@ -35,11 +42,17 @@ public class EmployeeInfoFileUploadController {
                 }
             }
         }
-        if (accessToken != null && this.fileUploadService.saveEmployeeInformationFile(multipartFile, formType, accessToken, authToken)) {
-            return ResponseEntity.ok("File successfully uploaded.");
-        }
-        else {
-            return ResponseEntity.internalServerError().body("An error occurred while uploading file.");
+
+        if (accessToken != null) {
+            String offset = ZonedDateTime.now(TimeZone.getTimeZone(clientTimeZone).toZoneId())
+                    .getOffset().getId();
+
+            if (this.fileUploadService.saveEmployeeInformationFile(multipartFile, formType, accessToken,
+                    authToken, offset)) {
+                return ResponseEntity.ok("File successfully uploaded.");
+            }
         }
+
+        return ResponseEntity.internalServerError().body("An error occurred while uploading file.");
     }
 }
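
The controller now derives a numeric UTC offset from the Client-Time-Zone header instead of the server's zone. Note that TimeZone.getTimeZone(...) silently returns GMT for an unrecognized ID, so a malformed header degrades to "Z" rather than failing. A small standalone sketch of the conversion:

    import java.time.ZonedDateTime;
    import java.util.TimeZone;

    public class OffsetDemo {
        public static void main(String[] args) {
            // "Europe/Moscow" -> "+03:00"; an unknown ID falls back to GMT -> "Z"
            String offset = ZonedDateTime.now(TimeZone.getTimeZone("Europe/Moscow").toZoneId())
                    .getOffset().getId();
            System.out.println(offset);
        }
    }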

DownloadResponse.java (new file)
@@ -0,0 +1,7 @@
+package ervu.model.fileupload;
+
+/**
+ * @author r.latypov
+ */
+public record DownloadResponse(OrgInfo orgInfo, FileInfo fileInfo) {
+}
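
Jackson binds JSON to this record through its canonical constructor (supported natively since jackson-databind 2.12, which is assumed here); the FileInfo and OrgInfo changes just below, dropping final and adding no-arg constructors, exist to make those nested beans deserializable the same way. A hedged sketch with a minimal placeholder payload:

    import com.fasterxml.jackson.databind.ObjectMapper;
    import ervu.model.fileupload.DownloadResponse;

    public class DownloadResponseDemo {
        public static void main(String[] args) throws Exception {
            // Records bind via the canonical constructor; the nested beans
            // bind via the no-arg constructors added in this commit.
            ObjectMapper mapper = new ObjectMapper();
            DownloadResponse response = mapper.readValue(
                    "{\"orgInfo\":{},\"fileInfo\":{}}", DownloadResponse.class);
            System.out.println(response.fileInfo());
        }
    }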

FileInfo.java
@@ -6,14 +6,17 @@ import java.util.Objects;
  * @author Alexandr Shalaginov
 */
 public class FileInfo {
-    private final String fileId;
-    private final String fileUrl;
-    private final String fileName;
-    private final String filePatternCode;
-    private final String filePatternName;
-    private final String departureDateTime;
-    private final String timeZone;
-    private final FileStatus fileStatus;
+    private String fileId;
+    private String fileUrl;
+    private String fileName;
+    private String filePatternCode;
+    private String filePatternName;
+    private String departureDateTime;
+    private String timeZone;
+    private FileStatus fileStatus;
 
+    public FileInfo() {
+    }
+
     public FileInfo(String fileId, String fileUrl, String fileName, String filePatternCode,
             String filePatternName, String departureDateTime, String timeZone, FileStatus fileStatus) {

OrgInfo.java
@@ -8,9 +8,12 @@ import ru.micord.ervu.journal.SenderInfo;
  * @author Alexandr Shalaginov
 */
 public class OrgInfo {
-    private final String orgName;
-    private final String orgId;
-    private final SenderInfo senderInfo;
+    private String orgName;
+    private String orgId;
+    private SenderInfo senderInfo;
 
+    public OrgInfo() {
+    }
+
     public OrgInfo(String orgName, String orgId, SenderInfo senderInfo) {
         this.orgName = orgName;
@@ -26,7 +29,7 @@
         return orgId;
     }
 
-    public SenderInfo getPrnOid() {
+    public SenderInfo getSenderInfo() {
         return senderInfo;
     }

EmployeeInfoFileUploadService.java
@@ -1,22 +1,25 @@
 package ervu.service.fileupload;
 
+import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
 import java.time.LocalDateTime;
-import java.time.ZonedDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.UUID;
 
 import com.fasterxml.jackson.core.JsonProcessingException;
 import com.fasterxml.jackson.databind.ObjectMapper;
 import ervu.client.fileupload.FileUploadWebDavClient;
+import ervu.model.fileupload.DownloadResponse;
 import ervu.model.fileupload.EmployeeInfoFileFormType;
 import ervu.model.fileupload.EmployeeInfoKafkaMessage;
+import ervu.model.fileupload.FileInfo;
 import ervu.model.fileupload.FileStatus;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.stereotype.Service;
 import org.springframework.web.multipart.MultipartFile;
@@ -27,13 +30,15 @@ import ru.micord.ervu.security.webbpm.jwt.model.Token;
 import ru.micord.ervu.security.webbpm.jwt.service.JwtTokenService;
 import ru.micord.ervu.service.InteractionService;
 
+import static ru.micord.ervu.util.StringUtils.convertToFio;
+
 /**
  * @author Alexandr Shalaginov
 */
 @Service
 public class EmployeeInfoFileUploadService {
     private static final Logger logger = LoggerFactory.getLogger(EmployeeInfoFileUploadService.class);
-    private static final String FORMAT = "dd.MM.yyyy HH:mm";
+    private static final String FORMAT = "dd.MM.yyyy HH:mm:ss";
 
     private final FileUploadWebDavClient fileWebDavUploadClient;
     private final EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService;
@@ -54,7 +59,8 @@ public class EmployeeInfoFileUploadService {
     public EmployeeInfoFileUploadService(
             FileUploadWebDavClient fileWebDavUploadClient,
             EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService,
-            @Qualifier("av-template") KafkaTemplate<String, String> kafkaTemplate, InteractionService interactionService,
+            @Qualifier("avTemplate") KafkaTemplate<String, String> kafkaTemplate,
+            InteractionService interactionService,
             UlDataService ulDataService, JwtTokenService jwtTokenService) {
         this.fileWebDavUploadClient = fileWebDavUploadClient;
         this.employeeInfoKafkaMessageService = employeeInfoKafkaMessageService;
@@ -65,11 +71,9 @@ public class EmployeeInfoFileUploadService {
     }
 
     public boolean saveEmployeeInformationFile(MultipartFile multipartFile, String formType,
-            String accessToken, String authToken) {
+            String accessToken, String authToken, String offset) {
         String fileUploadUrl = this.url + "/" + getNewFilename(multipartFile.getOriginalFilename());
         LocalDateTime now = LocalDateTime.now();
-        String departureDateTime = now.format(DateTimeFormatter.ofPattern(FORMAT));;
-        String timeZone = getTimeZone();
 
         if (this.fileWebDavUploadClient.webDavUploadFile(fileUploadUrl, username, password, multipartFile)) {
             FileStatus fileStatus = new FileStatus();
@@ -83,6 +87,7 @@ public class EmployeeInfoFileUploadService {
             PersonModel personModel = employeeModel.getPerson();
             Token token = jwtTokenService.getToken(authToken);
             String[] ids = token.getUserAccountId().split(":");
+            String departureDateTime = now.format(DateTimeFormatter.ofPattern(FORMAT));
             String jsonMessage = getJsonKafkaMessage(
                     employeeInfoKafkaMessageService.getKafkaMessage(
                             fileId,
@@ -91,16 +96,17 @@ public class EmployeeInfoFileUploadService {
                             employeeInfoFileFormType,
                             departureDateTime,
                             accessToken,
-                            timeZone,
+                            offset,
                             fileStatus,
                             ids[1],
                             ids[0],
                             personModel
                     )
             );
-            interactionService.setStatus(fileId, fileStatus.getStatus(), fileName, employeeInfoFileFormType.getFilePatternName(), Timestamp.valueOf(now),
-                    personModel.getLastName() + " " + personModel.getFirstName().charAt(0) + ". " + personModel.getMiddleName().charAt(0) + ".", (int) multipartFile.getSize(),
-                    ids[1]);
+            interactionService.setStatus(fileId, fileStatus.getStatus(), fileName,
+                    employeeInfoFileFormType.getFilePatternCode(), Timestamp.valueOf(now),
+                    convertToFio(personModel.getFirstName(), personModel.getMiddleName(), personModel.getLastName()),
+                    (int) multipartFile.getSize(), ids[1]);
             return sendMessage(jsonMessage);
         }
         else {
@@ -111,6 +117,8 @@ public class EmployeeInfoFileUploadService {
 
     private boolean sendMessage(String message) {
         ProducerRecord<String, String> record = new ProducerRecord<>(this.kafkaTopicName, message);
+        record.headers().add("messageId", UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
+
         try {
             this.kafkaTemplate.send(record).get();
             logger.debug("Success send record: {}", record);
@@ -144,7 +152,19 @@ public class EmployeeInfoFileUploadService {
         }
     }
 
-    private String getTimeZone() {
-        return ZonedDateTime.now().getOffset().toString();
+    @KafkaListener(id = "${av.kafka.group.id}", topics = "${av.kafka.download.response}",
+            containerFactory = "avContainerFactory")
+    public void listenKafka(String kafkaMessage) {
+        ObjectMapper mapper = new ObjectMapper();
+        try {
+            DownloadResponse downloadResponse = mapper.readValue(kafkaMessage, DownloadResponse.class);
+            FileInfo fileInfo = downloadResponse.fileInfo();
+            interactionService.updateStatus(fileInfo.getFileId(), fileInfo.getFileStatus().getStatus(),
+                    downloadResponse.orgInfo().getOrgId()
+            );
+        }
+        catch (JsonProcessingException e) {
+            throw new RuntimeException(String.format("Fail get json from: %s", kafkaMessage), e);
+        }
     }
 }

EmployeeInfoKafkaMessageService.java
@@ -25,7 +25,7 @@ public class EmployeeInfoKafkaMessageService {
 
     public EmployeeInfoKafkaMessage getKafkaMessage(String fileId, String fileUrl, String fileName,
             EmployeeInfoFileFormType formType, String departureDateTime, String accessToken,
-            String timeZone, FileStatus fileStatus, String ervuId, String prnOid, PersonModel personModel) {
+            String offset, FileStatus fileStatus, String ervuId, String prnOid, PersonModel personModel) {
         return new EmployeeInfoKafkaMessage(
                 getOrgInfo(accessToken, ervuId, prnOid, personModel),
                 getFileInfo(
@@ -34,14 +34,14 @@
                         fileName,
                         formType,
                         departureDateTime,
-                        timeZone,
+                        offset,
                         fileStatus
                 )
         );
     }
 
     private FileInfo getFileInfo(String fileId, String fileUrl, String fileName,
-            EmployeeInfoFileFormType formType, String departureDateTime, String timeZone,
+            EmployeeInfoFileFormType formType, String departureDateTime, String offset,
             FileStatus fileStatus) {
         return new FileInfo(
                 fileId,
@@ -50,7 +50,7 @@
                 formType.getFilePatternCode(),
                 formType.getFilePatternName(),
                 departureDateTime,
-                timeZone,
+                offset,
                 fileStatus
         );
     }

JournalDtoMapper.java
@@ -1,8 +1,6 @@
 package ru.micord.ervu.journal.mapper;
 
 import java.sql.Timestamp;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;
 
 import ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.tables.records.InteractionLogRecord;
 import ru.micord.ervu.journal.JournalDto;
@@ -16,9 +14,7 @@ public class JournalDtoMapper {
     public static JournalDto mapToJournalDto(JournalFileInfo journalFileInfo) {
         SenderInfo senderInfo = journalFileInfo.getSenderInfo();
         return new JournalDto()
-                .setDepartureDateTime(Timestamp.from(ZonedDateTime.of(journalFileInfo.getDepartureDateTime(),
-                        ZoneOffset.of(journalFileInfo.getTimeZone())
-                ).toInstant()).toString())
+                .setDepartureDateTime(Timestamp.valueOf(journalFileInfo.getDepartureDateTime()).toString())
                 .setFileName(journalFileInfo.getFileName())
                 .setFilePatternCode(journalFileInfo.getFilePatternCode())
                 .setSenderFio(convertToFio(senderInfo.getFirstName(), senderInfo.getMiddleName(),
@@ -33,7 +29,7 @@
         return new JournalDto()
                 .setDepartureDateTime(record.getSentDate().toString())
                 .setFileName(record.getFileName())
-                .setFilePatternCode(Integer.valueOf(record.getForm().replace("№", "")))
+                .setFilePatternCode(Integer.valueOf(record.getForm()))
                 .setSenderFio(record.getSender())
                 .setStatus(record.getStatus())
                 .setFilesSentCount(record.getRecordsSent())

ReplyingKafkaConfig.java
@@ -6,7 +6,6 @@ import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.config.SaslConfigs;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -48,8 +47,8 @@ public class ReplyingKafkaConfig {
     @Value("${ervu.kafka.sasl.mechanism}")
     private String saslMechanism;
 
-    @Bean("ervu")
-    public ProducerFactory<String, String> producerFactory() {
+    @Bean
+    public ProducerFactory<String, String> ervuProducerFactory() {
         Map<String, Object> configProps = new HashMap<>();
         configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
         configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
@@ -61,14 +60,13 @@
         return new DefaultKafkaProducerFactory<>(configProps);
     }
 
-    @Bean
-    @Qualifier("ervu")
+    @Bean("ervuTemplate")
     public KafkaTemplate<String, String> kafkaTemplate() {
-        return new KafkaTemplate<>(producerFactory());
+        return new KafkaTemplate<>(ervuProducerFactory());
     }
 
-    @Bean
-    public ConsumerFactory<String, String> consumerFactory() {
+    @Bean("ervuConsumerFactory")
+    public ConsumerFactory<String, String> ervuConsumerFactory() {
         Map<String, Object> configProps = new HashMap<>();
         configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
         configProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
@@ -84,12 +82,11 @@
     @Bean
     public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
         ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
-        factory.setConsumerFactory(consumerFactory());
+        factory.setConsumerFactory(ervuConsumerFactory());
         return factory;
     }
 
-    // @Bean()
-    // @Qualifier("excerpt-container")
+    // @Bean("excerpt-container")
     // public ConcurrentMessageListenerContainer<String, String> excerptReplyContainer(
     //         ConcurrentKafkaListenerContainerFactory<String, String> factory) {
     //     ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(
@@ -98,16 +95,14 @@
     //     return container;
     // }
     //
-    // @Bean()
-    // @Qualifier("excerpt-template")
+    // @Bean("excerpt-template")
     // public ReplyingKafkaTemplate<String, String, String> excerptReplyingKafkaTemplate(
     //         @Qualifier("ervu") ProducerFactory<String, String> pf,
     //         @Qualifier("excerpt-container") ConcurrentMessageListenerContainer<String, String> container) {
     //     return initReplyingKafkaTemplate(pf, container);
     // }
     //
-    // @Bean
-    // @Qualifier("org")
+    // @Bean("org")
     // public ConcurrentMessageListenerContainer<String, String> replyContainer(
     //         ConcurrentKafkaListenerContainerFactory<String, String> factory) {
     //     ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(
@@ -116,8 +111,7 @@
     //     return container;
     // }
     //
-    // @Bean
-    // @Qualifier("journal")
+    // @Bean("journal")
     // public ConcurrentMessageListenerContainer<String, String> journalReplyContainer(
     //         ConcurrentKafkaListenerContainerFactory<String, String> factory) {
     //     ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(
@@ -126,16 +120,14 @@
     //     return container;
     // }
     //
-    // @Bean
-    // @Qualifier("org")
+    // @Bean("org")
     // public ReplyingKafkaTemplate<String, String, String> orgReplyingKafkaTemplate(
     //         @Qualifier("ervu") ProducerFactory<String, String> pf,
     //         @Qualifier("org") ConcurrentMessageListenerContainer<String, String> container) {
     //     return initReplyingKafkaTemplate(pf, container);
     // }
     //
-    // @Bean
-    // @Qualifier("journal")
+    // @Bean("journal")
     // public ReplyingKafkaTemplate<String, String, String> journalReplyingKafkaTemplate(
     //         @Qualifier("ervu") ProducerFactory<String, String> pf,
     //         @Qualifier("journal") ConcurrentMessageListenerContainer<String, String> container) {

ErvuKafkaController.java
@@ -1,20 +1,16 @@
 package ru.micord.ervu.kafka.controller;
 
-import java.util.Arrays;
-import java.util.Optional;
 import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;
 
 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
-import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
 import org.springframework.http.ResponseEntity;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
 import ru.micord.ervu.kafka.model.Data;
-import ru.micord.ervu.kafka.model.ErvuOrgResponse;
 import ru.micord.ervu.kafka.model.ExcerptResponse;
 import ru.micord.ervu.kafka.service.ReplyingKafkaService;
 import ru.micord.ervu.s3.S3Service;
@@ -47,7 +43,7 @@ public class ErvuKafkaController {
     private ObjectMapper objectMapper;
 
     @RequestMapping(value = "/kafka/excerpt")
-    public ResponseEntity<InputStreamResource> getExcerptFile(HttpServletRequest request) {
+    public ResponseEntity<Resource> getExcerptFile(HttpServletRequest request) {
         try {
             String authToken = getAuthToken(request);
             Token token = jwtTokenService.getToken(authToken);

ExcerptResponse.java
@@ -1,9 +1,6 @@
 package ru.micord.ervu.kafka.model;
 
 import java.io.Serializable;
-import java.util.Date;
 
-import com.fasterxml.jackson.annotation.JsonFormat;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 
 /**
@@ -13,25 +10,10 @@
 public class ExcerptResponse implements Serializable {
     private static final long serialVersionUID = 1L;
 
-    private String excerptId;
-
     private String orgId;
 
     private String fileUrl;
 
-    @JsonFormat(pattern = "dd.MM.yyyy HH:mm")
-    private Date departureDateTime;
-
-    private String timeZone;
-
-    public String getExcerptId() {
-        return excerptId;
-    }
-
-    public void setExcerptId(String excerptId) {
-        this.excerptId = excerptId;
-    }
-
     public String getOrgId() {
         return orgId;
     }
@@ -40,22 +22,6 @@
         this.orgId = orgId;
     }
 
-    public Date getDepartureDateTime() {
-        return departureDateTime;
-    }
-
-    public void setDepartureDateTime(Date departureDateTime) {
-        this.departureDateTime = departureDateTime;
-    }
-
-    public String getTimeZone() {
-        return timeZone;
-    }
-
-    public void setTimeZone(String timeZone) {
-        this.timeZone = timeZone;
-    }
-
     public String getFileUrl() {
         return fileUrl;
     }

BaseReplyingKafkaServiceImpl.java
@@ -2,16 +2,17 @@ package ru.micord.ervu.kafka.service.impl;
 
 import java.nio.charset.StandardCharsets;
 import java.time.Duration;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Optional;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicReference;
 
 import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.header.internals.RecordHeader;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.kafka.core.ConsumerFactory;
@@ -26,24 +27,30 @@ import ru.micord.ervu.kafka.service.ReplyingKafkaService;
 @Service
 public class BaseReplyingKafkaServiceImpl implements ReplyingKafkaService {
 
-    // protected abstract ReplyingKafkaTemplate<String, String, String> getReplyingKafkaTemplate();
-    @Autowired
-    @Qualifier("ervu")
-    private KafkaTemplate<String, String> kafkaTemplate;
-    @Autowired
-    private ConsumerFactory<String, String> consumerFactory;
+    private static final String MESSAGE_ID_HEADER = "messageId";
+
+    // protected abstract ReplyingKafkaTemplate<String, String, String> getReplyingKafkaTemplate()
+    private final KafkaTemplate<String, String> kafkaTemplate;
+    private final ConsumerFactory<String, String> consumerFactory;
 
     @Value("${ervu.kafka.group.id}")
     private String groupId;
     @Value("${ervu.kafka.reply.timeout:30}")
     private long replyTimeout;
 
+    public BaseReplyingKafkaServiceImpl(
+            @Qualifier("ervuTemplate") KafkaTemplate<String, String> kafkaTemplate,
+            @Qualifier("ervuConsumerFactory") ConsumerFactory<String, String> consumerFactory) {
+        this.kafkaTemplate = kafkaTemplate;
+        this.consumerFactory = consumerFactory;
+    }
+
     public String sendMessageAndGetReply(String requestTopic,
             String replyTopic,
             String requestMessage) {
         ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
         record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, replyTopic.getBytes()));
-        //TODO fix No pending reply error
+        //TODO fix No pending reply error SUPPORT-8591
         // RequestReplyFuture<String, String, String> replyFuture = getReplyingKafkaTemplate()
         //         .sendAndReceive(record);
         //
@@ -56,7 +63,8 @@
         //     throw new RuntimeException("Failed to get kafka response.", e);
         // }
 
-        record.headers().add("messageId", UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
+        String messageId = UUID.randomUUID().toString();
+        record.headers().add(MESSAGE_ID_HEADER, messageId.getBytes(StandardCharsets.UTF_8));
         kafkaTemplate.send(record);
         AtomicReference<String> responseRef = new AtomicReference<>(null);
 
@@ -64,8 +72,19 @@
                 consumerFactory.createConsumer(groupId, null)) {
             consumer.subscribe(Collections.singletonList(replyTopic));
             ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(replyTimeout));
-            consumerRecords.forEach(consumerRecord -> responseRef.set(consumerRecord.value()));
-            consumer.commitSync();
+
+            for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
+                boolean match = Arrays.stream(consumerRecord.headers().toArray())
+                        .anyMatch(header -> header.key().equals(MESSAGE_ID_HEADER)
+                                && messageId.equals(
+                                new String(header.value(), StandardCharsets.UTF_8)));
+
+                if (match) {
+                    responseRef.set(consumerRecord.value());
+                    consumer.commitSync();
+                    break;
+                }
+            }
         }
         return Optional.ofNullable(responseRef.get())
                 .orElseThrow(() -> new RuntimeException("Kafka return result is null"));
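
The rewrite correlates replies by the messageId header: previously the loop took whatever record the poll returned, so concurrent requests could receive each other's replies, and the offset was committed regardless. Now a record is accepted, and the offset committed, only when its header matches the id attached to the outgoing request. A hedged usage sketch (the client class is hypothetical; topic names are taken from the environment configuration at the end of this diff):

    import org.springframework.stereotype.Service;
    import ru.micord.ervu.kafka.service.ReplyingKafkaService;

    @Service
    public class JournalClientDemo {
        private final ReplyingKafkaService replyingKafkaService;

        public JournalClientDemo(ReplyingKafkaService replyingKafkaService) {
            this.replyingKafkaService = replyingKafkaService;
        }

        public String fetchJournal(String requestJson) {
            // Blocks for up to ervu.kafka.reply.timeout seconds while polling the
            // reply topic for a record whose messageId header matches the request.
            return replyingKafkaService.sendMessageAndGetReply(
                    "ervu.organization.journal.request",
                    "ervu.organization.journal.response",
                    requestJson);
        }
    }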

S3Connection.java
@@ -23,19 +23,22 @@ public class S3Connection {
     private String accessKey;
     @Value("${s3.secret_key}")
     private String secretKey;
+    @Value("${s3.path.style.access.enabled:true}")
+    private boolean pathStyleAccessEnabled;
 
     @Bean("outClient")
     public AmazonS3 getS3OutClient() {
-        return getS3Client(endpoint, accessKey, secretKey);
+        return getS3Client(endpoint, accessKey, secretKey, pathStyleAccessEnabled);
     }
 
-    private static AmazonS3 getS3Client(String endpoint, String accessKey, String secretKey) {
+    private static AmazonS3 getS3Client(String endpoint, String accessKey, String secretKey, Boolean pathStyleAccessEnabled) {
         AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
         String region = Region.getRegion(Regions.DEFAULT_REGION).toString();
 
         return AmazonS3ClientBuilder.standard()
                 .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, region))
                 .withCredentials(new AWSStaticCredentialsProvider(credentials))
                 .withPathStyleAccessEnabled(pathStyleAccessEnabled)
                 .build();
     }
 }

S3Service.java
@@ -1,7 +1,7 @@
 package ru.micord.ervu.s3;
 
-import java.io.File;
-import java.net.URI;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;
 
 import com.amazonaws.AmazonServiceException;
 import com.amazonaws.services.s3.AmazonS3;
@@ -9,6 +9,7 @@ import com.amazonaws.services.s3.AmazonS3URI;
 import com.amazonaws.services.s3.model.S3Object;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.MediaType;
 import org.springframework.http.ResponseEntity;
@@ -25,13 +26,15 @@ public class S3Service {
         this.outClient = outClient;
     }
 
-    public ResponseEntity<InputStreamResource> getFile(String fileUrl) {
+    public ResponseEntity<Resource> getFile(String fileUrl) {
         try {
             AmazonS3URI uri = new AmazonS3URI(fileUrl);
             S3Object s3Object = outClient.getObject(uri.getBucket(), uri.getKey());
             InputStreamResource resource = new InputStreamResource(s3Object.getObjectContent());
+            String encodedFilename = URLEncoder.encode(uri.getKey(), StandardCharsets.UTF_8);
             return ResponseEntity.ok()
-                    .header(HttpHeaders.CONTENT_DISPOSITION, "attachment;filename=" + uri.getKey())
+                    .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename*=UTF-8''" + encodedFilename)
+                    .contentLength(s3Object.getObjectMetadata().getContentLength())
                     .contentType(MediaType.APPLICATION_OCTET_STREAM)
                     .body(resource);
         }
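
The Content-Disposition header now uses the RFC 5987 filename* form, so non-ASCII object keys survive the download, and contentLength lets clients show progress. One hedged caveat: URLEncoder produces form encoding, which turns spaces into '+' rather than '%20', so keys containing spaces may still need a post-replace. A small sketch:

    import java.net.URLEncoder;
    import java.nio.charset.StandardCharsets;

    public class DispositionDemo {
        public static void main(String[] args) {
            // RFC 5987 expects percent-encoding; URLEncoder is form-encoding, so
            // '+' is usually rewritten to "%20" for filename* values.
            String key = "отчёт 2024.xlsx";
            String encoded = URLEncoder.encode(key, StandardCharsets.UTF_8).replace("+", "%20");
            System.out.println("attachment; filename*=UTF-8''" + encoded);
        }
    }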

EsiaAuthService.java
@@ -451,8 +451,8 @@ public class EsiaAuthService {
             brhs.setBrhOid(brhsModel.getBrhOid());
             brhs.setKpp(brhsModel.getKpp());
             brhs.setLeg(brhsModel.getLeg());
-            brhs.setAddresses(brhsModel.getAddresses().getElements());
-            brhs.setContacts(brhsModel.getContacts().getElements());
+            brhs.setAddresses(brhsModel.getAddresses() != null ? brhsModel.getAddresses().getElements() : null);
+            brhs.setContacts(brhsModel.getContacts() != null ? brhsModel.getContacts().getElements() : null);
             return brhs;
         }).toArray(Brhs[]::new));
         orgInfo.setAddresses(organizationModel.getAddresses().getElements());

InteractionService.java
@@ -13,4 +13,6 @@ public interface InteractionService {
     List<InteractionLogRecord> get(String ervuId, String[] excludedStatuses);
 
     void setStatus(String fileId, String status, String fileName, String form, Timestamp timestamp, String sender, Integer count, String ervuId);
+
+    void updateStatus(String fileId, String status, String ervuId);
 }
@@ -42,6 +42,15 @@ public class InteractionServiceImpl implements InteractionService {
         .set(INTERACTION_LOG.SENDER, sender)
         .set(INTERACTION_LOG.FILE_NAME, fileName)
         .set(INTERACTION_LOG.RECORDS_SENT, count)
-        .set(INTERACTION_LOG.ERVU_ID, ervuId);
+        .set(INTERACTION_LOG.ERVU_ID, ervuId)
+        .execute();
+  }
+
+  public void updateStatus(String fileId, String status, String ervuId) {
+    dslContext.update(INTERACTION_LOG)
+        .set(INTERACTION_LOG.STATUS, status)
+        .where(INTERACTION_LOG.ERVU_ID.eq(ervuId))
+        .and(INTERACTION_LOG.FILE_ID.eq(fileId))
+        .execute();
   }
 }
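As rendered in this hunk, the old chain in setStatus ended at .set(INTERACTION_LOG.ERVU_ID, ervuId); with no terminal call; in jOOQ a built query does not touch the database until .execute(), so appending it is the substantive fix here, not a style change. In the new updateStatus, .where(...).and(...) simply conjoins the two predicates; the same statement can be written with one combined condition. A sketch assuming the class's injected dslContext and the generated INTERACTION_LOG table from this diff:

// Equivalent to the committed .where(...).and(...) chain: jOOQ renders
// UPDATE interaction_log SET status = ? WHERE ervu_id = ? AND file_id = ?
public void updateStatus(String fileId, String status, String ervuId) {
  dslContext.update(INTERACTION_LOG)
      .set(INTERACTION_LOG.STATUS, status)
      .where(INTERACTION_LOG.ERVU_ID.eq(ervuId)
          .and(INTERACTION_LOG.FILE_ID.eq(fileId)))
      .execute();
}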
@@ -14,7 +14,7 @@ import static org.springframework.util.StringUtils.hasText;
 public final class DateUtil {

   private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("dd.MM.yyyy");
-  private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm");
+  private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm:ss");

   private DateUtil() {}

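The only change is the pattern: every timestamp formatted through DATE_TIME_FORMATTER now carries seconds. A quick illustrative check of the two patterns side by side:

import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;

public class PatternCheck {
  public static void main(String[] args) {
    LocalDateTime t = LocalDateTime.of(2024, 5, 7, 9, 30, 15); // arbitrary sample instant
    System.out.println(t.format(DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm")));    // 07.05.2024 09:30
    System.out.println(t.format(DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm:ss"))); // 07.05.2024 09:30:15
  }
}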
@@ -15,6 +15,8 @@ AV_KAFKA_SECURITY_PROTOCOL=SASL_PLAINTEXT
 AV_KAFKA_SASL_MECHANISM=SCRAM-SHA-256
 AV_KAFKA_USERNAME=user1
 AV_KAFKA_PASSWORD=Blfi9d2OFG
+AV_KAFKA_GROUP_ID=1
+AV_KAFKA_DOWNLOAD_RESPONSE=ervu.lkrp.av-fileupload-status
 ERVU_FILEUPLOAD_MAX_FILE_SIZE=5242880
 ERVU_FILEUPLOAD_MAX_REQUEST_SIZE=6291456
 ERVU_FILEUPLOAD_FILE_SIZE_THRESHOLD=0
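A consumer group id plus a response topic is the usual shape for a listener awaiting the antivirus upload-status replies. How these values would typically reach a Kafka consumer on the Java side (the wiring is an assumption; only the variable names come from this diff):

import java.util.HashMap;
import java.util.Map;
import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.common.config.SaslConfigs;

public class AvConsumerProps {
  public static Map<String, Object> fromEnv() {
    Map<String, Object> props = new HashMap<>();
    props.put(ConsumerConfig.GROUP_ID_CONFIG, System.getenv("AV_KAFKA_GROUP_ID"));
    props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, System.getenv("AV_KAFKA_SECURITY_PROTOCOL"));
    props.put(SaslConfigs.SASL_MECHANISM, System.getenv("AV_KAFKA_SASL_MECHANISM"));
    // bootstrap servers etc. omitted; the listener itself would
    // subscribe to the topic named in AV_KAFKA_DOWNLOAD_RESPONSE
    return props;
  }
}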
@@ -34,6 +36,7 @@ ERVU_KAFKA_ORG_REQUEST_TOPIC=ervu.organization.request
 ERVU_KAFKA_REPLY_TIMEOUT=30
 ERVU_KAFKA_JOURNAL_REQUEST_TOPIC=ervu.organization.journal.request
 ERVU_KAFKA_JOURNAL_REPLY_TOPIC=ervu.organization.journal.response
+DB.JOURNAL.EXCLUDED.STATUSES=Направлено в ЕРВУ,Получен ЕРВУ
 ESNSI_OKOPF_URL=https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&encoding=UTF_8
 ESNSI_OKOPF_CRON_LOAD=0 0 */1 * * *
 ERVU_KAFKA_SECURITY_PROTOCOL=SASL_PLAINTEXT
@@ -43,6 +46,10 @@ ERVU_KAFKA_PASSWORD=Blfi9d2OFG
 ERVU_KAFKA_EXCERPT_REPLY_TOPIC=ervu.lkrp.excerpt.response
 ERVU_KAFKA_EXCERPT_REQUEST_TOPIC=ervu.lkrp.excerpt.request

+ERVU_FILE_UPLOAD_MAX_FILE_SIZE=5242880
+ERVU_FILE_UPLOAD_MAX_REQUEST_SIZE=6291456
+ERVU_FILE_UPLOAD_FILE_SIZE_THRESHOLD=0
+
 S3_ENDPOINT=http://ervu-minio.k8s.micord.ru:31900
 S3_ACCESS_KEY=rlTdTvkmSXu9FsLhfecw
 S3_SECRET_KEY=NUmY0wwRIEyAd98GCKd1cOgJWvLQYAcMMul5Ulu0
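These three limits (5242880 = 5 MiB, 6291456 = 6 MiB) now live in the environment under the underscored ERVU_FILE_UPLOAD_* spelling, coexisting with the older ERVU_FILEUPLOAD_* trio kept in the hunk above, while the ervu.fileupload.* system properties are dropped from standalone.xml below. Values of this shape typically map onto servlet multipart limits; a sketch of that mapping (the actual wiring class is not shown in this diff):

import javax.servlet.MultipartConfigElement;

public class UploadLimits {
  // An empty location keeps the container's default temp directory.
  public static MultipartConfigElement fromEnv() {
    return new MultipartConfigElement(
        "",
        Long.parseLong(System.getenv("ERVU_FILE_UPLOAD_MAX_FILE_SIZE")),        // 5 MiB per file
        Long.parseLong(System.getenv("ERVU_FILE_UPLOAD_MAX_REQUEST_SIZE")),     // 6 MiB per request
        Integer.parseInt(System.getenv("ERVU_FILE_UPLOAD_FILE_SIZE_THRESHOLD"))); // 0: spool to disk immediately
  }
}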
@@ -64,9 +64,6 @@
     <property name="av.kafka.username" value="user1"/>
     <property name="av.kafka.password" value="Blfi9d2OFG"/>
     <property name="av.kafka.message.topic.name" value="file-to-upload"/>
-    <property name="ervu.fileupload.max_file_size" value="5242880"/>
-    <property name="ervu.fileupload.max_request_size" value="6291456"/>
-    <property name="ervu.fileupload.file_size_threshold" value="0"/>
     <property name="esia.scopes" value="fullname, snils, id_doc, birthdate, usr_org, openid"/>
     <property name="esia.org.scopes" value="org_fullname, org_shortname, org_brhs, org_brhs_ctts, org_brhs_addrs, org_type, org_ogrn, org_inn, org_leg, org_kpp, org_ctts, org_addrs, org_grps, org_emps"/>
     <property name="esia.org.scope.url" value="http://esia.gosuslugi.ru/"/>
@@ -89,11 +86,14 @@
     <property name="esnsi.okopf.url" value="https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&amp;encoding=UTF_8"/>
     <property name="ervu.kafka.journal.request.topic" value="ervu.organization.journal.request"/>
     <property name="ervu.kafka.journal.reply.topic" value="ervu.organization.journal.response"/>
+    <property name="db.journal.excluded.statuses" value="Направлено в ЕРВУ,Получен ЕРВУ"/>
     <property name="ervu.kafka.excerpt.reply.topic" value="ervu.lkrp.excerpt.response"/>
     <property name="ervu.kafka.excerpt.request.topic" value="ervu.lkrp.excerpt.request"/>
     <property name="s3.endpoint" value="http://ervu-minio.k8s.micord.ru:31900"/>
     <property name="s3.access_key" value="rlTdTvkmSXu9FsLhfecw"/>
     <property name="s3.secret_key" value="NUmY0wwRIEyAd98GCKd1cOgJWvLQYAcMMul5Ulu0"/>
+    <property name="av.kafka.group.id" value="1"/>
+    <property name="av.kafka.download.response" value="ervu.lkrp.av-fileupload-status"/>
 </system-properties>
 <management>
     <audit-log>
@@ -4,7 +4,7 @@
   <parent>
     <groupId>ru.micord.ervu.lkrp</groupId>
    <artifactId>ul</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
+    <version>1.8.0-SNAPSHOT</version>
   </parent>

   <groupId>ru.micord.ervu.lkrp.ul</groupId>
@@ -4,7 +4,7 @@
   <parent>
     <groupId>ru.micord.ervu.lkrp</groupId>
     <artifactId>ul</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
+    <version>1.8.0-SNAPSHOT</version>
   </parent>

   <groupId>ru.micord.ervu.lkrp.ul</groupId>
@@ -564,7 +564,7 @@
 .webbpm.ervu_lkrp_ul .journal .grid {
   flex-direction: column;
   flex: 1 1 auto;
-  height: 100px;
+  height: 300px;
 }
 .webbpm.ervu_lkrp_ul .journal .fieldset button-component {
   display: block;
@@ -28,10 +28,16 @@ export class ErvuDownloadFileButton extends AbstractButton {

   public doClickActions(): Promise<any> {
     return this.httpClient.get('kafka/excerpt', {
-      responseType: 'blob'
+      responseType: 'blob',
+      observe: 'response'
     }).toPromise().then((response) => {
-      const url = window.URL.createObjectURL(response);
+      const contentDisposition = response.headers.get('Content-Disposition');
+      const url = window.URL.createObjectURL(response.body);
       const a = document.createElement('a');
+      const fileNameMatch = contentDisposition.match(/filename\*=?UTF-8''(.+)/i);
+      if (fileNameMatch && fileNameMatch.length > 1) {
+        this.fileName = decodeURIComponent(fileNameMatch[1].replace(/\+/g, '%20'));
+      }
       a.href = url;
       a.download = this.fileName;
       document.body.appendChild(a);
@@ -91,10 +91,16 @@ export class ErvuFileUpload extends InputControl {
       }],
       maxFileSize: this.maxFileSizeMb ? this.maxFileSizeMb * 1024 * 1024 : undefined,
       queueLimit: this.maxFilesToUpload ? this.maxFilesToUpload : undefined,
-      headers: [{
-        name: "X-Employee-Info-File-Form-Type",
-        value: EmployeeInfoFileFormType[this.formType]
-      }]
+      headers: [
+        {
+          name: "X-Employee-Info-File-Form-Type",
+          value: EmployeeInfoFileFormType[this.formType]
+        },
+        {
+          name: "Client-Time-Zone",
+          value: Intl.DateTimeFormat().resolvedOptions().timeZone
+        }
+      ]
     });

     this.setUploaderMethods();
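Each upload now also carries the browser's IANA zone id in a Client-Time-Zone header, which would let the backend stamp the interaction log in the sender's local time. A hypothetical server-side reader (not part of this diff; only the header name comes from the commit) would resolve it defensively:

import java.time.DateTimeException;
import java.time.ZoneId;
import javax.servlet.http.HttpServletRequest;

final class ClientZone {
  static ZoneId resolve(HttpServletRequest request) {
    String header = request.getHeader("Client-Time-Zone"); // e.g. "Europe/Moscow"
    if (header == null) {
      return ZoneId.systemDefault();
    }
    try {
      return ZoneId.of(header);
    } catch (DateTimeException e) {
      return ZoneId.systemDefault(); // malformed zone id from the client
    }
  }
}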
@@ -0,0 +1,38 @@
+import {DateTimeUtil, DefaultValueFormatter, GridValueFormatter} from "@webbpm/base-package";
+import {ValueFormatterParams} from "ag-grid-community";
+
+export class ClientDateTimeFormatter extends DefaultValueFormatter implements GridValueFormatter {
+
+  public dateFormat: string = '';
+
+  format(params: ValueFormatterParams): string {
+    if (this.isValueEmpty(params)) {
+      return super.format(params);
+    }
+
+    // don't apply formatter to row with aggregation function
+    if (params.node.isRowPinned()) {
+      return params.value;
+    }
+
+    if (!this.dateFormat) {
+      return ClientDateTimeFormatter.parseForClientTimeZoneAndFormat(params.value, DateTimeUtil.TIMESTAMP_FORMAT);
+    }
+
+    if (!ClientDateTimeFormatter.isValidFormat(this.dateFormat)) {
+      throw new Error('Invalid date format = ' + this.dateFormat);
+    }
+
+    return ClientDateTimeFormatter.parseForClientTimeZoneAndFormat(params.value, this.dateFormat);
+  }
+
+  private static isValidFormat(format: string): boolean {
+    const validCharsRegex = /^[YyMmDdHhSsTZ.:\[\] -]*$/;
+    return format && validCharsRegex.test(format);
+  }
+
+  private static parseForClientTimeZoneAndFormat(value: string, dateFormat: string): string {
+    let timezone = Intl.DateTimeFormat().resolvedOptions().timeZone;
+    return DateTimeUtil.parseIsoDateTime(value).tz(timezone).format(dateFormat);
+  }
+}
@@ -3,6 +3,7 @@ import {ActivatedRouteSnapshot, CanActivate, Router, RouterStateSnapshot} from "
 import {Observable} from "rxjs";
 import {HttpClient, HttpParams} from "@angular/common/http";
 import {CookieService} from "ngx-cookie";
+import {MessagesService} from "@webbpm/base-package";

 @Injectable({providedIn:'root'})
 export abstract class AuthGuard implements CanActivate {
@@ -10,7 +11,8 @@ export abstract class AuthGuard implements CanActivate {
   protected constructor(
     protected router: Router,
     private httpClient: HttpClient,
-    private cookieService: CookieService
+    private cookieService: CookieService,
+    private messageService: MessagesService
   ) {
   }
@@ -28,7 +30,9 @@ export abstract class AuthGuard implements CanActivate {
       return true;
     }
     else if (error) {
-      throw new Error(error + ', error description =' + errorDescription);
+      let errorMessage = error + ', error description =' + errorDescription;
+      this.messageService.error(errorMessage)
+      throw new Error(errorMessage);
     }
     else if (code) {
       const params = new HttpParams().set('code', code);
pom.xml
@@ -4,7 +4,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>ru.micord.ervu.lkrp</groupId>
   <artifactId>ul</artifactId>
-  <version>1.0.0-SNAPSHOT</version>
+  <version>1.8.0-SNAPSHOT</version>
   <packaging>pom</packaging>
   <modules>
     <module>backend</module>
@@ -15,7 +15,7 @@
   </scm>
   <properties>
     <spring-security-kerberos.version>1.0.1.RELEASE</spring-security-kerberos.version>
-    <spring-kafka.version>2.6.13</spring-kafka.version>
+    <spring-kafka.version>2.9.13</spring-kafka.version>
     <org.bouncycastle.version>1.60</org.bouncycastle.version>
     <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <enable.version.in.url>false</enable.version.in.url>
@@ -4,7 +4,7 @@
   <parent>
     <groupId>ru.micord.ervu.lkrp</groupId>
     <artifactId>ul</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
+    <version>1.8.0-SNAPSHOT</version>
   </parent>

   <groupId>ru.micord.ervu.lkrp.ul</groupId>
@@ -700,17 +700,9 @@
 <entry>
   <key>valueFormatter</key>
   <value>
-    <complex>
-      <entry>
-        <key>dateFormat</key>
-        <value>
-          <simple>"DD.MM.YYYY HH:mm:ss"</simple>
-        </value>
-      </entry>
-    </complex>
     <implRef type="TS">
-      <className>DateTimeFormatter</className>
-      <packageName>component.grid.formatters</packageName>
+      <className>ClientDateTimeFormatter</className>
+      <packageName>ervu.component.grid.formatter</packageName>
     </implRef>
   </value>
 </entry>