Merge branch 'release/1.8' into feature/SUPPORT-8546_cut_unused

commit a93448f461
33 changed files with 316 additions and 168 deletions

@@ -5,7 +5,7 @@
   <parent>
     <groupId>ru.micord.ervu.lkrp</groupId>
     <artifactId>ul</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
+    <version>1.8.0-SNAPSHOT</version>
   </parent>
   <groupId>ru.micord.ervu.lkrp.ul</groupId>
   <artifactId>backend</artifactId>

@@ -1,4 +1,5 @@
 import java.time.Duration;
+import java.util.List;
 import javax.sql.DataSource;

 import liquibase.integration.spring.SpringLiquibase;
@@ -14,10 +15,13 @@ import org.springframework.context.annotation.Configuration;
 import org.springframework.context.annotation.EnableAspectJAutoProxy;
 import org.springframework.context.annotation.FilterType;
 import org.springframework.context.support.PropertySourcesPlaceholderConfigurer;
+import org.springframework.http.converter.HttpMessageConverter;
+import org.springframework.http.converter.ResourceHttpMessageConverter;
 import org.springframework.retry.annotation.EnableRetry;
 import org.springframework.scheduling.annotation.EnableScheduling;
 import org.springframework.scheduling.concurrent.ThreadPoolTaskScheduler;
 import org.springframework.web.servlet.config.annotation.EnableWebMvc;
+import org.springframework.web.servlet.config.annotation.WebMvcConfigurer;

 /**
  * Root application context
@@ -47,7 +51,7 @@ import org.springframework.web.servlet.config.annotation.EnableWebMvc;
 @EnableWebMvc
 @EnableScheduling
 @EnableRetry
-public class AppConfig {
+public class AppConfig implements WebMvcConfigurer {

   @Bean
   public PropertySourcesPlaceholderConfigurer propertySourcesPlaceholderConfigurer(){
@@ -78,4 +82,9 @@ public class AppConfig {
     liquibase.setChangeLog("classpath:config/changelog-master.xml");
     return liquibase;
   }
+
+  @Override
+  public void configureMessageConverters(List<HttpMessageConverter<?>> converters) {
+    converters.add(new ResourceHttpMessageConverter());
+  }
 }

@@ -1,24 +1,20 @@
+import org.slf4j.Logger;
+import org.slf4j.LoggerFactory;
+import org.springframework.web.servlet.support.AbstractAnnotationConfigDispatcherServletInitializer;
+import org.springframework.web.util.IntrospectorCleanupListener;
+
+import javax.servlet.MultipartConfigElement;
 import javax.servlet.ServletContext;
 import javax.servlet.ServletException;
 import javax.servlet.ServletRegistration;

-import org.springframework.beans.factory.annotation.Value;
-import org.springframework.web.servlet.support.AbstractAnnotationConfigDispatcherServletInitializer;
-import org.springframework.web.util.IntrospectorCleanupListener;
-
 /**
  * This initializer creates root context and registers dispatcher servlet
  * Spring scans for initializers automatically
  */
 public class WebAppInitializer extends AbstractAnnotationConfigDispatcherServletInitializer {
-
-  @Value("${ervu.fileupload.max_file_size}")
-  private int maxFileSize;
-  @Value("${ervu.fileupload.max_request_size}")
-  private int maxRequestSize;
-  @Value("${ervu.fileupload.file_size_threshold}")
-  private int fileSizeThreshold;
+  private static final Logger logger = LoggerFactory.getLogger(WebAppInitializer.class);

   public void onStartup(ServletContext servletContext) throws ServletException {
     super.onStartup(servletContext);
@@ -41,11 +37,37 @@ public class WebAppInitializer extends AbstractAnnotationConfigDispatcherServletInitializer {

   @Override
   protected void customizeRegistration(ServletRegistration.Dynamic registration) {
+
+    // read from env or assign default values
+    int maxFileSize = parseOrDefault("ERVU_FILE_UPLOAD_MAX_FILE_SIZE", 5242880);
+    int maxRequestSize = parseOrDefault("ERVU_FILE_UPLOAD_MAX_REQUEST_SIZE", 6291456);
+    int fileSizeThreshold = parseOrDefault("ERVU_FILE_UPLOAD_FILE_SIZE_THRESHOLD", 0);
+
     MultipartConfigElement multipartConfigElement = new MultipartConfigElement(
         "/tmp",
         maxFileSize,
         maxRequestSize,
         fileSizeThreshold);
     registration.setMultipartConfig(multipartConfigElement);
+
+    logger.info("Max file upload size is set to: " + multipartConfigElement.getMaxFileSize());
+    logger.info("Max file upload request size is set to: " + multipartConfigElement.getMaxRequestSize());
+    logger.info("File size threshold is set to: " + multipartConfigElement.getFileSizeThreshold());
+
   }
+
+  private int parseOrDefault(String envVar, int defaultVal) {
+    String envVarValue = System.getenv(envVar);
+    if (envVarValue == null) {
+      logger.info("Environment variable {} is null, using default value: {}", envVar, defaultVal);
+      return defaultVal;
+    }
+    try {
+      return Integer.parseInt(envVarValue);
+    } catch (NumberFormatException e) {
+      logger.info("Environment variable {} is not an integer, using default value: {}", envVar, defaultVal);
+      return defaultVal;
+    }
+  }
+
 }

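Note: the defaults wired in above are byte counts: 5242880 is 5 MiB (5 × 1024 × 1024) for a single file, 6291456 is 6 MiB for the whole request, and a threshold of 0 means uploads are written straight to "/tmp". A minimal, self-contained sketch of the same resolve-or-default contract (the class name is hypothetical; the env variable names come from the diff):

public class MultipartDefaultsDemo {
  public static void main(String[] args) {
    // prints 5242880 unless ERVU_FILE_UPLOAD_MAX_FILE_SIZE overrides it
    System.out.println(parseOrDefault("ERVU_FILE_UPLOAD_MAX_FILE_SIZE", 5242880));
  }

  // same contract as the private helper added to WebAppInitializer above
  static int parseOrDefault(String envVar, int defaultVal) {
    String value = System.getenv(envVar);
    if (value == null) {
      return defaultVal;
    }
    try {
      return Integer.parseInt(value);
    } catch (NumberFormatException e) {
      return defaultVal;
    }
  }
}
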
@@ -4,12 +4,17 @@ import java.util.HashMap;
 import java.util.Map;

 import org.apache.kafka.clients.CommonClientConfigs;
+import org.apache.kafka.clients.consumer.ConsumerConfig;
 import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.config.SaslConfigs;
+import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
+import org.springframework.kafka.config.ConcurrentKafkaListenerContainerFactory;
+import org.springframework.kafka.core.ConsumerFactory;
+import org.springframework.kafka.core.DefaultKafkaConsumerFactory;
 import org.springframework.kafka.core.DefaultKafkaProducerFactory;
 import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.kafka.core.ProducerFactory;
@@ -18,7 +23,7 @@ import org.springframework.kafka.core.ProducerFactory;
  * @author Alexandr Shalaginov
  */
 @Configuration
-public class KafkaProducerConfig {
+public class AvKafkaConfig {
   @Value("${av.kafka.bootstrap.servers}")
   private String kafkaUrl;
   @Value("${av.kafka.security.protocol}")
@@ -32,12 +37,12 @@ public class KafkaProducerConfig {
   @Value("${av.kafka.sasl.mechanism}")
   private String saslMechanism;

-  @Bean("av-factory")
-  public ProducerFactory<String, String> producerFactory() {
+  @Bean
+  public ProducerFactory<String, String> avProducerFactory() {
     return new DefaultKafkaProducerFactory<>(producerConfigs());
   }

-  @Bean("av-configs")
+  @Bean
   public Map<String, Object> producerConfigs() {
     Map<String, Object> props = new HashMap<>();
     props.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaUrl);
@@ -52,9 +57,36 @@ public class KafkaProducerConfig {
     return props;
   }

-  @Bean("av-template")
+  @Bean
+  public ConsumerFactory<String, String> avConsumerFactory() {
+    return new DefaultKafkaConsumerFactory<>(consumerConfigs());
+  }
+
+  @Bean
+  public Map<String, Object> consumerConfigs() {
+    Map<String, Object> props = new HashMap<>();
+    props.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, this.kafkaUrl);
+    props.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+    props.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
+
+    props.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
+    props.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\""
+        + username + "\" password=\"" + password + "\";");
+    props.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
+
+    return props;
+  }
+
+  @Bean("avContainerFactory")
+  public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
+    ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
+    factory.setConsumerFactory(avConsumerFactory());
+    return factory;
+  }
+
+  @Bean("avTemplate")
   public KafkaTemplate<String, String> kafkaTemplate() {
-    return new KafkaTemplate<>(producerFactory());
+    return new KafkaTemplate<>(avProducerFactory());
   }
 }

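Note: with two Kafka stacks now living in one Spring context (the av.* beans above and the ervu.* beans in ReplyingKafkaConfig further down), the explicit bean names are what keep injection unambiguous: a listener picks its container by the containerFactory name, and a constructor picks the right template with @Qualifier. A minimal sketch of a consumer of these beans, mirroring the wiring this commit performs in EmployeeInfoFileUploadService (the class and method names here are hypothetical; the bean names and properties come from the diff):

import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;

@Service
public class AvStatusClient {
  private final KafkaTemplate<String, String> kafkaTemplate;

  // "avTemplate" selects the AV producer stack, not the "ervuTemplate"
  // bean defined in ReplyingKafkaConfig
  public AvStatusClient(@Qualifier("avTemplate") KafkaTemplate<String, String> kafkaTemplate) {
    this.kafkaTemplate = kafkaTemplate;
  }

  // containerFactory selects the listener container built from avConsumerFactory()
  @KafkaListener(id = "${av.kafka.group.id}", topics = "${av.kafka.download.response}",
      containerFactory = "avContainerFactory")
  public void onStatusMessage(String message) {
    // react to the AV file-upload status message
  }
}
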
@@ -1,11 +1,17 @@
 package ervu.controller;

+import java.time.ZonedDateTime;
+import java.util.TimeZone;
 import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;

 import ervu.service.fileupload.EmployeeInfoFileUploadService;
 import org.springframework.http.ResponseEntity;
-import org.springframework.web.bind.annotation.*;
+import org.springframework.web.bind.annotation.RequestHeader;
+import org.springframework.web.bind.annotation.RequestMapping;
+import org.springframework.web.bind.annotation.RequestMethod;
+import org.springframework.web.bind.annotation.RequestParam;
+import org.springframework.web.bind.annotation.RestController;
 import org.springframework.web.multipart.MultipartFile;

 /**
@@ -21,7 +27,8 @@ public class EmployeeInfoFileUploadController {

   @RequestMapping(value = "/employee/document", method = RequestMethod.POST)
   public ResponseEntity<?> saveEmployeeInformationFile(@RequestParam("file") MultipartFile multipartFile,
-      @RequestHeader("X-Employee-Info-File-Form-Type") String formType, HttpServletRequest request) {
+      @RequestHeader("X-Employee-Info-File-Form-Type") String formType,
+      @RequestHeader("Client-Time-Zone") String clientTimeZone, HttpServletRequest request) {
     String accessToken = null;
     String authToken = null;
     Cookie[] cookies = request.getCookies();
@@ -35,11 +42,17 @@ public class EmployeeInfoFileUploadController {
         }
       }
     }
-    if (accessToken != null && this.fileUploadService.saveEmployeeInformationFile(multipartFile, formType, accessToken, authToken)) {
-      return ResponseEntity.ok("File successfully uploaded.");
-    }
-    else {
-      return ResponseEntity.internalServerError().body("An error occurred while uploading file.");
+
+    if (accessToken != null) {
+      String offset = ZonedDateTime.now(TimeZone.getTimeZone(clientTimeZone).toZoneId())
+          .getOffset().getId();
+
+      if (this.fileUploadService.saveEmployeeInformationFile(multipartFile, formType, accessToken,
+          authToken, offset)) {
+        return ResponseEntity.ok("File successfully uploaded.");
+      }
     }
+
+    return ResponseEntity.internalServerError().body("An error occurred while uploading file.");
   }
 }

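Note: the controller turns the IANA zone name from the Client-Time-Zone header into a numeric UTC offset. TimeZone.getTimeZone() falls back to GMT for an unrecognized id, so a malformed header degrades to an offset of "Z" rather than throwing. A runnable sketch of the same derivation (class name hypothetical):

import java.time.ZonedDateTime;
import java.util.TimeZone;

public class ClientOffsetDemo {
  public static void main(String[] args) {
    // "Europe/Moscow" resolves to "+03:00"; an unknown id falls back to GMT ("Z")
    String offset = ZonedDateTime.now(TimeZone.getTimeZone("Europe/Moscow").toZoneId())
        .getOffset().getId();
    System.out.println(offset);
  }
}
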
@@ -0,0 +1,7 @@
+package ervu.model.fileupload;
+
+/**
+ * @author r.latypov
+ */
+public record DownloadResponse(OrgInfo orgInfo, FileInfo fileInfo) {
+}

@@ -6,14 +6,17 @@ import java.util.Objects;
  * @author Alexandr Shalaginov
  */
 public class FileInfo {
-  private final String fileId;
-  private final String fileUrl;
-  private final String fileName;
-  private final String filePatternCode;
-  private final String filePatternName;
-  private final String departureDateTime;
-  private final String timeZone;
-  private final FileStatus fileStatus;
+  private String fileId;
+  private String fileUrl;
+  private String fileName;
+  private String filePatternCode;
+  private String filePatternName;
+  private String departureDateTime;
+  private String timeZone;
+  private FileStatus fileStatus;
+
+  public FileInfo() {
+  }

   public FileInfo(String fileId, String fileUrl, String fileName, String filePatternCode,
       String filePatternName, String departureDateTime, String timeZone, FileStatus fileStatus) {

@@ -8,9 +8,12 @@ import ru.micord.ervu.journal.SenderInfo;
  * @author Alexandr Shalaginov
  */
 public class OrgInfo {
-  private final String orgName;
-  private final String orgId;
-  private final SenderInfo senderInfo;
+  private String orgName;
+  private String orgId;
+  private SenderInfo senderInfo;
+
+  public OrgInfo() {
+  }

   public OrgInfo(String orgName, String orgId, SenderInfo senderInfo) {
     this.orgName = orgName;
@@ -26,7 +29,7 @@
     return orgId;
   }

-  public SenderInfo getPrnOid() {
+  public SenderInfo getSenderInfo() {
     return senderInfo;
   }

@@ -1,22 +1,25 @@
 package ervu.service.fileupload;

 import java.nio.charset.StandardCharsets;
 import java.sql.Timestamp;
 import java.time.LocalDateTime;
-import java.time.ZonedDateTime;
 import java.time.format.DateTimeFormatter;
 import java.util.UUID;

+import com.fasterxml.jackson.core.JsonProcessingException;
+import com.fasterxml.jackson.databind.ObjectMapper;
 import ervu.client.fileupload.FileUploadWebDavClient;
+import ervu.model.fileupload.DownloadResponse;
 import ervu.model.fileupload.EmployeeInfoFileFormType;
 import ervu.model.fileupload.EmployeeInfoKafkaMessage;
 import ervu.model.fileupload.FileInfo;
 import ervu.model.fileupload.FileStatus;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.slf4j.Logger;
 import org.slf4j.LoggerFactory;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
+import org.springframework.kafka.annotation.KafkaListener;
 import org.springframework.kafka.core.KafkaTemplate;
 import org.springframework.stereotype.Service;
 import org.springframework.web.multipart.MultipartFile;
@@ -27,13 +30,15 @@ import ru.micord.ervu.security.webbpm.jwt.model.Token;
 import ru.micord.ervu.security.webbpm.jwt.service.JwtTokenService;
 import ru.micord.ervu.service.InteractionService;

+import static ru.micord.ervu.util.StringUtils.convertToFio;
+
 /**
  * @author Alexandr Shalaginov
  */
 @Service
 public class EmployeeInfoFileUploadService {
   private static final Logger logger = LoggerFactory.getLogger(EmployeeInfoFileUploadService.class);
-  private static final String FORMAT = "dd.MM.yyyy HH:mm";
+  private static final String FORMAT = "dd.MM.yyyy HH:mm:ss";

   private final FileUploadWebDavClient fileWebDavUploadClient;
   private final EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService;
@@ -54,7 +59,8 @@
   public EmployeeInfoFileUploadService(
       FileUploadWebDavClient fileWebDavUploadClient,
       EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService,
-      @Qualifier("av-template") KafkaTemplate<String, String> kafkaTemplate, InteractionService interactionService,
+      @Qualifier("avTemplate") KafkaTemplate<String, String> kafkaTemplate,
+      InteractionService interactionService,
       UlDataService ulDataService, JwtTokenService jwtTokenService) {
     this.fileWebDavUploadClient = fileWebDavUploadClient;
     this.employeeInfoKafkaMessageService = employeeInfoKafkaMessageService;
@@ -65,11 +71,9 @@
   }

   public boolean saveEmployeeInformationFile(MultipartFile multipartFile, String formType,
-      String accessToken, String authToken) {
+      String accessToken, String authToken, String offset) {
     String fileUploadUrl = this.url + "/" + getNewFilename(multipartFile.getOriginalFilename());
     LocalDateTime now = LocalDateTime.now();
-    String departureDateTime = now.format(DateTimeFormatter.ofPattern(FORMAT));;
-    String timeZone = getTimeZone();

     if (this.fileWebDavUploadClient.webDavUploadFile(fileUploadUrl, username, password, multipartFile)) {
       FileStatus fileStatus = new FileStatus();
@@ -83,6 +87,7 @@
       PersonModel personModel = employeeModel.getPerson();
       Token token = jwtTokenService.getToken(authToken);
       String[] ids = token.getUserAccountId().split(":");
+      String departureDateTime = now.format(DateTimeFormatter.ofPattern(FORMAT));
       String jsonMessage = getJsonKafkaMessage(
           employeeInfoKafkaMessageService.getKafkaMessage(
               fileId,
@@ -91,16 +96,17 @@
               employeeInfoFileFormType,
               departureDateTime,
               accessToken,
-              timeZone,
+              offset,
               fileStatus,
               ids[1],
               ids[0],
               personModel
           )
       );
-      interactionService.setStatus(fileId, fileStatus.getStatus(), fileName, employeeInfoFileFormType.getFilePatternName(), Timestamp.valueOf(now),
-          personModel.getLastName() + " " + personModel.getFirstName().charAt(0) + ". " + personModel.getMiddleName().charAt(0) + ".", (int) multipartFile.getSize(),
-          ids[1]);
+      interactionService.setStatus(fileId, fileStatus.getStatus(), fileName,
+          employeeInfoFileFormType.getFilePatternCode(), Timestamp.valueOf(now),
+          convertToFio(personModel.getFirstName(), personModel.getMiddleName(), personModel.getLastName()),
+          (int) multipartFile.getSize(), ids[1]);
       return sendMessage(jsonMessage);
     }
     else {
@@ -111,6 +117,8 @@

   private boolean sendMessage(String message) {
     ProducerRecord<String, String> record = new ProducerRecord<>(this.kafkaTopicName, message);
+    record.headers().add("messageId", UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
+
     try {
       this.kafkaTemplate.send(record).get();
       logger.debug("Success send record: {}", record);
@@ -144,7 +152,19 @@
     }
   }

-  private String getTimeZone() {
-    return ZonedDateTime.now().getOffset().toString();
+  @KafkaListener(id = "${av.kafka.group.id}", topics = "${av.kafka.download.response}",
+      containerFactory = "avContainerFactory")
+  public void listenKafka(String kafkaMessage) {
+    ObjectMapper mapper = new ObjectMapper();
+    try {
+      DownloadResponse downloadResponse = mapper.readValue(kafkaMessage, DownloadResponse.class);
+      FileInfo fileInfo = downloadResponse.fileInfo();
+      interactionService.updateStatus(fileInfo.getFileId(), fileInfo.getFileStatus().getStatus(),
+          downloadResponse.orgInfo().getOrgId()
+      );
+    }
+    catch (JsonProcessingException e) {
+      throw new RuntimeException(String.format("Fail get json from: %s", kafkaMessage), e);
+    }
   }
 }

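Note: the @KafkaListener above binds the AV status topic to the new DownloadResponse record. Jackson 2.12+ maps JSON properties onto record components by name, while the nested OrgInfo/FileInfo beans rely on the no-arg constructors this commit adds. A minimal round-trip sketch with simplified stand-in types and a hypothetical payload:

import com.fasterxml.jackson.databind.ObjectMapper;

public class DownloadResponseDemo {
  // simplified stand-ins for the real ervu.model.fileupload classes
  record OrgInfo(String orgName, String orgId) {}
  record FileInfo(String fileId, String fileStatus) {}
  record DownloadResponse(OrgInfo orgInfo, FileInfo fileInfo) {}

  public static void main(String[] args) throws Exception {
    String json = "{\"orgInfo\":{\"orgName\":\"ООО Пример\",\"orgId\":\"42\"},"
        + "\"fileInfo\":{\"fileId\":\"abc\",\"fileStatus\":\"UPLOADED\"}}";
    DownloadResponse response = new ObjectMapper().readValue(json, DownloadResponse.class);
    System.out.println(response.fileInfo().fileId()); // abc
  }
}
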
@@ -25,7 +25,7 @@ public class EmployeeInfoKafkaMessageService {

   public EmployeeInfoKafkaMessage getKafkaMessage(String fileId, String fileUrl, String fileName,
       EmployeeInfoFileFormType formType, String departureDateTime, String accessToken,
-      String timeZone, FileStatus fileStatus, String ervuId, String prnOid, PersonModel personModel) {
+      String offset, FileStatus fileStatus, String ervuId, String prnOid, PersonModel personModel) {
     return new EmployeeInfoKafkaMessage(
         getOrgInfo(accessToken, ervuId, prnOid, personModel),
         getFileInfo(
@@ -34,14 +34,14 @@
             fileName,
             formType,
             departureDateTime,
-            timeZone,
+            offset,
             fileStatus
         )
     );
   }

   private FileInfo getFileInfo(String fileId, String fileUrl, String fileName,
-      EmployeeInfoFileFormType formType, String departureDateTime, String timeZone,
+      EmployeeInfoFileFormType formType, String departureDateTime, String offset,
       FileStatus fileStatus) {
     return new FileInfo(
         fileId,
@@ -50,7 +50,7 @@
         formType.getFilePatternCode(),
         formType.getFilePatternName(),
         departureDateTime,
-        timeZone,
+        offset,
         fileStatus
     );
   }

@@ -1,8 +1,6 @@
 package ru.micord.ervu.journal.mapper;

 import java.sql.Timestamp;
-import java.time.ZoneOffset;
-import java.time.ZonedDateTime;

 import ervu_lkrp_ul.ervu_lkrp_ul.db_beans.public_.tables.records.InteractionLogRecord;
 import ru.micord.ervu.journal.JournalDto;
@@ -16,9 +14,7 @@ public class JournalDtoMapper {
   public static JournalDto mapToJournalDto(JournalFileInfo journalFileInfo) {
     SenderInfo senderInfo = journalFileInfo.getSenderInfo();
     return new JournalDto()
-        .setDepartureDateTime(Timestamp.from(ZonedDateTime.of(journalFileInfo.getDepartureDateTime(),
-            ZoneOffset.of(journalFileInfo.getTimeZone())
-        ).toInstant()).toString())
+        .setDepartureDateTime(Timestamp.valueOf(journalFileInfo.getDepartureDateTime()).toString())
         .setFileName(journalFileInfo.getFileName())
         .setFilePatternCode(journalFileInfo.getFilePatternCode())
         .setSenderFio(convertToFio(senderInfo.getFirstName(), senderInfo.getMiddleName(),
@@ -33,7 +29,7 @@
     return new JournalDto()
         .setDepartureDateTime(record.getSentDate().toString())
         .setFileName(record.getFileName())
-        .setFilePatternCode(Integer.valueOf(record.getForm().replace("№", "")))
+        .setFilePatternCode(Integer.valueOf(record.getForm()))
         .setSenderFio(record.getSender())
         .setStatus(record.getStatus())
         .setFilesSentCount(record.getRecordsSent())

@@ -6,7 +6,6 @@ import org.apache.kafka.clients.producer.ProducerConfig;
 import org.apache.kafka.common.config.SaslConfigs;
 import org.apache.kafka.common.serialization.StringDeserializer;
 import org.apache.kafka.common.serialization.StringSerializer;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.context.annotation.Bean;
 import org.springframework.context.annotation.Configuration;
@@ -48,8 +47,8 @@ public class ReplyingKafkaConfig {
   @Value("${ervu.kafka.sasl.mechanism}")
   private String saslMechanism;

-  @Bean("ervu")
-  public ProducerFactory<String, String> producerFactory() {
+  @Bean
+  public ProducerFactory<String, String> ervuProducerFactory() {
     Map<String, Object> configProps = new HashMap<>();
     configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
     configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
@@ -61,14 +60,13 @@
     return new DefaultKafkaProducerFactory<>(configProps);
   }

-  @Bean
-  @Qualifier("ervu")
+  @Bean("ervuTemplate")
   public KafkaTemplate<String, String> kafkaTemplate() {
-    return new KafkaTemplate<>(producerFactory());
+    return new KafkaTemplate<>(ervuProducerFactory());
   }

-  @Bean
-  public ConsumerFactory<String, String> consumerFactory() {
+  @Bean("ervuConsumerFactory")
+  public ConsumerFactory<String, String> ervuConsumerFactory() {
     Map<String, Object> configProps = new HashMap<>();
     configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
     configProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId);
@@ -84,12 +82,11 @@
   @Bean
   public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
     ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
-    factory.setConsumerFactory(consumerFactory());
+    factory.setConsumerFactory(ervuConsumerFactory());
     return factory;
   }

-  // @Bean()
-  // @Qualifier("excerpt-container")
+  // @Bean("excerpt-container")
   // public ConcurrentMessageListenerContainer<String, String> excerptReplyContainer(
   //     ConcurrentKafkaListenerContainerFactory<String, String> factory) {
   //   ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(
@@ -98,16 +95,14 @@
   //   return container;
   // }
   //
-  // @Bean()
-  // @Qualifier("excerpt-template")
+  // @Bean("excerpt-template")
   // public ReplyingKafkaTemplate<String, String, String> excerptReplyingKafkaTemplate(
   //     @Qualifier("ervu") ProducerFactory<String, String> pf,
   //     @Qualifier("excerpt-container") ConcurrentMessageListenerContainer<String, String> container) {
   //   return initReplyingKafkaTemplate(pf, container);
   // }
   //
-  // @Bean
-  // @Qualifier("org")
+  // @Bean("org")
   // public ConcurrentMessageListenerContainer<String, String> replyContainer(
   //     ConcurrentKafkaListenerContainerFactory<String, String> factory) {
   //   ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(
@@ -116,8 +111,7 @@
   //   return container;
   // }
   //
-  // @Bean
-  // @Qualifier("journal")
+  // @Bean("journal")
   // public ConcurrentMessageListenerContainer<String, String> journalReplyContainer(
   //     ConcurrentKafkaListenerContainerFactory<String, String> factory) {
   //   ConcurrentMessageListenerContainer<String, String> container = factory.createContainer(
@@ -126,16 +120,14 @@
   //   return container;
   // }
   //
-  // @Bean
-  // @Qualifier("org")
+  // @Bean("org")
   // public ReplyingKafkaTemplate<String, String, String> orgReplyingKafkaTemplate(
   //     @Qualifier("ervu") ProducerFactory<String, String> pf,
   //     @Qualifier("org") ConcurrentMessageListenerContainer<String, String> container) {
   //   return initReplyingKafkaTemplate(pf, container);
   // }
   //
-  // @Bean
-  // @Qualifier("journal")
+  // @Bean("journal")
   // public ReplyingKafkaTemplate<String, String, String> journalReplyingKafkaTemplate(
   //     @Qualifier("ervu") ProducerFactory<String, String> pf,
   //     @Qualifier("journal") ConcurrentMessageListenerContainer<String, String> container) {

@@ -1,20 +1,16 @@
 package ru.micord.ervu.kafka.controller;

-import java.util.Arrays;
-import java.util.Optional;
 import javax.servlet.http.Cookie;
 import javax.servlet.http.HttpServletRequest;

 import com.fasterxml.jackson.databind.ObjectMapper;
 import org.springframework.beans.factory.annotation.Autowired;
-import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
-import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
 import org.springframework.http.ResponseEntity;
 import org.springframework.web.bind.annotation.RequestMapping;
 import org.springframework.web.bind.annotation.RestController;
-import ru.micord.ervu.kafka.model.Data;
-import ru.micord.ervu.kafka.model.ErvuOrgResponse;
 import ru.micord.ervu.kafka.model.ExcerptResponse;
 import ru.micord.ervu.kafka.service.ReplyingKafkaService;
+import ru.micord.ervu.s3.S3Service;
@@ -47,7 +43,7 @@ public class ErvuKafkaController {
   private ObjectMapper objectMapper;

   @RequestMapping(value = "/kafka/excerpt")
-  public ResponseEntity<InputStreamResource> getExcerptFile(HttpServletRequest request) {
+  public ResponseEntity<Resource> getExcerptFile(HttpServletRequest request) {
     try {
       String authToken = getAuthToken(request);
       Token token = jwtTokenService.getToken(authToken);

@@ -1,9 +1,6 @@
 package ru.micord.ervu.kafka.model;

 import java.io.Serializable;
-import java.util.Date;

-import com.fasterxml.jackson.annotation.JsonFormat;
 import com.fasterxml.jackson.annotation.JsonIgnoreProperties;

 /**
@@ -13,25 +10,10 @@ import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
 public class ExcerptResponse implements Serializable {
   private static final long serialVersionUID = 1L;

   private String excerptId;

   private String orgId;

   private String fileUrl;

-  @JsonFormat(pattern = "dd.MM.yyyy HH:mm")
-  private Date departureDateTime;
-
-  private String timeZone;
-
   public String getExcerptId() {
     return excerptId;
   }

   public void setExcerptId(String excerptId) {
     this.excerptId = excerptId;
   }

   public String getOrgId() {
     return orgId;
   }
@@ -40,22 +22,6 @@ public class ExcerptResponse implements Serializable {
     this.orgId = orgId;
   }

-  public Date getDepartureDateTime() {
-    return departureDateTime;
-  }
-
-  public void setDepartureDateTime(Date departureDateTime) {
-    this.departureDateTime = departureDateTime;
-  }
-
-  public String getTimeZone() {
-    return timeZone;
-  }
-
-  public void setTimeZone(String timeZone) {
-    this.timeZone = timeZone;
-  }
-
   public String getFileUrl() {
     return fileUrl;
   }

@@ -2,16 +2,17 @@ package ru.micord.ervu.kafka.service.impl;

 import java.nio.charset.StandardCharsets;
 import java.time.Duration;
+import java.util.Arrays;
 import java.util.Collections;
 import java.util.Optional;
 import java.util.UUID;
 import java.util.concurrent.atomic.AtomicReference;

 import org.apache.kafka.clients.consumer.Consumer;
+import org.apache.kafka.clients.consumer.ConsumerRecord;
 import org.apache.kafka.clients.consumer.ConsumerRecords;
 import org.apache.kafka.clients.producer.ProducerRecord;
 import org.apache.kafka.common.header.internals.RecordHeader;
-import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.beans.factory.annotation.Qualifier;
 import org.springframework.beans.factory.annotation.Value;
 import org.springframework.kafka.core.ConsumerFactory;
@@ -26,24 +27,30 @@ import ru.micord.ervu.kafka.service.ReplyingKafkaService;
 @Service
 public class BaseReplyingKafkaServiceImpl implements ReplyingKafkaService {

-  // protected abstract ReplyingKafkaTemplate<String, String, String> getReplyingKafkaTemplate();
-  @Autowired
-  @Qualifier("ervu")
-  private KafkaTemplate<String, String> kafkaTemplate;
-  @Autowired
-  private ConsumerFactory<String, String> consumerFactory;
+  private static final String MESSAGE_ID_HEADER = "messageId";
+
+  // protected abstract ReplyingKafkaTemplate<String, String, String> getReplyingKafkaTemplate()
+  private final KafkaTemplate<String, String> kafkaTemplate;
+  private final ConsumerFactory<String, String> consumerFactory;

   @Value("${ervu.kafka.group.id}")
   private String groupId;
   @Value("${ervu.kafka.reply.timeout:30}")
   private long replyTimeout;

+  public BaseReplyingKafkaServiceImpl(
+      @Qualifier("ervuTemplate") KafkaTemplate<String, String> kafkaTemplate,
+      @Qualifier("ervuConsumerFactory") ConsumerFactory<String, String> consumerFactory) {
+    this.kafkaTemplate = kafkaTemplate;
+    this.consumerFactory = consumerFactory;
+  }
+
   public String sendMessageAndGetReply(String requestTopic,
       String replyTopic,
       String requestMessage) {
     ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
     record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, replyTopic.getBytes()));
-    //TODO fix No pending reply error
+    //TODO fix No pending reply error SUPPORT-8591
     // RequestReplyFuture<String, String, String> replyFuture = getReplyingKafkaTemplate()
     //     .sendAndReceive(record);
     //
@@ -56,7 +63,8 @@ public class BaseReplyingKafkaServiceImpl implements ReplyingKafkaService {
     //   throw new RuntimeException("Failed to get kafka response.", e);
     // }

-    record.headers().add("messageId", UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
+    String messageId = UUID.randomUUID().toString();
+    record.headers().add(MESSAGE_ID_HEADER, messageId.getBytes(StandardCharsets.UTF_8));
     kafkaTemplate.send(record);
     AtomicReference<String> responseRef = new AtomicReference<>(null);

@@ -64,8 +72,19 @@
         consumerFactory.createConsumer(groupId, null)) {
       consumer.subscribe(Collections.singletonList(replyTopic));
       ConsumerRecords<String, String> consumerRecords = consumer.poll(Duration.ofSeconds(replyTimeout));
-      consumerRecords.forEach(consumerRecord -> responseRef.set(consumerRecord.value()));
-      consumer.commitSync();
+
+      for (ConsumerRecord<String, String> consumerRecord : consumerRecords) {
+        boolean match = Arrays.stream(consumerRecord.headers().toArray())
+            .anyMatch(header -> header.key().equals(MESSAGE_ID_HEADER)
+                && messageId.equals(
+                new String(header.value(), StandardCharsets.UTF_8)));
+
+        if (match) {
+          responseRef.set(consumerRecord.value());
+          consumer.commitSync();
+          break;
+        }
+      }
     }
     return Optional.ofNullable(responseRef.get())
         .orElseThrow(() -> new RuntimeException("Kafka return result is null"));

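Note: the hand-rolled request-reply above now correlates replies by a messageId header instead of accepting whatever record happens to arrive first on the shared reply topic. That only works if the replying service echoes the header back onto its reply; a sketch of that assumed contract (class and method names hypothetical, not part of this commit):

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.Header;

public class ReplyEcho {
  // the replier copies the request's messageId header onto the reply record
  static ProducerRecord<String, String> buildReply(ConsumerRecord<String, String> request,
      String replyTopic, String payload) {
    ProducerRecord<String, String> reply = new ProducerRecord<>(replyTopic, payload);
    Header messageId = request.headers().lastHeader("messageId");
    if (messageId != null) {
      reply.headers().add("messageId", messageId.value());
    }
    return reply;
  }
}
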
@@ -23,19 +23,22 @@ public class S3Connection {
   private String accessKey;
   @Value("${s3.secret_key}")
   private String secretKey;
+  @Value("${s3.path.style.access.enabled:true}")
+  private boolean pathStyleAccessEnabled;

   @Bean("outClient")
   public AmazonS3 getS3OutClient() {
-    return getS3Client(endpoint, accessKey, secretKey);
+    return getS3Client(endpoint, accessKey, secretKey, pathStyleAccessEnabled);
   }

-  private static AmazonS3 getS3Client(String endpoint, String accessKey, String secretKey) {
+  private static AmazonS3 getS3Client(String endpoint, String accessKey, String secretKey, Boolean pathStyleAccessEnabled) {
     AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
     String region = Region.getRegion(Regions.DEFAULT_REGION).toString();

     return AmazonS3ClientBuilder.standard()
         .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, region))
         .withCredentials(new AWSStaticCredentialsProvider(credentials))
         .withPathStyleAccessEnabled(pathStyleAccessEnabled)
         .build();
   }
 }

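Note: path-style access puts the bucket into the URL path (http://host:port/bucket/key) instead of the hostname (http://bucket.host/key), which is generally what a single-hostname endpoint such as the MinIO endpoint configured in this repo requires; the new property defaults to true. A self-contained sketch of the builder call (class and parameter names hypothetical):

import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;

public class PathStyleDemo {
  static AmazonS3 client(String endpoint, String region, String accessKey, String secretKey) {
    return AmazonS3ClientBuilder.standard()
        .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, region))
        .withCredentials(new AWSStaticCredentialsProvider(new BasicAWSCredentials(accessKey, secretKey)))
        .withPathStyleAccessEnabled(true) // same call the diff adds in getS3Client
        .build();
  }
}
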
@@ -1,7 +1,7 @@
 package ru.micord.ervu.s3;

-import java.io.File;
-import java.net.URI;
+import java.net.URLEncoder;
+import java.nio.charset.StandardCharsets;

 import com.amazonaws.AmazonServiceException;
 import com.amazonaws.services.s3.AmazonS3;
@@ -9,6 +9,7 @@ import com.amazonaws.services.s3.AmazonS3URI;
 import com.amazonaws.services.s3.model.S3Object;
 import org.springframework.beans.factory.annotation.Autowired;
 import org.springframework.core.io.InputStreamResource;
+import org.springframework.core.io.Resource;
 import org.springframework.http.HttpHeaders;
 import org.springframework.http.MediaType;
 import org.springframework.http.ResponseEntity;
@@ -25,13 +26,15 @@ public class S3Service {
     this.outClient = outClient;
   }

-  public ResponseEntity<InputStreamResource> getFile(String fileUrl) {
+  public ResponseEntity<Resource> getFile(String fileUrl) {
     try {
       AmazonS3URI uri = new AmazonS3URI(fileUrl);
       S3Object s3Object = outClient.getObject(uri.getBucket(), uri.getKey());
       InputStreamResource resource = new InputStreamResource(s3Object.getObjectContent());
+      String encodedFilename = URLEncoder.encode(uri.getKey(), StandardCharsets.UTF_8);
       return ResponseEntity.ok()
-          .header(HttpHeaders.CONTENT_DISPOSITION, "attachment;filename=" + uri.getKey())
+          .header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename*=UTF-8''" + encodedFilename)
           .contentLength(s3Object.getObjectMetadata().getContentLength())
           .contentType(MediaType.APPLICATION_OCTET_STREAM)
           .body(resource);
     }

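Note: the old header embedded the raw object key, which breaks for non-ASCII filenames; the new one uses the RFC 5987 filename*=UTF-8'' form. One caveat: URLEncoder does form encoding and emits '+' for a space, which is why the frontend change in ErvuDownloadFileButton below decodes '+' back to a space with replace(/\+/g, '%20'). A runnable sketch (the sample key is hypothetical):

import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;

public class FilenameEncodingDemo {
  public static void main(String[] args) {
    String key = "отчет 2024.xlsx"; // hypothetical object key
    String encoded = URLEncoder.encode(key, StandardCharsets.UTF_8);
    System.out.println("attachment; filename*=UTF-8''" + encoded);
    // -> attachment; filename*=UTF-8''%D0%BE%D1%82%D1%87%D0%B5%D1%82+2024.xlsx
  }
}
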
@@ -451,8 +451,8 @@
           brhs.setBrhOid(brhsModel.getBrhOid());
           brhs.setKpp(brhsModel.getKpp());
           brhs.setLeg(brhsModel.getLeg());
-          brhs.setAddresses(brhsModel.getAddresses().getElements());
-          brhs.setContacts(brhsModel.getContacts().getElements());
+          brhs.setAddresses(brhsModel.getAddresses() != null ? brhsModel.getAddresses().getElements() : null);
+          brhs.setContacts(brhsModel.getContacts() != null ? brhsModel.getContacts().getElements() : null);
           return brhs;
         }).toArray(Brhs[]::new));
     orgInfo.setAddresses(organizationModel.getAddresses().getElements());

@@ -13,4 +13,6 @@ public interface InteractionService {
   List<InteractionLogRecord> get(String ervuId, String[] excludedStatuses);

   void setStatus(String fileId, String status, String fileName, String form, Timestamp timestamp, String sender, Integer count, String ervuId);
+
+  void updateStatus(String fileId, String status, String ervuId);
 }

@@ -42,6 +42,15 @@ public class InteractionServiceImpl implements InteractionService {
         .set(INTERACTION_LOG.SENDER, sender)
         .set(INTERACTION_LOG.FILE_NAME, fileName)
         .set(INTERACTION_LOG.RECORDS_SENT, count)
-        .set(INTERACTION_LOG.ERVU_ID, ervuId);
+        .set(INTERACTION_LOG.ERVU_ID, ervuId)
+        .execute();
   }

+  public void updateStatus(String fileId, String status, String ervuId) {
+    dslContext.update(INTERACTION_LOG)
+        .set(INTERACTION_LOG.STATUS, status)
+        .where(INTERACTION_LOG.ERVU_ID.eq(ervuId))
+        .and(INTERACTION_LOG.FILE_ID.eq(fileId))
+        .execute();
+  }
 }

@@ -14,7 +14,7 @@ import static org.springframework.util.StringUtils.hasText;
 public final class DateUtil {

   private static final DateTimeFormatter DATE_FORMATTER = DateTimeFormatter.ofPattern("dd.MM.yyyy");
-  private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm");
+  private static final DateTimeFormatter DATE_TIME_FORMATTER = DateTimeFormatter.ofPattern("dd.MM.yyyy HH:mm:ss");

   private DateUtil() {}

@@ -15,6 +15,8 @@ AV_KAFKA_SECURITY_PROTOCOL=SASL_PLAINTEXT
 AV_KAFKA_SASL_MECHANISM=SCRAM-SHA-256
 AV_KAFKA_USERNAME=user1
 AV_KAFKA_PASSWORD=Blfi9d2OFG
+AV_KAFKA_GROUP_ID=1
+AV_KAFKA_DOWNLOAD_RESPONSE=ervu.lkrp.av-fileupload-status
 ERVU_FILEUPLOAD_MAX_FILE_SIZE=5242880
 ERVU_FILEUPLOAD_MAX_REQUEST_SIZE=6291456
 ERVU_FILEUPLOAD_FILE_SIZE_THRESHOLD=0
@@ -34,6 +36,7 @@ ERVU_KAFKA_ORG_REQUEST_TOPIC=ervu.organization.request
+ERVU_KAFKA_REPLY_TIMEOUT=30
 ERVU_KAFKA_JOURNAL_REQUEST_TOPIC=ervu.organization.journal.request
 ERVU_KAFKA_JOURNAL_REPLY_TOPIC=ervu.organization.journal.response
 DB.JOURNAL.EXCLUDED.STATUSES=Направлено в ЕРВУ,Получен ЕРВУ
 ESNSI_OKOPF_URL=https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&encoding=UTF_8
 ESNSI_OKOPF_CRON_LOAD=0 0 */1 * * *
 ERVU_KAFKA_SECURITY_PROTOCOL=SASL_PLAINTEXT
@@ -43,6 +46,10 @@ ERVU_KAFKA_PASSWORD=Blfi9d2OFG
 ERVU_KAFKA_EXCERPT_REPLY_TOPIC=ervu.lkrp.excerpt.response
 ERVU_KAFKA_EXCERPT_REQUEST_TOPIC=ervu.lkrp.excerpt.request

+ERVU_FILE_UPLOAD_MAX_FILE_SIZE=5242880
+ERVU_FILE_UPLOAD_MAX_REQUEST_SIZE=6291456
+ERVU_FILE_UPLOAD_FILE_SIZE_THRESHOLD=0
+
 S3_ENDPOINT=http://ervu-minio.k8s.micord.ru:31900
 S3_ACCESS_KEY=rlTdTvkmSXu9FsLhfecw
 S3_SECRET_KEY=NUmY0wwRIEyAd98GCKd1cOgJWvLQYAcMMul5Ulu0

@@ -64,9 +64,6 @@
         <property name="av.kafka.username" value="user1"/>
         <property name="av.kafka.password" value="Blfi9d2OFG"/>
         <property name="av.kafka.message.topic.name" value="file-to-upload"/>
-        <property name="ervu.fileupload.max_file_size" value="5242880"/>
-        <property name="ervu.fileupload.max_request_size" value="6291456"/>
-        <property name="ervu.fileupload.file_size_threshold" value="0"/>
         <property name="esia.scopes" value="fullname, snils, id_doc, birthdate, usr_org, openid"/>
         <property name="esia.org.scopes" value="org_fullname, org_shortname, org_brhs, org_brhs_ctts, org_brhs_addrs, org_type, org_ogrn, org_inn, org_leg, org_kpp, org_ctts, org_addrs, org_grps, org_emps"/>
         <property name="esia.org.scope.url" value="http://esia.gosuslugi.ru/"/>
@@ -89,11 +86,14 @@
         <property name="esnsi.okopf.url" value="https://esnsi.gosuslugi.ru/rest/ext/v1/classifiers/11465/file?extension=JSON&encoding=UTF_8"/>
         <property name="ervu.kafka.journal.request.topic" value="ervu.organization.journal.request"/>
         <property name="ervu.kafka.journal.reply.topic" value="ervu.organization.journal.response"/>
+        <property name="db.journal.excluded.statuses" value="Направлено в ЕРВУ,Получен ЕРВУ"/>
         <property name="ervu.kafka.excerpt.reply.topic" value="ervu.lkrp.excerpt.response"/>
         <property name="ervu.kafka.excerpt.request.topic" value="ervu.lkrp.excerpt.request"/>
         <property name="s3.endpoint" value="http://ervu-minio.k8s.micord.ru:31900"/>
         <property name="s3.access_key" value="rlTdTvkmSXu9FsLhfecw"/>
         <property name="s3.secret_key" value="NUmY0wwRIEyAd98GCKd1cOgJWvLQYAcMMul5Ulu0"/>
+        <property name="av.kafka.group.id" value="1"/>
+        <property name="av.kafka.download.response" value="ervu.lkrp.av-fileupload-status"/>
     </system-properties>
     <management>
         <audit-log>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ru.micord.ervu.lkrp</groupId>
     <artifactId>ul</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
+    <version>1.8.0-SNAPSHOT</version>
   </parent>

   <groupId>ru.micord.ervu.lkrp.ul</groupId>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ru.micord.ervu.lkrp</groupId>
     <artifactId>ul</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
+    <version>1.8.0-SNAPSHOT</version>
   </parent>

   <groupId>ru.micord.ervu.lkrp.ul</groupId>

@@ -564,7 +564,7 @@
 .webbpm.ervu_lkrp_ul .journal .grid {
   flex-direction: column;
   flex: 1 1 auto;
-  height: 100px;
+  height: 300px;
 }
 .webbpm.ervu_lkrp_ul .journal .fieldset button-component {
   display: block;

@@ -28,10 +28,16 @@ export class ErvuDownloadFileButton extends AbstractButton {

   public doClickActions(): Promise<any> {
     return this.httpClient.get('kafka/excerpt', {
-      responseType: 'blob'
+      responseType: 'blob',
+      observe: 'response'
     }).toPromise().then((response) => {
-      const url = window.URL.createObjectURL(response);
+      const contentDisposition = response.headers.get('Content-Disposition');
+      const url = window.URL.createObjectURL(response.body);
       const a = document.createElement('a');
+      const fileNameMatch = contentDisposition.match(/filename\*=?UTF-8''(.+)/i);
+      if (fileNameMatch && fileNameMatch.length > 1) {
+        this.fileName = decodeURIComponent(fileNameMatch[1].replace(/\+/g, '%20'));
+      }
       a.href = url;
       a.download = this.fileName;
       document.body.appendChild(a);

@@ -91,10 +91,16 @@ export class ErvuFileUpload extends InputControl {
       }],
       maxFileSize: this.maxFileSizeMb ? this.maxFileSizeMb * 1024 * 1024 : undefined,
       queueLimit: this.maxFilesToUpload ? this.maxFilesToUpload : undefined,
-      headers: [{
-        name: "X-Employee-Info-File-Form-Type",
-        value: EmployeeInfoFileFormType[this.formType]
-      }]
+      headers: [
+        {
+          name: "X-Employee-Info-File-Form-Type",
+          value: EmployeeInfoFileFormType[this.formType]
+        },
+        {
+          name: "Client-Time-Zone",
+          value: Intl.DateTimeFormat().resolvedOptions().timeZone
+        }
+      ]
     });

     this.setUploaderMethods();

@@ -0,0 +1,38 @@
+import {DateTimeUtil, DefaultValueFormatter, GridValueFormatter} from "@webbpm/base-package";
+import {ValueFormatterParams} from "ag-grid-community";
+
+export class ClientDateTimeFormatter extends DefaultValueFormatter implements GridValueFormatter {
+
+  public dateFormat: string = '';
+
+  format(params: ValueFormatterParams): string {
+    if (this.isValueEmpty(params)) {
+      return super.format(params);
+    }
+
+    // don't apply formatter to row with aggregation function
+    if (params.node.isRowPinned()) {
+      return params.value;
+    }
+
+    if (!this.dateFormat) {
+      return ClientDateTimeFormatter.parseForClientTimeZoneAndFormat(params.value, DateTimeUtil.TIMESTAMP_FORMAT);
+    }
+
+    if (!ClientDateTimeFormatter.isValidFormat(this.dateFormat)) {
+      throw new Error('Invalid date format = ' + this.dateFormat);
+    }
+
+    return ClientDateTimeFormatter.parseForClientTimeZoneAndFormat(params.value, this.dateFormat);
+  }
+
+  private static isValidFormat(format: string): boolean {
+    const validCharsRegex = /^[YyMmDdHhSsTZ.:\[\] -]*$/;
+    return format && validCharsRegex.test(format);
+  }
+
+  private static parseForClientTimeZoneAndFormat(value: string, dateFormat: string): string {
+    let timezone = Intl.DateTimeFormat().resolvedOptions().timeZone;
+    return DateTimeUtil.parseIsoDateTime(value).tz(timezone).format(dateFormat);
+  }
+}

@@ -3,6 +3,7 @@ import {ActivatedRouteSnapshot, CanActivate, Router, RouterStateSnapshot} from "@angular/router";
 import {Observable} from "rxjs";
 import {HttpClient, HttpParams} from "@angular/common/http";
 import {CookieService} from "ngx-cookie";
+import {MessagesService} from "@webbpm/base-package";

 @Injectable({providedIn:'root'})
 export abstract class AuthGuard implements CanActivate {
@@ -10,7 +11,8 @@ export abstract class AuthGuard implements CanActivate {
   protected constructor(
     protected router: Router,
     private httpClient: HttpClient,
-    private cookieService: CookieService
+    private cookieService: CookieService,
+    private messageService: MessagesService
   ) {
   }

@@ -28,7 +30,9 @@ export abstract class AuthGuard implements CanActivate {
       return true;
     }
     else if (error) {
-      throw new Error(error + ', error description =' + errorDescription);
+      let errorMessage = error + ', error description =' + errorDescription;
+      this.messageService.error(errorMessage)
+      throw new Error(errorMessage);
     }
     else if (code) {
       const params = new HttpParams().set('code', code);

pom.xml

@@ -4,7 +4,7 @@
   <modelVersion>4.0.0</modelVersion>
   <groupId>ru.micord.ervu.lkrp</groupId>
   <artifactId>ul</artifactId>
-  <version>1.0.0-SNAPSHOT</version>
+  <version>1.8.0-SNAPSHOT</version>
   <packaging>pom</packaging>
   <modules>
     <module>backend</module>
@@ -15,7 +15,7 @@
   </scm>
   <properties>
     <spring-security-kerberos.version>1.0.1.RELEASE</spring-security-kerberos.version>
-    <spring-kafka.version>2.6.13</spring-kafka.version>
+    <spring-kafka.version>2.9.13</spring-kafka.version>
     <org.bouncycastle.version>1.60</org.bouncycastle.version>
    <project.build.sourceEncoding>UTF-8</project.build.sourceEncoding>
     <enable.version.in.url>false</enable.version.in.url>

@@ -4,7 +4,7 @@
   <parent>
     <groupId>ru.micord.ervu.lkrp</groupId>
     <artifactId>ul</artifactId>
-    <version>1.0.0-SNAPSHOT</version>
+    <version>1.8.0-SNAPSHOT</version>
   </parent>

   <groupId>ru.micord.ervu.lkrp.ul</groupId>

@@ -700,17 +700,9 @@
           <entry>
             <key>valueFormatter</key>
             <value>
-              <complex>
-                <entry>
-                  <key>dateFormat</key>
-                  <value>
-                    <simple>"DD.MM.YYYY HH:mm:ss"</simple>
-                  </value>
-                </entry>
-              </complex>
               <implRef type="TS">
-                <className>DateTimeFormatter</className>
-                <packageName>component.grid.formatters</packageName>
+                <className>ClientDateTimeFormatter</className>
+                <packageName>ervu.component.grid.formatter</packageName>
               </implRef>
             </value>
           </entry>