Merge branch 'feature/SUPPORT-9368_journal_new_version' into develop

This commit is contained in:
adel.ka 2025-10-20 10:49:00 +03:00
commit e7c983f70a
63 changed files with 9632 additions and 1042 deletions

View file

@ -97,14 +97,6 @@
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
<dependency>
<groupId>ru.cg.webbpm.modules</groupId>
<artifactId>inject</artifactId>
@ -202,9 +194,28 @@
<groupId>org.apache.tika</groupId>
<artifactId>tika-core</artifactId>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java-util</artifactId>
</dependency>
</dependencies>
<build>
<finalName>${project.parent.artifactId}</finalName>
<extensions>
<extension>
<groupId>kr.motd.maven</groupId>
<artifactId>os-maven-plugin</artifactId>
<version>${os-maven-plugin.version}</version>
</extension>
</extensions>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
@ -233,6 +244,22 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
<configuration>
<protocArtifact>com.google.protobuf:protoc:4.27.3:exe:${os.detected.classifier}</protocArtifact>
<protoSourceRoot>${project.parent.basedir}/backend/src/main/resources</protoSourceRoot>
<outputDirectory>${project.parent.basedir}/backend/target/generated-sources/java/protobuf</outputDirectory>
</configuration>
</plugin>
</plugins>
</build>
<profiles>

View file

@ -0,0 +1,10 @@
package ervu;
/**
 * File-type discriminator constants used to tag uploaded files
 * (main document, its detached signature, and the MCHD power-of-attorney file).
 *
 * @author Eduard Tihomirov
 */
public final class FileConstants {
  public static final String DOCUMENT = "DOCUMENT";
  public static final String SIGNATURE = "SIGNATURE";
  public static final String MCHD = "MCHD";

  /** Noninstantiable constants holder — suppress the implicit public constructor. */
  private FileConstants() {
    throw new AssertionError("No FileConstants instances for you!");
  }
}

View file

@ -23,7 +23,7 @@ import org.springframework.kafka.core.ProducerFactory;
* @author Alexandr Shalaginov
*/
@Configuration
public class AvKafkaConfig {
public class FileKafkaConfig {
@Value("${kafka.hosts}")
private String kafkaUrl;
@Value("${kafka.auth_sec_proto}")
@ -38,7 +38,7 @@ public class AvKafkaConfig {
private String saslMechanism;
@Bean
public ProducerFactory<String, String> avProducerFactory() {
public ProducerFactory<String, String> fileProducerFactory() {
return new DefaultKafkaProducerFactory<>(producerConfigs());
}
@ -58,7 +58,7 @@ public class AvKafkaConfig {
}
@Bean
public ConsumerFactory<String, String> avConsumerFactory() {
public ConsumerFactory<String, String> fileConsumerFactory() {
return new DefaultKafkaConsumerFactory<>(consumerConfigs());
}
@ -77,16 +77,16 @@ public class AvKafkaConfig {
return props;
}
@Bean("avContainerFactory")
@Bean("fileContainerFactory")
public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(avConsumerFactory());
factory.setConsumerFactory(fileConsumerFactory());
return factory;
}
@Bean("avTemplate")
@Bean("fileTemplate")
public KafkaTemplate<String, String> kafkaTemplate() {
return new KafkaTemplate<>(avProducerFactory());
return new KafkaTemplate<>(fileProducerFactory());
}
}

View file

@ -15,7 +15,9 @@ import java.time.Duration;
import java.time.ZonedDateTime;
import java.util.Arrays;
import java.util.Comparator;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.UUID;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicBoolean;
@ -89,12 +91,20 @@ public class WebDavClient {
}
@Retryable(value = {IOException.class}, backoff = @Backoff(delayExpression = "${webdav.retry.delay:500}"))
public String uploadFile(MultipartFile multipartFile) {
String fileName = getNewFilename(multipartFile.getOriginalFilename());
public Map<String, String> uploadFiles(Map<String, MultipartFile> files) {
String fileCatalog = UUID.randomUUID() + "/";
Sardine sardine = initClient(username, password);
try {
return putAndGetUrl(multipartFile.getBytes(), fileName, sardine);
Map<String, String> result = new HashMap<>();
for (Map.Entry<String, MultipartFile> entry : files.entrySet()) {
String key = entry.getKey();
MultipartFile file = entry.getValue();
if (file != null) {
result.put(key, putAndGetUrl(file.getBytes(), getNewFilename(file.getOriginalFilename()), sardine, fileCatalog));
}
}
return result;
}
catch (IOException e) {
throw new WebDavException("Failed to put file into WebDAV", e);
@ -115,7 +125,7 @@ public class WebDavClient {
return UUID.randomUUID() + fileExtension;
}
public String putAndGetUrl(byte[] fileBytes, String fileName, Sardine client) throws IOException {
public String putAndGetUrl(byte[] fileBytes, String fileName, Sardine client, String fileCatalog) throws IOException {
if (badServersCache.size() == urls.length) {
return null;
}
@ -140,19 +150,23 @@ public class WebDavClient {
}
boolean isBad = false;
String serverUrl = server.getUrl();
String fileUploadUrl = serverUrl + "/" + fileName;
String directory = serverUrl + "/" + fileCatalog + "/";
if (!client.exists(directory)) {
client.createDirectory(directory);
}
String fileUploadUrl = directory + fileName;
try {
client.put(fileUploadUrl, fileBytes);
server.setLastCallTime(System.currentTimeMillis());
}
catch (ConnectException | ClientProtocolException ignore) {
isBad = true;
LOGGER.error("WebDAV error. Url: {}, Message: {}", fileUploadUrl, ignore.getMessage());;
}
if (isBad) {
badServersCache.getUnchecked(serverUrl);
return putAndGetUrl(fileBytes, fileName, client);
return putAndGetUrl(fileBytes, fileName, client, fileCatalog);
}
return fileUploadUrl;

View file

@ -26,13 +26,15 @@ public class EmployeeInfoFileUploadController {
@RequestMapping(value = "/employee/document", method = RequestMethod.POST)
public ResponseEntity<?> saveEmployeeInformationFile(
@RequestParam("file") MultipartFile multipartFile,
@RequestParam("signFile") MultipartFile signFile,
@RequestParam(value = "mchdFile", required = false) MultipartFile mchdFile,
@RequestHeader("X-Employee-Info-File-Form-Type") String formType,
@RequestHeader("Client-Time-Zone") String clientTimeZone) {
String offset = ZonedDateTime.now(TimeZone.getTimeZone(clientTimeZone).toZoneId())
.getOffset().getId();
if (this.fileUploadService.saveEmployeeInformationFile(multipartFile, formType, offset)) {
if (this.fileUploadService.saveEmployeeInformationFiles(multipartFile, signFile, mchdFile, formType, offset)) {
return ResponseEntity.ok("File successfully uploaded.");
}

View file

@ -7,7 +7,10 @@ public enum FileStatusCode {
FILE_UPLOADED("01"),
FILE_INFECTED("02"),
FILE_CLEAN("03"),
FILE_NOT_CHECKED("11");
FILE_ACCEPTED("04"),
FILE_NOT_CHECKED("11"),
SIGN_INVALID("12"),
MCHD_INVALID("13");
private final String code;

View file

@ -0,0 +1,19 @@
package ervu.exception;
/**
 * Unchecked exception signalling a failure during the employee-information
 * file upload flow (WebDAV upload, signature/MCHD verification, Kafka exchange).
 *
 * @author Eduard Tihomirov
 */
public class FileUploadException extends RuntimeException {

  /**
   * @param message human-readable description of the failure
   */
  public FileUploadException(String message) {
    super(message);
  }

  /**
   * @param message human-readable description of the failure
   * @param cause underlying exception that triggered this one
   */
  public FileUploadException(String message, Throwable cause) {
    super(message, cause);
  }

  /**
   * @param cause underlying exception that triggered this one
   */
  public FileUploadException(Throwable cause) {
    super(cause);
  }
}

View file

@ -3,5 +3,5 @@ package ervu.model.fileupload;
/**
* @author r.latypov
*/
public record DownloadResponse(UploadOrgInfo orgInfo, FileInfo fileInfo) {
public record DownloadResponse(UploadOrgInfo orgInfo, FileInfo[] filesInfo) {
}

View file

@ -1,5 +1,6 @@
package ervu.model.fileupload;
import java.util.Arrays;
import java.util.Objects;
/**
@ -7,19 +8,19 @@ import java.util.Objects;
*/
public class EmployeeInfoKafkaMessage {
private final UploadOrgInfo orgInfo;
private final FileInfo fileInfo;
private final FileInfo[] filesInfo;
public EmployeeInfoKafkaMessage(UploadOrgInfo orgInfo, FileInfo fileInfo) {
public EmployeeInfoKafkaMessage(UploadOrgInfo orgInfo, FileInfo[] filesInfo) {
this.orgInfo = orgInfo;
this.fileInfo = fileInfo;
this.filesInfo = filesInfo;
}
public UploadOrgInfo getOrgInfo() {
return orgInfo;
}
public FileInfo getFileInfo() {
return fileInfo;
public FileInfo[] getFilesInfo() {
return filesInfo;
}
@Override
@ -27,19 +28,21 @@ public class EmployeeInfoKafkaMessage {
if (this == o) return true;
if (o == null || getClass() != o.getClass()) return false;
EmployeeInfoKafkaMessage that = (EmployeeInfoKafkaMessage) o;
return Objects.equals(orgInfo, that.orgInfo) && Objects.equals(fileInfo, that.fileInfo);
return Objects.equals(orgInfo, that.orgInfo) && Arrays.equals(filesInfo, that.filesInfo);
}
@Override
public int hashCode() {
return Objects.hash(orgInfo, fileInfo);
int result = Objects.hash(orgInfo);
result = 31 * result + Arrays.hashCode(filesInfo);
return result;
}
@Override
public String toString() {
return "KafkaMessage{" +
"uploadOrgInfo=" + orgInfo +
", fileInfo=" + fileInfo +
", fileInfo=" + Arrays.toString(filesInfo) +
'}';
}
}

View file

@ -15,13 +15,14 @@ public class FileInfo {
private String departureDateTime;
private String timeZone;
private FileStatus fileStatus;
private String type;
public FileInfo() {
}
public FileInfo(String fileId, String fileUrl, String fileName, String filePatternCode,
String filePatternName, String fileSize, String departureDateTime, String timeZone,
FileStatus fileStatus) {
FileStatus fileStatus, String type) {
this.fileId = fileId;
this.fileUrl = fileUrl;
this.fileName = fileName;
@ -31,6 +32,7 @@ public class FileInfo {
this.departureDateTime = departureDateTime;
this.timeZone = timeZone;
this.fileStatus = fileStatus;
this.type = type;
}
public String getFileId() {
@ -69,6 +71,22 @@ public class FileInfo {
return fileStatus;
}
public String getType() {
return type;
}
public void setType(String type) {
this.type = type;
}
public void setFileStatus(FileStatus fileStatus) {
this.fileStatus = fileStatus;
}
public void setFileUrl(String fileUrl) {
this.fileUrl = fileUrl;
}
@Override
public boolean equals(Object o) {
if (this == o) return true;

View file

@ -0,0 +1,7 @@
package ervu.model.fileupload;
/**
 * Reply payload describing the verification status of an uploaded file pack.
 *
 * <p>NOTE(review): field semantics inferred from usage in the upload service —
 * {@code filesInfo} appears to carry the document/signature/MCHD entries and
 * {@code packInfo} the overall pack status; confirm against the producer side.
 *
 * @author Eduard Tihomirov
 */
public record FileStatusResponse(UploadOrgInfo orgInfo, FileInfo[] filesInfo, FileStatus packInfo) {
}

View file

@ -5,17 +5,32 @@ import java.nio.charset.StandardCharsets;
import java.sql.Timestamp;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.HashMap;
import java.util.Locale;
import java.util.Map;
import java.util.UUID;
import javax.xml.parsers.DocumentBuilderFactory;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import ervu.client.fileupload.WebDavClient;
import ervu.exception.FileUploadException;
import ervu.model.fileupload.DownloadResponse;
import ervu.model.fileupload.EmployeeInfoFileFormType;
import ervu.model.fileupload.EmployeeInfoKafkaMessage;
import ervu.model.fileupload.FileInfo;
import ervu.model.fileupload.FileStatus;
import ervu.model.fileupload.FileStatusResponse;
import ervu.model.fileupload.UploadOrgInfo;
import org.apache.http.HttpEntity;
import org.apache.http.client.methods.CloseableHttpResponse;
import org.apache.http.client.methods.HttpPost;
import org.apache.http.entity.ContentType;
import org.apache.http.entity.mime.MultipartEntityBuilder;
import org.apache.http.impl.client.CloseableHttpClient;
import org.apache.http.impl.client.HttpClients;
import org.apache.http.util.EntityUtils;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.tika.Tika;
import org.apache.tika.mime.MediaType;
@ -26,14 +41,21 @@ import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.support.MessageSourceAccessor;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.stereotype.Service;
import org.springframework.web.multipart.MultipartFile;
import org.w3c.dom.Document;
import org.w3c.dom.Node;
import ru.micord.ervu.audit.service.AuditService;
import ru.micord.ervu.exception.JsonParsingException;
import ru.micord.ervu.kafka.service.ReplyingKafkaService;
import ru.micord.ervu.security.esia.config.EsiaConfig;
import ru.micord.ervu.security.esia.model.EmployeeModel;
import ru.micord.ervu.security.esia.model.MchdInfoModel;
import ru.micord.ervu.security.esia.model.PersonModel;
import ru.micord.ervu.security.esia.model.VerifyDocumentSignResponse;
import ru.micord.ervu.security.esia.service.UlDataService;
import ru.micord.ervu.security.esia.EsiaAuthInfoStore;
import ru.micord.ervu.security.webbpm.jwt.UserIdsPair;
@ -41,10 +63,18 @@ import ru.micord.ervu.security.webbpm.jwt.util.SecurityUtil;
import ru.micord.ervu.service.InteractionService;
import ru.micord.ervu.util.DateUtils;
import static ervu.enums.FileStatusCode.FILE_CLEAN;
import ru.cg.webbpm.modules.core.runtime.api.LocalizedException;
import ru.cg.webbpm.modules.core.runtime.api.MessageBundleUtils;
import static ervu.FileConstants.DOCUMENT;
import static ervu.FileConstants.MCHD;
import static ervu.FileConstants.SIGNATURE;
import static ervu.enums.FileStatusCode.FILE_ACCEPTED;
import static ervu.enums.FileStatusCode.FILE_INFECTED;
import static ervu.enums.FileStatusCode.FILE_NOT_CHECKED;
import static ervu.enums.FileStatusCode.FILE_UPLOADED;
import static ervu.enums.FileStatusCode.MCHD_INVALID;
import static ervu.enums.FileStatusCode.SIGN_INVALID;
import static ru.micord.ervu.util.StringUtils.convertToFio;
/**
@ -53,127 +83,529 @@ import static ru.micord.ervu.util.StringUtils.convertToFio;
@Service
public class EmployeeInfoFileUploadService {
private static final Logger LOGGER = LoggerFactory.getLogger(EmployeeInfoFileUploadService.class);
private static final MessageSourceAccessor MESSAGE_SOURCE = MessageBundleUtils.createAccessor(
"messages/common_errors_messages");
private final WebDavClient webDavClient;
private final EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService;
private final ReplyingKafkaService<String, String> replyingKafkaService;
private final KafkaTemplate<String, String> kafkaTemplate;
private final InteractionService interactionService;
private final UlDataService ulDataService;
private final AuditService auditService;
private final ObjectMapper objectMapper;
private final EsiaConfig esiaConfig;
@Value("${av.kafka.message.topic.name}")
private String kafkaTopicName;
@Value("${av.kafka.clear.s3}")
private String kafkaClearS3Topic;
@Value("${ervu.kafka.download.request}")
private String kafkaDownloadRequestTopic;
@Value("${av.kafka.download.request}")
private String kafkaRequestTopic;
@Value("${av.kafka.download.response}")
private String kafkaResponseTopic;
public EmployeeInfoFileUploadService(
WebDavClient webDavClient,
EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService,
@Qualifier("avTemplate") KafkaTemplate<String, String> kafkaTemplate,
ReplyingKafkaService<String, String> replyingKafkaService,
InteractionService interactionService,
UlDataService ulDataService,
AuditService auditService) {
AuditService auditService,
ObjectMapper objectMapper,
EsiaConfig esiaConfig,
@Qualifier("fileTemplate") KafkaTemplate<String, String> kafkaTemplate) {
this.webDavClient = webDavClient;
this.employeeInfoKafkaMessageService = employeeInfoKafkaMessageService;
this.kafkaTemplate = kafkaTemplate;
this.replyingKafkaService = replyingKafkaService;
this.interactionService = interactionService;
this.ulDataService = ulDataService;
this.auditService = auditService;
this.objectMapper = objectMapper;
this.esiaConfig = esiaConfig;
this.kafkaTemplate = kafkaTemplate;
}
public boolean saveEmployeeInformationFile(MultipartFile multipartFile, String formType,
String offset) {
UserIdsPair userIdsPair = SecurityUtil.getUserIdsPair();
public boolean saveEmployeeInformationFiles(MultipartFile multipartFile,
MultipartFile signFile, MultipartFile mchdFile, String formType,
String offset) {
if (userIdsPair == null || !isValid(multipartFile)) {
UserIdsPair userIdsPair = SecurityUtil.getUserIdsPair();
LocalDateTime now = LocalDateTime.now();
String departureDateTime = DateUtils.convertToString(now);
if (userIdsPair == null || !isValidCsvWithSig(multipartFile, signFile)) {
return false;
}
String fileId = UUID.randomUUID().toString();
String fileName = multipartFile.getOriginalFilename();
EmployeeInfoFileFormType employeeInfoFileFormType = EmployeeInfoFileFormType.valueOf(
formType);
String esiaUserId = userIdsPair.getEsiaUserId();
String ervuId = userIdsPair.getErvuId();
String accessToken = EsiaAuthInfoStore.getAccessToken(esiaUserId);
EmployeeModel employeeModel = ulDataService.getEmployeeModel(accessToken);
PersonModel personModel = employeeModel.getPerson();
EmployeeModel chiefModel = ulDataService.getChiefEmployeeModel(accessToken);
String fio = convertToFio(personModel.getFirstName(), personModel.getMiddleName(),
personModel.getLastName()
);
UploadOrgInfo uploadOrgInfo = employeeInfoKafkaMessageService.getOrgInfo(accessToken, ervuId,
esiaUserId, personModel
);
EmployeeInfoFileFormType employeeInfoFileFormType = EmployeeInfoFileFormType.valueOf(formType);
FileInfo fileInfo = createFileInfo(multipartFile, employeeInfoFileFormType,
departureDateTime, offset, DOCUMENT
);
FileInfo signFileInfo = createFileInfo(signFile, employeeInfoFileFormType,
departureDateTime, offset, SIGNATURE
);
FileInfo mchdFileInfo = mchdFile != null ?
createFileInfo(mchdFile, employeeInfoFileFormType, departureDateTime,
offset, MCHD
) : null;
String fileUploadUrl = this.webDavClient.uploadFile(multipartFile);
FileStatus fileStatus = new FileStatus();
fileStatus.setStatus(fileUploadUrl == null ? "Невозможно проверить файл ЛК РП" : "Загрузка");
LocalDateTime now = LocalDateTime.now();
interactionService.setStatus(fileId, fileStatus.getStatus(), fileName,
employeeInfoFileFormType.getFilePatternCode(), Timestamp.valueOf(now),
convertToFio(personModel.getFirstName(), personModel.getMiddleName(),
personModel.getLastName()
),
ervuId
Map<String, String> uploadResults = uploadFiles(multipartFile, signFile, mchdFile);
String fileUploadUrl = uploadResults.get("fileUrl");
String signFileUploadUrl = uploadResults.get("signFile");
String mchdFileUploadUrl = uploadResults.get("mchdFile");
boolean uploadSuccess = checkUploadSuccess(fileUploadUrl, signFileUploadUrl, mchdFileUploadUrl,
mchdFile != null
);
long fileSize = multipartFile.getSize();
String departureDateTime = DateUtils.convertToString(now);
EmployeeInfoKafkaMessage kafkaMessage = employeeInfoKafkaMessageService.getKafkaMessage(
fileId,
fileUploadUrl,
fileName,
employeeInfoFileFormType,
departureDateTime,
accessToken,
offset,
fileStatus,
ervuId,
esiaUserId,
personModel,
fileSize
fileStatus.setStatus(uploadSuccess ? "Загрузка" : "Невозможно проверить файл ЛК РП");
interactionService.setStatus(fileInfo.getFileId(), fileStatus.getStatus(),
multipartFile.getOriginalFilename(), employeeInfoFileFormType.getFilePatternCode(),
Timestamp.valueOf(now), fio, ervuId
);
if (fileUploadUrl != null) {
fileStatus.setCode(FILE_UPLOADED.getCode());
fileStatus.setDescription("Файл принят до проверки на вирусы");
String jsonMessage = getJsonKafkaMessage(kafkaMessage);
return sendMessage(jsonMessage);
fileInfo.setFileStatus(fileStatus);
signFileInfo.setFileStatus(fileStatus);
if (mchdFileInfo != null) {
mchdFileInfo.setFileStatus(fileStatus);
}
if (!uploadSuccess) {
handleUploadFailure(fileStatus, uploadOrgInfo, fileInfo, signFileInfo, mchdFileInfo,
multipartFile.getOriginalFilename(), signFile.getOriginalFilename(),
mchdFile != null ? mchdFile.getOriginalFilename() : null
);
return false;
}
fileInfo.setFileUrl(fileUploadUrl);
signFileInfo.setFileUrl(signFileUploadUrl);
if (mchdFileInfo != null) {
mchdFileInfo.setFileUrl(mchdFileUploadUrl);
}
FileInfo[] fileInfos = mchdFileInfo != null ?
new FileInfo[] {fileInfo, signFileInfo, mchdFileInfo} :
new FileInfo[] {fileInfo, signFileInfo};
String response = sendKafkaMessage(uploadOrgInfo, fileInfos, fileStatus);
DownloadResponse downloadResponse;
try {
downloadResponse = processMessageFromAv(response);
}
catch (JsonProcessingException e) {
fileStatus.setCode(FILE_NOT_CHECKED.getCode());
fileStatus.setStatus("Невозможно проверить файл ЛК РП");
fileStatus.setDescription(
"Невозможно проверить файл по причине недоступности или ошибки в работе антивируса");
interactionService.updateStatus(fileInfo.getFileId(), fileInfo.getFileStatus().getStatus(),
uploadOrgInfo.getOrgId()
);
auditService.processUploadEvent(uploadOrgInfo, fileInfos, fileStatus);
throw new FileUploadException(e);
}
fileInfo = Arrays.stream(downloadResponse.filesInfo())
.filter(file -> DOCUMENT.equals(file.getType()))
.findFirst()
.orElse(fileInfo);
signFileInfo = Arrays.stream(downloadResponse.filesInfo())
.filter(file -> SIGNATURE.equals(file.getType()))
.findFirst()
.orElse(signFileInfo);
mchdFileInfo = Arrays.stream(downloadResponse.filesInfo())
.filter(file -> MCHD.equals(file.getType()))
.findFirst()
.orElse(mchdFileInfo);
VerifyDocumentSignResponse verifyDocumentSignResponse;
try {
verifyDocumentSignResponse = validateSign(multipartFile, signFile);
}
catch (Exception e) {
handeSignError(fileInfo, signFileInfo, uploadOrgInfo, response);
throw e;
}
return validateSignerAndMchd(verifyDocumentSignResponse, chiefModel, uploadOrgInfo,
mchdFile, accessToken, fileInfo, signFileInfo, mchdFileInfo, ervuId, response
);
}
private FileInfo createFileInfo(MultipartFile file, EmployeeInfoFileFormType formType,
String departureDateTime, String offset, String fileType) {
String fileId = UUID.randomUUID().toString();
return employeeInfoKafkaMessageService.getFileInfo(fileId, null, file.getOriginalFilename(),
formType, departureDateTime, offset, null, file.getSize(), fileType
);
}
private Map<String, String> uploadFiles(MultipartFile multipartFile, MultipartFile signFile,
MultipartFile mchdFile) {
Map<String, MultipartFile> filesToUpload = new HashMap<>();
filesToUpload.put("fileUrl", multipartFile);
filesToUpload.put("signFile", signFile);
if (mchdFile != null) {
filesToUpload.put("mchdFile", mchdFile);
}
return this.webDavClient.uploadFiles(filesToUpload);
}
private boolean checkUploadSuccess(String fileUploadUrl, String signFileUploadUrl,
String mchdFileUploadUrl, boolean hasMchdFile) {
if (hasMchdFile) {
return fileUploadUrl != null && signFileUploadUrl != null && mchdFileUploadUrl != null;
}
else {
LOGGER.error("Failed to upload file: {}", fileName);
fileStatus.setCode(FILE_NOT_CHECKED.getCode());
fileStatus.setDescription("Невозможно проверить файл по причине недоступности или ошибки в работе антивируса");
auditService.processUploadEvent(kafkaMessage.getOrgInfo(), kafkaMessage.getFileInfo());
return false;
return fileUploadUrl != null && signFileUploadUrl != null;
}
}
private boolean isValid(MultipartFile multipartFile) {
private void handleUploadFailure(FileStatus fileStatus, UploadOrgInfo uploadOrgInfo,
FileInfo fileInfo, FileInfo signFileInfo, FileInfo mchdFileInfo,
String fileName, String signFileName, String mchdFileName) {
if (multipartFile == null || multipartFile.getOriginalFilename() == null) {
LOGGER.error("Failed to upload files: {}, {}, {}", fileName, signFileName, mchdFileName);
fileStatus.setCode(FILE_NOT_CHECKED.getCode());
fileStatus.setDescription(
"Невозможно проверить файл по причине недоступности или ошибки в работе антивируса");
FileInfo[] fileInfos = mchdFileInfo != null ?
new FileInfo[] {fileInfo, signFileInfo, mchdFileInfo} :
new FileInfo[] {fileInfo, signFileInfo};
auditService.processUploadEvent(uploadOrgInfo, fileInfos, fileStatus);
}
private String sendKafkaMessage(UploadOrgInfo uploadOrgInfo, FileInfo[] fileInfos,
FileStatus fileStatus) {
EmployeeInfoKafkaMessage kafkaMessage = employeeInfoKafkaMessageService.getKafkaMessage(
uploadOrgInfo, fileInfos);
fileStatus.setCode(FILE_UPLOADED.getCode());
fileStatus.setDescription("Файл принят до проверки на вирусы");
String jsonMessage = getJsonKafkaMessage(kafkaMessage);
return replyingKafkaService.sendMessageAndGetReply(kafkaRequestTopic,
kafkaResponseTopic, jsonMessage
);
}
private boolean validateSignerAndMchd(VerifyDocumentSignResponse verifyDocumentSignResponse,
EmployeeModel chiefModel, UploadOrgInfo uploadOrgInfo, MultipartFile mchdFile,
String accessToken, FileInfo fileInfo, FileInfo signFileInfo,
FileInfo mchdFileInfo, String ervuId, String response) {
String signerInfo = verifyDocumentSignResponse.getSignerInfo();
Map<String, String> signerInfoMap = parseKeyValuePairs(signerInfo);
String chiefMiddleName = chiefModel.getPerson().getMiddleName();
String chiefLastName = chiefModel.getPerson().getLastName();
String chiefFirstName = chiefModel.getPerson().getFirstName();
boolean isSignerValid = signerInfoMap.get("SN").equalsIgnoreCase(chiefLastName) &&
signerInfoMap.get("G")
.equalsIgnoreCase(chiefFirstName + " " + chiefMiddleName) &&
signerInfoMap.get("O").equalsIgnoreCase(uploadOrgInfo.getOrgName());
if (isSignerValid) {
FileStatusResponse fileStatusResponse = new FileStatusResponse(uploadOrgInfo,
new FileInfo[] {fileInfo, signFileInfo}, fileInfo.getFileStatus()
);
try {
if (sendMessage(fileStatusResponse)) {
interactionService.updateStatus(fileInfo.getFileId(), "Направлено в ЕРВУ", ervuId);
return true;
}
else {
interactionService.delete(fileInfo.getFileId(), ervuId);
return false;
}
}
catch (JsonProcessingException e) {
handeSignError(fileInfo, signFileInfo, uploadOrgInfo, response);
throw new JsonParsingException(
String.format("Fail get json from: %s", fileStatusResponse), e);
}
}
if (mchdFile == null) {
handleMchdValidationError(uploadOrgInfo, fileInfo, signFileInfo, null, ervuId, response
);
throw new LocalizedException("mchd_null", MESSAGE_SOURCE);
}
FileStatusResponse fileStatusResponse = new FileStatusResponse(uploadOrgInfo,
new FileInfo[] {fileInfo, signFileInfo, mchdFileInfo}, fileInfo.getFileStatus()
);
try {
validateMchd(mchdFile, accessToken, signerInfoMap.get("SN") + " " + signerInfoMap.get("G"),
chiefFirstName, chiefLastName, chiefMiddleName
);
if (sendMessage(fileStatusResponse)) {
interactionService.updateStatus(fileInfo.getFileId(), "Направлено в ЕРВУ", ervuId);
return true;
}
else {
interactionService.delete(fileInfo.getFileId(), ervuId);
return false;
}
}
catch (JsonProcessingException e) {
handleMchdValidationError(uploadOrgInfo, fileInfo, signFileInfo, mchdFileInfo, ervuId, response
);
throw new JsonParsingException(
String.format("Fail get json from: %s", fileStatusResponse), e);
}
catch (Exception e) {
handleMchdValidationError(uploadOrgInfo, fileInfo, signFileInfo, mchdFileInfo, ervuId, response
);
throw e;
}
}
private void handleMchdValidationError(UploadOrgInfo uploadOrgInfo,
FileInfo fileInfo, FileInfo signFileInfo, FileInfo mchdFileInfo, String ervuId, String response) {
FileStatus packInfo = new FileStatus();
packInfo.setCode(MCHD_INVALID.getCode());
packInfo.setStatus("Некорректная МЧД");
packInfo.setDescription("Неуспешная проверка МЧД");
interactionService.updateStatus(fileInfo.getFileId(), packInfo.getStatus(), ervuId);
FileInfo[] fileInfos;
if (mchdFileInfo != null) {
mchdFileInfo.setFileStatus(packInfo);
fileInfos = new FileInfo[] {fileInfo, signFileInfo, mchdFileInfo};
}
else {
fileInfos = new FileInfo[] {fileInfo, signFileInfo};
}
auditService.processUploadEvent(uploadOrgInfo, fileInfos, packInfo);
clearS3(response);
}
private boolean isValidCsvWithSig(MultipartFile file, MultipartFile signFile) {
if (file == null || signFile == null || file.getOriginalFilename() == null
|| signFile.getOriginalFilename() == null) {
return false;
}
String fileName = multipartFile.getOriginalFilename();
Tika tika = new Tika();
MimeTypes defaultMimeTypes = MimeTypes.getDefaultMimeTypes();
try {
String contentType = new Tika().detect(multipartFile.getBytes());
MimeTypes defaultMimeTypes = MimeTypes.getDefaultMimeTypes();
MimeType mimeType = defaultMimeTypes.forName(contentType);
boolean isCsv = mimeType.getType().equals(MediaType.TEXT_PLAIN)
&& fileName.toLowerCase(Locale.getDefault()).endsWith(".csv");
String fileName = file.getOriginalFilename().toLowerCase(Locale.getDefault());
String signFileName = signFile.getOriginalFilename().toLowerCase(Locale.getDefault());
String fileContentType = tika.detect(file.getBytes());
String signContentType = tika.detect(signFile.getBytes());
MimeType fileMimeType = defaultMimeTypes.forName(fileContentType);
MimeType signMimeType = defaultMimeTypes.forName(signContentType);
boolean isCsv =
MediaType.TEXT_PLAIN.equals(fileMimeType.getType()) && fileName.endsWith(".csv");
boolean isSig =
"application/pkcs7-signature".equals(signMimeType.toString()) && signFileName.endsWith(
".sig");
if (!isCsv) {
LOGGER.info("Trying to upload file={} with wrong mime type={}",
fileName, mimeType
);
LOGGER.info("Invalid main file: name={}, mimeType={}", fileName, fileMimeType);
}
return isCsv;
if (!isSig) {
LOGGER.info("Invalid signature file: name={}, mimeType={}", signFileName, signMimeType);
}
return isCsv && isSig;
}
catch (MimeTypeException e) {
LOGGER.error(
"Couldn't get mime type from bytes for file=" + fileName, e);
return false;
}
catch (IOException e) {
LOGGER.error("Error while checking file type, file=" + fileName,
e
catch (MimeTypeException | IOException e) {
LOGGER.error("Failed to process files: {}, {}", file.getOriginalFilename(),
signFile.getOriginalFilename(), e
);
return false;
}
}
private boolean sendMessage(String message) {
ProducerRecord<String, String> record = new ProducerRecord<>(this.kafkaTopicName, message);
private String getJsonKafkaMessage(EmployeeInfoKafkaMessage employeeInfoKafkaMessage) {
try {
return objectMapper.writeValueAsString(employeeInfoKafkaMessage);
}
catch (JsonProcessingException e) {
throw new JsonParsingException(
String.format("Fail get json from: %s", employeeInfoKafkaMessage), e);
}
}
@KafkaListener(id = "${av.kafka.group.id}", topics = "${ervu.kafka.download.response}",
containerFactory = "fileContainerFactory")
public void listenKafka(String kafkaMessage) {
try {
FileStatusResponse fileStatusResponse = objectMapper.readValue(kafkaMessage, FileStatusResponse.class);
FileInfo fileInfo = Arrays.stream(fileStatusResponse.filesInfo())
.filter(fileInfo1 -> fileInfo1.getType().equals(DOCUMENT))
.findFirst()
.get();
String statusCode = fileStatusResponse.packInfo().getCode();
if (FILE_ACCEPTED.getCode().equals(statusCode)) {
interactionService.delete(fileInfo.getFileId(), fileStatusResponse.orgInfo().getOrgId());
}
}
catch (JsonProcessingException e) {
throw new JsonParsingException(String.format("Fail get json from: %s", kafkaMessage), e);
}
}
/**
 * Sends the document and its detached signature to the external sign-verification
 * service and returns the parsed verification result.
 *
 * <p>HTTP status handling: 401 maps to a localized "file_sign_validate" error;
 * 500 responses carry an {@code error_code} field that maps to specific localized
 * certificate errors; any other non-200 status is a generic upload failure.
 *
 * @param file the signed document
 * @param signFile the detached signature file
 * @return the parsed verification response
 * @throws LocalizedException for user-facing verification failures
 * @throws FileUploadException for unexpected service errors or I/O failures
 */
private VerifyDocumentSignResponse validateSign(MultipartFile file, MultipartFile signFile) {
    try (CloseableHttpClient httpClient = HttpClients.createDefault()) {
        HttpPost upload = new HttpPost(esiaConfig.getFileSignVerifyUrl());
        HttpEntity multipart = MultipartEntityBuilder.create()
            .addBinaryBody("data", file.getBytes(), ContentType.APPLICATION_OCTET_STREAM,
                file.getOriginalFilename()
            )
            .addBinaryBody("sign", signFile.getBytes(), ContentType.APPLICATION_OCTET_STREAM,
                signFile.getOriginalFilename()
            )
            .build();
        upload.setEntity(multipart);
        try (CloseableHttpResponse response = httpClient.execute(upload)) {
            int statusCode = response.getStatusLine().getStatusCode();
            String body = EntityUtils.toString(response.getEntity());
            if (statusCode == 401) {
                throw new LocalizedException("file_sign_validate", MESSAGE_SOURCE);
            }
            if (statusCode == 500) {
                // Parse the error code first; only JSON-shape failures are wrapped.
                // (Previously the switch ran inside this try, so the default-case
                // FileUploadException was caught and double-wrapped.)
                String errorCode;
                try {
                    errorCode = objectMapper.readTree(body)
                        .get("error_code")
                        .asText();
                }
                catch (Exception e) {
                    throw new FileUploadException("Unknown 500 error in verify module. Response body " + body, e);
                }
                switch (errorCode) {
                    case "CERT_TRUST_REVOCATION_STATUS_UNKNOWN" ->
                        throw new LocalizedException("cert_trust_revocation", MESSAGE_SOURCE);
                    case "CERT_TRUST_IS_UNTRUSTED_ROOT" ->
                        throw new LocalizedException("cert_untrusted_root", MESSAGE_SOURCE);
                    case "CERT_TRUST_IS_NOT_TIME_VALID" ->
                        throw new LocalizedException("cert_is_not_time_valid", MESSAGE_SOURCE);
                    default -> throw new FileUploadException(
                        "Unknown 500 error in verify module. Error code " + errorCode);
                }
            }
            if (statusCode != 200) {
                throw new FileUploadException("Unknown error in verify module. StatusCode " + statusCode + " .Response body " + body);
            }
            return objectMapper.readValue(body, VerifyDocumentSignResponse.class);
        }
    }
    catch (IOException e) {
        throw new FileUploadException("Failed to process sign module response ", e);
    }
}
/**
 * Validates an MCHD (machine-readable power of attorney) uploaded by the user:
 * the root MCHD must be active ("A"), the agent FIO must match one of its agents,
 * every parent in the delegation chain must be active, and the chief of the
 * top-most principal organization must match the expected FIO.
 *
 * @param mchdFile the uploaded MCHD XML file
 * @param accessToken token used to query the MCHD registry
 * @param agentFio expected agent's "Last First Middle" name
 * @param chiefFirstName expected chief's first name
 * @param chiefLastName expected chief's last name
 * @param chiefMiddleName expected chief's middle name (may be absent)
 * @throws LocalizedException on any validation failure
 */
private void validateMchd(MultipartFile mchdFile, String accessToken, String agentFio,
    String chiefFirstName, String chiefLastName, String chiefMiddleName) {
    String mchdGuid;
    try {
        mchdGuid = getMchdGuid(mchdFile);
    }
    catch (Exception e) {
        throw new LocalizedException("mchd_cant_parse", MESSAGE_SOURCE);
    }
    MchdInfoModel mchdInfoModel = ulDataService.getMchdInfoModel(mchdGuid, accessToken);
    // Constant-first comparison avoids an NPE if the registry returns a null status.
    if (!"A".equals(mchdInfoModel.getStatus())) {
        throw new LocalizedException("mchd_expired", MESSAGE_SOURCE);
    }
    boolean validAgent = mchdInfoModel.getAgents()
        .getElements()
        .stream()
        .anyMatch(agent -> agentFio.equalsIgnoreCase(
            agent.getPerson().getLastName() + " " + agent.getPerson().getFirstName() + " "
                + agent.getPerson().getMiddleName()));
    if (!validAgent) {
        throw new LocalizedException("mchd_validate_agent", MESSAGE_SOURCE);
    }
    // Walk up the delegation chain; every ancestor MCHD must also be active.
    while (mchdInfoModel.getParentGuid() != null) {
        mchdInfoModel = ulDataService.getMchdInfoModel(mchdInfoModel.getParentGuid(), accessToken);
        if (!"A".equals(mchdInfoModel.getStatus())) {
            throw new LocalizedException("mchd_tree_expired", MESSAGE_SOURCE);
        }
    }
    MchdInfoModel.Element principal = mchdInfoModel.getPrincipals().getElements().get(0);
    MchdInfoModel.Person chief = principal.getOrganization().getChief();
    // Null-safe middle-name comparison: a chief without a middle name previously
    // caused an NPE here instead of a validation result.
    boolean principalFioEquals = chief.getFirstName().equalsIgnoreCase(chiefFirstName)
        && chief.getLastName().equalsIgnoreCase(chiefLastName)
        && equalsIgnoreCaseNullSafe(chief.getMiddleName(), chiefMiddleName);
    if (!principalFioEquals) {
        throw new LocalizedException("mchd_validate_principal", MESSAGE_SOURCE);
    }
}

/** Case-insensitive equality that treats two nulls as equal (people without a middle name). */
private static boolean equalsIgnoreCaseNullSafe(String a, String b) {
    return a == null ? b == null : a.equalsIgnoreCase(b);
}
/**
 * Parses a comma-separated list of {@code key=value} pairs into a map.
 * Entries without an '=' are skipped; keys and values are trimmed. Only the
 * first '=' splits an entry, so values may themselves contain '='.
 *
 * @param input the raw comma-separated pair list
 * @return a mutable map of trimmed keys to trimmed values
 */
private Map<String, String> parseKeyValuePairs(String input) {
    Map<String, String> result = new HashMap<>();
    for (String entry : input.split(",")) {
        int separator = entry.indexOf('=');
        if (separator >= 0) {
            result.put(entry.substring(0, separator).trim(),
                entry.substring(separator + 1).trim());
        }
    }
    return result;
}
/**
 * Extracts the MCHD GUID from the uploaded power-of-attorney XML: the value of
 * the "НомДовер" attribute on the first "СвДов" element.
 *
 * <p>The parser is hardened against XXE: the file comes from an untrusted upload,
 * so DTDs and external entities are disabled (OWASP XXE-prevention settings).
 *
 * @param mchdFile the uploaded MCHD XML file
 * @return the MCHD GUID
 * @throws FileUploadException if the expected element/attribute is missing
 * @throws Exception on parser configuration or XML parsing failures
 */
private String getMchdGuid(MultipartFile mchdFile) throws Exception {
    DocumentBuilderFactory factory = DocumentBuilderFactory.newInstance();
    // Untrusted input: forbid DOCTYPEs and external entity resolution (XXE).
    factory.setFeature("http://apache.org/xml/features/disallow-doctype-decl", true);
    factory.setFeature("http://xml.org/sax/features/external-general-entities", false);
    factory.setFeature("http://xml.org/sax/features/external-parameter-entities", false);
    factory.setXIncludeAware(false);
    factory.setExpandEntityReferences(false);
    Document doc = factory.newDocumentBuilder().parse(mchdFile.getInputStream());
    doc.getDocumentElement().normalize();
    Node node = doc.getElementsByTagName("СвДов").item(0);
    if (node != null && node.getAttributes().getNamedItem("НомДовер") != null) {
        return node.getAttributes().getNamedItem("НомДовер").getNodeValue();
    }
    else {
        throw new FileUploadException("Cannot parse mchd guid");
    }
}
private boolean sendMessage(FileStatusResponse fileStatusResponse) throws JsonProcessingException {
String message = objectMapper.writeValueAsString(fileStatusResponse);
ProducerRecord<String, String> record = new ProducerRecord<>(this.kafkaDownloadRequestTopic, message);
record.headers()
.add("messageId", UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
@ -188,39 +620,55 @@ public class EmployeeInfoFileUploadService {
}
}
private String getJsonKafkaMessage(EmployeeInfoKafkaMessage employeeInfoKafkaMessage) {
ObjectMapper mapper = new ObjectMapper();
private void clearS3(String message) {
ProducerRecord<String, String> record = new ProducerRecord<>(this.kafkaClearS3Topic, message);
record.headers()
.add("messageId", UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8));
try {
return mapper.writeValueAsString(employeeInfoKafkaMessage);
this.kafkaTemplate.send(record).get();
}
catch (JsonProcessingException e) {
throw new JsonParsingException(
String.format("Fail get json from: %s", employeeInfoKafkaMessage), e);
catch (Exception exception) {
LOGGER.error("Failed to clear s3", exception);
}
}
@KafkaListener(id = "${av.kafka.group.id}", topics = "${av.kafka.download.response}",
containerFactory = "avContainerFactory")
public void listenKafka(String kafkaMessage) {
ObjectMapper mapper = new ObjectMapper();
try {
DownloadResponse downloadResponse = mapper.readValue(kafkaMessage, DownloadResponse.class);
FileInfo fileInfo = downloadResponse.fileInfo();
String statusCode = fileInfo.getFileStatus().getCode();
if (Arrays.asList(FILE_INFECTED.getCode(), FILE_CLEAN.getCode()).contains(statusCode)) {
interactionService.delete(fileInfo.getFileId(), downloadResponse.orgInfo().getOrgId());
}
else if (statusCode.equals(FILE_NOT_CHECKED.getCode())) {
auditService.processUploadEvent(downloadResponse.orgInfo(), downloadResponse.fileInfo());
interactionService.updateStatus(fileInfo.getFileId(), fileInfo.getFileStatus().getStatus(),
downloadResponse.orgInfo().getOrgId()
);
}
private DownloadResponse processMessageFromAv(String response) throws JsonProcessingException {
DownloadResponse downloadResponse = objectMapper.readValue(response, DownloadResponse.class);
FileInfo avFile = Arrays.stream(downloadResponse.filesInfo())
.filter(fileInfo1 -> fileInfo1.getType().equals(DOCUMENT))
.findAny()
.get();
FileStatus fileStatus1 = avFile.getFileStatus();
if (fileStatus1.getCode().equals(FILE_INFECTED.getCode())) {
interactionService.updateStatus(avFile.getFileId(), avFile.getFileStatus().getStatus(),
downloadResponse.orgInfo().getOrgId());
FileStatusResponse fileStatusResponse = new FileStatusResponse(downloadResponse.orgInfo(),
downloadResponse.filesInfo(), fileStatus1
);
sendMessage(fileStatusResponse);
throw new LocalizedException("av_file_infected", MESSAGE_SOURCE);
}
catch (JsonProcessingException e) {
throw new JsonParsingException(String.format("Fail get json from: %s", kafkaMessage), e);
else if (fileStatus1.getCode().equals(FILE_NOT_CHECKED.getCode())) {
interactionService.updateStatus(avFile.getFileId(), avFile.getFileStatus().getStatus(),
downloadResponse.orgInfo().getOrgId());
auditService.processUploadEvent(downloadResponse.orgInfo(), downloadResponse.filesInfo(), fileStatus1);
throw new FileUploadException("File not checked: " + avFile.getFileName());
}
return downloadResponse;
}
/**
 * Marks the pack as having an invalid electronic signature: stamps the signature
 * file with a SIGN_INVALID status, updates the document's journal status, records
 * an audit event for both files and asks the pipeline to clear the S3 payload.
 *
 * <p>NOTE(review): the method name contains a typo ("hande" → "handle"); kept
 * unchanged because callers reference it by this name.
 *
 * @param fileInfo the document file
 * @param signFileInfo the detached signature file
 * @param uploadOrgInfo the uploading organization
 * @param response raw response payload forwarded to the S3-clearing topic
 */
private void handeSignError(FileInfo fileInfo, FileInfo signFileInfo, UploadOrgInfo uploadOrgInfo, String response) {
    FileStatus invalidSignStatus = new FileStatus();
    invalidSignStatus.setCode(SIGN_INVALID.getCode());
    invalidSignStatus.setStatus("Некорректная ЭП");
    invalidSignStatus.setDescription("Неуспешная проверка ЭП");
    signFileInfo.setFileStatus(invalidSignStatus);
    interactionService.updateStatus(fileInfo.getFileId(), invalidSignStatus.getStatus(),
        uploadOrgInfo.getOrgId()
    );
    auditService.processUploadEvent(uploadOrgInfo, new FileInfo[] {fileInfo, signFileInfo}, invalidSignStatus);
    clearS3(response);
}
}

View file

@ -26,25 +26,32 @@ public class EmployeeInfoKafkaMessageService {
public EmployeeInfoKafkaMessage getKafkaMessage(String fileId, String fileUrl, String fileName,
EmployeeInfoFileFormType formType, String departureDateTime, String accessToken,
String offset, FileStatus fileStatus, String ervuId, String prnOid,
PersonModel personModel, long fileSize) {
PersonModel personModel, long fileSize, String type) {
return new EmployeeInfoKafkaMessage(
getOrgInfo(accessToken, ervuId, prnOid, personModel),
getFileInfo(
fileId,
fileUrl,
fileName,
formType,
departureDateTime,
offset,
fileStatus,
fileSize
)
new FileInfo[] {
getFileInfo(
fileId,
fileUrl,
fileName,
formType,
departureDateTime,
offset,
fileStatus,
fileSize,
type
),
}
);
}
private FileInfo getFileInfo(String fileId, String fileUrl, String fileName,
public EmployeeInfoKafkaMessage getKafkaMessage(UploadOrgInfo orgInfo, FileInfo[] filesInfo) {
return new EmployeeInfoKafkaMessage(orgInfo, filesInfo);
}
public FileInfo getFileInfo(String fileId, String fileUrl, String fileName,
EmployeeInfoFileFormType formType, String departureDateTime, String offset,
FileStatus fileStatus, long fileSize) {
FileStatus fileStatus, long fileSize, String type) {
return new FileInfo(
fileId,
fileUrl,
@ -54,11 +61,12 @@ public class EmployeeInfoKafkaMessageService {
String.valueOf(fileSize),
departureDateTime,
offset,
fileStatus
fileStatus,
type
);
}
private UploadOrgInfo getOrgInfo(String accessToken, String ervuId, String prnOid, PersonModel personModel) {
public UploadOrgInfo getOrgInfo(String accessToken, String ervuId, String prnOid, PersonModel personModel) {
OrganizationModel organizationModel = ulDataService.getOrganizationModel(accessToken);
SenderInfo senderInfo = new SenderInfo();
senderInfo.setFirstName(personModel.getFirstName());

View file

@ -21,10 +21,10 @@ public final class AuditConstants {
);
private static final Map<Integer, String> downloadTypes = Map.of(
1, "Выписка из журнала взаимодействия ЮЛ"
1, "Выписка из журнала взаимодействия ЮЛ",
2, "Выгрузка описания ошибок по непринятым записям"
);
private AuditConstants() {
}

View file

@ -2,6 +2,7 @@ package ru.micord.ervu.audit.model;
import ervu.model.fileupload.FileInfo;
import ervu.model.fileupload.FileStatus;
import ervu.model.fileupload.UploadOrgInfo;
/**
@ -10,11 +11,13 @@ import ervu.model.fileupload.UploadOrgInfo;
public class AuditUploadEvent {
private UploadOrgInfo orgInfo;
private FileInfo fileInfo;
private FileInfo[] filesInfo;
private FileStatus packInfo;
public AuditUploadEvent(UploadOrgInfo orgInfo, FileInfo fileInfo) {
public AuditUploadEvent(UploadOrgInfo orgInfo, FileInfo[] filesInfo, FileStatus packInfo) {
this.orgInfo = orgInfo;
this.fileInfo = fileInfo;
this.filesInfo = filesInfo;
this.packInfo = packInfo;
}
public UploadOrgInfo getOrgInfo() {
@ -25,11 +28,19 @@ public class AuditUploadEvent {
this.orgInfo = orgInfo;
}
public FileInfo getFileInfo() {
return fileInfo;
public FileInfo[] getFilesInfo() {
return filesInfo;
}
public void setFileInfo(FileInfo fileInfo) {
this.fileInfo = fileInfo;
public void setFilesInfo(FileInfo[] filesInfo) {
this.filesInfo = filesInfo;
}
public FileStatus getPackInfo() {
return packInfo;
}
public void setPackInfo(FileStatus packInfo) {
this.packInfo = packInfo;
}
}

View file

@ -3,6 +3,7 @@ package ru.micord.ervu.audit.service;
import javax.servlet.http.HttpServletRequest;
import ervu.model.fileupload.FileInfo;
import ervu.model.fileupload.FileStatus;
import ervu.model.fileupload.UploadOrgInfo;
import ru.micord.ervu.audit.model.AuditActionRequest;
import ru.micord.ervu.kafka.model.OrgInfo;
@ -16,7 +17,7 @@ public interface AuditService {
void processAuthEvent(HttpServletRequest request, OrgInfo orgInfo, String prnOid, String status,
String eventType);
void processUploadEvent(UploadOrgInfo uploadOrgInfo, FileInfo fileInfo);
void processUploadEvent(UploadOrgInfo uploadOrgInfo, FileInfo[] filesInfo, FileStatus packInfo);
void processDownloadEvent(HttpServletRequest request, long fileSize, String fileName,
int formatRegistry, String status, String s3FileUrl);

View file

@ -9,6 +9,7 @@ import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import ervu.model.fileupload.FileInfo;
import ervu.model.fileupload.FileStatus;
import ervu.model.fileupload.UploadOrgInfo;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Conditional;
@ -110,10 +111,11 @@ public class BaseAuditService implements AuditService {
}
@Override
public void processUploadEvent(UploadOrgInfo orgInfo, FileInfo fileInfo) {
public void processUploadEvent(UploadOrgInfo orgInfo, FileInfo[] filesInfo, FileStatus packInfo) {
AuditUploadEvent auditUploadEvent = new AuditUploadEvent(
orgInfo,
fileInfo
filesInfo,
packInfo
);
String message = convertToMessage(auditUploadEvent);

View file

@ -3,6 +3,7 @@ package ru.micord.ervu.audit.service.impl;
import javax.servlet.http.HttpServletRequest;
import ervu.model.fileupload.FileInfo;
import ervu.model.fileupload.FileStatus;
import ervu.model.fileupload.UploadOrgInfo;
import org.springframework.context.annotation.Conditional;
import org.springframework.stereotype.Service;
@ -26,7 +27,7 @@ public class StubAuditService implements AuditService {
String status, String eventType) {}
@Override
public void processUploadEvent(UploadOrgInfo uploadOrgInfo, FileInfo fileInfo) {}
public void processUploadEvent(UploadOrgInfo uploadOrgInfo, FileInfo[] filesInfo, FileStatus packInfo) {}
@Override
public void processDownloadEvent(HttpServletRequest request, long fileSize, String fileName,

View file

@ -0,0 +1,88 @@
package ru.micord.ervu.controller;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import javax.servlet.http.HttpServletRequest;
import com.google.protobuf.ByteString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import ru.micord.ervu.audit.constants.AuditConstants;
import ru.micord.ervu.audit.service.AuditService;
import ru.micord.ervu.model.ValidateExportResponse;
import ru.micord.ervu.security.webbpm.jwt.UserIdsPair;
import ru.micord.ervu.security.webbpm.jwt.util.SecurityUtil;
import ru.micord.ervu.service.ValidationFileService;
/**
 * REST endpoint that streams the "error descriptions for rejected records" export
 * to the caller. Every request — success or failure — is recorded as a download
 * audit event with format-registry code 2.
 */
@RestController
@RequestMapping("/validate")
public class ValidationFileController {
    private static final Logger LOGGER = LoggerFactory.getLogger(ValidationFileController.class);
    private final AuditService auditService;
    private final ValidationFileService validationFileService;

    public ValidationFileController(AuditService auditService,
        ValidationFileService validationFileService) {
        this.auditService = auditService;
        this.validationFileService = validationFileService;
    }

    /**
     * Streams the export file for the given file id as an attachment.
     *
     * @param servletRequest the incoming request (used for audit metadata)
     * @param fileId id of the export file; blank ids yield 404
     * @return 200 with the file stream, 404 for a blank id, 204 when the
     *         service response carries no content
     */
    @GetMapping("/export/{fileId}")
    public ResponseEntity<Resource> exportFile(HttpServletRequest servletRequest,
        @PathVariable String fileId) {
        int size = 0;
        String fileName = null;
        String status = null;
        try {
            if (!StringUtils.hasText(fileId)) {
                // Audit blank-id requests as failures instead of a null status.
                status = AuditConstants.FAILURE_STATUS_TYPE;
                return ResponseEntity.notFound().build();
            }
            UserIdsPair userIdsPair = SecurityUtil.getUserIdsPair();
            String ervuId = userIdsPair.getErvuId();
            ValidateExportResponse validateExportResponse = validationFileService.exportFile(ervuId,
                fileId
            );
            if (!validateExportResponse.hasFile()) {
                LOGGER.error("Response does not contain file content for fileId: {}, user: {}", fileId, ervuId);
                status = AuditConstants.FAILURE_STATUS_TYPE;
                return ResponseEntity.noContent().build();
            }
            ByteString file = validateExportResponse.getFile();
            size = file.size();
            fileName = validateExportResponse.getFileName();
            // RFC 5987 filename* requires percent-encoding; URLEncoder is
            // form-encoding and turns spaces into '+', so map '+' back to %20.
            String encodedFilename = URLEncoder.encode(fileName, StandardCharsets.UTF_8)
                .replace("+", "%20");
            InputStreamResource resource = new InputStreamResource(file.newInput());
            status = AuditConstants.SUCCESS_STATUS_TYPE;
            return ResponseEntity.ok()
                .header(HttpHeaders.CONTENT_DISPOSITION,
                    "attachment; filename*=UTF-8''" + encodedFilename
                )
                .contentType(MediaType.APPLICATION_OCTET_STREAM)
                .body(resource);
        }
        catch (Exception e) {
            if (status == null) {
                status = AuditConstants.FAILURE_STATUS_TYPE;
            }
            throw e;
        }
        finally {
            // Always audit the download attempt, whatever the outcome.
            auditService.processDownloadEvent(servletRequest, size, fileName, 2, status, null);
        }
    }
}

View file

@ -0,0 +1,14 @@
package ru.micord.ervu.exception;
/**
* @author Adel Kalimullin
*/
public class ExportException extends RuntimeException {
/** Creates an export failure with a human-readable description. */
public ExportException(String message) {
super(message);
}
/** Creates an export failure with a description and the underlying cause. */
public ExportException(String message, Throwable cause) {
super(message, cause);
}
}

View file

@ -2,14 +2,18 @@ package ru.micord.ervu.journal;
public class JournalDto {
private Integer documentNumber;
private String fileId;
private String departureDateTime;
private String fileName;
private Integer filePatternCode;
private String senderFio;
private String status;
public Integer filesSentCount;
public Integer acceptedFilesCount;
private Integer rowsCount;
private Integer rowsSuccess;
private Integer rowsError;
private Boolean hasFailedRows;
public String getDepartureDateTime() {
return departureDateTime;
@ -65,21 +69,48 @@ public class JournalDto {
return this;
}
public Integer getFilesSentCount() {
return filesSentCount;
public Integer getRowsCount() {
return rowsCount;
}
public JournalDto setFilesSentCount(Integer filesSentCount) {
this.filesSentCount = filesSentCount;
public JournalDto setRowsCount(Integer rowsCount) {
this.rowsCount = rowsCount;
return this;
}
public Integer getAcceptedFilesCount() {
return acceptedFilesCount;
public Integer getRowsSuccess() {
return rowsSuccess;
}
public JournalDto setAcceptedFilesCount(Integer acceptedFilesCount) {
this.acceptedFilesCount = acceptedFilesCount;
public JournalDto setRowsSuccess(Integer rowsSuccess) {
this.rowsSuccess = rowsSuccess;
return this;
}
public Integer getRowsError() {
return rowsError;
}
public JournalDto setRowsError(Integer rowsError) {
this.rowsError = rowsError;
return this;
}
/** @return the sequential number of this record in the journal view */
public Integer getDocumentNumber() {
return documentNumber;
}
/** Fluent setter for the journal record's sequential number. */
public JournalDto setDocumentNumber(Integer documentNumber) {
this.documentNumber = documentNumber;
return this;
}
/** @return whether this record has rejected rows (presumably drives the error-export link — TODO confirm) */
public Boolean isHasFailedRows() {
return hasFailedRows;
}
/** Fluent setter for the rejected-rows flag. */
public JournalDto setHasFailedRows(Boolean hasFailedRows) {
this.hasFailedRows = hasFailedRows;
return this;
}
}

View file

@ -1,6 +1,7 @@
package ru.micord.ervu.journal;
import java.time.LocalDateTime;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.databind.annotation.JsonDeserialize;
@ -9,78 +10,19 @@ import ru.micord.ervu.journal.deserializer.DepartureDateTimeDeserializer;
@JsonIgnoreProperties(ignoreUnknown = true)
public class JournalFileInfo {
private String fileId; //ИД файла полученный при создании записи о файле в реестр организаций (в ЕРВУ)
private String fileName; // Название файла
private Integer filePatternCode; // Номер шаблона(Формы)
private String filePatternName;
@JsonDeserialize(using = DepartureDateTimeDeserializer.class)
private LocalDateTime departureDateTime; // Дата-время отправки файла
private String timeZone; //Таймзона
private JournalFileStatus fileStatus;
private List<JournalFileDetails> packFiles; // Список файлов (csv, sig, mchd)
private JournalFileStatus packInfo; //общий статус по пакету
private SenderInfo senderInfo;
private Integer rowsCount; //Общее количество записей отправленных в файле
private Integer rowsSuccess; //Количество записей принятых в файле
private Integer rowsError; //Количество записей непринятых в файле
public String getFileId() {
return fileId;
public List<JournalFileDetails> getPackFiles() {
return packFiles;
}
public JournalFileInfo setFileId(String fileId) {
this.fileId = fileId;
return this;
}
public String getFileName() {
return fileName;
}
public JournalFileInfo setFileName(String fileName) {
this.fileName = fileName;
return this;
}
public Integer getFilePatternCode() {
return filePatternCode;
}
public JournalFileInfo setFilePatternCode(Integer filePatternCode) {
this.filePatternCode = filePatternCode;
return this;
}
public String getFilePatternName() {
return filePatternName;
}
public JournalFileInfo setFilePatternName(String filePatternName) {
this.filePatternName = filePatternName;
return this;
}
public LocalDateTime getDepartureDateTime() {
return departureDateTime;
}
public JournalFileInfo setDepartureDateTime(LocalDateTime departureDateTime) {
this.departureDateTime = departureDateTime;
return this;
}
public String getTimeZone() {
return timeZone;
}
public JournalFileInfo setTimeZone(String timeZone) {
this.timeZone = timeZone;
return this;
}
public JournalFileStatus getFileStatus() {
return fileStatus;
}
public JournalFileInfo setFileStatus(JournalFileStatus fileStatus) {
this.fileStatus = fileStatus;
public JournalFileInfo setPackFiles(List<JournalFileDetails> packFiles) {
this.packFiles = packFiles;
return this;
}
@ -110,4 +52,106 @@ public class JournalFileInfo {
this.rowsSuccess = rowsSuccess;
return this;
}
}
public Integer getRowsError() {
return rowsError;
}
public JournalFileInfo setRowsError(Integer rowsError) {
this.rowsError = rowsError;
return this;
}
public JournalFileStatus getPackInfo() {
return packInfo;
}
public JournalFileInfo setPackInfo(JournalFileStatus packInfo) {
this.packInfo = packInfo;
return this;
}
/**
 * Details of a single file within an interaction pack (csv, sig, mchd):
 * identity, form/pattern, departure time, per-file status and its role type.
 */
@JsonIgnoreProperties(ignoreUnknown = true)
public static class JournalFileDetails {
    // File id issued when the file record was created in the organisation registry (ERVU).
    private String fileId;
    // Original file name.
    private String fileName;
    // Form (template) number.
    private Integer filePatternCode;
    private String filePatternName;
    // Date-time the file was sent.
    @JsonDeserialize(using = DepartureDateTimeDeserializer.class)
    private LocalDateTime departureDateTime;
    // Sender's time zone.
    private String timeZone;
    private JournalFileStatus fileStatus;
    // File role inside the pack — assumed values like document/signature; TODO confirm.
    private String type;

    public String getFileId() {
        return fileId;
    }

    public JournalFileDetails setFileId(String fileId) {
        this.fileId = fileId;
        return this;
    }

    public String getFileName() {
        return fileName;
    }

    public JournalFileDetails setFileName(String fileName) {
        this.fileName = fileName;
        return this;
    }

    public Integer getFilePatternCode() {
        return filePatternCode;
    }

    public JournalFileDetails setFilePatternCode(Integer filePatternCode) {
        this.filePatternCode = filePatternCode;
        return this;
    }

    public String getFilePatternName() {
        return filePatternName;
    }

    public JournalFileDetails setFilePatternName(String filePatternName) {
        this.filePatternName = filePatternName;
        return this;
    }

    public LocalDateTime getDepartureDateTime() {
        return departureDateTime;
    }

    public JournalFileDetails setDepartureDateTime(LocalDateTime departureDateTime) {
        this.departureDateTime = departureDateTime;
        return this;
    }

    public String getTimeZone() {
        return timeZone;
    }

    public JournalFileDetails setTimeZone(String timeZone) {
        this.timeZone = timeZone;
        return this;
    }

    public JournalFileStatus getFileStatus() {
        return fileStatus;
    }

    public JournalFileDetails setFileStatus(JournalFileStatus fileStatus) {
        this.fileStatus = fileStatus;
        return this;
    }

    public String getType() {
        return type;
    }

    /**
     * Fluent setter for the file role type. Returns {@code this} for consistency
     * with every other setter in this class (source-compatible with the previous
     * {@code void} signature).
     */
    public JournalFileDetails setType(String type) {
        this.type = type;
        return this;
    }
}
}

View file

@ -11,20 +11,22 @@ import static ru.micord.ervu.util.StringUtils.convertToFio;
public class JournalDtoMapper {
public static JournalDto mapToJournalDto(JournalFileInfo journalFileInfo) {
public static JournalDto mapToJournalDto(JournalFileInfo journalFileInfo,
JournalFileInfo.JournalFileDetails journalFileDetails) {
SenderInfo senderInfo = journalFileInfo.getSenderInfo();
return new JournalDto()
.setFileId(journalFileInfo.getFileId())
.setDepartureDateTime(Timestamp.valueOf(journalFileInfo.getDepartureDateTime()).toString())
.setFileName(journalFileInfo.getFileName())
.setFilePatternCode(journalFileInfo.getFilePatternCode())
.setFileId(journalFileDetails.getFileId())
.setDepartureDateTime(Timestamp.valueOf(journalFileDetails.getDepartureDateTime()).toString())
.setFileName(journalFileDetails.getFileName())
.setFilePatternCode(journalFileDetails.getFilePatternCode())
.setSenderFio(convertToFio(senderInfo.getFirstName(), senderInfo.getMiddleName(),
senderInfo.getLastName()
)
)
.setStatus(journalFileInfo.getFileStatus().getStatus())
.setFilesSentCount(journalFileInfo.getRowsCount())
.setAcceptedFilesCount(journalFileInfo.getRowsSuccess());
.setStatus(journalFileInfo.getPackInfo().getStatus())
.setRowsCount(journalFileInfo.getRowsCount())
.setRowsSuccess(journalFileInfo.getRowsSuccess())
.setRowsError(journalFileInfo.getRowsError());
}
public static JournalDto mapToJournalDto(InteractionLogRecord record) {
@ -34,8 +36,9 @@ public class JournalDtoMapper {
.setFilePatternCode(Integer.valueOf(record.getForm()))
.setSenderFio(record.getSender())
.setStatus(record.getStatus())
.setFilesSentCount(record.getRecordsSent())
.setAcceptedFilesCount(record.getRecordsAccepted())
.setRowsCount(0)
.setRowsSuccess(0)
.setRowsError(0)
.setFileId(record.getFileId());
}
}

View file

@ -4,8 +4,10 @@ import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.BytesDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
@ -52,60 +54,104 @@ public class ReplyingKafkaConfig {
private String password;
@Value("${kafka.auth_sasl_mech}")
private String saslMechanism;
@Value("${av.kafka.download.response}")
private String avReplyTopic;
@Value("${ervu.kafka.validate.export.response}")
private String validateReplyTopic;
@Bean("ervuProducerFactory")
@Bean
public ProducerFactory<String, String> producerFactory() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
Map<String, Object> configProps = commonProducerConfig();
configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
configProps.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\""
+ username + "\" password=\"" + password + "\";");
configProps.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
return new DefaultKafkaProducerFactory<>(configProps);
}
@Bean
public ConsumerFactory<String, String> consumerFactory() {
private Map<String, Object> commonProducerConfig() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId + "-" + UUID.randomUUID());
configProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
configProps.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\""
+ username + "\" password=\"" + password + "\";");
configProps.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\"" + username + "\" password=\"" + password + "\";");
configProps.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
return configProps;
}
@Bean("ervuConsumerFactory")
public ConsumerFactory<String, String> ervuConsumerFactory() {
Map<String, Object> configProps = commonConsumerConfig();
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
return new DefaultKafkaConsumerFactory<>(configProps);
}
@Bean("validateConsumerFactory")
public ConsumerFactory<String, Bytes> validateConsumerFactory() {
Map<String, Object> configProps = commonConsumerConfig();
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
return new DefaultKafkaConsumerFactory<>(configProps);
}
private Map<String, Object> commonConsumerConfig() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId + "-" + UUID.randomUUID());
configProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
configProps.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\"" + username + "\" password=\"" + password + "\";");
configProps.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
configProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
return configProps;
}
@Bean("ervuContainerFactory")
public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory =
new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
public ConcurrentKafkaListenerContainerFactory<String, String> ervuContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(ervuConsumerFactory());
return factory;
}
@Bean
public ConcurrentMessageListenerContainer<String, String> replyContainer(
@Qualifier("ervuContainerFactory") ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory) {
return kafkaListenerContainerFactory.createContainer(orgReplyTopic, excerptReplyTopic, journalReplyTopic);
@Bean("validateContainerFactory")
public ConcurrentKafkaListenerContainerFactory<String, Bytes> validateContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, Bytes> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(validateConsumerFactory());
return factory;
}
@Bean
public ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate(
@Qualifier("ervuProducerFactory") ProducerFactory<String, String> producerFactory,
ConcurrentMessageListenerContainer<String, String> replyContainer) {
ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate =
new ReplyingKafkaTemplate<>(producerFactory, replyContainer);
replyingKafkaTemplate.setCorrelationHeaderName("messageId");
replyingKafkaTemplate.setCorrelationIdStrategy(record ->
@Bean("ervuContainer")
public ConcurrentMessageListenerContainer<String, String> ervuContainer(
@Qualifier("ervuContainerFactory") ConcurrentKafkaListenerContainerFactory<String, String> factory) {
return factory.createContainer(orgReplyTopic, excerptReplyTopic,
journalReplyTopic, avReplyTopic);
}
@Bean("validateContainer")
public ConcurrentMessageListenerContainer<String, Bytes> validateContainer(
@Qualifier("validateContainerFactory") ConcurrentKafkaListenerContainerFactory<String, Bytes> factory) {
return factory.createContainer(validateReplyTopic);
}
@Bean("ervuReplyingTemplate")
public ReplyingKafkaTemplate<String, String, String> ervuReplyingTemplate(
ProducerFactory<String, String> producerFactory,
@Qualifier("ervuContainer") ConcurrentMessageListenerContainer<String, String> container) {
ReplyingKafkaTemplate<String, String, String> template = new ReplyingKafkaTemplate<>(producerFactory, container);
customizeTemplate(template);
return template;
}
@Bean("validateReplyingTemplate")
public ReplyingKafkaTemplate<String, String, Bytes> validateReplyingTemplate(
ProducerFactory<String, String> producerFactory,
@Qualifier("validateContainer") ConcurrentMessageListenerContainer<String, Bytes> container) {
ReplyingKafkaTemplate<String, String, Bytes> template = new ReplyingKafkaTemplate<>(producerFactory, container);
customizeTemplate(template);
return template;
}
private void customizeTemplate(ReplyingKafkaTemplate<?, ?, ?> template) {
template.setCorrelationHeaderName("messageId");
template.setCorrelationIdStrategy(record ->
new CorrelationKey(UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8)));
replyingKafkaTemplate.setDefaultReplyTimeout(Duration.ofSeconds(replyTimeout));
replyingKafkaTemplate.setSharedReplyTopic(true);
return replyingKafkaTemplate;
template.setDefaultReplyTimeout(Duration.ofSeconds(replyTimeout));
template.setSharedReplyTopic(true);
}
}

View file

@ -11,7 +11,7 @@ import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RestController;
import ru.micord.ervu.audit.constants.AuditConstants;
import ru.micord.ervu.audit.service.AuditService;
@ -31,7 +31,7 @@ import ru.micord.ervu.util.UrlUtils;
public class ErvuKafkaController {
@Autowired
private ReplyingKafkaService replyingKafkaService;
private ReplyingKafkaService<String, String> replyingKafkaService;
@Autowired
private AuditService auditService;
@ -48,7 +48,7 @@ public class ErvuKafkaController {
@Autowired
private ObjectMapper objectMapper;
@RequestMapping(value = "/kafka/excerpt")
@GetMapping(value = "/kafka/excerpt")
public ResponseEntity<Resource> getExcerptFile(HttpServletRequest request) {
String fileUrl = null;
String fileName = null;

View file

@ -1,8 +1,9 @@
package ru.micord.ervu.kafka.service;
public interface ReplyingKafkaService {
String sendMessageAndGetReply(String requestTopic,
String requestReplyTopic,
String requestMessage);
}
public interface ReplyingKafkaService<T, V> {

    /**
     * Sends {@code requestMessage} to {@code requestTopic} and waits for the
     * correlated reply published on {@code replyTopic}.
     *
     * @param requestTopic   topic the request is published to
     * @param replyTopic     topic the reply is expected on
     * @param requestMessage request payload of type {@code T}
     * @return the reply payload of type {@code V}
     */
    V sendMessageAndGetReply(String requestTopic,
            String replyTopic,
            T requestMessage);
}

View file

@ -6,12 +6,10 @@ import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.requestreply.RequestReplyFuture;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;
import ru.micord.ervu.kafka.exception.KafkaMessageException;
import ru.micord.ervu.kafka.exception.KafkaMessageReplyTimeoutException;
@ -21,35 +19,30 @@ import ru.micord.ervu.kafka.service.ReplyingKafkaService;
* @author Eduard Tihomirov
*/
@Service
public class BaseReplyingKafkaServiceImpl implements ReplyingKafkaService {
public abstract class BaseReplyingKafkaService<T, V> implements ReplyingKafkaService<T, V> {
private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate;
public BaseReplyingKafkaServiceImpl(
ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate) {
this.replyingKafkaTemplate = replyingKafkaTemplate;
}
public String sendMessageAndGetReply(String requestTopic,
String replyTopic,
String requestMessage) {
@Override
public V sendMessageAndGetReply(String requestTopic, String replyTopic, T requestMessage) {
long startTime = System.currentTimeMillis();
ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, replyTopic.getBytes()));
RequestReplyFuture<String, String, String> replyFuture = replyingKafkaTemplate.sendAndReceive(record);
RequestReplyFuture<String, T, V> replyFuture = getTemplate().sendAndReceive(
getProducerRecord(requestTopic, replyTopic, requestMessage));
try {
String result = Optional.ofNullable(replyFuture.get())
.map(ConsumerRecord::value)
.orElseThrow(() -> new KafkaMessageException("Kafka return result is null."));
LOGGER.info("Thread {} - KafkaSendMessageAndGetReply: {} ms",
Thread.currentThread().getId(), System.currentTimeMillis() - startTime);
return result;
ConsumerRecord<String, V> result = Optional.ofNullable(replyFuture.get())
.orElseThrow(() -> new KafkaMessageException("Kafka return result is null"));
LOGGER.info("Thread {} - KafkaSendMessageAndGetReply: {} ms, replyTopic: {}",
Thread.currentThread().getId(), System.currentTimeMillis() - startTime, replyTopic);
return result.value();
}
catch (InterruptedException | ExecutionException e) {
LOGGER.error("Thread {} - KafkaSendMessageAndGetReply: {} ms",
Thread.currentThread().getId(), System.currentTimeMillis() - startTime);
LOGGER.error("Thread {} - KafkaSendMessageAndGetReply: {} ms, replyTopic: {}",
Thread.currentThread().getId(), System.currentTimeMillis() - startTime, replyTopic);
throw new KafkaMessageReplyTimeoutException(e);
}
}
protected abstract ReplyingKafkaTemplate<String, T, V> getTemplate();
protected abstract ProducerRecord<String, T> getProducerRecord(String requestTopic,
String replyTopic, T requestMessage);
}

View file

@ -0,0 +1,31 @@
package ru.micord.ervu.kafka.service.impl;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;
@Service
public class ErvuReplyingKafkaService extends BaseReplyingKafkaService<String, String> {

    private final ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate;

    /**
     * @param replyingKafkaTemplate request/reply template wired to the ERVU reply container.
     */
    public ErvuReplyingKafkaService(
            @Qualifier("ervuReplyingTemplate") ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate) {
        this.replyingKafkaTemplate = replyingKafkaTemplate;
    }

    @Override
    protected ReplyingKafkaTemplate<String, String, String> getTemplate() {
        return replyingKafkaTemplate;
    }

    /**
     * Builds the request record and attaches the reply-topic header read by the
     * reply container.
     */
    @Override
    protected ProducerRecord<String, String> getProducerRecord(String requestTopic, String replyTopic,
            String requestMessage) {
        ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
        // Explicit charset: the no-arg getBytes() uses the JVM default charset,
        // which can differ between environments and corrupt the header bytes.
        record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC,
                replyTopic.getBytes(java.nio.charset.StandardCharsets.UTF_8)));
        return record;
    }
}

View file

@ -0,0 +1,32 @@
package ru.micord.ervu.kafka.service.impl;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;
@Service
public class ValidateReplyingKafkaService extends BaseReplyingKafkaService<String, Bytes> {
private final ReplyingKafkaTemplate<String, String, Bytes> replyingKafkaTemplate;
public ValidateReplyingKafkaService(
ReplyingKafkaTemplate<String, String, Bytes> replyingKafkaTemplate) {
this.replyingKafkaTemplate = replyingKafkaTemplate;
}
@Override
protected ReplyingKafkaTemplate<String, String, Bytes> getTemplate() {
return replyingKafkaTemplate;
}
@Override
protected ProducerRecord<String, String> getProducerRecord(String requestTopic, String replyTopic,
String requestMessage) {
ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, replyTopic.getBytes()));
return record;
}
}

View file

@ -0,0 +1,4 @@
package ru.micord.ervu.model;

/**
 * Kafka request payload asking ERVU to export a validated file.
 *
 * @param orgId  ERVU organization identifier (callers pass the {@code ervuId})
 * @param fileId identifier of the file to export
 */
public record ValidateExportRequest(String orgId, String fileId) {
}

View file

@ -0,0 +1,29 @@
package ru.micord.ervu.model;
import com.google.protobuf.ByteString;
import com.google.protobuf.InvalidProtocolBufferException;
import rtl.pgs.ervu.lkrp.journal.proto.ResponseData;
public class ValidateExportResponse {
    // Immutable view over the protobuf ResponseData reply: exported file name + content.

    private final String fileName;
    private final ByteString file;

    /**
     * Decodes the raw Kafka reply payload.
     *
     * @param bytes serialized {@code ResponseData} protobuf message
     * @throws InvalidProtocolBufferException if the payload is not a valid message
     */
    public ValidateExportResponse(byte[] bytes) throws InvalidProtocolBufferException {
        ResponseData responseData = ResponseData.parseFrom(bytes);
        fileName = responseData.getFileName();
        file = responseData.getFile();
    }

    public String getFileName() {
        return fileName;
    }

    public ByteString getFile() {
        return file;
    }

    /** Whether the reply actually carried file content. */
    public boolean hasFile() {
        // NOTE(review): protobuf getters do not return null, so the null check is
        // defensive; emptiness is the meaningful condition here.
        return file != null && !file.isEmpty();
    }
}

View file

@ -7,5 +7,6 @@ public enum FilterType {
TEXT,
DATE,
NUMBER,
SET
SET,
FILE
}

View file

@ -71,6 +71,9 @@ public class EsiaConfig {
@Value("${esia.login.attempts.count:5}")
private long esiaLoginAttemptsCount;
@Value("${file.sign.verify.url}")
private String fileSignVerifyUrl;
public String getEsiaOrgScopes() {
String[] scopeItems = esiaOrgScopes.split(",");
return String.join(" ", Arrays.stream(scopeItems).map(item -> orgScopeUrl + item.trim()).toArray(String[]::new));
@ -146,4 +149,8 @@ public class EsiaConfig {
public long getEsiaLoginAttemptsCount() {
return esiaLoginAttemptsCount;
}
public String getFileSignVerifyUrl() {
return fileSignVerifyUrl;
}
}

View file

@ -0,0 +1,148 @@
package ru.micord.ervu.security.esia.model;
import java.io.Serializable;
import java.util.List;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
/**
* @author Eduard Tihomirov
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class MchdInfoModel implements Serializable {
    // Machine-readable power-of-attorney ("МЧД") entry, deserialized from the ESIA
    // POA registry response (see the /poa-registry/api/public/v1/poa/{guid} call).

    private static final long serialVersionUID = 1L;

    // Registry identifier of this POA.
    private String guid;
    // POA status reported by the registry — exact value set not visible here; TODO confirm.
    private String status;
    // GUID of the parent POA when this one was issued by delegation.
    private String parentGuid;
    // Principals (доверители) — the granting side of the POA.
    private Member principals;
    // Agents (представители) — the authorized side of the POA.
    private Member agents;

    public String getGuid() {
        return guid;
    }

    public void setGuid(String guid) {
        this.guid = guid;
    }

    public String getStatus() {
        return status;
    }

    public void setStatus(String status) {
        this.status = status;
    }

    public String getParentGuid() {
        return parentGuid;
    }

    public void setParentGuid(String parentGuid) {
        this.parentGuid = parentGuid;
    }

    public Member getPrincipals() {
        return principals;
    }

    public void setPrincipals(Member principals) {
        this.principals = principals;
    }

    public Member getAgents() {
        return agents;
    }

    public void setAgents(Member agents) {
        this.agents = agents;
    }

    /** Container for one side of the POA (a list of participant elements). */
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class Member implements Serializable {
        private List<Element> elements;

        public List<Element> getElements() {
            return elements;
        }

        public void setElements(List<Element> elements) {
            this.elements = elements;
        }
    }

    /** One participant: either a person or an organization (the other is null). */
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class Element implements Serializable {
        private Person person;
        private Organization organization;

        public Organization getOrganization() {
            return organization;
        }

        public void setOrganization(Organization organization) {
            this.organization = organization;
        }

        public Person getPerson() {
            return person;
        }

        public void setPerson(Person person) {
            this.person = person;
        }
    }

    /** Organization participant with its chief officer. */
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class Organization implements Serializable {
        private String name;
        private Person chief;

        public String getName() {
            return name;
        }

        public void setName(String name) {
            this.name = name;
        }

        public Person getChief() {
            return chief;
        }

        public void setChief(Person chief) {
            this.chief = chief;
        }
    }

    /** Natural-person name triple (Russian full-name convention). */
    @JsonIgnoreProperties(ignoreUnknown = true)
    public static class Person implements Serializable {
        private String firstName;
        private String lastName;
        private String middleName;

        public String getFirstName() {
            return firstName;
        }

        public void setFirstName(String firstName) {
            this.firstName = firstName;
        }

        public String getLastName() {
            return lastName;
        }

        public void setLastName(String lastName) {
            this.lastName = lastName;
        }

        public String getMiddleName() {
            return middleName;
        }

        public void setMiddleName(String middleName) {
            this.middleName = middleName;
        }
    }
}

View file

@ -0,0 +1,35 @@
package ru.micord.ervu.security.esia.model;
import java.io.Serializable;
import com.fasterxml.jackson.annotation.JsonIgnoreProperties;
import com.fasterxml.jackson.annotation.JsonProperty;
/**
* @author Eduard Tihomirov
*/
@JsonIgnoreProperties(ignoreUnknown = true)
public class VerifyDocumentSignResponse implements Serializable {
    // Response of the external signature-verification service — presumably the
    // endpoint configured via file.sign.verify.url in EsiaConfig; confirm against caller.
    //
    // NOTE(review): the accessor pair is named get/setSignVerifyResult but is backed
    // by the "error_code" JSON field; Jackson binds via the field annotation, so the
    // naming mismatch is cosmetic — renaming the accessors would break callers.

    private static final long serialVersionUID = 1L;

    // Verification result code returned by the service ("error_code" in the JSON).
    @JsonProperty("error_code")
    private String errorCode;

    // Subject of the signer's certificate ("signer_subject" in the JSON).
    @JsonProperty("signer_subject")
    private String signerInfo;

    public String getSignVerifyResult() {
        return errorCode;
    }

    public void setSignVerifyResult(String signVerifyResult) {
        this.errorCode = signVerifyResult;
    }

    public String getSignerInfo() {
        return signerInfo;
    }

    public void setSignerInfo(String signerInfo) {
        this.signerInfo = signerInfo;
    }
}

View file

@ -86,7 +86,7 @@ public class EsiaAuthService {
@Autowired
private JwtTokenService jwtTokenService;
@Autowired
private ReplyingKafkaService replyingKafkaService;
private ReplyingKafkaService<String, String> replyingKafkaService;
@Autowired
private OkopfService okopfService;
@Autowired

View file

@ -22,4 +22,6 @@ public interface UlDataService {
String getAllUserRoles(String accessToken);
EsiaHeader readHeader(String accessToken);
MchdInfoModel getMchdInfoModel(String guid, String accessToken);
}

View file

@ -16,11 +16,11 @@ import ru.micord.ervu.security.esia.config.EsiaConfig;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.HttpHeaders;
import org.springframework.stereotype.Service;
import ru.micord.ervu.security.esia.exception.EsiaException;
import ru.micord.ervu.security.esia.model.BrhsModel;
import ru.micord.ervu.security.esia.model.EmployeeModel;
import ru.micord.ervu.security.esia.model.EsiaAccessToken;
import ru.micord.ervu.security.esia.model.EsiaHeader;
import ru.micord.ervu.security.esia.model.MchdInfoModel;
import ru.micord.ervu.security.esia.model.OrganizationModel;
import ru.micord.ervu.security.esia.model.PersonModel;
@ -282,4 +282,25 @@ public class UlDataServiceImpl implements UlDataService {
throw new EsiaException(e);
}
}
public MchdInfoModel getMchdInfoModel(String guid, String accessToken) {
    // Fetches a machine-readable power of attorney ("МЧД") by GUID from the ESIA
    // public POA registry and maps the JSON body onto MchdInfoModel.
    try {
        String url = esiaConfig.getEsiaBaseUri() + "/poa-registry/api/public/v1/poa/" + guid;
        HttpRequest getReq = HttpRequest.newBuilder(URI.create(url))
                // NOTE(review): Content-Type on a body-less GET looks unnecessary — confirm
                // whether the registry actually requires it.
                .header(HttpHeaders.CONTENT_TYPE, "application/x-www-form-urlencoded")
                .header("Authorization", "Bearer ".concat(accessToken))
                .GET()
                .timeout(Duration.ofSeconds(esiaConfig.getRequestTimeout()))
                .build();
        // A fresh HttpClient per call, with the configured connection timeout.
        HttpResponse<String> getResp = HttpClient.newBuilder()
                .connectTimeout(Duration.ofSeconds(esiaConfig.getConnectionTimeout()))
                .build()
                .send(getReq, HttpResponse.BodyHandlers.ofString());
        // Delegates HTTP status validation to the shared handler.
        errorHandler(getResp);
        return objectMapper.readValue(getResp.body(), MchdInfoModel.class);
    }
    catch (Exception e) {
        // Wrap any transport/parse failure in the service-level exception, keeping the cause.
        throw new EsiaException(e);
    }
}
}

View file

@ -41,8 +41,6 @@ public class InteractionServiceImpl implements InteractionService {
.set(INTERACTION_LOG.SENT_DATE, timestamp)
.set(INTERACTION_LOG.SENDER, sender)
.set(INTERACTION_LOG.FILE_NAME, fileName)
.set(INTERACTION_LOG.RECORDS_SENT, 0)
.set(INTERACTION_LOG.RECORDS_ACCEPTED, 0)
.set(INTERACTION_LOG.ERVU_ID, ervuId)
.execute();
}

View file

@ -0,0 +1,48 @@
package ru.micord.ervu.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import ru.micord.ervu.exception.ExportException;
import ru.micord.ervu.kafka.service.ReplyingKafkaService;
import ru.micord.ervu.model.ValidateExportRequest;
import ru.micord.ervu.model.ValidateExportResponse;
/**
* @author Adel Kalimullin
*/
@Service
public class ValidationFileService {

    private final ReplyingKafkaService<String, Bytes> replyingKafkaService;
    private final ObjectMapper objectMapper;
    private final String validateExportRequestTopic;
    private final String validateExportReplyTopic;

    /**
     * @param replyingKafkaService       request/reply Kafka gateway returning raw bytes
     * @param objectMapper               JSON serializer for the request payload
     * @param validateExportRequestTopic topic the export request is published to
     * @param validateExportReplyTopic   topic the export reply is read from
     */
    public ValidationFileService(
            ReplyingKafkaService<String, Bytes> replyingKafkaService,
            ObjectMapper objectMapper,
            @Value("${ervu.kafka.validate.export.request}") String validateExportRequestTopic,
            @Value("${ervu.kafka.validate.export.response}") String validateExportReplyTopic) {
        this.replyingKafkaService = replyingKafkaService;
        this.objectMapper = objectMapper;
        this.validateExportRequestTopic = validateExportRequestTopic;
        this.validateExportReplyTopic = validateExportReplyTopic;
    }

    /**
     * Requests the export of a previously validated file over Kafka and decodes the
     * protobuf reply.
     *
     * @param ervuId ERVU organization identifier
     * @param fileId identifier of the file to export
     * @return decoded export response (file name and content)
     * @throws ExportException if the request, the reply, or decoding fails
     */
    public ValidateExportResponse exportFile(String ervuId, String fileId) {
        try {
            String requestJson =
                    objectMapper.writeValueAsString(new ValidateExportRequest(ervuId, fileId));
            Bytes reply = replyingKafkaService.sendMessageAndGetReply(
                    validateExportRequestTopic, validateExportReplyTopic, requestJson);
            return new ValidateExportResponse(reply.get());
        }
        catch (Exception e) {
            throw new ExportException("Failed to export file: " + e.getMessage(), e);
        }
    }
}

View file

@ -5,6 +5,7 @@ import java.util.Comparator;
import java.util.List;
import java.util.HashSet;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;
import javax.servlet.http.Cookie;
@ -25,13 +26,15 @@ import org.springframework.stereotype.Service;
import ru.micord.ervu.kafka.service.ReplyingKafkaService;
import ru.micord.ervu.security.webbpm.jwt.service.JwtTokenService;
import static ervu.FileConstants.DOCUMENT;
@Service
public class JournalInMemoryStaticGridLoadService implements
InMemoryStaticGridLoadService<JournalDto> {
private final JwtTokenService jwtTokenService;
private final InteractionService interactionService;
private final ReplyingKafkaService replyingKafkaService;
private final ReplyingKafkaService<String, String> replyingKafkaService;
private final ObjectMapper objectMapper;
private final HttpServletRequest request;
@ -44,7 +47,7 @@ public class JournalInMemoryStaticGridLoadService implements
public JournalInMemoryStaticGridLoadService(JwtTokenService jwtTokenService,
InteractionService interactionService,
ReplyingKafkaService replyingKafkaService,
ReplyingKafkaService<String, String> replyingKafkaService,
ObjectMapper objectMapper, HttpServletRequest request) {
this.jwtTokenService = jwtTokenService;
this.interactionService = interactionService;
@ -68,17 +71,26 @@ public class JournalInMemoryStaticGridLoadService implements
JournalFileDataResponse journalFileDataResponse = objectMapper.readValue(responseJsonString,
JournalFileDataResponse.class);
ervuJournalList = journalFileDataResponse.getFilesInfo().stream()
.map(JournalDtoMapper::mapToJournalDto)
.flatMap(fileInfo -> fileInfo.getPackFiles().stream()
.filter(fileDetail -> DOCUMENT.equals(fileDetail.getType()))
.map(fileDetail -> JournalDtoMapper.mapToJournalDto(fileInfo, fileDetail))
)
.toList();
}
catch (JsonProcessingException e) {
throw new JsonParsingException("Failed to parse JournalFileDataResponse.", e);
}
AtomicInteger counter = new AtomicInteger(1);
HashSet<String> seenFileIds = new HashSet<>();
return Stream.concat(dbJournalList.stream(), ervuJournalList.stream())
.filter(journal -> seenFileIds.add(journal.getFileId()))
.sorted(Comparator.comparing(JournalDto::getDepartureDateTime))
.sorted(Comparator.comparing(JournalDto::getDepartureDateTime).reversed())
.map(journal ->
journal
.setDocumentNumber(counter.getAndIncrement())
.setHasFailedRows(journal.getRowsError() != null && journal.getRowsError() > 0)
)
.toList();
}

View file

@ -0,0 +1,13 @@
syntax = "proto3";

package rtl.pgs.ervu.lkrp.journal;

// NOTE: the previously present google/protobuf/timestamp.proto import was unused
// (no field in this file references Timestamp) and has been removed — protoc
// warns on unused imports.

option java_multiple_files = true;
option java_outer_classname = "LkrpResponse";
option java_package = "rtl.pgs.ervu.lkrp.journal.proto";

// Reply payload for the validate-export request: the exported file and its name.
message ResponseData {
  string fileName = 1;
  bytes file = 2;
}

View file

@ -1,4 +1,15 @@
kafka_reply_timeout=Превышено время ожидания ответа от сервера.
access_denied=Доступ запрещен. Пользователь должен быть включен в группу "Сотрудник, ответственный за военно-учетную работу" в ЕСИА
login_attempts_exceeded=Слишком большое количество попыток авторизоваться в ЕСИА за короткий промежуток времени. Рекомендуем почистить cookie и кэш браузера, после повторить авторизацию.
crl_certificate_expired=Превышено время ожидания ответа из ЕСИА
cert_trust_revocation=Неизвестен статус отзыва сертификата или одного из сертификатов в цепочке (проблема с CRL)
cert_untrusted_root=Сертификат или цепочка сертификатов основана на ненадежном корневом сертификате
cert_is_not_time_valid=Этот сертификат или один из сертификатов в цепочке сертификатов является недопустимым по времени
file_sign_validate=Ошибка проверки файлов. Некорректная электронная подпись
mchd_validate_agent=Ошибка проверки файлов. Некорректная машиночитаемая доверенность. Представитель не совпадает с подписантом
mchd_null=Ошибка проверки файлов. Отсутствует машиночитаемая доверенность. Подписант не является руководителем организации
mchd_expired=Ошибка проверки файлов. Недействующая машиночитаемая доверенность.
mchd_tree_expired=Ошибка проверки файлов. Одна из родительских доверенностей недействующая.
mchd_validate_principal=Ошибка проверки файлов. Некорректная машиночитаемая доверенность. Доверитель не совпадает с руководителем организации
av_file_infected=Ошибка проверки файлов. Файлы заражены вирусом
mchd_cant_parse=Ошибка проверки файлов. Некорректный формат машиночитаемой доверенности

View file

@ -1,4 +1,15 @@
kafka_reply_timeout=Превышено время ожидания ответа от сервера.
access_denied=Доступ запрещен. Пользователь должен быть включен в группу "Сотрудник, ответственный за военно-учетную работу" в ЕСИА
login_attempts_exceeded=Слишком большое количество попыток авторизоваться в ЕСИА за короткий промежуток времени. Рекомендуем почистить cookie и кэш браузера, после повторить авторизацию.
crl_certificate_expired=Превышено время ожидания ответа из ЕСИА
cert_trust_revocation=Неизвестен статус отзыва сертификата или одного из сертификатов в цепочке (проблема с CRL)
cert_untrusted_root=Сертификат или цепочка сертификатов основана на ненадежном корневом сертификате
cert_is_not_time_valid=Этот сертификат или один из сертификатов в цепочке сертификатов является недопустимым по времени
file_sign_validate=Ошибка проверки файлов. Некорректная электронная подпись
mchd_validate_agent=Ошибка проверки файлов. Некорректная машиночитаемая доверенность. Представитель не совпадает с подписантом
mchd_null=Ошибка проверки файлов. Отсутствует машиночитаемая доверенность. Подписант не является руководителем организации
mchd_expired=Ошибка проверки файлов. Недействующая машиночитаемая доверенность.
mchd_tree_expired=Ошибка проверки файлов. Одна из родительских доверенностей недействующая.
mchd_validate_principal=Ошибка проверки файлов. Некорректная машиночитаемая доверенность. Доверитель не совпадает с руководителем организации
av_file_infected=Ошибка проверки файлов. Файлы заражены вирусом
mchd_cant_parse=Ошибка проверки файлов. Некорректный формат машиночитаемой доверенности

View file

@ -106,20 +106,23 @@
## Топики Kafka
| Наименование | Описание | Пишет | Читает | Имя конфигурации |
| ---------------------------------- | ------------------------------------------------------------------------------------------ | -------------------------- | -------------------------------------------------- | -------------------------------- |
| ervu.organization.request | топик для записи данных об организации, для получения id организации из ЕРВУ | ervu-lkrp-ul-backend | ervu-organization-registry | ERVU_KAFKA_ORG_REQUEST_TOPIC |
| ervu.organization.response | топик для чтения id организации из ЕРВУ | ervu-organization-registry | ervu-lkrp-ul-backend | ERVU_KAFKA_ORG_REPLY_TOPIC |
| ervu.organization.journal.request | топик для записи запроса для получения данных по журналу взаимодействия | ervu-lkrp-ul-backend | ervu-organization-registry | ERVU_KAFKA_JOURNAL_REQUEST_TOPIC |
| ervu.organization.journal.response | топик для чтения данных по журналу взаимодействия | ervu-organization-registry | ervu-lkrp-ul-backend | ERVU_KAFKA_JOURNAL_REPLY_TOPIC |
| ervu.lkrp.excerpt.request | топик для записи запроса для получения выписки по журналу взаимодействия | ervu-lkrp-ul-backend | ervu-organization-registry | ERVU_KAFKA_EXCERPT_REQUEST_TOPIC |
| ervu.lkrp.excerpt.response | топик для чтения выписки по журналу взаимодействия. Содержит ссылку на S3 с файлом выписки | ervu-organization-registry | ervu-lkrp-ul-backend | ERVU_KAFKA_EXCERPT_REPLY_TOPIC |
| ervu.lkrp.av-fileupload | топик для записи данных по файлу для перекладчика | ervu-lkrp-ul-backend | ervu-lkrp-av | AV_KAFKA_MESSAGE_TOPIC_NAME |
| ervu.lkrp.av-fileupload-status | топик для чтения статусов файла, полученных от перекладчика | ervu-lkrp-av | ervu-lkrp-ul-backend | AV_KAFKA_DOWNLOAD_RESPONSE |
| ervu.lkrp.auth.events | топик для отправки аудита в журнал авторизации | ervu-lkrp-ul-backend | ervu-lkrp-journal-service | AUDIT_KAFKA_AUTHORIZATION_TOPIC |
| ervu.lkrp.action.events | топик для отправки аудита в журнал действий пользователя | ervu-lkrp-ul-backend | ervu-lkrp-journal-service | AUDIT_KAFKA_ACTION_TOPIC |
| Наименование | Описание | Пишет | Читает | Имя конфигурации |
|------------------------------------|--------------------------------------------------------------------------------------------|----------------------------|---------------------------------------------------|----------------------------------|
| ervu.organization.request | топик для записи данных об организации, для получения id организации из ЕРВУ | ervu-lkrp-ul-backend | ervu-organization-registry | ERVU_KAFKA_ORG_REQUEST_TOPIC |
| ervu.organization.response | топик для чтения id организации из ЕРВУ | ervu-organization-registry | ervu-lkrp-ul-backend | ERVU_KAFKA_ORG_REPLY_TOPIC |
| ervu.organization.journal.request | топик для записи запроса для получения данных по журналу взаимодействия | ervu-lkrp-ul-backend | ervu-organization-registry | ERVU_KAFKA_JOURNAL_REQUEST_TOPIC |
| ervu.organization.journal.response | топик для чтения данных по журналу взаимодействия | ervu-organization-registry | ervu-lkrp-ul-backend | ERVU_KAFKA_JOURNAL_REPLY_TOPIC |
| ervu.lkrp.excerpt.request | топик для записи запроса для получения выписки по журналу взаимодействия | ervu-lkrp-ul-backend | ervu-organization-registry | ERVU_KAFKA_EXCERPT_REQUEST_TOPIC |
| ervu.lkrp.excerpt.response | топик для чтения выписки по журналу взаимодействия. Содержит ссылку на S3 с файлом выписки | ervu-organization-registry | ervu-lkrp-ul-backend | ERVU_KAFKA_EXCERPT_REPLY_TOPIC |
| ervu.lkrp.av-fileupload | топик для записи данных по файлу для перекладчика | ervu-lkrp-ul-backend | ervu-lkrp-av | AV_KAFKA_DOWNLOAD_REQUEST |
| ervu.lkrp.av-fileupload-status | топик для чтения статусов файла, полученных от перекладчика | ervu-lkrp-av | ervu-lkrp-ul-backend | AV_KAFKA_DOWNLOAD_RESPONSE |
| ervu.lkrp.auth.events | топик для отправки аудита в журнал авторизации | ervu-lkrp-ul-backend | ervu-lkrp-journal-service | AUDIT_KAFKA_AUTHORIZATION_TOPIC |
| ervu.lkrp.action.events | топик для отправки аудита в журнал действий пользователя | ervu-lkrp-ul-backend | ervu-lkrp-journal-service | AUDIT_KAFKA_ACTION_TOPIC |
| ervu.lkrp.download.request | топик для отправки аудита в журнал обмена файлами | ervu-lkrp-ul-backend | ervu-organization-registry, ervu-validate-recruits | AUDIT_KAFKA_FILE_UPLOAD_TOPIC |
| ervu.lkrp.import.file | топик для отправки аудита в журнал загрузки ЮЛ и ФЛ | ervu-lkrp-ul-backend | ervu-lkrp-journal-service | AUDIT_KAFKA_FILE_DOWNLOAD_TOPIC |
| ervu.lkrp.import.file | топик для отправки аудита в журнал загрузки ЮЛ и ФЛ | ervu-lkrp-ul-backend | ervu-lkrp-journal-service | AUDIT_KAFKA_FILE_DOWNLOAD_TOPIC |
| ervu.lkrp.download.request | топик для записи данных по загруженным файлам | ervu-lkrp-ul-backend | ervu-organization-registry, ervu-validate-recruits| ERVU_KAFKA_DOWNLOAD_REQUEST |
| ervu.lkrp.download.response | топик для чтения данных по загруженным файлам после обработки ЕРВУ | ervu-validate-recruits | ervu-lkrp-ul-backend | ERVU_KAFKA_DOWNLOAD_RESPONSE |
| ervu.lkrp.av-clear-s3 | топик для очистки файлов из s3 | ervu-lkrp-ul-backend | ervu-lkrp-av | AV_KAFKA_CLEAR_S3 |
## Прочее

View file

@ -16,7 +16,7 @@ KAFKA_AUTH_SASL_MODULE=org.apache.kafka.common.security.plain.PlainLoginModule
KAFKA_USER=user1
KAFKA_PASS=Blfi9d2OFG
AV_KAFKA_MESSAGE_TOPIC_NAME=file-to-upload
AV_KAFKA_DOWNLOAD_REQUEST=file-to-upload
AV_KAFKA_GROUP_ID=local-ervu-lkrp-ul-backend
AV_KAFKA_DOWNLOAD_RESPONSE=ervu.lkrp.av-fileupload-status

View file

@ -13,7 +13,7 @@ KAFKA_AUTH_SASL_MODULE=org.apache.kafka.common.security.scram.ScramLoginModule
KAFKA_USER=user1
KAFKA_PASS=Blfi9d2OFG
AV_KAFKA_MESSAGE_TOPIC_NAME=file-to-upload
AV_KAFKA_DOWNLOAD_REQUEST=file-to-upload
AV_KAFKA_GROUP_ID=1
AV_KAFKA_DOWNLOAD_RESPONSE=ervu.lkrp.av-fileupload-status

View file

@ -64,7 +64,8 @@
<property name="kafka.user" value="user1"/>
<property name="kafka.pass" value="Blfi9d2OFG"/>
<property name="av.kafka.download-request-topic" value="ervu.lkrp.download.request"/>
<property name="av.kafka.message.topic.name" value="file-to-upload"/>
<property name="av.kafka.download.request" value="file-to-upload"/>
<property name="av.kafka.clear.s3" value="clear-s3"/>
<property name="esia.scopes" value="fullname, snils, id_doc, birthdate, usr_org, openid"/>
<property name="esia.org.scopes" value="org_fullname, org_shortname, org_brhs, org_brhs_ctts, org_brhs_addrs, org_type, org_ogrn, org_inn, org_leg, org_kpp, org_ctts, org_addrs, org_grps, org_emps"/>
<property name="esia.org.scope.url" value="http://esia.gosuslugi.ru/"/>

View file

@ -306,6 +306,44 @@
background-image: url(../img/svg/pin.svg);
}
.download-cell-renderer {
display: flex;
justify-content: center;
align-items: center;
}
.download-btn {
background: transparent;
border: none;
cursor: pointer;
padding: 6px;
display: flex;
align-items: center;
justify-content: center;
transition: transform 0.15s ease, opacity 0.15s ease;
min-width: 36px;
min-height: 36px;
}
.download-btn i {
color: #555;
font-size: 26px;
}
.download-btn:hover i {
color: #000;
}
.download-btn:active i {
transform: scale(0.9);
}
.download-btn:disabled,
.download-btn[disabled] {
opacity: 0.3;
cursor: default;
}
.webbpm.ervu_lkrp_ul .paragraph-group > .vertical-container > * {
margin-bottom: 16px;
}
@ -818,11 +856,14 @@
margin-bottom: 0;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload {
.webbpm.ervu_lkrp_ul .modal.show :is(ervu-file-upload, ervu-additional-file-upload) {
display: block;
margin: var(--indent-mini) 0;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .file-drop-zone {
.webbpm.ervu_lkrp_ul .modal.show :is(ervu-file-upload, ervu-additional-file-upload) ~ button-component:not(.info):not(.link) {
padding-top: 0;
}
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .file-drop-zone {
display: flex;
flex-direction: column;
align-items: center;
@ -831,14 +872,14 @@
border: 2px dashed var(--border-light);
background: transparent;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .file-drop-zone .select-file-field-text {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .file-drop-zone .select-file-field-text {
color: var(--color-light);
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .file-drop-zone .select-file-btn {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .file-drop-zone .select-file-btn {
margin-top: var(--indent-mini);
outline: transparent;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .selected-file {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .selected-file {
display: flex;
white-space: nowrap;
padding: 12px 16px;
@ -846,7 +887,7 @@
border-radius: 4px;
background-color: var(--bg-light);
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .selected-file .selected-file-name {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .selected-file .selected-file-name {
position: relative;
color: var(--color-text-primary);
font-size: var(--size-text-secondary);
@ -855,24 +896,24 @@
overflow: hidden;
padding-left: 56px;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .selected-file .selected-file-name::before {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .selected-file .icon-csv::before {
position: absolute;
content: url(../img/svg/file-csv.svg);
top: -2px;
left: 0;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .selected-file .selected-file-size {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .selected-file .selected-file-size {
color: var(--border-light);
font-family: 'InterL';
font-size: var(--size-text-secondary);
margin-left: 8px;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .selected-file .selected-file-size::before {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .selected-file .selected-file-size::before {
position: relative;
content: "|";
margin-right: 8px;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .selected-file .selected-file-delete-btn {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .selected-file .selected-file-delete-btn {
color: var(--color-link);
font-size: var(--size-text-secondary);
margin-left: auto;
@ -880,12 +921,9 @@
background-color: transparent;
outline: transparent;
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload .selected-file .selected-file-delete-btn:is(:hover, :focus, :active) {
.webbpm.ervu_lkrp_ul .modal.show .ervu-file-upload .selected-file .selected-file-delete-btn:is(:hover, :focus, :active) {
color: var(--color-link-hover);
}
.webbpm.ervu_lkrp_ul .modal.show ervu-file-upload ~ button-component:not(.info):not(.link) {
padding-top: 0;
}
.webbpm.ervu_lkrp_ul .modal.show .modal-content .paragraph-list > .vertical-container > * {
position: relative;

View file

@ -18,7 +18,7 @@
hidden>
<div class="selected-file-list" *ngIf="isFilesListVisible">
<div class="selected-file" *ngFor="let item of uploader.queue">
<span class="selected-file-name">{{item?.file?.name}}</span>
<span class="selected-file-name" [ngClass]="getFileIconClass()">{{item?.file?.name}}</span>
<span class="selected-file-size" *ngIf="displayFileSize">{{item?.file?.size/1024/1024 | number: '.2'}} MB</span>
<button class="selected-file-delete-btn" (click)="removeFile(item)">{{removeFileButtonName}}</button>
</div>

View file

@ -56,12 +56,12 @@ export class ErvuFileUpload extends InputControl {
public isFilesListVisible: boolean = true;
public isProgressBarVisible: boolean = false;
private fileInputEl: HTMLInputElement;
public fileInputEl: HTMLInputElement;
private url: string = '/backend/employee/document';
private messagesService: MessagesService;
private isUploadErrorOccurred = false;
public messagesService: MessagesService;
public isUploadErrorOccurred = false;
private appConfigService: AppConfigService;
private cookieService: CookieService;
public cookieService: CookieService;
constructor(el: ElementRef, cd: ChangeDetectorRef) {
super(el, cd);
@ -135,7 +135,7 @@ export class ErvuFileUpload extends InputControl {
this.cd.markForCheck();
}
private setUploaderMethods() {
public setUploaderMethods() {
this.uploader.onBeforeUploadItem = (fileItem: FileItem) => {
//refresh headers
@ -221,6 +221,13 @@ export class ErvuFileUpload extends InputControl {
return undefined;
}
// Returns the CSS icon class for the selected-file row, derived from the
// first configured allowed extension (e.g. "csv" -> "icon-csv");
// falls back to "icon-default" when no extension filter is configured.
getFileIconClass(): string {
    const extensions = this.extensionFilter;
    return extensions && extensions.length > 0
        ? 'icon-' + extensions[0].toLowerCase()
        : 'icon-default';
}
// Intentionally a no-op override: this upload control is not bound through
// ngModel, so there is no model change stream to subscribe to.
subscribeToModelChange() {
//empty because there is no ngModel here
}

View file

@ -0,0 +1,121 @@
import {NotNull, ObjectRef} from "@webbpm/base-package";
import {ChangeDetectionStrategy, Component} from "@angular/core";
import {FileItem} from "ng2-file-upload";
import {EmployeeInfoFileFormType} from "./EmployeeInfoFileFormType";
import {TokenConstants} from "../../../modules/security/TokenConstants";
import {ErvuFileUpload} from "./ErvuFileUpload";
import {FileLikeObject} from "ng2-file-upload/file-upload/file-like-object.class";
/**
 * Upload component that sends the main file together with companion files
 * taken from two sibling upload components: a detached signature file
 * ("signFile", from signFileUploadRef) and an optional MCHD power-of-attorney
 * file ("mchdFile", from mchdFileUploadRef). Reuses the ErvuFileUpload
 * template; only the uploader callback wiring is overridden.
 */
@Component({
moduleId: module.id,
selector: "ervu-additional-file-upload",
templateUrl: "./../../../../../src/resources/template/ervu/component/ErvuFileUpload.html",
changeDetection: ChangeDetectionStrategy.OnPush
})
export class ErvuFileUploadWithAdditionalFiles extends ErvuFileUpload {
// Sibling upload component holding the required signature file; its first
// queued file is attached to every request as "signFile".
@ObjectRef()
@NotNull()
public signFileUploadRef: ErvuFileUpload;
// Sibling upload component holding the MCHD file; attached as "mchdFile"
// only when its queue is non-empty.
@ObjectRef()
@NotNull()
public mchdFileUploadRef: ErvuFileUpload;
// Rewires all ng2-file-upload callbacks so each upload carries the companion
// files, and so a failure resets the state of all three upload components.
public setUploaderMethods() {
this.uploader.onBeforeUploadItem = (fileItem: FileItem) => {
// NOTE(review): assumes signFileUploadRef always has a queued file at this
// point; queue[0] would be undefined otherwise — confirm the form enforces
// selecting the signature file before upload starts.
const additionalParams: any = {
signFile: this.signFileUploadRef.uploader.queue[0]._file
};
if (this.mchdFileUploadRef && this.mchdFileUploadRef.uploader.queue.length > 0) {
additionalParams.mchdFile = this.mchdFileUploadRef.uploader.queue[0]._file;
}
//refresh headers (form type, client time zone, CSRF token) on every upload
this.uploader.setOptions({
headers: [
{
name: "X-Employee-Info-File-Form-Type",
value: EmployeeInfoFileFormType[this.formType]
},
{
name: "Client-Time-Zone",
value: Intl.DateTimeFormat().resolvedOptions().timeZone
},
{
name: TokenConstants.CSRF_HEADER_NAME,
value: this.cookieService.get(TokenConstants.CSRF_TOKEN_NAME)
}
],
additionalParameter: additionalParams
});
// Switch the UI from the drop zone / file list to the progress bar.
this.fileUploadStartEvent.trigger();
this.isDropZoneVisible = false;
this.isFilesListVisible = false;
this.isProgressBarVisible = true;
this.cd.markForCheck();
};
// On server error: show per-message errors when the response is structured
// JSON, otherwise a generic message; then clear ALL three upload queues and
// restore the initial UI state.
this.uploader.onErrorItem = (item: FileItem,
response: string) => {
this.fileUploadFailedEvent.trigger();
this.uploader.cancelAll();
try {
var error = JSON.parse(response);
error.messages.forEach((errorMessage) => {
this.messagesService.error(errorMessage, error);
})
}
catch (error) {
// Response was not parseable JSON — fall back to a generic failure
// message listing the files that were not uploaded.
this.messagesService.error(`Не удалось отправить следующие файлы: ${item.file.name}`
+ ` ${this.uploader.getNotUploadedItems()
.map(notUploadeditem => notUploadeditem.file.name)
.join(', ')}.`);
}
// Reset the native inputs and queues of this component and both siblings.
this.fileInputEl.value = null;
this.uploader.clearQueue();
this.mchdFileUploadRef.uploader.clearQueue();
this.mchdFileUploadRef.fileInputEl.value = null;
this.signFileUploadRef.uploader.clearQueue();
this.signFileUploadRef.fileInputEl.value = null;
this.isDropZoneVisible = true;
this.isFilesListVisible = true;
this.isProgressBarVisible = false;
this.isUploadErrorOccurred = true;
this.cd.markForCheck();
};
// On full success (no error flagged by onErrorItem): clear all queues and
// hide the progress bar.
this.uploader.onCompleteAll = () => {
if (!this.isUploadErrorOccurred) {
this.uploader.clearQueue();
this.mchdFileUploadRef.uploader.clearQueue();
this.signFileUploadRef.uploader.clearQueue();
this.fileUploadEndEvent.trigger();
this.isProgressBarVisible = false;
this.cd.markForCheck();
}
};
this.uploader.onAfterAddingFile = (fileItem: FileItem) => {
this.fileAddedEvent.trigger();
}
// Map ng2-file-upload filter rejections (size / queue limit / extension)
// to user-facing error messages.
this.uploader.onWhenAddingFileFailed = (item: FileLikeObject, filter: any, options: any) => {
switch (filter.name) {
case "fileSize":
this.messagesService.error(`Размер файла ${item.name} превышает предельно допустимый = ${this.maxFileSizeMb} MB`);
break;
case "queueLimit":
this.messagesService.error(`Не удалось добавить файл ${item.name}. `
+ `Достигнуто максимальное количество файлов для загрузки = ${this.maxFilesToUpload}`);
break;
case "extension":
this.messagesService.error(`Файл ${item.name} имеет недопустимое расширение.`);
break;
default:
this.messagesService.error(`Не удалось добавить файл ${item.name}.`);
}
this.fileInputEl.value = null;
this.cd.markForCheck();
};
}
}

View file

@ -1,5 +1,4 @@
import {
Event,
GridColumnIdUtils,
GridRow,
GridRowModelType,
@ -11,17 +10,19 @@ import {ChangeDetectionStrategy, Component} from "@angular/core";
import {
ColDef,
GridReadyEvent,
FilterChangedEvent,
ICellRendererParams,
ITooltipParams,
ValueFormatterParams,
ValueGetterParams
ValueGetterParams,
} from "ag-grid-community";
import {StaticColumnInitializer} from "./StaticColumnInitializer";
import {InMemoryStaticGridRpcService} from "../../../generated/ru/micord/ervu/service/rpc/InMemoryStaticGridRpcService";
import {
InMemoryStaticGridRpcService
} from "../../../generated/ru/micord/ervu/service/rpc/InMemoryStaticGridRpcService";
import {StaticGridColumn} from "../../../generated/ru/micord/ervu/property/grid/StaticGridColumn";
import { FilterService } from "../../service/FilterService";
import {FilterService} from "../../service/FilterService";
import {AuditConstants, AuditService, FilterInfo} from "../../service/AuditService";
import {ValidateFileService} from "../../service/ValidateFileService";
@Component({
@ -34,6 +35,7 @@ export class InMemoryStaticGrid extends GridV2 {
private rpcService: InMemoryStaticGridRpcService;
private auditService: AuditService;
private validateFileService: ValidateFileService;
// This grid loads its full data set up front via the RPC service, so it uses
// ag-grid's client-side row model.
getRowModelType(): string {
return GridRowModelType.CLIENT_SIDE;
}
protected initGrid() {
super.initGrid();
this.auditService = this.injector.get(AuditService);
this.validateFileService = this.injector.get(ValidateFileService);
this.rpcService = this.getScript(InMemoryStaticGridRpcService);
if (this.rpcService) {
this.rpcService.loadData().then(response => {
@ -85,6 +88,10 @@ export class InMemoryStaticGrid extends GridV2 {
}
}
// Triggers a browser download of the validation file with the given id;
// called from FileDownloadCellRenderer via cellRendererParams.context.
public downloadFile(fileId: string) {
    this.validateFileService.exportFile(fileId);
}
getColumns(): any[] {
return this.getScriptsInChildren(GridV2Column)
.map(columnV2 => columnV2.getScript(StaticGridColumn));
@ -125,6 +132,13 @@ export class InMemoryStaticGrid extends GridV2 {
return columnComp.valueFormatter.format(params);
}
}
colDef.cellRendererParams = {
context : {
parentComponent: this
}
};
return colDef;
}

View file

@ -1,6 +1,7 @@
import {FilterType} from "../../../../generated/ru/micord/ervu/property/grid/FilterType";
import {DateFilter, NumberFilter, TextFilter} from "ag-grid-community";
import {SetFilter} from "./SetFilter";
import {FileAvailableFilterComp} from "./FileAvailableFilterComp";
export class CustomGridColumnFilterUtils {
@ -16,6 +17,8 @@ export class CustomGridColumnFilterUtils {
return DateFilter;
case FilterType.SET:
return SetFilter;
case FilterType.FILE:
return FileAvailableFilterComp;
case FilterType.TEXT:
default:
return TextFilter;

View file

@ -0,0 +1,77 @@
import {BaseBooleanComboBoxFilter} from "@webbpm/base-package";
import {IFilterParams, IDoesFilterPassParams, IFilterComp} from "ag-grid-community";
/**
 * ag-grid column filter for file availability. Renders a three-option combo
 * box (no filter / file present / file absent) and filters rows by strict
 * equality of the boolean cell value against the chosen option.
 */
export class FileAvailableFilterComp extends BaseBooleanComboBoxFilter implements IFilterComp {

    // Single source of truth for the combo box: the index in this array is the
    // combo box selectedIndex, and `value` is the filter state it maps to
    // (undefined = filter inactive). The original duplicated this mapping as
    // magic indices in onComboBoxChanged/setModel and ignored the `value`
    // fields entirely.
    private static readonly OPTIONS: ReadonlyArray<{ label: string; value: boolean | undefined }> = [
        { label: "Все", value: undefined },
        { label: "Файл присутствует", value: true },
        { label: "Файл отсутствует", value: false }
    ];

    // Current filter state: true/false = must match, undefined = inactive.
    private filterValue: boolean | undefined = undefined;
    private params!: IFilterParams;

    // Builds the combo box GUI and subscribes to user changes.
    init(params: IFilterParams): void {
        this.params = params;
        this.createComboBox("ag-combobox-file-filter");
        this.populateComboBoxWithFixedValues();
        this.comboBox.addEventListener("change", () => this.onComboBoxChanged());
    }

    // Fills the combo box with the fixed option labels, in OPTIONS order.
    protected populateComboBoxWithFixedValues(): void {
        this.comboBox.innerHTML = "";
        for (const { label } of FileAvailableFilterComp.OPTIONS) {
            const option = document.createElement("option");
            option.textContent = label;
            this.comboBox.appendChild(option);
        }
    }

    // Maps the selected combo box index to its option's value and notifies
    // the grid that the filter changed.
    protected onComboBoxChanged(): void {
        const selected = FileAvailableFilterComp.OPTIONS[this.comboBox.selectedIndex];
        this.filterValue = selected ? selected.value : undefined;
        this.params.filterChangedCallback();
    }

    // Row passes when no filter is active or the cell value equals the filter.
    doesFilterPass(params: IDoesFilterPassParams): boolean {
        const cellValue = params.data[this.params.colDef.field!];
        return this.filterValue === undefined || cellValue === this.filterValue;
    }

    isFilterActive(): boolean {
        return this.filterValue !== undefined;
    }

    // undefined model = inactive; otherwise the standard {filter, type} shape.
    getModel(): any {
        return this.filterValue === undefined
            ? undefined
            : { filter: this.filterValue, type: "equals" };
    }

    setModel(model: any): void {
        this.filterValue = model ? model.filter : undefined;
        // findIndex over OPTIONS reproduces the original index mapping:
        // undefined -> 0, true -> 1, false -> 2.
        this.comboBox.selectedIndex =
            FileAvailableFilterComp.OPTIONS.findIndex(o => o.value === this.filterValue);
        // NOTE(review): calling filterChangedCallback from setModel is unusual —
        // ag-grid re-evaluates filters itself after setFilterModel; confirm this
        // extra notification is intentional and cannot cause re-entrancy.
        this.params.filterChangedCallback();
    }

    getGui(): HTMLElement {
        return this.eGui;
    }
}

View file

@ -49,7 +49,7 @@ export class SetFilter implements IFilterComp {
private initCheckBox(id: string, value: string, index: number): HTMLInputElement {
this.eGui.insertAdjacentHTML('beforeend', this.OPTION_TEMPLATE);
this.eGui.querySelectorAll('.ag-filter-value')[index].innerHTML = value;
(this.eGui.querySelectorAll('.ag-filter-value')[index] as HTMLElement).textContent = value;
let checkbox = this.eGui.querySelectorAll('.ag-filter-checkbox')[index] as HTMLInputElement;
checkbox.setAttribute('id', id);
checkbox.addEventListener('change', this.onCheckBoxChanged.bind(this));

View file

@ -0,0 +1,38 @@
import {ICellRendererParams} from "ag-grid-community";
import {GridCellValueRenderer} from "@webbpm/base-package";
/**
 * Cell renderer that shows a download button for rows that have an attached
 * file. The actual download is delegated to the grid component supplied via
 * cellRendererParams.context.parentComponent (wired up by InMemoryStaticGrid).
 */
export class FileDownloadCellRenderer implements GridCellValueRenderer {

    render(params: ICellRendererParams): HTMLElement {
        const container = document.createElement('div');
        container.className = 'download-cell-renderer';

        const button = document.createElement('button');
        button.className = 'download-btn';
        button.innerHTML = '<i class="fa fa-download"></i>';
        button.title = params.data.fileName || 'Скачать файл';

        // No cell value means no file for this row — render the button hidden
        // so the cell layout stays consistent.
        if (!params.value) {
            button.style.display = 'none';
        }

        // BUG FIX: the original handler took no parameter and referenced the
        // deprecated global `event` (window.event), which is undefined in
        // non-Chromium browsers and fails under strict TS DOM settings.
        button.addEventListener('click', (event: MouseEvent) => {
            event.stopPropagation();
            const fileId = params.data.fileId;
            if (!fileId) {
                return;
            }
            const parentComponent = params.context.parentComponent;
            if (parentComponent) {
                parentComponent.downloadFile(fileId);
            }
            button.blur();
        });

        container.appendChild(button);
        return container;
    }
}

View file

@ -1,6 +1,7 @@
import {DateFilter, NumberFilter, TextFilter} from "ag-grid-community";
import {SetFilter} from "../component/grid/filter/SetFilter";
import {FilterInfo} from "./AuditService";
import {FileAvailableFilterComp} from "../component/grid/filter/FileAvailableFilterComp";
export class FilterService {
static getFilterData(columnDef: any, agFilter: any): FilterInfo {
@ -15,6 +16,8 @@ export class FilterService {
return this.processSetFilter(agFilter);
case TextFilter:
return this.processTextFilter(agFilter);
case FileAvailableFilterComp:
return this.processFileAvailableFilter(agFilter);
default:
return;
}
@ -63,13 +66,25 @@ export class FilterService {
return this.createDualConditionData(agFilter);
}
// Builds the audit FilterInfo for the file-availability filter: a single
// "equals" condition whose value is the human-readable option label.
private static processFileAvailableFilter(agFilter: any): FilterInfo {
    let label: string;
    if (agFilter.filter) {
        label = "Файл присутствует";
    } else {
        label = "Файл отсутствует";
    }
    const condition = {filterValue: label, filterType: "equals"};
    return {conditions: [condition]};
}
private static createSingleConditionData(
filterValue: string,
filterType: string,
endValue?: string
): FilterInfo {
return {
conditionOperator: undefined,
conditions: [{
filterValue: endValue ? `${filterValue} to ${endValue}` : filterValue,
filterType: filterType,

View file

@ -0,0 +1,34 @@
import {Injectable} from "@angular/core";
import {HttpClient} from "@angular/common/http";
import {MessagesService} from "@webbpm/base-package";
@Injectable({
    providedIn: "root"
})
export class ValidateFileService {

    constructor(private httpClient: HttpClient) {
    }

    /**
     * Downloads the validation report for the given file id and triggers a
     * browser "save as" via a temporary object-URL <a> element. The client
     * time zone header lets the backend localize timestamps in the export.
     */
    public exportFile(fileId: string) {
        const timeZone = Intl.DateTimeFormat().resolvedOptions().timeZone;
        this.httpClient.get('validate/export/' + fileId, {
            responseType: 'blob',
            headers: {
                "Client-Time-Zone": timeZone,
            },
            observe: 'response'
        }).toPromise()
            .then((response) => {
                const data = window.URL.createObjectURL(response.body);
                const link = document.createElement("a");
                link.href = data;
                // ROBUSTNESS FIX: the original dereferenced the
                // Content-Disposition header and its regex match without null
                // checks — a missing header or a plain `filename=` form threw
                // a TypeError and the download never started.
                link.download = ValidateFileService.extractFileName(
                    response.headers.get('Content-Disposition'), fileId);
                document.body.appendChild(link);
                link.click();
                window.URL.revokeObjectURL(data);
                link.remove();
            });
    }

    // Extracts the download file name from a Content-Disposition header,
    // supporting the RFC 5987 `filename*=UTF-8''...` form produced by the
    // backend; falls back to the file id when the header is absent or does
    // not match.
    private static extractFileName(contentDisposition: string | null, fallback: string): string {
        if (contentDisposition) {
            const utf8Match = contentDisposition.match(/filename\*=?UTF-8''(.+)/i);
            if (utf8Match) {
                return decodeURIComponent(utf8Match[1].replace(/\+/g, '%20'));
            }
        }
        return fallback;
    }
}

View file

@ -27,6 +27,10 @@ import {ErvuDownloadFileButton} from "../../ervu/component/button/ErvuDownloadFi
import {AuthenticationService} from "../security/authentication.service";
import {HomeLandingComponent} from "./component/home-landing.component";
import {AuditService} from "../../ervu/service/AuditService";
import {
ErvuFileUploadWithAdditionalFiles
} from "../../ervu/component/fileupload/ErvuFileUploadWithAdditionalFiles";
import {ValidateFileService} from "../../ervu/service/ValidateFileService";
registerLocaleData(localeRu);
export const DIRECTIVES = [
@ -36,6 +40,7 @@ export const DIRECTIVES = [
forwardRef(() => AccessDeniedComponent),
forwardRef(() => AppProgressIndicationComponent),
forwardRef(() => ErvuFileUpload),
forwardRef(() => ErvuFileUploadWithAdditionalFiles),
forwardRef(() => ErvuDownloadFileButton),
forwardRef(() => InMemoryStaticGrid),
forwardRef(() => HomeLandingComponent),
@ -66,7 +71,7 @@ export function checkAuthentication(authService: AuthenticationService): () => P
DIRECTIVES
],
providers: [
AuthenticationService, AuditService,
AuthenticationService, AuditService, ValidateFileService,
{
provide: APP_INITIALIZER,
useFactory: checkAuthentication,

21
pom.xml
View file

@ -22,6 +22,7 @@
<joda-time.version>2.9.2</joda-time.version>
<webbpm-platform.version>3.192.26</webbpm-platform.version>
<wbp.overall-timeout>72000</wbp.overall-timeout>
<os-maven-plugin.version>1.6.2</os-maven-plugin.version>
</properties>
<dependencyManagement>
<dependencies>
@ -270,6 +271,16 @@
<version>1.1.10.7</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>4.28.3</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java-util</artifactId>
<version>4.28.3</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
@ -300,6 +311,11 @@
<artifactId>sardine</artifactId>
<version>5.12</version>
</dependency>
<dependency>
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
<version>4.5.14</version>
</dependency>
</dependencies>
</dependencyManagement>
<repositories>
@ -387,6 +403,11 @@
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<version>0.6.1</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>

View file

@ -982,11 +982,106 @@
</entry>
</properties>
</scripts>
<children id="7094b2c2-60c7-43d0-84d7-3adbd3c77fe9">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>7094b2c2-60c7-43d0-84d7-3adbd3c77fe9</componentRootId>
<name>№П/П</name>
<container>false</container>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
<entry>
<key>floatingFilter</key>
<value>
<simple>false</simple>
</value>
</entry>
</properties>
</scripts>
<scripts id="0a01c185-920b-4328-a82d-277f917b185e">
<properties>
<entry>
<key>autoHeight</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
<simple>"№П/П"</simple>
</value>
</entry>
<entry>
<key>displayPopup</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>field</key>
<value>
<complex>
<entry>
<key>column</key>
<value>
<simple>"documentNumber"</simple>
</value>
</entry>
<entry>
<key>filterType</key>
<value>
<simple>"NUMBER"</simple>
</value>
</entry>
<entry>
<key>type</key>
<value>
<simple>"java.lang.Number"</simple>
</value>
</entry>
</complex>
</value>
</entry>
<entry>
<key>filter</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>pinned</key>
<value>
<simple>"LEFT"</simple>
</value>
</entry>
<entry>
<key>sortable</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>width</key>
<value>
<simple>50</simple>
</value>
</entry>
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>
</properties>
</scripts>
</children>
<children id="9b895ce1-494f-4f2e-b87b-78d019fc3760">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>9b895ce1-494f-4f2e-b87b-78d019fc3760</componentRootId>
<name>Дата и время направления</name>
<container>false</container>
<expanded>false</expanded>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
@ -1064,7 +1159,7 @@
<entry>
<key>filter</key>
<value>
<simple>true</simple>
<simple>null</simple>
</value>
</entry>
<entry>
@ -1118,6 +1213,12 @@
<simple>true</simple>
</value>
</entry>
<entry>
<key>disableHiding</key>
<value>
<simple>null</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
@ -1489,7 +1590,7 @@
<entry>
<key>column</key>
<value>
<simple>"filesSentCount"</simple>
<simple>"rowsCount"</simple>
</value>
</entry>
<entry>
@ -1539,6 +1640,7 @@
<componentRootId>4c070d5d-cac7-4cc4-8ee4-e9a4b9b289a5</componentRootId>
<name>Записей принято</name>
<container>false</container>
<expanded>false</expanded>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
@ -1577,7 +1679,7 @@
<entry>
<key>column</key>
<value>
<simple>"acceptedFilesCount"</simple>
<simple>"rowsSuccess"</simple>
</value>
</entry>
<entry>
@ -1616,6 +1718,293 @@
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>
</properties>
</scripts>
</children>
<children id="7189ce80-0090-44c9-a8ba-b3860b1fefb8">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>7189ce80-0090-44c9-a8ba-b3860b1fefb8</componentRootId>
<name>Записей не принято</name>
<container>false</container>
<expanded>false</expanded>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
<entry>
<key>floatingFilter</key>
<value>
<simple>false</simple>
</value>
</entry>
</properties>
</scripts>
<scripts id="0a01c185-920b-4328-a82d-277f917b185e">
<properties>
<entry>
<key>autoHeight</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
<simple>"Записей не принято"</simple>
</value>
</entry>
<entry>
<key>displayPopup</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>field</key>
<value>
<complex>
<entry>
<key>column</key>
<value>
<simple>"rowsError"</simple>
</value>
</entry>
<entry>
<key>filterType</key>
<value>
<simple>"NUMBER"</simple>
</value>
</entry>
<entry>
<key>type</key>
<value>
<simple>"java.lang.Number"</simple>
</value>
</entry>
</complex>
</value>
</entry>
<entry>
<key>filter</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>sortable</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>width</key>
<value>
<simple>80</simple>
</value>
</entry>
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>
</properties>
</scripts>
</children>
<children id="33376e71-d633-464c-bd0a-da95caeae54b">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>33376e71-d633-464c-bd0a-da95caeae54b</componentRootId>
<name>Загрузка файла</name>
<container>false</container>
<expanded>false</expanded>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
<entry>
<key>floatingFilter</key>
<value>
<simple>false</simple>
</value>
</entry>
<entry>
<key>renderer</key>
<value>
<implRef type="TS">
<className>FileDownloadCellRenderer</className>
<packageName>ervu.component.grid.renderer</packageName>
</implRef>
</value>
</entry>
</properties>
</scripts>
<scripts id="0a01c185-920b-4328-a82d-277f917b185e">
<properties>
<entry>
<key>autoHeight</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
<simple>"Описание ошибки"</simple>
</value>
</entry>
<entry>
<key>displayPopup</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>field</key>
<value>
<complex>
<entry>
<key>column</key>
<value>
<simple>"hasFailedRows"</simple>
</value>
</entry>
<entry>
<key>filterType</key>
<value>
<simple>"FILE"</simple>
</value>
</entry>
<entry>
<key>type</key>
<value>
<simple>"java.lang.Boolean"</simple>
</value>
</entry>
</complex>
</value>
</entry>
<entry>
<key>filter</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>hidden</key>
<value>
<simple>null</simple>
</value>
</entry>
<entry>
<key>sortable</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>width</key>
<value>
<simple>80</simple>
</value>
</entry>
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>
</properties>
</scripts>
</children>
<children id="438864af-918d-4264-bc37-192f94263673">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>438864af-918d-4264-bc37-192f94263673</componentRootId>
<name>id Файла(скрытое)</name>
<container>false</container>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
<entry>
<key>floatingFilter</key>
<value>
<simple>false</simple>
</value>
</entry>
</properties>
</scripts>
<scripts id="0a01c185-920b-4328-a82d-277f917b185e">
<properties>
<entry>
<key>autoHeight</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
<simple>"id файла"</simple>
</value>
</entry>
<entry>
<key>displayPopup</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>field</key>
<value>
<complex>
<entry>
<key>column</key>
<value>
<simple>"fileId"</simple>
</value>
</entry>
<entry>
<key>filterType</key>
<value>
<simple>"TEXT"</simple>
</value>
</entry>
<entry>
<key>type</key>
<value>
<simple>"java.lang.String"</simple>
</value>
</entry>
</complex>
</value>
</entry>
<entry>
<key>filter</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>hidden</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>sortable</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>width</key>
<value>
<simple>null</simple>
</value>
</entry>
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>