SUPPORT-9368: new journal

This commit is contained in:
adel.ka 2025-09-09 09:15:53 +03:00
parent d8fe629e65
commit d84b08411d
29 changed files with 1045 additions and 107 deletions

View file

@ -97,14 +97,6 @@
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
<dependency>
<groupId>org.apache.kafka</groupId>
<artifactId>kafka-clients</artifactId>
</dependency>
<dependency>
<groupId>org.xerial.snappy</groupId>
<artifactId>snappy-java</artifactId>
</dependency>
<dependency>
<groupId>ru.cg.webbpm.modules</groupId>
<artifactId>inject</artifactId>
@ -206,9 +198,24 @@
<groupId>org.apache.httpcomponents</groupId>
<artifactId>httpmime</artifactId>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java-util</artifactId>
</dependency>
</dependencies>
<build>
<finalName>${project.parent.artifactId}</finalName>
<extensions>
<extension>
<groupId>kr.motd.maven</groupId>
<artifactId>os-maven-plugin</artifactId>
<version>1.6.2</version>
</extension>
</extensions>
<plugins>
<plugin>
<artifactId>maven-compiler-plugin</artifactId>
@ -237,6 +244,22 @@
</execution>
</executions>
</plugin>
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<executions>
<execution>
<goals>
<goal>compile</goal>
</goals>
</execution>
</executions>
<configuration>
<protocArtifact>com.google.protobuf:protoc:4.27.3:exe:${os.detected.classifier}</protocArtifact>
<protoSourceRoot>${project.parent.basedir}/backend/src/main/resources</protoSourceRoot>
<outputDirectory>${project.parent.basedir}/backend/target/generated-sources/java/protobuf</outputDirectory>
</configuration>
</plugin>
</plugins>
</build>
<profiles>

View file

@ -87,7 +87,7 @@ public class EmployeeInfoFileUploadService {
"messages/common_errors_messages");
private final WebDavClient webDavClient;
private final EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService;
private final ReplyingKafkaService replyingKafkaService;
private final ReplyingKafkaService<String, String> replyingKafkaService;
private final KafkaTemplate<String, String> kafkaTemplate;
private final InteractionService interactionService;
private final UlDataService ulDataService;
@ -110,7 +110,7 @@ public class EmployeeInfoFileUploadService {
public EmployeeInfoFileUploadService(
WebDavClient webDavClient,
EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService,
ReplyingKafkaService replyingKafkaService,
ReplyingKafkaService<String, String> replyingKafkaService,
InteractionService interactionService,
UlDataService ulDataService,
AuditService auditService,

View file

@ -0,0 +1,88 @@
package ru.micord.ervu.controller;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import javax.servlet.http.HttpServletRequest;
import org.apache.kafka.shaded.com.google.protobuf.ByteString;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.util.StringUtils;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import ru.micord.ervu.audit.constants.AuditConstants;
import ru.micord.ervu.audit.service.AuditService;
import ru.micord.ervu.model.ValidateExportResponse;
import ru.micord.ervu.security.webbpm.jwt.UserIdsPair;
import ru.micord.ervu.security.webbpm.jwt.util.SecurityUtil;
import ru.micord.ervu.service.ValidationFileService;
@RestController
@RequestMapping("/validate")
public class ValidationFileController {
private static final Logger LOGGER = LoggerFactory.getLogger(ValidationFileController.class);
private final AuditService auditService;
private final ValidationFileService validationFileService;
public ValidationFileController(AuditService auditService,
ValidationFileService validationFileService) {
this.auditService = auditService;
this.validationFileService = validationFileService;
}
@GetMapping("/export/{fileId}")
public ResponseEntity<Resource> exportFile(HttpServletRequest servletRequest,
@PathVariable String fileId) {
int size = 0;
String fileName = null;
String status = null;
try {
if (!StringUtils.hasText(fileId)) {
return ResponseEntity.notFound().build();
}
UserIdsPair userIdsPair = SecurityUtil.getUserIdsPair();
String ervuId = userIdsPair.getErvuId();
ValidateExportResponse validateExportResponse = validationFileService.exportFile(ervuId,
fileId
);
if (!validateExportResponse.hasFile()) {
LOGGER.error("Response does not contain file content for fileId: {}, user: {}", fileId, ervuId);
status = AuditConstants.FAILURE_STATUS_TYPE;
return ResponseEntity.noContent().build();
}
ByteString file = validateExportResponse.getFile();
size = file.size();
fileName = validateExportResponse.getFileName();
String encodedFilename = URLEncoder.encode(fileName, StandardCharsets.UTF_8);
InputStreamResource resource = new InputStreamResource(file.newInput());
status = AuditConstants.SUCCESS_STATUS_TYPE;
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION,
"attachment; filename*=UTF-8''" + encodedFilename
)
.contentType(MediaType.APPLICATION_OCTET_STREAM)
.body(resource);
}
catch (Exception e) {
if (status == null) {
status = AuditConstants.FAILURE_STATUS_TYPE;
}
throw e;
}
finally {
auditService.processDownloadEvent(servletRequest, size, fileName, 2, status, null);
}
}
}

View file

@ -0,0 +1,14 @@
package ru.micord.ervu.exception;
/**
* @author Adel Kalimullin
*/
public class ExportException extends RuntimeException {
public ExportException(String message) {
super(message);
}
public ExportException(String message, Throwable cause) {
super(message, cause);
}
}

View file

@ -2,14 +2,18 @@ package ru.micord.ervu.journal;
public class JournalDto {
private Integer documentNumber;
private String fileId;
private String departureDateTime;
private String fileName;
private Integer filePatternCode;
private String senderFio;
private String status;
public Integer filesSentCount;
public Integer acceptedFilesCount;
private Integer rowsCount;
private Integer rowsSuccess;
private Integer rowsError;
private Boolean hasFailedRows;
public String getDepartureDateTime() {
return departureDateTime;
@ -65,21 +69,48 @@ public class JournalDto {
return this;
}
public Integer getFilesSentCount() {
return filesSentCount;
public Integer getRowsCount() {
return rowsCount;
}
public JournalDto setFilesSentCount(Integer filesSentCount) {
this.filesSentCount = filesSentCount;
public JournalDto setRowsCount(Integer rowsCount) {
this.rowsCount = rowsCount;
return this;
}
public Integer getAcceptedFilesCount() {
return acceptedFilesCount;
public Integer getRowsSuccess() {
return rowsSuccess;
}
public JournalDto setAcceptedFilesCount(Integer acceptedFilesCount) {
this.acceptedFilesCount = acceptedFilesCount;
public JournalDto setRowsSuccess(Integer rowsSuccess) {
this.rowsSuccess = rowsSuccess;
return this;
}
public Integer getRowsError() {
return rowsError;
}
public JournalDto setRowsError(Integer rowsError) {
this.rowsError = rowsError;
return this;
}
public Integer getDocumentNumber() {
return documentNumber;
}
public JournalDto setDocumentNumber(Integer documentNumber) {
this.documentNumber = documentNumber;
return this;
}
public Boolean isHasFailedRows() {
return hasFailedRows;
}
public JournalDto setHasFailedRows(Boolean hasFailedRows) {
this.hasFailedRows = hasFailedRows;
return this;
}
}

View file

@ -14,6 +14,7 @@ public class JournalFileInfo {
private SenderInfo senderInfo;
private Integer rowsCount; // Total number of records sent in the file
private Integer rowsSuccess; // Number of records from the file that were accepted
private Integer rowsError; // Number of records from the file that were rejected
public List<JournalFileDetails> getPackFiles() {
return packFiles;
@ -51,6 +52,15 @@ public class JournalFileInfo {
return this;
}
public Integer getRowsError() {
return rowsError;
}
public JournalFileInfo setRowsError(Integer rowsError) {
this.rowsError = rowsError;
return this;
}
@JsonIgnoreProperties(ignoreUnknown = true)
public static class JournalFileDetails {
private String fileId; // File ID issued when the file record is created in the organization registry (in ERVU)

View file

@ -24,8 +24,9 @@ public class JournalDtoMapper {
)
)
.setStatus(journalFileDetails.getFileStatus().getStatus())
.setFilesSentCount(journalFileInfo.getRowsCount())
.setAcceptedFilesCount(journalFileInfo.getRowsSuccess());
.setRowsCount(journalFileInfo.getRowsCount())
.setRowsSuccess(journalFileInfo.getRowsSuccess())
.setRowsError(journalFileInfo.getRowsError());
}
public static JournalDto mapToJournalDto(InteractionLogRecord record) {
@ -35,8 +36,9 @@ public class JournalDtoMapper {
.setFilePatternCode(Integer.valueOf(record.getForm()))
.setSenderFio(record.getSender())
.setStatus(record.getStatus())
.setFilesSentCount(record.getRecordsSent())
.setAcceptedFilesCount(record.getRecordsAccepted())
.setRowsCount(0)
.setRowsSuccess(0)
.setRowsError(0)
.setFileId(record.getFileId());
}
}

View file

@ -4,8 +4,10 @@ import org.apache.kafka.clients.CommonClientConfigs;
import org.apache.kafka.clients.consumer.ConsumerConfig;
import org.apache.kafka.clients.producer.ProducerConfig;
import org.apache.kafka.common.config.SaslConfigs;
import org.apache.kafka.common.serialization.BytesDeserializer;
import org.apache.kafka.common.serialization.StringDeserializer;
import org.apache.kafka.common.serialization.StringSerializer;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
@ -54,63 +56,102 @@ public class ReplyingKafkaConfig {
private String saslMechanism;
@Value("${av.kafka.download.response}")
private String avReplyTopic;
@Value("${ervu.kafka.validate.export.reply.topic}")
private String validateReplyTopic;
@Bean("ervuProducerFactory")
@Bean
public ProducerFactory<String, String> producerFactory() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
Map<String, Object> configProps = commonProducerConfig();
configProps.put(ProducerConfig.VALUE_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
configProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
configProps.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\""
+ username + "\" password=\"" + password + "\";");
configProps.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
return new DefaultKafkaProducerFactory<>(configProps);
}
@Bean
public ConsumerFactory<String, String> consumerFactory() {
private Map<String, Object> commonProducerConfig() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId + "-" + UUID.randomUUID());
configProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
configProps.put(ProducerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
configProps.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\""
+ username + "\" password=\"" + password + "\";");
configProps.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\"" + username + "\" password=\"" + password + "\";");
configProps.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
configProps.put(ProducerConfig.KEY_SERIALIZER_CLASS_CONFIG, StringSerializer.class);
return configProps;
}
@Bean("ervuConsumerFactory")
public ConsumerFactory<String, String> ervuConsumerFactory() {
Map<String, Object> configProps = commonConsumerConfig();
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
return new DefaultKafkaConsumerFactory<>(configProps);
}
@Bean("validateConsumerFactory")
public ConsumerFactory<String, Bytes> validateConsumerFactory() {
Map<String, Object> configProps = commonConsumerConfig();
configProps.put(ConsumerConfig.VALUE_DESERIALIZER_CLASS_CONFIG, BytesDeserializer.class);
return new DefaultKafkaConsumerFactory<>(configProps);
}
private Map<String, Object> commonConsumerConfig() {
Map<String, Object> configProps = new HashMap<>();
configProps.put(ConsumerConfig.BOOTSTRAP_SERVERS_CONFIG, bootstrapServers);
configProps.put(ConsumerConfig.GROUP_ID_CONFIG, groupId + "-" + UUID.randomUUID());
configProps.put(CommonClientConfigs.SECURITY_PROTOCOL_CONFIG, securityProtocol);
configProps.put(SaslConfigs.SASL_JAAS_CONFIG, loginModule + " required username=\"" + username + "\" password=\"" + password + "\";");
configProps.put(SaslConfigs.SASL_MECHANISM, saslMechanism);
configProps.put(ConsumerConfig.AUTO_OFFSET_RESET_CONFIG, "latest");
configProps.put(ConsumerConfig.KEY_DESERIALIZER_CLASS_CONFIG, StringDeserializer.class);
return configProps;
}
@Bean("ervuContainerFactory")
public ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory =
new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(consumerFactory());
public ConcurrentKafkaListenerContainerFactory<String, String> ervuContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, String> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(ervuConsumerFactory());
return factory;
}
@Bean
public ConcurrentMessageListenerContainer<String, String> replyContainer(
@Qualifier("ervuContainerFactory")
ConcurrentKafkaListenerContainerFactory<String, String> kafkaListenerContainerFactory) {
return kafkaListenerContainerFactory.createContainer(orgReplyTopic, excerptReplyTopic,
journalReplyTopic, avReplyTopic
);
@Bean("validateContainerFactory")
public ConcurrentKafkaListenerContainerFactory<String, Bytes> validateContainerFactory() {
ConcurrentKafkaListenerContainerFactory<String, Bytes> factory = new ConcurrentKafkaListenerContainerFactory<>();
factory.setConsumerFactory(validateConsumerFactory());
return factory;
}
@Bean
public ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate(
@Qualifier("ervuProducerFactory") ProducerFactory<String, String> producerFactory,
ConcurrentMessageListenerContainer<String, String> replyContainer) {
ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate =
new ReplyingKafkaTemplate<>(producerFactory, replyContainer);
replyingKafkaTemplate.setCorrelationHeaderName("messageId");
replyingKafkaTemplate.setCorrelationIdStrategy(record ->
@Bean("ervuContainer")
public ConcurrentMessageListenerContainer<String, String> ervuContainer(
@Qualifier("ervuContainerFactory") ConcurrentKafkaListenerContainerFactory<String, String> factory) {
return factory.createContainer(orgReplyTopic, excerptReplyTopic,
journalReplyTopic, avReplyTopic);
}
@Bean("validateContainer")
public ConcurrentMessageListenerContainer<String, Bytes> validateContainer(
@Qualifier("validateContainerFactory") ConcurrentKafkaListenerContainerFactory<String, Bytes> factory) {
return factory.createContainer(validateReplyTopic);
}
@Bean("ervuReplyingTemplate")
public ReplyingKafkaTemplate<String, String, String> ervuReplyingTemplate(
ProducerFactory<String, String> producerFactory,
@Qualifier("ervuContainer") ConcurrentMessageListenerContainer<String, String> container) {
ReplyingKafkaTemplate<String, String, String> template = new ReplyingKafkaTemplate<>(producerFactory, container);
customizeTemplate(template);
return template;
}
@Bean("validateReplyingTemplate")
public ReplyingKafkaTemplate<String, String, Bytes> validateReplyingTemplate(
ProducerFactory<String, String> producerFactory,
@Qualifier("validateContainer") ConcurrentMessageListenerContainer<String, Bytes> container) {
ReplyingKafkaTemplate<String, String, Bytes> template = new ReplyingKafkaTemplate<>(producerFactory, container);
customizeTemplate(template);
return template;
}
private void customizeTemplate(ReplyingKafkaTemplate<?, ?, ?> template) {
template.setCorrelationHeaderName("messageId");
template.setCorrelationIdStrategy(record ->
new CorrelationKey(UUID.randomUUID().toString().getBytes(StandardCharsets.UTF_8)));
replyingKafkaTemplate.setDefaultReplyTimeout(Duration.ofSeconds(replyTimeout));
replyingKafkaTemplate.setSharedReplyTopic(true);
return replyingKafkaTemplate;
template.setDefaultReplyTimeout(Duration.ofSeconds(replyTimeout));
template.setSharedReplyTopic(true);
}
}
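For context, a minimal responder-side sketch (an assumption, not part of this commit): whatever service answers these requests has to publish its reply to the topic carried in the standard reply-topic header and echo the custom "messageId" correlation header set in customizeTemplate(). The KafkaTemplate<String, byte[]> bean and the buildReply() helper below are hypothetical; only the topic property name comes from this commit.

import java.nio.charset.StandardCharsets;

import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.springframework.kafka.annotation.KafkaListener;
import org.springframework.kafka.core.KafkaTemplate;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;

@Service
public class ValidateExportResponderSketch {

    private final KafkaTemplate<String, byte[]> kafkaTemplate;

    public ValidateExportResponderSketch(KafkaTemplate<String, byte[]> kafkaTemplate) {
        this.kafkaTemplate = kafkaTemplate;
    }

    @KafkaListener(topics = "${ervu.kafka.validate.export.request.topic}")
    public void onRequest(ConsumerRecord<String, String> request) {
        // Assumes the requesting template always sets both headers, as configured above.
        String replyTopic = new String(
                request.headers().lastHeader(KafkaHeaders.REPLY_TOPIC).value(),
                StandardCharsets.UTF_8);

        ProducerRecord<String, byte[]> reply =
                new ProducerRecord<>(replyTopic, buildReply(request.value()));
        // Echo the correlation header so the waiting ReplyingKafkaTemplate can match the reply.
        reply.headers().add(request.headers().lastHeader("messageId"));
        kafkaTemplate.send(reply);
    }

    private byte[] buildReply(String requestJson) {
        return new byte[0]; // placeholder for the real protobuf-encoded payload
    }
}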

View file

@ -31,7 +31,7 @@ import ru.micord.ervu.util.UrlUtils;
public class ErvuKafkaController {
@Autowired
private ReplyingKafkaService replyingKafkaService;
private ReplyingKafkaService<String, String> replyingKafkaService;
@Autowired
private AuditService auditService;

View file

@ -1,8 +1,9 @@
package ru.micord.ervu.kafka.service;
public interface ReplyingKafkaService {
String sendMessageAndGetReply(String requestTopic,
String requestReplyTopic,
String requestMessage);
}
public interface ReplyingKafkaService<T, V> {
V sendMessageAndGetReply(String requestTopic,
String replyTopic,
T requestMessage);
}

View file

@ -6,12 +6,10 @@ import java.util.concurrent.ExecutionException;
import org.apache.kafka.clients.consumer.ConsumerRecord;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.requestreply.RequestReplyFuture;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;
import ru.micord.ervu.kafka.exception.KafkaMessageException;
import ru.micord.ervu.kafka.exception.KafkaMessageReplyTimeoutException;
@ -21,35 +19,30 @@ import ru.micord.ervu.kafka.service.ReplyingKafkaService;
* @author Eduard Tihomirov
*/
@Service
public class BaseReplyingKafkaServiceImpl implements ReplyingKafkaService {
public abstract class BaseReplyingKafkaService<T, V> implements ReplyingKafkaService<T, V> {
private static final Logger LOGGER = LoggerFactory.getLogger(MethodHandles.lookup().lookupClass());
private final ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate;
public BaseReplyingKafkaServiceImpl(
ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate) {
this.replyingKafkaTemplate = replyingKafkaTemplate;
}
public String sendMessageAndGetReply(String requestTopic,
String replyTopic,
String requestMessage) {
@Override
public V sendMessageAndGetReply(String requestTopic, String replyTopic, T requestMessage) {
long startTime = System.currentTimeMillis();
ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, replyTopic.getBytes()));
RequestReplyFuture<String, String, String> replyFuture = replyingKafkaTemplate.sendAndReceive(record);
RequestReplyFuture<String, T, V> replyFuture = getTemplate().sendAndReceive(
getProducerRecord(requestTopic, replyTopic, requestMessage));
try {
String result = Optional.ofNullable(replyFuture.get())
.map(ConsumerRecord::value)
.orElseThrow(() -> new KafkaMessageException("Kafka return result is null."));
LOGGER.info("Thread {} - KafkaSendMessageAndGetReply: {} ms",
Thread.currentThread().getId(), System.currentTimeMillis() - startTime);
return result;
ConsumerRecord<String, V> result = Optional.ofNullable(replyFuture.get())
.orElseThrow(() -> new KafkaMessageException("Kafka return result is null"));
LOGGER.info("Thread {} - KafkaSendMessageAndGetReply: {} ms, replyTopic: {}",
Thread.currentThread().getId(), System.currentTimeMillis() - startTime, replyTopic);
return result.value();
}
catch (InterruptedException | ExecutionException e) {
LOGGER.error("Thread {} - KafkaSendMessageAndGetReply: {} ms",
Thread.currentThread().getId(), System.currentTimeMillis() - startTime);
LOGGER.error("Thread {} - KafkaSendMessageAndGetReply: {} ms, replyTopic: {}",
Thread.currentThread().getId(), System.currentTimeMillis() - startTime, replyTopic);
throw new KafkaMessageReplyTimeoutException(e);
}
}
protected abstract ReplyingKafkaTemplate<String, T, V> getTemplate();
protected abstract ProducerRecord<String, T> getProducerRecord(String requestTopic,
String replyTopic, T requestMessage);
}

View file

@ -0,0 +1,31 @@
package ru.micord.ervu.kafka.service.impl;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;
@Service
public class ErvuReplyingKafkaService extends BaseReplyingKafkaService<String, String> {
private final ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate;
public ErvuReplyingKafkaService(
@Qualifier("ervuReplyingTemplate") ReplyingKafkaTemplate<String, String, String> replyingKafkaTemplate) {
this.replyingKafkaTemplate = replyingKafkaTemplate;
}
@Override
protected ReplyingKafkaTemplate<String, String, String> getTemplate() {
return replyingKafkaTemplate;
}
@Override
protected ProducerRecord<String, String> getProducerRecord(String requestTopic, String replyTopic,
String requestMessage) {
ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, replyTopic.getBytes()));
return record;
}
}

View file

@ -0,0 +1,32 @@
package ru.micord.ervu.kafka.service.impl;
import org.apache.kafka.clients.producer.ProducerRecord;
import org.apache.kafka.common.header.internals.RecordHeader;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.kafka.requestreply.ReplyingKafkaTemplate;
import org.springframework.kafka.support.KafkaHeaders;
import org.springframework.stereotype.Service;
@Service
public class ValidateReplyingKafkaService extends BaseReplyingKafkaService<String, Bytes> {
private final ReplyingKafkaTemplate<String, String, Bytes> replyingKafkaTemplate;
public ValidateReplyingKafkaService(
@Qualifier("validateReplyingTemplate") ReplyingKafkaTemplate<String, String, Bytes> replyingKafkaTemplate) {
this.replyingKafkaTemplate = replyingKafkaTemplate;
}
@Override
protected ReplyingKafkaTemplate<String, String, Bytes> getTemplate() {
return replyingKafkaTemplate;
}
@Override
protected ProducerRecord<String, String> getProducerRecord(String requestTopic, String replyTopic,
String requestMessage) {
ProducerRecord<String, String> record = new ProducerRecord<>(requestTopic, requestMessage);
record.headers().add(new RecordHeader(KafkaHeaders.REPLY_TOPIC, replyTopic.getBytes()));
return record;
}
}

View file

@ -0,0 +1,4 @@
package ru.micord.ervu.model;
public record ValidateExportRequest(String orgId, String fileId) {
}

View file

@ -0,0 +1,28 @@
package ru.micord.ervu.model;
import org.apache.kafka.shaded.com.google.protobuf.ByteString;
import org.apache.kafka.shaded.com.google.protobuf.InvalidProtocolBufferException;
public class ValidateExportResponse {
private String fileName;
private ByteString file;
public ValidateExportResponse(byte[] bytes) throws InvalidProtocolBufferException {
// TODO: Replace ValidateExportResponseProto with the real generated protobuf class
// ValidateExportResponseProto protoResponse = ValidateExportResponseProto.parseFrom(bytes);
// this.fileName = protoResponse.getFileName();
// this.file = protoResponse.getFile();
}
public String getFileName() {
return fileName;
}
public ByteString getFile() {
return file;
}
public boolean hasFile() {
return file != null && !file.isEmpty();
}
}
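A minimal sketch of the parsing step the commented-out constructor body will perform once the generated class exists; com.google.protobuf.StringValue (from the protobuf-java dependency added in the root pom) stands in for the real ValidateExportResponseProto message, which this commit does not yet define.

import com.google.protobuf.InvalidProtocolBufferException;
import com.google.protobuf.StringValue;

public class ProtoParseSketch {
    public static void main(String[] args) throws InvalidProtocolBufferException {
        // Every generated message type exposes toByteArray() and parseFrom(byte[]).
        byte[] bytes = StringValue.of("journal-export").toByteArray();
        StringValue parsed = StringValue.parseFrom(bytes);
        System.out.println(parsed.getValue()); // prints: journal-export
    }
}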

View file

@ -7,5 +7,6 @@ public enum FilterType {
TEXT,
DATE,
NUMBER,
SET
SET,
FILE
}

View file

@ -86,7 +86,7 @@ public class EsiaAuthService {
@Autowired
private JwtTokenService jwtTokenService;
@Autowired
private ReplyingKafkaService replyingKafkaService;
private ReplyingKafkaService<String, String> replyingKafkaService;
@Autowired
private OkopfService okopfService;
@Autowired

View file

@ -41,8 +41,6 @@ public class InteractionServiceImpl implements InteractionService {
.set(INTERACTION_LOG.SENT_DATE, timestamp)
.set(INTERACTION_LOG.SENDER, sender)
.set(INTERACTION_LOG.FILE_NAME, fileName)
.set(INTERACTION_LOG.RECORDS_SENT, 0)
.set(INTERACTION_LOG.RECORDS_ACCEPTED, 0)
.set(INTERACTION_LOG.ERVU_ID, ervuId)
.execute();
}

View file

@ -0,0 +1,48 @@
package ru.micord.ervu.service;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.apache.kafka.common.utils.Bytes;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.stereotype.Service;
import ru.micord.ervu.exception.ExportException;
import ru.micord.ervu.kafka.service.ReplyingKafkaService;
import ru.micord.ervu.model.ValidateExportRequest;
import ru.micord.ervu.model.ValidateExportResponse;
/**
* @author Adel Kalimullin
*/
@Service
public class ValidationFileService {
private final ReplyingKafkaService<String, Bytes> replyingKafkaService;
private final ObjectMapper objectMapper;
private final String validateExportRequestTopic;
private final String validateExportReplyTopic;
public ValidationFileService(
ReplyingKafkaService<String, Bytes> replyingKafkaService,
ObjectMapper objectMapper,
@Value("${ervu.kafka.validate.export.request.topic}") String validateExportRequestTopic,
@Value("${ervu.kafka.validate.export.reply.topic}") String validateExportReplyTopic) {
this.replyingKafkaService = replyingKafkaService;
this.objectMapper = objectMapper;
this.validateExportRequestTopic = validateExportRequestTopic;
this.validateExportReplyTopic = validateExportReplyTopic;
}
public ValidateExportResponse exportFile(String ervuId, String fileId) {
try {
ValidateExportRequest validateExportRequest = new ValidateExportRequest(ervuId, fileId);
byte[] bytes = replyingKafkaService.sendMessageAndGetReply(
validateExportRequestTopic, validateExportReplyTopic,
objectMapper.writeValueAsString(validateExportRequest)
).get();
return new ValidateExportResponse(bytes);
}
catch (Exception e) {
throw new ExportException("Failed to export file: " + e.getMessage(), e);
}
}
}

View file

@ -5,6 +5,7 @@ import java.util.Comparator;
import java.util.List;
import java.util.HashSet;
import java.util.Optional;
import java.util.concurrent.atomic.AtomicInteger;
import java.util.stream.Stream;
import javax.servlet.http.Cookie;
@ -33,7 +34,7 @@ public class JournalInMemoryStaticGridLoadService implements
private final JwtTokenService jwtTokenService;
private final InteractionService interactionService;
private final ReplyingKafkaService replyingKafkaService;
private final ReplyingKafkaService<String, String> replyingKafkaService;
private final ObjectMapper objectMapper;
private final HttpServletRequest request;
@ -46,7 +47,7 @@ public class JournalInMemoryStaticGridLoadService implements
public JournalInMemoryStaticGridLoadService(JwtTokenService jwtTokenService,
InteractionService interactionService,
ReplyingKafkaService replyingKafkaService,
ReplyingKafkaService<String, String> replyingKafkaService,
ObjectMapper objectMapper, HttpServletRequest request) {
this.jwtTokenService = jwtTokenService;
this.interactionService = interactionService;
@ -80,10 +81,16 @@ public class JournalInMemoryStaticGridLoadService implements
throw new JsonParsingException("Failed to parse JournalFileDataResponse.", e);
}
AtomicInteger counter = new AtomicInteger(1);
HashSet<String> seenFileIds = new HashSet<>();
return Stream.concat(dbJournalList.stream(), ervuJournalList.stream())
.filter(journal -> seenFileIds.add(journal.getFileId()))
.sorted(Comparator.comparing(JournalDto::getDepartureDateTime))
.map(journal ->
journal
.setDocumentNumber(counter.getAndIncrement())
.setHasFailedRows(journal.getRowsError() != null && journal.getRowsError() > 0)
)
.toList();
}

View file

@ -1,5 +1,4 @@
import {
Event,
GridColumnIdUtils,
GridRow,
GridRowModelType,
@ -11,17 +10,19 @@ import {ChangeDetectionStrategy, Component} from "@angular/core";
import {
ColDef,
GridReadyEvent,
FilterChangedEvent,
ICellRendererParams,
ITooltipParams,
ValueFormatterParams,
ValueGetterParams
ValueGetterParams,
} from "ag-grid-community";
import {StaticColumnInitializer} from "./StaticColumnInitializer";
import {InMemoryStaticGridRpcService} from "../../../generated/ru/micord/ervu/service/rpc/InMemoryStaticGridRpcService";
import {
InMemoryStaticGridRpcService
} from "../../../generated/ru/micord/ervu/service/rpc/InMemoryStaticGridRpcService";
import {StaticGridColumn} from "../../../generated/ru/micord/ervu/property/grid/StaticGridColumn";
import { FilterService } from "../../service/FilterService";
import {FilterService} from "../../service/FilterService";
import {AuditConstants, AuditService, FilterInfo} from "../../service/AuditService";
import {ValidateFileService} from "../../service/ValidateFileService";
@Component({
@ -34,6 +35,7 @@ export class InMemoryStaticGrid extends GridV2 {
private rpcService: InMemoryStaticGridRpcService;
private auditService: AuditService;
private validateFileService: ValidateFileService;
getRowModelType(): string {
return GridRowModelType.CLIENT_SIDE;
@ -42,6 +44,7 @@ export class InMemoryStaticGrid extends GridV2 {
protected initGrid() {
super.initGrid();
this.auditService = this.injector.get(AuditService);
this.validateFileService = this.injector.get(ValidateFileService);
this.rpcService = this.getScript(InMemoryStaticGridRpcService);
if (this.rpcService) {
this.rpcService.loadData().then(response => {
@ -85,6 +88,10 @@ export class InMemoryStaticGrid extends GridV2 {
}
}
public downloadFile(fileId: string) {
this.validateFileService.exportFile(fileId);
}
getColumns(): any[] {
return this.getScriptsInChildren(GridV2Column)
.map(columnV2 => columnV2.getScript(StaticGridColumn));
@ -125,6 +132,13 @@ export class InMemoryStaticGrid extends GridV2 {
return columnComp.valueFormatter.format(params);
}
}
colDef.cellRendererParams = {
context : {
parentComponent: this
}
};
return colDef;
}

View file

@ -1,6 +1,7 @@
import {FilterType} from "../../../../generated/ru/micord/ervu/property/grid/FilterType";
import {DateFilter, NumberFilter, TextFilter} from "ag-grid-community";
import {SetFilter} from "./SetFilter";
import {FileAvailableFilterComp} from "./FileAvailableFilterComp";
export class CustomGridColumnFilterUtils {
@ -16,6 +17,8 @@ export class CustomGridColumnFilterUtils {
return DateFilter;
case FilterType.SET:
return SetFilter;
case FilterType.FILE:
return FileAvailableFilterComp;
case FilterType.TEXT:
default:
return TextFilter;

View file

@ -0,0 +1,77 @@
import {BaseBooleanComboBoxFilter} from "@webbpm/base-package";
import {IFilterParams, IDoesFilterPassParams, IFilterComp} from "ag-grid-community";
export class FileAvailableFilterComp extends BaseBooleanComboBoxFilter implements IFilterComp {
private filterValue: boolean | undefined = undefined;
private params!: IFilterParams;
init(params: IFilterParams): void {
this.params = params;
this.createComboBox("ag-combobox-file-filter");
this.populateComboBoxWithFixedValues();
this.comboBox.addEventListener("change", () => this.onComboBoxChanged());
}
protected populateComboBoxWithFixedValues(): void {
const options = [
{ label: "Все", value: undefined },
{ label: "Файл присутствует", value: true },
{ label: "Файл отсутствует", value: false }
];
this.comboBox.innerHTML = "";
options.forEach(({ label }) => {
const option = document.createElement("option");
option.textContent = label;
this.comboBox.appendChild(option);
});
}
protected onComboBoxChanged(): void {
const selectedIndex = this.comboBox.selectedIndex;
this.filterValue =
selectedIndex === 1 ? true :
selectedIndex === 2 ? false :
undefined;
this.params.filterChangedCallback();
}
doesFilterPass(params: IDoesFilterPassParams): boolean {
const cellValue = params.data[this.params.colDef.field!];
return this.filterValue === undefined || cellValue === this.filterValue;
}
isFilterActive(): boolean {
return this.filterValue !== undefined;
}
getModel(): any {
return this.filterValue === undefined
? undefined
: { filter: this.filterValue, type: "equals" };
}
setModel(model: any): void {
this.filterValue = model ? model.filter : undefined;
if (this.filterValue === true) {
this.comboBox.selectedIndex = 1;
}
else if (this.filterValue === false) {
this.comboBox.selectedIndex = 2;
}
else {
this.comboBox.selectedIndex = 0;
}
this.params.filterChangedCallback();
}
getGui(): HTMLElement {
return this.eGui;
}
}

View file

@ -0,0 +1,36 @@
import {ICellRendererParams} from "ag-grid-community";
import {GridCellValueRenderer} from "@webbpm/base-package";
export class FileDownloadCellRenderer implements GridCellValueRenderer {
render(params: ICellRendererParams): HTMLElement {
const container = document.createElement('div');
container.className = 'download-cell-renderer';
const button = document.createElement('button');
button.className = 'download-btn';
button.innerHTML = '<i class="fa fa-download"></i>';
button.title = params.data.fileName || 'Скачать файл';
if (!params.value) {
button.style.display = 'none';
}
button.addEventListener('click', () => {
const fileId = params.data.fileId;
if (!fileId) {
return;
}
const parentComponent = params.context.parentComponent;
if (parentComponent) {
parentComponent.downloadFile(fileId);
}
button.blur();
});
container.appendChild(button);
return container;
}
}

View file

@ -1,6 +1,7 @@
import {DateFilter, NumberFilter, TextFilter} from "ag-grid-community";
import {SetFilter} from "../component/grid/filter/SetFilter";
import {FilterInfo} from "./AuditService";
import {FileAvailableFilterComp} from "../component/grid/filter/FileAvailableFilterComp";
export class FilterService {
static getFilterData(columnDef: any, agFilter: any): FilterInfo {
@ -15,6 +16,8 @@ export class FilterService {
return this.processSetFilter(agFilter);
case TextFilter:
return this.processTextFilter(agFilter);
case FileAvailableFilterComp:
return this.processFileAvailableFilter(agFilter);
default:
return;
}
@ -63,6 +66,20 @@ export class FilterService {
return this.createDualConditionData(agFilter);
}
private static processFileAvailableFilter(agFilter: any): FilterInfo {
const displayValue = agFilter.filter
? "Файл присутствует"
: "Файл отсутствует";
return {
conditionOperator: undefined,
conditions: [{
filterValue: displayValue,
filterType: "equals"
}]
};
}
private static createSingleConditionData(
filterValue: string,
filterType: string,

View file

@ -0,0 +1,34 @@
import {Injectable} from "@angular/core";
import {HttpClient} from "@angular/common/http";
import {MessagesService} from "@webbpm/base-package";
@Injectable({
providedIn: "root"
})
export class ValidateFileService {
constructor(private httpClient: HttpClient,
private messageService: MessagesService) {
}
public exportFile(fileId: string) {
this.httpClient.get('validate/export/' + fileId, {
responseType: 'blob',
observe: 'response'
}).toPromise()
.then((response) => {
const data = window.URL.createObjectURL(response.body);
const link = document.createElement("a");
link.href = data;
const contentDisposition = response.headers.get('Content-Disposition');
const fileNameMatch = contentDisposition ? contentDisposition.match(/filename\*=?UTF-8''(.+)/i) : null;
link.download = fileNameMatch ? decodeURIComponent(fileNameMatch[1].replace(/\+/g, '%20')) : fileId;
document.body.appendChild(link);
link.click();
window.URL.revokeObjectURL(data);
link.remove();
})
.catch(() => {
this.messageService.error("Не удалось скачать файл");
});
}
}

View file

@ -30,6 +30,7 @@ import {AuditService} from "../../ervu/service/AuditService";
import {
ErvuFileUploadWithAdditionalFiles
} from "../../ervu/component/fileupload/ErvuFileUploadWithAdditionalFiles";
import {ValidateFileService} from "../../ervu/service/ValidateFileService";
registerLocaleData(localeRu);
export const DIRECTIVES = [
@ -70,7 +71,7 @@ export function checkAuthentication(authService: AuthenticationService): () => P
DIRECTIVES
],
providers: [
AuthenticationService, AuditService,
AuthenticationService, AuditService, ValidateFileService,
{
provide: APP_INITIALIZER,
useFactory: checkAuthentication,

pom.xml
View file

@ -270,6 +270,16 @@
<version>1.1.10.7</version>
<scope>runtime</scope>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java</artifactId>
<version>4.28.3</version>
</dependency>
<dependency>
<groupId>com.google.protobuf</groupId>
<artifactId>protobuf-java-util</artifactId>
<version>4.28.3</version>
</dependency>
<dependency>
<groupId>org.slf4j</groupId>
<artifactId>slf4j-api</artifactId>
@ -392,6 +402,11 @@
</archive>
</configuration>
</plugin>
<plugin>
<groupId>org.xolstice.maven.plugins</groupId>
<artifactId>protobuf-maven-plugin</artifactId>
<version>0.6.1</version>
</plugin>
</plugins>
</pluginManagement>
<plugins>

View file

@ -982,11 +982,106 @@
</entry>
</properties>
</scripts>
<children id="7094b2c2-60c7-43d0-84d7-3adbd3c77fe9">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>7094b2c2-60c7-43d0-84d7-3adbd3c77fe9</componentRootId>
<name>№П/П</name>
<container>false</container>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
<entry>
<key>floatingFilter</key>
<value>
<simple>false</simple>
</value>
</entry>
</properties>
</scripts>
<scripts id="0a01c185-920b-4328-a82d-277f917b185e">
<properties>
<entry>
<key>autoHeight</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
<simple>"№П/П"</simple>
</value>
</entry>
<entry>
<key>displayPopup</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>field</key>
<value>
<complex>
<entry>
<key>column</key>
<value>
<simple>"documentNumber"</simple>
</value>
</entry>
<entry>
<key>filterType</key>
<value>
<simple>"NUMBER"</simple>
</value>
</entry>
<entry>
<key>type</key>
<value>
<simple>"java.lang.Number"</simple>
</value>
</entry>
</complex>
</value>
</entry>
<entry>
<key>filter</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>pinned</key>
<value>
<simple>"LEFT"</simple>
</value>
</entry>
<entry>
<key>sortable</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>width</key>
<value>
<simple>50</simple>
</value>
</entry>
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>
</properties>
</scripts>
</children>
<children id="9b895ce1-494f-4f2e-b87b-78d019fc3760">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>9b895ce1-494f-4f2e-b87b-78d019fc3760</componentRootId>
<name>Дата и время направления</name>
<container>false</container>
<expanded>false</expanded>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
@ -1064,7 +1159,7 @@
<entry>
<key>filter</key>
<value>
<simple>true</simple>
<simple>null</simple>
</value>
</entry>
<entry>
@ -1118,6 +1213,12 @@
<simple>true</simple>
</value>
</entry>
<entry>
<key>disableHiding</key>
<value>
<simple>null</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
@ -1489,7 +1590,7 @@
<entry>
<key>column</key>
<value>
<simple>"filesSentCount"</simple>
<simple>"rowsCount"</simple>
</value>
</entry>
<entry>
@ -1539,6 +1640,7 @@
<componentRootId>4c070d5d-cac7-4cc4-8ee4-e9a4b9b289a5</componentRootId>
<name>Записей принято</name>
<container>false</container>
<expanded>false</expanded>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
@ -1577,7 +1679,7 @@
<entry>
<key>column</key>
<value>
<simple>"acceptedFilesCount"</simple>
<simple>"rowsSuccess"</simple>
</value>
</entry>
<entry>
@ -1616,6 +1718,293 @@
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>
</properties>
</scripts>
</children>
<children id="7189ce80-0090-44c9-a8ba-b3860b1fefb8">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>7189ce80-0090-44c9-a8ba-b3860b1fefb8</componentRootId>
<name>Записей не принято</name>
<container>false</container>
<expanded>false</expanded>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
<entry>
<key>floatingFilter</key>
<value>
<simple>false</simple>
</value>
</entry>
</properties>
</scripts>
<scripts id="0a01c185-920b-4328-a82d-277f917b185e">
<properties>
<entry>
<key>autoHeight</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
<simple>"Записей не принято"</simple>
</value>
</entry>
<entry>
<key>displayPopup</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>field</key>
<value>
<complex>
<entry>
<key>column</key>
<value>
<simple>"rowsError"</simple>
</value>
</entry>
<entry>
<key>filterType</key>
<value>
<simple>"NUMBER"</simple>
</value>
</entry>
<entry>
<key>type</key>
<value>
<simple>"java.lang.Number"</simple>
</value>
</entry>
</complex>
</value>
</entry>
<entry>
<key>filter</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>sortable</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>width</key>
<value>
<simple>80</simple>
</value>
</entry>
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>
</properties>
</scripts>
</children>
<children id="33376e71-d633-464c-bd0a-da95caeae54b">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>33376e71-d633-464c-bd0a-da95caeae54b</componentRootId>
<name>Загрузка файла</name>
<container>false</container>
<expanded>false</expanded>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
<entry>
<key>floatingFilter</key>
<value>
<simple>false</simple>
</value>
</entry>
<entry>
<key>renderer</key>
<value>
<implRef type="TS">
<className>FileDownloadCellRenderer</className>
<packageName>ervu.component.grid.renderer</packageName>
</implRef>
</value>
</entry>
</properties>
</scripts>
<scripts id="0a01c185-920b-4328-a82d-277f917b185e">
<properties>
<entry>
<key>autoHeight</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
<simple>"Описание ошибки"</simple>
</value>
</entry>
<entry>
<key>displayPopup</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>field</key>
<value>
<complex>
<entry>
<key>column</key>
<value>
<simple>"hasFailedRows"</simple>
</value>
</entry>
<entry>
<key>filterType</key>
<value>
<simple>"FILE"</simple>
</value>
</entry>
<entry>
<key>type</key>
<value>
<simple>"java.lang.Boolean"</simple>
</value>
</entry>
</complex>
</value>
</entry>
<entry>
<key>filter</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>hidden</key>
<value>
<simple>null</simple>
</value>
</entry>
<entry>
<key>sortable</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>width</key>
<value>
<simple>80</simple>
</value>
</entry>
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>
</properties>
</scripts>
</children>
<children id="438864af-918d-4264-bc37-192f94263673">
<prototypeId>c556264f-221b-4af8-9e64-f380a67c41ec</prototypeId>
<componentRootId>438864af-918d-4264-bc37-192f94263673</componentRootId>
<name>id Файла(скрытое)</name>
<container>false</container>
<childrenReordered>false</childrenReordered>
<scripts id="9c5c7a86-dc40-4b30-a5a7-5e7b4c7ea1e1">
<properties>
<entry>
<key>floatingFilter</key>
<value>
<simple>false</simple>
</value>
</entry>
</properties>
</scripts>
<scripts id="0a01c185-920b-4328-a82d-277f917b185e">
<properties>
<entry>
<key>autoHeight</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>displayName</key>
<value>
<simple>"id файла"</simple>
</value>
</entry>
<entry>
<key>displayPopup</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>field</key>
<value>
<complex>
<entry>
<key>column</key>
<value>
<simple>"fileId"</simple>
</value>
</entry>
<entry>
<key>filterType</key>
<value>
<simple>"TEXT"</simple>
</value>
</entry>
<entry>
<key>type</key>
<value>
<simple>"java.lang.String"</simple>
</value>
</entry>
</complex>
</value>
</entry>
<entry>
<key>filter</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>hidden</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>sortable</key>
<value>
<simple>true</simple>
</value>
</entry>
<entry>
<key>width</key>
<value>
<simple>null</simple>
</value>
</entry>
<entry>
<key>widthFixed</key>
<value>
<simple>null</simple>
</value>
</entry>