Merge remote-tracking branch 'origin/hotfix/1.9.1' into feateure/SUPPORT-8653_fixes

# Conflicts:
#	backend/pom.xml
kochetkov 2024-11-08 14:23:29 +03:00
commit b49d003498
14 changed files with 224 additions and 175 deletions

View file

@@ -179,8 +179,8 @@
<artifactId>postgresql</artifactId>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<groupId>com.github.lookfirst</groupId>
<artifactId>sardine</artifactId>
</dependency>
</dependencies>
<build>
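
This dependency swap drops the AWS S3 SDK in favour of the Sardine WebDAV client used by the new code below. As an orientation for reviewers unfamiliar with the library, here is a minimal sketch of the Sardine calls this change relies on (begin, put, exists, list, delete, shutdown); the endpoint and credentials are placeholders, not values from this repository.

import java.io.IOException;
import java.util.List;
import com.github.sardine.DavResource;
import com.github.sardine.Sardine;
import com.github.sardine.SardineFactory;

public class SardineSketch {
    public static void main(String[] args) throws IOException {
        // Placeholder WebDAV endpoint and credentials, for illustration only.
        String base = "http://webdav.example.local/upload/";
        Sardine sardine = SardineFactory.begin("user", "secret");
        try {
            // PUT a payload and confirm it landed, as WebDavClient.uploadFile does below.
            sardine.put(base + "demo.txt", "hello".getBytes());
            System.out.println("exists: " + sardine.exists(base + "demo.txt"));
            // List the collection and delete the file, as deleteFilesOlderThan does below.
            List<DavResource> resources = sardine.list(base);
            resources.forEach(r -> System.out.println(r.getName() + " modified " + r.getModified()));
            sardine.delete(base + "demo.txt");
        }
        finally {
            sardine.shutdown();
        }
    }
}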

View file

@@ -1,48 +0,0 @@
package ervu.client.fileupload;
import java.io.IOException;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.retry.annotation.Backoff;
import org.springframework.retry.annotation.Retryable;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;
/**
* @author Alexandr Shalaginov
*/
@Component
public class FileUploadWebDavClient {
private static final Logger logger = LoggerFactory.getLogger(FileUploadWebDavClient.class);
@Retryable(value = {IOException.class, InterruptedException.class}, backoff = @Backoff(delay = 500L))
public boolean webDavUploadFile(String url, String username, String password, MultipartFile multipartFile) {
try {
HttpClient httpClient = HttpClient.newBuilder()
.authenticator(new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password.toCharArray());
}
})
.build();
HttpRequest httpRequest = HttpRequest.newBuilder().uri(URI.create(url))
.PUT(HttpRequest.BodyPublishers.ofByteArray(multipartFile.getBytes())).build();
HttpResponse<String> response = httpClient.send(httpRequest, HttpResponse.BodyHandlers.ofString());
logger.debug("Response status code: {}", response.statusCode());
return (response.statusCode() >= 200) && (response.statusCode() <= 202);
}
catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
}
}

View file

@@ -0,0 +1,161 @@
package ervu.client.fileupload;
import java.io.IOException;
import java.io.InputStream;
import java.net.Authenticator;
import java.net.PasswordAuthentication;
import java.net.URI;
import java.net.URLEncoder;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpResponse;
import java.nio.charset.StandardCharsets;
import java.util.Arrays;
import java.util.List;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.function.Predicate;
import com.github.sardine.DavResource;
import com.github.sardine.Sardine;
import com.github.sardine.SardineFactory;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.retry.annotation.Backoff;
import org.springframework.retry.annotation.Retryable;
import org.springframework.stereotype.Component;
import org.springframework.web.multipart.MultipartFile;
/**
* @author Alexandr Shalaginov
*/
@Component
public class WebDavClient {
private static final Logger logger = LoggerFactory.getLogger(WebDavClient.class);
@Value("${file.webdav.upload.username}")
private String username;
@Value("${file.webdav.upload.password}")
private String password;
@Retryable(value = {IOException.class}, backoff = @Backoff(delayExpression = "${webdav.retry.delay:500}"))
public boolean uploadFile(String url, MultipartFile multipartFile) {
Sardine sardine = initClient(username, password);
try {
sardine.put(url, multipartFile.getBytes());
return sardine.exists(url);
}
catch (IOException e) {
throw new RuntimeException("Failed to put or check file in WebDAV", e);
}
finally {
try {
sardine.shutdown();
}
catch (IOException e) {
logger.error("Failed to shutdown WebDAV client", e);
}
}
}
@Retryable(value = {IOException.class, InterruptedException.class},
backoff = @Backoff(delayExpression = "${webdav.retry.delay:500}"))
public ResponseEntity<Resource> webDavDownloadFile(String url) {
try {
HttpClient httpClient = HttpClient.newBuilder()
.authenticator(new Authenticator() {
@Override
protected PasswordAuthentication getPasswordAuthentication() {
return new PasswordAuthentication(username, password.toCharArray());
}
}).build();
HttpRequest httpRequest = HttpRequest.newBuilder().uri(URI.create(url))
.GET().build();
HttpResponse<InputStream> response = httpClient.send(httpRequest,
HttpResponse.BodyHandlers.ofInputStream()
);
if (response.statusCode() == 200) {
InputStreamResource resource = new InputStreamResource(response.body());
String encodedFilename = URLEncoder.encode(getFilenameFromUrl(url), StandardCharsets.UTF_8);
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION,
"attachment; filename*=UTF-8''" + encodedFilename
)
.contentType(MediaType.APPLICATION_OCTET_STREAM)
.body(resource);
}
else {
logger.error("Failed to download file: HTTP status code {}", response.statusCode());
return ResponseEntity.status(response.statusCode()).build();
}
}
catch (IOException | InterruptedException e) {
throw new RuntimeException(e);
}
}
private String getFilenameFromUrl(String url) {
String path = URI.create(url).getPath();
return path.substring(path.lastIndexOf('/') + 1);
}
@Retryable(value = {IOException.class}, backoff = @Backoff(delayExpression = "${webdav.retry.delay:500}"))
public void deleteFilesOlderThan(long seconds, String url, String username, String password,
String... extensions) {
Sardine sardine = initClient(username, password);
try {
List<DavResource> resources = sardine.list(url);
resources.stream().filter(getPredicate(extensions))
.forEach(davResource -> {
long age = System.currentTimeMillis() - davResource.getModified().getTime();
if (age > seconds * 1000) {
try {
sardine.delete(url + davResource.getPath());
}
catch (IOException e) {
throw new RuntimeException("Failed to delete file " + davResource.getName(), e);
}
}
});
}
catch (IOException e) {
throw new RuntimeException("Failed to delete old files from WebDAV", e);
}
finally {
try {
sardine.shutdown();
}
catch (IOException e) {
logger.error("Failed to shutdown WebDAV client", e);
}
}
}
private Sardine initClient(String username, String password) {
return SardineFactory.begin(username, password);
}
private Predicate<DavResource> getPredicate(String... extensions) {
if (extensions.length == 0) {
return davResource -> true;
}
return davResource -> {
AtomicBoolean condition = new AtomicBoolean(false);
Arrays.stream(extensions).forEach(extension -> condition.set(
condition.get() || davResource.getName().endsWith(extension)));
return condition.get();
};
}
}
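
The retry behaviour of WebDavClient hinges on Spring Retry's @Retryable with delayExpression = "${webdav.retry.delay:500}". That annotation is inert unless retry support is enabled in the application context; this diff does not show that wiring, so the snippet below is only a sketch with a hypothetical class name, assuming @EnableRetry is not already configured elsewhere in the project.

import org.springframework.context.annotation.Configuration;
import org.springframework.retry.annotation.EnableRetry;

// Hypothetical configuration class: @Retryable on WebDavClient only takes effect
// once Spring Retry is enabled somewhere in the application context.
@Configuration
@EnableRetry
public class RetrySupportConfig {
}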

View file

@@ -9,7 +9,7 @@ import java.util.UUID;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import ervu.client.fileupload.FileUploadWebDavClient;
import ervu.client.fileupload.WebDavClient;
import ervu.model.fileupload.DownloadResponse;
import ervu.model.fileupload.EmployeeInfoFileFormType;
import ervu.model.fileupload.EmployeeInfoKafkaMessage;
@@ -43,7 +43,7 @@ public class EmployeeInfoFileUploadService {
private static final Logger logger = LoggerFactory.getLogger(EmployeeInfoFileUploadService.class);
private static final String FORMAT = "dd.MM.yyyy HH:mm:ss";
private final FileUploadWebDavClient fileWebDavUploadClient;
private final WebDavClient webDavClient;
private final EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService;
private final KafkaTemplate<String, String> kafkaTemplate;
private final InteractionService interactionService;
@@ -54,18 +54,14 @@ public class EmployeeInfoFileUploadService {
private String kafkaTopicName;
@Value("${file.webdav.upload.url:http://localhost:5757}")
private String url;
@Value("${file.webdav.upload.username}")
private String username;
@Value("${file.webdav.upload.password}")
private String password;
public EmployeeInfoFileUploadService(
FileUploadWebDavClient fileWebDavUploadClient,
WebDavClient webDavClient,
EmployeeInfoKafkaMessageService employeeInfoKafkaMessageService,
@Qualifier("avTemplate") KafkaTemplate<String, String> kafkaTemplate,
InteractionService interactionService,
UlDataService ulDataService, JwtTokenService jwtTokenService) {
this.fileWebDavUploadClient = fileWebDavUploadClient;
this.webDavClient = webDavClient;
this.employeeInfoKafkaMessageService = employeeInfoKafkaMessageService;
this.kafkaTemplate = kafkaTemplate;
this.interactionService = interactionService;
@@ -77,7 +73,7 @@ public class EmployeeInfoFileUploadService {
String fileUploadUrl = this.url + "/" + getNewFilename(multipartFile.getOriginalFilename());
LocalDateTime now = LocalDateTime.now();
if (this.fileWebDavUploadClient.webDavUploadFile(fileUploadUrl, username, password, multipartFile)) {
if (this.webDavClient.uploadFile(fileUploadUrl, multipartFile)) {
FileStatus fileStatus = new FileStatus();
fileStatus.setStatus("Загрузка.");
fileStatus.setCode("01");

View file

@@ -1,4 +1,4 @@
package ervu.service.scheduer;
package ervu.service.scheduler;
/**
* @author Artyom Hackimullin

View file

@@ -1,4 +1,4 @@
package ervu.service.scheduer;
package ervu.service.scheduler;
import java.util.Arrays;
import java.util.List;

View file

@@ -0,0 +1,37 @@
package ervu.service.scheduler;
import ervu.client.fileupload.WebDavClient;
import net.javacrumbs.shedlock.core.SchedulerLock;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.scheduling.annotation.Scheduled;
import org.springframework.stereotype.Service;
/**
* @author gulnaz
*/
@Service
public class WebDavSchedulerService {
private final WebDavClient webDavClient;
@Value("${file.webdav.upload.url}")
private String url;
@Value("${file.webdav.upload.username}")
private String username;
@Value("${file.webdav.upload.password}")
private String password;
@Value("${file.webdav.lifetime.seconds:300}")
private long fileLifetime;
@Value("${file.webdav.extensions:}")
private String[] fileExtensions;
public WebDavSchedulerService(WebDavClient webDavClient) {
this.webDavClient = webDavClient;
}
@Scheduled(cron = "${webdav.cleanup.cron:0 0 0 * * *}")
@SchedulerLock
public void deleteOldFiles() {
webDavClient.deleteFilesOlderThan(fileLifetime, url, username, password, fileExtensions);
}
}
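
WebDavSchedulerService combines Spring's @Scheduled with ShedLock's @SchedulerLock, but ShedLock only coordinates concurrent instances when a LockProvider bean is registered and lock support is enabled, none of which appears in this diff. The sketch below shows one common way to wire it, assuming a ShedLock version with annotation-based Spring support and a JDBC-backed lock table; the class name, lock duration, and provider choice are hypothetical.

import javax.sql.DataSource;
import net.javacrumbs.shedlock.core.LockProvider;
import net.javacrumbs.shedlock.provider.jdbctemplate.JdbcTemplateLockProvider;
import net.javacrumbs.shedlock.spring.annotation.EnableSchedulerLock;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.scheduling.annotation.EnableScheduling;

// Hypothetical ShedLock wiring: without a LockProvider, @SchedulerLock in
// WebDavSchedulerService does not prevent overlapping runs across instances.
@Configuration
@EnableScheduling
@EnableSchedulerLock(defaultLockAtMostFor = "PT10M")
public class SchedulerLockConfig {
    @Bean
    public LockProvider lockProvider(DataSource dataSource) {
        // Assumes the lock table required by the JDBC provider exists.
        return new JdbcTemplateLockProvider(dataSource);
    }
}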

View file

@@ -4,6 +4,7 @@ import javax.servlet.http.Cookie;
import javax.servlet.http.HttpServletRequest;
import com.fasterxml.jackson.databind.ObjectMapper;
import ervu.client.fileupload.WebDavClient;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
@@ -13,7 +14,6 @@ import org.springframework.web.bind.annotation.RestController;
import ru.micord.ervu.kafka.model.Data;
import ru.micord.ervu.kafka.model.ExcerptResponse;
import ru.micord.ervu.kafka.service.ReplyingKafkaService;
import ru.micord.ervu.s3.S3Service;
import ru.micord.ervu.security.webbpm.jwt.model.Token;
import ru.micord.ervu.security.webbpm.jwt.service.JwtTokenService;
@@ -27,7 +27,7 @@ public class ErvuKafkaController {
private ReplyingKafkaService replyingKafkaService;
@Autowired
private S3Service s3Service;
private WebDavClient webDavClient;
@Autowired
private JwtTokenService jwtTokenService;
@@ -56,12 +56,11 @@ public class ErvuKafkaController {
objectMapper.writeValueAsString(data)
);
ExcerptResponse excerptResponse = objectMapper.readValue(kafkaResponse, ExcerptResponse.class);
return s3Service.getFile(excerptResponse.getFileUrl());
return webDavClient.webDavDownloadFile(excerptResponse.getFileUrl());
}
catch (Exception e) {
throw new RuntimeException(e);
}
}
private String getAuthToken(HttpServletRequest request) {

View file

@@ -1,44 +0,0 @@
package ru.micord.ervu.s3;
import com.amazonaws.auth.AWSCredentials;
import com.amazonaws.auth.AWSStaticCredentialsProvider;
import com.amazonaws.auth.BasicAWSCredentials;
import com.amazonaws.client.builder.AwsClientBuilder;
import com.amazonaws.regions.Region;
import com.amazonaws.regions.Regions;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3ClientBuilder;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
/**
* @author Eduard Tihomirov
*/
@Configuration
public class S3Connection {
@Value("${s3.endpoint}")
private String endpoint;
@Value("${s3.access_key}")
private String accessKey;
@Value("${s3.secret_key}")
private String secretKey;
@Value("${s3.path.style.access.enabled:true}")
private boolean pathStyleAccessEnabled;
@Bean("outClient")
public AmazonS3 getS3OutClient() {
return getS3Client(endpoint, accessKey, secretKey, pathStyleAccessEnabled);
}
private static AmazonS3 getS3Client(String endpoint, String accessKey, String secretKey, Boolean pathStyleAccessEnabled) {
AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
String region = Region.getRegion(Regions.DEFAULT_REGION).toString();
return AmazonS3ClientBuilder.standard()
.withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, region))
.withCredentials(new AWSStaticCredentialsProvider(credentials))
.withPathStyleAccessEnabled(pathStyleAccessEnabled)
.build();
}
}

View file

@@ -1,48 +0,0 @@
package ru.micord.ervu.s3;
import java.net.URLEncoder;
import java.nio.charset.StandardCharsets;
import com.amazonaws.AmazonServiceException;
import com.amazonaws.services.s3.AmazonS3;
import com.amazonaws.services.s3.AmazonS3URI;
import com.amazonaws.services.s3.model.S3Object;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.InputStreamResource;
import org.springframework.core.io.Resource;
import org.springframework.http.HttpHeaders;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service;
/**
* @author Eduard Tihomirov
*/
@Service
public class S3Service {
private final AmazonS3 outClient;
@Autowired
public S3Service(AmazonS3 outClient) {
this.outClient = outClient;
}
public ResponseEntity<Resource> getFile(String fileUrl) {
try {
if (fileUrl == null || fileUrl.isEmpty()) {
return ResponseEntity.noContent().build();
}
AmazonS3URI uri = new AmazonS3URI(fileUrl);
S3Object s3Object = outClient.getObject(uri.getBucket(), uri.getKey());
InputStreamResource resource = new InputStreamResource(s3Object.getObjectContent());
String encodedFilename = URLEncoder.encode(uri.getKey(), StandardCharsets.UTF_8);
return ResponseEntity.ok()
.header(HttpHeaders.CONTENT_DISPOSITION, "attachment; filename*=UTF-8''" + encodedFilename)
.contentLength(s3Object.getObjectMetadata().getContentLength())
.contentType(MediaType.APPLICATION_OCTET_STREAM)
.body(resource);
}
catch (AmazonServiceException e) {
throw new RuntimeException(e);
}
}
}

View file

@@ -813,10 +813,4 @@ JBPM использует 3 корневых категории логирова
- `ERVU_KAFKA_JOURNAL_REPLY_TOPIC` - topic for reading interaction journal data
- `ERVU_KAFKA_EXCERPT_REQUEST_TOPIC` - topic for writing a request to obtain an interaction journal excerpt
- `ERVU_KAFKA_EXCERPT_REPLY_TOPIC` - topic for reading the interaction journal excerpt. Contains an S3 link to the excerpt file
- `DB.JOURNAL.EXCLUDED.STATUSES` - file statuses to exclude when fetching interaction journal data from the application database
#### Interaction with S3
- `S3_ENDPOINT` - URL for connecting to S3
- `S3_ACCESS_KEY` - the public part of the AWS credentials
- `S3_SECRET_KEY` - the private part of the AWS key pair
- `DB.JOURNAL.EXCLUDED.STATUSES` - file statuses to exclude when fetching interaction journal data from the application database

View file

@@ -48,9 +48,10 @@ ERVU_FILE_UPLOAD_MAX_FILE_SIZE=5242880
ERVU_FILE_UPLOAD_MAX_REQUEST_SIZE=6291456
ERVU_FILE_UPLOAD_FILE_SIZE_THRESHOLD=0
S3_ENDPOINT=http://ervu-minio.k8s.micord.ru:31900
S3_ACCESS_KEY=rlTdTvkmSXu9FsLhfecw
S3_SECRET_KEY=NUmY0wwRIEyAd98GCKd1cOgJWvLQYAcMMul5Ulu0
ESIA_TOKEN_CLEAR_CRON=0 0 */1 * * *
COOKIE_PATH=/ul
COOKIE_PATH=/ul
WEBDAV_CLEANUP_CRON=0 0 0 * * *
WEBDAV_RETRY_DELAY=500
FILE_WEBDAV_LIFETIME_SECONDS=300
FILE_WEBDAV_EXTENSIONS=csv,xlsx
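
The new environment variables above presumably map, by name, onto the system properties added in the next hunk below; the templating that performs that mapping is not part of this diff. A small sketch of where each setting is consumed, with the same defaults the annotations fall back to when a property is absent:

public class WebDavSettingsSketch {
    // WEBDAV_CLEANUP_CRON          -> webdav.cleanup.cron          -> @Scheduled cron in WebDavSchedulerService
    // WEBDAV_RETRY_DELAY           -> webdav.retry.delay           -> @Backoff delayExpression in WebDavClient
    // FILE_WEBDAV_LIFETIME_SECONDS -> file.webdav.lifetime.seconds -> @Value file lifetime in WebDavSchedulerService
    // FILE_WEBDAV_EXTENSIONS       -> file.webdav.extensions       -> @Value extension filter in WebDavSchedulerService
    public static void main(String[] args) {
        System.out.println(System.getProperty("webdav.cleanup.cron", "0 0 0 * * *"));
        System.out.println(System.getProperty("webdav.retry.delay", "500"));
        System.out.println(System.getProperty("file.webdav.lifetime.seconds", "300"));
        System.out.println(System.getProperty("file.webdav.extensions", ""));
    }
}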

View file

@@ -89,13 +89,14 @@
<property name="db.journal.excluded.statuses" value="Направлено в ЕРВУ,Получен ЕРВУ"/>
<property name="ervu.kafka.excerpt.reply.topic" value="ervu.lkrp.excerpt.response"/>
<property name="ervu.kafka.excerpt.request.topic" value="ervu.lkrp.excerpt.request"/>
<property name="s3.endpoint" value="http://ervu-minio.k8s.micord.ru:31900"/>
<property name="s3.access_key" value="rlTdTvkmSXu9FsLhfecw"/>
<property name="s3.secret_key" value="NUmY0wwRIEyAd98GCKd1cOgJWvLQYAcMMul5Ulu0"/>
<property name="av.kafka.group.id" value="1"/>
<property name="av.kafka.download.response" value="ervu.lkrp.av-fileupload-status"/>
<property name="esia.token.clear.cron" value="0 0 */1 * * *"/>
<property name="esia.upload.data.role" value="MNSV89_UPLOAD_DATA"/>
<property name="webdav.cleanup.cron" value="0 0 0 * * *"/>
<property name="webdav.retry.delay" value="500"/>
<property name="file.webdav.lifetime.seconds" value="300"/>
<property name="file.webdav.extensions" value="csv,xlsx"/>
</system-properties>
<management>
<audit-log>

View file

@@ -277,9 +277,9 @@
<version>2.23.1</version>
</dependency>
<dependency>
<groupId>com.amazonaws</groupId>
<artifactId>aws-java-sdk-s3</artifactId>
<version>1.12.759</version>
<groupId>com.github.lookfirst</groupId>
<artifactId>sardine</artifactId>
<version>5.12</version>
</dependency>
</dependencies>
</dependencyManagement>