Merge remote-tracking branch 'origin/release/1.0.0' into feature/SUPPORT-8556_db_delete
commit 86771a5f98
5 changed files with 53 additions and 30 deletions
@@ -16,34 +16,37 @@ import org.springframework.context.annotation.Configuration;
  * @author r.latypov
  */
 @Configuration
-public class S3Connection {
-    @Value("${s3.out.endpoint}")
-    private String endpointOut;
-    @Value("${s3.out.access_key}")
-    private String accessKeyOut;
-    @Value("${s3.out.secret_key}")
-    private String secretKeyOut;
-    @Value("${s3.out.bucket_name}")
-    private String bucketNameOut;
+public class S3Config {
+    @Value("${s3.endpoint}")
+    private String endpoint;
+    @Value("${s3.access_key}")
+    private String accessKey;
+    @Value("${s3.secret_key}")
+    private String secretKey;
+    @Value("${s3.bucket_name}")
+    private String bucketName;
+    @Value("${s3.path.style.enabled:true}")
+    private boolean pathStyleAccessEnabled;
 
     @Bean("outBucketName")
-    public String getBucketNameOut() {
-        return bucketNameOut;
+    public String getBucket() {
+        return bucketName;
     }
 
     @Bean("outClient")
     public AmazonS3 getS3OutClient() {
-        return getS3Client(endpointOut, accessKeyOut, secretKeyOut);
+        return getS3Client(endpoint, accessKey, secretKey, pathStyleAccessEnabled);
     }
 
     private static AmazonS3 getS3Client(String endpoint, String accessKey,
-            String secretKey) {
+            String secretKey, boolean pathStyleAccessEnabled) {
         AWSCredentials credentials = new BasicAWSCredentials(accessKey, secretKey);
         String region = Region.getRegion(Regions.DEFAULT_REGION).toString();
 
         return AmazonS3ClientBuilder.standard()
                 .withEndpointConfiguration(new AwsClientBuilder.EndpointConfiguration(endpoint, region))
                 .withCredentials(new AWSStaticCredentialsProvider(credentials))
                 .withPathStyleAccessEnabled(pathStyleAccessEnabled)
                 .build();
     }
 }
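Note on the hunk above (not part of the diff): the configuration class is renamed from S3Connection to S3Config and its fields drop the "Out" suffix, but the bean names "outClient" and "outBucketName" are unchanged, so existing injection points keep working. A minimal sketch of a consumer, assuming a hypothetical class and field names that are not in this commit:

    import com.amazonaws.services.s3.AmazonS3;
    import org.springframework.beans.factory.annotation.Qualifier;
    import org.springframework.stereotype.Service;

    // Hypothetical consumer, for illustration only; the class and field names are assumptions.
    @Service
    public class UploadTarget {
        private final AmazonS3 outClient;
        private final String outBucketName;

        public UploadTarget(@Qualifier("outClient") AmazonS3 outClient,
                @Qualifier("outBucketName") String outBucketName) {
            this.outClient = outClient;         // client built by S3Config.getS3OutClient()
            this.outBucketName = outBucketName; // bucket name exposed by S3Config.getBucket()
        }
    }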
@@ -1,6 +1,6 @@
 package ru.micord.ervu.av.s3;
 
-import java.io.File;
+import java.nio.file.Path;
 
 import com.amazonaws.AmazonServiceException;
 import com.amazonaws.services.s3.AmazonS3;
@@ -30,11 +30,10 @@ public class S3Service {
         }
     }
 
-    public void putFile(String filePath, String key) throws FileUploadException {
+    public void putFile(Path filePath, String key) throws FileUploadException {
         try {
             outClient.putObject(outBucketName, generateResourceName(outBucketName, key),
-                    new File(filePath)
-            );
+                    filePath.toFile());
         }
         catch (AmazonServiceException e) {
             // todo message
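Note on the hunk above (not part of the diff): putFile now takes a java.nio.file.Path instead of a String path, and the upload source comes from filePath.toFile(). A minimal call-site sketch, assuming an injected S3Service field named s3Service and a throwaway path (both assumptions):

    // Illustration only; s3Service and the paths are assumptions, not code from this commit.
    Path localCopy = Paths.get("/tmp", "example.bin");
    s3Service.putFile(localCopy, "example.bin");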
@@ -3,6 +3,9 @@ package ru.micord.ervu.av.service;
 import java.io.File;
 import java.io.FileOutputStream;
 import java.io.IOException;
+import java.nio.file.Files;
+import java.nio.file.Path;
+import java.nio.file.Paths;
 import java.util.Arrays;
 import java.util.concurrent.CompletableFuture;
 import java.util.concurrent.TimeUnit;
@@ -49,6 +52,8 @@ import ru.micord.ervu.av.s3.S3Service;
 @Service
 public class FileUploadService {
     private static final Logger logger = LoggerFactory.getLogger(FileUploadService.class);
+    @Value("${av.check.enabled}")
+    private boolean avCheckEnabled;
     @Value("${av.rest.address}")
     private String avRestAddress;
     @Value("${av.first.timeout.milliseconds}")
@@ -84,16 +89,20 @@ public class FileUploadService {
 
         try {
             FileUrl fileUrl = parseFileUrl(downloadRequest.fileInfo().getFileUrl());
-            String filePath = fileSavingPath + fileUrl.fileName();
+            Path filePath = Paths.get(fileSavingPath, fileUrl.fileName());
             String downloadUrl = fileUrl.fileUrl();
             downloadFile(downloadUrl, filePath);
+            boolean clean = true;
+            boolean infected = false;
 
-            AvResponse avResponse = checkFile(filePath);
-
-            boolean clean = Arrays.stream(avResponse.verdicts())
-                    .anyMatch(verdict -> verdict.equalsIgnoreCase(AvResponse.Scan.VERDICT_CLEAN));
-            boolean infected = Arrays.stream(avResponse.verdicts())
-                    .anyMatch(verdict -> verdict.equalsIgnoreCase(AvResponse.Scan.VERDICT_INFECTED));
+            if (avCheckEnabled) {
+                AvResponse avResponse = checkFile(filePath);
+
+                clean = Arrays.stream(avResponse.verdicts())
+                        .anyMatch(verdict -> verdict.equalsIgnoreCase(AvResponse.Scan.VERDICT_CLEAN));
+                infected = Arrays.stream(avResponse.verdicts())
+                        .anyMatch(verdict -> verdict.equalsIgnoreCase(AvResponse.Scan.VERDICT_INFECTED));
+            }
 
             if (infected || !clean) {
                 downloadRequest.fileInfo().setFileUrl(null);
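Note on the hunk above (not part of the diff): clean defaults to true and infected to false, so when av.check.enabled is false the antivirus REST call is skipped and the file is treated as clean. A small illustration of the verdict matching, assuming the constants resolve to the strings "CLEAN" and "INFECTED" (an assumption; the real values live in AvResponse.Scan):

    // Illustration only; the literal verdict strings are assumptions.
    String[] verdicts = {"clean"};
    boolean clean = Arrays.stream(verdicts)
            .anyMatch(v -> v.equalsIgnoreCase("CLEAN"));      // true: the match is case-insensitive
    boolean infected = Arrays.stream(verdicts)
            .anyMatch(v -> v.equalsIgnoreCase("INFECTED"));   // false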
@@ -110,7 +119,14 @@ public class FileUploadService {
             sendMessage(inStatusTopic.name(), downloadRequest, inKafkaTemplate);
 
-            deleteFile(downloadUrl);
-            if (new File(filePath).delete()) {
 
+            try {
+                Files.delete(filePath);
+            }
+            catch (IOException e) {
+                throw new RuntimeException("Failed to delete file " + filePath.getFileName());
+            }
+            finally {
+                acknowledgment.acknowledge();
+            }
         }
 
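Note on the hunk above (not part of the diff): the silently-ignored boolean from File.delete() is replaced by Files.delete(filePath), which throws an IOException describing the failure, and the new finally block makes sure the Kafka offset is acknowledged whether or not the delete succeeds (the listener runs with ack-mode MANUAL_IMMEDIATE, see the properties below). A minimal sketch of the difference, using a hypothetical path:

    // Illustration only; the path is an assumption.
    Path leftover = Paths.get("/tmp/does-not-exist.tmp");
    boolean deleted = leftover.toFile().delete();   // File.delete(): just returns false, easy to miss
    try {
        Files.delete(leftover);                     // Files.delete(): throws NoSuchFileException with the reason
    }
    catch (IOException e) {
        // the caller sees why the file could not be removed
    }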
@@ -158,9 +174,9 @@ public class FileUploadService {
         }
     }
 
-    private void downloadFile(String fileUrl, String filePath)
+    private void downloadFile(String fileUrl, Path filePath)
             throws InvalidHttpFileUrlException, FileUploadException {
-        File file = new File(filePath);
+        File file = filePath.toFile();
         HttpGet request = new HttpGet(fileUrl);
 
         try (CloseableHttpClient client = HttpClients.createDefault();
@@ -192,8 +208,8 @@ public class FileUploadService {
         }
     }
 
-    private AvResponse checkFile(String filePath) throws FileUploadException {
-        File file = new File(filePath);
+    private AvResponse checkFile(Path filePath) throws FileUploadException {
+        File file = filePath.toFile();
 
         try (CloseableHttpClient client = HttpClients.createDefault()) {
             HttpPost post = new HttpPost(avRestAddress);
@@ -10,7 +10,7 @@ spring.kafka.consumer.properties.sasl.jaas.config=org.apache.kafka.common.securi
 spring.kafka.consumer.properties.sasl.mechanism=${AV_KAFKA_SASL_MECHANISM:SCRAM-SHA-256}
 #
 spring.kafka.consumer.enable.auto.commit=false
-spring.kafka.consumer.group.id=file-to-upload-consumers
+spring.kafka.consumer.group.id=${AV_KAFKA_GROUP_ID:file-to-upload-consumers}
 # kafka in listeners
 spring.kafka.listener.ack.mode=MANUAL_IMMEDIATE
 # kafka in producer (with possibility for default bean)
@@ -50,6 +50,7 @@ kafka.out.error.topic.name=${ERVU_KAFKA_ERROR_TOPIC_NAME}
 kafka.out.success.topic.name=${ERVU_KAFKA_SUCCESS_TOPIC_NAME}
 kafka.out.response.topic.name=${ERVU_KAFKA_RESPONSE_TOPIC_NAME}
 #
+av.check.enabled=${AV_CHECK_ENABLED:true}
 av.rest.address=${AV_REST_ADDRESS}
 av.first.timeout.milliseconds=${AV_FIRST_TIMEOUT_MILLISECONDS:1000}
 av.retry.max.attempts.count=${AV_RETRY_MAX_ATTEMPTS_COUNT:10}
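Note on the hunk above (not part of the diff): the new av.check.enabled line follows the same ${ENV_VAR:default} placeholder convention as the rest of the file. It reads the AV_CHECK_ENABLED environment variable and falls back to true when the variable is unset, so scanning stays on by default and can be switched off per environment at deploy time, e.g. (illustration only):

    AV_CHECK_ENABLED=false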
@@ -60,6 +61,7 @@ s3.out.endpoint=${S3_ENDPOINT}
 s3.out.access_key=${S3_ACCESS_KEY}
 s3.out.secret_key=${S3_SECRET_KEY}
 s3.out.bucket_name=${S3_OUT_BUCKET_NAME}
+s3.out.path.style.access.enabled=${S3_OUT_PATH_STYLE_ACCESS_ENABLED}
 #
 # spring jooq dsl bean properties begin ->
 spring.jooq.sql-dialect=Postgres