From dca23cab08bcf992ceb73e2d8d7c87921e579820 Mon Sep 17 00:00:00 2001
From: Eduard Tihomirov
Date: Wed, 8 Oct 2025 11:35:26 +0300
Subject: [PATCH] ERVU-580: Add new params

---
 .../java/service/ConfigExecutorService.java   |  43 ++--
 .../models/SqlDownloadBuildQueryResponse.java |  18 ++
 .../micord/models/requests/BaseRequest.java   |   3 +
 .../models/requests/RequestArgument.java      |   6 +
 .../downloads/AQLDownloadRequest.java         |   3 +
 .../org/micord/service/DownloadService.java   | 224 ++++++++++--------
 6 files changed, 189 insertions(+), 108 deletions(-)
 create mode 100644 config-data-executor/src/main/java/org/micord/models/SqlDownloadBuildQueryResponse.java

diff --git a/backend/src/main/java/service/ConfigExecutorService.java b/backend/src/main/java/service/ConfigExecutorService.java
index ed879f4..4c976ff 100644
--- a/backend/src/main/java/service/ConfigExecutorService.java
+++ b/backend/src/main/java/service/ConfigExecutorService.java
@@ -25,8 +25,10 @@ import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
 import org.springframework.http.ResponseEntity;
 import org.springframework.stereotype.Service;
+import org.springframework.util.StreamUtils;
 import org.springframework.web.client.HttpClientErrorException;
 import org.springframework.web.client.RestTemplate;
+import org.springframework.core.io.Resource;
 import ru.micord.ervu_eks.exception.ConfigExecutorException;
 
 /**
@@ -52,25 +54,36 @@ public class ConfigExecutorService {
     HttpEntity entity = new HttpEntity<>(request, headers);
 
     try {
-
-      ResponseEntity<byte[]> response = restTemplate.exchange(url.concat("/").concat("downloadCSV"),
-          HttpMethod.POST, entity, byte[].class
-      );
-
-      String content = "";
-      if (response != null && response.getBody() != null) {
-        content = Base64.getEncoder().encodeToString(response.getBody());
+      ResponseEntity<Resource> response = restTemplate.exchange(
+          url.concat("/").concat("downloadCSV"), HttpMethod.POST, entity, Resource.class);
+      if (response.getBody() == null) {
+        throw new RuntimeException("Empty response from downloadCSV");
       }
-
+      byte[] fileBytes = StreamUtils.copyToByteArray(response.getBody().getInputStream());
+      String content = Base64.getEncoder().encodeToString(fileBytes);
+      String fileExtension = ".csv";
+      List<String> contentDisposition = response.getHeaders().get(HttpHeaders.CONTENT_DISPOSITION);
+      if (contentDisposition != null && !contentDisposition.isEmpty()) {
+        String disposition = contentDisposition.get(0);
+        int idx = disposition.indexOf("filename=");
+        if (idx != -1) {
+          String fileNameFromHeader = disposition.substring(idx + 9).replace("\"", "");
+          int dotIndex = fileNameFromHeader.lastIndexOf(".");
+          if (dotIndex != -1) {
+            fileExtension = fileNameFromHeader.substring(dotIndex);
+          }
+        }
+      }
+      String fileName =
+          request.getType() + "_" + new SimpleDateFormat("dd.MM.yyyy").format(new Date())
+              + fileExtension;
       FileModel fileModel = new FileModel();
       fileModel.setFileContent(content);
-      fileModel.setFileExtension(".csv");
-      fileModel.setFileName(
-          request.getType() + "_" + new SimpleDateFormat("dd.MM.yyyy").format(new Date()) + ".csv");
+      fileModel.setFileExtension(fileExtension);
+      fileModel.setFileName(fileName);
       return fileModel;
-
-    } catch (HttpClientErrorException e) {
-
+    }
+    catch (HttpClientErrorException e) {
       if (e.getStatusCode() == HttpStatus.BAD_REQUEST) {
         Map<String, String> responseMap = new Gson().fromJson(e.getResponseBodyAsString(),
             new TypeToken<Map<String, String>>() {
diff --git a/config-data-executor/src/main/java/org/micord/models/SqlDownloadBuildQueryResponse.java b/config-data-executor/src/main/java/org/micord/models/SqlDownloadBuildQueryResponse.java
new file mode 100644
index 0000000..dcab2ee
--- /dev/null
+++ b/config-data-executor/src/main/java/org/micord/models/SqlDownloadBuildQueryResponse.java
@@ -0,0 +1,18 @@
+package org.micord.models;
+
+import java.util.List;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+/**
+ * @author Eduard Tihomirov
+ */
+@Getter
+@AllArgsConstructor
+public class SqlDownloadBuildQueryResponse {
+
+  private String sql;
+  private String paramName;
+  private List<String> params;
+}
diff --git a/config-data-executor/src/main/java/org/micord/models/requests/BaseRequest.java b/config-data-executor/src/main/java/org/micord/models/requests/BaseRequest.java
index 59b2464..606969a 100644
--- a/config-data-executor/src/main/java/org/micord/models/requests/BaseRequest.java
+++ b/config-data-executor/src/main/java/org/micord/models/requests/BaseRequest.java
@@ -19,6 +19,9 @@ public abstract class BaseRequest {
   @XmlElement(name = "RequestURL")
   private String requestURL;
 
+  @XmlElement(name = "RequestArgumentLimit")
+  private Integer requestArgumentLimit;
+
   @XmlElement(name = "RequestValidationRules")
   private RequestValidationRules requestValidationRules;
 
diff --git a/config-data-executor/src/main/java/org/micord/models/requests/RequestArgument.java b/config-data-executor/src/main/java/org/micord/models/requests/RequestArgument.java
index 9141454..cad982f 100644
--- a/config-data-executor/src/main/java/org/micord/models/requests/RequestArgument.java
+++ b/config-data-executor/src/main/java/org/micord/models/requests/RequestArgument.java
@@ -18,6 +18,7 @@ public class RequestArgument {
   private String requestArgumentName;;
   private String requestArgumentURL;
   private SqlConnectionParams requestArgumentConnectionParams;
+  private Integer requestArgumentLimit;
 
   @XmlAttribute(name = "type")
   public RequestArgumentType getType() {
@@ -39,4 +40,9 @@ public class RequestArgument {
     return requestArgumentConnectionParams;
   }
 
+  @XmlElement(name = "RequestArgumentLimit")
+  public Integer getRequestArgumentLimit() {
+    return requestArgumentLimit;
+  }
+
 }
diff --git a/config-data-executor/src/main/java/org/micord/models/requests/downloads/AQLDownloadRequest.java b/config-data-executor/src/main/java/org/micord/models/requests/downloads/AQLDownloadRequest.java
index 4aad112..4d0f33d 100644
--- a/config-data-executor/src/main/java/org/micord/models/requests/downloads/AQLDownloadRequest.java
+++ b/config-data-executor/src/main/java/org/micord/models/requests/downloads/AQLDownloadRequest.java
@@ -24,6 +24,9 @@ public class AQLDownloadRequest extends BaseDownloadRequest {
   @XmlElement(name = "DownloadRequestEntitySelectorQuery")
   private String downloadRequestEntitySelectorQuery;
 
+  @XmlElement(name = "AqlDownloadLimit")
+  private Integer aqlDownloadLimit;
+
   @XmlElementWrapper(name = "AqlRequestCollections")
   @XmlElement(name = "AqlRequestCollection")
   private List aqlRequestCollections;
diff --git a/config-data-executor/src/main/java/org/micord/service/DownloadService.java b/config-data-executor/src/main/java/org/micord/service/DownloadService.java
index e908480..588d5bd 100644
--- a/config-data-executor/src/main/java/org/micord/service/DownloadService.java
+++ b/config-data-executor/src/main/java/org/micord/service/DownloadService.java
@@ -7,6 +7,7 @@ import com.arangodb.model.AqlQueryOptions;
 import org.micord.config.ArangoDBConnection;
 import org.micord.config.DatabaseConnection;
 import org.micord.exceptions.NoDownloadReportRecordsException;
+import org.micord.models.SqlDownloadBuildQueryResponse;
 import org.micord.models.requests.RequestParameters;
 import org.micord.models.requests.downloads.AQLDownloadRequest;
 import org.micord.models.requests.downloads.BaseDownloadRequest;
@@ -25,6 +26,7 @@ import java.sql.SQLException;
 import java.time.LocalDate;
 import java.util.*;
 import java.util.stream.Collectors;
+import java.util.stream.IntStream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;
 
@@ -32,7 +34,7 @@ import java.util.zip.ZipOutputStream;
 public class DownloadService {
   private static final Logger logger = LoggerFactory.getLogger(DownloadService.class);
 
-  private static final int MAX_ROWS_PER_CSV = 600000;
+  private static final String REQUEST_WITH_ADDITIONAL_ID = "requestWithAdditionalId";
 
   public File download(BaseDownloadRequest selectedRequest, List<String> ids, RequestParameters parameters, Map<String, Boolean> validationResults) throws SQLException {
     LocalDate startDate = parameters.getStartDate();
@@ -70,7 +72,7 @@ public class DownloadService {
 //            );
 //        }
 
-      return writeResultsToCsv(entities);
+      return writeResultsToCsv(entities, request.getAqlDownloadLimit());
     }
     catch (ArangoDBException e) {
       logger.error("Error connecting to ArangoDB or executing AQL query: {}", e.getMessage(), e);
@@ -107,30 +109,72 @@ public class DownloadService {
 //        return writeResultsToCsv(results);
   }
 
-  private File processSqlDownloadRequest(SQLDownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults) {
-
-    Map<String, Object> query = buildSqlQuery(request, ids, startDate, endDate, validationResults);
+  private File processSqlDownloadRequest(SQLDownloadRequest request,
+      List<String> ids, LocalDate startDate, LocalDate endDate,
+      Map<String, Boolean> validationResults) {
+    List<File> allFiles = new ArrayList<>();
+    int offset = 0;
+    int pageIndex = 1;
+    int limit = request.getRequestArgumentLimit();
 
     try (Connection connection = DatabaseConnection.getConnection(
-        request.getSqlConnectionParams())) {
-      String requestURL = (String) query.get("requestURL");
-
-      List<String[]> results = executeSqlQuery(connection, requestURL);
-
-      // Check if we need to split into multiple files
-      // Note: results includes header row, so we check size - 1 for actual data rows
-      if (results.size() - 1 <= MAX_ROWS_PER_CSV) {
-        // Single CSV file
-        return writeSingleSqlCsvFile(results, "download-", ".csv");
-      } else {
-        // Multiple CSV files in ZIP
-        return writeSqlResultsToZip(results);
+          request.getSqlConnectionParams())) {
+      Map<String, Object> query = buildSqlQuery(request, ids, startDate, endDate,
+          validationResults, limit, offset
+      );
+      if (query.get(REQUEST_WITH_ADDITIONAL_ID) != null) {
+        SqlDownloadBuildQueryResponse response = (SqlDownloadBuildQueryResponse) query.get(REQUEST_WITH_ADDITIONAL_ID);
+        String url = response.getSql();
+        String paramName = response.getParamName();
+        Collection<List<String>> pages = partitionList(response.getParams(), limit);
+        for (List<String> page : pages) {
+          String resultSet = "(" + page.stream()
+              .map(s -> "'" + s.trim() + "'")
+              .collect(Collectors.joining(", ")) + ")";
+          String pageUrl = url.replace("${" + paramName + "}",
+              resultSet
+          );
+          List<String[]> results = new ArrayList<>(executeSqlQuery(connection, pageUrl));
+          if (results.size() <= 1) {
+            break;
+          }
+          File file = writeSingleSqlCsvFile(results, "download-part" + pageIndex + "-", ".csv");
+          allFiles.add(file);
+          pageIndex++;
+        }
       }
+      else {
+        String requestURL = (String) query.get("requestURL");
+        while (true) {
+          List<String[]> results = new ArrayList<>(executeSqlQuery(connection, requestURL));
+          if (results.size() <= 1) {
+            break;
+          }
+          File file = writeSingleSqlCsvFile(results, "download-part" + pageIndex + "-", ".csv");
+          allFiles.add(file);
+          if (results.size() - 1 < limit) {
+            break;
+          }
+          String oldPagination = " OFFSET " + offset + ";";
+          offset += limit;
+          String newPagination = " OFFSET " + offset + ";";
+          requestURL = requestURL.replace(oldPagination, newPagination);
+          pageIndex++;
+        }
+      }
+      if (allFiles.isEmpty()) {
+        return writeSingleSqlCsvFile(new ArrayList<>(), "download-part 0", ".csv");
+      }
+      if (allFiles.size() == 1) {
+        return allFiles.get(0);
+      }
+      else {
+        return createZipArchive(allFiles, "download-");
+      }
     }
     catch (SQLException | IOException e) {
-      logger.error("SQL execution failed for query: {}", query, e);
+      throw new RuntimeException("Data export failed, request: " + request.getRequestURL(), e);
     }
-    return null;
   }
 
   private File writeSingleSqlCsvFile(List<String[]> results, String prefix, String suffix) throws IOException {
@@ -149,39 +193,6 @@ public class DownloadService {
     return csvFile;
   }
 
-  private File writeSqlResultsToZip(List<String[]> results) throws IOException {
-    List<File> csvFiles = new ArrayList<>();
-
-    // Extract header
-    String[] headers = results.isEmpty() ? new String[0] : results.get(0);
-
-    int fileIndex = 1;
-    int currentRowIndex = 1; // Start from 1 to skip header in original results
-
-    while (currentRowIndex < results.size()) {
-      List<String[]> chunk = new ArrayList<>();
-      // Always add headers as first row
-      chunk.add(headers);
-
-      // Add data rows up to MAX_ROWS_PER_CSV (including the header)
-      int chunkEndIndex = Math.min(currentRowIndex + MAX_ROWS_PER_CSV - 1, results.size());
-      for (int i = currentRowIndex; i < chunkEndIndex; i++) {
-        chunk.add(results.get(i));
-      }
-
-      File csvFile = writeSingleSqlCsvFile(chunk, "download-part" + fileIndex + "-", ".csv");
-      if (csvFile != null) {
-        csvFiles.add(csvFile);
-        fileIndex++;
-      }
-
-      currentRowIndex = chunkEndIndex;
-    }
-
-    // Create ZIP archive
-    return createZipArchive(csvFiles, "download-");
-  }
-
   private List<Map<String, Object>> executeSelectAqlRequest(ArangoDatabase arangoDb,
       String downloadRequestEntitySelectorQuery, List<String> ids, LocalDate startDate,
       LocalDate endDate, Boolean emptyIdsAllowed, Boolean emptyDatesAllowed) {
@@ -316,10 +327,10 @@ public class DownloadService {
     return results;
   }
 
-  private File writeResultsToCsv(List<Map<String, Object>> results) {
+  private File writeResultsToCsv(List<Map<String, Object>> results, int limit) {
     try {
       // If results fit in a single file, create one CSV
-      if (results.size() <= MAX_ROWS_PER_CSV) {
+      if (results.size() <= limit) {
         return writeSingleCsvFile(results, "arango-download-", ".csv");
       }
 
@@ -327,8 +338,8 @@ public class DownloadService {
       List<File> csvFiles = new ArrayList<>();
       int fileIndex = 1;
 
-      for (int i = 0; i < results.size(); i += MAX_ROWS_PER_CSV) {
-        int endIndex = Math.min(i + MAX_ROWS_PER_CSV, results.size());
+      for (int i = 0; i < results.size(); i += limit) {
+        int endIndex = Math.min(i + limit, results.size());
         List<Map<String, Object>> chunk = results.subList(i, endIndex);
 
         File csvFile = writeSingleCsvFile(chunk, "arango-download-part" + fileIndex + "-", ".csv");
@@ -430,11 +441,12 @@ public class DownloadService {
     return field.replace("\"", "\"\"");
   }
 
-  private Map<String, Object> buildSqlQuery(SQLDownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults) {
+  private Map<String, Object> buildSqlQuery(SQLDownloadRequest request, List<String> ids,
+      LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults, int limit,
+      int offset) {
     Boolean emptyIdsAllowed = validationResults.get(ValidationService.IS_EMPTY_IDS_ALLOWED);
     Boolean emptyDatesAllowed =
         validationResults.get(ValidationService.IS_EMPTY_DATES_ALLOWED);
-
     Map<String, Object> resultMap = new HashMap<>();
     String endpointArguments;
 
@@ -445,46 +457,41 @@ public class DownloadService {
     }
 
     if (emptyIdsAllowed != null && emptyIdsAllowed) {
-      resultMap.put("requestURL", requestURL.replace("where id in ${endpointArguments}", ""));
-      return resultMap;
+      requestURL = requestURL.replace("where id in ${endpointArguments}", "");
     }
-
-    if (ids == null || ids.isEmpty()) {
-      resultMap.put("requestURL", requestURL.replace("where id in ${endpointArguments}", ""));
-      return resultMap;
+    else if (ids == null || ids.isEmpty()) {
+      requestURL = requestURL.replace("where id in ${endpointArguments}", "");
     }
-
-    if (requestURL.contains(":=")) {
-      endpointArguments = "'{" + ids.stream()
-          .map(String::trim)
-          .collect(Collectors.joining(", ")) + "}'";
-    } else {
-      endpointArguments = "(" + ids.stream()
-          .map(s -> "'" + s.trim() + "'")
-          .collect(Collectors.joining(", ")) + ")";
+    else {
+      if (requestURL.contains(":=")) {
+        endpointArguments =
+            "'{" + ids.stream().map(String::trim).collect(Collectors.joining(", ")) + "}'";
+      }
+      else {
+        endpointArguments =
+            "(" + ids.stream().map(s -> "'" + s.trim() + "'").collect(Collectors.joining(", "))
+                + ")";
+      }
+      requestURL = requestURL.replace("${endpointArguments}", endpointArguments);
     }
-
+    Map<String, List<String>> params = new HashMap<>();
     if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) {
       for (RequestArgument argument : request.getRequestArguments()) {
-
        if (argument.getRequestArgumentConnectionParams() != null) {
          try (Connection connection = DatabaseConnection.getConnection(
-            argument.getRequestArgumentConnectionParams())) {
+              argument.getRequestArgumentConnectionParams())) {
            String query = argument.getRequestArgumentURL();
-            List<String> result = fetchFileListFromDatabaseSQL(connection, query);
-
-            resultMap.put("ids", result);
-
-
-            if (result != null && !result.isEmpty()) {
-              String resultSet = "(" + result.stream()
-                  .map(s -> "'" + s.trim() + "'")
-                  .collect(Collectors.joining(", ")) + ")";
-
-              requestURL = requestURL.replace("${" + argument.getRequestArgumentName() + "}", resultSet);
-
+            int subOffset = 0;
+            List<String> aggregatedIds = new ArrayList<>();
+            while (true) {
+              String paginatedQuery = query + " LIMIT " + limit + " OFFSET " + subOffset;
+              List<String> result = fetchFileListFromDatabaseSQL(connection, paginatedQuery);
+              if (result.isEmpty()) break;
+              aggregatedIds.addAll(result);
+              if (result.size() < limit) break;
+              subOffset += limit;
            }
-
+            params.put(argument.getRequestArgumentName(), aggregatedIds);
          } catch (SQLException e) {
            logger.error("Failed to execute query for RequestArgument", e);
          }
@@ -492,10 +499,35 @@ public class DownloadService {
       }
     }
 
+    if (!params.isEmpty()) {
+      if (params.size() == 1) {
+        Map.Entry<String, List<String>> entry = params.entrySet().iterator().next();
+        String key = entry.getKey();
+        List<String> value = entry.getValue();
+        resultMap.put(REQUEST_WITH_ADDITIONAL_ID, new SqlDownloadBuildQueryResponse(requestURL, key, value));
+        return resultMap;
+      }
+      else {
+        for (Map.Entry<String, List<String>> entry : params.entrySet()) {
+          String resultSet = "(" + entry.getValue().stream()
+              .map(s -> "'" + s.trim() + "'")
+              .collect(Collectors.joining(", ")) + ")";
+          requestURL = requestURL.replace("${" + entry.getKey() + "}",
+              resultSet
+          );
+        }
+        requestURL =
+            requestURL.substring(0, requestURL.length() - 1) + " LIMIT " + limit + " OFFSET "
+                + offset + ";";
+      }
+    }
+    else {
+      requestURL =
+          requestURL.substring(0, requestURL.length() - 1) + " LIMIT " + limit + " OFFSET "
+              + offset + ";";
+    }
 
-    resultMap.put("requestURL", requestURL
-        .replace("${endpointArguments}", endpointArguments));
-
+    resultMap.put("requestURL", requestURL);
     return resultMap;
   }
 
@@ -549,4 +581,10 @@
     return results;
   }
 
+  private Collection<List<String>> partitionList(List<String> list, int size) {
+    return IntStream.range(0, (list.size() + size - 1) / size)
+        .mapToObj(i -> list.subList(i * size, Math.min((i + 1) * size, list.size())))
+        .map(ArrayList::new)
+        .collect(Collectors.toList());
+  }
 }