ERVU-580: Add new params

This commit is contained in:
Eduard Tihomirov 2025-10-08 11:35:26 +03:00
parent 2a1d34ac10
commit dca23cab08
6 changed files with 189 additions and 108 deletions

View file

@ -25,8 +25,10 @@ import org.springframework.http.HttpStatus;
import org.springframework.http.MediaType; import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity; import org.springframework.http.ResponseEntity;
import org.springframework.stereotype.Service; import org.springframework.stereotype.Service;
import org.springframework.util.StreamUtils;
import org.springframework.web.client.HttpClientErrorException; import org.springframework.web.client.HttpClientErrorException;
import org.springframework.web.client.RestTemplate; import org.springframework.web.client.RestTemplate;
import org.springframework.core.io.Resource;
import ru.micord.ervu_eks.exception.ConfigExecutorException; import ru.micord.ervu_eks.exception.ConfigExecutorException;
/** /**
@ -52,25 +54,36 @@ public class ConfigExecutorService {
HttpEntity<ExportDataRequest> entity = new HttpEntity<>(request, headers); HttpEntity<ExportDataRequest> entity = new HttpEntity<>(request, headers);
try { try {
ResponseEntity<Resource> response = restTemplate.exchange(
ResponseEntity<byte[]> response = restTemplate.exchange(url.concat("/").concat("downloadCSV"), url.concat("/").concat("downloadCSV"), HttpMethod.POST, entity, Resource.class);
HttpMethod.POST, entity, byte[].class if (response.getBody() == null) {
); throw new RuntimeException("Empty response from downloadCSV");
String content = "";
if (response != null && response.getBody() != null) {
content = Base64.getEncoder().encodeToString(response.getBody());
} }
byte[] fileBytes = StreamUtils.copyToByteArray(response.getBody().getInputStream());
String content = Base64.getEncoder().encodeToString(fileBytes);
String fileExtension = ".csv";
List<String> contentDisposition = response.getHeaders().get(HttpHeaders.CONTENT_DISPOSITION);
if (contentDisposition != null && !contentDisposition.isEmpty()) {
String disposition = contentDisposition.get(0);
int idx = disposition.indexOf("filename=");
if (idx != -1) {
String fileNameFromHeader = disposition.substring(idx + 9).replace("\"", "");
int dotIndex = fileNameFromHeader.lastIndexOf(".");
if (dotIndex != -1) {
fileExtension = fileNameFromHeader.substring(dotIndex);
}
}
}
String fileName =
request.getType() + "_" + new SimpleDateFormat("dd.MM.yyyy").format(new Date())
+ fileExtension;
FileModel fileModel = new FileModel(); FileModel fileModel = new FileModel();
fileModel.setFileContent(content); fileModel.setFileContent(content);
fileModel.setFileExtension(".csv"); fileModel.setFileExtension(fileExtension);
fileModel.setFileName( fileModel.setFileName(fileName);
request.getType() + "_" + new SimpleDateFormat("dd.MM.yyyy").format(new Date()) + ".csv");
return fileModel; return fileModel;
}
} catch (HttpClientErrorException e) { catch (HttpClientErrorException e) {
if (e.getStatusCode() == HttpStatus.BAD_REQUEST) { if (e.getStatusCode() == HttpStatus.BAD_REQUEST) {
Map<String, Object> responseMap = new Gson().fromJson(e.getResponseBodyAsString(), Map<String, Object> responseMap = new Gson().fromJson(e.getResponseBodyAsString(),
new TypeToken<Map<String, Object>>() { new TypeToken<Map<String, Object>>() {

View file

@ -0,0 +1,18 @@
package org.micord.models;
import java.util.List;
import lombok.AllArgsConstructor;
import lombok.Getter;
/**
* @author Eduard Tihomirov
*/
public class SqlDownloadBuildQueryResponse {

  // SQL text that still contains the "${paramName}" placeholder to be
  // substituted by the caller (presumably in paginated batches — confirm
  // against DownloadService usage).
  private final String sql;

  // Name of the placeholder inside {@code sql}.
  private final String paramName;

  // Values to substitute into the placeholder.
  private final List<String> params;

  /**
   * Creates an immutable response holding a prepared SQL string together with
   * the placeholder name and the values to be substituted into it.
   *
   * @param sql       SQL text containing the {@code ${paramName}} placeholder
   * @param paramName name of the placeholder to replace
   * @param params    values to substitute into the placeholder
   */
  public SqlDownloadBuildQueryResponse(String sql, String paramName, List<String> params) {
    this.sql = sql;
    this.paramName = paramName;
    this.params = params;
  }

  /** @return the SQL text with its placeholder still unsubstituted */
  public String getSql() {
    return sql;
  }

  /** @return the placeholder name referenced inside the SQL text */
  public String getParamName() {
    return paramName;
  }

  /** @return the substitution values for the placeholder */
  public List<String> getParams() {
    return params;
  }
}

View file

@ -19,6 +19,9 @@ public abstract class BaseRequest {
@XmlElement(name = "RequestURL") @XmlElement(name = "RequestURL")
private String requestURL; private String requestURL;
@XmlElement(name = "RequestArgumentLimit")
private Integer requestArgumentLimit;
@XmlElement(name = "RequestValidationRules") @XmlElement(name = "RequestValidationRules")
private RequestValidationRules requestValidationRules; private RequestValidationRules requestValidationRules;

View file

@ -18,6 +18,7 @@ public class RequestArgument {
private String requestArgumentName;; private String requestArgumentName;;
private String requestArgumentURL; private String requestArgumentURL;
private SqlConnectionParams requestArgumentConnectionParams; private SqlConnectionParams requestArgumentConnectionParams;
private Integer requestArgumentLimit;
@XmlAttribute(name = "type") @XmlAttribute(name = "type")
public RequestArgumentType getType() { public RequestArgumentType getType() {
@ -39,4 +40,9 @@ public class RequestArgument {
return requestArgumentConnectionParams; return requestArgumentConnectionParams;
} }
@XmlElement(name = "RequestArgumentLimit")
public Integer getRequestArgumentLimit() {
return requestArgumentLimit;
}
} }

View file

@ -24,6 +24,9 @@ public class AQLDownloadRequest extends BaseDownloadRequest {
@XmlElement(name = "DownloadRequestEntitySelectorQuery") @XmlElement(name = "DownloadRequestEntitySelectorQuery")
private String downloadRequestEntitySelectorQuery; private String downloadRequestEntitySelectorQuery;
@XmlElement(name = "AglDownloadLimit")
private Integer aqlDownloadLimit;
@XmlElementWrapper(name = "AqlRequestCollections") @XmlElementWrapper(name = "AqlRequestCollections")
@XmlElement(name = "AqlRequestCollection") @XmlElement(name = "AqlRequestCollection")
private List<AqlRequestCollection> aqlRequestCollections; private List<AqlRequestCollection> aqlRequestCollections;

View file

@ -7,6 +7,7 @@ import com.arangodb.model.AqlQueryOptions;
import org.micord.config.ArangoDBConnection; import org.micord.config.ArangoDBConnection;
import org.micord.config.DatabaseConnection; import org.micord.config.DatabaseConnection;
import org.micord.exceptions.NoDownloadReportRecordsException; import org.micord.exceptions.NoDownloadReportRecordsException;
import org.micord.models.SqlDownloadBuildQueryResponse;
import org.micord.models.requests.RequestParameters; import org.micord.models.requests.RequestParameters;
import org.micord.models.requests.downloads.AQLDownloadRequest; import org.micord.models.requests.downloads.AQLDownloadRequest;
import org.micord.models.requests.downloads.BaseDownloadRequest; import org.micord.models.requests.downloads.BaseDownloadRequest;
@ -25,6 +26,7 @@ import java.sql.SQLException;
import java.time.LocalDate; import java.time.LocalDate;
import java.util.*; import java.util.*;
import java.util.stream.Collectors; import java.util.stream.Collectors;
import java.util.stream.IntStream;
import java.util.zip.ZipEntry; import java.util.zip.ZipEntry;
import java.util.zip.ZipOutputStream; import java.util.zip.ZipOutputStream;
@ -32,7 +34,7 @@ import java.util.zip.ZipOutputStream;
public class DownloadService { public class DownloadService {
private static final Logger logger = LoggerFactory.getLogger(DownloadService.class); private static final Logger logger = LoggerFactory.getLogger(DownloadService.class);
private static final int MAX_ROWS_PER_CSV = 600000; private static final String REQUEST_WITH_ADDITIONAL_ID = "requestWithAdditionalId";
public File download(BaseDownloadRequest selectedRequest, List<String> ids, RequestParameters parameters, Map<String, Boolean> validationResults) throws SQLException { public File download(BaseDownloadRequest selectedRequest, List<String> ids, RequestParameters parameters, Map<String, Boolean> validationResults) throws SQLException {
LocalDate startDate = parameters.getStartDate(); LocalDate startDate = parameters.getStartDate();
@ -70,7 +72,7 @@ public class DownloadService {
// ); // );
// } // }
return writeResultsToCsv(entities); return writeResultsToCsv(entities, request.getAqlDownloadLimit());
} catch (ArangoDBException e) { } catch (ArangoDBException e) {
logger.error("Error connecting to ArangoDB or executing AQL query: {}", e.getMessage(), e); logger.error("Error connecting to ArangoDB or executing AQL query: {}", e.getMessage(), e);
@ -107,30 +109,72 @@ public class DownloadService {
// return writeResultsToCsv(results); // return writeResultsToCsv(results);
} }
private File processSqlDownloadRequest(SQLDownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults) { private File processSqlDownloadRequest(SQLDownloadRequest request,
List<String> ids, LocalDate startDate, LocalDate endDate,
Map<String, Object> query = buildSqlQuery(request, ids, startDate, endDate, validationResults); Map<String, Boolean> validationResults) {
List<File> allFiles = new ArrayList<>();
int offset = 0;
int pageIndex = 1;
int limit = request.getRequestArgumentLimit();
try (Connection connection = DatabaseConnection.getConnection( try (Connection connection = DatabaseConnection.getConnection(
request.getSqlConnectionParams())) { request.getSqlConnectionParams())) {
String requestURL = (String) query.get("requestURL"); Map<String, Object> query = buildSqlQuery(request, ids, startDate, endDate,
validationResults, limit, offset
List<String[]> results = executeSqlQuery(connection, requestURL); );
if (query.get(REQUEST_WITH_ADDITIONAL_ID) != null) {
// Check if we need to split into multiple files SqlDownloadBuildQueryResponse response = (SqlDownloadBuildQueryResponse) query.get(REQUEST_WITH_ADDITIONAL_ID);
// Note: results includes header row, so we check size - 1 for actual data rows String url = response.getSql();
if (results.size() - 1 <= MAX_ROWS_PER_CSV) { String paramName = response.getParamName();
// Single CSV file Collection<List<String>> pages = partitionList(response.getParams(), limit);
return writeSingleSqlCsvFile(results, "download-", ".csv"); for (List<String> page : pages) {
} else { String resultSet = "(" + page.stream()
// Multiple CSV files in ZIP .map(s -> "'" + s.trim() + "'")
return writeSqlResultsToZip(results); .collect(Collectors.joining(", ")) + ")";
url = url.replace("${" + paramName + "}",
resultSet
);
List<String[]> results = new ArrayList<>(executeSqlQuery(connection, url));
if (results.size() <= 1) {
break;
}
File file = writeSingleSqlCsvFile(results, "download-part" + pageIndex + "-", ".csv");
allFiles.add(file);
pageIndex++;
}
} }
else {
String requestURL = (String) query.get("requestURL");
while (true) {
List<String[]> results = new ArrayList<>(executeSqlQuery(connection, requestURL));
if (results.size() <= 1) {
break;
}
File file = writeSingleSqlCsvFile(results, "download-part" + pageIndex + "-", ".csv");
allFiles.add(file);
if (results.size() - 1 < limit) {
break;
}
String oldPagination = " OFFSET " + offset + ";";
offset += limit;
String newPagination = " OFFSET " + offset + ";";
requestURL = requestURL.replace(oldPagination, newPagination);
pageIndex++;
}
}
if (allFiles.isEmpty()) {
return writeSingleSqlCsvFile(new ArrayList<>(), "download-part 0", ".csv");
}
if (allFiles.size() == 1) {
return allFiles.get(0);
}
else {
return createZipArchive(allFiles, "download-");
}
} }
catch (SQLException | IOException e) { catch (SQLException | IOException e) {
logger.error("SQL execution failed for query: {}", query, e); throw new RuntimeException("Ошибка при выгрузке данных, запрос: " + request.getRequestURL(), e);
} }
return null;
} }
private File writeSingleSqlCsvFile(List<String[]> results, String prefix, String suffix) throws IOException { private File writeSingleSqlCsvFile(List<String[]> results, String prefix, String suffix) throws IOException {
@ -149,39 +193,6 @@ public class DownloadService {
return csvFile; return csvFile;
} }
private File writeSqlResultsToZip(List<String[]> results) throws IOException {
List<File> csvFiles = new ArrayList<>();
// Extract header
String[] headers = results.isEmpty() ? new String[0] : results.get(0);
int fileIndex = 1;
int currentRowIndex = 1; // Start from 1 to skip header in original results
while (currentRowIndex < results.size()) {
List<String[]> chunk = new ArrayList<>();
// Always add headers as first row
chunk.add(headers);
// Add data rows up to MAX_ROWS_PER_CSV (including the header)
int chunkEndIndex = Math.min(currentRowIndex + MAX_ROWS_PER_CSV - 1, results.size());
for (int i = currentRowIndex; i < chunkEndIndex; i++) {
chunk.add(results.get(i));
}
File csvFile = writeSingleSqlCsvFile(chunk, "download-part" + fileIndex + "-", ".csv");
if (csvFile != null) {
csvFiles.add(csvFile);
fileIndex++;
}
currentRowIndex = chunkEndIndex;
}
// Create ZIP archive
return createZipArchive(csvFiles, "download-");
}
private List<Map<String, Object>> executeSelectAqlRequest(ArangoDatabase arangoDb, private List<Map<String, Object>> executeSelectAqlRequest(ArangoDatabase arangoDb,
String downloadRequestEntitySelectorQuery, String downloadRequestEntitySelectorQuery,
List<String> ids, LocalDate startDate, LocalDate endDate, Boolean emptyIdsAllowed, Boolean emptyDatesAllowed) { List<String> ids, LocalDate startDate, LocalDate endDate, Boolean emptyIdsAllowed, Boolean emptyDatesAllowed) {
@ -316,10 +327,10 @@ public class DownloadService {
return results; return results;
} }
private File writeResultsToCsv(List<Map<String, Object>> results) { private File writeResultsToCsv(List<Map<String, Object>> results, int limit) {
try { try {
// If results fit in a single file, create one CSV // If results fit in a single file, create one CSV
if (results.size() <= MAX_ROWS_PER_CSV) { if (results.size() <= limit) {
return writeSingleCsvFile(results, "arango-download-", ".csv"); return writeSingleCsvFile(results, "arango-download-", ".csv");
} }
@ -327,8 +338,8 @@ public class DownloadService {
List<File> csvFiles = new ArrayList<>(); List<File> csvFiles = new ArrayList<>();
int fileIndex = 1; int fileIndex = 1;
for (int i = 0; i < results.size(); i += MAX_ROWS_PER_CSV) { for (int i = 0; i < results.size(); i += limit) {
int endIndex = Math.min(i + MAX_ROWS_PER_CSV, results.size()); int endIndex = Math.min(i + limit, results.size());
List<Map<String, Object>> chunk = results.subList(i, endIndex); List<Map<String, Object>> chunk = results.subList(i, endIndex);
File csvFile = writeSingleCsvFile(chunk, "arango-download-part" + fileIndex + "-", ".csv"); File csvFile = writeSingleCsvFile(chunk, "arango-download-part" + fileIndex + "-", ".csv");
@ -430,11 +441,12 @@ public class DownloadService {
return field.replace("\"", "\"\""); return field.replace("\"", "\"\"");
} }
private Map<String, Object> buildSqlQuery(SQLDownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults) { private Map<String, Object> buildSqlQuery(SQLDownloadRequest request, List<String> ids,
LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults, int limit,
int offset) {
Boolean emptyIdsAllowed = validationResults.get(ValidationService.IS_EMPTY_IDS_ALLOWED); Boolean emptyIdsAllowed = validationResults.get(ValidationService.IS_EMPTY_IDS_ALLOWED);
Boolean emptyDatesAllowed = validationResults.get(ValidationService.IS_EMPTY_DATES_ALLOWED); Boolean emptyDatesAllowed = validationResults.get(ValidationService.IS_EMPTY_DATES_ALLOWED);
Map<String, Object> resultMap = new HashMap<>(); Map<String, Object> resultMap = new HashMap<>();
String endpointArguments; String endpointArguments;
@ -445,46 +457,41 @@ public class DownloadService {
} }
if (emptyIdsAllowed != null && emptyIdsAllowed) { if (emptyIdsAllowed != null && emptyIdsAllowed) {
resultMap.put("requestURL", requestURL.replace("where id in ${endpointArguments}", "")); requestURL = requestURL.replace("where id in ${endpointArguments}", "");
return resultMap;
} }
else if (ids == null || ids.isEmpty()) {
if (ids == null || ids.isEmpty()) { requestURL = requestURL.replace("where id in ${endpointArguments}", "");
resultMap.put("requestURL", requestURL.replace("where id in ${endpointArguments}", ""));
return resultMap;
} }
else {
if (requestURL.contains(":=")) { if (requestURL.contains(":=")) {
endpointArguments = "'{" + ids.stream() endpointArguments =
.map(String::trim) "'{" + ids.stream().map(String::trim).collect(Collectors.joining(", ")) + "}'";
.collect(Collectors.joining(", ")) + "}'"; }
} else { else {
endpointArguments = "(" + ids.stream() endpointArguments =
.map(s -> "'" + s.trim() + "'") "(" + ids.stream().map(s -> "'" + s.trim() + "'").collect(Collectors.joining(", "))
.collect(Collectors.joining(", ")) + ")"; + ")";
}
requestURL = requestURL.replace("${endpointArguments}", endpointArguments);
} }
Map<String, List<String>> params = new HashMap<>();
if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) { if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) {
for (RequestArgument argument : request.getRequestArguments()) { for (RequestArgument argument : request.getRequestArguments()) {
if (argument.getRequestArgumentConnectionParams() != null) { if (argument.getRequestArgumentConnectionParams() != null) {
try (Connection connection = DatabaseConnection.getConnection( try (Connection connection = DatabaseConnection.getConnection(
argument.getRequestArgumentConnectionParams())) { argument.getRequestArgumentConnectionParams())) {
String query = argument.getRequestArgumentURL(); String query = argument.getRequestArgumentURL();
List<String> result = fetchFileListFromDatabaseSQL(connection, query); int subOffset = 0;
List<String> aggregatedIds = new ArrayList<>();
resultMap.put("ids", result); while (true) {
String paginatedQuery = query + " LIMIT " + limit + " OFFSET " + subOffset;
List<String> result = fetchFileListFromDatabaseSQL(connection, paginatedQuery);
if (result != null && !result.isEmpty()) { if (result.isEmpty()) break;
String resultSet = "(" + result.stream() aggregatedIds.addAll(result);
.map(s -> "'" + s.trim() + "'") if (result.size() < limit) break;
.collect(Collectors.joining(", ")) + ")"; subOffset += limit;
requestURL = requestURL.replace("${" + argument.getRequestArgumentName() + "}", resultSet);
} }
params.put(argument.getRequestArgumentName(), aggregatedIds);
} }
catch (SQLException e) { catch (SQLException e) {
logger.error("Failed to execute query for RequestArgument", e); logger.error("Failed to execute query for RequestArgument", e);
@ -492,10 +499,35 @@ public class DownloadService {
} }
} }
} }
if (!params.isEmpty()) {
if (params.size() == 1) {
Map.Entry<String, List<String>> entry = params.entrySet().iterator().next();
String key = entry.getKey();
List<String> value = entry.getValue();
resultMap.put(REQUEST_WITH_ADDITIONAL_ID, new SqlDownloadBuildQueryResponse(requestURL, key, value));
return resultMap;
}
else {
for (Map.Entry<String, List<String>> entry : params.entrySet()) {
String resultSet = "(" + entry.getValue().stream()
.map(s -> "'" + s.trim() + "'")
.collect(Collectors.joining(", ")) + ")";
requestURL = requestURL.replace("${" + entry.getKey() + "}",
resultSet
);
}
requestURL =
requestURL.substring(0, requestURL.length() - 1) + " LIMIT " + limit + " OFFSET "
+ offset + ";";
}
}
else {
requestURL =
requestURL.substring(0, requestURL.length() - 1) + " LIMIT " + limit + " OFFSET "
+ offset + ";";
}
resultMap.put("requestURL", requestURL resultMap.put("requestURL", requestURL);
.replace("${endpointArguments}", endpointArguments));
return resultMap; return resultMap;
} }
@ -549,4 +581,10 @@ public class DownloadService {
return results; return results;
} }
private <T> Collection<List<T>> partitionList(List<T> list, int size) {
return IntStream.range(0, (list.size() + size - 1) / size)
.mapToObj(i -> list.subList(i * size, Math.min((i + 1) * size, list.size())))
.map(ArrayList::new)
.collect(Collectors.toList());
}
} }