ERVU-580: Add new params
This commit is contained in:
parent 2a1d34ac10
commit dca23cab08
6 changed files with 189 additions and 108 deletions
@@ -25,8 +25,10 @@ import org.springframework.http.HttpStatus;
 import org.springframework.http.MediaType;
 import org.springframework.http.ResponseEntity;
 import org.springframework.stereotype.Service;
+import org.springframework.util.StreamUtils;
 import org.springframework.web.client.HttpClientErrorException;
 import org.springframework.web.client.RestTemplate;
+import org.springframework.core.io.Resource;
 import ru.micord.ervu_eks.exception.ConfigExecutorException;

 /**
@@ -52,25 +54,36 @@ public class ConfigExecutorService {
     HttpEntity<ExportDataRequest> entity = new HttpEntity<>(request, headers);

     try {
-      ResponseEntity<byte[]> response = restTemplate.exchange(url.concat("/").concat("downloadCSV"),
-          HttpMethod.POST, entity, byte[].class
-      );
-
-      String content = "";
-      if (response != null && response.getBody() != null) {
-        content = Base64.getEncoder().encodeToString(response.getBody());
+      ResponseEntity<Resource> response = restTemplate.exchange(
+          url.concat("/").concat("downloadCSV"), HttpMethod.POST, entity, Resource.class);
+      if (response.getBody() == null) {
+        throw new RuntimeException("Empty response from downloadCSV");
+      }
+
+      byte[] fileBytes = StreamUtils.copyToByteArray(response.getBody().getInputStream());
+      String content = Base64.getEncoder().encodeToString(fileBytes);
+      String fileExtension = ".csv";
+      List<String> contentDisposition = response.getHeaders().get(HttpHeaders.CONTENT_DISPOSITION);
+      if (contentDisposition != null && !contentDisposition.isEmpty()) {
+        String disposition = contentDisposition.get(0);
+        int idx = disposition.indexOf("filename=");
+        if (idx != -1) {
+          String fileNameFromHeader = disposition.substring(idx + 9).replace("\"", "");
+          int dotIndex = fileNameFromHeader.lastIndexOf(".");
+          if (dotIndex != -1) {
+            fileExtension = fileNameFromHeader.substring(dotIndex);
+          }
+        }
+      }
+      String fileName =
+          request.getType() + "_" + new SimpleDateFormat("dd.MM.yyyy").format(new Date())
+              + fileExtension;
       FileModel fileModel = new FileModel();
       fileModel.setFileContent(content);
-      fileModel.setFileExtension(".csv");
-      fileModel.setFileName(
-          request.getType() + "_" + new SimpleDateFormat("dd.MM.yyyy").format(new Date()) + ".csv");
+      fileModel.setFileExtension(fileExtension);
+      fileModel.setFileName(fileName);
       return fileModel;

-    } catch (HttpClientErrorException e) {
+    }
+    catch (HttpClientErrorException e) {
       if (e.getStatusCode() == HttpStatus.BAD_REQUEST) {
         Map<String, Object> responseMap = new Gson().fromJson(e.getResponseBodyAsString(),
             new TypeToken<Map<String, Object>>() {
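For context, a minimal standalone sketch of how the new ConfigExecutorService code derives the file extension from the Content-Disposition header, falling back to ".csv". The class and method names here are illustrative and not part of the commit:

```java
import java.util.List;

public class ContentDispositionExample {

    // Falls back to ".csv" when the header is missing or carries no usable filename,
    // mirroring the default used in the changed service code.
    static String resolveExtension(List<String> contentDisposition) {
        String fileExtension = ".csv";
        if (contentDisposition != null && !contentDisposition.isEmpty()) {
            String disposition = contentDisposition.get(0);
            int idx = disposition.indexOf("filename=");
            if (idx != -1) {
                String fileName = disposition.substring(idx + 9).replace("\"", "");
                int dotIndex = fileName.lastIndexOf(".");
                if (dotIndex != -1) {
                    fileExtension = fileName.substring(dotIndex);
                }
            }
        }
        return fileExtension;
    }

    public static void main(String[] args) {
        System.out.println(resolveExtension(List.of("attachment; filename=\"export.xlsx\""))); // .xlsx
        System.out.println(resolveExtension(null));                                            // .csv
    }
}
```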
@@ -0,0 +1,18 @@
+package org.micord.models;
+
+import java.util.List;
+
+import lombok.AllArgsConstructor;
+import lombok.Getter;
+
+/**
+ * @author Eduard Tihomirov
+ */
+@Getter
+@AllArgsConstructor
+public class SqlDownloadBuildQueryResponse {
+
+  private String sql;
+  private String paramName;
+  private List<String> params;
+}
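The new SqlDownloadBuildQueryResponse is a Lombok-generated value holder. A small usage sketch, assuming Lombok annotation processing is enabled and the model class above is on the classpath; the demo class name and query text are made up:

```java
import java.util.List;
import org.micord.models.SqlDownloadBuildQueryResponse;

public class SqlDownloadBuildQueryResponseDemo {
    public static void main(String[] args) {
        // @AllArgsConstructor generates a constructor in field order: (sql, paramName, params).
        SqlDownloadBuildQueryResponse response = new SqlDownloadBuildQueryResponse(
                "select * from docs where doc_id in ${docIds}", "docIds", List.of("1", "2", "3"));
        // @Getter generates the accessors that DownloadService reads.
        System.out.println(response.getSql());
        System.out.println(response.getParamName() + " -> " + response.getParams());
    }
}
```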
@@ -19,6 +19,9 @@ public abstract class BaseRequest {
   @XmlElement(name = "RequestURL")
   private String requestURL;

+  @XmlElement(name = "RequestArgumentLimit")
+  private Integer requestArgumentLimit;
+
   @XmlElement(name = "RequestValidationRules")
   private RequestValidationRules requestValidationRules;

@@ -18,6 +18,7 @@ public class RequestArgument {
   private String requestArgumentName;;
   private String requestArgumentURL;
   private SqlConnectionParams requestArgumentConnectionParams;
+  private Integer requestArgumentLimit;

   @XmlAttribute(name = "type")
   public RequestArgumentType getType() {
@@ -39,4 +40,9 @@ public class RequestArgument {
     return requestArgumentConnectionParams;
   }

+  @XmlElement(name = "RequestArgumentLimit")
+  public Integer getRequestArgumentLimit() {
+    return requestArgumentLimit;
+  }
+
 }
@@ -24,6 +24,9 @@ public class AQLDownloadRequest extends BaseDownloadRequest {
   @XmlElement(name = "DownloadRequestEntitySelectorQuery")
   private String downloadRequestEntitySelectorQuery;

+  @XmlElement(name = "AglDownloadLimit")
+  private Integer aqlDownloadLimit;
+
   @XmlElementWrapper(name = "AqlRequestCollections")
   @XmlElement(name = "AqlRequestCollection")
   private List<AqlRequestCollection> aqlRequestCollections;
@@ -7,6 +7,7 @@ import com.arangodb.model.AqlQueryOptions;
 import org.micord.config.ArangoDBConnection;
 import org.micord.config.DatabaseConnection;
 import org.micord.exceptions.NoDownloadReportRecordsException;
+import org.micord.models.SqlDownloadBuildQueryResponse;
 import org.micord.models.requests.RequestParameters;
 import org.micord.models.requests.downloads.AQLDownloadRequest;
 import org.micord.models.requests.downloads.BaseDownloadRequest;
@@ -25,6 +26,7 @@ import java.sql.SQLException;
 import java.time.LocalDate;
 import java.util.*;
 import java.util.stream.Collectors;
+import java.util.stream.IntStream;
 import java.util.zip.ZipEntry;
 import java.util.zip.ZipOutputStream;

@@ -32,7 +34,7 @@ import java.util.zip.ZipOutputStream;
 public class DownloadService {

   private static final Logger logger = LoggerFactory.getLogger(DownloadService.class);
-  private static final int MAX_ROWS_PER_CSV = 600000;
+  private static final String REQUEST_WITH_ADDITIONAL_ID = "requestWithAdditionalId";

   public File download(BaseDownloadRequest selectedRequest, List<String> ids, RequestParameters parameters, Map<String, Boolean> validationResults) throws SQLException {
     LocalDate startDate = parameters.getStartDate();
@@ -70,7 +72,7 @@ public class DownloadService {
     //      );
     //    }

-      return writeResultsToCsv(entities);
+      return writeResultsToCsv(entities, request.getAqlDownloadLimit());

     } catch (ArangoDBException e) {
       logger.error("Error connecting to ArangoDB or executing AQL query: {}", e.getMessage(), e);
@@ -107,30 +109,72 @@ public class DownloadService {
     //    return writeResultsToCsv(results);
   }

-  private File processSqlDownloadRequest(SQLDownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults) {
-
-    Map<String, Object> query = buildSqlQuery(request, ids, startDate, endDate, validationResults);
+  private File processSqlDownloadRequest(SQLDownloadRequest request,
+      List<String> ids, LocalDate startDate, LocalDate endDate,
+      Map<String, Boolean> validationResults) {
+    List<File> allFiles = new ArrayList<>();
+    int offset = 0;
+    int pageIndex = 1;
+    int limit = request.getRequestArgumentLimit();
     try (Connection connection = DatabaseConnection.getConnection(
-        request.getSqlConnectionParams())) {
-      String requestURL = (String) query.get("requestURL");
-
-      List<String[]> results = executeSqlQuery(connection, requestURL);
-
-      // Check if we need to split into multiple files
-      // Note: results includes header row, so we check size - 1 for actual data rows
-      if (results.size() - 1 <= MAX_ROWS_PER_CSV) {
-        // Single CSV file
-        return writeSingleSqlCsvFile(results, "download-", ".csv");
-      } else {
-        // Multiple CSV files in ZIP
-        return writeSqlResultsToZip(results);
+        request.getSqlConnectionParams())) {
+      Map<String, Object> query = buildSqlQuery(request, ids, startDate, endDate,
+          validationResults, limit, offset
+      );
+      if (query.get(REQUEST_WITH_ADDITIONAL_ID) != null) {
+        SqlDownloadBuildQueryResponse response = (SqlDownloadBuildQueryResponse) query.get(REQUEST_WITH_ADDITIONAL_ID);
+        String url = response.getSql();
+        String paramName = response.getParamName();
+        Collection<List<String>> pages = partitionList(response.getParams(), limit);
+        for (List<String> page : pages) {
+          String resultSet = "(" + page.stream()
+              .map(s -> "'" + s.trim() + "'")
+              .collect(Collectors.joining(", ")) + ")";
+          url = url.replace("${" + paramName + "}",
+              resultSet
+          );
+          List<String[]> results = new ArrayList<>(executeSqlQuery(connection, url));
+          if (results.size() <= 1) {
+            break;
+          }
+          File file = writeSingleSqlCsvFile(results, "download-part" + pageIndex + "-", ".csv");
+          allFiles.add(file);
+          pageIndex++;
+        }
+      }
+      else {
+        String requestURL = (String) query.get("requestURL");
+        while (true) {
+          List<String[]> results = new ArrayList<>(executeSqlQuery(connection, requestURL));
+          if (results.size() <= 1) {
+            break;
+          }
+          File file = writeSingleSqlCsvFile(results, "download-part" + pageIndex + "-", ".csv");
+          allFiles.add(file);
+          if (results.size() - 1 < limit) {
+            break;
+          }
+          String oldPagination = " OFFSET " + offset + ";";
+          offset += limit;
+          String newPagination = " OFFSET " + offset + ";";
+          requestURL = requestURL.replace(oldPagination, newPagination);
+          pageIndex++;
+
+        }
+      }
+      if (allFiles.isEmpty()) {
+        return writeSingleSqlCsvFile(new ArrayList<>(), "download-part 0", ".csv");
+      }
+      if (allFiles.size() == 1) {
+        return allFiles.get(0);
+      }
+      else {
+        return createZipArchive(allFiles, "download-");
       }
     }
     catch (SQLException | IOException e) {
       logger.error("SQL execution failed for query: {}", query, e);
       throw new RuntimeException("Ошибка при выгрузке данных, запрос: " + request.getRequestURL(), e);
     }
+    return null;
   }

   private File writeSingleSqlCsvFile(List<String[]> results, String prefix, String suffix) throws IOException {
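The new while-loop pages through the SQL result set by rewriting the trailing " OFFSET n;" fragment of the query string and stopping on the first short or empty page. A simplified, self-contained sketch of that loop; fetchPage, the query text, and the 5-row table are stand-ins, not code from the commit:

```java
import java.util.ArrayList;
import java.util.List;

public class OffsetPagingSketch {

    // Stand-in for executeSqlQuery: pretends the table holds 5 rows in total.
    static List<String> fetchPage(String sql, int offset, int limit) {
        System.out.println("executing: " + sql);
        List<String> rows = new ArrayList<>();
        for (int i = offset; i < Math.min(offset + limit, 5); i++) {
            rows.add("row-" + i);
        }
        return rows;
    }

    public static void main(String[] args) {
        int limit = 2;
        int offset = 0;
        String sql = "select id from items LIMIT " + limit + " OFFSET " + offset + ";";
        while (true) {
            List<String> rows = fetchPage(sql, offset, limit);
            if (rows.isEmpty()) {
                break;              // nothing left -> stop
            }
            System.out.println("page: " + rows);
            if (rows.size() < limit) {
                break;              // short page -> this was the last one
            }
            // Rewrite only the pagination fragment, as the changed loop does.
            String oldPagination = " OFFSET " + offset + ";";
            offset += limit;
            sql = sql.replace(oldPagination, " OFFSET " + offset + ";");
        }
    }
}
```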
@@ -149,39 +193,6 @@ public class DownloadService {
     return csvFile;
   }

-  private File writeSqlResultsToZip(List<String[]> results) throws IOException {
-    List<File> csvFiles = new ArrayList<>();
-
-    // Extract header
-    String[] headers = results.isEmpty() ? new String[0] : results.get(0);
-
-    int fileIndex = 1;
-    int currentRowIndex = 1; // Start from 1 to skip header in original results
-
-    while (currentRowIndex < results.size()) {
-      List<String[]> chunk = new ArrayList<>();
-      // Always add headers as first row
-      chunk.add(headers);
-
-      // Add data rows up to MAX_ROWS_PER_CSV (including the header)
-      int chunkEndIndex = Math.min(currentRowIndex + MAX_ROWS_PER_CSV - 1, results.size());
-      for (int i = currentRowIndex; i < chunkEndIndex; i++) {
-        chunk.add(results.get(i));
-      }
-
-      File csvFile = writeSingleSqlCsvFile(chunk, "download-part" + fileIndex + "-", ".csv");
-      if (csvFile != null) {
-        csvFiles.add(csvFile);
-        fileIndex++;
-      }
-
-      currentRowIndex = chunkEndIndex;
-    }
-
-    // Create ZIP archive
-    return createZipArchive(csvFiles, "download-");
-  }
-
   private List<Map<String, Object>> executeSelectAqlRequest(ArangoDatabase arangoDb,
       String downloadRequestEntitySelectorQuery,
       List<String> ids, LocalDate startDate, LocalDate endDate, Boolean emptyIdsAllowed, Boolean emptyDatesAllowed) {
@@ -316,10 +327,10 @@ public class DownloadService {
     return results;
   }

-  private File writeResultsToCsv(List<Map<String, Object>> results) {
+  private File writeResultsToCsv(List<Map<String, Object>> results, int limit) {
     try {
       // If results fit in a single file, create one CSV
-      if (results.size() <= MAX_ROWS_PER_CSV) {
+      if (results.size() <= limit) {
         return writeSingleCsvFile(results, "arango-download-", ".csv");
       }

@@ -327,8 +338,8 @@ public class DownloadService {
       List<File> csvFiles = new ArrayList<>();
       int fileIndex = 1;

-      for (int i = 0; i < results.size(); i += MAX_ROWS_PER_CSV) {
-        int endIndex = Math.min(i + MAX_ROWS_PER_CSV, results.size());
+      for (int i = 0; i < results.size(); i += limit) {
+        int endIndex = Math.min(i + limit, results.size());
         List<Map<String, Object>> chunk = results.subList(i, endIndex);

         File csvFile = writeSingleCsvFile(chunk, "arango-download-part" + fileIndex + "-", ".csv");
@@ -430,11 +441,12 @@ public class DownloadService {
     return field.replace("\"", "\"\"");
   }

-  private Map<String, Object> buildSqlQuery(SQLDownloadRequest request, List<String> ids, LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults) {
+  private Map<String, Object> buildSqlQuery(SQLDownloadRequest request, List<String> ids,
+      LocalDate startDate, LocalDate endDate, Map<String, Boolean> validationResults, int limit,
+      int offset) {
     Boolean emptyIdsAllowed = validationResults.get(ValidationService.IS_EMPTY_IDS_ALLOWED);
     Boolean emptyDatesAllowed = validationResults.get(ValidationService.IS_EMPTY_DATES_ALLOWED);


     Map<String, Object> resultMap = new HashMap<>();
     String endpointArguments;
@@ -445,46 +457,41 @@ public class DownloadService {
     }

     if (emptyIdsAllowed != null && emptyIdsAllowed) {
-      resultMap.put("requestURL", requestURL.replace("where id in ${endpointArguments}", ""));
-      return resultMap;
+      requestURL = requestURL.replace("where id in ${endpointArguments}", "");
     }
-
-    if (ids == null || ids.isEmpty()) {
-      resultMap.put("requestURL", requestURL.replace("where id in ${endpointArguments}", ""));
-      return resultMap;
+    else if (ids == null || ids.isEmpty()) {
+      requestURL = requestURL.replace("where id in ${endpointArguments}", "");
     }
-
-    if (requestURL.contains(":=")) {
-      endpointArguments = "'{" + ids.stream()
-          .map(String::trim)
-          .collect(Collectors.joining(", ")) + "}'";
-    } else {
-      endpointArguments = "(" + ids.stream()
-          .map(s -> "'" + s.trim() + "'")
-          .collect(Collectors.joining(", ")) + ")";
+    else {
+      if (requestURL.contains(":=")) {
+        endpointArguments =
+            "'{" + ids.stream().map(String::trim).collect(Collectors.joining(", ")) + "}'";
+      }
+      else {
+        endpointArguments =
+            "(" + ids.stream().map(s -> "'" + s.trim() + "'").collect(Collectors.joining(", "))
+                + ")";
+      }
+      requestURL = requestURL.replace("${endpointArguments}", endpointArguments);
     }

+    Map<String, List<String>> params = new HashMap<>();
     if (request.getRequestArguments() != null && !request.getRequestArguments().isEmpty()) {
       for (RequestArgument argument : request.getRequestArguments()) {

         if (argument.getRequestArgumentConnectionParams() != null) {
           try (Connection connection = DatabaseConnection.getConnection(
-              argument.getRequestArgumentConnectionParams())) {
+            argument.getRequestArgumentConnectionParams())) {
             String query = argument.getRequestArgumentURL();
-            List<String> result = fetchFileListFromDatabaseSQL(connection, query);
-
-            resultMap.put("ids", result);
-
-
-            if (result != null && !result.isEmpty()) {
-              String resultSet = "(" + result.stream()
-                  .map(s -> "'" + s.trim() + "'")
-                  .collect(Collectors.joining(", ")) + ")";
-
-              requestURL = requestURL.replace("${" + argument.getRequestArgumentName() + "}", resultSet);
-
+            int subOffset = 0;
+            List<String> aggregatedIds = new ArrayList<>();
+            while (true) {
+              String paginatedQuery = query + " LIMIT " + limit + " OFFSET " + subOffset;
+              List<String> result = fetchFileListFromDatabaseSQL(connection, paginatedQuery);
+              if (result.isEmpty()) break;
+              aggregatedIds.addAll(result);
+              if (result.size() < limit) break;
+              subOffset += limit;
+            }
+
+            params.put(argument.getRequestArgumentName(), aggregatedIds);
           }
           catch (SQLException e) {
             logger.error("Failed to execute query for RequestArgument", e);
@@ -492,10 +499,35 @@ public class DownloadService {
         }
       }
     }
+    if (!params.isEmpty()) {
+      if (params.size() == 1) {
+        Map.Entry<String, List<String>> entry = params.entrySet().iterator().next();
+        String key = entry.getKey();
+        List<String> value = entry.getValue();
+        resultMap.put(REQUEST_WITH_ADDITIONAL_ID, new SqlDownloadBuildQueryResponse(requestURL, key, value));
+        return resultMap;
+      }
+      else {
+        for (Map.Entry<String, List<String>> entry : params.entrySet()) {
+          String resultSet = "(" + entry.getValue().stream()
+              .map(s -> "'" + s.trim() + "'")
+              .collect(Collectors.joining(", ")) + ")";
+          requestURL = requestURL.replace("${" + entry.getKey() + "}",
+              resultSet
+          );
+        }
+        requestURL =
+            requestURL.substring(0, requestURL.length() - 1) + " LIMIT " + limit + " OFFSET "
+                + offset + ";";
+      }
+    }
+    else {
+      requestURL =
+          requestURL.substring(0, requestURL.length() - 1) + " LIMIT " + limit + " OFFSET "
+              + offset + ";";
+    }

-    resultMap.put("requestURL", requestURL
-        .replace("${endpointArguments}", endpointArguments));
-
+    resultMap.put("requestURL", requestURL);
     return resultMap;
   }

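buildSqlQuery now injects each resolved argument list as a quoted SQL IN-list and appends the LIMIT/OFFSET clause by trimming the statement's last character (assumed to be the terminating semicolon). A self-contained sketch of that string handling; the query text, ids, and class name are made up for illustration:

```java
import java.util.List;
import java.util.stream.Collectors;

public class PlaceholderSubstitutionSketch {
    public static void main(String[] args) {
        String requestURL = "select * from docs where doc_id in ${docIds};";
        List<String> ids = List.of(" 101", "102 ", "103");

        // Quote and trim each id, then join into an SQL IN-list.
        String resultSet = "(" + ids.stream()
                .map(s -> "'" + s.trim() + "'")
                .collect(Collectors.joining(", ")) + ")";
        requestURL = requestURL.replace("${docIds}", resultSet);

        // Drop the trailing ';' and append the pagination clause, as buildSqlQuery does.
        int limit = 100;
        int offset = 0;
        requestURL = requestURL.substring(0, requestURL.length() - 1)
                + " LIMIT " + limit + " OFFSET " + offset + ";";

        System.out.println(requestURL);
        // select * from docs where doc_id in ('101', '102', '103') LIMIT 100 OFFSET 0;
    }
}
```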
@@ -549,4 +581,10 @@ public class DownloadService {
     return results;
   }

+  private <T> Collection<List<T>> partitionList(List<T> list, int size) {
+    return IntStream.range(0, (list.size() + size - 1) / size)
+        .mapToObj(i -> list.subList(i * size, Math.min((i + 1) * size, list.size())))
+        .map(ArrayList::new)
+        .collect(Collectors.toList());
+  }
 }
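The new partitionList helper splits the resolved parameter list into fixed-size chunks before each chunk is substituted into the query. A short sketch of the same partitioning scheme; the demo class name is illustrative:

```java
import java.util.ArrayList;
import java.util.Collection;
import java.util.List;
import java.util.stream.Collectors;
import java.util.stream.IntStream;

public class PartitionListSketch {

    // Same scheme as DownloadService.partitionList: ceil(list.size() / size) sublists,
    // each copied into its own ArrayList so the chunks are independent of the source list.
    static <T> Collection<List<T>> partitionList(List<T> list, int size) {
        return IntStream.range(0, (list.size() + size - 1) / size)
                .mapToObj(i -> list.subList(i * size, Math.min((i + 1) * size, list.size())))
                .map(ArrayList::new)
                .collect(Collectors.toList());
    }

    public static void main(String[] args) {
        System.out.println(partitionList(List.of("a", "b", "c", "d", "e"), 2));
        // [[a, b], [c, d], [e]]
    }
}
```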